influxdb/influxql/parser_test.go

package influxql_test
import (
"encoding/json"
"fmt"
"reflect"
"regexp"
"strings"
"testing"
"time"
"github.com/influxdb/influxdb/influxql"
)
// Ensure the parser can parse a multi-statement query.
func TestParser_ParseQuery(t *testing.T) {
s := `SELECT a FROM b; SELECT c FROM d`
q, err := influxql.NewParser(strings.NewReader(s)).ParseQuery()
if err != nil {
t.Fatalf("unexpected error: %s", err)
} else if len(q.Statements) != 2 {
t.Fatalf("unexpected statement count: %d", len(q.Statements))
}
}
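// Ensure the parser can parse a query with a trailing semicolon.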
func TestParser_ParseQuery_TrailingSemicolon(t *testing.T) {
s := `SELECT value FROM cpu;`
q, err := influxql.NewParser(strings.NewReader(s)).ParseQuery()
if err != nil {
t.Fatalf("unexpected error: %s", err)
} else if len(q.Statements) != 1 {
t.Fatalf("unexpected statement count: %d", len(q.Statements))
}
}
// Ensure the parser can parse an empty query.
func TestParser_ParseQuery_Empty(t *testing.T) {
q, err := influxql.NewParser(strings.NewReader(``)).ParseQuery()
if err != nil {
t.Fatalf("unexpected error: %s", err)
} else if len(q.Statements) != 0 {
t.Fatalf("unexpected statement count: %d", len(q.Statements))
}
}
// Ensure the parser can return an error from a malformed statement.
func TestParser_ParseQuery_ParseError(t *testing.T) {
_, err := influxql.NewParser(strings.NewReader(`SELECT`)).ParseQuery()
if err == nil || err.Error() != `found EOF, expected identifier, string, number, bool at line 1, char 8` {
t.Fatalf("unexpected error: %s", err)
}
}
// Ensure the parser can parse strings into Statement ASTs.
func TestParser_ParseStatement(t *testing.T) {
// For use in various tests.
now := time.Now()
var tests = []struct {
skip bool
s string
stmt influxql.Statement
err string
}{
// SELECT * statement
{
s: `SELECT * FROM myseries`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.Wildcard{}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
{
s: `SELECT * FROM myseries GROUP BY *`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.Wildcard{}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Wildcard{}}},
},
},
{
s: `SELECT field1, * FROM myseries GROUP BY *`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.VarRef{Val: "field1"}},
{Expr: &influxql.Wildcard{}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Wildcard{}}},
},
},
{
s: `SELECT *, field1 FROM myseries GROUP BY *`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.Wildcard{}},
{Expr: &influxql.VarRef{Val: "field1"}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Wildcard{}}},
},
},
// SELECT statement
{
s: fmt.Sprintf(`SELECT mean(field1), sum(field2) ,count(field3) AS field_x FROM myseries WHERE host = 'hosta.influxdb.org' and time > '%s' GROUP BY time(10h) ORDER BY DESC LIMIT 20 OFFSET 10;`, now.UTC().Format(time.RFC3339Nano)),
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "mean", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}},
{Expr: &influxql.Call{Name: "sum", Args: []influxql.Expr{&influxql.VarRef{Val: "field2"}}}},
{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.VarRef{Val: "field3"}}}, Alias: "field_x"},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Condition: &influxql.BinaryExpr{
Op: influxql.AND,
LHS: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "host"},
RHS: &influxql.StringLiteral{Val: "hosta.influxdb.org"},
},
RHS: &influxql.BinaryExpr{
Op: influxql.GT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.TimeLiteral{Val: now.UTC()},
},
},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Call{Name: "time", Args: []influxql.Expr{&influxql.DurationLiteral{Val: 10 * time.Hour}}}}},
SortFields: []*influxql.SortField{
{Ascending: false},
},
Limit: 20,
Offset: 10,
},
},
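// SELECT statement with quoted field name and alias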
{
s: `SELECT "foo.bar.baz" AS foo FROM myseries`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.VarRef{Val: "foo.bar.baz"}, Alias: "foo"},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
{
s: `SELECT "foo.bar.baz" AS foo FROM foo`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.VarRef{Val: "foo.bar.baz"}, Alias: "foo"},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "foo"}},
},
},
// derivative
{
s: `SELECT derivative(field1, 1h) FROM myseries;`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "derivative", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}, &influxql.DurationLiteral{Val: time.Hour}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
{
s: fmt.Sprintf(`SELECT derivative(field1, 1h) FROM myseries WHERE time > '%s'`, now.UTC().Format(time.RFC3339Nano)),
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "derivative", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}, &influxql.DurationLiteral{Val: time.Hour}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Condition: &influxql.BinaryExpr{
Op: influxql.GT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.TimeLiteral{Val: now.UTC()},
},
},
},
{
s: `SELECT derivative(mean(field1), 1h) FROM myseries;`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "derivative", Args: []influxql.Expr{&influxql.Call{Name: "mean", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}, &influxql.DurationLiteral{Val: time.Hour}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
{
s: `SELECT derivative(mean(field1)) FROM myseries;`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "derivative", Args: []influxql.Expr{&influxql.Call{Name: "mean", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
// SELECT statement (lowercase)
{
s: `select my_field from myseries`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.VarRef{Val: "my_field"}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
// SELECT statement (lowercase) with quoted field
{
s: `select 'my_field' from myseries`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.StringLiteral{Val: "my_field"}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
// SELECT statement with multiple ORDER BY fields
{
skip: true,
s: `SELECT field1 FROM myseries ORDER BY ASC, field1, field2 DESC LIMIT 10`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.VarRef{Val: "field1"}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
SortFields: []*influxql.SortField{
{Ascending: true},
{Name: "field1"},
{Name: "field2"},
},
Limit: 10,
},
},
// SELECT statement with SLIMIT and SOFFSET
{
s: `SELECT field1 FROM myseries SLIMIT 10 SOFFSET 5`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.VarRef{Val: "field1"}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
SLimit: 10,
SOffset: 5,
},
},
// SELECT * FROM cpu WHERE host = 'serverC' AND region =~ /.*west.*/
{
s: `SELECT * FROM cpu WHERE host = 'serverC' AND region =~ /.*west.*/`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.AND,
LHS: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "host"},
RHS: &influxql.StringLiteral{Val: "serverC"},
},
RHS: &influxql.BinaryExpr{
Op: influxql.EQREGEX,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.RegexLiteral{Val: regexp.MustCompile(".*west.*")},
},
},
},
},
// select percentile statements
{
s: `select percentile("field1", 2.0) from cpu`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "percentile", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}, &influxql.NumberLiteral{Val: 2.0}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
},
},
// select top statements
{
s: `select top("field1", 2) from cpu`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "top", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}, &influxql.NumberLiteral{Val: 2}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
},
},
{
s: `select top(field1, 2) from cpu`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "top", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}, &influxql.NumberLiteral{Val: 2}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
},
},
{
s: `select top(field1, 2), tag1 from cpu`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "top", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}, &influxql.NumberLiteral{Val: 2}}}},
{Expr: &influxql.VarRef{Val: "tag1"}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
},
},
{
s: `select top(field1, tag1, 2), tag1 from cpu`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "top", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}, &influxql.VarRef{Val: "tag1"}, &influxql.NumberLiteral{Val: 2}}}},
{Expr: &influxql.VarRef{Val: "tag1"}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
},
},
// select distinct statements
{
s: `select distinct(field1) from cpu`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "distinct", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
},
},
{
s: `select distinct field2 from network`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.Distinct{Val: "field2"}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "network"}},
},
},
{
s: `select count(distinct field3) from metrics`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.Distinct{Val: "field3"}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "metrics"}},
},
},
{
s: `select count(distinct field3), sum(field4) from metrics`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.Distinct{Val: "field3"}}}},
{Expr: &influxql.Call{Name: "sum", Args: []influxql.Expr{&influxql.VarRef{Val: "field4"}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "metrics"}},
},
},
{
s: `select count(distinct(field3)), sum(field4) from metrics`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.Call{Name: "distinct", Args: []influxql.Expr{&influxql.VarRef{Val: "field3"}}}}}},
{Expr: &influxql.Call{Name: "sum", Args: []influxql.Expr{&influxql.VarRef{Val: "field4"}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "metrics"}},
},
},
// SELECT * FROM WHERE time
{
s: fmt.Sprintf(`SELECT * FROM cpu WHERE time > '%s'`, now.UTC().Format(time.RFC3339Nano)),
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.GT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.TimeLiteral{Val: now.UTC()},
},
},
},
// SELECT * FROM WHERE field comparisons
{
s: `SELECT * FROM cpu WHERE load > 100`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.GT,
LHS: &influxql.VarRef{Val: "load"},
RHS: &influxql.NumberLiteral{Val: 100},
},
},
},
{
s: `SELECT * FROM cpu WHERE load >= 100`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.GTE,
LHS: &influxql.VarRef{Val: "load"},
RHS: &influxql.NumberLiteral{Val: 100},
},
},
},
{
s: `SELECT * FROM cpu WHERE load = 100`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "load"},
RHS: &influxql.NumberLiteral{Val: 100},
},
},
},
{
s: `SELECT * FROM cpu WHERE load <= 100`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.LTE,
LHS: &influxql.VarRef{Val: "load"},
RHS: &influxql.NumberLiteral{Val: 100},
},
},
},
{
s: `SELECT * FROM cpu WHERE load < 100`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.LT,
LHS: &influxql.VarRef{Val: "load"},
RHS: &influxql.NumberLiteral{Val: 100},
},
},
},
{
s: `SELECT * FROM cpu WHERE load != 100`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.NEQ,
LHS: &influxql.VarRef{Val: "load"},
RHS: &influxql.NumberLiteral{Val: 100},
},
},
},
// SELECT * FROM /<regex>/
{
s: `SELECT * FROM /cpu.*/`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile("cpu.*")}},
},
},
},
// SELECT * FROM "db"."rp"./<regex>/
{
s: `SELECT * FROM "db"."rp"./cpu.*/`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{
Database: `db`,
RetentionPolicy: `rp`,
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile("cpu.*")}},
},
},
},
// SELECT * FROM "db"../<regex>/
{
s: `SELECT * FROM "db"../cpu.*/`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{
Database: `db`,
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile("cpu.*")}},
},
},
},
// SELECT * FROM "rp"./<regex>/
{
s: `SELECT * FROM "rp"./cpu.*/`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Sources: []influxql.Source{&influxql.Measurement{
RetentionPolicy: `rp`,
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile("cpu.*")}},
},
},
},
// SELECT statement with group by
{
s: `SELECT sum(value) FROM "kbps" WHERE time > now() - 120s AND deliveryservice='steam-dns' and cachegroup = 'total' GROUP BY time(60s)`,
stmt: &influxql.SelectStatement{
IsRawQuery: false,
Fields: []*influxql.Field{
{Expr: &influxql.Call{Name: "sum", Args: []influxql.Expr{&influxql.VarRef{Val: "value"}}}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "kbps"}},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Call{Name: "time", Args: []influxql.Expr{&influxql.DurationLiteral{Val: 60 * time.Second}}}}},
Condition: &influxql.BinaryExpr{ // 1
Op: influxql.AND,
LHS: &influxql.BinaryExpr{ // 2
Op: influxql.AND,
LHS: &influxql.BinaryExpr{ //3
Op: influxql.GT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.BinaryExpr{
Op: influxql.SUB,
LHS: &influxql.Call{Name: "now"},
RHS: &influxql.DurationLiteral{Val: mustParseDuration("120s")},
},
},
RHS: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "deliveryservice"},
RHS: &influxql.StringLiteral{Val: "steam-dns"},
},
},
RHS: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "cachegroup"},
RHS: &influxql.StringLiteral{Val: "total"},
},
},
},
},
// SELECT statement with fill
{
s: fmt.Sprintf(`SELECT mean(value) FROM cpu where time < '%s' GROUP BY time(5m) fill(1)`, now.UTC().Format(time.RFC3339Nano)),
stmt: &influxql.SelectStatement{
Fields: []*influxql.Field{{
Expr: &influxql.Call{
Name: "mean",
Args: []influxql.Expr{&influxql.VarRef{Val: "value"}}}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.LT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.TimeLiteral{Val: now.UTC()},
},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Call{Name: "time", Args: []influxql.Expr{&influxql.DurationLiteral{Val: 5 * time.Minute}}}}},
Fill: influxql.NumberFill,
FillValue: float64(1),
},
},
// SELECT statement with FILL(none) -- check case insensitivity
{
s: fmt.Sprintf(`SELECT mean(value) FROM cpu where time < '%s' GROUP BY time(5m) FILL(none)`, now.UTC().Format(time.RFC3339Nano)),
stmt: &influxql.SelectStatement{
Fields: []*influxql.Field{{
Expr: &influxql.Call{
Name: "mean",
Args: []influxql.Expr{&influxql.VarRef{Val: "value"}}}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.LT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.TimeLiteral{Val: now.UTC()},
},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Call{Name: "time", Args: []influxql.Expr{&influxql.DurationLiteral{Val: 5 * time.Minute}}}}},
Fill: influxql.NoFill,
},
},
// SELECT statement with previous fill
{
s: fmt.Sprintf(`SELECT mean(value) FROM cpu where time < '%s' GROUP BY time(5m) FILL(previous)`, now.UTC().Format(time.RFC3339Nano)),
stmt: &influxql.SelectStatement{
Fields: []*influxql.Field{{
Expr: &influxql.Call{
Name: "mean",
Args: []influxql.Expr{&influxql.VarRef{Val: "value"}}}}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
Condition: &influxql.BinaryExpr{
Op: influxql.LT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.TimeLiteral{Val: now.UTC()},
},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Call{Name: "time", Args: []influxql.Expr{&influxql.DurationLiteral{Val: 5 * time.Minute}}}}},
Fill: influxql.PreviousFill,
},
},
// See issues https://github.com/influxdb/influxdb/issues/1647
// and https://github.com/influxdb/influxdb/issues/4404
// DELETE statement
//{
// s: `DELETE FROM myseries WHERE host = 'hosta.influxdb.org'`,
// stmt: &influxql.DeleteStatement{
// Source: &influxql.Measurement{Name: "myseries"},
// Condition: &influxql.BinaryExpr{
// Op: influxql.EQ,
// LHS: &influxql.VarRef{Val: "host"},
// RHS: &influxql.StringLiteral{Val: "hosta.influxdb.org"},
// },
// },
//},
// SHOW SERVERS
{
s: `SHOW SERVERS`,
stmt: &influxql.ShowServersStatement{},
},
// SHOW GRANTS
{
s: `SHOW GRANTS FOR jdoe`,
stmt: &influxql.ShowGrantsForUserStatement{Name: "jdoe"},
},
// SHOW DATABASES
{
s: `SHOW DATABASES`,
stmt: &influxql.ShowDatabasesStatement{},
},
// SHOW SERIES statement
{
s: `SHOW SERIES`,
stmt: &influxql.ShowSeriesStatement{},
},
// SHOW SERIES FROM
{
s: `SHOW SERIES FROM cpu`,
stmt: &influxql.ShowSeriesStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
},
},
// SHOW SERIES FROM /<regex>/
{
s: `SHOW SERIES FROM /[cg]pu/`,
stmt: &influxql.ShowSeriesStatement{
Sources: []influxql.Source{
&influxql.Measurement{
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile(`[cg]pu`)},
},
},
},
},
// SHOW SERIES with OFFSET 0
{
s: `SHOW SERIES OFFSET 0`,
stmt: &influxql.ShowSeriesStatement{Offset: 0},
},
// SHOW SERIES with LIMIT 2 OFFSET 0
{
s: `SHOW SERIES LIMIT 2 OFFSET 0`,
stmt: &influxql.ShowSeriesStatement{Offset: 0, Limit: 2},
},
// SHOW SERIES WHERE with ORDER BY and LIMIT
{
skip: true,
s: `SHOW SERIES WHERE region = 'order by desc' ORDER BY DESC, field1, field2 DESC LIMIT 10`,
stmt: &influxql.ShowSeriesStatement{
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "order by desc"},
},
SortFields: []*influxql.SortField{
&influxql.SortField{Ascending: false},
&influxql.SortField{Name: "field1", Ascending: true},
&influxql.SortField{Name: "field2"},
},
Limit: 10,
},
},
// SHOW MEASUREMENTS WHERE with ORDER BY and LIMIT
{
skip: true,
s: `SHOW MEASUREMENTS WHERE region = 'uswest' ORDER BY ASC, field1, field2 DESC LIMIT 10`,
stmt: &influxql.ShowMeasurementsStatement{
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "uswest"},
},
SortFields: []*influxql.SortField{
{Ascending: true},
{Name: "field1"},
{Name: "field2"},
},
Limit: 10,
},
},
// SHOW MEASUREMENTS WITH MEASUREMENT = cpu
{
s: `SHOW MEASUREMENTS WITH MEASUREMENT = cpu`,
stmt: &influxql.ShowMeasurementsStatement{
Source: &influxql.Measurement{Name: "cpu"},
},
},
// SHOW MEASUREMENTS WITH MEASUREMENT =~ /regex/
{
s: `SHOW MEASUREMENTS WITH MEASUREMENT =~ /[cg]pu/`,
stmt: &influxql.ShowMeasurementsStatement{
Source: &influxql.Measurement{
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile(`[cg]pu`)},
},
},
},
// SHOW RETENTION POLICIES
{
s: `SHOW RETENTION POLICIES ON mydb`,
stmt: &influxql.ShowRetentionPoliciesStatement{
Database: "mydb",
},
},
// SHOW TAG KEYS
{
s: `SHOW TAG KEYS FROM src`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
},
},
// SHOW TAG KEYS with LIMIT
{
s: `SHOW TAG KEYS FROM src LIMIT 2`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
Limit: 2,
},
},
// SHOW TAG KEYS with OFFSET
{
s: `SHOW TAG KEYS FROM src OFFSET 1`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
Offset: 1,
},
},
// SHOW TAG KEYS with LIMIT and OFFSET
{
s: `SHOW TAG KEYS FROM src LIMIT 2 OFFSET 1`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
Limit: 2,
Offset: 1,
},
},
// SHOW TAG KEYS with SLIMIT
{
s: `SHOW TAG KEYS FROM src SLIMIT 2`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
SLimit: 2,
},
},
// SHOW TAG KEYS with SOFFSET
{
s: `SHOW TAG KEYS FROM src SOFFSET 1`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
SOffset: 1,
},
},
// SHOW TAG KEYS with SLIMIT and SOFFSET
{
s: `SHOW TAG KEYS FROM src SLIMIT 2 SOFFSET 1`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
SLimit: 2,
SOffset: 1,
},
},
// SHOW TAG KEYS with LIMIT, OFFSET, SLIMIT, and SOFFSET
{
s: `SHOW TAG KEYS FROM src LIMIT 4 OFFSET 3 SLIMIT 2 SOFFSET 1`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
Limit: 4,
Offset: 3,
SLimit: 2,
SOffset: 1,
},
},
// SHOW TAG KEYS FROM /<regex>/
{
s: `SHOW TAG KEYS FROM /[cg]pu/`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{
&influxql.Measurement{
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile(`[cg]pu`)},
},
},
},
},
// SHOW TAG KEYS
{
skip: true,
s: `SHOW TAG KEYS FROM src WHERE region = 'uswest' ORDER BY ASC, field1, field2 DESC LIMIT 10`,
stmt: &influxql.ShowTagKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "uswest"},
},
SortFields: []*influxql.SortField{
{Ascending: true},
{Name: "field1"},
{Name: "field2"},
},
Limit: 10,
},
},
// SHOW TAG VALUES FROM ... WITH KEY = ...
{
skip: true,
s: `SHOW TAG VALUES FROM src WITH KEY = region WHERE region = 'uswest' ORDER BY ASC, field1, field2 DESC LIMIT 10`,
stmt: &influxql.ShowTagValuesStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
TagKeys: []string{"region"},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "uswest"},
},
SortFields: []*influxql.SortField{
{Ascending: true},
{Name: "field1"},
{Name: "field2"},
},
Limit: 10,
},
},
// SHOW TAG VALUES FROM ... WITH KEY IN...
{
s: `SHOW TAG VALUES FROM cpu WITH KEY IN (region, host) WHERE region = 'uswest'`,
stmt: &influxql.ShowTagValuesStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
TagKeys: []string{"region", "host"},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "uswest"},
},
},
},
// SHOW TAG VALUES ... AND TAG KEY =
{
s: `SHOW TAG VALUES FROM cpu WITH KEY IN (region,service,host)WHERE region = 'uswest'`,
stmt: &influxql.ShowTagValuesStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu"}},
TagKeys: []string{"region", "service", "host"},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "uswest"},
},
},
},
// SHOW TAG VALUES WITH KEY = ...
{
s: `SHOW TAG VALUES WITH KEY = host WHERE region = 'uswest'`,
stmt: &influxql.ShowTagValuesStatement{
TagKeys: []string{"host"},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "uswest"},
},
},
},
// SHOW TAG VALUES FROM /<regex>/ WITH KEY = ...
{
s: `SHOW TAG VALUES FROM /[cg]pu/ WITH KEY = host`,
stmt: &influxql.ShowTagValuesStatement{
Sources: []influxql.Source{
&influxql.Measurement{
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile(`[cg]pu`)},
},
},
TagKeys: []string{"host"},
},
},
// SHOW TAG VALUES WITH KEY = "..."
{
s: `SHOW TAG VALUES WITH KEY = "host" WHERE region = 'uswest'`,
stmt: &influxql.ShowTagValuesStatement{
TagKeys: []string{`host`},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.StringLiteral{Val: "uswest"},
},
},
},
// SHOW USERS
{
s: `SHOW USERS`,
stmt: &influxql.ShowUsersStatement{},
},
// SHOW FIELD KEYS
{
skip: true,
s: `SHOW FIELD KEYS FROM src ORDER BY ASC, field1, field2 DESC LIMIT 10`,
stmt: &influxql.ShowFieldKeysStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
SortFields: []*influxql.SortField{
{Ascending: true},
{Name: "field1"},
{Name: "field2"},
},
Limit: 10,
},
},
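// SHOW FIELD KEYS FROM /<regex>/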
{
s: `SHOW FIELD KEYS FROM /[cg]pu/`,
stmt: &influxql.ShowFieldKeysStatement{
Sources: []influxql.Source{
&influxql.Measurement{
Regex: &influxql.RegexLiteral{Val: regexp.MustCompile(`[cg]pu`)},
},
},
},
},
// DROP SERIES statement
{
s: `DROP SERIES FROM src`,
stmt: &influxql.DropSeriesStatement{Sources: []influxql.Source{&influxql.Measurement{Name: "src"}}},
},
{
s: `DROP SERIES WHERE host = 'hosta.influxdb.org'`,
stmt: &influxql.DropSeriesStatement{
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "host"},
RHS: &influxql.StringLiteral{Val: "hosta.influxdb.org"},
},
},
},
{
s: `DROP SERIES FROM src WHERE host = 'hosta.influxdb.org'`,
stmt: &influxql.DropSeriesStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "host"},
RHS: &influxql.StringLiteral{Val: "hosta.influxdb.org"},
},
},
},
// DROP SERVER statement
{
s: `DROP META SERVER 123`,
stmt: &influxql.DropServerStatement{NodeID: 123, Meta: true},
},
{
s: `DROP DATA SERVER 123`,
stmt: &influxql.DropServerStatement{NodeID: 123, Meta: false},
},
// SHOW CONTINUOUS QUERIES statement
{
s: `SHOW CONTINUOUS QUERIES`,
stmt: &influxql.ShowContinuousQueriesStatement{},
},
// CREATE CONTINUOUS QUERY ... INTO <measurement>
{
s: `CREATE CONTINUOUS QUERY myquery ON testdb RESAMPLE EVERY 1m FOR 1h BEGIN SELECT count(field1) INTO measure1 FROM myseries GROUP BY time(5m) END`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "myquery",
Database: "testdb",
Source: &influxql.SelectStatement{
Fields: []*influxql.Field{{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}}},
Target: &influxql.Target{Measurement: &influxql.Measurement{Name: "measure1", IsTarget: true}},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{
{
Expr: &influxql.Call{
Name: "time",
Args: []influxql.Expr{
&influxql.DurationLiteral{Val: 5 * time.Minute},
},
},
},
},
},
ResampleEvery: time.Minute,
ResampleFor: time.Hour,
},
},
{
s: `CREATE CONTINUOUS QUERY myquery ON testdb RESAMPLE FOR 1h BEGIN SELECT count(field1) INTO measure1 FROM myseries GROUP BY time(5m) END`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "myquery",
Database: "testdb",
Source: &influxql.SelectStatement{
Fields: []*influxql.Field{{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}}},
Target: &influxql.Target{Measurement: &influxql.Measurement{Name: "measure1", IsTarget: true}},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{
{
Expr: &influxql.Call{
Name: "time",
Args: []influxql.Expr{
&influxql.DurationLiteral{Val: 5 * time.Minute},
},
},
},
},
},
ResampleFor: time.Hour,
},
},
{
s: `CREATE CONTINUOUS QUERY myquery ON testdb RESAMPLE EVERY 1m BEGIN SELECT count(field1) INTO measure1 FROM myseries GROUP BY time(5m) END`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "myquery",
Database: "testdb",
Source: &influxql.SelectStatement{
Fields: []*influxql.Field{{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}}},
Target: &influxql.Target{Measurement: &influxql.Measurement{Name: "measure1", IsTarget: true}},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{
{
Expr: &influxql.Call{
Name: "time",
Args: []influxql.Expr{
&influxql.DurationLiteral{Val: 5 * time.Minute},
},
},
},
},
},
ResampleEvery: time.Minute,
},
},
{
s: `create continuous query "this.is-a.test" on segments begin select * into measure1 from cpu_load_short end`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "this.is-a.test",
Database: "segments",
Source: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.Wildcard{}}},
Target: &influxql.Target{Measurement: &influxql.Measurement{Name: "measure1", IsTarget: true}},
Sources: []influxql.Source{&influxql.Measurement{Name: "cpu_load_short"}},
},
},
},
// CREATE CONTINUOUS QUERY ... INTO <retention-policy>.<measurement>
{
s: `CREATE CONTINUOUS QUERY myquery ON testdb BEGIN SELECT count(field1) INTO "1h.policy1"."cpu.load" FROM myseries GROUP BY time(5m) END`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "myquery",
Database: "testdb",
Source: &influxql.SelectStatement{
Fields: []*influxql.Field{{Expr: &influxql.Call{Name: "count", Args: []influxql.Expr{&influxql.VarRef{Val: "field1"}}}}},
Target: &influxql.Target{
Measurement: &influxql.Measurement{RetentionPolicy: "1h.policy1", Name: "cpu.load", IsTarget: true},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{
{
Expr: &influxql.Call{
Name: "time",
Args: []influxql.Expr{
&influxql.DurationLiteral{Val: 5 * time.Minute},
},
},
},
},
},
},
},
// CREATE CONTINUOUS QUERY for non-aggregate SELECT stmts
{
s: `CREATE CONTINUOUS QUERY myquery ON testdb BEGIN SELECT value INTO "policy1"."value" FROM myseries END`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "myquery",
Database: "testdb",
Source: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.VarRef{Val: "value"}}},
Target: &influxql.Target{
Measurement: &influxql.Measurement{RetentionPolicy: "policy1", Name: "value", IsTarget: true},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
},
// CREATE CONTINUOUS QUERY for non-aggregate SELECT stmts with multiple values
{
s: `CREATE CONTINUOUS QUERY myquery ON testdb BEGIN SELECT transmit_rx, transmit_tx INTO "policy1"."network" FROM myseries END`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "myquery",
Database: "testdb",
Source: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{{Expr: &influxql.VarRef{Val: "transmit_rx"}},
{Expr: &influxql.VarRef{Val: "transmit_tx"}}},
Target: &influxql.Target{
Measurement: &influxql.Measurement{RetentionPolicy: "policy1", Name: "network", IsTarget: true},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
},
// CREATE CONTINUOUS QUERY with backreference measurement name
{
s: `CREATE CONTINUOUS QUERY myquery ON testdb BEGIN SELECT mean(value) INTO "policy1".:measurement FROM /^[a-z]+.*/ GROUP BY time(1m) END`,
stmt: &influxql.CreateContinuousQueryStatement{
Name: "myquery",
Database: "testdb",
Source: &influxql.SelectStatement{
Fields: []*influxql.Field{{Expr: &influxql.Call{Name: "mean", Args: []influxql.Expr{&influxql.VarRef{Val: "value"}}}}},
Target: &influxql.Target{
Measurement: &influxql.Measurement{RetentionPolicy: "policy1", IsTarget: true},
},
Sources: []influxql.Source{&influxql.Measurement{Regex: &influxql.RegexLiteral{Val: regexp.MustCompile(`^[a-z]+.*`)}}},
Dimensions: []*influxql.Dimension{
{
Expr: &influxql.Call{
Name: "time",
Args: []influxql.Expr{
&influxql.DurationLiteral{Val: 1 * time.Minute},
},
},
},
},
},
},
},
// CREATE DATABASE statement
{
s: `CREATE DATABASE testdb`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: false,
RetentionPolicyCreate: false,
},
},
{
s: `CREATE DATABASE IF NOT EXISTS testdb`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: true,
RetentionPolicyCreate: false,
},
},
{
s: `CREATE DATABASE testdb WITH DURATION 24h`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: false,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 24 * time.Hour,
RetentionPolicyReplication: 1,
RetentionPolicyName: "default",
},
},
{
s: `CREATE DATABASE IF NOT EXISTS testdb WITH DURATION 24h`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: true,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 24 * time.Hour,
RetentionPolicyReplication: 1,
RetentionPolicyName: "default",
},
},
{
s: `CREATE DATABASE testdb WITH REPLICATION 2`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: false,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 0,
RetentionPolicyReplication: 2,
RetentionPolicyName: "default",
},
},
{
s: `CREATE DATABASE IF NOT EXISTS testdb WITH REPLICATION 2`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: true,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 0,
RetentionPolicyReplication: 2,
RetentionPolicyName: "default",
},
},
{
s: `CREATE DATABASE testdb WITH NAME test_name`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: false,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 0,
RetentionPolicyReplication: 1,
RetentionPolicyName: "test_name",
},
},
{
s: `CREATE DATABASE IF NOT EXISTS testdb WITH NAME test_name`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: true,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 0,
RetentionPolicyReplication: 1,
RetentionPolicyName: "test_name",
},
},
{
s: `CREATE DATABASE testdb WITH DURATION 24h REPLICATION 2 NAME test_name`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: false,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 24 * time.Hour,
RetentionPolicyReplication: 2,
RetentionPolicyName: "test_name",
},
},
{
s: `CREATE DATABASE IF NOT EXISTS testdb WITH DURATION 24h REPLICATION 2 NAME test_name`,
stmt: &influxql.CreateDatabaseStatement{
Name: "testdb",
IfNotExists: true,
RetentionPolicyCreate: true,
RetentionPolicyDuration: 24 * time.Hour,
RetentionPolicyReplication: 2,
RetentionPolicyName: "test_name",
},
},
// CREATE USER statement
{
s: `CREATE USER testuser WITH PASSWORD 'pwd1337'`,
stmt: &influxql.CreateUserStatement{
Name: "testuser",
Password: "pwd1337",
},
},
// CREATE USER ... WITH ALL PRIVILEGES
{
s: `CREATE USER testuser WITH PASSWORD 'pwd1337' WITH ALL PRIVILEGES`,
stmt: &influxql.CreateUserStatement{
Name: "testuser",
Password: "pwd1337",
Admin: true,
},
},
// SET PASSWORD FOR USER
{
s: `SET PASSWORD FOR testuser = 'pwd1337'`,
stmt: &influxql.SetPasswordUserStatement{
Name: "testuser",
Password: "pwd1337",
},
},
// DROP CONTINUOUS QUERY statement
{
s: `DROP CONTINUOUS QUERY myquery ON foo`,
stmt: &influxql.DropContinuousQueryStatement{Name: "myquery", Database: "foo"},
},
// DROP DATABASE statement
{
s: `DROP DATABASE testdb`,
stmt: &influxql.DropDatabaseStatement{
Name: "testdb",
IfExists: false,
},
},
{
s: `DROP DATABASE IF EXISTS testdb`,
stmt: &influxql.DropDatabaseStatement{
Name: "testdb",
IfExists: true,
},
},
// DROP MEASUREMENT statement
{
s: `DROP MEASUREMENT cpu`,
stmt: &influxql.DropMeasurementStatement{Name: "cpu"},
},
// DROP RETENTION POLICY
{
s: `DROP RETENTION POLICY "1h.cpu" ON mydb`,
stmt: &influxql.DropRetentionPolicyStatement{
Name: `1h.cpu`,
Database: `mydb`,
},
},
// DROP USER statement
{
s: `DROP USER jdoe`,
stmt: &influxql.DropUserStatement{Name: "jdoe"},
},
// GRANT READ
{
s: `GRANT READ ON testdb TO jdoe`,
stmt: &influxql.GrantStatement{
Privilege: influxql.ReadPrivilege,
On: "testdb",
User: "jdoe",
},
},
// GRANT WRITE
{
s: `GRANT WRITE ON testdb TO jdoe`,
stmt: &influxql.GrantStatement{
Privilege: influxql.WritePrivilege,
On: "testdb",
User: "jdoe",
},
},
// GRANT ALL
{
s: `GRANT ALL ON testdb TO jdoe`,
stmt: &influxql.GrantStatement{
Privilege: influxql.AllPrivileges,
On: "testdb",
User: "jdoe",
},
},
// GRANT ALL PRIVILEGES
{
s: `GRANT ALL PRIVILEGES ON testdb TO jdoe`,
stmt: &influxql.GrantStatement{
Privilege: influxql.AllPrivileges,
On: "testdb",
User: "jdoe",
},
},
// GRANT ALL admin privilege
{
s: `GRANT ALL TO jdoe`,
stmt: &influxql.GrantAdminStatement{
User: "jdoe",
},
},
// GRANT ALL PRIVILEGES admin privilege
{
s: `GRANT ALL PRIVILEGES TO jdoe`,
stmt: &influxql.GrantAdminStatement{
User: "jdoe",
},
},
// REVOKE READ
{
s: `REVOKE READ on testdb FROM jdoe`,
stmt: &influxql.RevokeStatement{
Privilege: influxql.ReadPrivilege,
On: "testdb",
User: "jdoe",
},
},
// REVOKE WRITE
{
s: `REVOKE WRITE ON testdb FROM jdoe`,
stmt: &influxql.RevokeStatement{
Privilege: influxql.WritePrivilege,
On: "testdb",
User: "jdoe",
},
},
// REVOKE ALL
{
s: `REVOKE ALL ON testdb FROM jdoe`,
stmt: &influxql.RevokeStatement{
Privilege: influxql.AllPrivileges,
On: "testdb",
User: "jdoe",
},
},
// REVOKE ALL PRIVILEGES
{
s: `REVOKE ALL PRIVILEGES ON testdb FROM jdoe`,
stmt: &influxql.RevokeStatement{
Privilege: influxql.AllPrivileges,
On: "testdb",
User: "jdoe",
},
},
// REVOKE ALL admin privilege
{
s: `REVOKE ALL FROM jdoe`,
stmt: &influxql.RevokeAdminStatement{
User: "jdoe",
},
},
// REVOKE ALL PRIVILEGES admin privilege
{
s: `REVOKE ALL PRIVILEGES FROM jdoe`,
stmt: &influxql.RevokeAdminStatement{
User: "jdoe",
},
},
// CREATE RETENTION POLICY
{
s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 1h REPLICATION 2`,
stmt: &influxql.CreateRetentionPolicyStatement{
Name: "policy1",
Database: "testdb",
Duration: time.Hour,
Replication: 2,
},
},
// CREATE RETENTION POLICY with infinite retention
{
s: `CREATE RETENTION POLICY policy1 ON testdb DURATION INF REPLICATION 2`,
stmt: &influxql.CreateRetentionPolicyStatement{
Name: "policy1",
Database: "testdb",
Duration: 0,
Replication: 2,
},
},
// CREATE RETENTION POLICY ... DEFAULT
{
s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 2m REPLICATION 4 DEFAULT`,
stmt: &influxql.CreateRetentionPolicyStatement{
Name: "policy1",
Database: "testdb",
Duration: 2 * time.Minute,
Replication: 4,
Default: true,
},
},
// ALTER RETENTION POLICY
{
s: `ALTER RETENTION POLICY policy1 ON testdb DURATION 1m REPLICATION 4 DEFAULT`,
stmt: newAlterRetentionPolicyStatement("policy1", "testdb", time.Minute, 4, true),
},
// ALTER RETENTION POLICY with options in reverse order
{
s: `ALTER RETENTION POLICY policy1 ON testdb DEFAULT REPLICATION 4 DURATION 1m`,
stmt: newAlterRetentionPolicyStatement("policy1", "testdb", time.Minute, 4, true),
},
// ALTER RETENTION POLICY with infinite retention
{
s: `ALTER RETENTION POLICY policy1 ON testdb DEFAULT REPLICATION 4 DURATION INF`,
stmt: newAlterRetentionPolicyStatement("policy1", "testdb", 0, 4, true),
},
// ALTER RETENTION POLICY without optional DURATION
{
s: `ALTER RETENTION POLICY policy1 ON testdb DEFAULT REPLICATION 4`,
stmt: newAlterRetentionPolicyStatement("policy1", "testdb", -1, 4, true),
},
// ALTER RETENTION POLICY without optional REPLICATION
{
s: `ALTER RETENTION POLICY policy1 ON testdb DEFAULT`,
stmt: newAlterRetentionPolicyStatement("policy1", "testdb", -1, -1, true),
},
// ALTER RETENTION POLICY without optional DEFAULT
{
s: `ALTER RETENTION POLICY policy1 ON testdb REPLICATION 4`,
stmt: newAlterRetentionPolicyStatement("policy1", "testdb", -1, 4, false),
},
// ALTER default retention policy unquoted
{
s: `ALTER RETENTION POLICY default ON testdb REPLICATION 4`,
stmt: newAlterRetentionPolicyStatement("default", "testdb", -1, 4, false),
},
// SHOW STATS
{
s: `SHOW STATS`,
stmt: &influxql.ShowStatsStatement{
Module: "",
},
},
{
s: `SHOW STATS FOR 'cluster'`,
stmt: &influxql.ShowStatsStatement{
Module: "cluster",
},
},
// SHOW SHARD GROUPS
{
s: `SHOW SHARD GROUPS`,
stmt: &influxql.ShowShardGroupsStatement{},
},
// SHOW SHARDS
{
s: `SHOW SHARDS`,
stmt: &influxql.ShowShardsStatement{},
},
// SHOW DIAGNOSTICS
{
s: `SHOW DIAGNOSTICS`,
stmt: &influxql.ShowDiagnosticsStatement{},
},
{
s: `SHOW DIAGNOSTICS FOR 'build'`,
stmt: &influxql.ShowDiagnosticsStatement{
Module: "build",
},
},
// CREATE SUBSCRIPTION
{
s: `CREATE SUBSCRIPTION "name" ON "db"."rp" DESTINATIONS ANY 'udp://host1:9093', 'udp://host2:9093'`,
stmt: &influxql.CreateSubscriptionStatement{
Name: "name",
Database: "db",
RetentionPolicy: "rp",
Destinations: []string{"udp://host1:9093", "udp://host2:9093"},
Mode: "ANY",
},
},
// DROP SUBSCRIPTION
{
s: `DROP SUBSCRIPTION "name" ON "db"."rp"`,
stmt: &influxql.DropSubscriptionStatement{
Name: "name",
Database: "db",
RetentionPolicy: "rp",
},
},
// SHOW SUBSCRIPTIONS
{
s: `SHOW SUBSCRIPTIONS`,
stmt: &influxql.ShowSubscriptionsStatement{},
},
// Errors
{s: ``, err: `found EOF, expected SELECT, DELETE, SHOW, CREATE, DROP, GRANT, REVOKE, ALTER, SET at line 1, char 1`},
{s: `SELECT`, err: `found EOF, expected identifier, string, number, bool at line 1, char 8`},
{s: `SELECT time FROM myseries`, err: `at least 1 non-time field must be queried`},
{s: `blah blah`, err: `found blah, expected SELECT, DELETE, SHOW, CREATE, DROP, GRANT, REVOKE, ALTER, SET at line 1, char 1`},
{s: `SELECT field1 X`, err: `found X, expected FROM at line 1, char 15`},
{s: `SELECT field1 FROM "series" WHERE X +;`, err: `found ;, expected identifier, string, number, bool at line 1, char 38`},
{s: `SELECT field1 FROM myseries GROUP`, err: `found EOF, expected BY at line 1, char 35`},
{s: `SELECT field1 FROM myseries LIMIT`, err: `found EOF, expected number at line 1, char 35`},
{s: `SELECT field1 FROM myseries LIMIT 10.5`, err: `fractional parts not allowed in LIMIT at line 1, char 35`},
{s: `SELECT top() FROM myseries`, err: `invalid number of arguments for top, expected at least 2, got 0`},
{s: `SELECT top(field1) FROM myseries`, err: `invalid number of arguments for top, expected at least 2, got 1`},
{s: `SELECT top(field1,foo) FROM myseries`, err: `expected integer as last argument in top(), found foo`},
{s: `SELECT top(field1,host,'server',foo) FROM myseries`, err: `expected integer as last argument in top(), found foo`},
{s: `SELECT top(field1,5,'server',2) FROM myseries`, err: `only fields or tags are allowed in top(), found 5.000`},
{s: `SELECT top(field1,max(foo),'server',2) FROM myseries`, err: `only fields or tags are allowed in top(), found max(foo)`},
{s: `SELECT bottom() FROM myseries`, err: `invalid number of arguments for bottom, expected at least 2, got 0`},
{s: `SELECT bottom(field1) FROM myseries`, err: `invalid number of arguments for bottom, expected at least 2, got 1`},
{s: `SELECT bottom(field1,foo) FROM myseries`, err: `expected integer as last argument in bottom(), found foo`},
{s: `SELECT bottom(field1,host,'server',foo) FROM myseries`, err: `expected integer as last argument in bottom(), found foo`},
{s: `SELECT bottom(field1,5,'server',2) FROM myseries`, err: `only fields or tags are allowed in bottom(), found 5.000`},
{s: `SELECT bottom(field1,max(foo),'server',2) FROM myseries`, err: `only fields or tags are allowed in bottom(), found max(foo)`},
{s: `SELECT percentile() FROM myseries`, err: `invalid number of arguments for percentile, expected 2, got 0`},
{s: `SELECT percentile(field1) FROM myseries`, err: `invalid number of arguments for percentile, expected 2, got 1`},
{s: `SELECT percentile(field1, foo) FROM myseries`, err: `expected float argument in percentile()`},
{s: `SELECT field1 FROM myseries OFFSET`, err: `found EOF, expected number at line 1, char 36`},
{s: `SELECT field1 FROM myseries OFFSET 10.5`, err: `fractional parts not allowed in OFFSET at line 1, char 36`},
{s: `SELECT field1 FROM myseries ORDER`, err: `found EOF, expected BY at line 1, char 35`},
{s: `SELECT field1 FROM myseries ORDER BY`, err: `found EOF, expected identifier, ASC, DESC at line 1, char 38`},
{s: `SELECT field1 FROM myseries ORDER BY /`, err: `found /, expected identifier, ASC, DESC at line 1, char 38`},
{s: `SELECT field1 FROM myseries ORDER BY 1`, err: `found 1, expected identifier, ASC, DESC at line 1, char 38`},
{s: `SELECT field1 FROM myseries ORDER BY time ASC,`, err: `found EOF, expected identifier at line 1, char 47`},
{s: `SELECT field1 FROM myseries ORDER BY time, field1`, err: `only ORDER BY time supported at this time`},
{s: `SELECT field1 AS`, err: `found EOF, expected identifier at line 1, char 18`},
{s: `SELECT field1 FROM foo group by time(1s)`, err: `GROUP BY requires at least one aggregate function`},
{s: `SELECT count(value), value FROM foo`, err: `mixing aggregate and non-aggregate queries is not supported`},
{s: `SELECT count(value)/10, value FROM foo`, err: `mixing aggregate and non-aggregate queries is not supported`},
{s: `SELECT count(value) FROM foo group by time(1s)`, err: `aggregate functions with GROUP BY time require a WHERE time clause`},
{s: `SELECT count(value) FROM foo group by time(1s) where host = 'hosta.influxdb.org'`, err: `aggregate functions with GROUP BY time require a WHERE time clause`},
{s: `SELECT count(value) FROM foo group by time`, err: `time() is a function and expects at least one argument`},
{s: `SELECT count(value) FROM foo group by 'time'`, err: `only time and tag dimensions allowed`},
{s: `SELECT count(value) FROM foo where time > now() and time < now() group by time()`, err: `time dimension expected one argument`},
{s: `SELECT count(value) FROM foo where time > now() and time < now() group by time(b)`, err: `time dimension must have one duration argument`},
{s: `SELECT count(value) FROM foo where time > now() and time < now() group by time(1s), time(2s)`, err: `multiple time dimensions not allowed`},
{s: `SELECT field1 FROM 12`, err: `found 12, expected identifier at line 1, char 20`},
{s: `SELECT 1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000 FROM myseries`, err: `unable to parse number at line 1, char 8`},
{s: `SELECT 10.5h FROM myseries`, err: `found h, expected FROM at line 1, char 12`},
{s: `SELECT distinct(field1), sum(field1) FROM myseries`, err: `aggregate function distinct() can not be combined with other functions or fields`},
{s: `SELECT distinct(field1), field2 FROM myseries`, err: `aggregate function distinct() can not be combined with other functions or fields`},
{s: `SELECT distinct(field1, field2) FROM myseries`, err: `distinct function can only have one argument`},
{s: `SELECT distinct() FROM myseries`, err: `distinct function requires at least one argument`},
{s: `SELECT distinct FROM myseries`, err: `found FROM, expected identifier at line 1, char 17`},
{s: `SELECT distinct field1, field2 FROM myseries`, err: `aggregate function distinct() can not be combined with other functions or fields`},
{s: `SELECT count(distinct) FROM myseries`, err: `found ), expected (, identifier at line 1, char 22`},
{s: `SELECT count(distinct field1, field2) FROM myseries`, err: `count(distinct <field>) can only have one argument`},
{s: `select count(distinct(too, many, arguments)) from myseries`, err: `count(distinct <field>) can only have one argument`},
{s: `select count() from myseries`, err: `invalid number of arguments for count, expected 1, got 0`},
{s: `SELECT derivative(), field1 FROM myseries`, err: `mixing aggregate and non-aggregate queries is not supported`},
{s: `select derivative() from myseries`, err: `invalid number of arguments for derivative, expected at least 1 but no more than 2, got 0`},
{s: `select derivative(mean(value), 1h, 3) from myseries`, err: `invalid number of arguments for derivative, expected at least 1 but no more than 2, got 3`},
{s: `SELECT derivative(value) FROM myseries group by time(1h)`, err: `aggregate function required inside the call to derivative`},
{s: `SELECT derivative(top(value)) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for top, expected at least 2, got 1`},
{s: `SELECT derivative(bottom(value)) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for bottom, expected at least 2, got 1`},
{s: `SELECT derivative(max()) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for max, expected 1, got 0`},
{s: `SELECT derivative(percentile(value)) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for percentile, expected 2, got 1`},
{s: `SELECT non_negative_derivative(), field1 FROM myseries`, err: `mixing aggregate and non-aggregate queries is not supported`},
{s: `select non_negative_derivative() from myseries`, err: `invalid number of arguments for non_negative_derivative, expected at least 1 but no more than 2, got 0`},
{s: `select non_negative_derivative(mean(value), 1h, 3) from myseries`, err: `invalid number of arguments for non_negative_derivative, expected at least 1 but no more than 2, got 3`},
{s: `SELECT non_negative_derivative(value) FROM myseries group by time(1h)`, err: `aggregate function required inside the call to non_negative_derivative`},
{s: `SELECT non_negative_derivative(top(value)) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for top, expected at least 2, got 1`},
{s: `SELECT non_negative_derivative(bottom(value)) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for bottom, expected at least 2, got 1`},
{s: `SELECT non_negative_derivative(max()) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for max, expected 1, got 0`},
{s: `SELECT non_negative_derivative(percentile(value)) FROM myseries where time < now() and time > now() - 1d group by time(1h)`, err: `invalid number of arguments for percentile, expected 2, got 1`},
{s: `SELECT field1 from myseries WHERE host =~ 'asd' LIMIT 1`, err: `found asd, expected regex at line 1, char 42`},
{s: `SELECT value > 2 FROM cpu`, err: `invalid operator > in SELECT clause at line 1, char 8; operator is intended for WHERE clause`},
{s: `SELECT value = 2 FROM cpu`, err: `invalid operator = in SELECT clause at line 1, char 8; operator is intended for WHERE clause`},
{s: `SELECT s =~ /foo/ FROM cpu`, err: `invalid operator =~ in SELECT clause at line 1, char 8; operator is intended for WHERE clause`},
// See issues https://github.com/influxdb/influxdb/issues/1647
// and https://github.com/influxdb/influxdb/issues/4404
//{s: `DELETE`, err: `found EOF, expected FROM at line 1, char 8`},
//{s: `DELETE FROM`, err: `found EOF, expected identifier at line 1, char 13`},
//{s: `DELETE FROM myseries WHERE`, err: `found EOF, expected identifier, string, number, bool at line 1, char 28`},
{s: `DELETE`, err: `DELETE FROM is currently not supported. Use DROP SERIES or DROP MEASUREMENT instead`},
{s: `DELETE FROM`, err: `DELETE FROM is currently not supported. Use DROP SERIES or DROP MEASUREMENT instead`},
{s: `DELETE FROM myseries WHERE`, err: `DELETE FROM is currently not supported. Use DROP SERIES or DROP MEASUREMENT instead`},
{s: `DROP MEASUREMENT`, err: `found EOF, expected identifier at line 1, char 18`},
{s: `DROP SERIES`, err: `found EOF, expected FROM, WHERE at line 1, char 13`},
{s: `DROP SERIES FROM`, err: `found EOF, expected identifier at line 1, char 18`},
{s: `DROP SERIES FROM src WHERE`, err: `found EOF, expected identifier, string, number, bool at line 1, char 28`},
{s: `DROP META SERVER`, err: `found EOF, expected number at line 1, char 18`},
{s: `DROP DATA SERVER abc`, err: `found abc, expected number at line 1, char 18`},
{s: `SHOW CONTINUOUS`, err: `found EOF, expected QUERIES at line 1, char 17`},
{s: `SHOW RETENTION`, err: `found EOF, expected POLICIES at line 1, char 16`},
{s: `SHOW RETENTION ON`, err: `found ON, expected POLICIES at line 1, char 16`},
{s: `SHOW RETENTION POLICIES`, err: `found EOF, expected ON at line 1, char 25`},
{s: `SHOW RETENTION POLICIES mydb`, err: `found mydb, expected ON at line 1, char 25`},
{s: `SHOW RETENTION POLICIES ON`, err: `found EOF, expected identifier at line 1, char 28`},
{s: `SHOW SHARD`, err: `found EOF, expected GROUPS at line 1, char 12`},
{s: `SHOW FOO`, err: `found FOO, expected CONTINUOUS, DATABASES, DIAGNOSTICS, FIELD, GRANTS, MEASUREMENTS, RETENTION, SERIES, SERVERS, SHARD, SHARDS, STATS, SUBSCRIPTIONS, TAG, USERS at line 1, char 6`},
{s: `SHOW STATS FOR`, err: `found EOF, expected string at line 1, char 16`},
{s: `SHOW DIAGNOSTICS FOR`, err: `found EOF, expected string at line 1, char 22`},
{s: `SHOW GRANTS`, err: `found EOF, expected FOR at line 1, char 13`},
{s: `SHOW GRANTS FOR`, err: `found EOF, expected identifier at line 1, char 17`},
{s: `DROP CONTINUOUS`, err: `found EOF, expected QUERY at line 1, char 17`},
{s: `DROP CONTINUOUS QUERY`, err: `found EOF, expected identifier at line 1, char 23`},
{s: `DROP CONTINUOUS QUERY myquery`, err: `found EOF, expected ON at line 1, char 31`},
{s: `DROP CONTINUOUS QUERY myquery ON`, err: `found EOF, expected identifier at line 1, char 34`},
{s: `CREATE CONTINUOUS`, err: `found EOF, expected QUERY at line 1, char 19`},
{s: `CREATE CONTINUOUS QUERY`, err: `found EOF, expected identifier at line 1, char 25`},
{s: `CREATE CONTINUOUS QUERY cq ON db RESAMPLE FOR 5s BEGIN SELECT mean(value) INTO cpu_mean FROM cpu GROUP BY time(10s) END`, err: `FOR duration must be >= GROUP BY time duration: must be a minimum of 10s, got 5s`},
{s: `CREATE CONTINUOUS QUERY cq ON db RESAMPLE EVERY 10s FOR 5s BEGIN SELECT mean(value) INTO cpu_mean FROM cpu GROUP BY time(5s) END`, err: `FOR duration must be >= GROUP BY time duration: must be a minimum of 10s, got 5s`},
{s: `DROP FOO`, err: `found FOO, expected SERIES, CONTINUOUS, MEASUREMENT, SERVER, SUBSCRIPTION at line 1, char 6`},
{s: `CREATE FOO`, err: `found FOO, expected CONTINUOUS, DATABASE, USER, RETENTION, SUBSCRIPTION at line 1, char 8`},
{s: `CREATE DATABASE`, err: `found EOF, expected identifier at line 1, char 17`},
{s: `CREATE DATABASE "testdb" WITH`, err: `found EOF, expected DURATION, REPLICATION, NAME at line 1, char 31`},
{s: `CREATE DATABASE "testdb" WITH DURATION`, err: `found EOF, expected duration at line 1, char 40`},
{s: `CREATE DATABASE "testdb" WITH REPLICATION`, err: `found EOF, expected number at line 1, char 43`},
{s: `CREATE DATABASE "testdb" WITH NAME`, err: `found EOF, expected identifier at line 1, char 36`},
{s: `CREATE DATABASE IF`, err: `found EOF, expected NOT at line 1, char 20`},
{s: `CREATE DATABASE IF NOT`, err: `found EOF, expected EXISTS at line 1, char 24`},
{s: `CREATE DATABASE IF NOT EXISTS`, err: `found EOF, expected identifier at line 1, char 31`},
{s: `CREATE DATABASE IF NOT EXISTS "testdb" WITH`, err: `found EOF, expected DURATION, REPLICATION, NAME at line 1, char 45`},
{s: `CREATE DATABASE IF NOT EXISTS "testdb" WITH DURATION`, err: `found EOF, expected duration at line 1, char 54`},
{s: `CREATE DATABASE IF NOT EXISTS "testdb" WITH REPLICATION`, err: `found EOF, expected number at line 1, char 57`},
{s: `CREATE DATABASE IF NOT EXISTS "testdb" WITH NAME`, err: `found EOF, expected identifier at line 1, char 50`},
{s: `DROP DATABASE`, err: `found EOF, expected identifier at line 1, char 15`},
{s: `DROP DATABASE IF`, err: `found EOF, expected EXISTS at line 1, char 18`},
{s: `DROP DATABASE IF EXISTS`, err: `found EOF, expected identifier at line 1, char 25`},
{s: `DROP RETENTION`, err: `found EOF, expected POLICY at line 1, char 16`},
{s: `DROP RETENTION POLICY`, err: `found EOF, expected identifier at line 1, char 23`},
{s: `DROP RETENTION POLICY "1h.cpu"`, err: `found EOF, expected ON at line 1, char 31`},
{s: `DROP RETENTION POLICY "1h.cpu" ON`, err: `found EOF, expected identifier at line 1, char 35`},
{s: `DROP USER`, err: `found EOF, expected identifier at line 1, char 11`},
{s: `DROP SUBSCRIPTION`, err: `found EOF, expected identifier at line 1, char 19`},
{s: `DROP SUBSCRIPTION "name"`, err: `found EOF, expected ON at line 1, char 25`},
{s: `DROP SUBSCRIPTION "name" ON `, err: `found EOF, expected identifier at line 1, char 30`},
{s: `DROP SUBSCRIPTION "name" ON "db"`, err: `found EOF, expected . at line 1, char 33`},
{s: `DROP SUBSCRIPTION "name" ON "db".`, err: `found EOF, expected identifier at line 1, char 34`},
{s: `CREATE USER testuser`, err: `found EOF, expected WITH at line 1, char 22`},
{s: `CREATE USER testuser WITH`, err: `found EOF, expected PASSWORD at line 1, char 27`},
{s: `CREATE USER testuser WITH PASSWORD`, err: `found EOF, expected string at line 1, char 36`},
{s: `CREATE USER testuser WITH PASSWORD 'pwd' WITH`, err: `found EOF, expected ALL at line 1, char 47`},
{s: `CREATE USER testuser WITH PASSWORD 'pwd' WITH ALL`, err: `found EOF, expected PRIVILEGES at line 1, char 51`},
{s: `CREATE SUBSCRIPTION`, err: `found EOF, expected identifier at line 1, char 21`},
{s: `CREATE SUBSCRIPTION "name"`, err: `found EOF, expected ON at line 1, char 27`},
{s: `CREATE SUBSCRIPTION "name" ON `, err: `found EOF, expected identifier at line 1, char 32`},
{s: `CREATE SUBSCRIPTION "name" ON "db"`, err: `found EOF, expected . at line 1, char 35`},
{s: `CREATE SUBSCRIPTION "name" ON "db".`, err: `found EOF, expected identifier at line 1, char 36`},
{s: `CREATE SUBSCRIPTION "name" ON "db"."rp"`, err: `found EOF, expected DESTINATIONS at line 1, char 40`},
{s: `CREATE SUBSCRIPTION "name" ON "db"."rp" DESTINATIONS`, err: `found EOF, expected ALL, ANY at line 1, char 54`},
{s: `CREATE SUBSCRIPTION "name" ON "db"."rp" DESTINATIONS ALL `, err: `found EOF, expected string at line 1, char 59`},
{s: `GRANT`, err: `found EOF, expected READ, WRITE, ALL [PRIVILEGES] at line 1, char 7`},
{s: `GRANT BOGUS`, err: `found BOGUS, expected READ, WRITE, ALL [PRIVILEGES] at line 1, char 7`},
{s: `GRANT READ`, err: `found EOF, expected ON at line 1, char 12`},
{s: `GRANT READ FROM`, err: `found FROM, expected ON at line 1, char 12`},
{s: `GRANT READ ON`, err: `found EOF, expected identifier at line 1, char 15`},
{s: `GRANT READ ON TO`, err: `found TO, expected identifier at line 1, char 15`},
{s: `GRANT READ ON testdb`, err: `found EOF, expected TO at line 1, char 22`},
{s: `GRANT READ ON testdb TO`, err: `found EOF, expected identifier at line 1, char 25`},
{s: `GRANT READ TO`, err: `found TO, expected ON at line 1, char 12`},
{s: `GRANT WRITE`, err: `found EOF, expected ON at line 1, char 13`},
{s: `GRANT WRITE FROM`, err: `found FROM, expected ON at line 1, char 13`},
{s: `GRANT WRITE ON`, err: `found EOF, expected identifier at line 1, char 16`},
{s: `GRANT WRITE ON TO`, err: `found TO, expected identifier at line 1, char 16`},
{s: `GRANT WRITE ON testdb`, err: `found EOF, expected TO at line 1, char 23`},
{s: `GRANT WRITE ON testdb TO`, err: `found EOF, expected identifier at line 1, char 26`},
{s: `GRANT WRITE TO`, err: `found TO, expected ON at line 1, char 13`},
{s: `GRANT ALL`, err: `found EOF, expected ON, TO at line 1, char 11`},
{s: `GRANT ALL PRIVILEGES`, err: `found EOF, expected ON, TO at line 1, char 22`},
{s: `GRANT ALL FROM`, err: `found FROM, expected ON, TO at line 1, char 11`},
{s: `GRANT ALL PRIVILEGES FROM`, err: `found FROM, expected ON, TO at line 1, char 22`},
{s: `GRANT ALL ON`, err: `found EOF, expected identifier at line 1, char 14`},
{s: `GRANT ALL PRIVILEGES ON`, err: `found EOF, expected identifier at line 1, char 25`},
{s: `GRANT ALL ON TO`, err: `found TO, expected identifier at line 1, char 14`},
{s: `GRANT ALL PRIVILEGES ON TO`, err: `found TO, expected identifier at line 1, char 25`},
{s: `GRANT ALL ON testdb`, err: `found EOF, expected TO at line 1, char 21`},
{s: `GRANT ALL PRIVILEGES ON testdb`, err: `found EOF, expected TO at line 1, char 32`},
{s: `GRANT ALL ON testdb FROM`, err: `found FROM, expected TO at line 1, char 21`},
{s: `GRANT ALL PRIVILEGES ON testdb FROM`, err: `found FROM, expected TO at line 1, char 32`},
{s: `GRANT ALL ON testdb TO`, err: `found EOF, expected identifier at line 1, char 24`},
{s: `GRANT ALL PRIVILEGES ON testdb TO`, err: `found EOF, expected identifier at line 1, char 35`},
{s: `GRANT ALL TO`, err: `found EOF, expected identifier at line 1, char 14`},
{s: `GRANT ALL PRIVILEGES TO`, err: `found EOF, expected identifier at line 1, char 25`},
{s: `REVOKE`, err: `found EOF, expected READ, WRITE, ALL [PRIVILEGES] at line 1, char 8`},
{s: `REVOKE BOGUS`, err: `found BOGUS, expected READ, WRITE, ALL [PRIVILEGES] at line 1, char 8`},
{s: `REVOKE READ`, err: `found EOF, expected ON at line 1, char 13`},
{s: `REVOKE READ TO`, err: `found TO, expected ON at line 1, char 13`},
{s: `REVOKE READ ON`, err: `found EOF, expected identifier at line 1, char 16`},
{s: `REVOKE READ ON FROM`, err: `found FROM, expected identifier at line 1, char 16`},
{s: `REVOKE READ ON testdb`, err: `found EOF, expected FROM at line 1, char 23`},
{s: `REVOKE READ ON testdb FROM`, err: `found EOF, expected identifier at line 1, char 28`},
{s: `REVOKE READ FROM`, err: `found FROM, expected ON at line 1, char 13`},
{s: `REVOKE WRITE`, err: `found EOF, expected ON at line 1, char 14`},
{s: `REVOKE WRITE TO`, err: `found TO, expected ON at line 1, char 14`},
{s: `REVOKE WRITE ON`, err: `found EOF, expected identifier at line 1, char 17`},
{s: `REVOKE WRITE ON FROM`, err: `found FROM, expected identifier at line 1, char 17`},
{s: `REVOKE WRITE ON testdb`, err: `found EOF, expected FROM at line 1, char 24`},
{s: `REVOKE WRITE ON testdb FROM`, err: `found EOF, expected identifier at line 1, char 29`},
{s: `REVOKE WRITE FROM`, err: `found FROM, expected ON at line 1, char 14`},
{s: `REVOKE ALL`, err: `found EOF, expected ON, FROM at line 1, char 12`},
{s: `REVOKE ALL PRIVILEGES`, err: `found EOF, expected ON, FROM at line 1, char 23`},
{s: `REVOKE ALL TO`, err: `found TO, expected ON, FROM at line 1, char 12`},
{s: `REVOKE ALL PRIVILEGES TO`, err: `found TO, expected ON, FROM at line 1, char 23`},
{s: `REVOKE ALL ON`, err: `found EOF, expected identifier at line 1, char 15`},
{s: `REVOKE ALL PRIVILEGES ON`, err: `found EOF, expected identifier at line 1, char 26`},
{s: `REVOKE ALL ON FROM`, err: `found FROM, expected identifier at line 1, char 15`},
{s: `REVOKE ALL PRIVILEGES ON FROM`, err: `found FROM, expected identifier at line 1, char 26`},
{s: `REVOKE ALL ON testdb`, err: `found EOF, expected FROM at line 1, char 22`},
{s: `REVOKE ALL PRIVILEGES ON testdb`, err: `found EOF, expected FROM at line 1, char 33`},
{s: `REVOKE ALL ON testdb TO`, err: `found TO, expected FROM at line 1, char 22`},
{s: `REVOKE ALL PRIVILEGES ON testdb TO`, err: `found TO, expected FROM at line 1, char 33`},
{s: `REVOKE ALL ON testdb FROM`, err: `found EOF, expected identifier at line 1, char 27`},
{s: `REVOKE ALL PRIVILEGES ON testdb FROM`, err: `found EOF, expected identifier at line 1, char 38`},
{s: `REVOKE ALL FROM`, err: `found EOF, expected identifier at line 1, char 17`},
{s: `REVOKE ALL PRIVILEGES FROM`, err: `found EOF, expected identifier at line 1, char 28`},
{s: `CREATE RETENTION`, err: `found EOF, expected POLICY at line 1, char 18`},
{s: `CREATE RETENTION POLICY`, err: `found EOF, expected identifier at line 1, char 25`},
{s: `CREATE RETENTION POLICY policy1`, err: `found EOF, expected ON at line 1, char 33`},
{s: `CREATE RETENTION POLICY policy1 ON`, err: `found EOF, expected identifier at line 1, char 36`},
{s: `CREATE RETENTION POLICY policy1 ON testdb`, err: `found EOF, expected DURATION at line 1, char 43`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION`, err: `found EOF, expected duration at line 1, char 52`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION bad`, err: `found bad, expected duration at line 1, char 52`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 1h`, err: `found EOF, expected REPLICATION at line 1, char 54`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 1h REPLICATION`, err: `found EOF, expected number at line 1, char 67`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 1h REPLICATION 3.14`, err: `number must be an integer at line 1, char 67`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 1h REPLICATION 0`, err: `invalid value 0: must be 1 <= n <= 2147483647 at line 1, char 67`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 1h REPLICATION bad`, err: `found bad, expected number at line 1, char 67`},
{s: `CREATE RETENTION POLICY policy1 ON testdb DURATION 1h REPLICATION 1 foo`, err: `found foo, expected DEFAULT at line 1, char 69`},
{s: `ALTER`, err: `found EOF, expected RETENTION at line 1, char 7`},
{s: `ALTER RETENTION`, err: `found EOF, expected POLICY at line 1, char 17`},
{s: `ALTER RETENTION POLICY`, err: `found EOF, expected identifier at line 1, char 24`},
		{s: `ALTER RETENTION POLICY policy1`, err: `found EOF, expected ON at line 1, char 32`},
		{s: `ALTER RETENTION POLICY policy1 ON`, err: `found EOF, expected identifier at line 1, char 35`},
{s: `ALTER RETENTION POLICY policy1 ON testdb`, err: `found EOF, expected DURATION, RETENTION, DEFAULT at line 1, char 42`},
{s: `SET`, err: `found EOF, expected PASSWORD at line 1, char 5`},
{s: `SET PASSWORD`, err: `found EOF, expected FOR at line 1, char 14`},
{s: `SET PASSWORD something`, err: `found something, expected FOR at line 1, char 14`},
{s: `SET PASSWORD FOR`, err: `found EOF, expected identifier at line 1, char 18`},
{s: `SET PASSWORD FOR dejan`, err: `found EOF, expected = at line 1, char 24`},
{s: `SET PASSWORD FOR dejan =`, err: `found EOF, expected string at line 1, char 25`},
{s: `SET PASSWORD FOR dejan = bla`, err: `found bla, expected string at line 1, char 26`},
}
for i, tt := range tests {
if tt.skip {
t.Logf("skipping test of '%s'", tt.s)
continue
}
stmt, err := influxql.NewParser(strings.NewReader(tt.s)).ParseStatement()
		// GroupByInterval memoizes its result, so call it on the expected statement here so its cached field matches the parsed statement before comparison.
if s, ok := tt.stmt.(*influxql.SelectStatement); ok {
s.GroupByInterval()
} else if st, ok := stmt.(*influxql.CreateContinuousQueryStatement); ok { // if it's a CQ, there is a non-exported field that gets memoized during parsing that needs to be set
if st != nil && st.Source != nil {
tt.stmt.(*influxql.CreateContinuousQueryStatement).Source.GroupByInterval()
}
}
if !reflect.DeepEqual(tt.err, errstring(err)) {
t.Errorf("%d. %q: error mismatch:\n exp=%s\n got=%s\n\n", i, tt.s, tt.err, err)
} else if tt.err == "" && !reflect.DeepEqual(tt.stmt, stmt) {
t.Logf("\n# %s\nexp=%s\ngot=%s\n", tt.s, mustMarshalJSON(tt.stmt), mustMarshalJSON(stmt))
t.Logf("\nSQL exp=%s\nSQL got=%s\n", tt.stmt.String(), stmt.String())
t.Errorf("%d. %q\n\nstmt mismatch:\n\nexp=%#v\n\ngot=%#v\n\n", i, tt.s, tt.stmt, stmt)
}
}
}
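
// The error entries in the table above exercise the parser's positional
// error messages. As an illustrative sketch (an addition, not part of the
// original test coverage), a godoc-style example can surface the same
// "line N, char M" information to callers; the query and expected output
// mirror the `SELECT field1 AS` entry in the table above.
func ExampleParser_ParseStatement_error() {
	_, err := influxql.NewParser(strings.NewReader(`SELECT field1 AS`)).ParseStatement()
	if err != nil {
		fmt.Println(err)
	}
	// Output: found EOF, expected identifier at line 1, char 18
}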
// Ensure the parser can parse expressions into an AST.
func TestParser_ParseExpr(t *testing.T) {
var tests = []struct {
s string
expr influxql.Expr
err string
}{
// Primitives
{s: `100`, expr: &influxql.NumberLiteral{Val: 100}},
{s: `'foo bar'`, expr: &influxql.StringLiteral{Val: "foo bar"}},
{s: `true`, expr: &influxql.BooleanLiteral{Val: true}},
{s: `false`, expr: &influxql.BooleanLiteral{Val: false}},
{s: `my_ident`, expr: &influxql.VarRef{Val: "my_ident"}},
{s: `'2000-01-01 00:00:00'`, expr: &influxql.TimeLiteral{Val: mustParseTime("2000-01-01T00:00:00Z")}},
{s: `'2000-01-01 00:00:00.232'`, expr: &influxql.TimeLiteral{Val: mustParseTime("2000-01-01T00:00:00.232Z")}},
{s: `'2000-01-32 00:00:00'`, err: `unable to parse datetime at line 1, char 1`},
{s: `'2000-01-01'`, expr: &influxql.TimeLiteral{Val: mustParseTime("2000-01-01T00:00:00Z")}},
{s: `'2000-01-99'`, err: `unable to parse date at line 1, char 1`},
// Simple binary expression
{
s: `1 + 2`,
expr: &influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.NumberLiteral{Val: 1},
RHS: &influxql.NumberLiteral{Val: 2},
},
},
// Binary expression with LHS precedence
{
s: `1 * 2 + 3`,
expr: &influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.BinaryExpr{
Op: influxql.MUL,
LHS: &influxql.NumberLiteral{Val: 1},
RHS: &influxql.NumberLiteral{Val: 2},
},
RHS: &influxql.NumberLiteral{Val: 3},
},
},
// Binary expression with RHS precedence
{
s: `1 + 2 * 3`,
expr: &influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.NumberLiteral{Val: 1},
RHS: &influxql.BinaryExpr{
Op: influxql.MUL,
LHS: &influxql.NumberLiteral{Val: 2},
RHS: &influxql.NumberLiteral{Val: 3},
},
},
},
// Binary expression with LHS paren group.
{
s: `(1 + 2) * 3`,
expr: &influxql.BinaryExpr{
Op: influxql.MUL,
LHS: &influxql.ParenExpr{
Expr: &influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.NumberLiteral{Val: 1},
RHS: &influxql.NumberLiteral{Val: 2},
},
},
RHS: &influxql.NumberLiteral{Val: 3},
},
},
// Binary expression with no precedence, tests left associativity.
{
s: `1 * 2 * 3`,
expr: &influxql.BinaryExpr{
Op: influxql.MUL,
LHS: &influxql.BinaryExpr{
Op: influxql.MUL,
LHS: &influxql.NumberLiteral{Val: 1},
RHS: &influxql.NumberLiteral{Val: 2},
},
RHS: &influxql.NumberLiteral{Val: 3},
},
},
// Binary expression with regex.
{
s: `region =~ /us.*/`,
expr: &influxql.BinaryExpr{
Op: influxql.EQREGEX,
LHS: &influxql.VarRef{Val: "region"},
RHS: &influxql.RegexLiteral{Val: regexp.MustCompile(`us.*`)},
},
},
// Binary expression with quoted '/' regex.
{
s: `url =~ /http\:\/\/www\.example\.com/`,
expr: &influxql.BinaryExpr{
Op: influxql.EQREGEX,
LHS: &influxql.VarRef{Val: "url"},
RHS: &influxql.RegexLiteral{Val: regexp.MustCompile(`http\://www\.example\.com`)},
},
},
// Complex binary expression.
{
s: `value + 3 < 30 AND 1 + 2 OR true`,
expr: &influxql.BinaryExpr{
Op: influxql.OR,
LHS: &influxql.BinaryExpr{
Op: influxql.AND,
LHS: &influxql.BinaryExpr{
Op: influxql.LT,
LHS: &influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.VarRef{Val: "value"},
RHS: &influxql.NumberLiteral{Val: 3},
},
RHS: &influxql.NumberLiteral{Val: 30},
},
RHS: &influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.NumberLiteral{Val: 1},
RHS: &influxql.NumberLiteral{Val: 2},
},
},
RHS: &influxql.BooleanLiteral{Val: true},
},
},
// Complex binary expression.
{
s: `time > now() - 1d AND time < now() + 1d`,
expr: &influxql.BinaryExpr{
Op: influxql.AND,
LHS: &influxql.BinaryExpr{
Op: influxql.GT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.BinaryExpr{
Op: influxql.SUB,
LHS: &influxql.Call{Name: "now"},
RHS: &influxql.DurationLiteral{Val: mustParseDuration("1d")},
},
},
RHS: &influxql.BinaryExpr{
Op: influxql.LT,
LHS: &influxql.VarRef{Val: "time"},
RHS: &influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.Call{Name: "now"},
RHS: &influxql.DurationLiteral{Val: mustParseDuration("1d")},
},
},
},
},
// Function call (empty)
{
s: `my_func()`,
expr: &influxql.Call{
Name: "my_func",
},
},
// Function call (multi-arg)
{
s: `my_func(1, 2 + 3)`,
expr: &influxql.Call{
Name: "my_func",
Args: []influxql.Expr{
&influxql.NumberLiteral{Val: 1},
&influxql.BinaryExpr{
Op: influxql.ADD,
LHS: &influxql.NumberLiteral{Val: 2},
RHS: &influxql.NumberLiteral{Val: 3},
},
},
},
},
}
for i, tt := range tests {
expr, err := influxql.NewParser(strings.NewReader(tt.s)).ParseExpr()
if !reflect.DeepEqual(tt.err, errstring(err)) {
t.Errorf("%d. %q: error mismatch:\n exp=%s\n got=%s\n\n", i, tt.s, tt.err, err)
} else if tt.err == "" && !reflect.DeepEqual(tt.expr, expr) {
t.Errorf("%d. %q\n\nexpr mismatch:\n\nexp=%#v\n\ngot=%#v\n\n", i, tt.s, tt.expr, expr)
}
}
}
// Ensure a time duration can be parsed.
func TestParseDuration(t *testing.T) {
var tests = []struct {
s string
d time.Duration
err string
}{
{s: `10u`, d: 10 * time.Microsecond},
{s: `10µ`, d: 10 * time.Microsecond},
{s: `15ms`, d: 15 * time.Millisecond},
{s: `100s`, d: 100 * time.Second},
{s: `2m`, d: 2 * time.Minute},
{s: `2h`, d: 2 * time.Hour},
{s: `2d`, d: 2 * 24 * time.Hour},
{s: `2w`, d: 2 * 7 * 24 * time.Hour},
{s: ``, err: "invalid duration"},
{s: `3`, err: "invalid duration"},
{s: `1000`, err: "invalid duration"},
{s: `w`, err: "invalid duration"},
{s: `ms`, err: "invalid duration"},
{s: `1.2w`, err: "invalid duration"},
{s: `10x`, err: "invalid duration"},
}
for i, tt := range tests {
d, err := influxql.ParseDuration(tt.s)
if !reflect.DeepEqual(tt.err, errstring(err)) {
t.Errorf("%d. %q: error mismatch:\n exp=%s\n got=%s\n\n", i, tt.s, tt.err, err)
} else if tt.d != d {
t.Errorf("%d. %q\n\nduration mismatch:\n\nexp=%#v\n\ngot=%#v\n\n", i, tt.s, tt.d, d)
}
}
}
// Ensure a time duration can be formatted.
func TestFormatDuration(t *testing.T) {
var tests = []struct {
d time.Duration
s string
}{
{d: 3 * time.Microsecond, s: `3u`},
{d: 1001 * time.Microsecond, s: `1001u`},
{d: 15 * time.Millisecond, s: `15ms`},
{d: 100 * time.Second, s: `100s`},
{d: 2 * time.Minute, s: `2m`},
{d: 2 * time.Hour, s: `2h`},
{d: 2 * 24 * time.Hour, s: `2d`},
{d: 2 * 7 * 24 * time.Hour, s: `2w`},
}
for i, tt := range tests {
s := influxql.FormatDuration(tt.d)
if tt.s != s {
t.Errorf("%d. %v: mismatch: %s != %s", i, tt.d, tt.s, s)
}
}
}
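
// A small round-trip sketch (added for illustration, not original test
// coverage): ParseDuration and FormatDuration are inverses for the units in
// the two tables above, e.g. "2w" parses to 336 hours and formats back to "2w".
func ExampleParseDuration() {
	d, err := influxql.ParseDuration(`2w`)
	if err != nil {
		panic(err)
	}
	fmt.Println(d, influxql.FormatDuration(d))
	// Output: 336h0m0s 2w
}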
// Ensure a string can be quoted.
func TestQuote(t *testing.T) {
for i, tt := range []struct {
in string
out string
}{
{``, `''`},
{`foo`, `'foo'`},
{"foo\nbar", `'foo\nbar'`},
{`foo bar\\`, `'foo bar\\\\'`},
{`'foo'`, `'\'foo\''`},
} {
if out := influxql.QuoteString(tt.in); tt.out != out {
t.Errorf("%d. %s: mismatch: %s != %s", i, tt.in, tt.out, out)
}
}
}
// Ensure an identifier's segments can be quoted.
func TestQuoteIdent(t *testing.T) {
for i, tt := range []struct {
ident []string
s string
}{
{[]string{``}, ``},
{[]string{`select`}, `"select"`},
{[]string{`in-bytes`}, `"in-bytes"`},
{[]string{`foo`, `bar`}, `"foo".bar`},
{[]string{`foo`, ``, `bar`}, `"foo"..bar`},
{[]string{`foo bar`, `baz`}, `"foo bar".baz`},
{[]string{`foo.bar`, `baz`}, `"foo.bar".baz`},
{[]string{`foo.bar`, `rp`, `baz`}, `"foo.bar"."rp".baz`},
{[]string{`foo.bar`, `rp`, `1baz`}, `"foo.bar"."rp"."1baz"`},
} {
if s := influxql.QuoteIdent(tt.ident...); tt.s != s {
t.Errorf("%d. %s: mismatch: %s != %s", i, tt.ident, tt.s, s)
}
}
}
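
// QuoteIdent joins identifier segments with dots and quotes any segment that
// requires it. A minimal usage sketch (added for illustration), taken from the
// last case in the table above:
func ExampleQuoteIdent() {
	fmt.Println(influxql.QuoteIdent("foo.bar", "rp", "1baz"))
	// Output: "foo.bar"."rp"."1baz"
}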
// Ensure DropSeriesStatement can convert to a string
func TestDropSeriesStatement_String(t *testing.T) {
var tests = []struct {
s string
stmt influxql.Statement
}{
{
s: `DROP SERIES FROM src`,
stmt: &influxql.DropSeriesStatement{Sources: []influxql.Source{&influxql.Measurement{Name: "src"}}},
},
{
s: `DROP SERIES FROM src WHERE host = 'hosta.influxdb.org'`,
stmt: &influxql.DropSeriesStatement{
Sources: []influxql.Source{&influxql.Measurement{Name: "src"}},
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "host"},
RHS: &influxql.StringLiteral{Val: "hosta.influxdb.org"},
},
},
},
{
s: `DROP SERIES WHERE host = 'hosta.influxdb.org'`,
stmt: &influxql.DropSeriesStatement{
Condition: &influxql.BinaryExpr{
Op: influxql.EQ,
LHS: &influxql.VarRef{Val: "host"},
RHS: &influxql.StringLiteral{Val: "hosta.influxdb.org"},
},
},
},
}
for _, test := range tests {
s := test.stmt.String()
if s != test.s {
t.Errorf("error rendering string. expected %s, actual: %s", test.s, s)
}
}
}
func BenchmarkParserParseStatement(b *testing.B) {
b.ReportAllocs()
s := `SELECT field FROM "series" WHERE value > 10`
for i := 0; i < b.N; i++ {
if stmt, err := influxql.NewParser(strings.NewReader(s)).ParseStatement(); err != nil {
b.Fatalf("unexpected error: %s", err)
} else if stmt == nil {
b.Fatalf("expected statement: %s", stmt)
}
}
b.SetBytes(int64(len(s)))
}
// MustParseSelectStatement parses a select statement. It panics on error.
func MustParseSelectStatement(s string) *influxql.SelectStatement {
stmt, err := influxql.NewParser(strings.NewReader(s)).ParseStatement()
panicIfErr(err)
return stmt.(*influxql.SelectStatement)
}
// MustParseExpr parses an expression. It panics on error.
func MustParseExpr(s string) influxql.Expr {
expr, err := influxql.NewParser(strings.NewReader(s)).ParseExpr()
panicIfErr(err)
return expr
}
// errstring converts an error to its string representation.
func errstring(err error) string {
if err != nil {
return err.Error()
}
return ""
}
// newAlterRetentionPolicyStatement creates an initialized AlterRetentionPolicyStatement.
func newAlterRetentionPolicyStatement(name string, DB string, d time.Duration, replication int, dfault bool) *influxql.AlterRetentionPolicyStatement {
stmt := &influxql.AlterRetentionPolicyStatement{
Name: name,
Database: DB,
Default: dfault,
}
if d > -1 {
stmt.Duration = &d
}
if replication > -1 {
stmt.Replication = &replication
}
return stmt
}
// mustMarshalJSON encodes a value to JSON.
func mustMarshalJSON(v interface{}) []byte {
b, err := json.Marshal(v)
panicIfErr(err)
return b
}
func mustParseDuration(s string) time.Duration {
d, err := influxql.ParseDuration(s)
panicIfErr(err)
return d
}
func panicIfErr(err error) {
if err != nil {
panic(err)
}
}