Merge pull request #4063 from influxdb/issue-3978

Allow wildcards with fields! Fixes #3978

commit 2d667298f3
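
This change lets a SELECT clause mix a wildcard with named fields (and GROUP BY * with additional dimensions). As a minimal sketch of what now parses, assuming the influxql package's exported ParseStatement helper, with query strings taken from the parser tests added below:

package main

import (
	"fmt"

	"github.com/influxdb/influxdb/influxql"
)

func main() {
	// Query shapes from the new parser tests; before this change, mixing a
	// wildcard with named fields (the last two) was rejected at parse time
	// with "wildcards can not be combined with other fields".
	queries := []string{
		`SELECT * FROM myseries GROUP BY *`,
		`SELECT field1, * FROM myseries GROUP BY *`,
		`SELECT *, field1 FROM myseries GROUP BY *`,
	}
	for _, q := range queries {
		stmt, err := influxql.ParseStatement(q)
		if err != nil {
			fmt.Printf("%q -> parse error: %v\n", q, err)
			continue
		}
		fmt.Printf("%q -> parsed as %T\n", q, stmt)
	}
}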

@@ -52,6 +52,7 @@ With this release InfluxDB is moving to Go 1.5.
- [#3926](https://github.com/influxdb/influxdb/issues/3926): First or last value of `GROUP BY time(x)` is often null. Fixed by [#4038](https://github.com/influxdb/influxdb/pull/4038)
- [#4053](https://github.com/influxdb/influxdb/pull/4053): Prohibit dropping default retention policy.
- [#4060](https://github.com/influxdb/influxdb/pull/4060): Don't log EOF error in openTSDB input.
- [#3978](https://github.com/influxdb/influxdb/issues/3978): [0.9.3] (regression) cannot use GROUP BY * with more than a single field in SELECT clause
## v0.9.3 [2015-08-26]

@@ -2718,6 +2718,9 @@ func TestServer_Query_Wildcards(t *testing.T) {
fmt.Sprintf(`wgroup,region=us-east value=10.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano()),
fmt.Sprintf(`wgroup,region=us-east value=20.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:10Z").UnixNano()),
fmt.Sprintf(`wgroup,region=us-west value=30.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:20Z").UnixNano()),
fmt.Sprintf(`m1,region=us-east value=10.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:00Z").UnixNano()),
fmt.Sprintf(`m2,host=server01 field=20.0 %d`, mustParseTime(time.RFC3339Nano, "2000-01-01T00:00:01Z").UnixNano()),
}
test := NewTest("db0", "rp0")

@@ -2748,6 +2751,48 @@ func TestServer_Query_Wildcards(t *testing.T) {
command: `SELECT mean(value) FROM wgroup WHERE time >= '2000-01-01T00:00:00Z' AND time < '2000-01-01T00:01:00Z' GROUP BY *,TIME(1m)`,
exp: `{"results":[{"series":[{"name":"wgroup","tags":{"region":"us-east"},"columns":["time","mean"],"values":[["2000-01-01T00:00:00Z",15]]},{"name":"wgroup","tags":{"region":"us-west"},"columns":["time","mean"],"values":[["2000-01-01T00:00:00Z",30]]}]}]}`,
},
&Query{
name: "wildcard and field in select",
params: url.Values{"db": []string{"db0"}},
command: `SELECT value, * FROM wildcard`,
exp: `{"results":[{"series":[{"name":"wildcard","columns":["time","region","value","valx"],"values":[["2000-01-01T00:00:00Z","us-east",10,null],["2000-01-01T00:00:10Z","us-east",null,20],["2000-01-01T00:00:20Z","us-east",30,40]]}]}]}`,
},
&Query{
name: "field and wildcard in select",
params: url.Values{"db": []string{"db0"}},
command: `SELECT value, * FROM wildcard`,
exp: `{"results":[{"series":[{"name":"wildcard","columns":["time","region","value","valx"],"values":[["2000-01-01T00:00:00Z","us-east",10,null],["2000-01-01T00:00:10Z","us-east",null,20],["2000-01-01T00:00:20Z","us-east",30,40]]}]}]}`,
},
&Query{
name: "field and wildcard in group by",
params: url.Values{"db": []string{"db0"}},
command: `SELECT * FROM wildcard GROUP BY region, *`,
exp: `{"results":[{"series":[{"name":"wildcard","tags":{"region":"us-east"},"columns":["time","value","valx"],"values":[["2000-01-01T00:00:00Z",10,null],["2000-01-01T00:00:10Z",null,20],["2000-01-01T00:00:20Z",30,40]]}]}]}`,
},
&Query{
name: "wildcard and field in group by",
params: url.Values{"db": []string{"db0"}},
command: `SELECT * FROM wildcard GROUP BY *, region`,
exp: `{"results":[{"series":[{"name":"wildcard","tags":{"region":"us-east"},"columns":["time","value","valx"],"values":[["2000-01-01T00:00:00Z",10,null],["2000-01-01T00:00:10Z",null,20],["2000-01-01T00:00:20Z",30,40]]}]}]}`,
},
&Query{
name: "wildcard with multiple measurements",
params: url.Values{"db": []string{"db0"}},
command: `SELECT * FROM m1, m2`,
exp: `{"results":[{"series":[{"name":"m1","columns":["time","field","host","region","value"],"values":[["2000-01-01T00:00:00Z",null,null,"us-east",10]]},{"name":"m2","columns":["time","field","host","region","value"],"values":[["2000-01-01T00:00:01Z",20,"server01",null,null]]}]}]}`,
},
&Query{
name: "wildcard with multiple measurements via regex",
params: url.Values{"db": []string{"db0"}},
command: `SELECT * FROM /^m.*/`,
exp: `{"results":[{"series":[{"name":"m1","columns":["time","field","host","region","value"],"values":[["2000-01-01T00:00:00Z",null,null,"us-east",10]]},{"name":"m2","columns":["time","field","host","region","value"],"values":[["2000-01-01T00:00:01Z",20,"server01",null,null]]}]}]}`,
},
&Query{
name: "wildcard with multiple measurements via regex and limit",
params: url.Values{"db": []string{"db0"}},
command: `SELECT * FROM db0../^m.*/ LIMIT 2`,
exp: `{"results":[{"series":[{"name":"m1","columns":["time","field","host","region","value"],"values":[["2000-01-01T00:00:00Z",null,null,"us-east",10]]},{"name":"m2","columns":["time","field","host","region","value"],"values":[["2000-01-01T00:00:01Z",20,"server01",null,null]]}]}]}`,
},
}...)
for i, query := range test.queries {

@@ -1058,10 +1058,6 @@ func (s *SelectStatement) validate(tr targetRequirement) error {
return err
}
if err := s.validateWildcard(); err != nil {
return err
}
return nil
}

@@ -1181,13 +1177,6 @@ func (s *SelectStatement) validateAggregates(tr targetRequirement) error {
return nil
}
func (s *SelectStatement) validateWildcard() error {
if s.HasWildcard() && len(s.Fields) > 1 {
return fmt.Errorf("wildcards can not be combined with other fields")
}
return nil
}
func (s *SelectStatement) HasDistinct() bool {
// determine if we have a call named distinct
for _, f := range s.Fields {

@@ -1473,13 +1473,6 @@ func (p *Parser) parseDropContinuousQueryStatement() (*DropContinuousQueryStatem
func (p *Parser) parseFields() (Fields, error) {
var fields Fields
// Check for "*" (i.e., "all fields")
if tok, _, _ := p.scanIgnoreWhitespace(); tok == MUL {
fields = append(fields, &Field{&Wildcard{}, ""})
return fields, nil
}
p.unscan()
for {
// Parse the field.
f, err := p.parseField()

@@ -73,6 +73,41 @@ func TestParser_ParseStatement(t *testing.T) {
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
},
},
{
s: `SELECT * FROM myseries GROUP BY *`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.Wildcard{}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Wildcard{}}},
},
},
{
s: `SELECT field1, * FROM myseries GROUP BY *`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.VarRef{Val: "field1"}},
{Expr: &influxql.Wildcard{}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Wildcard{}}},
},
},
{
s: `SELECT *, field1 FROM myseries GROUP BY *`,
stmt: &influxql.SelectStatement{
IsRawQuery: true,
Fields: []*influxql.Field{
{Expr: &influxql.Wildcard{}},
{Expr: &influxql.VarRef{Val: "field1"}},
},
Sources: []influxql.Source{&influxql.Measurement{Name: "myseries"}},
Dimensions: []*influxql.Dimension{{Expr: &influxql.Wildcard{}}},
},
},
// SELECT statement
{

@@ -1362,9 +1397,6 @@ func TestParser_ParseStatement(t *testing.T) {
{s: `SELECT value > 2 FROM cpu`, err: `invalid operator > in SELECT clause at line 1, char 8; operator is intended for WHERE clause`},
{s: `SELECT value = 2 FROM cpu`, err: `invalid operator = in SELECT clause at line 1, char 8; operator is intended for WHERE clause`},
{s: `SELECT s =~ /foo/ FROM cpu`, err: `invalid operator =~ in SELECT clause at line 1, char 8; operator is intended for WHERE clause`},
{s: `SELECT foo, * from cpu`, err: `wildcards can not be combined with other fields`},
{s: `SELECT *, * from cpu`, err: `found ,, expected FROM at line 1, char 9`},
{s: `SELECT *, foo from cpu`, err: `found ,, expected FROM at line 1, char 9`},
{s: `DELETE`, err: `found EOF, expected FROM at line 1, char 8`},
{s: `DELETE FROM`, err: `found EOF, expected identifier at line 1, char 13`},
{s: `DELETE FROM myseries WHERE`, err: `found EOF, expected identifier, string, number, bool at line 1, char 28`},
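
As a usage note, the wildcard-plus-field queries exercised above go through the server's HTTP query endpoint, mirroring the params/command pairs in the server tests. A rough sketch, assuming a local InfluxDB 0.9 instance on localhost:8086 with a db0 database that already contains the wildcard measurement:

package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
	"net/url"
)

func main() {
	// Build /query parameters the same way the server tests do
	// (url.Values with "db", plus the query string itself in "q").
	params := url.Values{}
	params.Set("db", "db0")
	params.Set("q", `SELECT value, * FROM wildcard`)

	resp, err := http.Get("http://localhost:8086/query?" + params.Encode())
	if err != nil {
		fmt.Println("query failed:", err)
		return
	}
	defer resp.Body.Close()

	body, _ := ioutil.ReadAll(resp.Body)
	// The JSON body has the same shape as the "exp" strings in the tests above.
	fmt.Println(string(body))
}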