add dedupe iterator

pull/5196/head
Ben Johnson 2016-02-05 10:23:35 -07:00
parent 21d2a4c3de
commit 627cd9d486
15 changed files with 692 additions and 11 deletions

View File

@@ -4566,6 +4566,12 @@ func TestServer_Query_DropAndRecreateMeasurement(t *testing.T) {
}
test.addQueries([]*Query{
&Query{
name: "verify cpu measurement exists in db1",
command: `SELECT * FROM cpu`,
exp: `{"results":[{"series":[{"name":"cpu","columns":["time","host","region","val"],"values":[["2000-01-01T00:00:00Z","serverA","uswest",23.2]]}]}]}`,
params: url.Values{"db": []string{"db1"}},
},
&Query{
name: "Drop Measurement, series tags preserved tests",
command: `SHOW MEASUREMENTS`,

View File

@@ -800,6 +800,9 @@ type SelectStatement struct {
// Removes the "time" column from the output.
OmitTime bool
// Removes duplicate rows from raw queries.
Dedupe bool
}
// HasDerivative returns true if one of the function calls in the statement is a

View File

@@ -2,3 +2,5 @@ package influxql
//go:generate tmpl -data=@tmpldata iterator.gen.go.tmpl
//go:generate tmpl -data=@tmpldata point.gen.go.tmpl
//go:generate protoc --gogo_out=. internal/internal.proto
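
For reference, these directives are run with go generate rather than by hand. A hedged example invocation, assuming the tmpl tool, protoc, and the gogo protobuf plugin are installed and the command is run from the repository root:

	go generate ./influxql

This should rebuild iterator.gen.go, point.gen.go, and internal/internal.pb.go (the generated files included in this change).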

View File

@@ -0,0 +1,153 @@
// Code generated by protoc-gen-gogo.
// source: internal/internal.proto
// DO NOT EDIT!
/*
Package internal is a generated protocol buffer package.
It is generated from these files:
internal/internal.proto
It has these top-level messages:
Point
Aux
*/
package internal
import proto "github.com/gogo/protobuf/proto"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = math.Inf
type Point struct {
Name *string `protobuf:"bytes,1,req" json:"Name,omitempty"`
Tags *string `protobuf:"bytes,2,req" json:"Tags,omitempty"`
Time *int64 `protobuf:"varint,3,req" json:"Time,omitempty"`
Nil *bool `protobuf:"varint,4,req" json:"Nil,omitempty"`
Aux []*Aux `protobuf:"bytes,5,rep" json:"Aux,omitempty"`
FloatValue *float64 `protobuf:"fixed64,6,opt" json:"FloatValue,omitempty"`
IntegerValue *int64 `protobuf:"varint,7,opt" json:"IntegerValue,omitempty"`
StringValue *string `protobuf:"bytes,8,opt" json:"StringValue,omitempty"`
BooleanValue *bool `protobuf:"varint,9,opt" json:"BooleanValue,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Point) Reset() { *m = Point{} }
func (m *Point) String() string { return proto.CompactTextString(m) }
func (*Point) ProtoMessage() {}
func (m *Point) GetName() string {
if m != nil && m.Name != nil {
return *m.Name
}
return ""
}
func (m *Point) GetTags() string {
if m != nil && m.Tags != nil {
return *m.Tags
}
return ""
}
func (m *Point) GetTime() int64 {
if m != nil && m.Time != nil {
return *m.Time
}
return 0
}
func (m *Point) GetNil() bool {
if m != nil && m.Nil != nil {
return *m.Nil
}
return false
}
func (m *Point) GetAux() []*Aux {
if m != nil {
return m.Aux
}
return nil
}
func (m *Point) GetFloatValue() float64 {
if m != nil && m.FloatValue != nil {
return *m.FloatValue
}
return 0
}
func (m *Point) GetIntegerValue() int64 {
if m != nil && m.IntegerValue != nil {
return *m.IntegerValue
}
return 0
}
func (m *Point) GetStringValue() string {
if m != nil && m.StringValue != nil {
return *m.StringValue
}
return ""
}
func (m *Point) GetBooleanValue() bool {
if m != nil && m.BooleanValue != nil {
return *m.BooleanValue
}
return false
}
type Aux struct {
DataType *int32 `protobuf:"varint,1,req" json:"DataType,omitempty"`
FloatValue *float64 `protobuf:"fixed64,2,opt" json:"FloatValue,omitempty"`
IntegerValue *int64 `protobuf:"varint,3,opt" json:"IntegerValue,omitempty"`
StringValue *string `protobuf:"bytes,4,opt" json:"StringValue,omitempty"`
BooleanValue *bool `protobuf:"varint,5,opt" json:"BooleanValue,omitempty"`
XXX_unrecognized []byte `json:"-"`
}
func (m *Aux) Reset() { *m = Aux{} }
func (m *Aux) String() string { return proto.CompactTextString(m) }
func (*Aux) ProtoMessage() {}
func (m *Aux) GetDataType() int32 {
if m != nil && m.DataType != nil {
return *m.DataType
}
return 0
}
func (m *Aux) GetFloatValue() float64 {
if m != nil && m.FloatValue != nil {
return *m.FloatValue
}
return 0
}
func (m *Aux) GetIntegerValue() int64 {
if m != nil && m.IntegerValue != nil {
return *m.IntegerValue
}
return 0
}
func (m *Aux) GetStringValue() string {
if m != nil && m.StringValue != nil {
return *m.StringValue
}
return ""
}
func (m *Aux) GetBooleanValue() bool {
if m != nil && m.BooleanValue != nil {
return *m.BooleanValue
}
return false
}
func init() {
}
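
One property of this generated code is worth flagging because decodeAux later in this change depends on it: the generated getters return the zero value when an optional field is unset, so code that must tell "unset" apart from "zero" has to inspect the pointer fields directly. A small, hypothetical sketch, written as an external test in the influxql package directory (Go's internal-package rule means only code under influxql/ may import this package); the example name and values are invented:

package influxql_test

import (
	"fmt"

	"github.com/gogo/protobuf/proto"
	"github.com/influxdb/influxdb/influxql/internal"
)

// Example_auxGetters shows that GetFloatValue cannot distinguish an unset field
// from an explicit zero, while the pointer field still can.
func Example_auxGetters() {
	aux := &internal.Aux{DataType: proto.Int32(0)} // only the required DataType is set; value fields are left unset
	fmt.Println(aux.GetFloatValue())
	fmt.Println(aux.FloatValue == nil)
	// Output:
	// 0
	// true
}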

View File

@@ -0,0 +1,22 @@
package internal;
message Point {
required string Name = 1;
required string Tags = 2;
required int64 Time = 3;
required bool Nil = 4;
repeated Aux Aux = 5;
optional double FloatValue = 6;
optional int64 IntegerValue = 7;
optional string StringValue = 8;
optional bool BooleanValue = 9;
}
message Aux {
required int32 DataType = 1;
optional double FloatValue = 2;
optional int64 IntegerValue = 3;
optional string StringValue = 4;
optional bool BooleanValue = 5;
}

View File

@@ -6,8 +6,11 @@ package influxql
import (
"container/heap"
"fmt"
"log"
"sort"
"sync"
"github.com/gogo/protobuf/proto"
)
// FloatIterator represents a stream of float points.
@@ -721,6 +724,53 @@ func (itr *floatBoolTransformIterator) Next() *BooleanPoint {
// new point if possible.
type floatBoolTransformFunc func(p *FloatPoint) *BooleanPoint
// floatDedupeIterator only outputs unique points.
// This differs from the DistinctIterator in that it compares all aux fields too.
// This iterator is relatively inefficient and should only be used on small
// datasets such as meta query results.
type floatDedupeIterator struct {
input FloatIterator
m map[string]struct{} // lookup of points already sent
}
// newFloatDedupeIterator returns a new instance of floatDedupeIterator.
func newFloatDedupeIterator(input FloatIterator) *floatDedupeIterator {
return &floatDedupeIterator{
input: input,
m: make(map[string]struct{}),
}
}
// Close closes the iterator and all child iterators.
func (itr *floatDedupeIterator) Close() error { return itr.input.Close() }
// Next returns the next unique point from the input iterator.
func (itr *floatDedupeIterator) Next() *FloatPoint {
for {
// Read next point.
p := itr.input.Next()
if p == nil {
return nil
}
// Serialize to bytes to store in lookup.
buf, err := proto.Marshal(encodeFloatPoint(p))
if err != nil {
log.Println("error marshaling dedupe point:", err)
continue
}
// If the point has already been output then move to the next point.
if _, ok := itr.m[string(buf)]; ok {
continue
}
// Otherwise mark it as emitted and return point.
itr.m[string(buf)] = struct{}{}
return p
}
}
// IntegerIterator represents a stream of integer points.
type IntegerIterator interface {
Iterator
@@ -1429,6 +1479,53 @@ func (itr *integerBoolTransformIterator) Next() *BooleanPoint {
// new point if possible.
type integerBoolTransformFunc func(p *IntegerPoint) *BooleanPoint
// integerDedupeIterator only outputs unique points.
// This differs from the DistinctIterator in that it compares all aux fields too.
// This iterator is relatively inefficient and should only be used on small
// datasets such as meta query results.
type integerDedupeIterator struct {
input IntegerIterator
m map[string]struct{} // lookup of points already sent
}
// newIntegerDedupeIterator returns a new instance of integerDedupeIterator.
func newIntegerDedupeIterator(input IntegerIterator) *integerDedupeIterator {
return &integerDedupeIterator{
input: input,
m: make(map[string]struct{}),
}
}
// Close closes the iterator and all child iterators.
func (itr *integerDedupeIterator) Close() error { return itr.input.Close() }
// Next returns the next unique point from the input iterator.
func (itr *integerDedupeIterator) Next() *IntegerPoint {
for {
// Read next point.
p := itr.input.Next()
if p == nil {
return nil
}
// Serialize to bytes to store in lookup.
buf, err := proto.Marshal(encodeIntegerPoint(p))
if err != nil {
log.Println("error marshaling dedupe point:", err)
continue
}
// If the point has already been output then move to the next point.
if _, ok := itr.m[string(buf)]; ok {
continue
}
// Otherwise mark it as emitted and return point.
itr.m[string(buf)] = struct{}{}
return p
}
}
// StringIterator represents a stream of string points.
type StringIterator interface {
Iterator
@@ -2137,6 +2234,53 @@ func (itr *stringBoolTransformIterator) Next() *BooleanPoint {
// new point if possible.
type stringBoolTransformFunc func(p *StringPoint) *BooleanPoint
// stringDedupeIterator only outputs unique points.
// This differs from the DistinctIterator in that it compares all aux fields too.
// This iterator is relatively inefficient and should only be used on small
// datasets such as meta query results.
type stringDedupeIterator struct {
input StringIterator
m map[string]struct{} // lookup of points already sent
}
// newStringDedupeIterator returns a new instance of stringDedupeIterator.
func newStringDedupeIterator(input StringIterator) *stringDedupeIterator {
return &stringDedupeIterator{
input: input,
m: make(map[string]struct{}),
}
}
// Close closes the iterator and all child iterators.
func (itr *stringDedupeIterator) Close() error { return itr.input.Close() }
// Next returns the next unique point from the input iterator.
func (itr *stringDedupeIterator) Next() *StringPoint {
for {
// Read next point.
p := itr.input.Next()
if p == nil {
return nil
}
// Serialize to bytes to store in lookup.
buf, err := proto.Marshal(encodeStringPoint(p))
if err != nil {
log.Println("error marshaling dedupe point:", err)
continue
}
// If the point has already been output then move to the next point.
if _, ok := itr.m[string(buf)]; ok {
continue
}
// Otherwise mark it as emitted and return point.
itr.m[string(buf)] = struct{}{}
return p
}
}
// BooleanIterator represents a stream of boolean points.
type BooleanIterator interface {
Iterator
@@ -2844,3 +2988,50 @@ func (itr *booleanBoolTransformIterator) Next() *BooleanPoint {
// The point passed in may be modified and returned rather than allocating a
// new point if possible.
type booleanBoolTransformFunc func(p *BooleanPoint) *BooleanPoint
// booleanDedupeIterator only outputs unique points.
// This differs from the DistinctIterator in that it compares all aux fields too.
// This iterator is relatively inefficient and should only be used on small
// datasets such as meta query results.
type booleanDedupeIterator struct {
input BooleanIterator
m map[string]struct{} // lookup of points already sent
}
// newBooleanDedupeIterator returns a new instance of booleanDedupeIterator.
func newBooleanDedupeIterator(input BooleanIterator) *booleanDedupeIterator {
return &booleanDedupeIterator{
input: input,
m: make(map[string]struct{}),
}
}
// Close closes the iterator and all child iterators.
func (itr *booleanDedupeIterator) Close() error { return itr.input.Close() }
// Next returns the next unique point from the input iterator.
func (itr *booleanDedupeIterator) Next() *BooleanPoint {
for {
// Read next point.
p := itr.input.Next()
if p == nil {
return nil
}
// Serialize to bytes to store in lookup.
buf, err := proto.Marshal(encodeBooleanPoint(p))
if err != nil {
log.Println("error marshaling dedupe point:", err)
continue
}
// If the point has already been output then move to the next point.
if _, ok := itr.m[string(buf)]; ok {
continue
}
// Otherwise mark it as emitted and return point.
itr.m[string(buf)] = struct{}{}
return p
}
}
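
A hypothetical usage sketch of the generated iterators above. The sliceFloatIterator helper is invented for illustration and assumes the Iterator interface in this branch only requires Close; the influxql test suite has its own helpers. It shows the distinction called out in the comments: an exact duplicate is dropped, while a point that differs only in an aux field is kept, which is what separates this from DistinctIterator. The expected output also assumes that marshaling identical protobuf messages yields identical bytes, which the dedupe map itself relies on.

package influxql_test

import (
	"fmt"

	"github.com/influxdb/influxdb/influxql"
)

// sliceFloatIterator is a throwaway helper that yields points from a slice.
type sliceFloatIterator struct {
	points []influxql.FloatPoint
	i      int
}

func (itr *sliceFloatIterator) Close() error { return nil }

func (itr *sliceFloatIterator) Next() *influxql.FloatPoint {
	if itr.i >= len(itr.points) {
		return nil
	}
	p := &itr.points[itr.i]
	itr.i++
	return p
}

func ExampleNewDedupeIterator() {
	input := &sliceFloatIterator{points: []influxql.FloatPoint{
		{Name: "cpu", Time: 0, Value: 0, Aux: []interface{}{"serverA"}},
		{Name: "cpu", Time: 0, Value: 0, Aux: []interface{}{"serverA"}}, // exact duplicate: dropped
		{Name: "cpu", Time: 0, Value: 0, Aux: []interface{}{"serverB"}}, // differs only in aux: kept
	}}

	itr := influxql.NewDedupeIterator(input).(influxql.FloatIterator)
	for p := itr.Next(); p != nil; p = itr.Next() {
		fmt.Println(p.Aux[0])
	}
	// Output:
	// serverA
	// serverB
}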

View File

@@ -5,6 +5,9 @@ import (
"fmt"
"sort"
"sync"
"log"
"github.com/gogo/protobuf/proto"
)
{{range .}}
@@ -723,5 +726,51 @@ func (itr *{{.name}}BoolTransformIterator) Next() *BooleanPoint {
// new point if possible.
type {{.name}}BoolTransformFunc func(p *{{.Name}}Point) *BooleanPoint
// {{.name}}DedupeIterator only outputs unique points.
// This differs from the DistinctIterator in that it compares all aux fields too.
// This iterator is relatively inefficient and should only be used on small
// datasets such as meta query results.
type {{.name}}DedupeIterator struct {
input {{.Name}}Iterator
m map[string]struct{} // lookup of points already sent
}
// new{{.Name}}DedupeIterator returns a new instance of {{.name}}DedupeIterator.
func new{{.Name}}DedupeIterator(input {{.Name}}Iterator) *{{.name}}DedupeIterator {
return &{{.name}}DedupeIterator{
input: input,
m: make(map[string]struct{}),
}
}
// Close closes the iterator and all child iterators.
func (itr *{{.name}}DedupeIterator) Close() error { return itr.input.Close() }
// Next returns the next unique point from the input iterator.
func (itr *{{.name}}DedupeIterator) Next() *{{.Name}}Point {
for {
// Read next point.
p := itr.input.Next()
if p == nil {
return nil
}
// Serialize to bytes to store in lookup.
buf, err := proto.Marshal(encode{{.Name}}Point(p))
if err != nil {
log.Println("error marshaling dedupe point:", err)
continue
}
// If the point has already been output then move to the next point.
if _, ok := itr.m[string(buf)]; ok {
continue
}
// Otherwise mark it as emitted and return point.
itr.m[string(buf)] = struct{}{}
return p
}
}
{{end}}

View File

@@ -162,6 +162,28 @@ func NewLimitIterator(input Iterator, opt IteratorOptions) Iterator {
}
}
// NewDedupeIterator returns an iterator that only outputs unique points.
// This iterator maintains a serialized copy of each row so it is inefficient
// to use on large datasets. It is intended for small datasets such as meta queries.
func NewDedupeIterator(input Iterator) Iterator {
if input == nil {
return nil
}
switch input := input.(type) {
case FloatIterator:
return newFloatDedupeIterator(input)
case IntegerIterator:
return newIntegerDedupeIterator(input)
case StringIterator:
return newStringDedupeIterator(input)
case BooleanIterator:
return newBooleanDedupeIterator(input)
default:
panic(fmt.Sprintf("unsupported dedupe iterator type: %T", input))
}
}
// AuxIterator represents an iterator that can split off separate auxiliary iterators.
type AuxIterator interface {
Iterator
@@ -412,6 +434,9 @@ type IteratorOptions struct {
// Limits the number of series.
SLimit, SOffset int
// Removes duplicate rows from raw queries.
Dedupe bool
}
// newIteratorOptionsStmt creates the iterator options from stmt.
@@ -450,6 +475,7 @@ func newIteratorOptionsStmt(stmt *SelectStatement) (opt IteratorOptions, err err
opt.Sources = stmt.Sources
opt.Condition = stmt.Condition
opt.Ascending = stmt.TimeAscending()
opt.Dedupe = stmt.Dedupe
opt.Limit, opt.Offset = stmt.Limit, stmt.Offset
opt.SLimit, opt.SOffset = stmt.SLimit, stmt.SOffset

View File

@@ -3,6 +3,11 @@
package influxql
import (
"github.com/gogo/protobuf/proto"
"github.com/influxdb/influxdb/influxql/internal"
)
// FloatPoint represents a point with a float64 value.
type FloatPoint struct {
Name string
@@ -41,6 +46,29 @@ func (v *FloatPoint) Clone() *FloatPoint {
return &other
}
func encodeFloatPoint(p *FloatPoint) *internal.Point {
return &internal.Point{
Name: proto.String(p.Name),
Tags: proto.String(p.Tags.ID()),
Time: proto.Int64(p.Time),
Nil: proto.Bool(p.Nil),
Aux: encodeAux(p.Aux),
FloatValue: proto.Float64(p.Value),
}
}
func decodeFloatPoint(pb *internal.Point) *FloatPoint {
return &FloatPoint{
Name: pb.GetName(),
Tags: newTagsID(pb.GetTags()),
Time: pb.GetTime(),
Nil: pb.GetNil(),
Aux: decodeAux(pb.Aux),
Value: pb.GetFloatValue(),
}
}
// floatPoints represents a slice of points sortable by value.
type floatPoints []FloatPoint
@@ -122,6 +150,29 @@ func (v *IntegerPoint) Clone() *IntegerPoint {
return &other
}
func encodeIntegerPoint(p *IntegerPoint) *internal.Point {
return &internal.Point{
Name: proto.String(p.Name),
Tags: proto.String(p.Tags.ID()),
Time: proto.Int64(p.Time),
Nil: proto.Bool(p.Nil),
Aux: encodeAux(p.Aux),
IntegerValue: proto.Int64(p.Value),
}
}
func decodeIntegerPoint(pb *internal.Point) *IntegerPoint {
return &IntegerPoint{
Name: pb.GetName(),
Tags: newTagsID(pb.GetTags()),
Time: pb.GetTime(),
Nil: pb.GetNil(),
Aux: decodeAux(pb.Aux),
Value: pb.GetIntegerValue(),
}
}
// integerPoints represents a slice of points sortable by value.
type integerPoints []IntegerPoint
@@ -203,6 +254,29 @@ func (v *StringPoint) Clone() *StringPoint {
return &other
}
func encodeStringPoint(p *StringPoint) *internal.Point {
return &internal.Point{
Name: proto.String(p.Name),
Tags: proto.String(p.Tags.ID()),
Time: proto.Int64(p.Time),
Nil: proto.Bool(p.Nil),
Aux: encodeAux(p.Aux),
StringValue: proto.String(p.Value),
}
}
func decodeStringPoint(pb *internal.Point) *StringPoint {
return &StringPoint{
Name: pb.GetName(),
Tags: newTagsID(pb.GetTags()),
Time: pb.GetTime(),
Nil: pb.GetNil(),
Aux: decodeAux(pb.Aux),
Value: pb.GetStringValue(),
}
}
// stringPoints represents a slice of points sortable by value.
type stringPoints []StringPoint
@@ -284,6 +358,29 @@ func (v *BooleanPoint) Clone() *BooleanPoint {
return &other
}
func encodeBooleanPoint(p *BooleanPoint) *internal.Point {
return &internal.Point{
Name: proto.String(p.Name),
Tags: proto.String(p.Tags.ID()),
Time: proto.Int64(p.Time),
Nil: proto.Bool(p.Nil),
Aux: encodeAux(p.Aux),
BooleanValue: proto.Bool(p.Value),
}
}
func decodeBooleanPoint(pb *internal.Point) *BooleanPoint {
return &BooleanPoint{
Name: pb.GetName(),
Tags: newTagsID(pb.GetTags()),
Time: pb.GetTime(),
Nil: pb.GetNil(),
Aux: decodeAux(pb.Aux),
Value: pb.GetBooleanValue(),
}
}
// booleanPoints represents a slice of points sortable by value.
type booleanPoints []BooleanPoint
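
Because these encoders produce the bytes the dedupe iterators key on, one consequence worth noting is that a null point and a zero-valued point remain distinct: the Nil flag is part of the serialized form. A hypothetical in-package sketch (encodeFloatPoint is unexported, so this would live inside package influxql; the test name and values are invented):

package influxql

import (
	"bytes"
	"testing"

	"github.com/gogo/protobuf/proto"
)

// TestEncodeFloatPoint_nilVsZero sketches that a null float point and a zero-valued
// float point serialize differently, so the dedupe iterators treat them as separate rows.
func TestEncodeFloatPoint_nilVsZero(t *testing.T) {
	zero := &FloatPoint{Name: "cpu", Value: 0}
	null := &FloatPoint{Name: "cpu", Nil: true}

	zb, err := proto.Marshal(encodeFloatPoint(zero))
	if err != nil {
		t.Fatal(err)
	}
	nb, err := proto.Marshal(encodeFloatPoint(null))
	if err != nil {
		t.Fatal(err)
	}
	if bytes.Equal(zb, nb) {
		t.Fatal("expected a null point and a zero-valued point to encode differently")
	}
}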

View File

@@ -1,5 +1,10 @@
package influxql
import (
"github.com/gogo/protobuf/proto"
"github.com/influxdb/influxdb/influxql/internal"
)
{{range .}}
// {{.Name}}Point represents a point with a {{.Type}} value.
@@ -40,6 +45,37 @@ func (v *{{.Name}}Point) Clone() *{{.Name}}Point {
return &other
}
func encode{{.Name}}Point(p *{{.Name}}Point) *internal.Point {
return &internal.Point{
Name: proto.String(p.Name),
Tags: proto.String(p.Tags.ID()),
Time: proto.Int64(p.Time),
Nil: proto.Bool(p.Nil),
Aux: encodeAux(p.Aux),
{{if eq .Name "Float"}}
FloatValue: proto.Float64(p.Value),
{{else if eq .Name "Integer"}}
IntegerValue: proto.Int64(p.Value),
{{else if eq .Name "String"}}
StringValue: proto.String(p.Value),
{{else if eq .Name "Boolean"}}
BooleanValue: proto.Bool(p.Value),
{{end}}
}
}
func decode{{.Name}}Point(pb *internal.Point) *{{.Name}}Point {
return &{{.Name}}Point{
Name: pb.GetName(),
Tags: newTagsID(pb.GetTags()),
Time: pb.GetTime(),
Nil: pb.GetNil(),
Aux: decodeAux(pb.Aux),
Value: pb.Get{{.Name}}Value(),
}
}
// {{.name}}Points represents a slice of points sortable by value.
type {{.name}}Points []{{.Name}}Point

View File

@@ -2,6 +2,9 @@ package influxql
import (
"sort"
"github.com/gogo/protobuf/proto"
"github.com/influxdb/influxdb/influxql/internal"
)
// ZeroTime is the Unix nanosecond timestamp for time.Time{}.
@@ -44,6 +47,15 @@ func NewTags(m map[string]string) Tags {
}
}
// newTagsID returns a new instance of Tags parsed from a tag id.
func newTagsID(id string) Tags {
m := decodeTags([]byte(id))
if len(m) == 0 {
return Tags{}
}
return Tags{id: id, m: m}
}
// ID returns the string identifier for the tags.
func (t Tags) ID() string { return t.id }
@@ -150,3 +162,68 @@ func encodeTags(m map[string]string) []byte {
}
return b
}
// decodeTags parses an identifier into a map of tags.
func decodeTags(id []byte) map[string]string { panic("FIXME: implement") }
func encodeAux(aux []interface{}) []*internal.Aux {
pb := make([]*internal.Aux, len(aux))
for i := range aux {
switch v := aux[i].(type) {
case float64:
pb[i] = &internal.Aux{DataType: proto.Int32(Float), FloatValue: proto.Float64(v)}
case *float64:
pb[i] = &internal.Aux{DataType: proto.Int32(Float)}
case int64:
pb[i] = &internal.Aux{DataType: proto.Int32(Integer), IntegerValue: proto.Int64(v)}
case *int64:
pb[i] = &internal.Aux{DataType: proto.Int32(Integer)}
case string:
pb[i] = &internal.Aux{DataType: proto.Int32(String), StringValue: proto.String(v)}
case *string:
pb[i] = &internal.Aux{DataType: proto.Int32(String)}
case bool:
pb[i] = &internal.Aux{DataType: proto.Int32(Boolean), BooleanValue: proto.Bool(v)}
case *bool:
pb[i] = &internal.Aux{DataType: proto.Int32(Boolean)}
default:
pb[i] = &internal.Aux{DataType: proto.Int32(int32(Unknown))}
}
}
return pb
}
func decodeAux(pb []*internal.Aux) []interface{} {
aux := make([]interface{}, len(pb))
for i := range pb {
switch pb[i].GetDataType() {
case Float:
if pb[i].FloatValue != nil {
aux[i] = *pb[i].FloatValue
} else {
aux[i] = (*float64)(nil)
}
case Integer:
if pb[i].IntegerValue != nil {
aux[i] = *pb[i].IntegerValue
} else {
aux[i] = (*int64)(nil)
}
case String:
if pb[i].StringValue != nil {
aux[i] = *pb[i].StringValue
} else {
aux[i] = (*string)(nil)
}
case Boolean:
if pb[i].BooleanValue != nil {
aux[i] = *pb[i].BooleanValue
} else {
aux[i] = (*bool)(nil)
}
default:
aux[i] = nil
}
}
return aux
}
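
A short in-package sketch of the aux round trip above (a hypothetical test, not part of the commit): concrete values survive encode/decode, and a typed nil pointer comes back as a typed nil, because the encoder records only the DataType for pointer inputs and the decoder restores the matching nil.

package influxql

import (
	"reflect"
	"testing"
)

// TestEncodeDecodeAux sketches the aux round trip: concrete values are preserved,
// and typed nil pointers (null field values) come back as typed nils.
func TestEncodeDecodeAux(t *testing.T) {
	in := []interface{}{float64(23.2), int64(1), "serverA", true, (*float64)(nil)}
	out := decodeAux(encodeAux(in))
	if !reflect.DeepEqual(in, out) {
		t.Fatalf("aux round trip mismatch:\n in: %#v\nout: %#v", in, out)
	}
}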

View File

@@ -75,6 +75,11 @@ func buildAuxIterators(fields Fields, ic IteratorCreator, opt IteratorOptions) (
return nil, err
}
// Filter out duplicate rows, if required.
if opt.Dedupe {
input = NewDedupeIterator(input)
}
// Apply limit & offset.
if opt.Limit > 0 || opt.Offset > 0 {
input = NewLimitIterator(input, opt)

View File

@@ -703,11 +703,6 @@ func (c *Client) ShardGroupsByTimeRange(database, policy string, min, max time.T
// ShardIDsByTimeRange returns a slice of shard IDs for shards that may contain data in the time range.
func (c *Client) ShardIDsByTimeRange(sources influxql.Sources, tmin, tmax time.Time) (a []uint64, err error) {
// If a system-wide source is included then return all shard ids.
if sources.HasSystemSource() {
return c.ShardIDs(), nil
}
m := make(map[uint64]struct{})
for _, src := range sources {
mm, ok := src.(*influxql.Measurement)

View File

@@ -563,19 +563,27 @@ func (q *QueryExecutor) planShowMeasurements(stmt *influxql.ShowMeasurementsStat
}
}
return q.PlanSelect(&influxql.SelectStatement{
ss := &influxql.SelectStatement{
Fields: influxql.Fields{
{Expr: &influxql.VarRef{Val: "name"}},
},
Sources: influxql.Sources{
&influxql.Measurement{Database: database, Name: "_measurements"},
&influxql.Measurement{Name: "_measurements"},
},
Condition: condition,
Offset: stmt.Offset,
Limit: stmt.Limit,
SortFields: stmt.SortFields,
OmitTime: true,
}, chunkSize)
Dedupe: true,
}
// Normalize the statement.
if err := q.normalizeStatement(ss, database); err != nil {
return nil, err
}
return q.PlanSelect(ss, chunkSize)
}
// planShowTagKeys creates an execution plan for a SHOW TAG KEYS statement and returns an Executor.
@@ -612,19 +620,27 @@ func (q *QueryExecutor) planShowTagKeys(stmt *influxql.ShowTagKeysStatement, dat
}
}
return q.PlanSelect(&influxql.SelectStatement{
ss := &influxql.SelectStatement{
Fields: influxql.Fields{
{Expr: &influxql.VarRef{Val: "tagKey"}},
},
Sources: influxql.Sources{
&influxql.Measurement{Database: database, Name: "_tagKeys"},
&influxql.Measurement{Name: "_tagKeys"},
},
Condition: condition,
Offset: stmt.Offset,
Limit: stmt.Limit,
SortFields: stmt.SortFields,
OmitTime: true,
}, chunkSize)
Dedupe: true,
}
// Normalize the statement.
if err := q.normalizeStatement(ss, database); err != nil {
return nil, err
}
return q.PlanSelect(ss, chunkSize)
}
func (q *QueryExecutor) executeStatement(statementID int, stmt influxql.Statement, database string, results chan *influxql.Result, chunkSize int, closing chan struct{}) error {

View File

@@ -317,6 +317,9 @@ func (s *Store) DeleteMeasurement(database, name string) error {
// Remove underlying data.
for _, sh := range s.shards {
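// Skip shards that belong to a different database.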
if sh.index != db {
continue
}
if err := sh.DeleteMeasurement(m.Name, m.SeriesKeys()); err != nil {
return err
}