Merge pull request #8297 from influxdata/jw-merge-123

Merge 1.2.3 changes to master
pull/5068/merge
Jason Wilder 2017-04-17 09:18:46 -06:00 committed by GitHub
commit eeaad877bc
6 changed files with 50 additions and 176 deletions

View File

@ -47,10 +47,13 @@
### Bugfixes ### Bugfixes
- [#8190](https://github.com/influxdata/influxdb/issues/8190): History file should redact passwords before saving to history.
- [#8187](https://github.com/influxdata/influxdb/pull/8187): Several statements were missing the DefaultDatabase method
- [#8022](https://github.com/influxdata/influxdb/issues/8022): Segment violation in models.Tags.Get - [#8022](https://github.com/influxdata/influxdb/issues/8022): Segment violation in models.Tags.Get
- [#8155](https://github.com/influxdata/influxdb/pull/8155): Simplify admin user check. - [#8155](https://github.com/influxdata/influxdb/pull/8155): Simplify admin user check.
- [#8167](https://github.com/influxdata/influxdb/issues/8167): Fix a regression when math was used with selectors. - [#8167](https://github.com/influxdata/influxdb/issues/8167): Fix a regression when math was used with selectors.
- [#8175](https://github.com/influxdata/influxdb/issues/8175): Ensure the input for certain functions in the query engine are ordered. - [#8175](https://github.com/influxdata/influxdb/issues/8175): Ensure the input for certain functions in the query engine are ordered.
- [#8254](https://github.com/influxdata/influxdb/pull/8254): Fix delete time fields creating unparseable points
## v1.2.2 [2017-03-14] ## v1.2.2 [2017-03-14]

View File

@ -159,9 +159,6 @@ type FieldIterator interface {
// FloatValue returns the float value of the current field. // FloatValue returns the float value of the current field.
FloatValue() (float64, error) FloatValue() (float64, error)
// Delete deletes the current field.
Delete()
// Reset resets the iterator to its initial state. // Reset resets the iterator to its initial state.
Reset() Reset()
} }
@ -2051,26 +2048,6 @@ func (p *point) FloatValue() (float64, error) {
return f, nil return f, nil
} }
// Delete removes the field the iterator is currently positioned on by
// splicing its bytes out of p.fields, then clears the iterator's cursor
// state so the deleted field cannot be read again.
func (p *point) Delete() {
	start, end := p.it.start, p.it.end
	if end == start {
		// Nothing is selected (already deleted or never advanced): no-op.
	} else if end >= len(p.fields) {
		// Deleting the last field: drop it along with the comma that
		// separated it from the previous field, if any.
		p.fields = bytes.TrimSuffix(p.fields[:start], []byte(","))
	} else if start == 0 {
		// Deleting the first field: keep everything after it.
		p.fields = p.fields[end:]
	} else {
		// Deleting an interior field: join the bytes on either side.
		p.fields = append(p.fields[:start], p.fields[end:]...)
	}
	// Reset the cursor so repeated Delete calls are no-ops.
	p.it.end = p.it.start
	p.it.key = nil
	p.it.valueBuf = nil
	p.it.fieldType = Empty
}
// Reset resets the iterator to its initial state. // Reset resets the iterator to its initial state.
func (p *point) Reset() { func (p *point) Reset() {
p.it.fieldType = Empty p.it.fieldType = Empty

View File

@ -2183,130 +2183,6 @@ m a=2i,b=3i,c=true,d="stuff",e=-0.23,f=123.456
} }
} }
// TestPoint_FieldIterator_Delete_Begin verifies that deleting the first
// field leaves the remaining fields intact and the point still parseable.
func TestPoint_FieldIterator_Delete_Begin(t *testing.T) {
	pts, err := models.ParsePointsString(`m a=1,b=2,c=3`)
	if err != nil || len(pts) != 1 {
		t.Fatal("failed parsing point")
	}

	it := pts[0].FieldIterator()
	it.Next() // position on field "a"
	it.Delete()
	it.Reset()

	want := models.Fields{"b": float64(2), "c": float64(3)}
	if got := toFields(it); !reflect.DeepEqual(got, want) {
		t.Fatalf("Delete failed, got %#v, exp %#v", got, want)
	}

	// The mutated point must still round-trip through the parser.
	if _, err = models.ParsePointsString(pts[0].String()); err != nil {
		t.Fatalf("Failed to parse point: %v", err)
	}
}
// TestPoint_FieldIterator_Delete_Middle verifies that deleting an interior
// field splices it out cleanly, leaving its neighbors joined correctly.
func TestPoint_FieldIterator_Delete_Middle(t *testing.T) {
	pts, err := models.ParsePointsString(`m a=1,b=2,c=3`)
	if err != nil || len(pts) != 1 {
		t.Fatal("failed parsing point")
	}

	it := pts[0].FieldIterator()
	it.Next() // position on field "a"
	it.Next() // position on field "b"
	it.Delete()
	it.Reset()

	want := models.Fields{"a": float64(1), "c": float64(3)}
	if got := toFields(it); !reflect.DeepEqual(got, want) {
		t.Fatalf("Delete failed, got %#v, exp %#v", got, want)
	}

	// The mutated point must still round-trip through the parser.
	if _, err = models.ParsePointsString(pts[0].String()); err != nil {
		t.Fatalf("Failed to parse point: %v", err)
	}
}
// TestPoint_FieldIterator_Delete_End verifies that deleting the last field
// also removes the trailing separator, keeping the point parseable.
func TestPoint_FieldIterator_Delete_End(t *testing.T) {
	pts, err := models.ParsePointsString(`m a=1,b=2,c=3`)
	if err != nil || len(pts) != 1 {
		t.Fatal("failed parsing point")
	}

	it := pts[0].FieldIterator()
	it.Next() // position on field "a"
	it.Next() // position on field "b"
	it.Next() // position on field "c"
	it.Delete()
	it.Reset()

	want := models.Fields{"a": float64(1), "b": float64(2)}
	if got := toFields(it); !reflect.DeepEqual(got, want) {
		t.Fatalf("Delete failed, got %#v, exp %#v", got, want)
	}

	// The mutated point must still round-trip through the parser.
	if _, err = models.ParsePointsString(pts[0].String()); err != nil {
		t.Fatalf("Failed to parse point: %v", err)
	}
}
// TestPoint_FieldIterator_Delete_Nothing verifies that calling Delete before
// the iterator has advanced is a no-op: all fields survive.
func TestPoint_FieldIterator_Delete_Nothing(t *testing.T) {
	pts, err := models.ParsePointsString(`m a=1,b=2,c=3`)
	if err != nil || len(pts) != 1 {
		t.Fatal("failed parsing point")
	}

	it := pts[0].FieldIterator()
	it.Delete() // no field selected yet; must not remove anything
	it.Reset()

	want := models.Fields{"a": float64(1), "b": float64(2), "c": float64(3)}
	if got := toFields(it); !reflect.DeepEqual(got, want) {
		t.Fatalf("Delete failed, got %#v, exp %#v", got, want)
	}

	// The point must still round-trip through the parser.
	if _, err = models.ParsePointsString(pts[0].String()); err != nil {
		t.Fatalf("Failed to parse point: %v", err)
	}
}
// TestPoint_FieldIterator_Delete_Twice verifies that a second Delete on the
// same position is a no-op: only the originally selected field is removed.
func TestPoint_FieldIterator_Delete_Twice(t *testing.T) {
	pts, err := models.ParsePointsString(`m a=1,b=2,c=3`)
	if err != nil || len(pts) != 1 {
		t.Fatal("failed parsing point")
	}

	it := pts[0].FieldIterator()
	it.Next() // position on field "a"
	it.Next() // position on field "b"
	it.Delete()
	it.Delete() // second call must be a no-op

	it.Reset()

	want := models.Fields{"a": float64(1), "c": float64(3)}
	if got := toFields(it); !reflect.DeepEqual(got, want) {
		t.Fatalf("Delete failed, got %#v, exp %#v", got, want)
	}

	// The mutated point must still round-trip through the parser.
	if _, err = models.ParsePointsString(pts[0].String()); err != nil {
		t.Fatalf("Failed to parse point: %v", err)
	}
}
func TestEscapeStringField(t *testing.T) { func TestEscapeStringField(t *testing.T) {
cases := []struct { cases := []struct {
in string in string

View File

@ -37,8 +37,12 @@ func init() {
tsdb.RegisterEngine("tsm1", NewEngine) tsdb.RegisterEngine("tsm1", NewEngine)
} }
// Ensure Engine implements the interface. var (
var _ tsdb.Engine = &Engine{} // Ensure Engine implements the interface.
_ tsdb.Engine = &Engine{}
// Static objects to prevent small allocs.
timeBytes = []byte("time")
)
const ( const (
// keyFieldSeparator separates the series key from the field name in the composite key // keyFieldSeparator separates the series key from the field name in the composite key
@ -712,6 +716,11 @@ func (e *Engine) WritePoints(points []models.Point) error {
iter := p.FieldIterator() iter := p.FieldIterator()
t := p.Time().UnixNano() t := p.Time().UnixNano()
for iter.Next() { for iter.Next() {
// Skip fields named "time"; they are illegal.
if bytes.Equal(iter.FieldKey(), timeBytes) {
continue
}
keyBuf = append(keyBuf[:baseLen], iter.FieldKey()...) keyBuf = append(keyBuf[:baseLen], iter.FieldKey()...)
var v Value var v Value
switch iter.Type() { switch iter.Type() {

View File

@ -547,17 +547,21 @@ func (s *Shard) validateSeriesAndFields(points []models.Point) ([]models.Point,
names := make([][]byte, len(points)) names := make([][]byte, len(points))
tagsSlice := make([]models.Tags, len(points)) tagsSlice := make([]models.Tags, len(points))
// Drop any series w/ a "time" tag, these are illegal
var j int
for i, p := range points { for i, p := range points {
keys[i] = p.Key()
names[i] = []byte(p.Name())
tags := p.Tags() tags := p.Tags()
if v := tags.Get(timeBytes); v != nil { if v := tags.Get(timeBytes); v != nil {
s.logger.Info(fmt.Sprintf("dropping tag 'time' from '%s'\n", p.PrecisionString(""))) dropped++
tags.Delete(timeBytes) continue
p.SetTags(tags)
} }
tagsSlice[i] = tags keys[j] = p.Key()
names[j] = []byte(p.Name())
tagsSlice[j] = tags
points[j] = points[i]
j++
} }
points, keys, names, tagsSlice = points[:j], keys[:j], names[:j], tagsSlice[:j]
// Add new series. Check for partial writes. // Add new series. Check for partial writes.
var droppedKeys map[string]struct{} var droppedKeys map[string]struct{}
@ -581,14 +585,14 @@ func (s *Shard) validateSeriesAndFields(points []models.Point) ([]models.Point,
iter := p.FieldIterator() iter := p.FieldIterator()
for iter.Next() { for iter.Next() {
if bytes.Equal(iter.FieldKey(), timeBytes) { if bytes.Equal(iter.FieldKey(), timeBytes) {
s.logger.Info(fmt.Sprintf("dropping field 'time' from '%s'\n", p.PrecisionString("")))
iter.Delete()
continue continue
} }
validField = true validField = true
break
} }
if !validField { if !validField {
dropped++
continue continue
} }
@ -604,10 +608,14 @@ func (s *Shard) validateSeriesAndFields(points []models.Point) ([]models.Point,
// see if the field definitions need to be saved to the shard // see if the field definitions need to be saved to the shard
mf := s.engine.MeasurementFields(p.Name()) mf := s.engine.MeasurementFields(p.Name())
if mf == nil { if mf == nil {
var createType influxql.DataType var createType influxql.DataType
for iter.Next() { for iter.Next() {
// Skip fields named "time"; they are illegal.
if bytes.Equal(iter.FieldKey(), timeBytes) {
continue
}
switch iter.Type() { switch iter.Type() {
case models.Float: case models.Float:
createType = influxql.Float createType = influxql.Float
@ -643,6 +651,12 @@ func (s *Shard) validateSeriesAndFields(points []models.Point) ([]models.Point,
// validate field types and encode data // validate field types and encode data
for iter.Next() { for iter.Next() {
// Skip fields named "time"; they are illegal.
if bytes.Equal(iter.FieldKey(), timeBytes) {
continue
}
var fieldType influxql.DataType var fieldType influxql.DataType
switch iter.Type() { switch iter.Type() {
case models.Float: case models.Float:
@ -698,6 +712,10 @@ func (s *Shard) MeasurementFields(name []byte) *MeasurementFields {
return s.engine.MeasurementFields(string(name)) return s.engine.MeasurementFields(string(name))
} }
// MeasurementExists reports whether a measurement with the given name exists,
// delegating the lookup to the shard's underlying storage engine.
func (s *Shard) MeasurementExists(name []byte) (bool, error) {
return s.engine.MeasurementExists(name)
}
// WriteTo writes the shard's data to w. // WriteTo writes the shard's data to w.
func (s *Shard) WriteTo(w io.Writer) (int64, error) { func (s *Shard) WriteTo(w io.Writer) (int64, error) {
if err := s.ready(); err != nil { if err := s.ready(); err != nil {

View File

@ -1,7 +1,6 @@
package tsdb_test package tsdb_test
import ( import (
"bytes"
"fmt" "fmt"
"io/ioutil" "io/ioutil"
"os" "os"
@ -21,7 +20,6 @@ import (
_ "github.com/influxdata/influxdb/tsdb/engine" _ "github.com/influxdata/influxdb/tsdb/engine"
_ "github.com/influxdata/influxdb/tsdb/index" _ "github.com/influxdata/influxdb/tsdb/index"
"github.com/influxdata/influxdb/tsdb/index/inmem" "github.com/influxdata/influxdb/tsdb/index/inmem"
"github.com/uber-go/zap"
) )
func TestShardWriteAndIndex(t *testing.T) { func TestShardWriteAndIndex(t *testing.T) {
@ -220,12 +218,8 @@ func TestWriteTimeTag(t *testing.T) {
time.Unix(1, 2), time.Unix(1, 2),
) )
buf := bytes.NewBuffer(nil) if err := sh.WritePoints([]models.Point{pt}); err == nil {
sh.WithLogger(zap.New(zap.NewTextEncoder(), zap.Output(zap.AddSync(buf)))) t.Fatal("expected error: got nil")
if err := sh.WritePoints([]models.Point{pt}); err != nil {
t.Fatalf("unexpected error: %v", err)
} else if got, exp := buf.String(), "dropping field 'time'"; !strings.Contains(got, exp) {
t.Fatalf("unexpected log message: %s", strings.TrimSpace(got))
} }
pt = models.MustNewPoint( pt = models.MustNewPoint(
@ -235,12 +229,8 @@ func TestWriteTimeTag(t *testing.T) {
time.Unix(1, 2), time.Unix(1, 2),
) )
buf = bytes.NewBuffer(nil)
sh.WithLogger(zap.New(zap.NewTextEncoder(), zap.Output(zap.AddSync(buf))))
if err := sh.WritePoints([]models.Point{pt}); err != nil { if err := sh.WritePoints([]models.Point{pt}); err != nil {
t.Fatalf("unexpected error: %v", err) t.Fatalf("unexpected error: %v", err)
} else if got, exp := buf.String(), "dropping field 'time'"; !strings.Contains(got, exp) {
t.Fatalf("unexpected log message: %s", strings.TrimSpace(got))
} }
mf := sh.MeasurementFields([]byte("cpu")) mf := sh.MeasurementFields([]byte("cpu"))
@ -276,12 +266,13 @@ func TestWriteTimeField(t *testing.T) {
time.Unix(1, 2), time.Unix(1, 2),
) )
buf := bytes.NewBuffer(nil) if err := sh.WritePoints([]models.Point{pt}); err == nil {
sh.WithLogger(zap.New(zap.NewTextEncoder(), zap.Output(zap.AddSync(buf)))) t.Fatal("expected error: got nil")
if err := sh.WritePoints([]models.Point{pt}); err != nil { }
t.Fatalf("unexpected error: %v", err)
} else if got, exp := buf.String(), "dropping tag 'time'"; !strings.Contains(got, exp) { key := models.MakeKey([]byte("cpu"), nil)
t.Fatalf("unexpected log message: %s", strings.TrimSpace(got)) if ok, err := sh.MeasurementExists(key); ok && err == nil {
t.Fatal("unexpected series")
} }
} }