From 89532f0c29b90be313b36101e2413eefdbf5b4f0 Mon Sep 17 00:00:00 2001 From: Stuart Carnie Date: Tue, 28 Jul 2020 15:59:11 -0700 Subject: [PATCH] refactor: move v2/v1/models to v2/models --- cmd/influx_inspect/buildtsi/buildtsi.go | 2 +- .../verify/seriesfile/verify_test.go | 2 +- gather/metrics.go | 2 +- http/write_handler.go | 2 +- http/write_handler_test.go | 55 - influxql/query/compile.go | 2 +- influxql/query/emitter.go | 2 +- influxql/query/executor.go | 2 +- influxql/query/result.go | 2 +- influxql/query/subquery_test.go | 2 +- influxql/query/task_manager.go | 2 +- mock/points_writer.go | 2 +- mock/reads_resultset.go | 2 +- {v1/models => models}/consistency.go | 0 {v1/models => models}/fieldtype_string.go | 0 {v1/models => models}/gen.go | 0 models/points.go | 287 +- models/points_internal_test.go | 2 +- models/points_parser.go | 349 --- models/points_test.go | 1608 +++------- models/testdata/line-protocol.txt | 554 ---- pkg/data/gen/merged_series_generator.go | 2 +- pkg/data/gen/series_generator.go | 2 +- pkg/data/gen/specs.go | 2 +- pkg/data/gen/specs_test.go | 2 +- pkg/data/gen/tags_sequence.go | 2 +- pkg/data/gen/values_sequence.gen.go | 2 +- pkg/data/gen/values_sequence.gen.go.tmpl | 2 +- query/stdlib/experimental/to.go | 2 +- query/stdlib/experimental/to_test.go | 2 +- query/stdlib/influxdata/influxdb/to.go | 2 +- query/stdlib/influxdata/influxdb/to_test.go | 30 +- storage/engine.go | 2 +- storage/flux/predicate.go | 2 +- storage/flux/reader.go | 2 +- storage/flux/table.gen.go | 2 +- storage/flux/table.gen.go.tmpl | 2 +- storage/flux/table.go | 2 +- storage/points_writer.go | 2 +- storage/reads/group_resultset.go | 2 +- storage/reads/group_resultset_test.go | 2 +- storage/reads/keymerger.go | 2 +- storage/reads/keymerger_test.go | 2 +- storage/reads/resultset.go | 2 +- storage/reads/resultset_lineprotocol.go | 2 +- storage/reads/series_cursor.go | 2 +- storage/reads/store.go | 2 +- storage/reads/store_test.go | 2 +- storage/reads/tagsbuffer.go | 2 +- v1/coordinator/points_writer.go | 2 +- v1/coordinator/points_writer_test.go | 2 +- v1/coordinator/statement_executor.go | 2 +- v1/coordinator/statement_executor_test.go | 2 +- v1/internal/authorizer.go | 2 +- v1/internal/tsdb_store.go | 2 +- v1/models/inline_fnv.go | 32 - v1/models/inline_fnv_test.go | 29 - v1/models/inline_strconv_parse.go | 44 - v1/models/inline_strconv_parse_test.go | 103 - v1/models/points.go | 2552 ---------------- v1/models/points_internal_test.go | 17 - v1/models/points_test.go | 2601 ----------------- v1/models/rows.go | 62 - v1/models/statistic.go | 42 - v1/models/statistic_test.go | 55 - v1/models/tagkeysset.go | 156 - v1/models/tagkeysset_test.go | 325 -- v1/models/time.go | 74 - v1/models/uint_support.go | 7 - v1/monitor/reporter.go | 2 +- v1/monitor/service.go | 2 +- v1/monitor/service_test.go | 2 +- v1/services/meta/data.go | 2 +- v1/services/meta/internal/meta.pb.go | 10 +- v1/services/storage/predicate_influxql.go | 2 +- v1/services/storage/series_cursor.go | 2 +- v1/services/storage/series_cursor_test.go | 2 +- v1/services/storage/source.pb.go | 15 +- v1/services/storage/store.go | 2 +- v1/tsdb/batcher.go | 2 +- v1/tsdb/batcher_test.go | 2 +- v1/tsdb/cursors/cursor.go | 2 +- v1/tsdb/engine.go | 2 +- .../engine/tsm1/array_cursor_iterator.gen.go | 2 +- .../tsm1/array_cursor_iterator.gen.go.tmpl | 2 +- v1/tsdb/engine/tsm1/array_cursor_iterator.go | 2 +- v1/tsdb/engine/tsm1/cache.go | 2 +- v1/tsdb/engine/tsm1/engine.go | 2 +- v1/tsdb/engine/tsm1/engine_test.go | 2 +- v1/tsdb/engine/tsm1/file_store.go 
| 2 +- .../tsm1/file_store_key_iterator_test.go | 58 +- v1/tsdb/engine/tsm1/ring_test.go | 4 +- v1/tsdb/engine/tsm1/wal.go | 2 +- v1/tsdb/field_validator.go | 2 +- v1/tsdb/guard.go | 2 +- v1/tsdb/guard_test.go | 2 +- v1/tsdb/index.go | 2 +- v1/tsdb/index/inmem/inmem.go | 2 +- v1/tsdb/index/inmem/inmem_test.go | 2 +- v1/tsdb/index/inmem/meta.go | 2 +- v1/tsdb/index/inmem/meta_test.go | 2 +- v1/tsdb/index/internal/file_set.go | 2 +- v1/tsdb/index/tsi1/file_set_test.go | 2 +- v1/tsdb/index/tsi1/index.go | 2 +- v1/tsdb/index/tsi1/index_file.go | 2 +- v1/tsdb/index/tsi1/index_file_test.go | 2 +- v1/tsdb/index/tsi1/index_files_test.go | 2 +- v1/tsdb/index/tsi1/index_test.go | 2 +- v1/tsdb/index/tsi1/log_file.go | 2 +- v1/tsdb/index/tsi1/log_file_test.go | 2 +- v1/tsdb/index/tsi1/partition.go | 2 +- v1/tsdb/index/tsi1/tsi1_test.go | 2 +- v1/tsdb/index_test.go | 2 +- v1/tsdb/internal/meta.pb.go | 12 +- v1/tsdb/meta.go | 2 +- v1/tsdb/meta_test.go | 2 +- v1/tsdb/series_cursor.go | 2 +- v1/tsdb/series_file.go | 2 +- v1/tsdb/series_file_test.go | 2 +- v1/tsdb/series_index.go | 2 +- v1/tsdb/series_partition.go | 2 +- v1/tsdb/shard.go | 2 +- v1/tsdb/shard_internal_test.go | 2 +- v1/tsdb/shard_test.go | 2 +- v1/tsdb/store.go | 2 +- v1/tsdb/store_test.go | 2 +- 126 files changed, 800 insertions(+), 8477 deletions(-) rename {v1/models => models}/consistency.go (100%) rename {v1/models => models}/fieldtype_string.go (100%) rename {v1/models => models}/gen.go (100%) delete mode 100644 models/points_parser.go delete mode 100644 models/testdata/line-protocol.txt delete mode 100644 v1/models/inline_fnv.go delete mode 100644 v1/models/inline_fnv_test.go delete mode 100644 v1/models/inline_strconv_parse.go delete mode 100644 v1/models/inline_strconv_parse_test.go delete mode 100644 v1/models/points.go delete mode 100644 v1/models/points_internal_test.go delete mode 100644 v1/models/points_test.go delete mode 100644 v1/models/rows.go delete mode 100644 v1/models/statistic.go delete mode 100644 v1/models/statistic_test.go delete mode 100644 v1/models/tagkeysset.go delete mode 100644 v1/models/tagkeysset_test.go delete mode 100644 v1/models/time.go delete mode 100644 v1/models/uint_support.go diff --git a/cmd/influx_inspect/buildtsi/buildtsi.go b/cmd/influx_inspect/buildtsi/buildtsi.go index 7520707d18..2e81b90596 100644 --- a/cmd/influx_inspect/buildtsi/buildtsi.go +++ b/cmd/influx_inspect/buildtsi/buildtsi.go @@ -16,8 +16,8 @@ import ( "sync/atomic" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/file" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/engine/tsm1" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" diff --git a/cmd/influx_inspect/verify/seriesfile/verify_test.go b/cmd/influx_inspect/verify/seriesfile/verify_test.go index 42204b1c64..8e100dede5 100644 --- a/cmd/influx_inspect/verify/seriesfile/verify_test.go +++ b/cmd/influx_inspect/verify/seriesfile/verify_test.go @@ -10,7 +10,7 @@ import ( "time" "github.com/influxdata/influxdb/v2/cmd/influx_inspect/verify/seriesfile" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "go.uber.org/zap" ) diff --git a/gather/metrics.go b/gather/metrics.go index 3fcbfb8385..6ce4007290 100644 --- a/gather/metrics.go +++ b/gather/metrics.go @@ -7,7 +7,7 @@ import ( "github.com/gogo/protobuf/proto" 
"github.com/influxdata/influxdb/v2" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) // MetricsCollection is the struct including metrics and other requirements. diff --git a/http/write_handler.go b/http/write_handler.go index 8d438b6386..f69510e76b 100644 --- a/http/write_handler.go +++ b/http/write_handler.go @@ -16,8 +16,8 @@ import ( kitio "github.com/influxdata/influxdb/v2/kit/io" "github.com/influxdata/influxdb/v2/kit/tracing" kithttp "github.com/influxdata/influxdb/v2/kit/transport/http" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/storage" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/opentracing/opentracing-go" "go.uber.org/zap" "istio.io/pkg/log" diff --git a/http/write_handler_test.go b/http/write_handler_test.go index 2700b99995..17289a9811 100644 --- a/http/write_handler_test.go +++ b/http/write_handler_test.go @@ -16,7 +16,6 @@ import ( httpmock "github.com/influxdata/influxdb/v2/http/mock" kithttp "github.com/influxdata/influxdb/v2/kit/transport/http" "github.com/influxdata/influxdb/v2/mock" - "github.com/influxdata/influxdb/v2/models" influxtesting "github.com/influxdata/influxdb/v2/testing" "go.uber.org/zap/zaptest" ) @@ -293,60 +292,6 @@ func TestWriteHandler_handleWrite(t *testing.T) { body: `{"code":"request too large","message":"unable to read data: points batch is too large"}`, }, }, - { - name: "bytes limit rejected", - request: request{ - org: "043e0780ee2b1000", - bucket: "04504b356e23b000", - body: "m1,t1=v1 f1=1", - auth: bucketWritePermission("043e0780ee2b1000", "04504b356e23b000"), - }, - state: state{ - org: testOrg("043e0780ee2b1000"), - bucket: testBucket("043e0780ee2b1000", "04504b356e23b000"), - opts: []WriteHandlerOption{WithParserOptions(models.WithParserMaxBytes(5))}, - }, - wants: wants{ - code: 413, - body: `{"code":"request too large","message":"points: number of allocated bytes exceeded"}`, - }, - }, - { - name: "lines limit rejected", - request: request{ - org: "043e0780ee2b1000", - bucket: "04504b356e23b000", - body: "m1,t1=v1 f1=1\nm1,t1=v1 f1=1\nm1,t1=v1 f1=1\n", - auth: bucketWritePermission("043e0780ee2b1000", "04504b356e23b000"), - }, - state: state{ - org: testOrg("043e0780ee2b1000"), - bucket: testBucket("043e0780ee2b1000", "04504b356e23b000"), - opts: []WriteHandlerOption{WithParserOptions(models.WithParserMaxLines(2))}, - }, - wants: wants{ - code: 413, - body: `{"code":"request too large","message":"points: number of lines exceeded"}`, - }, - }, - { - name: "values limit rejected", - request: request{ - org: "043e0780ee2b1000", - bucket: "04504b356e23b000", - body: "m1,t1=v1 f1=1,f2=2\nm1,t1=v1 f1=1,f2=2\nm1,t1=v1 f1=1,f2=2\n", - auth: bucketWritePermission("043e0780ee2b1000", "04504b356e23b000"), - }, - state: state{ - org: testOrg("043e0780ee2b1000"), - bucket: testBucket("043e0780ee2b1000", "04504b356e23b000"), - opts: []WriteHandlerOption{WithParserOptions(models.WithParserMaxValues(4))}, - }, - wants: wants{ - code: 413, - body: `{"code":"request too large","message":"points: number of values exceeded"}`, - }, - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { diff --git a/influxql/query/compile.go b/influxql/query/compile.go index acc11c99ba..1bef0f63d2 100644 --- a/influxql/query/compile.go +++ b/influxql/query/compile.go @@ -6,7 +6,7 @@ import ( "strings" "time" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git 
a/influxql/query/emitter.go b/influxql/query/emitter.go index 17580f22a7..d07c4794f3 100644 --- a/influxql/query/emitter.go +++ b/influxql/query/emitter.go @@ -1,7 +1,7 @@ package query import ( - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) // Emitter reads from a cursor into rows. diff --git a/influxql/query/executor.go b/influxql/query/executor.go index 0edeaf7cc5..3758e704b6 100644 --- a/influxql/query/executor.go +++ b/influxql/query/executor.go @@ -11,7 +11,7 @@ import ( "sync/atomic" "time" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" "go.uber.org/zap" ) diff --git a/influxql/query/result.go b/influxql/query/result.go index 0e15aa0b87..f2bb8197e3 100644 --- a/influxql/query/result.go +++ b/influxql/query/result.go @@ -5,7 +5,7 @@ import ( "errors" "fmt" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/influxql/query/subquery_test.go b/influxql/query/subquery_test.go index 28a5f4d911..93fb7f9bf7 100644 --- a/influxql/query/subquery_test.go +++ b/influxql/query/subquery_test.go @@ -7,7 +7,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/influxdata/influxdb/v2/influxql/query" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/influxql/query/task_manager.go b/influxql/query/task_manager.go index 3d7de8fd10..e860c32ea3 100644 --- a/influxql/query/task_manager.go +++ b/influxql/query/task_manager.go @@ -8,7 +8,7 @@ import ( "sync" "time" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" "go.uber.org/zap" ) diff --git a/mock/points_writer.go b/mock/points_writer.go index 914adddef5..5c779c1553 100644 --- a/mock/points_writer.go +++ b/mock/points_writer.go @@ -5,7 +5,7 @@ import ( "sync" "github.com/influxdata/influxdb/v2" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) // PointsWriter is a mock structure for writing points. diff --git a/mock/reads_resultset.go b/mock/reads_resultset.go index b96669f85b..16afa54bad 100644 --- a/mock/reads_resultset.go +++ b/mock/reads_resultset.go @@ -1,9 +1,9 @@ package mock import ( + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/data/gen" "github.com/influxdata/influxdb/v2/storage/reads" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb/cursors" ) diff --git a/v1/models/consistency.go b/models/consistency.go similarity index 100% rename from v1/models/consistency.go rename to models/consistency.go diff --git a/v1/models/fieldtype_string.go b/models/fieldtype_string.go similarity index 100% rename from v1/models/fieldtype_string.go rename to models/fieldtype_string.go diff --git a/v1/models/gen.go b/models/gen.go similarity index 100% rename from v1/models/gen.go rename to models/gen.go diff --git a/models/points.go b/models/points.go index dc4adf3bf2..2410c59f33 100644 --- a/models/points.go +++ b/models/points.go @@ -74,14 +74,6 @@ var ( // ErrInvalidKevValuePairs is returned when the number of key, value pairs // is odd, indicating a missing value. 
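Everything up to this point is the mechanical half of the move: each call site swaps the `v2/v1/models` import for `v2/models` and the import block gets re-sorted. In isolation, the change at every one of these files amounts to:

    import (
        // before: "github.com/influxdata/influxdb/v2/v1/models"
        "github.com/influxdata/influxdb/v2/models"
    )

The substantive hunks are the ones in models/points.go and the deleted models/points_parser.go below, which also revert the parser itself to the v1 behavior.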
ErrInvalidKevValuePairs = errors.New("key/value pairs is an odd length") - - // ErrMeasurementTagExpected is returned by ParseMeasurement when parsing a - // series key where the first tag key is not a measurement. - ErrMeasurementTagExpected = errors.New("measurement tag expected") - - // ErrInvalidKey is returned by ParseMeasurement when parsing a an empty - // or invalid series key. - ErrInvalidKey = errors.New("invalid key") ) const ( @@ -191,25 +183,6 @@ const ( Unsigned ) -func (t FieldType) String() string { - switch t { - case Integer: - return "Integer" - case Float: - return "Float" - case Boolean: - return "Boolean" - case String: - return "String" - case Empty: - return "Empty" - case Unsigned: - return "Unsigned" - default: - return "" - } -} - // FieldIterator provides a low-allocation interface to iterate through a point's fields. type FieldIterator interface { // Next indicates whether there any fields remaining. @@ -244,32 +217,13 @@ type FieldIterator interface { type Points []Point // Len implements sort.Interface. -func (p Points) Len() int { return len(p) } +func (a Points) Len() int { return len(a) } // Less implements sort.Interface. -func (p Points) Less(i, j int) bool { return p[i].Time().Before(p[j].Time()) } +func (a Points) Less(i, j int) bool { return a[i].Time().Before(a[j].Time()) } // Swap implements sort.Interface. -func (p Points) Swap(i, j int) { p[i], p[j] = p[j], p[i] } - -func (p Points) String() string { - const sep = "\n" - switch len(p) { - case 0: - return "" - case 1: - return p[0].String() - } - var b strings.Builder - b.WriteString(p[0].String()) - - for _, s := range p[1:] { - b.WriteString(sep) - b.WriteString(s.String()) - } - - return b.String() -} +func (a Points) Swap(i, j int) { a[i], a[j] = a[j], a[i] } // point is the default implementation of Point. type point struct { @@ -283,6 +237,9 @@ type point struct { // text encoding of field data fields []byte + // text encoding of timestamp + ts []byte + // cached version of parsed fields from data cachedFields map[string]interface{} @@ -323,17 +280,13 @@ const ( // ParsePoints returns a slice of Points from a text representation of a point // with each point separated by newlines. If any points fail to parse, a non-nil error // will be returned in addition to the points that parsed successfully. -// -// The mm argument supplies the new measurement which is generated by calling -// EscapeMeasurement(EncodeName(orgID, bucketID)). The existing measurement is -// moved to the "_m" tag. -func ParsePoints(buf, mm []byte) ([]Point, error) { - return ParsePointsWithPrecision(buf, mm, time.Now().UTC(), "n") +func ParsePoints(buf []byte) ([]Point, error) { + return ParsePointsWithPrecision(buf, time.Now().UTC(), "n") } // ParsePointsString is identical to ParsePoints but accepts a string. -func ParsePointsString(buf, mm string) ([]Point, error) { - return ParsePoints([]byte(buf), []byte(mm)) +func ParsePointsString(buf string) ([]Point, error) { + return ParsePoints([]byte(buf)) } // ParseKey returns the measurement name and tags from a point. @@ -362,7 +315,7 @@ func ParseKeyBytesWithTags(buf []byte, tags Tags) ([]byte, Tags) { } else { name = buf[:i] } - return UnescapeMeasurement(name), tags + return unescapeMeasurement(name), tags } func ParseTags(buf []byte) Tags { @@ -384,35 +337,7 @@ func ParseName(buf []byte) []byte { name = buf[:i] } - return UnescapeMeasurement(name) -} - -// ParseMeasurement returns the value of the tag identified by MeasurementTagKey; otherwise, -// an error is returned. 
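This is the headline API change of the points.go hunk: ParsePoints and ParsePointsString drop the `mm` org/bucket measurement argument and return to their v1 shapes. A self-contained sketch of a call site against the restored signatures (the input lines are invented):

    package main

    import (
        "fmt"
        "sort"

        "github.com/influxdata/influxdb/v2/models"
    )

    func main() {
        lines := "mem,host=b used=2048i 2000000000\n" +
            "cpu,host=a value=0.5 1000000000\n"

        // No measurement argument any more. Lines that fail to parse are
        // reported through err, but points that did parse are still returned.
        pts, err := models.ParsePointsString(lines)
        if err != nil {
            fmt.Println("partial parse:", err)
        }

        // Points still implements sort.Interface, ordering by timestamp
        // (only the receiver name changes above, p to a).
        sort.Sort(models.Points(pts))
        for _, p := range pts {
            fmt.Println(p.String())
        }
    }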
-// -// buf must be a normalized series key, such that the tags are -// lexicographically sorted and therefore the measurement tag is first. -func ParseMeasurement(buf []byte) ([]byte, error) { - pos, name := scanTo(buf, 0, ',') - - // it's an empty key, so there are no tags - if len(name) == 0 { - return nil, ErrInvalidKey - } - - i := pos + 1 - var key, value []byte - i, key = scanTo(buf, i, '=') - if string(key) != MeasurementTagKey { - return nil, ErrMeasurementTagExpected - } - - _, value = scanTagValue(buf, i+1) - if bytes.IndexByte(value, '\\') != -1 { - // hasEscape - return unescapeTag(value), nil - } - return value, nil + return unescapeMeasurement(name) } // ValidPrecision checks if the precision is known. @@ -425,21 +350,137 @@ func ValidPrecision(precision string) bool { } } -func ParsePointsWithOptions(buf []byte, mm []byte, opts ...ParserOption) (_ []Point, err error) { - pp := newPointsParser(mm, opts...) - err = pp.parsePoints(buf) - return pp.points, err -} - // ParsePointsWithPrecision is similar to ParsePoints, but allows the // caller to provide a precision for time. // // NOTE: to minimize heap allocations, the returned Points will refer to subslices of buf. // This can have the unintended effect preventing buf from being garbage collected. -func ParsePointsWithPrecision(buf []byte, mm []byte, defaultTime time.Time, precision string) (_ []Point, err error) { - pp := newPointsParser(mm, WithParserDefaultTime(defaultTime), WithParserPrecision(precision)) - err = pp.parsePoints(buf) - return pp.points, err +func ParsePointsWithPrecision(buf []byte, defaultTime time.Time, precision string) ([]Point, error) { + points := make([]Point, 0, bytes.Count(buf, []byte{'\n'})+1) + var ( + pos int + block []byte + failed []string + ) + for pos < len(buf) { + pos, block = scanLine(buf, pos) + pos++ + + if len(block) == 0 { + continue + } + + start := skipWhitespace(block, 0) + + // If line is all whitespace, just skip it + if start >= len(block) { + continue + } + + // lines which start with '#' are comments + if block[start] == '#' { + continue + } + + // strip the newline if one is present + if block[len(block)-1] == '\n' { + block = block[:len(block)-1] + } + + pt, err := parsePoint(block[start:], defaultTime, precision) + if err != nil { + failed = append(failed, fmt.Sprintf("unable to parse '%s': %v", string(block[start:]), err)) + } else { + points = append(points, pt) + } + + } + if len(failed) > 0 { + return points, fmt.Errorf("%s", strings.Join(failed, "\n")) + } + return points, nil + +} + +func parsePoint(buf []byte, defaultTime time.Time, precision string) (Point, error) { + // scan the first block which is measurement[,tag1=value1,tag2=value2...] + pos, key, err := scanKey(buf, 0) + if err != nil { + return nil, err + } + + // measurement name is required + if len(key) == 0 { + return nil, fmt.Errorf("missing measurement") + } + + if len(key) > MaxKeyLength { + return nil, fmt.Errorf("max key length exceeded: %v > %v", len(key), MaxKeyLength) + } + + // scan the second block is which is field1=value1[,field2=value2,...] 
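Before the rest of parsePoint continues below, note the two roles `precision` plays in this restored path: a point that carries a timestamp has the integer scaled through SafeCalcTime, while a point that omits one receives defaultTime, truncated via SetPrecision. A fragment, assuming the models and time imports and the v1 precision tokens ("n", "u", "ms", "s", ...):

    pts, _ := models.ParsePointsWithPrecision(
        []byte("cpu value=1\ncpu value=2 1000000000\n"),
        time.Unix(0, 0).UTC(), // defaultTime, applied to the first point
        "s",                   // the second point's 1000000000 is seconds
    )
    // pts[0].Time() is defaultTime (truncated to whole seconds)
    // pts[1].Time() is time.Unix(1000000000, 0), i.e. 2001-09-09T01:46:40Z

Blank lines and lines starting with '#' are skipped by the loop above rather than reported as parse failures.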
+ pos, fields, err := scanFields(buf, pos) + if err != nil { + return nil, err + } + + // at least one field is required + if len(fields) == 0 { + return nil, fmt.Errorf("missing fields") + } + + var maxKeyErr error + err = walkFields(fields, func(k, v []byte) bool { + if sz := seriesKeySize(key, k); sz > MaxKeyLength { + maxKeyErr = fmt.Errorf("max key length exceeded: %v > %v", sz, MaxKeyLength) + return false + } + return true + }) + + if err != nil { + return nil, err + } + + if maxKeyErr != nil { + return nil, maxKeyErr + } + + // scan the last block which is an optional integer timestamp + pos, ts, err := scanTime(buf, pos) + if err != nil { + return nil, err + } + + pt := &point{ + key: key, + fields: fields, + ts: ts, + } + + if len(ts) == 0 { + pt.time = defaultTime + pt.SetPrecision(precision) + } else { + ts, err := parseIntBytes(ts, 10, 64) + if err != nil { + return nil, err + } + pt.time, err = SafeCalcTime(ts, precision) + if err != nil { + return nil, err + } + + // Determine if there are illegal non-whitespace characters after the + // timestamp block. + for pos < len(buf) { + if buf[pos] != ' ' { + return nil, ErrInvalidPoint + } + pos++ + } + } + return pt, nil } // GetPrecisionMultiplier will return a multiplier for the precision specified. @@ -1229,7 +1270,7 @@ func EscapeMeasurement(in []byte) []byte { return in } -func UnescapeMeasurement(in []byte) []byte { +func unescapeMeasurement(in []byte) []byte { if bytes.IndexByte(in, '\\') == -1 { return in } @@ -1328,15 +1369,6 @@ func NewPoint(name string, tags Tags, fields Fields, t time.Time) (Point, error) }, nil } -// NewPointFromSeries returns a Point given the serialized key, some fields, and a time. -func NewPointFromSeries(key []byte, fields Fields, t time.Time) Point { - return &point{ - key: key, - time: t, - fields: fields.MarshalBinary(), - } -} - // pointKey checks some basic requirements for valid points, and returns the // key, along with an possible error. 
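pointKey, whose body follows, is also where NaN and ±Inf float fields are rejected; the hunk below additionally fixes the error text from "NAN" to "NaN". At the models.NewPoint call site that surfaces as (fragment; assumes math and time imports):

    _, err := models.NewPoint("cpu", nil,
        models.Fields{"value": math.NaN()}, time.Unix(0, 0))
    // err: NaN is an unsupported value for field value

    _, err = models.NewPoint("cpu", nil,
        models.Fields{"value": math.Inf(1)}, time.Unix(0, 0))
    // err: +/-Inf is an unsupported value for field value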
func pointKey(measurement string, tags Tags, fields Fields, t time.Time) ([]byte, error) { @@ -1358,7 +1390,7 @@ func pointKey(measurement string, tags Tags, fields Fields, t time.Time) ([]byte return nil, fmt.Errorf("+/-Inf is an unsupported value for field %s", key) } if math.IsNaN(value) { - return nil, fmt.Errorf("NAN is an unsupported value for field %s", key) + return nil, fmt.Errorf("NaN is an unsupported value for field %s", key) } case float32: // Ensure the caller validates and handles invalid field values @@ -1366,7 +1398,7 @@ func pointKey(measurement string, tags Tags, fields Fields, t time.Time) ([]byte return nil, fmt.Errorf("+/-Inf is an unsupported value for field %s", key) } if math.IsNaN(float64(value)) { - return nil, fmt.Errorf("NAN is an unsupported value for field %s", key) + return nil, fmt.Errorf("NaN is an unsupported value for field %s", key) } } if len(key) == 0 { @@ -1374,15 +1406,9 @@ func pointKey(measurement string, tags Tags, fields Fields, t time.Time) ([]byte } } - estimatedSize := len(measurement) + 10 // add additional buffer for escaping & spaces - for _, t := range tags { - estimatedSize += len(t.Key) + len(t.Value) + 2 - } - buf := make([]byte, 0, estimatedSize) - - key := AppendMakeKey(buf, []byte(measurement), tags) + key := MakeKey([]byte(measurement), tags) for field := range fields { - sz := seriesKeySizeV1(key, []byte(field)) + sz := seriesKeySize(key, []byte(field)) if sz > MaxKeyLength { return nil, fmt.Errorf("max key length exceeded: %v > %v", sz, MaxKeyLength) } @@ -1391,12 +1417,10 @@ func pointKey(measurement string, tags Tags, fields Fields, t time.Time) ([]byte return key, nil } -func seriesKeySizeV1(key, field []byte) int { - return len(key) + len("#!~#") + len(field) -} - -func seriesKeySizeV2(key, mm, field []byte) int { - return len(mm) + len(",\xFF=") + len(field) + len(",\x00=") + len(key) + len("#!~#") + len(field) +func seriesKeySize(key, field []byte) int { + // 4 is the length of the tsm1.fieldKeySeparator constant. It's inlined here to avoid a circular + // dependency. + return len(key) + 4 + len(field) } // NewPointFromBytes returns a new Point from a marshalled Point. @@ -1565,12 +1589,10 @@ func walkTags(buf []byte, fn func(key, value []byte) bool) { // walkFields walks each field key and value via fn. If fn returns false, the iteration // is stopped. The values are the raw byte slices and not the converted types. -func walkFields(buf []byte, fn func(key, value, data []byte) bool) error { +func walkFields(buf []byte, fn func(key, value []byte) bool) error { var i int var key, val []byte for len(buf) > 0 { - data := buf - i, key = scanTo(buf, 0, '=') if i > len(buf)-2 { return fmt.Errorf("invalid value: field-key=%s", key) @@ -1578,7 +1600,7 @@ func walkFields(buf []byte, fn func(key, value, data []byte) bool) error { buf = buf[i+1:] i, val = scanFieldValue(buf, 0) buf = buf[i:] - if !fn(key, val, data[:len(data)-len(buf)]) { + if !fn(key, val) { break } @@ -1630,7 +1652,7 @@ func MakeKey(name []byte, tags Tags) []byte { func AppendMakeKey(dst []byte, name []byte, tags Tags) []byte { // unescape the name and then re-escape it to avoid double escaping. // The key should always be stored in escaped form. - dst = append(dst, EscapeMeasurement(UnescapeMeasurement(name))...) + dst = append(dst, EscapeMeasurement(unescapeMeasurement(name))...) 
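The seriesKeySize change above is the companion to the parser revert: the v2 variant, which also accounted for the org/bucket measurement and the reserved-tag separators, goes away, and the v1 arithmetic is inlined. The 4 is len("#!~#"), tsm1's field-key separator, hardcoded to avoid a circular import per the comment. Worked through for a small, made-up point:

    key := models.MakeKey([]byte("cpu"),
        models.NewTags(map[string]string{"host": "serverA"}))
    // key is "cpu,host=serverA", 16 bytes, so for field "value":
    // seriesKeySize = 16 + len("#!~#") + len("value") = 16 + 4 + 5 = 25
    // which is checked against MaxKeyLength (65535) once per field.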
dst = tags.AppendHashKey(dst) return dst } @@ -2569,3 +2591,12 @@ func ValidTagTokens(tags Tags) bool { } return true } + +// ValidKeyTokens returns true if the measurement name and all tags are valid. +func ValidKeyTokens(name string, tags Tags) bool { + if !ValidToken([]byte(name)) { + return false + } + + return ValidTagTokens(tags) +} diff --git a/models/points_internal_test.go b/models/points_internal_test.go index efff599bb6..3a760d37b0 100644 --- a/models/points_internal_test.go +++ b/models/points_internal_test.go @@ -3,7 +3,7 @@ package models import "testing" func TestMarshalPointNoFields(t *testing.T) { - points, err := ParsePointsString("m,k=v f=0i", "foo") + points, err := ParsePointsString("m,k=v f=0i") if err != nil { t.Fatal(err) } diff --git a/models/points_parser.go b/models/points_parser.go deleted file mode 100644 index bfe1d4d004..0000000000 --- a/models/points_parser.go +++ /dev/null @@ -1,349 +0,0 @@ -package models - -import ( - "bytes" - "errors" - "fmt" - "strings" - "time" - "unsafe" -) - -// Limits errors -var ( - // ErrLimitMaxLinesExceeded is the error returned by ParsePointsWithOptions when - // the number of lines in the source buffer exceeds the specified limit. - ErrLimitMaxLinesExceeded = errors.New("points: number of lines exceeded") - - // ErrLimitMaxValuesExceeded is the error returned by ParsePointsWithOptions when - // the number of parsed values exceeds the specified limit. - ErrLimitMaxValuesExceeded = errors.New("points: number of values exceeded") - - // ErrLimitMaxBytesExceeded is the error returned by ParsePointsWithOptions when - // the number of allocated bytes to parse the source buffer exceeds the specified limit. - ErrLimitMaxBytesExceeded = errors.New("points: number of allocated bytes exceeded") - - errLimit = errors.New("points: limit exceeded") -) - -type ParserStats struct { - // BytesN reports the number of bytes allocated to parse the request. - BytesN int -} - -type ParserOption func(*pointsParser) - -// WithParserPrecision specifies the default precision for to use to truncate timestamps. -func WithParserPrecision(precision string) ParserOption { - return func(pp *pointsParser) { - pp.precision = precision - } -} - -// WithParserDefaultTime specifies the default time to assign to values when no timestamp is provided. -func WithParserDefaultTime(t time.Time) ParserOption { - return func(pp *pointsParser) { - pp.defaultTime = t - } -} - -// WithParserMaxBytes specifies the maximum number of bytes that may be allocated when processing a single request. -func WithParserMaxBytes(n int) ParserOption { - return func(pp *pointsParser) { - pp.maxBytes = n - } -} - -// WithParserMaxLines specifies the maximum number of lines that may be parsed when processing a single request. -func WithParserMaxLines(n int) ParserOption { - return func(pp *pointsParser) { - pp.maxLines = n - } -} - -// WithParserMaxValues specifies the maximum number of values that may be parsed when processing a single request. -func WithParserMaxValues(n int) ParserOption { - return func(pp *pointsParser) { - pp.maxValues = n - } -} - -// WithParserStats specifies that s will contain statistics about the parsed request. 
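Stepping back to ValidKeyTokens, added at the end of points.go above: it merely composes the existing ValidToken and ValidTagTokens checks so a caller can vet a measurement name and tag set in one call. A usage sketch (the actual token rules live in ValidToken and are not restated here):

    name := "cpu"
    tags := models.NewTags(map[string]string{"host": "serverA"})
    if !models.ValidKeyTokens(name, tags) {
        // reject the series before it reaches the storage layer
    }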
-func WithParserStats(s *ParserStats) ParserOption { - return func(pp *pointsParser) { - pp.stats = s - } -} - -type parserState int - -const ( - parserStateOK parserState = iota - parserStateBytesLimit - parserStateValueLimit -) - -type pointsParser struct { - maxLines int - maxBytes int - maxValues int - bytesN int - orgBucket []byte - defaultTime time.Time // truncated time to assign to points which have no associated timestamp. - precision string - points []Point - state parserState - stats *ParserStats -} - -func newPointsParser(orgBucket []byte, opts ...ParserOption) *pointsParser { - pp := &pointsParser{ - orgBucket: orgBucket, - defaultTime: time.Now(), - precision: "ns", - state: parserStateOK, - } - - for _, opt := range opts { - opt(pp) - } - - // truncate the time based in the specified precision - pp.defaultTime = truncateTimeWithPrecision(pp.defaultTime, pp.precision) - - return pp -} - -func (pp *pointsParser) parsePoints(buf []byte) (err error) { - lineCount := bytes.Count(buf, []byte{'\n'}) - if pp.maxLines > 0 && lineCount > pp.maxLines { - return ErrLimitMaxLinesExceeded - } - - if !pp.checkAlloc(lineCount+1, int(unsafe.Sizeof(Point(nil)))) { - return ErrLimitMaxBytesExceeded - } - - pp.points = make([]Point, 0, lineCount+1) - - var ( - pos int - block []byte - failed []string - ) - for pos < len(buf) && pp.state == parserStateOK { - pos, block = scanLine(buf, pos) - pos++ - - if len(block) == 0 { - continue - } - - // lines which start with '#' are comments - start := skipWhitespace(block, 0) - - // If line is all whitespace, just skip it - if start >= len(block) { - continue - } - - if block[start] == '#' { - continue - } - - // strip the newline if one is present - if lb := block[len(block)-1]; lb == '\n' || lb == '\r' { - block = block[:len(block)-1] - } - - err = pp.parsePointsAppend(block[start:]) - if err != nil { - if errors.Is(err, errLimit) { - break - } - - if !pp.checkAlloc(1, len(block[start:])) { - pp.state = parserStateBytesLimit - break - } - - failed = append(failed, fmt.Sprintf("unable to parse '%s': %v", string(block[start:]), err)) - } - } - - if pp.stats != nil { - pp.stats.BytesN = pp.bytesN - } - - if pp.state != parserStateOK { - switch pp.state { - case parserStateBytesLimit: - return ErrLimitMaxBytesExceeded - case parserStateValueLimit: - return ErrLimitMaxValuesExceeded - default: - panic("unreachable") - } - } - - if len(failed) > 0 { - return fmt.Errorf("%s", strings.Join(failed, "\n")) - } - - return nil -} - -func (pp *pointsParser) parsePointsAppend(buf []byte) error { - // scan the first block which is measurement[,tag1=value1,tag2=value=2...] - pos, key, err := scanKey(buf, 0) - if err != nil { - return err - } - - // measurement name is required - if len(key) == 0 { - return fmt.Errorf("missing measurement") - } - - if len(key) > MaxKeyLength { - return fmt.Errorf("max key length exceeded: %v > %v", len(key), MaxKeyLength) - } - - // Since the measurement is converted to a tag and measurements & tags have - // different escaping rules, we need to check if the measurement needs escaping. 
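The options being deleted in this file were the knobs behind the write_handler_test.go cases removed earlier in this patch (the 413 "request too large" responses for byte, line, and value limits). For reference, a call against the now-removed API looked like:

    // Removed by this patch, along with the HTTP handler tests that used it.
    // buf is the request body; mm the encoded org/bucket measurement.
    pts, err := models.ParsePointsWithOptions(buf, mm,
        models.WithParserMaxLines(2),
        models.WithParserMaxValues(4),
        models.WithParserMaxBytes(5<<20),
    )
    // err may be ErrLimitMaxLinesExceeded, ErrLimitMaxValuesExceeded,
    // or ErrLimitMaxBytesExceeded once a limit trips.

The restored v1 parser has no per-request limits, so callers that need them have to enforce them before parsing.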
- _, i, _ := scanMeasurement(key, 0) - keyMeasurement := key[:i-1] - if bytes.IndexByte(keyMeasurement, '=') != -1 { - escapedKeyMeasurement := bytes.Replace(keyMeasurement, []byte("="), []byte(`\=`), -1) - - sz := len(escapedKeyMeasurement) + (len(key) - len(keyMeasurement)) - if !pp.checkAlloc(1, sz) { - return errLimit - } - newKey := make([]byte, sz) - copy(newKey, escapedKeyMeasurement) - copy(newKey[len(escapedKeyMeasurement):], key[len(keyMeasurement):]) - key = newKey - } - - // scan the second block is which is field1=value1[,field2=value2,...] - // at least one field is required - pos, fields, err := scanFields(buf, pos) - if err != nil { - return err - } else if len(fields) == 0 { - return fmt.Errorf("missing fields") - } - - // scan the last block which is an optional integer timestamp - pos, ts, err := scanTime(buf, pos) - if err != nil { - return err - } - - // Build point with timestamp only. - pt := point{} - - if len(ts) == 0 { - pt.time = pp.defaultTime - } else { - ts, err := parseIntBytes(ts, 10, 64) - if err != nil { - return err - } - pt.time, err = SafeCalcTime(ts, pp.precision) - if err != nil { - return err - } - - // Determine if there are illegal non-whitespace characters after the - // timestamp block. - for pos < len(buf) { - if buf[pos] != ' ' { - return ErrInvalidPoint - } - pos++ - } - } - - // Loop over fields and split points while validating field. - var walkFieldsErr error - if err := walkFields(fields, func(k, v, fieldBuf []byte) bool { - var newKey []byte - newKey, walkFieldsErr = pp.newV2Key(key, k) - if walkFieldsErr != nil { - return false - } - - walkFieldsErr = pp.append(point{time: pt.time, key: newKey, fields: fieldBuf}) - return walkFieldsErr == nil - }); err != nil { - return err - } else if walkFieldsErr != nil { - return walkFieldsErr - } - - return nil -} - -func (pp *pointsParser) append(p point) error { - if pp.maxValues > 0 && len(pp.points) > pp.maxValues { - pp.state = parserStateValueLimit - return errLimit - } - if !pp.checkAlloc(1, int(unsafe.Sizeof(p))) { - return errLimit - } - pp.points = append(pp.points, &p) - return nil -} - -func (pp *pointsParser) checkAlloc(n, size int) bool { - newBytes := pp.bytesN + (n * size) - if pp.maxBytes > 0 && newBytes > pp.maxBytes { - pp.state = parserStateBytesLimit - return false - } - pp.bytesN = newBytes - return true -} - -// newV2Key returns a new key by converting the old measurement & field into keys. 
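newV2Key, deleted just below, is the heart of what this patch backs out: in the v2 scheme each field of a parsed line became its own series, keyed by the org/bucket measurement with the original measurement and field demoted to reserved tags. Roughly, for hypothetical values:

    // v1 line protocol:  cpu,host=serverA value=1
    // v2 series key:     <mm>,\x00=cpu,host=serverA,\xff=value
    //
    // where <mm> = EscapeMeasurement(EncodeName(orgID, bucketID)) per the
    // removed ParsePoints doc comment, \x00 is MeasurementTagKey and \xff
    // is FieldKeyTagKey (the \xff value is assumed here).

Because \x00 sorts before any real tag key and \xff after, the result is a normalized, lexicographically sorted key, which is what the removed ParseMeasurement relied on.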
-func (pp *pointsParser) newV2Key(oldKey, field []byte) ([]byte, error) { - mm := pp.orgBucket - if sz := seriesKeySizeV2(oldKey, mm, field); sz > MaxKeyLength { - return nil, fmt.Errorf("max key length exceeded: %v > %v", sz, MaxKeyLength) - } - - sz := len(mm) + 1 + len(MeasurementTagKey) + 1 + len(oldKey) + 1 + len(FieldKeyTagKey) + 1 + len(field) - if !pp.checkAlloc(1, sz) { - return nil, errLimit - } - newKey := make([]byte, sz) - buf := newKey - - copy(buf, mm) - buf = buf[len(mm):] - - buf[0], buf[1], buf[2], buf = ',', MeasurementTagKeyBytes[0], '=', buf[3:] - copy(buf, oldKey) - buf = buf[len(oldKey):] - - buf[0], buf[1], buf[2], buf = ',', FieldKeyTagKeyBytes[0], '=', buf[3:] - copy(buf, field) - - return newKey, nil -} - -func truncateTimeWithPrecision(t time.Time, precision string) time.Time { - switch precision { - case "us": - return t.Truncate(time.Microsecond) - case "ms": - return t.Truncate(time.Millisecond) - case "s": - return t.Truncate(time.Second) - default: - return t - } -} diff --git a/models/points_test.go b/models/points_test.go index 263bd1c50f..545679d4da 100644 --- a/models/points_test.go +++ b/models/points_test.go @@ -2,25 +2,18 @@ package models_test import ( "bytes" - "encoding/binary" "errors" "fmt" "io" - "io/ioutil" "math" "math/rand" - "path/filepath" "reflect" "strconv" "strings" "testing" "time" - "github.com/google/go-cmp/cmp" - "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/models" - "github.com/influxdata/influxdb/v2/v1/tsdb" - "github.com/stretchr/testify/assert" ) var ( @@ -39,16 +32,6 @@ var ( sink interface{} ) -type ID uint64 - -// EncodeName converts org/bucket pairs to the tsdb internal serialization -func EncodeName(org, bucket ID) [16]byte { - var nameBytes [16]byte - binary.BigEndian.PutUint64(nameBytes[0:8], uint64(org)) - binary.BigEndian.PutUint64(nameBytes[8:16], uint64(bucket)) - return nameBytes -} - func TestMarshal(t *testing.T) { got := tags.HashKey() if exp := ",apple=orange,foo=bar,host=serverA,region=uswest"; string(got) != exp { @@ -100,7 +83,7 @@ func TestMarshalFields(t *testing.T) { } func TestTags_HashKey(t *testing.T) { - tags := models.NewTags(map[string]string{"A FOO": "bar", "APPLE": "orange", "host": "serverA", "region": "uswest"}) + tags = models.NewTags(map[string]string{"A FOO": "bar", "APPLE": "orange", "host": "serverA", "region": "uswest"}) got := tags.HashKey() if exp := ",A\\ FOO=bar,APPLE=orange,host=serverA,region=uswest"; string(got) != exp { t.Log("got: ", string(got)) @@ -121,15 +104,15 @@ func TestPoint_Tags(t *testing.T) { Tags models.Tags Err error }{ - {`cpu value=1`, models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value"}), nil}, - {"cpu,tag0=v0 value=1", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "tag0": "v0"}), nil}, - {"cpu,tag0=v0,tag1=v0 value=1", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "tag0": "v0", "tag1": "v0"}), nil}, - {`cpu,tag0=v\ 0 value=1`, models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "tag0": "v 0"}), nil}, - {`cpu,tag0=v\ 0\ 1,tag1=v2 value=1`, models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "tag0": "v 0 1", "tag1": "v2"}), nil}, - {`cpu,tag0=\, value=1`, models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "tag0": ","}), nil}, - {`cpu,ta\ g0=\, value=1`, 
models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "ta g0": ","}), nil}, - {`cpu,tag0=\,1 value=1`, models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "tag0": ",1"}), nil}, - {`cpu,tag0=1\"\",t=k value=1`, models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "tag0": `1\"\"`, "t": "k"}), nil}, + {`cpu value=1`, models.Tags{}, nil}, + {"cpu,tag0=v0 value=1", models.NewTags(map[string]string{"tag0": "v0"}), nil}, + {"cpu,tag0=v0,tag1=v0 value=1", models.NewTags(map[string]string{"tag0": "v0", "tag1": "v0"}), nil}, + {`cpu,tag0=v\ 0 value=1`, models.NewTags(map[string]string{"tag0": "v 0"}), nil}, + {`cpu,tag0=v\ 0\ 1,tag1=v2 value=1`, models.NewTags(map[string]string{"tag0": "v 0 1", "tag1": "v2"}), nil}, + {`cpu,tag0=\, value=1`, models.NewTags(map[string]string{"tag0": ","}), nil}, + {`cpu,ta\ g0=\, value=1`, models.NewTags(map[string]string{"ta g0": ","}), nil}, + {`cpu,tag0=\,1 value=1`, models.NewTags(map[string]string{"tag0": ",1"}), nil}, + {`cpu,tag0=1\"\",t=k value=1`, models.NewTags(map[string]string{"tag0": `1\"\"`, "t": "k"}), nil}, {"cpu,_measurement=v0,tag0=v0 value=1", nil, errors.New(`unable to parse 'cpu,_measurement=v0,tag0=v0 value=1': cannot use reserved tag key "_measurement"`)}, // the following are all unsorted tag keys to ensure this works for both cases {"cpu,tag0=v0,_measurement=v0 value=1", nil, errors.New(`unable to parse 'cpu,tag0=v0,_measurement=v0 value=1': cannot use reserved tag key "_measurement"`)}, @@ -139,7 +122,7 @@ func TestPoint_Tags(t *testing.T) { for _, example := range examples { t.Run(example.Point, func(t *testing.T) { - pts, err := models.ParsePointsString(example.Point, "mm") + pts, err := models.ParsePointsString(example.Point) if err != nil { if !reflect.DeepEqual(example.Err, err) { t.Fatalf("expected %#v, found %#v", example.Err, err) @@ -163,7 +146,7 @@ func TestPoint_Tags(t *testing.T) { } func TestPoint_StringSize(t *testing.T) { - testPointCube(t, func(p models.Point) { + testPoint_cube(t, func(p models.Point) { l := p.StringSize() s := p.String() @@ -171,10 +154,11 @@ func TestPoint_StringSize(t *testing.T) { t.Errorf("Incorrect length for %q. got %v, exp %v", s, l, len(s)) } }) + } func TestPoint_AppendString(t *testing.T) { - testPointCube(t, func(p models.Point) { + testPoint_cube(t, func(p models.Point) { got := p.AppendString(nil) exp := []byte(p.String()) @@ -184,7 +168,7 @@ func TestPoint_AppendString(t *testing.T) { }) } -func testPointCube(t *testing.T, f func(p models.Point)) { +func testPoint_cube(t *testing.T, f func(p models.Point)) { // heard of a table-driven test? let's make a cube-driven test... 
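The renamed helper drives every combination of the tag, field, and time axes declared just below through the callback f. The loop body sits outside this hunk's context, but presumably it reads along these lines (timeList being the third axis, also not visible here):

    for _, tagSet := range tagList {
        for _, fieldSet := range fieldList {
            for _, pointTime := range timeList {
                p, err := models.NewPoint("test", tagSet, fieldSet, pointTime)
                if err != nil {
                    t.Fatalf("unexpected error creating point: %v", err)
                }
                f(p)
            }
        }
    }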
tagList := []models.Tags{nil, {models.NewTag([]byte("foo"), []byte("bar"))}, tags} fieldList := []models.Fields{{"a": 42.0}, {"a": 42, "b": "things"}, fields} @@ -247,7 +231,7 @@ func BenchmarkNewPoint(b *testing.B) { } func BenchmarkNewPointFromBinary(b *testing.B) { - pts, err := models.ParsePointsString("cpu value1=1.0,value2=1.0,value3=3.0,value4=4,value5=\"five\" 1000000000", "") + pts, err := models.ParsePointsString("cpu value1=1.0,value2=1.0,value3=3.0,value4=4,value5=\"five\" 1000000000") if err != nil { b.Fatalf("unexpected error ParsePointsString: %v", err) } @@ -273,7 +257,7 @@ func BenchmarkParsePointNoTags5000(b *testing.B) { lines := strings.Join(batch[:], "\n") b.ResetTimer() for i := 0; i < b.N; i++ { - models.ParsePoints([]byte(lines), []byte("mm")) + models.ParsePoints([]byte(lines)) b.SetBytes(int64(len(lines))) } } @@ -281,7 +265,7 @@ func BenchmarkParsePointNoTags5000(b *testing.B) { func BenchmarkParsePointNoTags(b *testing.B) { line := `cpu value=1i 1000000000` for i := 0; i < b.N; i++ { - models.ParsePoints([]byte(line), []byte("mm")) + models.ParsePoints([]byte(line)) b.SetBytes(int64(len(line))) } } @@ -290,7 +274,7 @@ func BenchmarkParsePointWithPrecisionN(b *testing.B) { line := `cpu value=1i 1000000000` defaultTime := time.Now().UTC() for i := 0; i < b.N; i++ { - models.ParsePointsWithPrecision([]byte(line), []byte("mm"), defaultTime, "ns") + models.ParsePointsWithPrecision([]byte(line), defaultTime, "n") b.SetBytes(int64(len(line))) } } @@ -299,7 +283,7 @@ func BenchmarkParsePointWithPrecisionU(b *testing.B) { line := `cpu value=1i 1000000000` defaultTime := time.Now().UTC() for i := 0; i < b.N; i++ { - models.ParsePointsWithPrecision([]byte(line), []byte("mm"), defaultTime, "us") + models.ParsePointsWithPrecision([]byte(line), defaultTime, "u") b.SetBytes(int64(len(line))) } } @@ -307,7 +291,7 @@ func BenchmarkParsePointWithPrecisionU(b *testing.B) { func BenchmarkParsePointsTagsSorted2(b *testing.B) { line := `cpu,host=serverA,region=us-west value=1i 1000000000` for i := 0; i < b.N; i++ { - models.ParsePoints([]byte(line), []byte("mm")) + models.ParsePoints([]byte(line)) b.SetBytes(int64(len(line))) } } @@ -315,16 +299,15 @@ func BenchmarkParsePointsTagsSorted2(b *testing.B) { func BenchmarkParsePointsTagsSorted5(b *testing.B) { line := `cpu,env=prod,host=serverA,region=us-west,target=servers,zone=1c value=1i 1000000000` for i := 0; i < b.N; i++ { - models.ParsePoints([]byte(line), []byte("mm")) + models.ParsePoints([]byte(line)) b.SetBytes(int64(len(line))) } } func BenchmarkParsePointsTagsSorted10(b *testing.B) { - b.ReportAllocs() line := `cpu,env=prod,host=serverA,region=us-west,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5,target=servers,zone=1c value=1i 1000000000` for i := 0; i < b.N; i++ { - models.ParsePoints([]byte(line), []byte("mm")) + models.ParsePoints([]byte(line)) b.SetBytes(int64(len(line))) } } @@ -332,7 +315,7 @@ func BenchmarkParsePointsTagsSorted10(b *testing.B) { func BenchmarkParsePointsTagsUnSorted2(b *testing.B) { line := `cpu,region=us-west,host=serverA value=1i 1000000000` for i := 0; i < b.N; i++ { - pt, _ := models.ParsePoints([]byte(line), []byte("mm")) + pt, _ := models.ParsePoints([]byte(line)) b.SetBytes(int64(len(line))) pt[0].Key() } @@ -341,7 +324,7 @@ func BenchmarkParsePointsTagsUnSorted2(b *testing.B) { func BenchmarkParsePointsTagsUnSorted5(b *testing.B) { line := `cpu,region=us-west,host=serverA,env=prod,target=servers,zone=1c value=1i 1000000000` for i := 0; i < b.N; i++ { - pt, _ := 
models.ParsePoints([]byte(line), []byte("mm")) + pt, _ := models.ParsePoints([]byte(line)) b.SetBytes(int64(len(line))) pt[0].Key() } @@ -350,7 +333,7 @@ func BenchmarkParsePointsTagsUnSorted5(b *testing.B) { func BenchmarkParsePointsTagsUnSorted10(b *testing.B) { line := `cpu,region=us-west,host=serverA,env=prod,target=servers,zone=1c,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5 value=1i 1000000000` for i := 0; i < b.N; i++ { - pt, _ := models.ParsePoints([]byte(line), []byte("mm")) + pt, _ := models.ParsePoints([]byte(line)) b.SetBytes(int64(len(line))) pt[0].Key() } @@ -363,32 +346,6 @@ func BenchmarkParseKey(b *testing.B) { } } -var ( - dummyName []byte -) - -func BenchmarkParseMeasurement(b *testing.B) { - benchmarks := []struct { - input string - }{ - {input: "m,\x00=value"}, - {input: "m\\ q,\x00=value"}, - {input: "m,\x00=v\\ alue"}, - {input: "m,\x00=value,tag0=val0"}, - {input: "m,\x00=v\\ alue,tag0=val0"}, - } - - for _, bm := range benchmarks { - b.Run(bm.input, func(b *testing.B) { - var name []byte - for i := 0; i < b.N; i++ { - name, _ = models.ParseMeasurement([]byte(bm.input)) - } - dummyName = name - }) - } -} - // TestPoint wraps a models.Point but also makes available the raw // arguments to the Point. // @@ -415,60 +372,60 @@ func NewTestPoint(name string, tags models.Tags, fields models.Fields, time time } } -func testParsePoints(t *testing.T, line string, mm string, points ...TestPoint) { - t.Helper() - - pts, err := models.ParsePointsWithPrecision([]byte(line), []byte(mm), time.Unix(0, 0), "ns") +func test(t *testing.T, line string, point TestPoint) { + pts, err := models.ParsePointsWithPrecision([]byte(line), time.Unix(0, 0), "n") if err != nil { t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, line, err) } - if exp := len(points); len(pts) != exp { + if exp := 1; len(pts) != exp { t.Fatalf(`ParsePoints("%s") len mismatch. got %d, exp %d`, line, len(pts), exp) } - for i, point := range points { - if exp := point.Key(); !bytes.Equal(pts[i].Key(), exp) { - t.Errorf("%d. ParsePoints(\"%s\") key mismatch.\ngot %v\nexp %v", i, line, string(pts[i].Key()), string(exp)) - } + if exp := point.Key(); !bytes.Equal(pts[0].Key(), exp) { + t.Errorf("ParsePoints(\"%s\") key mismatch.\ngot %v\nexp %v", line, string(pts[0].Key()), string(exp)) + } - if exp := len(point.Tags()); len(pts[i].Tags()) != exp { - t.Errorf(`%d. ParsePoints("%s") tags mismatch. got %v, exp %v`, i, line, pts[i].Tags(), exp) - } + if exp := len(point.Tags()); len(pts[0].Tags()) != exp { + t.Errorf(`ParsePoints("%s") tags mismatch. got %v, exp %v`, line, pts[0].Tags(), exp) + } - for _, tag := range pts[i].Tags() { - if !bytes.Equal(tag.Value, point.RawTags.Get(tag.Key)) { - t.Errorf(`%d. ParsePoints("%s") tags mismatch. got %s, exp %s`, i, line, tag.Value, point.RawTags.Get(tag.Key)) + for _, tag := range pts[0].Tags() { + if !bytes.Equal(tag.Value, point.RawTags.Get(tag.Key)) { + t.Errorf(`ParsePoints("%s") tags mismatch. got %s, exp %s`, line, tag.Value, point.RawTags.Get(tag.Key)) + } + } + + for name, value := range point.RawFields { + fields, err := pts[0].Fields() + if err != nil { + t.Fatal(err) + } + val := fields[name] + expfval, ok := val.(float64) + + if ok && math.IsNaN(expfval) { + gotfval, ok := value.(float64) + if ok && !math.IsNaN(gotfval) { + t.Errorf(`ParsePoints("%s") field '%s' mismatch. 
exp NaN`, line, name) } } - - for name, value := range point.RawFields { - fields, err := pts[i].Fields() - if err != nil { - t.Fatal(err) - } - val := fields[name] - expfval, ok := val.(float64) - - if ok && math.IsNaN(expfval) { - gotfval, ok := value.(float64) - if ok && !math.IsNaN(gotfval) { - t.Errorf(`%d. ParsePoints("%s") field '%s' mismatch. exp NaN`, i, line, name) - } - } - if !reflect.DeepEqual(val, value) { - t.Errorf(`%d. ParsePoints("%s") field '%s' mismatch. got %[3]v (%[3]T), exp %[4]v (%[4]T)`, i, line, name, val, value) - } + if !reflect.DeepEqual(val, value) { + t.Errorf(`ParsePoints("%s") field '%s' mismatch. got %[3]v (%[3]T), exp %[4]v (%[4]T)`, line, name, val, value) } + } - if !pts[i].Time().Equal(point.Time()) { - t.Errorf(`%d. ParsePoints("%s") time mismatch. got %v, exp %v`, i, line, pts[i].Time(), point.Time()) - } + if !pts[0].Time().Equal(point.Time()) { + t.Errorf(`ParsePoints("%s") time mismatch. got %v, exp %v`, line, pts[0].Time(), point.Time()) + } + + if !strings.HasPrefix(pts[0].String(), line) { + t.Errorf("ParsePoints string mismatch.\ngot: %v\nexp: %v", pts[0].String(), line) } } func TestParsePointNoValue(t *testing.T) { - pts, err := models.ParsePointsString("", "mm") + pts, err := models.ParsePointsString("") if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, "", err) } @@ -479,7 +436,7 @@ func TestParsePointNoValue(t *testing.T) { } func TestParsePointWhitespaceValue(t *testing.T) { - pts, err := models.ParsePointsString(" ", "mm") + pts, err := models.ParsePointsString(" ") if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, "", err) } @@ -499,7 +456,7 @@ func TestParsePointNoFields(t *testing.T) { } for i, example := range examples { - _, err := models.ParsePointsString(example, "mm") + _, err := models.ParsePointsString(example) if err == nil { t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example) } else if !strings.HasSuffix(err.Error(), expectedSuffix) { @@ -509,7 +466,7 @@ func TestParsePointNoFields(t *testing.T) { } func TestParsePointNoTimestamp(t *testing.T) { - testParsePoints(t, "cpu value=1", "mm", NewTestPoint("mm", models.NewTags(map[string]string{models.FieldKeyTagKey: "value", models.MeasurementTagKey: "cpu"}), models.Fields{"value": 1.0}, time.Unix(0, 0))) + test(t, "cpu value=1", NewTestPoint("cpu", nil, models.Fields{"value": 1.0}, time.Unix(0, 0))) } func TestParsePointMissingQuote(t *testing.T) { @@ -520,7 +477,7 @@ func TestParsePointMissingQuote(t *testing.T) { } for i, example := range examples { - _, err := models.ParsePointsString(example, "mm") + _, err := models.ParsePointsString(example) if err == nil { t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example) } else if !strings.HasSuffix(err.Error(), expectedSuffix) { @@ -542,7 +499,7 @@ func TestParsePointMissingTagKey(t *testing.T) { } for i, example := range examples { - _, err := models.ParsePointsString(example, "mm") + _, err := models.ParsePointsString(example) if err == nil { t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example) } else if !strings.HasSuffix(err.Error(), expectedSuffix) { @@ -550,7 +507,7 @@ func TestParsePointMissingTagKey(t *testing.T) { } } - _, err := models.ParsePointsString(`cpu,host=serverA,\ =us-east value=1i`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,\ =us-east value=1i`) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. 
got %v, exp nil`, `cpu,host=serverA,\ =us-east value=1i`, err) } @@ -569,7 +526,7 @@ func TestParsePointMissingTagValue(t *testing.T) { } for i, example := range examples { - _, err := models.ParsePointsString(example, "mm") + _, err := models.ParsePointsString(example) if err == nil { t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example) } else if !strings.HasSuffix(err.Error(), expectedSuffix) { @@ -586,7 +543,7 @@ func TestParsePointInvalidTagFormat(t *testing.T) { } for i, example := range examples { - _, err := models.ParsePointsString(example, "mm") + _, err := models.ParsePointsString(example) if err == nil { t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example) } else if !strings.HasSuffix(err.Error(), expectedSuffix) { @@ -596,53 +553,53 @@ func TestParsePointInvalidTagFormat(t *testing.T) { } func TestParsePointMissingFieldName(t *testing.T) { - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west =`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west =`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west =`) } - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west =123i`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west =123i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west =123i`) } - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west a\ =123i`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west a\ =123i`) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west a\ =123i`) } - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=123i,=456i`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=123i,=456i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=123i,=456i`) } } func TestParsePointMissingFieldValue(t *testing.T) { - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=`) } - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value= 1000000000i`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value= 1000000000i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value= 1000000000i`) } - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=,value2=1i`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=,value2=1i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=,value2=1i`) } - _, err = models.ParsePointsString(`cpu,host=server01,region=us-west 1434055562000000000i`, "mm") + _, err = models.ParsePointsString(`cpu,host=server01,region=us-west 1434055562000000000i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. 
got nil, exp error`, `cpu,host=server01,region=us-west 1434055562000000000i`) } - _, err = models.ParsePointsString(`cpu,host=server01,region=us-west value=1i,b`, "mm") + _, err = models.ParsePointsString(`cpu,host=server01,region=us-west value=1i,b`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=server01,region=us-west value=1i,b`) } - _, err = models.ParsePointsString(`m f="blah"=123,r 1531703600000000000`, "mm") + _, err = models.ParsePointsString(`m f="blah"=123,r 1531703600000000000`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `m f="blah"=123,r 1531703600000000000`) } @@ -662,7 +619,7 @@ func TestParsePointBadNumber(t *testing.T) { "cpu v= ", "cpu v=-123u", } { - _, err := models.ParsePointsString(tt, "mm") + _, err := models.ParsePointsString(tt) if err == nil { t.Errorf("Point %q should be invalid", tt) } @@ -671,14 +628,14 @@ func TestParsePointBadNumber(t *testing.T) { func TestParsePointMaxInt64(t *testing.T) { // out of range - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9223372036854775808i`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9223372036854775808i`) exp := `unable to parse 'cpu,host=serverA,region=us-west value=9223372036854775808i': unable to parse integer 9223372036854775808: strconv.ParseInt: parsing "9223372036854775808": value out of range` if err == nil || (err != nil && err.Error() != exp) { t.Fatalf("Error mismatch:\nexp: %s\ngot: %v", exp, err) } // max int - p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9223372036854775807i`, "mm") + p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9223372036854775807i`) if err != nil { t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=9223372036854775807i`, err) } @@ -691,7 +648,7 @@ func TestParsePointMaxInt64(t *testing.T) { } // leading zeros - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=0009223372036854775807i`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=0009223372036854775807i`) if err != nil { t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=0009223372036854775807i`, err) } @@ -699,13 +656,13 @@ func TestParsePointMaxInt64(t *testing.T) { func TestParsePointMinInt64(t *testing.T) { // out of range - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-9223372036854775809i`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-9223372036854775809i`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=-9223372036854775809i`) } // min int - p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-9223372036854775808i`, "mm") + p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-9223372036854775808i`) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-9223372036854775808i`, err) } @@ -718,7 +675,7 @@ func TestParsePointMinInt64(t *testing.T) { } // leading zeros - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=-0009223372036854775808i`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=-0009223372036854775808i`) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. 
got %v, exp nil`, `cpu,host=serverA,region=us-west value=-0009223372036854775808i`, err) } @@ -726,13 +683,13 @@ func TestParsePointMinInt64(t *testing.T) { func TestParsePointMaxFloat64(t *testing.T) { // out of range - _, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "1"+string(maxFloat64)), "mm") + _, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "1"+string(maxFloat64))) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=...`) } // max float - p, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, string(maxFloat64)), "mm") + p, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, string(maxFloat64))) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=9223372036854775807`, err) } @@ -745,7 +702,7 @@ func TestParsePointMaxFloat64(t *testing.T) { } // leading zeros - _, err = models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "0000"+string(maxFloat64)), "mm") + _, err = models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "0000"+string(maxFloat64))) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=0009223372036854775807`, err) } @@ -753,13 +710,13 @@ func TestParsePointMaxFloat64(t *testing.T) { func TestParsePointMinFloat64(t *testing.T) { // out of range - _, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "-1"+string(minFloat64)[1:]), "mm") + _, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "-1"+string(minFloat64)[1:])) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=...`) } // min float - p, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, string(minFloat64)), "mm") + p, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, string(minFloat64))) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=...`, err) } @@ -772,7 +729,7 @@ func TestParsePointMinFloat64(t *testing.T) { } // leading zeros - _, err = models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "-0000000"+string(minFloat64)[1:]), "mm") + _, err = models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "-0000000"+string(minFloat64)[1:])) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. 
got %v, exp nil`, `cpu,host=serverA,region=us-west value=...`, err) } @@ -780,14 +737,14 @@ func TestParsePointMinFloat64(t *testing.T) { func TestParsePointMaxUint64(t *testing.T) { // out of range - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=18446744073709551616u`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=18446744073709551616u`) exp := `unable to parse 'cpu,host=serverA,region=us-west value=18446744073709551616u': unable to parse unsigned 18446744073709551616: strconv.ParseUint: parsing "18446744073709551616": value out of range` if err == nil || (err != nil && err.Error() != exp) { t.Fatalf("Error mismatch:\nexp: %s\ngot: %v", exp, err) } // max int - p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=18446744073709551615u`, "mm") + p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=18446744073709551615u`) if err != nil { t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=18446744073709551615u`, err) } @@ -800,7 +757,7 @@ func TestParsePointMaxUint64(t *testing.T) { } // leading zeros - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=00018446744073709551615u`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=00018446744073709551615u`) if err != nil { t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=00018446744073709551615u`, err) } @@ -808,13 +765,13 @@ func TestParsePointMaxUint64(t *testing.T) { func TestParsePointMinUint64(t *testing.T) { // out of range - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=--1u`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=--1u`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=-1u`) } // min int - p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=0u`, "mm") + p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=0u`) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=0u`, err) } @@ -827,92 +784,75 @@ func TestParsePointMinUint64(t *testing.T) { } // leading zeros - _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=0000u`, "mm") + _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=0000u`) if err != nil { t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=0000u`, err) } } func TestParsePointNumberNonNumeric(t *testing.T) { - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=.1a`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=.1a`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=.1a`) } } func TestParsePointNegativeWrongPlace(t *testing.T) { - _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=0.-1`, "mm") + _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=0.-1`) if err == nil { t.Errorf(`ParsePoints("%s") mismatch. 
got nil, exp error`, `cpu,host=serverA,region=us-west value=0.-1`)
}
}

func TestParsePointOnlyNegativeSign(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-`)
if err == nil {
t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=-`)
}
}

func TestParsePointFloatMultipleDecimals(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.1.1`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.1.1`)
if err == nil {
t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=1.1.1`)
}
}

-func TestParseWithLineBreaks(t *testing.T) {
- ss := []string{
- "cpu,host=serverA,region=us-west value=1i\ncpu,host=serverA,region=us-west value=2i",
- "cpu,host=serverA,region=us-west value=1i\n\ncpu,host=serverA,region=us-west value=2i",
- "cpu,host=serverA,region=us-west value=1i\r\ncpu,host=serverA,region=us-west value=2i",
- }
- for _, s := range ss {
- pp, err := models.ParsePointsString(s, "mm")
- if err != nil {
- t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, s, err)
- }
- if l := len(pp); l != 2 {
- t.Errorf(`ParsePoints("%s") mismatch. got %v, exp 2`, s, l)
- }
- }
-}
-
func TestParsePointInteger(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1i`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1i`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1i`, err)
}
}

func TestParsePointNegativeInteger(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1i`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1i`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-1i`, err)
}
}

func TestParsePointNegativeFloat(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1.0`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1.0`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-1.0`, err)
}
}

func TestParsePointFloatNoLeadingDigit(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=.1`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=.1`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=.1`, err)
}
}

func TestParsePointFloatScientific(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0e4`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0e4`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1.0e4`, err)
}

- pts, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1e4`, "mm")
+ pts, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1e4`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch.
got %v, exp nil`, `cpu,host=serverA,region=us-west value=1e4`, err)
}
@@ -927,12 +867,12 @@ func TestParsePointFloatScientific(t *testing.T) {
}

func TestParsePointFloatScientificUpper(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0E4`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0E4`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1.0E4`, err)
}

- pts, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1E4`, "mm")
+ pts, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1E4`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1E4`, err)
}
@@ -947,33 +887,33 @@ func TestParsePointFloatScientificUpper(t *testing.T) {
}

func TestParsePointFloatScientificDecimal(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0e-4`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0e-4`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1.0e-4`, err)
}
}

func TestParsePointFloatNegativeScientific(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1.0e-4`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1.0e-4`)
if err != nil {
t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-1.0e-4`, err)
}
}

func TestParsePointBooleanInvalid(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=a`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=a`)
if err == nil {
t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=a`)
}
}

func TestParsePointScientificIntInvalid(t *testing.T) {
- _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9ie10`, "mm")
+ _, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9ie10`)
if err == nil {
t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=9ie10`)
}

- _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=9e10i`, "mm")
+ _, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=9e10i`)
if err == nil {
t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=9e10i`)
}
@@ -991,9 +931,9 @@ func TestParsePointWhitespace(t *testing.T) {
`,
}

- expPoint := NewTestPoint("mm", models.NewTags(map[string]string{models.FieldKeyTagKey: "value", models.MeasurementTagKey: "cpu"}), models.Fields{"value": 1.0}, time.Unix(0, 1257894000000000000))
+ expPoint := NewTestPoint("cpu", models.Tags{}, models.Fields{"value": 1.0}, time.Unix(0, 1257894000000000000))
for i, example := range examples {
- pts, err := models.ParsePoints([]byte(example), []byte("mm"))
+ pts, err := models.ParsePoints([]byte(example))
if err != nil {
t.Fatalf(`[Example %d] ParsePoints("%s") error.
got %v, exp nil`, i, example, err) } @@ -1030,23 +970,21 @@ func TestParsePointWhitespace(t *testing.T) { func TestParsePointUnescape(t *testing.T) { // commas in measurement name - testParsePoints(t, `foo\,bar value=1i`, "mm", + test(t, `foo\,bar value=1i`, NewTestPoint( - "mm", - models.NewTags(map[string]string{models.FieldKeyTagKey: "value", models.MeasurementTagKey: "foo,bar"}), // comma in the name + "foo,bar", // comma in the name + models.NewTags(map[string]string{}), models.Fields{ "value": int64(1), }, time.Unix(0, 0))) // comma in measurement name with tags - testParsePoints(t, `cpu\,main,regions=east value=1.0`, "mm", + test(t, `cpu\,main,regions=east value=1.0`, NewTestPoint( - "mm", + "cpu,main", // comma in the name models.NewTags(map[string]string{ - models.FieldKeyTagKey: "value", - models.MeasurementTagKey: "cpu,main", // comma in the name - "regions": "east", + "regions": "east", }), models.Fields{ "value": 1.0, @@ -1054,13 +992,11 @@ func TestParsePointUnescape(t *testing.T) { time.Unix(0, 0))) // spaces in measurement name - testParsePoints(t, `cpu\ load,region=east value=1.0`, "mm", + test(t, `cpu\ load,region=east value=1.0`, NewTestPoint( - "mm", + "cpu load", // space in the name models.NewTags(map[string]string{ - models.FieldKeyTagKey: "value", - models.MeasurementTagKey: "cpu load", // space in the name - "region": "east", + "region": "east", }), models.Fields{ "value": 1.0, @@ -1068,13 +1004,11 @@ func TestParsePointUnescape(t *testing.T) { time.Unix(0, 0))) // equals in measurement name - testParsePoints(t, `cpu\=load,region=east value=1.0`, "mm", + test(t, `cpu\=load,region=east value=1.0`, NewTestPoint( - "mm", + `cpu\=load`, // backslash is literal models.NewTags(map[string]string{ - models.FieldKeyTagKey: "value", - models.MeasurementTagKey: `cpu\=load`, // backslash is literal - "region": "east", + "region": "east", }), models.Fields{ "value": 1.0, @@ -1082,13 +1016,11 @@ func TestParsePointUnescape(t *testing.T) { time.Unix(0, 0))) // equals in measurement name - testParsePoints(t, `cpu=load,region=east value=1.0`, "mm", + test(t, `cpu=load,region=east value=1.0`, NewTestPoint( - "mm", + `cpu=load`, // literal equals is fine in measurement name models.NewTags(map[string]string{ - models.FieldKeyTagKey: "value", - models.MeasurementTagKey: `cpu=load`, // literal equals is fine in measurement name - "region": "east", + "region": "east", }), models.Fields{ "value": 1.0, @@ -1096,12 +1028,10 @@ func TestParsePointUnescape(t *testing.T) { time.Unix(0, 0))) // commas in tag names - testParsePoints(t, `cpu,region\,zone=east value=1.0`, "mm", - NewTestPoint("mm", + test(t, `cpu,region\,zone=east value=1.0`, + NewTestPoint("cpu", models.NewTags(map[string]string{ - models.FieldKeyTagKey: "value", - models.MeasurementTagKey: `cpu`, - "region,zone": "east", // comma in the tag key + "region,zone": "east", // comma in the tag key }), models.Fields{ "value": 1.0, @@ -1109,12 +1039,10 @@ func TestParsePointUnescape(t *testing.T) { time.Unix(0, 0))) // spaces in tag name - testParsePoints(t, `cpu,region\ zone=east value=1.0`, "mm", - NewTestPoint("mm", + test(t, `cpu,region\ zone=east value=1.0`, + NewTestPoint("cpu", models.NewTags(map[string]string{ - models.FieldKeyTagKey: "value", - models.MeasurementTagKey: `cpu`, - "region zone": "east", // space in the tag name + "region zone": "east", // space in the tag name }), models.Fields{ "value": 1.0, @@ -1122,12 +1050,10 @@ func TestParsePointUnescape(t *testing.T) { time.Unix(0, 0))) // backslash with escaped equals in 
tag name
- testParsePoints(t, `cpu,reg\\=ion=east value=1.0`, "mm",
- NewTestPoint("mm",
+ test(t, `cpu,reg\\=ion=east value=1.0`,
+ NewTestPoint("cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- `reg\=ion`: "east",
+ `reg\=ion`: "east",
}),
models.Fields{
"value": 1.0,
@@ -1135,25 +1061,21 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// space as tag name
- testParsePoints(t, `cpu,\ =east value=1.0`, "mm",
- NewTestPoint("mm",
- models.Tags{
- {Key: []byte(models.MeasurementTagKey), Value: []byte("cpu")},
- {Key: []byte(" "), Value: []byte("east")}, // tag name is single space
- {Key: []byte(models.FieldKeyTagKey), Value: []byte("value")},
- },
+ test(t, `cpu,\ =east value=1.0`,
+ NewTestPoint("cpu",
+ models.NewTags(map[string]string{
+ " ": "east", // tag name is single space
+ }),
models.Fields{
"value": 1.0,
},
time.Unix(0, 0)))

// commas in tag values
- testParsePoints(t, `cpu,regions=east\,west value=1.0`, "mm",
- NewTestPoint("mm",
+ test(t, `cpu,regions=east\,west value=1.0`,
+ NewTestPoint("cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": "east,west", // comma in the tag value
+ "regions": "east,west", // comma in the tag value
}),
models.Fields{
"value": 1.0,
@@ -1161,13 +1083,11 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// backslash literal followed by escaped space
- testParsePoints(t, `cpu,regions=\\ east value=1.0`, "mm",
+ test(t, `cpu,regions=\\ east value=1.0`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": `\ east`,
+ "regions": `\ east`,
}),
models.Fields{
"value": 1.0,
@@ -1175,13 +1095,11 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// backslash literal followed by escaped space
- testParsePoints(t, `cpu,regions=eas\\ t value=1.0`, "mm",
+ test(t, `cpu,regions=eas\\ t value=1.0`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": `eas\ t`,
+ "regions": `eas\ t`,
}),
models.Fields{
"value": 1.0,
@@ -1189,13 +1107,11 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// backslash literal followed by trailing space
- testParsePoints(t, `cpu,regions=east\\ value=1.0`, "mm",
+ test(t, `cpu,regions=east\\ value=1.0`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": `east\ `,
+ "regions": `east\ `,
}),
models.Fields{
"value": 1.0,
@@ -1203,12 +1119,10 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// spaces in tag values
- testParsePoints(t, `cpu,regions=east\ west value=1.0`, "mm",
- NewTestPoint("mm",
+ test(t, `cpu,regions=east\ west value=1.0`,
+ NewTestPoint("cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": "east west", // comma in the tag value
+ "regions": "east west", // space in the tag value
}),
models.Fields{
"value": 1.0,
@@ -1216,12 +1130,10 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// commas in field keys
- testParsePoints(t, `cpu,regions=east value\,ms=1.0`, "mm",
- NewTestPoint("mm",
+ test(t, `cpu,regions=east value\,ms=1.0`,
+ NewTestPoint("cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value,ms",
- models.MeasurementTagKey: `cpu`,
- "regions": "east",
+ "regions": "east",
}),
models.Fields{
"value,ms": 1.0, // comma in the field keys
},
@@ -1229,12 +1141,10 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// spaces in field keys
- testParsePoints(t, `cpu,regions=east value\ ms=1.0`, "mm",
- NewTestPoint("mm",
+ test(t, `cpu,regions=east value\ ms=1.0`,
+ NewTestPoint("cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value ms",
- models.MeasurementTagKey: `cpu`,
- "regions": "east",
+ "regions": "east",
}),
models.Fields{
"value ms": 1.0, // space in the field key
},
@@ -1242,13 +1152,11 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// tag with no value
- testParsePoints(t, `cpu,regions=east value="1"`, "mm",
- NewTestPoint("mm",
+ test(t, `cpu,regions=east value="1"`,
+ NewTestPoint("cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": "east",
- "foobar": "",
+ "regions": "east",
+ "foobar": "",
}),
models.Fields{
"value": "1",
@@ -1256,12 +1164,10 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// commas in field values
- testParsePoints(t, `cpu,regions=east value="1,0"`, "mm",
- NewTestPoint("mm",
+ test(t, `cpu,regions=east value="1,0"`,
+ NewTestPoint("cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": "east",
+ "regions": "east",
}),
models.Fields{
"value": "1,0", // comma in the field value
@@ -1269,13 +1175,11 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// random character escaped
- testParsePoints(t, `cpu,regions=eas\t value=1.0`, "mm",
+ test(t, `cpu,regions=eas\t value=1.0`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": "eas\\t",
+ "regions": "eas\\t",
}),
models.Fields{
"value": 1.0,
@@ -1283,13 +1187,11 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// backslash literal followed by escaped characters
- testParsePoints(t, `cpu,regions=\\,\,\=east value=1.0`, "mm",
+ test(t, `cpu,regions=\\,\,\=east value=1.0`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.FieldKeyTagKey: "value",
- models.MeasurementTagKey: `cpu`,
- "regions": `\,,=east`,
+ "regions": `\,,=east`,
}),
models.Fields{
"value": 1.0,
@@ -1297,69 +1199,34 @@ func TestParsePointUnescape(t *testing.T) {
time.Unix(0, 0)))

// field keys using escape char.
- testParsePoints(t, `cpu \a=1i`, "mm",
+ test(t, `cpu \a=1i`,
NewTestPoint(
- "mm",
- models.NewTags(map[string]string{
- models.FieldKeyTagKey: "\\a",
- models.MeasurementTagKey: `cpu`,
- }),
+ "cpu",
+ models.NewTags(map[string]string{}),
models.Fields{
"\\a": int64(1), // Left as parsed since it's not a known escape sequence.
}, time.Unix(0, 0))) // measurement, tag and tag value with equals - testParsePoints(t, `cpu=load,equals\=foo=tag\=value value=1i`, "mm", + test(t, `cpu=load,equals\=foo=tag\=value value=1i`, NewTestPoint( - "mm", + "cpu=load", // Not escaped models.NewTags(map[string]string{ - models.FieldKeyTagKey: "value", - models.MeasurementTagKey: `cpu=load`, // Not escaped - "equals=foo": "tag=value", // Tag and value unescaped + "equals=foo": "tag=value", // Tag and value unescaped }), models.Fields{ "value": int64(1), }, time.Unix(0, 0))) -} -func TestPoints_String(t *testing.T) { - tags := models.NewTags(map[string]string{ - "t1": "v1", - "t2": "v2", - }) - pts := make(models.Points, 5) - for i := 0; i < len(pts); i++ { - point, err := models.NewPoint( - "m1", - tags, - models.Fields{ - "f1": i, - }, - time.Unix(0, int64(i)), - ) - if err != nil { - t.Fatalf("unable to create point %v", err) - } - pts[i] = point - } - got := pts.String() - want := `m1,t1=v1,t2=v2 f1=0i 0 -m1,t1=v1,t2=v2 f1=1i 1 -m1,t1=v1,t2=v2 f1=2i 2 -m1,t1=v1,t2=v2 f1=3i 3 -m1,t1=v1,t2=v2 f1=4i 4` - if got != want { - t.Errorf("Points.String() %v| \n want \n%v", got, want) - } } func TestParsePointWithTags(t *testing.T) { - testParsePoints(t, - "cpu,host=serverA,region=us-east value=1.0 1000000000", "mm", - NewTestPoint("mm", - models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "value", "host": "serverA", "region": "us-east"}), + test(t, + "cpu,host=serverA,region=us-east value=1.0 1000000000", + NewTestPoint("cpu", + models.NewTags(map[string]string{"host": "serverA", "region": "us-east"}), models.Fields{"value": 1.0}, time.Unix(1, 0))) } @@ -1381,7 +1248,7 @@ func TestParsePointWithDuplicateTags(t *testing.T) { err: `unable to parse 'cpu,b=2,c=3,b=1 value=1i 1000000000': duplicate tags`, }, } { - _, err := models.ParsePointsString(tt.line, "mm") + _, err := models.ParsePointsString(tt.line) if err == nil || tt.err != err.Error() { t.Errorf("%d. ParsePoint() expected error '%s'. 
got '%s'", i, tt.err, err) } @@ -1389,49 +1256,25 @@ func TestParsePointWithDuplicateTags(t *testing.T) { } func TestParsePointWithStringField(t *testing.T) { - testParsePoints(t, `cpu,host=serverA,region=us-east value=1.0,str="foo",str2="bar" 1000000000`, "mm", - NewTestPoint("mm", + test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo",str2="bar" 1000000000`, + NewTestPoint("cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "value": 1.0, - }, - time.Unix(1, 0)), - NewTestPoint("mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str", - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str": "foo", - }, - time.Unix(1, 0)), - NewTestPoint("mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str2", - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str2": "bar", + "str": "foo", + "str2": "bar", }, time.Unix(1, 0)), ) - testParsePoints(t, `cpu,host=serverA,region=us-east str="foo \" bar" 1000000000`, "mm", - NewTestPoint("mm", + test(t, `cpu,host=serverA,region=us-east str="foo \" bar" 1000000000`, + NewTestPoint("cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "str": `foo " bar`, @@ -1442,58 +1285,32 @@ func TestParsePointWithStringField(t *testing.T) { } func TestParsePointWithStringWithSpaces(t *testing.T) { - testParsePoints(t, `cpu,host=serverA,region=us-east value=1.0,str="foo bar" 1000000000`, "mm", + test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo bar" 1000000000`, NewTestPoint( - "mm", + "cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "value": 1.0, - }, - time.Unix(1, 0)), - NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str", - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str": "foo bar", // spaces in string value + "str": "foo bar", // spaces in string value }, time.Unix(1, 0)), ) } func TestParsePointWithStringWithNewline(t *testing.T) { - testParsePoints(t, "cpu,host=serverA,region=us-east value=1.0,str=\"foo\nbar\" 1000000000", "mm", + test(t, "cpu,host=serverA,region=us-east value=1.0,str=\"foo\nbar\" 1000000000", NewTestPoint( - "mm", + "cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "value": 1.0, - }, - time.Unix(1, 0)), - NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str", - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str": "foo\nbar", // newline in string value + "str": "foo\nbar", // newline in string value }, time.Unix(1, 0)), ) @@ -1501,84 +1318,45 @@ func TestParsePointWithStringWithNewline(t *testing.T) { func TestParsePointWithStringWithCommas(t *testing.T) { // escaped comma - testParsePoints(t, `cpu,host=serverA,region=us-east value=1.0,str="foo\,bar" 1000000000`, 
"mm", + test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo\,bar" 1000000000`, NewTestPoint( - "mm", + "cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "value": 1.0, - }, - time.Unix(1, 0)), - NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str", - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str": `foo\,bar`, // commas in string value + "str": `foo\,bar`, // commas in string value }, time.Unix(1, 0)), ) // non-escaped comma - testParsePoints(t, `cpu,host=serverA,region=us-east value=1.0,str="foo,bar" 1000000000`, "mm", + test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo,bar" 1000000000`, NewTestPoint( - "mm", + "cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "value": 1.0, - }, - time.Unix(1, 0)), - NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str", - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str": "foo,bar", // commas in string value + "str": "foo,bar", // commas in string value }, time.Unix(1, 0)), ) // string w/ trailing escape chars - testParsePoints(t, `cpu,host=serverA,region=us-east str="foo\\",str2="bar" 1000000000`, "mm", + test(t, `cpu,host=serverA,region=us-east str="foo\\",str2="bar" 1000000000`, NewTestPoint( - "mm", + "cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str", - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str": "foo\\", // trailing escape char - }, - time.Unix(1, 0)), - NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "str2", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ + "str": "foo\\", // trailing escape char "str2": "bar", }, time.Unix(1, 0)), @@ -1587,14 +1365,12 @@ func TestParsePointWithStringWithCommas(t *testing.T) { func TestParsePointQuotedMeasurement(t *testing.T) { // non-escaped comma - testParsePoints(t, `"cpu",host=serverA,region=us-east value=1.0 1000000000`, "mm", + test(t, `"cpu",host=serverA,region=us-east value=1.0 1000000000`, NewTestPoint( - `mm`, + `"cpu"`, models.NewTags(map[string]string{ - models.MeasurementTagKey: `"cpu"`, - models.FieldKeyTagKey: "value", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "value": 1.0, @@ -1604,15 +1380,13 @@ func TestParsePointQuotedMeasurement(t *testing.T) { } func TestParsePointQuotedTags(t *testing.T) { - testParsePoints(t, `cpu,"host"="serverA",region=us-east value=1.0 1000000000`, "mm", + test(t, `cpu,"host"="serverA",region=us-east value=1.0 1000000000`, NewTestPoint( - "mm", - models.Tags{ - {Key: []byte(models.MeasurementTagKey), Value: []byte("cpu")}, - {Key: []byte(`"host"`), Value: []byte(`"serverA"`)}, - {Key: []byte("region"), Value: []byte("us-east")}, - {Key: []byte(models.FieldKeyTagKey), Value: []byte("value")}, - }, + "cpu", + models.NewTags(map[string]string{ + `"host"`: `"serverA"`, + "region": "us-east", + }), models.Fields{ "value": 1.0, }, @@ -1621,7 +1395,7 @@ func 
TestParsePointQuotedTags(t *testing.T) {
}

func TestParsePoint_TrailingSlash(t *testing.T) {
- _, err := models.ParsePointsString(`a v=1 0\`, "mm")
+ _, err := models.ParsePointsString(`a v=1 0\`)
if err == nil {
t.Fatalf("ParsePoints expected error, got nil")
} else if !strings.Contains(err.Error(), "bad timestamp") {
@@ -1630,7 +1404,7 @@ func TestParsePoint_TrailingSlash(t *testing.T) {
}

func TestParsePointsUnbalancedQuotedTags(t *testing.T) {
- pts, err := models.ParsePointsString("baz,mytag=\"a x=1 1441103862125\nbaz,mytag=a z=1 1441103862126", "mm")
+ pts, err := models.ParsePointsString("baz,mytag=\"a x=1 1441103862125\nbaz,mytag=a z=1 1441103862126")
if err != nil {
t.Fatalf("ParsePoints failed: %v", err)
}
@@ -1640,7 +1414,7 @@ func TestParsePointsUnbalancedQuotedTags(t *testing.T) {
}

// Expected " in the tag value
- exp := models.MustNewPoint("mm", models.NewTags(map[string]string{models.FieldKeyTagKey: "x", models.MeasurementTagKey: "baz", "mytag": `"a`}),
+ exp := models.MustNewPoint("baz", models.NewTags(map[string]string{"mytag": `"a`}),
models.Fields{"x": float64(1)}, time.Unix(0, 1441103862125))

if pts[0].String() != exp.String() {
@@ -1648,24 +1422,23 @@ func TestParsePointsUnbalancedQuotedTags(t *testing.T) {
}

// Expected two points to ensure we did not overscan the line
- exp = models.MustNewPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "baz", models.FieldKeyTagKey: "z", "mytag": `a`}),
+ exp = models.MustNewPoint("baz", models.NewTags(map[string]string{"mytag": `a`}),
models.Fields{"z": float64(1)}, time.Unix(0, 1441103862126))

if pts[1].String() != exp.String() {
t.Errorf("Point mismatch:\ngot: %v\nexp: %v", pts[1].String(), exp.String())
}
+
}

func TestParsePointEscapedStringsAndCommas(t *testing.T) {
// non-escaped comma and quotes
- testParsePoints(t, `cpu,host=serverA,region=us-east value="{Hello\"{,}\" World}" 1000000000`, "mm",
+ test(t, `cpu,host=serverA,region=us-east value="{Hello\"{,}\" World}" 1000000000`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "value",
- "host": "serverA",
- "region": "us-east",
+ "host": "serverA",
+ "region": "us-east",
}),
models.Fields{
"value": `{Hello"{,}" World}`,
@@ -1674,14 +1447,12 @@ func TestParsePointEscapedStringsAndCommas(t *testing.T) {
)

// escaped comma and quotes
- testParsePoints(t, `cpu,host=serverA,region=us-east value="{Hello\"{\,}\" World}" 1000000000`, "mm",
+ test(t, `cpu,host=serverA,region=us-east value="{Hello\"{\,}\" World}" 1000000000`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "value",
- "host": "serverA",
- "region": "us-east",
+ "host": "serverA",
+ "region": "us-east",
}),
models.Fields{
"value": `{Hello"{\,}" World}`,
@@ -1691,81 +1462,56 @@ func TestParsePointEscapedStringsAndCommas(t *testing.T) {
}

func TestParsePointWithStringWithEquals(t *testing.T) {
- testParsePoints(t, `cpu,host=serverA,region=us-east str="foo=bar",value=1.0 1000000000`, "mm",
+ test(t, `cpu,host=serverA,region=us-east str="foo=bar",value=1.0 1000000000`,
NewTestPoint(
- "mm",
+ "cpu",
models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "str",
- "host": "serverA",
- "region": "us-east",
- }),
- models.Fields{
- "str": "foo=bar", // equals in string value
- },
- time.Unix(1, 0)),
- NewTestPoint(
- "mm",
- models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "value",
- "host": "serverA",
- "region": "us-east",
+ "host": "serverA",
+ "region": "us-east",
}),
models.Fields{
"value": 1.0,
+ "str": "foo=bar", // equals in string value
},
time.Unix(1, 0)),
)
}

func TestParsePointWithStringWithBackslash(t *testing.T) {
- testParsePoints(t, `cpu value="test\\\"" 1000000000`, "mm",
+ test(t, `cpu value="test\\\"" 1000000000`,
NewTestPoint(
- "mm",
- models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "value",
- }),
+ "cpu",
+ models.NewTags(map[string]string{}),
models.Fields{
"value": `test\"`,
},
time.Unix(1, 0)),
)

- testParsePoints(t, `cpu value="test\\" 1000000000`, "mm",
+ test(t, `cpu value="test\\" 1000000000`,
NewTestPoint(
- "mm",
- models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "value",
- }),
+ "cpu",
+ models.NewTags(map[string]string{}),
models.Fields{
"value": `test\`,
},
time.Unix(1, 0)),
)

- testParsePoints(t, `cpu value="test\\\"" 1000000000`, "mm",
+ test(t, `cpu value="test\\\"" 1000000000`,
NewTestPoint(
- "mm",
- models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "value",
- }),
+ "cpu",
+ models.NewTags(map[string]string{}),
models.Fields{
"value": `test\"`,
},
time.Unix(1, 0)),
)

- testParsePoints(t, `cpu value="test\"" 1000000000`, "mm",
+ test(t, `cpu value="test\"" 1000000000`,
NewTestPoint(
- "mm",
- models.NewTags(map[string]string{
- models.MeasurementTagKey: "cpu",
- models.FieldKeyTagKey: "value",
- }),
+ "cpu",
+ models.NewTags(map[string]string{}),
models.Fields{
"value": `test"`,
},
@@ -1774,29 +1520,36 @@ func TestParsePointWithStringWithBackslash(t *testing.T) {
}

func TestParsePointWithBoolField(t *testing.T) {
- testParsePoints(t, `cpu,host=serverA,region=us-east true=true,t=t,T=T,TRUE=TRUE,True=True,false=false,f=f,F=F,FALSE=FALSE,False=False 1000000000`, "mm",
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "true", "host": "serverA", "region": "us-east"}), models.Fields{"true": true}, time.Unix(1, 0)),
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "t", "host": "serverA", "region": "us-east"}), models.Fields{"t": true}, time.Unix(1, 0)),
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "T", "host": "serverA", "region": "us-east"}), models.Fields{"T": true}, time.Unix(1, 0)),
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "TRUE", "host": "serverA", "region": "us-east"}), models.Fields{"TRUE": true}, time.Unix(1, 0)),
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "True", "host": "serverA", "region": "us-east"}), models.Fields{"True": true}, time.Unix(1, 0)),
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "false", "host": "serverA", "region": "us-east"}), models.Fields{"false": false}, time.Unix(1, 0)),
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "f", "host": "serverA", "region": "us-east"}), models.Fields{"f": false}, time.Unix(1, 0)),
- NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "F", "host": "serverA", "region": "us-east"}), models.Fields{"F": false}, time.Unix(1, 0)),
-
NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "FALSE", "host": "serverA", "region": "us-east"}), models.Fields{"FALSE": false}, time.Unix(1, 0)), - NewTestPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "False", "host": "serverA", "region": "us-east"}), models.Fields{"False": false}, time.Unix(1, 0)), + test(t, `cpu,host=serverA,region=us-east true=true,t=t,T=T,TRUE=TRUE,True=True,false=false,f=f,F=F,FALSE=FALSE,False=False 1000000000`, + NewTestPoint( + "cpu", + models.NewTags(map[string]string{ + "host": "serverA", + "region": "us-east", + }), + models.Fields{ + "t": true, + "T": true, + "true": true, + "True": true, + "TRUE": true, + "f": false, + "F": false, + "false": false, + "False": false, + "FALSE": false, + }, + time.Unix(1, 0)), ) } func TestParsePointUnicodeString(t *testing.T) { - testParsePoints(t, `cpu,host=serverA,region=us-east value="wè" 1000000000`, "mm", + test(t, `cpu,host=serverA,region=us-east value="wè" 1000000000`, NewTestPoint( - "mm", + "cpu", models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - "host": "serverA", - "region": "us-east", + "host": "serverA", + "region": "us-east", }), models.Fields{ "value": "wè", @@ -1806,13 +1559,10 @@ func TestParsePointUnicodeString(t *testing.T) { } func TestParsePointNegativeTimestamp(t *testing.T) { - testParsePoints(t, `cpu value=1 -1`, "mm", + test(t, `cpu value=1 -1`, NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": 1.0, }, @@ -1821,13 +1571,10 @@ func TestParsePointNegativeTimestamp(t *testing.T) { } func TestParsePointMaxTimestamp(t *testing.T) { - testParsePoints(t, fmt.Sprintf(`cpu value=1 %d`, models.MaxNanoTime), "mm", + test(t, fmt.Sprintf(`cpu value=1 %d`, models.MaxNanoTime), NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": 1.0, }, @@ -1836,13 +1583,10 @@ func TestParsePointMaxTimestamp(t *testing.T) { } func TestParsePointMinTimestamp(t *testing.T) { - testParsePoints(t, `cpu value=1 -9223372036854775806`, "mm", + test(t, `cpu value=1 -9223372036854775806`, NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": 1.0, }, @@ -1862,7 +1606,7 @@ func TestParsePointInvalidTimestamp(t *testing.T) { } for i, example := range examples { - _, err := models.ParsePointsString(example, "mm") + _, err := models.ParsePointsString(example) if err == nil { t.Fatalf("[Example %d] ParsePoints failed: %v", i, err) } @@ -1870,13 +1614,10 @@ func TestParsePointInvalidTimestamp(t *testing.T) { } func TestNewPointFloatWithoutDecimal(t *testing.T) { - testParsePoints(t, `cpu value=1 1000000000`, "mm", + test(t, `cpu value=1 1000000000`, NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": 1.0, }, @@ -1884,13 +1625,10 @@ func TestNewPointFloatWithoutDecimal(t *testing.T) { ) } func TestNewPointNegativeFloat(t *testing.T) { - testParsePoints(t, `cpu value=-0.64 
1000000000`, "mm", + test(t, `cpu value=-0.64 1000000000`, NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": -0.64, }, @@ -1899,13 +1637,10 @@ func TestNewPointNegativeFloat(t *testing.T) { } func TestNewPointFloatNoDecimal(t *testing.T) { - testParsePoints(t, `cpu value=1. 1000000000`, "mm", + test(t, `cpu value=1. 1000000000`, NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": 1.0, }, @@ -1914,13 +1649,10 @@ func TestNewPointFloatNoDecimal(t *testing.T) { } func TestNewPointFloatScientific(t *testing.T) { - testParsePoints(t, `cpu value=6.632243e+06 1000000000`, "mm", + test(t, `cpu value=6.632243e+06 1000000000`, NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": float64(6632243), }, @@ -1929,13 +1661,10 @@ func TestNewPointFloatScientific(t *testing.T) { } func TestNewPointLargeInteger(t *testing.T) { - testParsePoints(t, `cpu value=6632243i 1000000000`, "mm", + test(t, `cpu value=6632243i 1000000000`, NewTestPoint( - "mm", - models.NewTags(map[string]string{ - models.MeasurementTagKey: "cpu", - models.FieldKeyTagKey: "value", - }), + "cpu", + models.NewTags(map[string]string{}), models.Fields{ "value": int64(6632243), // if incorrectly encoded as a float, it would show up as 6.632243e+06 }, @@ -1944,17 +1673,17 @@ func TestNewPointLargeInteger(t *testing.T) { } func TestParsePointNaN(t *testing.T) { - _, err := models.ParsePointsString("cpu value=NaN 1000000000", "mm") + _, err := models.ParsePointsString("cpu value=NaN 1000000000") if err == nil { t.Fatalf("ParsePoints expected error, got nil") } - _, err = models.ParsePointsString("cpu value=nAn 1000000000", "mm") + _, err = models.ParsePointsString("cpu value=nAn 1000000000") if err == nil { t.Fatalf("ParsePoints expected error, got nil") } - _, err = models.ParsePointsString("cpu value=NaN", "mm") + _, err = models.ParsePointsString("cpu value=NaN") if err == nil { t.Fatalf("ParsePoints expected error, got nil") } @@ -1966,47 +1695,18 @@ func TestNewPointLargeNumberOfTags(t *testing.T) { tags += fmt.Sprintf(",tag%d=value%d", i, i) } - pt, err := models.ParsePointsString(fmt.Sprintf("cpu%s value=1", tags), "mm") + pt, err := models.ParsePointsString(fmt.Sprintf("cpu%s value=1", tags)) if err != nil { t.Fatalf("ParsePoints() with max tags failed: %v", err) } - if len(pt[0].Tags()) != 257 { // add two for _m & _f + if len(pt[0].Tags()) != 255 { t.Fatalf("expected %d tags, got %d", 255, len(pt[0].Tags())) } } func TestParsePointIntsFloats(t *testing.T) { - pts, err := models.ParsePoints([]byte(`cpu,host=serverA,region=us-east int=10i,float=11.0,float2=12.1 1000000000`), []byte("mm")) - if err != nil { - t.Fatalf(`ParsePoints() failed. 
got %s`, err) - } - - if exp := 3; len(pts) != exp { - t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp) - } - - if fields, err := pts[0].Fields(); err != nil { - t.Fatal(err) - } else if _, ok := fields["int"].(int64); !ok { - t.Errorf("ParsePoint() int field mismatch: got %T, exp %T", fields["int"], int64(10)) - } - - if fields, err := pts[1].Fields(); err != nil { - t.Fatal(err) - } else if _, ok := fields["float"].(float64); !ok { - t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(11.0)) - } - - if fields, err := pts[2].Fields(); err != nil { - t.Fatal(err) - } else if _, ok := fields["float2"].(float64); !ok { - t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(12.1)) - } -} - -func TestParsePointKeyUnsorted(t *testing.T) { - pts, err := models.ParsePoints([]byte("cpu,last=1,first=2 value=1i"), []byte("mm")) + pts, err := models.ParsePoints([]byte(`cpu,host=serverA,region=us-east int=10i,float=11.0,float2=12.1 1000000000`)) if err != nil { t.Fatalf(`ParsePoints() failed. got %s`, err) } @@ -2016,61 +1716,62 @@ func TestParsePointKeyUnsorted(t *testing.T) { } pt := pts[0] - if exp := "cpu,first=2,last=1"; string(pt.Key()) != "mm,\x00=cpu,first=2,last=1,\xff=value" { + fields, err := pt.Fields() + if err != nil { + t.Fatal(err) + } + if _, ok := fields["int"].(int64); !ok { + t.Errorf("ParsePoint() int field mismatch: got %T, exp %T", fields["int"], int64(10)) + } + + if _, ok := fields["float"].(float64); !ok { + t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(11.0)) + } + + if _, ok := fields["float2"].(float64); !ok { + t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(12.1)) + } +} + +func TestParsePointKeyUnsorted(t *testing.T) { + pts, err := models.ParsePoints([]byte("cpu,last=1,first=2 value=1i")) + if err != nil { + t.Fatalf(`ParsePoints() failed. got %s`, err) + } + + if exp := 1; len(pts) != exp { + t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp) + } + pt := pts[0] + + if exp := "cpu,first=2,last=1"; string(pt.Key()) != exp { t.Errorf("ParsePoint key not sorted. 
got %v, exp %v", string(pt.Key()), exp) } } func TestParsePointToString(t *testing.T) { - for i, tt := range []struct { - line string - exp string - pt models.Point - }{ - { - line: `cpu,host=serverA,region=us-east bool=false 1000000000`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=bool bool=false 1000000000", - pt: models.MustNewPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "bool", "host": "serverA", "region": "us-east"}), models.Fields{"bool": false}, time.Unix(1, 0)), - }, - { - line: `cpu,host=serverA,region=us-east float=11 1000000000`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=float float=11 1000000000", - pt: models.MustNewPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "float", "host": "serverA", "region": "us-east"}), models.Fields{"float": float64(11.0)}, time.Unix(1, 0)), - }, - { - line: `cpu,host=serverA,region=us-east float2=12.123 1000000000`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=float2 float2=12.123 1000000000", - pt: models.MustNewPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "float2", "host": "serverA", "region": "us-east"}), models.Fields{"float2": float64(12.123)}, time.Unix(1, 0)), - }, - { - line: `cpu,host=serverA,region=us-east int=10i 1000000000`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=int int=10i 1000000000", - pt: models.MustNewPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "int", "host": "serverA", "region": "us-east"}), models.Fields{"int": 10}, time.Unix(1, 0)), - }, - { - line: `cpu,host=serverA,region=us-east str="string val" 1000000000`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=str str=\"string val\" 1000000000", - pt: models.MustNewPoint("mm", models.NewTags(map[string]string{models.MeasurementTagKey: "cpu", models.FieldKeyTagKey: "str", "host": "serverA", "region": "us-east"}), models.Fields{"str": "string val"}, time.Unix(1, 0)), - }, - } { - pts, err := models.ParsePoints([]byte(tt.line), []byte("mm")) - if err != nil { - t.Fatalf(`%d. ParsePoints() failed. got %s`, i, err) - } - if exp := 1; len(pts) != exp { - t.Errorf("%d. ParsePoint() len mismatch: got %v, exp %v", i, len(pts), exp) - } - pt := pts[0] + line := `cpu,host=serverA,region=us-east bool=false,float=11,float2=12.123,int=10i,str="string val" 1000000000` + pts, err := models.ParsePoints([]byte(line)) + if err != nil { + t.Fatalf(`ParsePoints() failed. got %s`, err) + } + if exp := 1; len(pts) != exp { + t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp) + } + pt := pts[0] - got := pt.String() - if tt.exp != got { - t.Errorf("%d. ParsePoint() to string mismatch:\n got %v\n exp %v", i, got, tt.exp) - } + got := pt.String() + if line != got { + t.Errorf("ParsePoint() to string mismatch:\n got %v\n exp %v", got, line) + } - got = tt.pt.String() - if tt.exp != got { - t.Errorf("%d. 
NewPoint() to string mismatch:\n got %v\n exp %v", i, got, tt.exp) - } + pt = models.MustNewPoint("cpu", models.NewTags(map[string]string{"host": "serverA", "region": "us-east"}), + models.Fields{"int": 10, "float": float64(11.0), "float2": float64(12.123), "bool": false, "str": "string val"}, + time.Unix(1, 0)) + + got = pt.String() + if line != got { + t.Errorf("NewPoint() to string mismatch:\n got %v\n exp %v", got, line) } } @@ -2085,35 +1786,35 @@ func TestParsePointsWithPrecision(t *testing.T) { name: "nanosecond by default", line: `cpu,host=serverA,region=us-east value=1.0 946730096789012345`, precision: "", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", }, { name: "nanosecond", line: `cpu,host=serverA,region=us-east value=1.0 946730096789012345`, - precision: "ns", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + precision: "n", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", }, { name: "microsecond", line: `cpu,host=serverA,region=us-east value=1.0 946730096789012`, precision: "us", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012000", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012000", }, { name: "millisecond", line: `cpu,host=serverA,region=us-east value=1.0 946730096789`, precision: "ms", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789000000", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789000000", }, { name: "second", line: `cpu,host=serverA,region=us-east value=1.0 946730096`, precision: "s", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096000000000", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096000000000", }, } for _, test := range tests { - pts, err := models.ParsePointsWithPrecision([]byte(test.line), []byte("mm"), time.Now().UTC(), test.precision) + pts, err := models.ParsePointsWithPrecision([]byte(test.line), time.Now().UTC(), test.precision) if err != nil { t.Fatalf(`%s: ParsePoints() failed. 
got %s`, test.name, err) } @@ -2140,32 +1841,32 @@ func TestParsePointsWithPrecisionNoTime(t *testing.T) { { name: "no precision", precision: "", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", }, { name: "nanosecond precision", - precision: "ns", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + precision: "n", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", }, { name: "microsecond precision", precision: "us", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012000", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012000", }, { name: "millisecond precision", precision: "ms", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789000000", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789000000", }, { name: "second precision", precision: "s", - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096000000000", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096000000000", }, } for _, test := range tests { - pts, err := models.ParsePointsWithPrecision([]byte(line), []byte("mm"), tm, test.precision) + pts, err := models.ParsePointsWithPrecision([]byte(line), tm, test.precision) if err != nil { t.Fatalf(`%s: ParsePoints() failed. got %s`, test.name, err) } @@ -2191,33 +1892,33 @@ func TestParsePointsWithPrecisionComments(t *testing.T) { { name: "comment only", batch: `# comment only`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", lenPoints: 0, }, { name: "point with comment above", batch: `# a point is below cpu,host=serverA,region=us-east value=1.0 946730096789012345`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", lenPoints: 1, }, { name: "point with comment below", batch: `cpu,host=serverA,region=us-east value=1.0 946730096789012345 # end of points`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", lenPoints: 1, }, { name: "indented comment", batch: ` # a point is below cpu,host=serverA,region=us-east value=1.0 946730096789012345`, - exp: "mm,\x00=cpu,host=serverA,region=us-east,\xff=value value=1.0 946730096789012345", + exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", lenPoints: 1, }, } for _, test := range tests { - pts, err := models.ParsePointsWithPrecision([]byte(test.batch), []byte("mm"), time.Now().UTC(), "") + pts, err := models.ParsePointsWithPrecision([]byte(test.batch), time.Now().UTC(), "") if err != nil { t.Fatalf(`%s: ParsePoints() failed. 
got %s`, test.name, err) } @@ -2385,6 +2086,16 @@ func TestRoundedString(t *testing.T) { precision: time.Second, exp: "cpu value=1 946730097000000000", }, + { + name: "minute precision", + precision: time.Minute, + exp: "cpu value=1 946730100000000000", + }, + { + name: "hour precision", + precision: time.Hour, + exp: "cpu value=1 946731600000000000", + }, } for _, test := range tests { @@ -2401,24 +2112,29 @@ func TestRoundedString(t *testing.T) { func TestParsePointsStringWithExtraBuffer(t *testing.T) { b := make([]byte, 70*5000) buf := bytes.NewBuffer(b) - buf.WriteString(fmt.Sprintf("%s value=%.3f 1\n", "cpu,host=A,region=uswest", rand.Float64())) + key := "cpu,host=A,region=uswest" + buf.WriteString(fmt.Sprintf("%s value=%.3f 1\n", key, rand.Float64())) - points, err := models.ParsePointsString(buf.String(), "mm") + points, err := models.ParsePointsString(buf.String()) if err != nil { t.Fatalf("failed to write points: %s", err.Error()) } pointKey := string(points[0].Key()) - exp := "mm,\x00=cpu,host=A,region=uswest,\xff=value" - if exp != pointKey { - t.Fatalf("unexpected key: got %s, exp %s", pointKey, exp) + + if len(key) != len(pointKey) { + t.Fatalf("expected length of both keys are same but got %d and %d", len(key), len(pointKey)) + } + + if key != pointKey { + t.Fatalf("expected both keys are same but got %s and %s", key, pointKey) } } func TestParsePointsQuotesInFieldKey(t *testing.T) { buf := `cpu "a=1 cpu value=2 1` - points, err := models.ParsePointsString(buf, "mm") + points, err := models.ParsePointsString(buf) if err != nil { t.Fatalf("failed to write points: %s", err.Error()) } @@ -2438,7 +2154,7 @@ cpu value=2 1` // The following input should not parse buf = `cpu "\, '= "\ v=1.0` - _, err = models.ParsePointsString(buf, "mm") + _, err = models.ParsePointsString(buf) if err == nil { t.Fatalf("expected parsing failure but got no error") } @@ -2447,21 +2163,7 @@ cpu value=2 1` func TestParsePointsQuotesInTags(t *testing.T) { buf := `t159,label=hey\ "ya a=1i,value=0i t159,label=another a=2i,value=1i 1` - points, err := models.ParsePointsString(buf, "mm") - if err != nil { - t.Fatalf("failed to write points: %s", err.Error()) - } - - if len(points) != 4 { - t.Fatalf("expected 4 points, got %d", len(points)) - } -} - -func TestParsePointsBlankLine(t *testing.T) { - buf := `cpu value=1i 1000000000 - -cpu value=2i 2000000000` - points, err := models.ParsePointsString(buf, "mm") + points, err := models.ParsePointsString(buf) if err != nil { t.Fatalf("failed to write points: %s", err.Error()) } @@ -2471,124 +2173,17 @@ cpu value=2i 2000000000` } } -func mustReadTestData(tb testing.TB, name string, repeat int) []byte { - tb.Helper() - filename := filepath.Join("testdata", name) - d, err := ioutil.ReadFile(filename) +func TestParsePointsBlankLine(t *testing.T) { + buf := `cpu value=1i 1000000000 + +cpu value=2i 2000000000` + points, err := models.ParsePointsString(buf) if err != nil { - tb.Fatalf("error reading file %q: %v", filename, err) - } - var buf []byte - for i := 0; i < repeat; i++ { - buf = append(buf, d...) - } - return buf -} - -func TestParsePointsWithOptions(t *testing.T) { - readGood := func(tb testing.TB) []byte { - return mustReadTestData(tb, "line-protocol.txt", 1) + t.Fatalf("failed to write points: %s", err.Error()) } - readBad := func(tb testing.TB) []byte { - buf := mustReadTestData(tb, "line-protocol.txt", 1) - buf = append(buf, "cpu,foo=bar data=1.3i 100000\n"...) - return append(buf, bytes.Repeat([]byte("foo foo foo"), 100000)...) 
- } - - tests := []struct { - name string - read func(testing.TB) []byte - opts []models.ParserOption - exp error - }{ - { - name: "lines are limited", - read: readGood, - opts: []models.ParserOption{models.WithParserMaxLines(10)}, - exp: models.ErrLimitMaxLinesExceeded, - }, - { - name: "lines are not limited with large value", - read: readGood, - opts: []models.ParserOption{models.WithParserMaxLines(1000)}, - exp: nil, - }, - { - name: "lines are not limited", - read: readGood, - opts: []models.ParserOption{}, - exp: nil, - }, - - { - name: "values are limited", - read: readGood, - opts: []models.ParserOption{models.WithParserMaxValues(10)}, - exp: models.ErrLimitMaxValuesExceeded, - }, - { - name: "values are not limited with large value", - read: readGood, - opts: []models.ParserOption{models.WithParserMaxValues(1000)}, - exp: nil, - }, - { - name: "values are not limited", - read: readGood, - opts: []models.ParserOption{}, - exp: nil, - }, - - { - name: "bytes are limited allocating slice", - read: readGood, - opts: []models.ParserOption{models.WithParserMaxBytes(10)}, - exp: models.ErrLimitMaxBytesExceeded, - }, - { - name: "bytes are limited whilst parsing", - read: readGood, - opts: []models.ParserOption{models.WithParserMaxBytes(10000)}, - exp: models.ErrLimitMaxBytesExceeded, - }, - { - name: "bytes are not limited with large value", - read: readGood, - opts: []models.ParserOption{models.WithParserMaxBytes(500000)}, - exp: nil, - }, - { - name: "bytes are limited appending large error", - read: readBad, - opts: []models.ParserOption{models.WithParserMaxBytes(500000)}, - exp: models.ErrLimitMaxBytesExceeded, - }, - { - name: "bytes are not limited", - read: readGood, - opts: []models.ParserOption{}, - exp: nil, - }, - } - - cmpopt := cmp.Transformer("error", func(e error) string { - return e.Error() - }) - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - buf := test.read(t) - encoded := EncodeName(ID(1000), ID(2000)) - mm := models.EscapeMeasurement(encoded[:]) - - var stats models.ParserStats - opts := append(test.opts, models.WithParserStats(&stats)) - _, got := models.ParsePointsWithOptions(buf, mm, opts...) - if !cmp.Equal(got, test.exp, cmpopt) { - t.Errorf("unexpected error; -got/+exp\n%s", cmp.Diff(got, test.exp, cmpopt)) - } - }) + if len(points) != 2 { + t.Fatalf("expected 2 points, got %d", len(points)) } } @@ -2614,61 +2209,79 @@ func TestNewPointsRejectsEmptyFieldNames(t *testing.T) { } func TestNewPointsRejectsMaxKey(t *testing.T) { - name := "mm" - key := strings.Repeat("a", models.MaxKeyLength-len("mm,\xff=value,\x00=")-len("#!~#value")) + var key string + // tsm field key is point key, separator (4 bytes) and field + for i := 0; i < models.MaxKeyLength-len("value")-4; i++ { + key += "a" + } // Test max key len - if _, err := models.NewPoint(name, models.NewTags(map[string]string{models.FieldKeyTagKey: "value", models.MeasurementTagKey: key}), models.Fields{"value": 1}, time.Now()); err != nil { + if _, err := models.NewPoint(key, nil, models.Fields{"value": 1, "ok": 2.0}, time.Now()); err != nil { t.Fatalf("new point with max key. got: %v, expected: nil", err) } - if _, err := models.ParsePointsString(fmt.Sprintf("%v value=1", key), name); err != nil { + if _, err := models.ParsePointsString(fmt.Sprintf("%v value=1,ok=2.0", key)); err != nil { t.Fatalf("parse point with max key. 
got: %v, expected: nil", err) } // Test 1 byte over max key len key += "a" - if _, err := models.NewPoint(name, models.NewTags(map[string]string{models.FieldKeyTagKey: "value", models.MeasurementTagKey: key}), models.Fields{"value": 1}, time.Now()); err == nil { + if _, err := models.NewPoint(key, nil, models.Fields{"value": 1, "ok": 2.0}, time.Now()); err == nil { t.Fatalf("new point with max key. got: nil, expected: error") } - if _, err := models.ParsePointsString(fmt.Sprintf("%v value=1", key), name); err == nil { + if _, err := models.ParsePointsString(fmt.Sprintf("%v value=1,ok=2.0", key)); err == nil { t.Fatalf("parse point with max key. got: nil, expected: error") } } func TestPoint_FieldIterator_Simple(t *testing.T) { - p, err := models.ParsePoints([]byte(`m v=42i,f=42 36`), []byte("mm")) + + p, err := models.ParsePoints([]byte(`m v=42i,f=42 36`)) if err != nil { t.Fatal(err) } - if len(p) != 2 { + if len(p) != 1 { t.Fatalf("wrong number of points, got %d, exp %d", len(p), 1) } - if fi := p[0].FieldIterator(); !fi.Next() { - t.Fatal("field iterator terminated before first field of first point") - } else if fi.Type() != models.Integer { - t.Fatalf("'42i' should be an Integer, got %v", fi.Type()) - } else if iv, err := fi.IntegerValue(); err != nil { - t.Fatal(err) - } else if exp, got := int64(42), iv; exp != got { - t.Fatalf("'42i' should be %d, got %d", exp, got) - } else if fi.Next() { - t.Fatal("field iterator didn't terminate") + fi := p[0].FieldIterator() + + if !fi.Next() { + t.Fatal("field iterator terminated before first field") } - if fi := p[1].FieldIterator(); !fi.Next() { - t.Fatalf("field iterator terminated before first field of second point") - } else if fi.Type() != models.Float { - t.Fatalf("'42' should be a Float, got %v", fi.Type()) - } else if fv, err := fi.FloatValue(); err != nil { + if fi.Type() != models.Integer { + t.Fatalf("'42i' should be an Integer, got %v", fi.Type()) + } + + iv, err := fi.IntegerValue() + if err != nil { t.Fatal(err) - } else if exp, got := 42.0, fv; exp != got { + } + if exp, got := int64(42), iv; exp != got { + t.Fatalf("'42i' should be %d, got %d", exp, got) + } + + if !fi.Next() { + t.Fatalf("field iterator terminated before second field") + } + + if fi.Type() != models.Float { + t.Fatalf("'42' should be a Float, got %v", fi.Type()) + } + + fv, err := fi.FloatValue() + if err != nil { + t.Fatal(err) + } + if exp, got := 42.0, fv; exp != got { t.Fatalf("'42' should be %f, got %f", exp, got) - } else if fi.Next() { + } + + if fi.Next() { t.Fatal("field iterator didn't terminate") } } @@ -2703,6 +2316,7 @@ func toFields(fi models.FieldIterator) models.Fields { } func TestPoint_FieldIterator_FieldMap(t *testing.T) { + points, err := models.ParsePointsString(` m v=42 m v=42i @@ -2711,7 +2325,7 @@ m v=true m v="string\"with\"escapes" m v=42i,f=42,g=42.314,u=123u m a=2i,b=3i,c=true,d="stuff",e=-0.23,f=123.456 -`, "mm") +`) if err != nil { t.Fatal("failed to parse test points:", err) @@ -2750,9 +2364,9 @@ func TestEscapeStringField(t *testing.T) { } pointLine := fmt.Sprintf(`t s="%s"`, got) - testParsePoints(t, pointLine, "mm", NewTestPoint( - "mm", - models.NewTags(map[string]string{models.FieldKeyTagKey: "s", models.MeasurementTagKey: "t"}), + test(t, pointLine, NewTestPoint( + "t", + models.NewTags(nil), models.Fields{"s": c.in}, time.Unix(0, 0), )) @@ -2803,191 +2417,6 @@ func TestParseName(t *testing.T) { } } -func TestParseMeasurement(t *testing.T) { - testCases := []struct { - input string - exp string - expErr error - }{ - {input: 
"%s,\x00=value", exp: "value"}, - {input: "%s\\ q,\x00=value", exp: "value"}, - {input: "%s,\x00=v\\ alue", exp: "v alue"}, - {input: "%s,\x00=value,tag0=val0", exp: "value"}, - {input: "%s,\x00=v\\ alue,tag0=val0", exp: "v alue"}, - {input: "%s,tag0=val0", exp: "", expErr: models.ErrMeasurementTagExpected}, // missing \x00 - {input: "%s", exp: "", expErr: models.ErrMeasurementTagExpected}, // missing tags - {input: "", exp: "", expErr: models.ErrInvalidKey}, // invalid key - } - - makeName := func(s string) string { - if len(s) < 2 { - return "" - } - return s[2:] - } - - t.Run("measurement did not require escaping", func(t *testing.T) { - orgBucketEnc := tsdb.EncodeName(influxdb.ID(0xff00ff), influxdb.ID(0xff11ff)) - orgBucket := string(models.EscapeMeasurement(orgBucketEnc[:])) - for _, tc := range testCases { - t.Run(makeName(tc.input), func(t *testing.T) { - var key string - if len(tc.input) > 0 { - key = fmt.Sprintf(tc.input, orgBucket) - } - - name, err := models.ParseMeasurement([]byte(key)) - if !bytes.Equal([]byte(tc.exp), name) { - t.Errorf("%s produced measurement %s but expected %s", tc.input, string(name), tc.exp) - } - - assert.Equal(t, tc.expErr, err) - }) - } - }) - - t.Run("measurement required escaping", func(t *testing.T) { - orgBucketEnc := tsdb.EncodeName(influxdb.ID(0xff2cff), influxdb.ID(0xff20ff)) - orgBucket := string(models.EscapeMeasurement(orgBucketEnc[:])) - for _, tc := range testCases { - t.Run(makeName(tc.input), func(t *testing.T) { - var key string - if len(tc.input) > 0 { - key = fmt.Sprintf(tc.input, orgBucket) - } - - name, err := models.ParseMeasurement([]byte(key)) - if !bytes.Equal([]byte(tc.exp), name) { - t.Errorf("%s produced measurement %s but expected %s", tc.input, string(name), tc.exp) - } - - assert.Equal(t, tc.expErr, err) - }) - } - }) - -} - -func TestValidTagTokens(t *testing.T) { - testCases := []struct { - tags models.Tags - expected bool - }{ - {tags: models.NewTags(map[string]string{}), expected: true}, - {tags: models.NewTags(map[string]string{"foo": "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{"foo": "bar", "_foo": "cpu", "hello": "こんにちは", "a smile": "😂"}), expected: true}, - - // These cases have invalid keys, but since they're used for special tags (measurement and field key), they're not validated. 
- {tags: models.NewTags(map[string]string{models.MeasurementTagKey: "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{"\x00": "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{string([]byte{0}): "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{"\x00": "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{"\u0000": "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{models.FieldKeyTagKey: "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{"\xff": "bar"}), expected: true}, - {tags: models.NewTags(map[string]string{string([]byte{255}): "bar"}), expected: true}, - - // These cases all have invalid tag values - {tags: models.NewTags(map[string]string{string([]byte{0}): "\x00"}), expected: false}, - {tags: models.NewTags(map[string]string{"\x00": "\x00"}), expected: false}, - {tags: models.NewTags(map[string]string{"\u0000": "\x00"}), expected: false}, - {tags: models.NewTags(map[string]string{"\xff": "\x00"}), expected: false}, - {tags: models.NewTags(map[string]string{string([]byte{255}): "\x00"}), expected: false}, - {tags: models.NewTags(map[string]string{string([]byte{100, 200}): "bar", "_foo": "cpu"}), expected: false}, - {tags: models.NewTags(map[string]string{"good key": string([]byte{255})}), expected: false}, - } - - for i, testCase := range testCases { - if got := models.ValidTagTokens(testCase.tags); got != testCase.expected { - t.Fatalf("[example %d] got %v, expected %v for tags %s", i+1, got, testCase.expected, testCase.tags) - } - } -} - -func equalError(a, b error) bool { - return a == nil && b == nil || a != nil && b != nil && a.Error() == b.Error() -} - -func TestNewTagsKeyValues(t *testing.T) { - t.Run("sorted", func(t *testing.T) { - t.Run("no dupes", func(t *testing.T) { - got, _ := models.NewTagsKeyValuesStrings(nil, "tag0", "v0", "tag1", "v1", "tag2", "v2") - exp := models.NewTags(map[string]string{ - "tag0": "v0", - "tag1": "v1", - "tag2": "v2", - }) - if !cmp.Equal(got, exp) { - t.Errorf("unxpected; -got/+exp\n%s", cmp.Diff(got, exp)) - } - }) - - t.Run("dupes", func(t *testing.T) { - got, _ := models.NewTagsKeyValuesStrings(nil, "tag0", "v0", "tag1", "v1", "tag1", "v1", "tag2", "v2", "tag2", "v2") - exp := models.NewTags(map[string]string{ - "tag0": "v0", - "tag1": "v1", - "tag2": "v2", - }) - if !cmp.Equal(got, exp) { - t.Errorf("unxpected; -got/+exp\n%s", cmp.Diff(got, exp)) - } - }) - }) - - t.Run("unsorted", func(t *testing.T) { - t.Run("no dupes", func(t *testing.T) { - got, _ := models.NewTagsKeyValuesStrings(nil, "tag2", "v2", "tag0", "v0", "tag1", "v1") - exp := models.NewTags(map[string]string{ - "tag0": "v0", - "tag1": "v1", - "tag2": "v2", - }) - if !cmp.Equal(got, exp) { - t.Errorf("unxpected; -got/+exp\n%s", cmp.Diff(got, exp)) - } - }) - - t.Run("dupes", func(t *testing.T) { - got, _ := models.NewTagsKeyValuesStrings(nil, "tag2", "v2", "tag0", "v0", "tag1", "v1", "tag2", "v2", "tag0", "v0", "tag1", "v1") - exp := models.NewTags(map[string]string{ - "tag0": "v0", - "tag1": "v1", - "tag2": "v2", - }) - if !cmp.Equal(got, exp) { - t.Errorf("unxpected; -got/+exp\n%s", cmp.Diff(got, exp)) - } - }) - }) - - t.Run("odd number of keys", func(t *testing.T) { - got, err := models.NewTagsKeyValuesStrings(nil, "tag2", "v2", "tag0", "v0", "tag1") - - if !cmp.Equal(got, models.Tags(nil)) { - t.Errorf("expected nil") - } - - if !cmp.Equal(err, models.ErrInvalidKevValuePairs, cmp.Comparer(equalError)) { - t.Errorf("expected ErrInvalidKevValuePairs, got: %v", err) - } - }) 
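	// Editor's note (illustrative sketch, not part of the patch): with these
	// v2-era tag-encoding tests dropped, the construction path the remaining
	// tests rely on builds tags from a plain map and attaches them at point
	// creation. A minimal example, assuming the post-patch models package:
	//
	//	tags := models.NewTags(map[string]string{"host": "serverA", "region": "us-east"})
	//	pt, err := models.NewPoint("cpu", tags, models.Fields{"value": 1.0}, time.Unix(0, 0))
	//	if err == nil {
	//		fmt.Println(pt.String()) // cpu,host=serverA,region=us-east value=1 0
	//	}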
-} - -func TestTags_KeyValues(t *testing.T) { - tags := models.NewTags(map[string]string{ - "tag0": "v0", - "tag1": "v1", - "tag2": "v2", - }) - - got := tags.KeyValues(nil) - exp := [][]byte{[]byte("tag0"), []byte("v0"), []byte("tag1"), []byte("v1"), []byte("tag2"), []byte("v2")} - if !cmp.Equal(got, exp) { - t.Errorf("unexpected, -got/+exp\n%s", cmp.Diff(got, exp)) - } -} - func BenchmarkEscapeStringField_Plain(b *testing.B) { s := "nothing special" for i := 0; i < b.N; i++ { @@ -3017,33 +2446,10 @@ func BenchmarkEscapeString_QuotesAndBackslashes(b *testing.B) { } } -func BenchmarkParseKeyBytes(b *testing.B) { - buf := []byte("cpu,tag0=value0,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5") - for i := 0; i < b.N; i++ { - models.ParseKeyBytes(buf) - } -} - -func BenchmarkParseKeyBytesWithTags(b *testing.B) { - var tags models.Tags - buf := []byte("cpu,tag0=value0,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5") - for i := 0; i < b.N; i++ { - _, tags = models.ParseKeyBytesWithTags(buf, tags[:0]) - } -} - func BenchmarkParseTags(b *testing.B) { - buf := []byte("cpu,tag0=value0,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5") + tags := []byte("cpu,tag0=value0,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5") for i := 0; i < b.N; i++ { - models.ParseTags(buf) - } -} - -func BenchmarkParseTagsWithTags(b *testing.B) { - var tags models.Tags - buf := []byte("cpu,tag0=value0,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5") - for i := 0; i < b.N; i++ { - tags = models.ParseTagsWithTags(buf, tags[:0]) + models.ParseTags(tags) } } @@ -3168,73 +2574,3 @@ func BenchmarkNewTagsKeyValues(b *testing.B) { }) }) } - -func benchParseFile(b *testing.B, name string, repeat int, fn func(b *testing.B, buf []byte, mm []byte, now time.Time)) { - b.Helper() - buf := mustReadTestData(b, name, repeat) - encoded := EncodeName(ID(1000), ID(2000)) - mm := models.EscapeMeasurement(encoded[:]) - now := time.Now() - - b.ResetTimer() - b.ReportAllocs() - b.SetBytes(int64(len(buf))) - - fn(b, buf, mm, now) -} - -func BenchmarkParsePointsWithPrecision(b *testing.B) { - cases := []struct { - name string - repeat int - }{ - {"line-protocol.txt", 1}, - {"line-protocol.txt", 315}, - } - - for _, tc := range cases { - b.Run(fmt.Sprintf("%s/%d", tc.name, tc.repeat), func(b *testing.B) { - benchParseFile(b, tc.name, tc.repeat, func(b *testing.B, buf []byte, mm []byte, now time.Time) { - for i := 0; i < b.N; i++ { - pts, err := models.ParsePointsWithPrecision(buf, mm, now, "ns") - if err != nil { - b.Errorf("error parsing points: %v", err) - } - _ = pts - } - }) - }) - } -} - -func BenchmarkParsePointsWithOptions(b *testing.B) { - cases := []struct { - name string - repeat int - }{ - {"line-protocol.txt", 1}, - {"line-protocol.txt", 315}, - } - - for _, tc := range cases { - b.Run(fmt.Sprintf("%s/%d", tc.name, tc.repeat), func(b *testing.B) { - benchParseFile(b, tc.name, tc.repeat, func(b *testing.B, buf []byte, mm []byte, now time.Time) { - for i := 0; i < b.N; i++ { - pts, err := models.ParsePointsWithOptions(buf, mm) - if err != nil { - b.Errorf("error parsing points: %v", err) - } - _ = pts - } - }) - }) - } -} - -func BenchmarkValidToken(b *testing.B) { - token := []byte("Hello世界") - b.ReportAllocs() - for i := 0; i < b.N; i++ { - models.ValidToken(token) - } -} diff --git a/models/testdata/line-protocol.txt b/models/testdata/line-protocol.txt deleted file mode 100644 index da6f440d0d..0000000000 --- a/models/testdata/line-protocol.txt +++ /dev/null @@ -1,554 +0,0 
@@ -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_redis_consecutive_successes=0 1578431517778522000 -prometheus,endpoint=/api/v2/query,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=500 http_query_request_bytes=19098 1578431517778528000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_redis_total_successes=0 1578431517778535000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_frees_total=148566386293 1578431517778535000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_heap_inuse_bytes=134979584 1578431517778536000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_mspan_sys_bytes=23150592 1578431517778538000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_kafka_write_total_failures=0 1578431517778538000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_stack_inuse_bytes=5144576 1578431517778539000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_requests_total=2 1578431517778541000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_requests_total=2 1578431517778543000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_requests_total=1 1578431517778544000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_requests_total=1 1578431517778545000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_requests_total=1 1578431517778546000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_requests_total=22 1578431517778549000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_requests_total=1 1578431517778550000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=569 1578431517778553000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=569 1578431517778554000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=569 1578431517778555000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=569 1578431517778556000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=569 1578431517778557000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=569 1578431517778558000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=569 1578431517778559000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_requests_total=17080 1578431517778561000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_requests_total=34165 1578431517778562000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_requests_total=68330 1578431517778563000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_requests_total=5 1578431517778564000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_requests_total=1499492 1578431517778564000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_requests_total=143641 1578431517778565000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_count=141578,storage_producer_node_request_duration_seconds_sum=201042.39155161564 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.001,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.0015,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.0022500000000000003,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.0033750000000000004,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.005062500000000001,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.0075937500000000015,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.011390625000000001,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000 -prometheus,env=toolsus1,hostname=host1,le=0.017085937500000002,nodename=node1,role=gateway-internal,status=error 
storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.025628906250000003,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.03844335937500001,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.057665039062500006,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.08649755859375001,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=0 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.129746337890625,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=3162 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.19461950683593752,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=12617 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.29192926025390625,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=21507 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.43789389038085935,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=30739 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.656840835571289,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=40151 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=0.9852612533569335,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=53068 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=1.4778918800354004,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=71804 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=2.2168378200531006,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=108711 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=3.325256730079651,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=140717 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=4.987885095119475,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=141414 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=7.481827642679213,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=141503 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=11.222741464018819,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=141507 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=16.83411219602823,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=141526 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,le=+Inf,nodename=node1,role=gateway-internal,status=error storage_producer_node_request_duration_seconds_bucket=141578 1578431517778567000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_count=1501555,storage_producer_node_request_duration_seconds_sum=1188856.9056605487 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.001,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.0015,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.0022500000000000003,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.0033750000000000004,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.005062500000000001,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.0075937500000000015,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.011390625000000001,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.017085937500000002,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.025628906250000003,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.03844335937500001,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.057665039062500006,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.08649755859375001,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=0 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.129746337890625,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=22662 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.19461950683593752,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=170473 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.29192926025390625,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=360340 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.43789389038085935,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=559743 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.656840835571289,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=820196 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=0.9852612533569335,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1175909 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=1.4778918800354004,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1351424 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=2.2168378200531006,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1420019 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=3.325256730079651,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1465150 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=4.987885095119475,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1493100 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=7.481827642679213,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1500952 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=11.222741464018819,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1501375 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=16.83411219602823,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1501491 1578431517778608000
-prometheus,env=toolsus1,hostname=host1,le=+Inf,nodename=node1,role=gateway-internal,status=ok storage_producer_node_request_duration_seconds_bucket=1501555 1578431517778608000
-prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=40c69bd39226fa67,role=gateway-internal,status=204 http_write_request_count=39558 1578431517778638000
-prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=40c69bd39226fa67,role=gateway-internal,status=500 http_write_request_count=9751 1578431517778639000
-prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=204 http_write_request_count=1459934 1578431517778641000
-prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=500 http_write_request_count=133890 1578431517778642000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_redis_consecutive_failures=0 1578431517778643000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_mallocs_total=148566627376 1578431517778643000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_stack_sys_bytes=5144576 1578431517778644000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_kafka_write_consecutive_successes=0 1578431517778644000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_heap_alloc_bytes=119636872 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_count=2063,storage_backup_firehose_request_duration_seconds_sum=1187.3129012100005 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.001,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=79 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.0015,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=84 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.0022500000000000003,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=86 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.0033750000000000004,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=91 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.005062500000000001,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=93 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.0075937500000000015,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=93 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.011390625000000001,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=93 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.017085937500000002,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=94 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.025628906250000003,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=95 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.03844335937500001,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=95 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.057665039062500006,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=97 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.08649755859375001,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=100 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.129746337890625,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=100 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.19461950683593752,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=100 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.29192926025390625,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=104 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.43789389038085935,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=580 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.656840835571289,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=1431 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=0.9852612533569335,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=1977 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=1.4778918800354004,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2019 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=2.2168378200531006,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2063 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=3.325256730079651,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2063 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=4.987885095119475,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2063 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=7.481827642679213,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2063 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=11.222741464018819,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2063 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=16.83411219602823,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2063 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,le=+Inf,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_duration_seconds_bucket=2063 1578431517778645000
-prometheus,env=toolsus1,hostname=host1,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_count=1499492,storage_backup_firehose_request_duration_seconds_sum=81862.25789057177 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.001,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=0 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.0015,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=0 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.0022500000000000003,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=0 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.0033750000000000004,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=0 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.005062500000000001,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=0 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.0075937500000000015,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=2 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.011390625000000001,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=14237 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.017085937500000002,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=185830 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.025628906250000003,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=411651 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.03844335937500001,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=805604 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.057665039062500006,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1140106 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.08649755859375001,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1308659 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.129746337890625,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1402601 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.19461950683593752,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1449158 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.29192926025390625,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1478115 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.43789389038085935,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1490317 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.656840835571289,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1496059 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=0.9852612533569335,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1498355 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=1.4778918800354004,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499179 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=2.2168378200531006,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499424 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=3.325256730079651,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499475 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=4.987885095119475,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499491 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=7.481827642679213,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499492 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=11.222741464018819,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499492 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=16.83411219602823,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499492 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,le=+Inf,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_duration_seconds_bucket=1499492 1578431517778682000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_goroutines=252 1578431517778716000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_alloc_bytes=119636872 1578431517778716000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_mspan_inuse_bytes=1254528 1578431517778717000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal,status=error storage_producer_node_requests_total=141578 1578431517778717000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal,status=ok storage_producer_node_requests_total=1501555 1578431517778718000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_heap_sys_bytes=1538359296 1578431517778719000 -prometheus,endpoint=/api/v2/query,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=500 http_query_request_count=5 1578431517778720000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_buck_hash_sys_bytes=4219449 1578431517778721000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_gc_sys_bytes=59664384 1578431517778721000 -prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=40c69bd39226fa67,role=gateway-internal,status=204 http_write_response_bytes=0 1578431517778723000 -prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=40c69bd39226fa67,role=gateway-internal,status=500 http_write_response_bytes=1031089 1578431517778724000 -prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=204 http_write_response_bytes=0 1578431517778725000 -prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=500 http_write_response_bytes=15460253 1578431517778727000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_gc_duration_seconds_sum=3349.391384265,go_gc_duration_seconds_count=475295 1578431517778728000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,quantile=0,role=gateway-internal go_gc_duration_seconds=0.000016941 1578431517778728000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,quantile=0.25,role=gateway-internal go_gc_duration_seconds=0.000055612 1578431517778728000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,quantile=0.5,role=gateway-internal go_gc_duration_seconds=0.000138294 1578431517778728000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,quantile=0.75,role=gateway-internal go_gc_duration_seconds=0.000994865 1578431517778728000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,quantile=1,role=gateway-internal go_gc_duration_seconds=0.076084449 1578431517778728000 -prometheus,endpoint=/api/v2/query,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=500 http_query_response_bytes=340 1578431517778735000 -prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=40c69bd39226fa67,role=gateway-internal,status=204 http_write_request_bytes=267959637 1578431517778736000 -prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=40c69bd39226fa67,role=gateway-internal,status=500 http_write_request_bytes=850922009 1578431517778737000 
-prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=204 http_write_request_bytes=536340398235 1578431517778737000 -prometheus,endpoint=/api/v2/write,env=toolsus1,hostname=host1,nodename=node1,org_id=332e4ccb1c0d7943,role=gateway-internal,status=500 http_write_request_bytes=80176295201 1578431517778739000 -prometheus,env=toolsus1,error=false,hostname=host1,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_sum=919.3827050980769,auth_prometheus_request_duration_seconds_count=1643133 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.001,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1606623 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.0015,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1613702 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.0022500000000000003,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1618370 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.0033750000000000004,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1622331 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.005062500000000001,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1626334 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.0075937500000000015,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1630070 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.011390625000000001,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1633208 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.017085937500000002,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1635693 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.025628906250000003,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1637696 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.03844335937500001,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1639178 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.057665039062500006,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1640358 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.08649755859375001,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1641226 1578431517778741000 -prometheus,env=toolsus1,error=false,hostname=host1,le=0.129746337890625,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1641864 1578431517778741000 
-prometheus,env=toolsus1,error=false,hostname=host1,le=0.19461950683593752,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1642312 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=0.29192926025390625,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1642645 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=0.43789389038085935,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1642901 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=0.656840835571289,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643045 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=0.9852612533569335,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643109 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=1.4778918800354004,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643127 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=2.2168378200531006,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643133 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=3.325256730079651,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643133 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=4.987885095119475,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643133 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=7.481827642679213,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643133 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=11.222741464018819,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643133 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=16.83411219602823,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643133 1578431517778741000
-prometheus,env=toolsus1,error=false,hostname=host1,le=+Inf,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_request_duration_seconds_bucket=1643133 1578431517778741000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_kafka_write_total_successes=0 1578431517778773000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_lookups_total=0 1578431517778775000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal,status=error storage_producer_node_values_total=793441442 1578431517778775000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal,status=ok storage_producer_node_values_total=4795395785 1578431517778776000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_mcache_inuse_bytes=27776 1578431517778776000
-prometheus,env=toolsus1,hostname=host1,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_count=2063,storage_backup_firehose_request_bytes_sum=151485142 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1.75,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=3.0625,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=5.359375,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=9.37890625,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=16.4130859375,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=28.722900390625,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=50.26507568359375,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=87.96388244628906,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=153.93679428100586,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=269.38938999176025,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=0 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=471.43143248558044,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=45 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=825.0050068497658,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=77 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1443.75876198709,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=85 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=2526.5778334774077,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=99 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=4421.5112085854635,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=102 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=7737.644615024561,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=109 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=13540.878076292982,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=264 
1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=23696.53663351272,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=801 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=41468.93910864726,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=983 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=72570.6434401327,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=1282 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=126998.62602023222,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=1577 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=222247.59553540638,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2021 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=388933.29218696116,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2026 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=680633.2613271821,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1.1911082073225686e+06,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=2.084439362814495e+06,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=3.6477688849253664e+06,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=6.383595548619391e+06,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1.1171292210083935e+07,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1.9549761367646888e+07,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=3.4212082393382054e+07,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=5.98711441884186e+07,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1.0477450232973254e+08,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=1.8335537907703194e+08,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 -prometheus,env=toolsus1,hostname=host1,le=+Inf,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_request_bytes_bucket=2063 1578431517778777000 
-prometheus,env=toolsus1,hostname=host1,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_count=1499492,storage_backup_firehose_request_bytes_sum=71314743155 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1.75,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=3.0625,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=5.359375,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=9.37890625,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=16.4130859375,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=28.722900390625,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=50.26507568359375,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=87.96388244628906,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=153.93679428100586,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=0 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=269.38938999176025,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=7 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=471.43143248558044,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=9309 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=825.0050068497658,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=16030 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1443.75876198709,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=93295 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=2526.5778334774077,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=135980 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=4421.5112085854635,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=194000 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=7737.644615024561,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=244271 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=13540.878076292982,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=416897 1578431517778821000 
-prometheus,env=toolsus1,hostname=host1,le=23696.53663351272,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=679904 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=41468.93910864726,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=851773 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=72570.6434401327,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1147159 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=126998.62602023222,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1353875 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=222247.59553540638,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1497154 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=388933.29218696116,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499466 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=680633.2613271821,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1.1911082073225686e+06,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=2.084439362814495e+06,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=3.6477688849253664e+06,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=6.383595548619391e+06,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1.1171292210083935e+07,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1.9549761367646888e+07,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=3.4212082393382054e+07,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=5.98711441884186e+07,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1.0477450232973254e+08,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=1.8335537907703194e+08,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 -prometheus,env=toolsus1,hostname=host1,le=+Inf,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_request_bytes_bucket=1499492 1578431517778821000 
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal,version=go1.12.14 go_info=1 1578431517778868000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_alloc_bytes_total=44851926126768 1578431517778869000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_heap_objects=241083 1578431517778869000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_last_gc_time_seconds=1578417663.9609165 1578431517778870000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_other_sys_bytes=6473663 1578431517778871000 -prometheus,env=toolsus1,error=false,hostname=host1,method=FindAuthorizationByToken,nodename=node1,role=gateway-internal auth_prometheus_requests_total=1643133 1578431517778872000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_kafka_write_requests=0 1578431517778872000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_mcache_sys_bytes=32768 1578431517778873000 -prometheus,env=toolsus1,hostname=host1,msg=write,nodename=node1,role=gateway-internal,status=error storage_backup_firehose_requests_total=2063 1578431517778873000 -prometheus,env=toolsus1,hostname=host1,msg=write,nodename=node1,role=gateway-internal,status=ok storage_backup_firehose_requests_total=1499492 1578431517778874000 -prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_threads=41 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_count=2,http_api_request_duration_seconds_sum=0.00017243 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/api/v2,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778875000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_sum=0.015926886,http_api_request_duration_seconds_count=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/api/v2/dashboards,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=2 1578431517778895000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_count=1,http_api_request_duration_seconds_sum=0.013298411 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/68f629c9e1766828/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778915000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_count=1,http_api_request_duration_seconds_sum=0.034666319 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome 
http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/api/v2/dashboards/3f7cfe811c58e9bc/cells/6de7b64431004afd/view,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778931000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_count=1,http_api_request_duration_seconds_sum=0.011593125 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/api/v2/labels,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517778949000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_sum=0.412263942,http_api_request_duration_seconds_count=22 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=3 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=18 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=20 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=21 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=21 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=21 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=22 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=22 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=22 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=22 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=22 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/api/v2/me,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=22 1578431517778988000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_count=1,http_api_request_duration_seconds_sum=0.000062196 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome 
http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/api/v2/setup,role=gateway-internal,status=2XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779019000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=569,http_api_request_duration_seconds_sum=62.762816807000014 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=24 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=414 1578431517779036000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=482 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=523 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=543 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=558 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=568 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/debug/pprof/allocs,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779036000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=569,http_api_request_duration_seconds_sum=6.49957022300001 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=446 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=501 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=527 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=542 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=559 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client 
http_api_request_duration_seconds_bucket=565 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=567 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/debug/pprof/block,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779055000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=569,http_api_request_duration_seconds_sum=29.609671701999982 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=300 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=414 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=479 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=501 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=523 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=546 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=551 1578431517779072000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=563 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/debug/pprof/goroutine,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779072000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=569,http_api_request_duration_seconds_sum=62.90559356000006 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=31 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=412 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=484 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=527 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=542 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=558 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client 
http_api_request_duration_seconds_bucket=567 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/debug/pprof/heap,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779092000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=569,http_api_request_duration_seconds_sum=6.235567286000004 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=444 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=504 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=538 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=544 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=558 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=562 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=568 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779108000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/debug/pprof/mutex,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779108000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=569,http_api_request_duration_seconds_sum=17129.434994090017 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=0 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/debug/pprof/profile,role=gateway-internal,status=2XX,user_agent=Go-http-client 
http_api_request_duration_seconds_bucket=569 1578431517779129000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=569,http_api_request_duration_seconds_sum=7.0344016 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=435 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=493 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=532 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=547 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=558 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=565 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=567 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=568 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/debug/pprof/threadcreate,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=569 1578431517779145000 -prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_count=17080,http_api_request_duration_seconds_sum=107.89479148400041 1578431517779163000 
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=14892 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=16117 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=16670 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=16845 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=16944 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=17049 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=17068 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=17080 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=17080 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=17080 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=17080 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/health,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=17080 1578431517779163000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_count=34165,http_api_request_duration_seconds_sum=442.01653550500083 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=20433 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=28136 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=32252 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=33182 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=33562 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=33868 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=34038 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=34141 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=34165 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=34165 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=34165 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/metrics,role=gateway-internal,status=2XX,user_agent=Go-http-client http_api_request_duration_seconds_bucket=34165 1578431517779180000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_count=68330,http_api_request_duration_seconds_sum=6.938958378999966 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68262 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68278 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68292 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68310 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68315 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68324 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68329 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68330 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68330 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68330 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68330 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=GET,nodename=node1,path=/ready,role=gateway-internal,status=2XX,user_agent=kube-probe http_api_request_duration_seconds_bucket=68330 1578431517779197000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_count=5,http_api_request_duration_seconds_sum=198.594680122 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=0 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=1 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=POST,nodename=node1,path=/api/v2/query,role=gateway-internal,status=5XX,user_agent=Chrome http_api_request_duration_seconds_bucket=5 1578431517779213000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_count=1499492,http_api_request_duration_seconds_sum=1597291.4117566838 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=123231 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=456270 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=983538 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=1375813 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=1472021 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=1499185 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=2XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=1499492 1578431517779237000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_count=143641,http_api_request_duration_seconds_sum=237157.3812770574 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.005,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.01,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.025,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.05,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=0 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.25,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=14335 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=0.5,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=28882 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=1,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=45498 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=2.5,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=109858 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=5,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=143159 1578431517779255000
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=10,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=143564 1578431517779255000
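
For reference, a fixture line like these parses back through the relocated models package. A minimal sketch (import path as introduced by this patch; Point accessors per the interface shown further below):

package main

import (
	"fmt"

	"github.com/influxdata/influxdb/v2/models"
)

func main() {
	line := []byte("prometheus,env=toolsus1,le=+Inf http_api_request_duration_seconds_bucket=143641 1578431517779255000")
	pts, err := models.ParsePoints(line) // post-refactor signature: no series-key prefix argument
	if err != nil {
		panic(err)
	}
	for _, p := range pts {
		fmt.Println(string(p.Name()), p.UnixNano())
		p.ForEachTag(func(k, v []byte) bool {
			fmt.Printf("  %s=%s\n", k, v)
			return true
		})
	}
}
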
-prometheus,env=toolsus1,handler=gateway,hostname=host1,le=+Inf,method=POST,nodename=node1,path=/api/v2/write,role=gateway-internal,status=5XX,user_agent=Telegraf http_api_request_duration_seconds_bucket=143641 1578431517779255000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_gc_cpu_fraction=0.030752107927046763 1578431517779273000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_heap_released_bytes=774291456 1578431517779274000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_next_gc_bytes=140163248 1578431517779274000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_sys_bytes=1637044728 1578431517779275000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_kafka_write_consecutive_failures=0 1578431517779275000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_redis_requests=0 1578431517779275000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal circuitbreaker_redis_total_failures=0 1578431517779276000
-prometheus,env=toolsus1,hostname=host1,nodename=node1,role=gateway-internal go_memstats_heap_idle_bytes=1403379712 1578431517779276000
diff --git a/pkg/data/gen/merged_series_generator.go b/pkg/data/gen/merged_series_generator.go
index 31899f7b30..b948ac7ce7 100644
--- a/pkg/data/gen/merged_series_generator.go
+++ b/pkg/data/gen/merged_series_generator.go
@@ -4,7 +4,7 @@ import (
 	"container/heap"
 	"math"
 
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 )
 
 type mergedSeriesGenerator struct {
diff --git a/pkg/data/gen/series_generator.go b/pkg/data/gen/series_generator.go
index 790876e63b..1894f5944c 100644
--- a/pkg/data/gen/series_generator.go
+++ b/pkg/data/gen/series_generator.go
@@ -4,7 +4,7 @@ import (
 	"math"
 	"time"
 
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 )
 
 type SeriesGenerator interface {
diff --git a/pkg/data/gen/specs.go b/pkg/data/gen/specs.go
index 458b1ef32d..c6d0cfb70d 100644
--- a/pkg/data/gen/specs.go
+++ b/pkg/data/gen/specs.go
@@ -11,7 +11,7 @@ import (
 	"unicode/utf8"
 
 	"github.com/BurntSushi/toml"
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/pkg/errors"
 )
diff --git a/pkg/data/gen/specs_test.go b/pkg/data/gen/specs_test.go
index 8ba9c8627f..aef0149053 100644
--- a/pkg/data/gen/specs_test.go
+++ b/pkg/data/gen/specs_test.go
@@ -6,7 +6,7 @@ import (
 
 	"github.com/BurntSushi/toml"
 	"github.com/google/go-cmp/cmp"
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 )
 
 func countableSequenceFnCmp(a, b NewCountableSequenceFn) bool {
diff --git a/pkg/data/gen/tags_sequence.go b/pkg/data/gen/tags_sequence.go
index 3860c0db42..42acea5b40 100644
--- a/pkg/data/gen/tags_sequence.go
+++ b/pkg/data/gen/tags_sequence.go
@@ -6,7 +6,7 @@ import (
 	"math/rand"
 	"sort"
 
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 )
 
 type TagsSequence interface {
diff --git a/pkg/data/gen/values_sequence.gen.go b/pkg/data/gen/values_sequence.gen.go
index 0e76441028..dfb8784f61 100644
--- a/pkg/data/gen/values_sequence.gen.go
+++ b/pkg/data/gen/values_sequence.gen.go
@@ -7,7 +7,7 @@
 package gen
 
 import (
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
"github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/pkg/data/gen/values_sequence.gen.go.tmpl b/pkg/data/gen/values_sequence.gen.go.tmpl index b39e95f10d..6d553fa743 100644 --- a/pkg/data/gen/values_sequence.gen.go.tmpl +++ b/pkg/data/gen/values_sequence.gen.go.tmpl @@ -1,7 +1,7 @@ package gen import ( - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/query/stdlib/experimental/to.go b/query/stdlib/experimental/to.go index a049011397..cc6f0c61ae 100644 --- a/query/stdlib/experimental/to.go +++ b/query/stdlib/experimental/to.go @@ -12,10 +12,10 @@ import ( "github.com/influxdata/flux/semantic" "github.com/influxdata/flux/stdlib/experimental" platform "github.com/influxdata/influxdb/v2" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/query" "github.com/influxdata/influxdb/v2/query/stdlib/influxdata/influxdb" "github.com/influxdata/influxdb/v2/storage" - "github.com/influxdata/influxdb/v2/v1/models" ) // ToKind is the kind for the `to` flux function diff --git a/query/stdlib/experimental/to_test.go b/query/stdlib/experimental/to_test.go index 8f7dd4c7fc..d0a89d3ae3 100644 --- a/query/stdlib/experimental/to_test.go +++ b/query/stdlib/experimental/to_test.go @@ -15,11 +15,11 @@ import ( "github.com/influxdata/flux/stdlib/universe" platform "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/mock" + "github.com/influxdata/influxdb/v2/models" _ "github.com/influxdata/influxdb/v2/query/builtin" pquerytest "github.com/influxdata/influxdb/v2/query/querytest" "github.com/influxdata/influxdb/v2/query/stdlib/experimental" "github.com/influxdata/influxdb/v2/query/stdlib/influxdata/influxdb" - "github.com/influxdata/influxdb/v2/v1/models" ) func TestTo_Query(t *testing.T) { diff --git a/query/stdlib/influxdata/influxdb/to.go b/query/stdlib/influxdata/influxdb/to.go index 4f62934d73..832647f4cb 100644 --- a/query/stdlib/influxdata/influxdb/to.go +++ b/query/stdlib/influxdata/influxdb/to.go @@ -19,9 +19,9 @@ import ( "github.com/influxdata/flux/values" platform "github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/kit/tracing" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/query" "github.com/influxdata/influxdb/v2/storage" - "github.com/influxdata/influxdb/v2/v1/models" ) // ToKind is the kind for the `to` flux function diff --git a/query/stdlib/influxdata/influxdb/to_test.go b/query/stdlib/influxdata/influxdb/to_test.go index 80ff3d2e0a..dc4694f3d5 100644 --- a/query/stdlib/influxdata/influxdb/to_test.go +++ b/query/stdlib/influxdata/influxdb/to_test.go @@ -21,7 +21,6 @@ import ( _ "github.com/influxdata/influxdb/v2/query/builtin" pquerytest "github.com/influxdata/influxdb/v2/query/querytest" "github.com/influxdata/influxdb/v2/query/stdlib/influxdata/influxdb" - "github.com/influxdata/influxdb/v2/v1/tsdb" ) func TestTo_Query(t *testing.T) { @@ -133,8 +132,6 @@ func TestToOpSpec_BucketsAccessed(t *testing.T) { } func TestTo_Process(t *testing.T) { - oid, _ := mock.OrganizationLookup{}.Lookup(context.Background(), "my-org") - bid, _ := mock.BucketLookup{}.Lookup(context.Background(), oid, "my-bucket") type wanted struct { result *mock.PointsWriter tables []*executetest.Table @@ -174,7 +171,7 @@ func TestTo_Process(t *testing.T) { })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `a _value=2 11 + Points: mockPoints(`a _value=2 11 a _value=2 21 b _value=1 21 a _value=3 31 @@ 
-249,7 +246,7 @@ c _value=4 41`), }, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `a,tag1=a,tag2=aa _value=2 11 + Points: mockPoints(`a,tag1=a,tag2=aa _value=2 11 a,tag1=a,tag2=bb _value=2 21 a,tag1=b,tag2=cc _value=1 21 a,tag1=a,tag2=dd _value=3 31 @@ -328,7 +325,7 @@ b,tagA=c,tagB=ee,tagC=jj _value=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `a,tag2=aa _value=2 11 + Points: mockPoints(`a,tag2=aa _value=2 11 a,tag2=bb _value=2 21 b,tag2=cc _value=1 21 a,tag2=dd _value=3 31 @@ -382,7 +379,7 @@ c,tag2=ee _value=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `m,tag1=a,tag2=aa _value=2 11 + Points: mockPoints(`m,tag1=a,tag2=aa _value=2 11 m,tag1=a,tag2=bb _value=2 21 m,tag1=b,tag2=cc _value=1 21 m,tag1=a,tag2=dd _value=3 31 @@ -458,7 +455,7 @@ m,tag1=c,tag2=ee _value=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `a temperature=2 11 + Points: mockPoints(`a temperature=2 11 a temperature=2 21 b temperature=1 21 a temperature=3 31 @@ -561,7 +558,7 @@ c temperature=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `a day="Monday",humidity=1,ratio=2,temperature=2 11 + Points: mockPoints(`a day="Monday",humidity=1,ratio=2,temperature=2 11 a day="Tuesday",humidity=2,ratio=1,temperature=2 21 b day="Wednesday",humidity=4,ratio=0.25,temperature=1 21 a day="Thursday",humidity=3,ratio=1,temperature=3 31 @@ -650,7 +647,7 @@ c day="Friday",humidity=5,ratio=0.8,temperature=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `a,tag2=d humidity=50i,temperature=2 11 + Points: mockPoints(`a,tag2=d humidity=50i,temperature=2 11 a,tag2=d humidity=50i,temperature=2 21 b,tag2=d humidity=50i,temperature=1 21 a,tag2=e humidity=60i,temperature=3 31 @@ -707,7 +704,7 @@ c,tag2=e humidity=65i,temperature=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `a _value=2 11 + Points: mockPoints(`a _value=2 11 a _value=2 21 b _value=1 21 a _hello=3 31 @@ -762,7 +759,7 @@ c _hello=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `m,tag1=a,tag2=aa _value=2 11 + Points: mockPoints(`m,tag1=a,tag2=aa _value=2 11 m,tag1=a,tag2=bb _value=2 21 m,tag1=b,tag2=cc _value=1 21 m,tag1=a,tag2=dd _value=3 31 @@ -817,7 +814,7 @@ m,tag1=c,tag2=ee _value=4 41`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `m,tag1=a,tag2=aa _value=2 11 + Points: mockPoints(`m,tag1=a,tag2=aa _value=2 11 m,tag1=a,tag2=bb _value=2 21 m,tag1=b,tag2=cc _value=1 21 m,tag1=a,tag2=dd _value=3 31`), @@ -870,7 +867,7 @@ m,tag1=a,tag2=dd _value=3 31`), })}, want: wanted{ result: &mock.PointsWriter{ - Points: mockPoints(oid, bid, `m,tag1=a,tag2=aa _value=2 11 + Points: mockPoints(`m,tag1=a,tag2=aa _value=2 11 m,tag1=a,tag2=bb _value=2 21 m,tag1=b,tag2=cc _value=1 21 m,tag1=a,tag2=dd _value=3 31 @@ -951,9 +948,8 @@ func pointsToStr(points []models.Point) string { return outStr } -func mockPoints(org, bucket platform.ID, pointdata string) []models.Point { - name := tsdb.EncodeName(org, bucket) - points, err := models.ParsePoints([]byte(pointdata), name[:]) +func mockPoints(pointdata string) []models.Point { + points, err := models.ParsePoints([]byte(pointdata)) if err != nil { return nil } diff --git a/storage/engine.go b/storage/engine.go index 66e5be0503..0bef0e1c3b 100644 --- a/storage/engine.go +++ b/storage/engine.go @@ -14,8 +14,8 @@ import ( 
"github.com/influxdata/influxdb/v2" "github.com/influxdata/influxdb/v2/kit/tracing" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/coordinator" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/services/meta" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/pkg/errors" diff --git a/storage/flux/predicate.go b/storage/flux/predicate.go index 990cff269e..f6dd6e3159 100644 --- a/storage/flux/predicate.go +++ b/storage/flux/predicate.go @@ -5,8 +5,8 @@ import ( "github.com/influxdata/flux/ast" "github.com/influxdata/flux/semantic" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/storage/reads/datatypes" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/pkg/errors" ) diff --git a/storage/flux/reader.go b/storage/flux/reader.go index e7cbe3d14f..cecb03837e 100644 --- a/storage/flux/reader.go +++ b/storage/flux/reader.go @@ -10,10 +10,10 @@ import ( "github.com/influxdata/flux/execute" "github.com/influxdata/flux/memory" "github.com/influxdata/flux/values" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/query/stdlib/influxdata/influxdb" storage "github.com/influxdata/influxdb/v2/storage/reads" "github.com/influxdata/influxdb/v2/storage/reads/datatypes" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb/cursors" ) diff --git a/storage/flux/table.gen.go b/storage/flux/table.gen.go index 7adbdc4f70..610e4062fb 100644 --- a/storage/flux/table.gen.go +++ b/storage/flux/table.gen.go @@ -14,8 +14,8 @@ import ( "github.com/influxdata/flux/execute" "github.com/influxdata/flux/memory" "github.com/influxdata/influxdb/v2" + "github.com/influxdata/influxdb/v2/models" storage "github.com/influxdata/influxdb/v2/storage/reads" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb/cursors" ) diff --git a/storage/flux/table.gen.go.tmpl b/storage/flux/table.gen.go.tmpl index c1dfecaa54..af84ba1ae0 100644 --- a/storage/flux/table.gen.go.tmpl +++ b/storage/flux/table.gen.go.tmpl @@ -9,7 +9,7 @@ import ( "github.com/influxdata/flux/memory" "github.com/influxdata/influxdb/v2" storage "github.com/influxdata/influxdb/v2/storage/reads" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb/cursors" ) {{range .}} diff --git a/storage/flux/table.go b/storage/flux/table.go index 7e7b1d0f60..ef4ad113eb 100644 --- a/storage/flux/table.go +++ b/storage/flux/table.go @@ -11,7 +11,7 @@ import ( "github.com/influxdata/flux/arrow" "github.com/influxdata/flux/execute" "github.com/influxdata/flux/memory" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) type table struct { diff --git a/storage/points_writer.go b/storage/points_writer.go index 66a2942d4c..47398bf907 100644 --- a/storage/points_writer.go +++ b/storage/points_writer.go @@ -6,7 +6,7 @@ import ( "time" "github.com/influxdata/influxdb/v2" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) // PointsWriter describes the ability to write points into a storage engine. 
diff --git a/storage/reads/group_resultset.go b/storage/reads/group_resultset.go
index c1040a800a..d66eb507b1 100644
--- a/storage/reads/group_resultset.go
+++ b/storage/reads/group_resultset.go
@@ -7,8 +7,8 @@ import (
 	"math"
 	"sort"
 
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/storage/reads/datatypes"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/tsdb/cursors"
 )
diff --git a/storage/reads/group_resultset_test.go b/storage/reads/group_resultset_test.go
index ca7e95196c..0ef73e5906 100644
--- a/storage/reads/group_resultset_test.go
+++ b/storage/reads/group_resultset_test.go
@@ -7,10 +7,10 @@ import (
 
 	"github.com/google/go-cmp/cmp"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/pkg/data/gen"
 	"github.com/influxdata/influxdb/v2/storage/reads"
 	"github.com/influxdata/influxdb/v2/storage/reads/datatypes"
-	"github.com/influxdata/influxdb/v2/v1/models"
 )
 
 func TestNewGroupResultSet_Sorting(t *testing.T) {
diff --git a/storage/reads/keymerger.go b/storage/reads/keymerger.go
index 2ce4f07cd5..da47ee2c1b 100644
--- a/storage/reads/keymerger.go
+++ b/storage/reads/keymerger.go
@@ -4,7 +4,7 @@ import (
 	"bytes"
 	"strings"
 
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 )
 
 // tagsKeyMerger is responsible for determining a merged set of tag keys
diff --git a/storage/reads/keymerger_test.go b/storage/reads/keymerger_test.go
index 00bc60269e..f779bf4b2a 100644
--- a/storage/reads/keymerger_test.go
+++ b/storage/reads/keymerger_test.go
@@ -7,7 +7,7 @@ import (
 	"testing"
 
 	"github.com/google/go-cmp/cmp"
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 )
 
 func TestKeyMerger_MergeTagKeys(t *testing.T) {
diff --git a/storage/reads/resultset.go b/storage/reads/resultset.go
index 71e90f29cb..151519e825 100644
--- a/storage/reads/resultset.go
+++ b/storage/reads/resultset.go
@@ -4,8 +4,8 @@ import (
 	"context"
 	"math"
 
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/storage/reads/datatypes"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/tsdb/cursors"
 )
diff --git a/storage/reads/resultset_lineprotocol.go b/storage/reads/resultset_lineprotocol.go
index c88adfe965..2fb95b7509 100644
--- a/storage/reads/resultset_lineprotocol.go
+++ b/storage/reads/resultset_lineprotocol.go
@@ -5,7 +5,7 @@ import (
 	"io"
 	"strconv"
 
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/v1/tsdb/cursors"
 )
diff --git a/storage/reads/series_cursor.go b/storage/reads/series_cursor.go
index 1d691d5e7e..57558191a4 100644
--- a/storage/reads/series_cursor.go
+++ b/storage/reads/series_cursor.go
@@ -3,7 +3,7 @@ package reads
 import (
 	"context"
 
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/v1/tsdb/cursors"
 	"github.com/influxdata/influxql"
 )
diff --git a/storage/reads/store.go b/storage/reads/store.go
index 19cbc88691..bcca13c9d0 100644
--- a/storage/reads/store.go
+++ b/storage/reads/store.go
@@ -4,8 +4,8 @@ import (
 	"context"
 
 	"github.com/gogo/protobuf/proto"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/storage/reads/datatypes"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/tsdb/cursors"
 )
diff --git a/storage/reads/store_test.go b/storage/reads/store_test.go
index 4e5e5f5bd6..03a2e3764a 100644
--- a/storage/reads/store_test.go
+++ b/storage/reads/store_test.go
@@ -6,8 +6,8 @@ import (
 	"io"
 	"strings"
 
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/storage/reads"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/tsdb/cursors"
 )
diff --git a/storage/reads/tagsbuffer.go b/storage/reads/tagsbuffer.go
index 1195960ab2..e0982b0b27 100644
--- a/storage/reads/tagsbuffer.go
+++ b/storage/reads/tagsbuffer.go
@@ -1,7 +1,7 @@
 package reads
 
 import (
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 )
 
 type tagsBuffer struct {
diff --git a/v1/coordinator/points_writer.go b/v1/coordinator/points_writer.go
index 5dbec9068a..a8dfedddb9 100644
--- a/v1/coordinator/points_writer.go
+++ b/v1/coordinator/points_writer.go
@@ -8,8 +8,8 @@ import (
 	"sync/atomic"
 	"time"
 
+	"github.com/influxdata/influxdb/v2/models"
 	influxdb "github.com/influxdata/influxdb/v2/v1"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/services/meta"
 	"github.com/influxdata/influxdb/v2/v1/tsdb"
 	"go.uber.org/zap"
diff --git a/v1/coordinator/points_writer_test.go b/v1/coordinator/points_writer_test.go
index cae7f48ff4..2929df40a4 100644
--- a/v1/coordinator/points_writer_test.go
+++ b/v1/coordinator/points_writer_test.go
@@ -8,9 +8,9 @@ import (
 	"testing"
 	"time"
 
+	"github.com/influxdata/influxdb/v2/models"
 	influxdb "github.com/influxdata/influxdb/v2/v1"
 	"github.com/influxdata/influxdb/v2/v1/coordinator"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/services/meta"
 	"github.com/influxdata/influxdb/v2/v1/tsdb"
 )
diff --git a/v1/coordinator/statement_executor.go b/v1/coordinator/statement_executor.go
index d88ce6626e..661ee9a2fb 100644
--- a/v1/coordinator/statement_executor.go
+++ b/v1/coordinator/statement_executor.go
@@ -12,10 +12,10 @@ import (
 	"time"
 
 	"github.com/influxdata/influxdb/v2/influxql/query"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/pkg/tracing"
 	"github.com/influxdata/influxdb/v2/pkg/tracing/fields"
 	influxdb "github.com/influxdata/influxdb/v2/v1"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/monitor"
 	"github.com/influxdata/influxdb/v2/v1/services/meta"
 	"github.com/influxdata/influxdb/v2/v1/tsdb"
diff --git a/v1/coordinator/statement_executor_test.go b/v1/coordinator/statement_executor_test.go
index 7d5dda9e8f..665a65f306 100644
--- a/v1/coordinator/statement_executor_test.go
+++ b/v1/coordinator/statement_executor_test.go
@@ -15,9 +15,9 @@ import (
 	"github.com/davecgh/go-spew/spew"
 	"github.com/influxdata/influxdb/v2/influxql/query"
 	"github.com/influxdata/influxdb/v2/logger"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/v1/coordinator"
 	"github.com/influxdata/influxdb/v2/v1/internal"
-	"github.com/influxdata/influxdb/v2/v1/models"
 	"github.com/influxdata/influxdb/v2/v1/services/meta"
 	"github.com/influxdata/influxdb/v2/v1/tsdb"
 	"github.com/influxdata/influxql"
diff --git a/v1/internal/authorizer.go b/v1/internal/authorizer.go
index 548d384ef4..85a0737aa5 100644
--- a/v1/internal/authorizer.go
+++ b/v1/internal/authorizer.go
@@ -1,7 +1,7 @@
 package internal
 
 import (
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxql"
 )
diff --git a/v1/internal/tsdb_store.go b/v1/internal/tsdb_store.go
index 875e21268c..2d9708aa01 100644
--- a/v1/internal/tsdb_store.go
+++ b/v1/internal/tsdb_store.go
@@ -5,7 +5,7 @@ import (
 	"time"
 
 	"github.com/influxdata/influxdb/v2/influxql/query"
-	"github.com/influxdata/influxdb/v2/v1/models"
+	"github.com/influxdata/influxdb/v2/models"
 	"github.com/influxdata/influxdb/v2/v1/tsdb"
 	"github.com/influxdata/influxql"
 	"go.uber.org/zap"
diff --git a/v1/models/inline_fnv.go b/v1/models/inline_fnv.go
deleted file mode 100644
index 9cf56c6860..0000000000
--- a/v1/models/inline_fnv.go
+++ /dev/null
@@ -1,32 +0,0 @@
-package models // import "github.com/influxdata/influxdb/v2/v1/models"
-
-// from stdlib hash/fnv/fnv.go
-const (
-	prime64  = 1099511628211
-	offset64 = 14695981039346656037
-)
-
-// InlineFNV64a is an alloc-free port of the standard library's fnv64a.
-// See https://en.wikipedia.org/wiki/Fowler%E2%80%93Noll%E2%80%93Vo_hash_function.
-type InlineFNV64a uint64
-
-// NewInlineFNV64a returns a new instance of InlineFNV64a.
-func NewInlineFNV64a() InlineFNV64a {
-	return offset64
-}
-
-// Write adds data to the running hash.
-func (s *InlineFNV64a) Write(data []byte) (int, error) {
-	hash := uint64(*s)
-	for _, c := range data {
-		hash ^= uint64(c)
-		hash *= prime64
-	}
-	*s = InlineFNV64a(hash)
-	return len(data), nil
-}
-
-// Sum64 returns the uint64 of the current resulting hash.
-func (s *InlineFNV64a) Sum64() uint64 {
-	return uint64(*s)
-}
diff --git a/v1/models/inline_fnv_test.go b/v1/models/inline_fnv_test.go
deleted file mode 100644
index 0c59bff190..0000000000
--- a/v1/models/inline_fnv_test.go
+++ /dev/null
@@ -1,29 +0,0 @@
-package models_test
-
-import (
-	"hash/fnv"
-	"testing"
-	"testing/quick"
-
-	"github.com/influxdata/influxdb/v2/v1/models"
-)
-
-func TestInlineFNV64aEquivalenceFuzz(t *testing.T) {
-	f := func(data []byte) bool {
-		stdlibFNV := fnv.New64a()
-		stdlibFNV.Write(data)
-		want := stdlibFNV.Sum64()
-
-		inlineFNV := models.NewInlineFNV64a()
-		inlineFNV.Write(data)
-		got := inlineFNV.Sum64()
-
-		return want == got
-	}
-	cfg := &quick.Config{
-		MaxCount: 10000,
-	}
-	if err := quick.Check(f, cfg); err != nil {
-		t.Fatal(err)
-	}
-}
diff --git a/v1/models/inline_strconv_parse.go b/v1/models/inline_strconv_parse.go
deleted file mode 100644
index 0219c16b40..0000000000
--- a/v1/models/inline_strconv_parse.go
+++ /dev/null
@@ -1,44 +0,0 @@
-package models // import "github.com/influxdata/influxdb/v2/v1/models"
-
-import (
-	"reflect"
-	"strconv"
-	"unsafe"
-)
-
-// parseIntBytes is a zero-alloc wrapper around strconv.ParseInt.
-func parseIntBytes(b []byte, base int, bitSize int) (i int64, err error) {
-	s := unsafeBytesToString(b)
-	return strconv.ParseInt(s, base, bitSize)
-}
-
-// parseUintBytes is a zero-alloc wrapper around strconv.ParseUint.
-func parseUintBytes(b []byte, base int, bitSize int) (i uint64, err error) {
-	s := unsafeBytesToString(b)
-	return strconv.ParseUint(s, base, bitSize)
-}
-
-// parseFloatBytes is a zero-alloc wrapper around strconv.ParseFloat.
-func parseFloatBytes(b []byte, bitSize int) (float64, error) {
-	s := unsafeBytesToString(b)
-	return strconv.ParseFloat(s, bitSize)
-}
-
-// parseBoolBytes is a zero-alloc wrapper around strconv.ParseBool.
-func parseBoolBytes(b []byte) (bool, error) {
-	return strconv.ParseBool(unsafeBytesToString(b))
-}
-
-// unsafeBytesToString converts a []byte to a string without a heap allocation.
-//
-// It is unsafe, and is intended to prepare input to short-lived functions
-// that require strings.
-func unsafeBytesToString(in []byte) string {
-	src := *(*reflect.SliceHeader)(unsafe.Pointer(&in))
-	dst := reflect.StringHeader{
-		Data: src.Data,
-		Len:  src.Len,
-	}
-	s := *(*string)(unsafe.Pointer(&dst))
-	return s
-}
diff --git a/v1/models/inline_strconv_parse_test.go b/v1/models/inline_strconv_parse_test.go
deleted file mode 100644
index 119f543d78..0000000000
--- a/v1/models/inline_strconv_parse_test.go
+++ /dev/null
@@ -1,103 +0,0 @@
-package models
-
-import (
-	"strconv"
-	"testing"
-	"testing/quick"
-)
-
-func TestParseIntBytesEquivalenceFuzz(t *testing.T) {
-	f := func(b []byte, base int, bitSize int) bool {
-		exp, expErr := strconv.ParseInt(string(b), base, bitSize)
-		got, gotErr := parseIntBytes(b, base, bitSize)
-
-		return exp == got && checkErrs(expErr, gotErr)
-	}
-
-	cfg := &quick.Config{
-		MaxCount: 10000,
-	}
-
-	if err := quick.Check(f, cfg); err != nil {
-		t.Fatal(err)
-	}
-}
-
-func TestParseIntBytesValid64bitBase10EquivalenceFuzz(t *testing.T) {
-	buf := []byte{}
-	f := func(n int64) bool {
-		buf = strconv.AppendInt(buf[:0], n, 10)
-
-		exp, expErr := strconv.ParseInt(string(buf), 10, 64)
-		got, gotErr := parseIntBytes(buf, 10, 64)
-
-		return exp == got && checkErrs(expErr, gotErr)
-	}
-
-	cfg := &quick.Config{
-		MaxCount: 10000,
-	}
-
-	if err := quick.Check(f, cfg); err != nil {
-		t.Fatal(err)
-	}
-}
-
-func TestParseFloatBytesEquivalenceFuzz(t *testing.T) {
-	f := func(b []byte, bitSize int) bool {
-		exp, expErr := strconv.ParseFloat(string(b), bitSize)
-		got, gotErr := parseFloatBytes(b, bitSize)
-
-		return exp == got && checkErrs(expErr, gotErr)
-	}
-
-	cfg := &quick.Config{
-		MaxCount: 10000,
-	}
-
-	if err := quick.Check(f, cfg); err != nil {
-		t.Fatal(err)
-	}
-}
-
-func TestParseFloatBytesValid64bitEquivalenceFuzz(t *testing.T) {
-	buf := []byte{}
-	f := func(n float64) bool {
-		buf = strconv.AppendFloat(buf[:0], n, 'f', -1, 64)
-
-		exp, expErr := strconv.ParseFloat(string(buf), 64)
-		got, gotErr := parseFloatBytes(buf, 64)
-
-		return exp == got && checkErrs(expErr, gotErr)
-	}
-
-	cfg := &quick.Config{
-		MaxCount: 10000,
-	}
-
-	if err := quick.Check(f, cfg); err != nil {
-		t.Fatal(err)
-	}
-}
-
-func TestParseBoolBytesEquivalence(t *testing.T) {
-	var buf []byte
-	for _, s := range []string{"1", "t", "T", "TRUE", "true", "True", "0", "f", "F", "FALSE", "false", "False", "fail", "TrUe", "FAlSE", "numbers", ""} {
-		buf = append(buf[:0], s...)
-
-		exp, expErr := strconv.ParseBool(s)
-		got, gotErr := parseBoolBytes(buf)
-
-		if got != exp || !checkErrs(expErr, gotErr) {
-			t.Errorf("Failed to parse boolean value %q correctly: wanted (%t, %v), got (%t, %v)", s, exp, expErr, got, gotErr)
-		}
-	}
-}
-
-func checkErrs(a, b error) bool {
-	if (a == nil) != (b == nil) {
-		return false
-	}
-
-	return a == nil || a.Error() == b.Error()
-}
diff --git a/v1/models/points.go b/v1/models/points.go
deleted file mode 100644
index 7bffe67f44..0000000000
--- a/v1/models/points.go
+++ /dev/null
@@ -1,2552 +0,0 @@
-// Package models implements basic objects used throughout the TICK stack.
-package models // import "github.com/influxdata/influxdb/v2/v1/models"
-
-import (
-	"bytes"
-	"encoding/binary"
-	"errors"
-	"fmt"
-	"io"
-	"math"
-	"sort"
-	"strconv"
-	"strings"
-	"time"
-	"unicode"
-	"unicode/utf8"
-
-	"github.com/influxdata/influxdb/v2/pkg/escape"
-)
-
-// Values used to store the field key and measurement name as special internal
-// tags.
-const (
-	FieldKeyTagKey    = "\xff"
-	MeasurementTagKey = "\x00"
-)
-
-// Predefined byte representations of special tag keys.
-var (
-	FieldKeyTagKeyBytes    = []byte(FieldKeyTagKey)
-	MeasurementTagKeyBytes = []byte(MeasurementTagKey)
-)
-
-type escapeSet struct {
-	k   [1]byte
-	esc [2]byte
-}
-
-var (
-	measurementEscapeCodes = [...]escapeSet{
-		{k: [1]byte{','}, esc: [2]byte{'\\', ','}},
-		{k: [1]byte{' '}, esc: [2]byte{'\\', ' '}},
-	}
-
-	tagEscapeCodes = [...]escapeSet{
-		{k: [1]byte{','}, esc: [2]byte{'\\', ','}},
-		{k: [1]byte{' '}, esc: [2]byte{'\\', ' '}},
-		{k: [1]byte{'='}, esc: [2]byte{'\\', '='}},
-	}
-
-	// ErrPointMustHaveAField is returned when operating on a point that does not have any fields.
-	ErrPointMustHaveAField = errors.New("point without fields is unsupported")
-
-	// ErrInvalidNumber is returned when a number is expected but not provided.
-	ErrInvalidNumber = errors.New("invalid number")
-
-	// ErrInvalidPoint is returned when a point cannot be parsed correctly.
-	ErrInvalidPoint = errors.New("point is invalid")
-
-	// ErrInvalidKevValuePairs is returned when the number of key, value pairs
-	// is odd, indicating a missing value.
-	ErrInvalidKevValuePairs = errors.New("key/value pairs is an odd length")
-)
-
-const (
-	// MaxKeyLength is the largest allowed size of the combined measurement and tag keys.
-	MaxKeyLength = 65535
-)
-
-// enableUint64Support will enable uint64 support if set to true.
-var enableUint64Support = false
-
-// EnableUintSupport manually enables uint support for the point parser.
-// This function will be removed in the future and only exists for unit tests during the
-// transition.
-func EnableUintSupport() {
-	enableUint64Support = true
-}
-
-// Point defines the values that will be written to the database.
-type Point interface {
-	// Name returns the measurement name for the point.
-	Name() []byte
-
-	// SetName updates the measurement name for the point.
-	SetName(string)
-
-	// Tags returns the tag set for the point.
-	Tags() Tags
-
-	// ForEachTag iterates over each tag invoking fn. If fn returns false, iteration stops.
-	ForEachTag(fn func(k, v []byte) bool)
-
-	// AddTag adds or replaces a tag value for a point.
-	AddTag(key, value string)
-
-	// SetTags replaces the tags for the point.
-	SetTags(tags Tags)
-
-	// HasTag returns true if the tag exists for the point.
-	HasTag(tag []byte) bool
-
-	// Fields returns the fields for the point.
-	Fields() (Fields, error)
-
-	// Time returns the timestamp for the point.
-	Time() time.Time
-
-	// SetTime updates the timestamp for the point.
-	SetTime(t time.Time)
-
-	// UnixNano returns the timestamp of the point as nanoseconds since Unix epoch.
-	UnixNano() int64
-
-	// HashID returns a non-cryptographic checksum of the point's key.
-	HashID() uint64
-
-	// Key returns the key (measurement joined with tags) of the point.
-	Key() []byte
-
-	// String returns a string representation of the point. If there is a
-	// timestamp associated with the point then it will be specified with the default
-	// precision of nanoseconds.
-	String() string
-
-	// MarshalBinary returns a binary representation of the point.
-	MarshalBinary() ([]byte, error)
-
-	// PrecisionString returns a string representation of the point. If there
-	// is a timestamp associated with the point then it will be specified in the
-	// given unit.
-	PrecisionString(precision string) string
-
-	// RoundedString returns a string representation of the point. If there
-	// is a timestamp associated with the point, then it will be rounded to the
-	// given duration.
-	RoundedString(d time.Duration) string
-
-	// Split will attempt to return multiple points with the same timestamp whose
-	// string representations are no longer than size. Points with a single field or
-	// a point without a timestamp may exceed the requested size.
-	Split(size int) []Point
-
-	// Round will round the timestamp of the point to the given duration.
-	Round(d time.Duration)
-
-	// StringSize returns the length of the string that would be returned by String().
-	StringSize() int
-
-	// AppendString appends the result of String() to the provided buffer and returns
-	// the result, potentially reducing string allocations.
-	AppendString(buf []byte) []byte
-
-	// FieldIterator returns a FieldIterator that can be used to traverse the
-	// fields of a point without constructing the in-memory map.
-	FieldIterator() FieldIterator
-}
-
-// FieldType represents the type of a field.
-type FieldType int
-
-const (
-	// Integer indicates the field's type is integer.
-	Integer FieldType = iota
-
-	// Float indicates the field's type is float.
-	Float
-
-	// Boolean indicates the field's type is boolean.
-	Boolean
-
-	// String indicates the field's type is string.
-	String
-
-	// Empty is used to indicate that there is no field.
-	Empty
-
-	// Unsigned indicates the field's type is an unsigned integer.
-	Unsigned
-)
-
-// FieldIterator provides a low-allocation interface to iterate through a point's fields.
-type FieldIterator interface {
-	// Next indicates whether there are any fields remaining.
-	Next() bool
-
-	// FieldKey returns the key of the current field.
-	FieldKey() []byte
-
-	// Type returns the FieldType of the current field.
-	Type() FieldType
-
-	// StringValue returns the string value of the current field.
-	StringValue() string
-
-	// IntegerValue returns the integer value of the current field.
-	IntegerValue() (int64, error)
-
-	// UnsignedValue returns the unsigned value of the current field.
-	UnsignedValue() (uint64, error)
-
-	// BooleanValue returns the boolean value of the current field.
-	BooleanValue() (bool, error)
-
-	// FloatValue returns the float value of the current field.
-	FloatValue() (float64, error)
-
-	// Reset resets the iterator to its initial state.
-	Reset()
-}
-
-// Points represents a sortable list of points by timestamp.
-type Points []Point
-
-// Len implements sort.Interface.
-func (a Points) Len() int { return len(a) }
-
-// Less implements sort.Interface.
-func (a Points) Less(i, j int) bool { return a[i].Time().Before(a[j].Time()) }
-
-// Swap implements sort.Interface.
-func (a Points) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
-
-// point is the default implementation of Point.
-type point struct {
-	time time.Time
-
-	// text encoding of measurement and tags
-	// key must always be stored sorted by tags, if the original line was not sorted,
-	// we need to resort it
-	key []byte
-
-	// text encoding of field data
-	fields []byte
-
-	// text encoding of timestamp
-	ts []byte
-
-	// cached version of parsed fields from data
-	cachedFields map[string]interface{}
-
-	// cached version of parsed name from key
-	cachedName string
-
-	// cached version of parsed tags
-	cachedTags Tags
-
-	it fieldIterator
-}
-
-// type assertions
-var (
-	_ Point         = (*point)(nil)
-	_ FieldIterator = (*point)(nil)
-)
-
-const (
-	// the number of characters for the largest possible int64 (9223372036854775807)
-	maxInt64Digits = 19
-
-	// the number of characters for the smallest possible int64 (-9223372036854775808)
-	minInt64Digits = 20
-
-	// the number of characters for the largest possible uint64 (18446744073709551615)
-	maxUint64Digits = 20
-
-	// the number of characters required for the largest float64 before a range check
-	// would occur during parsing
-	maxFloat64Digits = 25
-
-	// the number of characters required for the smallest float64 before a range check
-	// would occur during parsing
-	minFloat64Digits = 27
-)
-
-// ParsePoints returns a slice of Points from a text representation of a point
-// with each point separated by newlines. If any points fail to parse, a non-nil error
-// will be returned in addition to the points that parsed successfully.
-func ParsePoints(buf []byte) ([]Point, error) {
-	return ParsePointsWithPrecision(buf, time.Now().UTC(), "n")
-}
-
-// ParsePointsString is identical to ParsePoints but accepts a string.
-func ParsePointsString(buf string) ([]Point, error) {
-	return ParsePoints([]byte(buf))
-}
-
-// ParseKey returns the measurement name and tags from a point.
-//
-// NOTE: to minimize heap allocations, the returned Tags will refer to subslices of buf.
-// This can have the unintended effect of preventing buf from being garbage collected.
-func ParseKey(buf []byte) (string, Tags) {
-	name, tags := ParseKeyBytes(buf)
-	return string(name), tags
-}
-
-func ParseKeyBytes(buf []byte) ([]byte, Tags) {
-	return ParseKeyBytesWithTags(buf, nil)
-}
-
-func ParseKeyBytesWithTags(buf []byte, tags Tags) ([]byte, Tags) {
-	// Ignore the error because scanMeasurement returns "missing fields" which we ignore
-	// when just parsing a key
-	state, i, _ := scanMeasurement(buf, 0)
-
-	var name []byte
-	if state == tagKeyState {
-		tags = parseTags(buf, tags)
-		// scanMeasurement returns the location of the comma if there are tags, strip that off
-		name = buf[:i-1]
-	} else {
-		name = buf[:i]
-	}
-	return unescapeMeasurement(name), tags
-}
-
-func ParseTags(buf []byte) Tags {
-	return parseTags(buf, nil)
-}
-
-func ParseName(buf []byte) []byte {
-	// Ignore the error because scanMeasurement returns "missing fields" which we ignore
-	// when just parsing a key
-	state, i, _ := scanMeasurement(buf, 0)
-	var name []byte
-	if state == tagKeyState {
-		name = buf[:i-1]
-	} else {
-		name = buf[:i]
-	}
-
-	return unescapeMeasurement(name)
-}
-
-// ParsePointsWithPrecision is similar to ParsePoints, but allows the
-// caller to provide a precision for time.
-//
-// NOTE: to minimize heap allocations, the returned Points will refer to subslices of buf.
-// This can have the unintended effect of preventing buf from being garbage collected.
-func ParsePointsWithPrecision(buf []byte, defaultTime time.Time, precision string) ([]Point, error) {
-	points := make([]Point, 0, bytes.Count(buf, []byte{'\n'})+1)
-	var (
-		pos    int
-		block  []byte
-		failed []string
-	)
-	for pos < len(buf) {
-		pos, block = scanLine(buf, pos)
-		pos++
-
-		if len(block) == 0 {
-			continue
-		}
-
-		start := skipWhitespace(block, 0)
-
-		// If line is all whitespace, just skip it
-		if start >= len(block) {
-			continue
-		}
-
-		// lines which start with '#' are comments
-		if block[start] == '#' {
-			continue
-		}
-
-		// strip the newline if one is present
-		if block[len(block)-1] == '\n' {
-			block = block[:len(block)-1]
-		}
-
-		pt, err := parsePoint(block[start:], defaultTime, precision)
-		if err != nil {
-			failed = append(failed, fmt.Sprintf("unable to parse '%s': %v", string(block[start:]), err))
-		} else {
-			points = append(points, pt)
-		}
-
-	}
-	if len(failed) > 0 {
-		return points, fmt.Errorf("%s", strings.Join(failed, "\n"))
-	}
-	return points, nil
-
-}
-
-func parsePoint(buf []byte, defaultTime time.Time, precision string) (Point, error) {
-	// scan the first block which is measurement[,tag1=value1,tag2=value2...]
-	pos, key, err := scanKey(buf, 0)
-	if err != nil {
-		return nil, err
-	}
-
-	// measurement name is required
-	if len(key) == 0 {
-		return nil, fmt.Errorf("missing measurement")
-	}
-
-	if len(key) > MaxKeyLength {
-		return nil, fmt.Errorf("max key length exceeded: %v > %v", len(key), MaxKeyLength)
-	}
-
-	// scan the second block which is field1=value1[,field2=value2,...]
-	pos, fields, err := scanFields(buf, pos)
-	if err != nil {
-		return nil, err
-	}
-
-	// at least one field is required
-	if len(fields) == 0 {
-		return nil, fmt.Errorf("missing fields")
-	}
-
-	var maxKeyErr error
-	err = walkFields(fields, func(k, v []byte) bool {
-		if sz := seriesKeySize(key, k); sz > MaxKeyLength {
-			maxKeyErr = fmt.Errorf("max key length exceeded: %v > %v", sz, MaxKeyLength)
-			return false
-		}
-		return true
-	})
-
-	if err != nil {
-		return nil, err
-	}
-
-	if maxKeyErr != nil {
-		return nil, maxKeyErr
-	}
-
-	// scan the last block which is an optional integer timestamp
-	pos, ts, err := scanTime(buf, pos)
-	if err != nil {
-		return nil, err
-	}
-
-	pt := &point{
-		key:    key,
-		fields: fields,
-		ts:     ts,
-	}
-
-	if len(ts) == 0 {
-		pt.time = defaultTime
-		pt.SetPrecision(precision)
-	} else {
-		ts, err := parseIntBytes(ts, 10, 64)
-		if err != nil {
-			return nil, err
-		}
-		pt.time, err = SafeCalcTime(ts, precision)
-		if err != nil {
-			return nil, err
-		}
-
-		// Determine if there are illegal non-whitespace characters after the
-		// timestamp block.
-		for pos < len(buf) {
-			if buf[pos] != ' ' {
-				return nil, ErrInvalidPoint
-			}
-			pos++
-		}
-	}
-	return pt, nil
-}
-
-// GetPrecisionMultiplier will return a multiplier for the precision specified.
-func GetPrecisionMultiplier(precision string) int64 {
-	d := time.Nanosecond
-	switch precision {
-	case "u":
-		d = time.Microsecond
-	case "ms":
-		d = time.Millisecond
-	case "s":
-		d = time.Second
-	case "m":
-		d = time.Minute
-	case "h":
-		d = time.Hour
-	}
-	return int64(d)
-}
-
-// scanKey scans buf starting at i for the measurement and tag portion of the point.
-// It returns the ending position and the byte slice of key within buf. If there
-// are tags, they will be sorted if they are not already.
-func scanKey(buf []byte, i int) (int, []byte, error) {
-	start := skipWhitespace(buf, i)
-
-	i = start
-
-	// Determines whether the tags are sorted; assume they are
-	sorted := true
-
-	// indices holds the indexes within buf of the start of each tag. For example,
-	// a buf of 'cpu,host=a,region=b,zone=c' would have indices slice of [4,11,20]
-	// which indicates that the first tag starts at buf[4], second at buf[11], and
-	// last at buf[20]
-	indices := make([]int, 100)
-
-	// tracks how many commas we've seen so we know how many values in indices
-	// are in use. Since indices is an arbitrarily large slice,
-	// we need to know how many values in the buffer are in use.
-	commas := 0
-
-	// First scan the Point's measurement.
-	state, i, err := scanMeasurement(buf, i)
-	if err != nil {
-		return i, buf[start:i], err
-	}
-
-	// Optionally scan tags if needed.
-	if state == tagKeyState {
-		i, commas, indices, err = scanTags(buf, i, indices)
-		if err != nil {
-			return i, buf[start:i], err
-		}
-	}
-
-	// Now we know where the key region is within buf, and the location of tags, we
-	// need to determine if duplicate tags exist and if the tags are sorted. This iterates
-	// over the list comparing each tag in the sequence with each other.
-	for j := 0; j < commas-1; j++ {
-		// get the left and right tags
-		_, left := scanTo(buf[indices[j]:indices[j+1]-1], 0, '=')
-		_, right := scanTo(buf[indices[j+1]:indices[j+2]-1], 0, '=')
-
-		// If left is greater than right, the tags are not sorted. We do not have to
-		// continue because the short path no longer works.
-		// If the tags are equal, then there are duplicate tags, and we should abort.
-		// If the tags are not sorted, this pass may not find duplicate tags and we
-		// need to do a more exhaustive search later.
-		if cmp := bytes.Compare(left, right); cmp > 0 {
-			sorted = false
-			break
-		} else if cmp == 0 {
-			return i, buf[start:i], fmt.Errorf("duplicate tags")
-		}
-	}
-
-	// If the tags are not sorted, then sort them. This sort is inline and
-	// uses the tag indices we created earlier. The actual buffer is not sorted, the
-	// indices are using the buffer for value comparison. After the indices are sorted,
-	// the buffer is reconstructed from the sorted indices.
-	if !sorted && commas > 0 {
-		// Get the measurement name for later
-		measurement := buf[start : indices[0]-1]
-
-		// Sort the indices
-		indices := indices[:commas]
-		insertionSort(0, commas, buf, indices)
-
-		// Create a new key using the measurement and sorted indices
-		b := make([]byte, len(buf[start:i]))
-		pos := copy(b, measurement)
-		for _, i := range indices {
-			b[pos] = ','
-			pos++
-			_, v := scanToSpaceOr(buf, i, ',')
-			pos += copy(b[pos:], v)
-		}
-
-		// Check again for duplicate tags now that the tags are sorted.
-		for j := 0; j < commas-1; j++ {
-			// get the left and right tags
-			_, left := scanTo(buf[indices[j]:], 0, '=')
-			_, right := scanTo(buf[indices[j+1]:], 0, '=')
-
-			// If the tags are equal, then there are duplicate tags, and we should abort.
-			// If the tags are not sorted, this pass may not find duplicate tags and we
-			// need to do a more exhaustive search later.
-			if bytes.Equal(left, right) {
-				return i, b, fmt.Errorf("duplicate tags")
-			}
-		}
-
-		return i, b, nil
-	}
-
-	return i, buf[start:i], nil
-}
-
-// The following constants allow us to specify which state to move to
-// next, when scanning sections of a Point.
-// The following constants allow us to specify which state to move to
-// next, when scanning sections of a Point.
-const (
-    tagKeyState = iota
-    tagValueState
-    fieldsState
-)
-
-// scanMeasurement examines the measurement part of a Point, returning
-// the next state to move to, and the current location in the buffer.
-func scanMeasurement(buf []byte, i int) (int, int, error) {
-    // Check first byte of measurement, anything except a comma is fine.
-    // It can't be a space, since whitespace is stripped prior to this
-    // function call.
-    if i >= len(buf) || buf[i] == ',' {
-        return -1, i, fmt.Errorf("missing measurement")
-    }
-
-    for {
-        i++
-        if i >= len(buf) {
-            // cpu
-            return -1, i, fmt.Errorf("missing fields")
-        }
-
-        if buf[i-1] == '\\' {
-            // Skip character (it's escaped).
-            continue
-        }
-
-        // Unescaped comma; move onto scanning the tags.
-        if buf[i] == ',' {
-            return tagKeyState, i + 1, nil
-        }
-
-        // Unescaped space; move onto scanning the fields.
-        if buf[i] == ' ' {
-            // cpu value=1.0
-            return fieldsState, i, nil
-        }
-    }
-}
-
-// scanTags examines all the tags in a Point, keeping track of and returning
-// the updated indices slice, the number of commas seen, and the location in
-// buf where the Point's fields begin.
-func scanTags(buf []byte, i int, indices []int) (int, int, []int, error) {
-    var (
-        err    error
-        commas int
-        state  = tagKeyState
-    )
-
-    for {
-        switch state {
-        case tagKeyState:
-            // Grow our indices slice if we have too many tags.
-            if commas >= len(indices) {
-                newIndices := make([]int, cap(indices)*2)
-                copy(newIndices, indices)
-                indices = newIndices
-            }
-            indices[commas] = i
-            commas++
-
-            i, err = scanTagsKey(buf, i)
-            state = tagValueState // tag value always follows a tag key
-        case tagValueState:
-            state, i, err = scanTagsValue(buf, i)
-        case fieldsState:
-            indices[commas] = i + 1
-            return i, commas, indices, nil
-        }
-
-        if err != nil {
-            return i, commas, indices, err
-        }
-    }
-}
-
-// scanTagsKey scans each character in a tag key.
-func scanTagsKey(buf []byte, i int) (int, error) {
-    // First character of the key.
-    if i >= len(buf) || buf[i] == ' ' || buf[i] == ',' || buf[i] == '=' {
-        // cpu,{'', ' ', ',', '='}
-        return i, fmt.Errorf("missing tag key")
-    }
-
-    // Examine each character in the tag key until we hit an unescaped
-    // equals (the tag value), or we hit an error (i.e., unescaped
-    // space or comma).
-    for {
-        i++
-
-        // Either we reached the end of the buffer or we hit an
-        // unescaped comma or space.
-        if i >= len(buf) ||
-            ((buf[i] == ' ' || buf[i] == ',') && buf[i-1] != '\\') {
-            // cpu,tag{'', ' ', ','}
-            return i, fmt.Errorf("missing tag value")
-        }
-
-        if buf[i] == '=' && buf[i-1] != '\\' {
-            // cpu,tag=
-            return i + 1, nil
-        }
-    }
-}
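The escaping rules enforced by scanTagsKey and by scanTagsValue (defined next) can be sketched through the public API, with the same assumed scaffolding as above:

func ExampleTagEscaping() {
    // Backslash-escaped spaces and commas are legal in tag keys and values.
    pts, _ := models.ParsePointsString(`cpu,data\ center=us\ west,path=/a\,b value=1`)
    fmt.Println(pts[0].Tags().GetString("data center")) // us west
    fmt.Println(pts[0].Tags().GetString("path"))        // /a,b

    // An unescaped '=' inside a tag value is rejected ("invalid tag format").
    _, err := models.ParsePointsString("cpu,host=a=b value=1")
    fmt.Println(err != nil) // true
}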
-// scanTagsValue scans each character in a tag value.
-func scanTagsValue(buf []byte, i int) (int, int, error) {
-    // Tag value cannot be empty.
-    if i >= len(buf) || buf[i] == ',' || buf[i] == ' ' {
-        // cpu,tag={',', ' '}
-        return -1, i, fmt.Errorf("missing tag value")
-    }
-
-    // Examine each character in the tag value until we hit an unescaped
-    // comma (move onto next tag key), an unescaped space (move onto
-    // fields), or we error out.
-    for {
-        i++
-        if i >= len(buf) {
-            // cpu,tag=value
-            return -1, i, fmt.Errorf("missing fields")
-        }
-
-        // An unescaped equals sign is an invalid tag value.
-        if buf[i] == '=' && buf[i-1] != '\\' {
-            // cpu,tag={'=', 'fo=o'}
-            return -1, i, fmt.Errorf("invalid tag format")
-        }
-
-        if buf[i] == ',' && buf[i-1] != '\\' {
-            // cpu,tag=foo,
-            return tagKeyState, i + 1, nil
-        }
-
-        // cpu,tag=foo value=1.0
-        // cpu, tag=foo\= value=1.0
-        if buf[i] == ' ' && buf[i-1] != '\\' {
-            return fieldsState, i, nil
-        }
-    }
-}
-
-func insertionSort(l, r int, buf []byte, indices []int) {
-    for i := l + 1; i < r; i++ {
-        for j := i; j > l && less(buf, indices, j, j-1); j-- {
-            indices[j], indices[j-1] = indices[j-1], indices[j]
-        }
-    }
-}
-
-func less(buf []byte, indices []int, i, j int) bool {
-    // This grabs the tag names for i & j; it ignores the values.
-    _, a := scanTo(buf, indices[i], '=')
-    _, b := scanTo(buf, indices[j], '=')
-    return bytes.Compare(a, b) < 0
-}
-
-// scanFields scans buf, starting at i, for the fields section of a point. It returns
-// the ending position and the byte slice of the fields within buf.
-func scanFields(buf []byte, i int) (int, []byte, error) {
-    start := skipWhitespace(buf, i)
-    i = start
-    quoted := false
-
-    // tracks how many '=' we've seen
-    equals := 0
-
-    // tracks how many commas we've seen
-    commas := 0
-
-    for {
-        // reached the end of buf?
-        if i >= len(buf) {
-            break
-        }
-
-        // escaped characters?
-        if buf[i] == '\\' && i+1 < len(buf) {
-            i += 2
-            continue
-        }
-
-        // If the value is quoted, scan until we get to the end quote.
-        // Only track quotes in field values, since quotes are not
-        // significant in field keys.
-        if buf[i] == '"' && equals > commas {
-            quoted = !quoted
-            i++
-            continue
-        }
-
-        // If we see an '=', ensure that there is at least one char before and after it.
-        if buf[i] == '=' && !quoted {
-            equals++
-
-            // check for "... =123" but allow "a\ =123"
-            if buf[i-1] == ' ' && buf[i-2] != '\\' {
-                return i, buf[start:i], fmt.Errorf("missing field key")
-            }
-
-            // check for "...a=123,=456" but allow "a=123,a\,=456"
-            if buf[i-1] == ',' && buf[i-2] != '\\' {
-                return i, buf[start:i], fmt.Errorf("missing field key")
-            }
-
-            // check for "... value="
-            if i+1 >= len(buf) {
-                return i, buf[start:i], fmt.Errorf("missing field value")
-            }
-
-            // check for "... value=,value2=..."
-            if buf[i+1] == ',' || buf[i+1] == ' ' {
-                return i, buf[start:i], fmt.Errorf("missing field value")
-            }
-
-            if isNumeric(buf[i+1]) || buf[i+1] == '-' || buf[i+1] == 'N' || buf[i+1] == 'n' {
-                var err error
-                i, err = scanNumber(buf, i+1)
-                if err != nil {
-                    return i, buf[start:i], err
-                }
-                continue
-            }
-            // If the next byte is not a double-quote, the value must be a boolean.
-            if buf[i+1] != '"' {
-                var err error
-                i, _, err = scanBoolean(buf, i+1)
-                if err != nil {
-                    return i, buf[start:i], err
-                }
-                continue
-            }
-        }
-
-        if buf[i] == ',' && !quoted {
-            commas++
-        }
-
-        // reached end of block?
-        if buf[i] == ' ' && !quoted {
-            break
-        }
-        i++
-    }
-
-    if quoted {
-        return i, buf[start:i], fmt.Errorf("unbalanced quotes")
-    }
-
-    // check that all field sections had keys and values (e.g. prevent "a=1,b")
-    if equals == 0 || commas != equals-1 {
-        return i, buf[start:i], fmt.Errorf("invalid field format")
-    }
-
-    return i, buf[start:i], nil
-}
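The field rules above in action: quotes only matter in field values, commas inside quoted strings do not split fields, a dangling quote fails with "unbalanced quotes", and a section without '=' fails with "invalid field format". A sketch with the same assumed scaffolding:

func ExampleFieldScanning() {
    pts, _ := models.ParsePointsString(`m s="hello, world",ok=true,n=42i`)
    flds, _ := pts[0].Fields()
    fmt.Println(flds["s"], flds["ok"], flds["n"]) // hello, world true 42

    // A missing closing quote is rejected.
    _, err := models.ParsePointsString(`m s="oops`)
    fmt.Println(err != nil) // true: unbalanced quotes

    // A field section without '=' is rejected.
    _, err = models.ParsePointsString(`m a=1,b`)
    fmt.Println(err != nil) // true: invalid field format
}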
-// scanTime scans buf, starting at i, for the time section of a point. It
-// returns the ending position, the byte slice of the timestamp within buf,
-// and an error if the timestamp is not in the correct numeric format.
-func scanTime(buf []byte, i int) (int, []byte, error) {
-    start := skipWhitespace(buf, i)
-    i = start
-
-    for {
-        // reached the end of buf?
-        if i >= len(buf) {
-            break
-        }
-
-        // Reached end of block or trailing whitespace?
-        if buf[i] == '\n' || buf[i] == ' ' {
-            break
-        }
-
-        // Handle negative timestamps
-        if i == start && buf[i] == '-' {
-            i++
-            continue
-        }
-
-        // Timestamps should be integers; make sure they are, so we don't need
-        // to actually parse the timestamp until needed.
-        if buf[i] < '0' || buf[i] > '9' {
-            return i, buf[start:i], fmt.Errorf("bad timestamp")
-        }
-        i++
-    }
-    return i, buf[start:i], nil
-}
-
-func isNumeric(b byte) bool {
-    return (b >= '0' && b <= '9') || b == '.'
-}
-
-// scanNumber returns the end position within buf, starting at i, after
-// scanning over buf for an integer or float. It returns an
-// error if an invalid number is scanned.
-func scanNumber(buf []byte, i int) (int, error) {
-    start := i
-    var isInt, isUnsigned bool
-
-    // Is negative number?
-    if i < len(buf) && buf[i] == '-' {
-        i++
-        // There must be more characters now, as just '-' is illegal.
-        if i == len(buf) {
-            return i, ErrInvalidNumber
-        }
-    }
-
-    // tracks whether we've seen a decimal point
-    decimal := false
-
-    // indicates the number is a float in scientific notation
-    scientific := false
-
-    for {
-        if i >= len(buf) {
-            break
-        }
-
-        if buf[i] == ',' || buf[i] == ' ' {
-            break
-        }
-
-        if buf[i] == 'i' && i > start && !(isInt || isUnsigned) {
-            isInt = true
-            i++
-            continue
-        } else if buf[i] == 'u' && i > start && !(isInt || isUnsigned) {
-            isUnsigned = true
-            i++
-            continue
-        }
-
-        if buf[i] == '.' {
-            // Can't have more than 1 decimal (e.g. 1.1.1 should fail)
-            if decimal {
-                return i, ErrInvalidNumber
-            }
-            decimal = true
-        }
-
-        // `e` is valid for floats but not as the first char
-        if i > start && (buf[i] == 'e' || buf[i] == 'E') {
-            scientific = true
-            i++
-            continue
-        }
-
-        // + and - are only valid at this point if they follow an e (scientific notation)
-        if (buf[i] == '+' || buf[i] == '-') && (buf[i-1] == 'e' || buf[i-1] == 'E') {
-            i++
-            continue
-        }
-
-        // NaN is an unsupported value
-        if i+2 < len(buf) && (buf[i] == 'N' || buf[i] == 'n') {
-            return i, ErrInvalidNumber
-        }
-
-        if !isNumeric(buf[i]) {
-            return i, ErrInvalidNumber
-        }
-        i++
-    }
-
-    if (isInt || isUnsigned) && (decimal || scientific) {
-        return i, ErrInvalidNumber
-    }
-
-    numericDigits := i - start
-    if isInt {
-        numericDigits--
-    }
-    if decimal {
-        numericDigits--
-    }
-    if buf[start] == '-' {
-        numericDigits--
-    }
-
-    if numericDigits == 0 {
-        return i, ErrInvalidNumber
-    }
-
-    // It's more common that numbers will be within the min/max range for their type,
-    // but we need to prevent out-of-range numbers from being parsed successfully.
-    // This uses some simple heuristics to decide whether we should parse the number
-    // to the actual type. It does not do it all the time because it incurs extra
-    // allocations, and we end up converting the type again when writing points to disk.
-    if isInt {
-        // Make sure the last char is an 'i' for integers (e.g. 9i10 is not valid)
-        if buf[i-1] != 'i' {
-            return i, ErrInvalidNumber
-        }
-        // Parse the int to check bounds; the number of digits could be larger than the max range.
-        // We subtract 1 from the index to remove the `i` from our tests.
-        if len(buf[start:i-1]) >= maxInt64Digits || len(buf[start:i-1]) >= minInt64Digits {
-            if _, err := parseIntBytes(buf[start:i-1], 10, 64); err != nil {
-                return i, fmt.Errorf("unable to parse integer %s: %s", buf[start:i-1], err)
-            }
-        }
-    } else if isUnsigned {
-        // Return an error if uint64 support has not been enabled.
- if !enableUint64Support { - return i, ErrInvalidNumber - } - // Make sure the last char is a 'u' for unsigned - if buf[i-1] != 'u' { - return i, ErrInvalidNumber - } - // Make sure the first char is not a '-' for unsigned - if buf[start] == '-' { - return i, ErrInvalidNumber - } - // Parse the uint to check bounds the number of digits could be larger than the max range - // We subtract 1 from the index to remove the `u` from our tests - if len(buf[start:i-1]) >= maxUint64Digits { - if _, err := parseUintBytes(buf[start:i-1], 10, 64); err != nil { - return i, fmt.Errorf("unable to parse unsigned %s: %s", buf[start:i-1], err) - } - } - } else { - // Parse the float to check bounds if it's scientific or the number of digits could be larger than the max range - if scientific || len(buf[start:i]) >= maxFloat64Digits || len(buf[start:i]) >= minFloat64Digits { - if _, err := parseFloatBytes(buf[start:i], 10); err != nil { - return i, fmt.Errorf("invalid float") - } - } - } - - return i, nil -} - -// scanBoolean returns the end position within buf, start at i after -// scanning over buf for boolean. Valid values for a boolean are -// t, T, true, TRUE, f, F, false, FALSE. It returns an error if a invalid boolean -// is scanned. -func scanBoolean(buf []byte, i int) (int, []byte, error) { - start := i - - if i < len(buf) && (buf[i] != 't' && buf[i] != 'f' && buf[i] != 'T' && buf[i] != 'F') { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - i++ - for { - if i >= len(buf) { - break - } - - if buf[i] == ',' || buf[i] == ' ' { - break - } - i++ - } - - // Single char bool (t, T, f, F) is ok - if i-start == 1 { - return i, buf[start:i], nil - } - - // length must be 4 for true or TRUE - if (buf[start] == 't' || buf[start] == 'T') && i-start != 4 { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - // length must be 5 for false or FALSE - if (buf[start] == 'f' || buf[start] == 'F') && i-start != 5 { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - // Otherwise - valid := false - switch buf[start] { - case 't': - valid = bytes.Equal(buf[start:i], []byte("true")) - case 'f': - valid = bytes.Equal(buf[start:i], []byte("false")) - case 'T': - valid = bytes.Equal(buf[start:i], []byte("TRUE")) || bytes.Equal(buf[start:i], []byte("True")) - case 'F': - valid = bytes.Equal(buf[start:i], []byte("FALSE")) || bytes.Equal(buf[start:i], []byte("False")) - } - - if !valid { - return i, buf[start:i], fmt.Errorf("invalid boolean") - } - - return i, buf[start:i], nil - -} - -// skipWhitespace returns the end position within buf, starting at i after -// scanning over spaces in tags. -func skipWhitespace(buf []byte, i int) int { - for i < len(buf) { - if buf[i] != ' ' && buf[i] != '\t' && buf[i] != 0 { - break - } - i++ - } - return i -} - -// scanLine returns the end position in buf and the next line found within -// buf. -func scanLine(buf []byte, i int) (int, []byte) { - start := i - quoted := false - fields := false - - // tracks how many '=' and commas we've seen - // this duplicates some of the functionality in scanFields - equals := 0 - commas := 0 - for { - // reached the end of buf? 
- if i >= len(buf) { - break - } - - // skip past escaped characters - if buf[i] == '\\' && i+2 < len(buf) { - i += 2 - continue - } - - if buf[i] == ' ' { - fields = true - } - - // If we see a double quote, makes sure it is not escaped - if fields { - if !quoted && buf[i] == '=' { - i++ - equals++ - continue - } else if !quoted && buf[i] == ',' { - i++ - commas++ - continue - } else if buf[i] == '"' && equals > commas { - i++ - quoted = !quoted - continue - } - } - - if buf[i] == '\n' && !quoted { - break - } - - i++ - } - - return i, buf[start:i] -} - -// scanTo returns the end position in buf and the next consecutive block -// of bytes, starting from i and ending with stop byte, where stop byte -// has not been escaped. -// -// If there are leading spaces, they are skipped. -func scanTo(buf []byte, i int, stop byte) (int, []byte) { - start := i - for { - // reached the end of buf? - if i >= len(buf) { - break - } - - // Reached unescaped stop value? - if buf[i] == stop && (i == 0 || buf[i-1] != '\\') { - break - } - i++ - } - - return i, buf[start:i] -} - -// scanTo returns the end position in buf and the next consecutive block -// of bytes, starting from i and ending with stop byte. If there are leading -// spaces, they are skipped. -func scanToSpaceOr(buf []byte, i int, stop byte) (int, []byte) { - start := i - if buf[i] == stop || buf[i] == ' ' { - return i, buf[start:i] - } - - for { - i++ - if buf[i-1] == '\\' { - continue - } - - // reached the end of buf? - if i >= len(buf) { - return i, buf[start:i] - } - - // reached end of block? - if buf[i] == stop || buf[i] == ' ' { - return i, buf[start:i] - } - } -} - -func scanTagValue(buf []byte, i int) (int, []byte) { - start := i - for { - if i >= len(buf) { - break - } - - if buf[i] == ',' && buf[i-1] != '\\' { - break - } - i++ - } - if i > len(buf) { - return i, nil - } - return i, buf[start:i] -} - -func scanFieldValue(buf []byte, i int) (int, []byte) { - start := i - quoted := false - for i < len(buf) { - // Only escape char for a field value is a double-quote and backslash - if buf[i] == '\\' && i+1 < len(buf) && (buf[i+1] == '"' || buf[i+1] == '\\') { - i += 2 - continue - } - - // Quoted value? (e.g. string) - if buf[i] == '"' { - i++ - quoted = !quoted - continue - } - - if buf[i] == ',' && !quoted { - break - } - i++ - } - return i, buf[start:i] -} - -func EscapeMeasurement(in []byte) []byte { - for _, c := range measurementEscapeCodes { - if bytes.IndexByte(in, c.k[0]) != -1 { - in = bytes.Replace(in, c.k[:], c.esc[:], -1) - } - } - return in -} - -func unescapeMeasurement(in []byte) []byte { - if bytes.IndexByte(in, '\\') == -1 { - return in - } - - for i := range measurementEscapeCodes { - c := &measurementEscapeCodes[i] - if bytes.IndexByte(in, c.k[0]) != -1 { - in = bytes.Replace(in, c.esc[:], c.k[:], -1) - } - } - return in -} - -func escapeTag(in []byte) []byte { - for i := range tagEscapeCodes { - c := &tagEscapeCodes[i] - if bytes.IndexByte(in, c.k[0]) != -1 { - in = bytes.Replace(in, c.k[:], c.esc[:], -1) - } - } - return in -} - -func unescapeTag(in []byte) []byte { - if bytes.IndexByte(in, '\\') == -1 { - return in - } - - for i := range tagEscapeCodes { - c := &tagEscapeCodes[i] - if bytes.IndexByte(in, c.k[0]) != -1 { - in = bytes.Replace(in, c.esc[:], c.k[:], -1) - } - } - return in -} - -// escapeStringFieldReplacer replaces double quotes and backslashes -// with the same character preceded by a backslash. 
-// As of Go 1.7 this benchmarked better in allocations and CPU time -// compared to iterating through a string byte-by-byte and appending to a new byte slice, -// calling strings.Replace twice, and better than (*Regex).ReplaceAllString. -var escapeStringFieldReplacer = strings.NewReplacer(`"`, `\"`, `\`, `\\`) - -// EscapeStringField returns a copy of in with any double quotes or -// backslashes with escaped values. -func EscapeStringField(in string) string { - return escapeStringFieldReplacer.Replace(in) -} - -// unescapeStringField returns a copy of in with any escaped double-quotes -// or backslashes unescaped. -func unescapeStringField(in string) string { - if strings.IndexByte(in, '\\') == -1 { - return in - } - - var out []byte - i := 0 - for { - if i >= len(in) { - break - } - // unescape backslashes - if in[i] == '\\' && i+1 < len(in) && in[i+1] == '\\' { - out = append(out, '\\') - i += 2 - continue - } - // unescape double-quotes - if in[i] == '\\' && i+1 < len(in) && in[i+1] == '"' { - out = append(out, '"') - i += 2 - continue - } - out = append(out, in[i]) - i++ - - } - return string(out) -} - -// NewPoint returns a new point with the given measurement name, tags, fields and timestamp. If -// an unsupported field value (NaN, or +/-Inf) or out of range time is passed, this function -// returns an error. -func NewPoint(name string, tags Tags, fields Fields, t time.Time) (Point, error) { - key, err := pointKey(name, tags, fields, t) - if err != nil { - return nil, err - } - - return &point{ - key: key, - time: t, - fields: fields.MarshalBinary(), - }, nil -} - -// pointKey checks some basic requirements for valid points, and returns the -// key, along with an possible error. -func pointKey(measurement string, tags Tags, fields Fields, t time.Time) ([]byte, error) { - if len(fields) == 0 { - return nil, ErrPointMustHaveAField - } - - if !t.IsZero() { - if err := CheckTime(t); err != nil { - return nil, err - } - } - - for key, value := range fields { - switch value := value.(type) { - case float64: - // Ensure the caller validates and handles invalid field values - if math.IsInf(value, 0) { - return nil, fmt.Errorf("+/-Inf is an unsupported value for field %s", key) - } - if math.IsNaN(value) { - return nil, fmt.Errorf("NaN is an unsupported value for field %s", key) - } - case float32: - // Ensure the caller validates and handles invalid field values - if math.IsInf(float64(value), 0) { - return nil, fmt.Errorf("+/-Inf is an unsupported value for field %s", key) - } - if math.IsNaN(float64(value)) { - return nil, fmt.Errorf("NaN is an unsupported value for field %s", key) - } - } - if len(key) == 0 { - return nil, fmt.Errorf("all fields must have non-empty names") - } - } - - key := MakeKey([]byte(measurement), tags) - for field := range fields { - sz := seriesKeySize(key, []byte(field)) - if sz > MaxKeyLength { - return nil, fmt.Errorf("max key length exceeded: %v > %v", sz, MaxKeyLength) - } - } - - return key, nil -} - -func seriesKeySize(key, field []byte) int { - // 4 is the length of the tsm1.fieldKeySeparator constant. It's inlined here to avoid a circular - // dependency. - return len(key) + 4 + len(field) -} - -// NewPointFromBytes returns a new Point from a marshalled Point. -func NewPointFromBytes(b []byte) (Point, error) { - p := &point{} - if err := p.UnmarshalBinary(b); err != nil { - return nil, err - } - - // This does some basic validation to ensure there are fields and they - // can be unmarshalled as well. 
- iter := p.FieldIterator() - var hasField bool - for iter.Next() { - if len(iter.FieldKey()) == 0 { - continue - } - hasField = true - switch iter.Type() { - case Float: - _, err := iter.FloatValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - case Integer: - _, err := iter.IntegerValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - case Unsigned: - _, err := iter.UnsignedValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - case String: - // Skip since this won't return an error - case Boolean: - _, err := iter.BooleanValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - } - } - - if !hasField { - return nil, ErrPointMustHaveAField - } - - return p, nil -} - -// MustNewPoint returns a new point with the given measurement name, tags, fields and timestamp. If -// an unsupported field value (NaN) is passed, this function panics. -func MustNewPoint(name string, tags Tags, fields Fields, time time.Time) Point { - pt, err := NewPoint(name, tags, fields, time) - if err != nil { - panic(err.Error()) - } - return pt -} - -// Key returns the key (measurement joined with tags) of the point. -func (p *point) Key() []byte { - return p.key -} - -func (p *point) name() []byte { - _, name := scanTo(p.key, 0, ',') - return name -} - -func (p *point) Name() []byte { - return escape.Unescape(p.name()) -} - -// SetName updates the measurement name for the point. -func (p *point) SetName(name string) { - p.cachedName = "" - p.key = MakeKey([]byte(name), p.Tags()) -} - -// Time return the timestamp for the point. -func (p *point) Time() time.Time { - return p.time -} - -// SetTime updates the timestamp for the point. -func (p *point) SetTime(t time.Time) { - p.time = t -} - -// Round will round the timestamp of the point to the given duration. -func (p *point) Round(d time.Duration) { - p.time = p.time.Round(d) -} - -// Tags returns the tag set for the point. -func (p *point) Tags() Tags { - if p.cachedTags != nil { - return p.cachedTags - } - p.cachedTags = parseTags(p.key, nil) - return p.cachedTags -} - -func (p *point) ForEachTag(fn func(k, v []byte) bool) { - walkTags(p.key, fn) -} - -func (p *point) HasTag(tag []byte) bool { - if len(p.key) == 0 { - return false - } - - var exists bool - walkTags(p.key, func(key, value []byte) bool { - if bytes.Equal(tag, key) { - exists = true - return false - } - return true - }) - - return exists -} - -func walkTags(buf []byte, fn func(key, value []byte) bool) { - if len(buf) == 0 { - return - } - - pos, name := scanTo(buf, 0, ',') - - // it's an empty key, so there are no tags - if len(name) == 0 { - return - } - - hasEscape := bytes.IndexByte(buf, '\\') != -1 - i := pos + 1 - var key, value []byte - for { - if i >= len(buf) { - break - } - i, key = scanTo(buf, i, '=') - i, value = scanTagValue(buf, i+1) - - if len(value) == 0 { - continue - } - - if hasEscape { - if !fn(unescapeTag(key), unescapeTag(value)) { - return - } - } else { - if !fn(key, value) { - return - } - } - - i++ - } -} - -// walkFields walks each field key and value via fn. If fn returns false, the iteration -// is stopped. The values are the raw byte slices and not the converted types. 
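Because walkFields (defined next) and walkTags are unexported, a usage sketch only compiles inside package models; the function name here is illustrative, and fmt/time imports are assumed:

func exampleWalkFields() {
    p := MustNewPoint("m", nil, Fields{"a": 1.0, "b": 2.0}, time.Unix(0, 0))
    raw := p.(*point).fields // raw field bytes, e.g. a=1,b=2

    // The callback receives raw byte slices; return false to stop early.
    _ = walkFields(raw, func(k, v []byte) bool {
        fmt.Printf("%s => %s\n", k, v) // a => 1, then b => 2
        return true
    })
}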
-func walkFields(buf []byte, fn func(key, value []byte) bool) error { - var i int - var key, val []byte - for len(buf) > 0 { - i, key = scanTo(buf, 0, '=') - if i > len(buf)-2 { - return fmt.Errorf("invalid value: field-key=%s", key) - } - buf = buf[i+1:] - i, val = scanFieldValue(buf, 0) - buf = buf[i:] - if !fn(key, val) { - break - } - - // slice off comma - if len(buf) > 0 { - buf = buf[1:] - } - } - return nil -} - -// parseTags parses buf into the provided destination tags, returning destination -// Tags, which may have a different length and capacity. -func parseTags(buf []byte, dst Tags) Tags { - if len(buf) == 0 { - return nil - } - - n := bytes.Count(buf, []byte(",")) - if cap(dst) < n { - dst = make(Tags, n) - } else { - dst = dst[:n] - } - - // Ensure existing behaviour when point has no tags and nil slice passed in. - if dst == nil { - dst = Tags{} - } - - // Series keys can contain escaped commas, therefore the number of commas - // in a series key only gives an estimation of the upper bound on the number - // of tags. - var i int - walkTags(buf, func(key, value []byte) bool { - dst[i].Key, dst[i].Value = key, value - i++ - return true - }) - return dst[:i] -} - -// MakeKey creates a key for a set of tags. -func MakeKey(name []byte, tags Tags) []byte { - return AppendMakeKey(nil, name, tags) -} - -// AppendMakeKey appends the key derived from name and tags to dst and returns the extended buffer. -func AppendMakeKey(dst []byte, name []byte, tags Tags) []byte { - // unescape the name and then re-escape it to avoid double escaping. - // The key should always be stored in escaped form. - dst = append(dst, EscapeMeasurement(unescapeMeasurement(name))...) - dst = tags.AppendHashKey(dst) - return dst -} - -// SetTags replaces the tags for the point. -func (p *point) SetTags(tags Tags) { - p.key = MakeKey(p.Name(), tags) - p.cachedTags = tags -} - -// AddTag adds or replaces a tag value for a point. -func (p *point) AddTag(key, value string) { - tags := p.Tags() - tags = append(tags, Tag{Key: []byte(key), Value: []byte(value)}) - sort.Sort(tags) - p.cachedTags = tags - p.key = MakeKey(p.Name(), tags) -} - -// Fields returns the fields for the point. -func (p *point) Fields() (Fields, error) { - if p.cachedFields != nil { - return p.cachedFields, nil - } - cf, err := p.unmarshalBinary() - if err != nil { - return nil, err - } - p.cachedFields = cf - return p.cachedFields, nil -} - -// SetPrecision will round a time to the specified precision. -func (p *point) SetPrecision(precision string) { - switch precision { - case "n": - case "u": - p.SetTime(p.Time().Truncate(time.Microsecond)) - case "ms": - p.SetTime(p.Time().Truncate(time.Millisecond)) - case "s": - p.SetTime(p.Time().Truncate(time.Second)) - case "m": - p.SetTime(p.Time().Truncate(time.Minute)) - case "h": - p.SetTime(p.Time().Truncate(time.Hour)) - } -} - -// String returns the string representation of the point. -func (p *point) String() string { - if p.Time().IsZero() { - return string(p.Key()) + " " + string(p.fields) - } - return string(p.Key()) + " " + string(p.fields) + " " + strconv.FormatInt(p.UnixNano(), 10) -} - -// AppendString appends the string representation of the point to buf. -func (p *point) AppendString(buf []byte) []byte { - buf = append(buf, p.key...) - buf = append(buf, ' ') - buf = append(buf, p.fields...) 
- - if !p.time.IsZero() { - buf = append(buf, ' ') - buf = strconv.AppendInt(buf, p.UnixNano(), 10) - } - - return buf -} - -// StringSize returns the length of the string that would be returned by String(). -func (p *point) StringSize() int { - size := len(p.key) + len(p.fields) + 1 - - if !p.time.IsZero() { - digits := 1 // even "0" has one digit - t := p.UnixNano() - if t < 0 { - // account for negative sign, then negate - digits++ - t = -t - } - for t > 9 { // already accounted for one digit - digits++ - t /= 10 - } - size += digits + 1 // digits and a space - } - - return size -} - -// MarshalBinary returns a binary representation of the point. -func (p *point) MarshalBinary() ([]byte, error) { - if len(p.fields) == 0 { - return nil, ErrPointMustHaveAField - } - - tb, err := p.time.MarshalBinary() - if err != nil { - return nil, err - } - - b := make([]byte, 8+len(p.key)+len(p.fields)+len(tb)) - i := 0 - - binary.BigEndian.PutUint32(b[i:], uint32(len(p.key))) - i += 4 - - i += copy(b[i:], p.key) - - binary.BigEndian.PutUint32(b[i:i+4], uint32(len(p.fields))) - i += 4 - - i += copy(b[i:], p.fields) - - copy(b[i:], tb) - return b, nil -} - -// UnmarshalBinary decodes a binary representation of the point into a point struct. -func (p *point) UnmarshalBinary(b []byte) error { - var n int - - // Read key length. - if len(b) < 4 { - return io.ErrShortBuffer - } - n, b = int(binary.BigEndian.Uint32(b[:4])), b[4:] - - // Read key. - if len(b) < n { - return io.ErrShortBuffer - } - p.key, b = b[:n], b[n:] - - // Read fields length. - if len(b) < 4 { - return io.ErrShortBuffer - } - n, b = int(binary.BigEndian.Uint32(b[:4])), b[4:] - - // Read fields. - if len(b) < n { - return io.ErrShortBuffer - } - p.fields, b = b[:n], b[n:] - - // Read timestamp. - return p.time.UnmarshalBinary(b) -} - -// PrecisionString returns a string representation of the point. If there -// is a timestamp associated with the point then it will be specified in the -// given unit. -func (p *point) PrecisionString(precision string) string { - if p.Time().IsZero() { - return fmt.Sprintf("%s %s", p.Key(), string(p.fields)) - } - return fmt.Sprintf("%s %s %d", p.Key(), string(p.fields), - p.UnixNano()/GetPrecisionMultiplier(precision)) -} - -// RoundedString returns a string representation of the point. If there -// is a timestamp associated with the point, then it will be rounded to the -// given duration. 
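Both precision-aware formatters can be sketched together (RoundedString is defined next; scaffolding as in the earlier sketches, and the timestamps are arbitrary):

func ExamplePrecisionFormatting() {
    p := models.MustNewPoint("cpu",
        models.NewTags(map[string]string{"host": "a"}),
        models.Fields{"v": 1.0},
        time.Unix(0, 1595951951123456789))

    // PrecisionString divides the timestamp by GetPrecisionMultiplier(precision).
    fmt.Println(p.PrecisionString("ms")) // cpu,host=a v=1 1595951951123

    // RoundedString rounds the timestamp to the given duration.
    fmt.Println(p.RoundedString(time.Second)) // cpu,host=a v=1 1595951951000000000
}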
-func (p *point) RoundedString(d time.Duration) string { - if p.Time().IsZero() { - return fmt.Sprintf("%s %s", p.Key(), string(p.fields)) - } - return fmt.Sprintf("%s %s %d", p.Key(), string(p.fields), - p.time.Round(d).UnixNano()) -} - -func (p *point) unmarshalBinary() (Fields, error) { - iter := p.FieldIterator() - fields := make(Fields, 8) - for iter.Next() { - if len(iter.FieldKey()) == 0 { - continue - } - switch iter.Type() { - case Float: - v, err := iter.FloatValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - fields[string(iter.FieldKey())] = v - case Integer: - v, err := iter.IntegerValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - fields[string(iter.FieldKey())] = v - case Unsigned: - v, err := iter.UnsignedValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - fields[string(iter.FieldKey())] = v - case String: - fields[string(iter.FieldKey())] = iter.StringValue() - case Boolean: - v, err := iter.BooleanValue() - if err != nil { - return nil, fmt.Errorf("unable to unmarshal field %s: %s", string(iter.FieldKey()), err) - } - fields[string(iter.FieldKey())] = v - } - } - return fields, nil -} - -// HashID returns a non-cryptographic checksum of the point's key. -func (p *point) HashID() uint64 { - h := NewInlineFNV64a() - h.Write(p.key) - sum := h.Sum64() - return sum -} - -// UnixNano returns the timestamp of the point as nanoseconds since Unix epoch. -func (p *point) UnixNano() int64 { - return p.Time().UnixNano() -} - -// Split will attempt to return multiple points with the same timestamp whose -// string representations are no longer than size. Points with a single field or -// a point without a timestamp may exceed the requested size. -func (p *point) Split(size int) []Point { - if p.time.IsZero() || p.StringSize() <= size { - return []Point{p} - } - - // key string, timestamp string, spaces - size -= len(p.key) + len(strconv.FormatInt(p.time.UnixNano(), 10)) + 2 - - var points []Point - var start, cur int - - for cur < len(p.fields) { - end, _ := scanTo(p.fields, cur, '=') - end, _ = scanFieldValue(p.fields, end+1) - - if cur > start && end-start > size { - points = append(points, &point{ - key: p.key, - time: p.time, - fields: p.fields[start : cur-1], - }) - start = cur - } - - cur = end + 1 - } - - points = append(points, &point{ - key: p.key, - time: p.time, - fields: p.fields[start:], - }) - - return points -} - -// Tag represents a single key/value tag pair. -type Tag struct { - Key []byte - Value []byte -} - -// NewTag returns a new Tag. -func NewTag(key, value []byte) Tag { - return Tag{ - Key: key, - Value: value, - } -} - -// Size returns the size of the key and value. -func (t Tag) Size() int { return len(t.Key) + len(t.Value) } - -// Clone returns a shallow copy of Tag. -// -// Tags associated with a Point created by ParsePointsWithPrecision will hold references to the byte slice that was parsed. -// Use Clone to create a Tag with new byte slices that do not refer to the argument to ParsePointsWithPrecision. -func (t Tag) Clone() Tag { - other := Tag{ - Key: make([]byte, len(t.Key)), - Value: make([]byte, len(t.Value)), - } - - copy(other.Key, t.Key) - copy(other.Value, t.Value) - - return other -} - -// String returns the string reprsentation of the tag. 
-func (t *Tag) String() string { - var buf bytes.Buffer - buf.WriteByte('{') - buf.WriteString(string(t.Key)) - buf.WriteByte(' ') - buf.WriteString(string(t.Value)) - buf.WriteByte('}') - return buf.String() -} - -// Tags represents a sorted list of tags. -type Tags []Tag - -// NewTags returns a new Tags from a map. -func NewTags(m map[string]string) Tags { - if len(m) == 0 { - return nil - } - a := make(Tags, 0, len(m)) - for k, v := range m { - a = append(a, NewTag([]byte(k), []byte(v))) - } - sort.Sort(a) - return a -} - -// NewTagsKeyValues returns a new Tags from a list of key, value pairs, -// ensuring the returned result is correctly sorted. Duplicate keys are removed, -// however, it which duplicate that remains is undefined. -// NewTagsKeyValues will return ErrInvalidKevValuePairs if len(kvs) is not even. -// If the input is guaranteed to be even, the error can be safely ignored. -// If a has enough capacity, it will be reused. -func NewTagsKeyValues(a Tags, kv ...[]byte) (Tags, error) { - if len(kv)%2 == 1 { - return nil, ErrInvalidKevValuePairs - } - if len(kv) == 0 { - return nil, nil - } - - l := len(kv) / 2 - if cap(a) < l { - a = make(Tags, 0, l) - } else { - a = a[:0] - } - - for i := 0; i < len(kv)-1; i += 2 { - a = append(a, NewTag(kv[i], kv[i+1])) - } - - if !a.sorted() { - sort.Sort(a) - } - - // remove duplicates - j := 0 - for i := 0; i < len(a)-1; i++ { - if !bytes.Equal(a[i].Key, a[i+1].Key) { - if j != i { - // only copy if j has deviated from i, indicating duplicates - a[j] = a[i] - } - j++ - } - } - - a[j] = a[len(a)-1] - j++ - - return a[:j], nil -} - -// NewTagsKeyValuesStrings is equivalent to NewTagsKeyValues, except that -// it will allocate new byte slices for each key, value pair. -func NewTagsKeyValuesStrings(a Tags, kvs ...string) (Tags, error) { - kv := make([][]byte, len(kvs)) - for i := range kvs { - kv[i] = []byte(kvs[i]) - } - return NewTagsKeyValues(a, kv...) -} - -// Keys returns the list of keys for a tag set. -func (a Tags) Keys() []string { - if len(a) == 0 { - return nil - } - keys := make([]string, len(a)) - for i, tag := range a { - keys[i] = string(tag.Key) - } - return keys -} - -// Values returns the list of values for a tag set. -func (a Tags) Values() []string { - if len(a) == 0 { - return nil - } - values := make([]string, len(a)) - for i, tag := range a { - values[i] = string(tag.Value) - } - return values -} - -// String returns the string representation of the tags. -func (a Tags) String() string { - var buf bytes.Buffer - buf.WriteByte('[') - for i := range a { - buf.WriteString(a[i].String()) - if i < len(a)-1 { - buf.WriteByte(' ') - } - } - buf.WriteByte(']') - return buf.String() -} - -// Size returns the number of bytes needed to store all tags. Note, this is -// the number of bytes needed to store all keys and values and does not account -// for data structures or delimiters for example. -func (a Tags) Size() int { - var total int - for i := range a { - total += a[i].Size() - } - return total -} - -// Clone returns a copy of the slice where the elements are a result of calling `Clone` on the original elements -// -// Tags associated with a Point created by ParsePointsWithPrecision will hold references to the byte slice that was parsed. -// Use Clone to create Tags with new byte slices that do not refer to the argument to ParsePointsWithPrecision. 
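Why Clone (below) matters: tags parsed from a buffer alias that buffer, so mutating it changes the tags unless they were cloned first. A sketch with the same assumed scaffolding:

func ExampleTagsClone() {
    buf := []byte("cpu,host=a value=1")
    pts, _ := models.ParsePoints(buf)

    tags := pts[0].Tags() // Key/Value slices alias buf
    owned := tags.Clone() // deep copies, detached from buf

    buf[9] = 'X' // overwrite the 'a' in host=a
    fmt.Println(tags.GetString("host"), owned.GetString("host")) // X a
}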
-func (a Tags) Clone() Tags { - if len(a) == 0 { - return nil - } - - others := make(Tags, len(a)) - for i := range a { - others[i] = a[i].Clone() - } - - return others -} - -// sorted returns true if a is sorted and is an optimization -// to avoid an allocation when calling sort.IsSorted, improving -// performance as much as 50%. -func (a Tags) sorted() bool { - for i := len(a) - 1; i > 0; i-- { - if bytes.Compare(a[i].Key, a[i-1].Key) == -1 { - return false - } - } - return true -} - -func (a Tags) Len() int { return len(a) } -func (a Tags) Less(i, j int) bool { return bytes.Compare(a[i].Key, a[j].Key) == -1 } -func (a Tags) Swap(i, j int) { a[i], a[j] = a[j], a[i] } - -// Equal returns true if a equals other. -func (a Tags) Equal(other Tags) bool { - if len(a) != len(other) { - return false - } - for i := range a { - if !bytes.Equal(a[i].Key, other[i].Key) || !bytes.Equal(a[i].Value, other[i].Value) { - return false - } - } - return true -} - -// CompareTags returns -1 if a < b, 1 if a > b, and 0 if a == b. -func CompareTags(a, b Tags) int { - // Compare each key & value until a mismatch. - for i := 0; i < len(a) && i < len(b); i++ { - if cmp := bytes.Compare(a[i].Key, b[i].Key); cmp != 0 { - return cmp - } - if cmp := bytes.Compare(a[i].Value, b[i].Value); cmp != 0 { - return cmp - } - } - - // If all tags are equal up to this point then return shorter tagset. - if len(a) < len(b) { - return -1 - } else if len(a) > len(b) { - return 1 - } - - // All tags are equal. - return 0 -} - -// Get returns the value for a key. -func (a Tags) Get(key []byte) []byte { - // OPTIMIZE: Use sort.Search if tagset is large. - - for _, t := range a { - if bytes.Equal(t.Key, key) { - return t.Value - } - } - return nil -} - -// GetString returns the string value for a string key. -func (a Tags) GetString(key string) string { - return string(a.Get([]byte(key))) -} - -// Set sets the value for a key. -func (a *Tags) Set(key, value []byte) { - for i, t := range *a { - if bytes.Equal(t.Key, key) { - (*a)[i].Value = value - return - } - } - *a = append(*a, Tag{Key: key, Value: value}) - sort.Sort(*a) -} - -// SetString sets the string value for a string key. -func (a *Tags) SetString(key, value string) { - a.Set([]byte(key), []byte(value)) -} - -// Delete removes a tag by key. -func (a *Tags) Delete(key []byte) { - for i, t := range *a { - if bytes.Equal(t.Key, key) { - copy((*a)[i:], (*a)[i+1:]) - (*a)[len(*a)-1] = Tag{} - *a = (*a)[:len(*a)-1] - return - } - } -} - -// Map returns a map representation of the tags. -func (a Tags) Map() map[string]string { - m := make(map[string]string, len(a)) - for _, t := range a { - m[string(t.Key)] = string(t.Value) - } - return m -} - -// Merge merges the tags combining the two. If both define a tag with the -// same key, the merged value overwrites the old value. -// A new map is returned. -func (a Tags) Merge(other map[string]string) Tags { - merged := make(map[string]string, len(a)+len(other)) - for _, t := range a { - merged[string(t.Key)] = string(t.Value) - } - for k, v := range other { - merged[k] = v - } - return NewTags(merged) -} - -// HashKey hashes all of a tag's keys. 
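HashKey (below, via AppendHashKey) produces the canonical escaped ,k=v concatenation in sorted key order, the same form the tests for this file assert on. A short sketch:

func ExampleHashKey() {
    tags := models.NewTags(map[string]string{"host": "server A", "region": "uswest"})
    // Keys and values are tag-escaped before being joined.
    fmt.Printf("%s\n", tags.HashKey()) // ,host=server\ A,region=uswest
}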
-func (a Tags) HashKey() []byte { - return a.AppendHashKey(nil) -} - -func (a Tags) needsEscape() bool { - for i := range a { - t := &a[i] - for j := range tagEscapeCodes { - c := &tagEscapeCodes[j] - if bytes.IndexByte(t.Key, c.k[0]) != -1 || bytes.IndexByte(t.Value, c.k[0]) != -1 { - return true - } - } - } - return false -} - -// AppendHashKey appends the result of hashing all of a tag's keys and values to dst and returns the extended buffer. -func (a Tags) AppendHashKey(dst []byte) []byte { - // Empty maps marshal to empty bytes. - if len(a) == 0 { - return dst - } - - // Type invariant: Tags are sorted - - sz := 0 - var escaped Tags - if a.needsEscape() { - var tmp [20]Tag - if len(a) < len(tmp) { - escaped = tmp[:len(a)] - } else { - escaped = make(Tags, len(a)) - } - - for i := range a { - t := &a[i] - nt := &escaped[i] - nt.Key = escapeTag(t.Key) - nt.Value = escapeTag(t.Value) - sz += len(nt.Key) + len(nt.Value) - } - } else { - sz = a.Size() - escaped = a - } - - sz += len(escaped) + (len(escaped) * 2) // separators - - // Generate marshaled bytes. - if cap(dst)-len(dst) < sz { - nd := make([]byte, len(dst), len(dst)+sz) - copy(nd, dst) - dst = nd - } - buf := dst[len(dst) : len(dst)+sz] - idx := 0 - for i := range escaped { - k := &escaped[i] - if len(k.Value) == 0 { - continue - } - buf[idx] = ',' - idx++ - copy(buf[idx:], k.Key) - idx += len(k.Key) - buf[idx] = '=' - idx++ - copy(buf[idx:], k.Value) - idx += len(k.Value) - } - return dst[:len(dst)+idx] -} - -// CopyTags returns a shallow copy of tags. -func CopyTags(a Tags) Tags { - other := make(Tags, len(a)) - copy(other, a) - return other -} - -// DeepCopyTags returns a deep copy of tags. -func DeepCopyTags(a Tags) Tags { - // Calculate size of keys/values in bytes. - var n int - for _, t := range a { - n += len(t.Key) + len(t.Value) - } - - // Build single allocation for all key/values. - buf := make([]byte, n) - - // Copy tags to new set. - other := make(Tags, len(a)) - for i, t := range a { - copy(buf, t.Key) - other[i].Key, buf = buf[:len(t.Key)], buf[len(t.Key):] - - copy(buf, t.Value) - other[i].Value, buf = buf[:len(t.Value)], buf[len(t.Value):] - } - - return other -} - -// Fields represents a mapping between a Point's field names and their -// values. -type Fields map[string]interface{} - -// FieldIterator returns a FieldIterator that can be used to traverse the -// fields of a point without constructing the in-memory map. -func (p *point) FieldIterator() FieldIterator { - p.Reset() - return p -} - -type fieldIterator struct { - start, end int - key, keybuf []byte - valueBuf []byte - fieldType FieldType -} - -// Next indicates whether there any fields remaining. 
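Callers typically drive Next (below) through the FieldIterator interface to walk a point's fields without materializing the Fields map. A sketch, with the usual assumed scaffolding:

func ExampleFieldIterator() {
    pts, _ := models.ParsePointsString(`m f=1.5,n=2i,s="x",b=true`)

    for it := pts[0].FieldIterator(); it.Next(); {
        switch it.Type() {
        case models.Float:
            v, _ := it.FloatValue()
            fmt.Printf("%s=%v\n", it.FieldKey(), v)
        case models.Integer:
            v, _ := it.IntegerValue()
            fmt.Printf("%s=%v\n", it.FieldKey(), v)
        case models.String:
            fmt.Printf("%s=%q\n", it.FieldKey(), it.StringValue())
        case models.Boolean:
            v, _ := it.BooleanValue()
            fmt.Printf("%s=%v\n", it.FieldKey(), v)
        }
    }
}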
-func (p *point) Next() bool { - p.it.start = p.it.end - if p.it.start >= len(p.fields) { - return false - } - - p.it.end, p.it.key = scanTo(p.fields, p.it.start, '=') - if escape.IsEscaped(p.it.key) { - p.it.keybuf = escape.AppendUnescaped(p.it.keybuf[:0], p.it.key) - p.it.key = p.it.keybuf - } - - p.it.end, p.it.valueBuf = scanFieldValue(p.fields, p.it.end+1) - p.it.end++ - - if len(p.it.valueBuf) == 0 { - p.it.fieldType = Empty - return true - } - - c := p.it.valueBuf[0] - - if c == '"' { - p.it.fieldType = String - return true - } - - if strings.IndexByte(`0123456789-.nNiIu`, c) >= 0 { - if p.it.valueBuf[len(p.it.valueBuf)-1] == 'i' { - p.it.fieldType = Integer - p.it.valueBuf = p.it.valueBuf[:len(p.it.valueBuf)-1] - } else if p.it.valueBuf[len(p.it.valueBuf)-1] == 'u' { - p.it.fieldType = Unsigned - p.it.valueBuf = p.it.valueBuf[:len(p.it.valueBuf)-1] - } else { - p.it.fieldType = Float - } - return true - } - - // to keep the same behavior that currently exists, default to boolean - p.it.fieldType = Boolean - return true -} - -// FieldKey returns the key of the current field. -func (p *point) FieldKey() []byte { - return p.it.key -} - -// Type returns the FieldType of the current field. -func (p *point) Type() FieldType { - return p.it.fieldType -} - -// StringValue returns the string value of the current field. -func (p *point) StringValue() string { - return unescapeStringField(string(p.it.valueBuf[1 : len(p.it.valueBuf)-1])) -} - -// IntegerValue returns the integer value of the current field. -func (p *point) IntegerValue() (int64, error) { - n, err := parseIntBytes(p.it.valueBuf, 10, 64) - if err != nil { - return 0, fmt.Errorf("unable to parse integer value %q: %v", p.it.valueBuf, err) - } - return n, nil -} - -// UnsignedValue returns the unsigned value of the current field. -func (p *point) UnsignedValue() (uint64, error) { - n, err := parseUintBytes(p.it.valueBuf, 10, 64) - if err != nil { - return 0, fmt.Errorf("unable to parse unsigned value %q: %v", p.it.valueBuf, err) - } - return n, nil -} - -// BooleanValue returns the boolean value of the current field. -func (p *point) BooleanValue() (bool, error) { - b, err := parseBoolBytes(p.it.valueBuf) - if err != nil { - return false, fmt.Errorf("unable to parse bool value %q: %v", p.it.valueBuf, err) - } - return b, nil -} - -// FloatValue returns the float value of the current field. -func (p *point) FloatValue() (float64, error) { - f, err := parseFloatBytes(p.it.valueBuf, 64) - if err != nil { - return 0, fmt.Errorf("unable to parse floating point value %q: %v", p.it.valueBuf, err) - } - return f, nil -} - -// Reset resets the iterator to its initial state. -func (p *point) Reset() { - p.it.fieldType = Empty - p.it.key = nil - p.it.valueBuf = nil - p.it.start = 0 - p.it.end = 0 -} - -// MarshalBinary encodes all the fields to their proper type and returns the binary -// representation -// NOTE: uint64 is specifically not supported due to potential overflow when we decode -// again later to an int64 -// NOTE2: uint is accepted, and may be 64 bits, and is for some reason accepted... -func (p Fields) MarshalBinary() []byte { - sz := len(p) - 1 // separators - keys := make([]string, 0, len(p)) - for k := range p { - keys = append(keys, k) - sz += len(k) - } - - // Only sort if we have multiple fields to sort. - // This length check removes an allocation incurred by the sort. 
- if len(keys) > 1 { - sort.Strings(keys) - } - - b := make([]byte, 0, sz) - for i, k := range keys { - if i > 0 { - b = append(b, ',') - } - b = appendField(b, k, p[k]) - } - return b -} - -func appendField(b []byte, k string, v interface{}) []byte { - b = append(b, []byte(escape.String(k))...) - b = append(b, '=') - - // check popular types first - switch v := v.(type) { - case float64: - b = strconv.AppendFloat(b, v, 'f', -1, 64) - case int64: - b = strconv.AppendInt(b, v, 10) - b = append(b, 'i') - case string: - b = append(b, '"') - b = append(b, []byte(EscapeStringField(v))...) - b = append(b, '"') - case bool: - b = strconv.AppendBool(b, v) - case int32: - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case int16: - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case int8: - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case int: - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case uint64: - b = strconv.AppendUint(b, v, 10) - b = append(b, 'u') - case uint32: - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case uint16: - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case uint8: - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case uint: - // TODO: 'uint' should be converted to writing as an unsigned integer, - // but we cannot since that would break backwards compatibility. - b = strconv.AppendInt(b, int64(v), 10) - b = append(b, 'i') - case float32: - b = strconv.AppendFloat(b, float64(v), 'f', -1, 32) - case []byte: - b = append(b, v...) - case nil: - // skip - default: - // Can't determine the type, so convert to string - b = append(b, '"') - b = append(b, []byte(EscapeStringField(fmt.Sprintf("%v", v)))...) - b = append(b, '"') - - } - - return b -} - -// ValidKeyToken returns true if the token used for measurement, tag key, or tag -// value is a valid unicode string and only contains printable, non-replacement characters. -func ValidKeyToken(s string) bool { - if !utf8.ValidString(s) { - return false - } - for _, r := range s { - if !unicode.IsPrint(r) || r == unicode.ReplacementChar { - return false - } - } - return true -} - -// ValidKeyTokens returns true if the measurement name and all tags are valid. -func ValidKeyTokens(name string, tags Tags) bool { - if !ValidKeyToken(name) { - return false - } - for _, tag := range tags { - if !ValidKeyToken(string(tag.Key)) || !ValidKeyToken(string(tag.Value)) { - return false - } - } - return true -} diff --git a/v1/models/points_internal_test.go b/v1/models/points_internal_test.go deleted file mode 100644 index 3a760d37b0..0000000000 --- a/v1/models/points_internal_test.go +++ /dev/null @@ -1,17 +0,0 @@ -package models - -import "testing" - -func TestMarshalPointNoFields(t *testing.T) { - points, err := ParsePointsString("m,k=v f=0i") - if err != nil { - t.Fatal(err) - } - - // It's unclear how this can ever happen, but we've observed points that were marshalled without any fields. 
- points[0].(*point).fields = []byte{} - - if _, err := points[0].MarshalBinary(); err != ErrPointMustHaveAField { - t.Fatalf("got error %v, exp %v", err, ErrPointMustHaveAField) - } -} diff --git a/v1/models/points_test.go b/v1/models/points_test.go deleted file mode 100644 index 7514ef2787..0000000000 --- a/v1/models/points_test.go +++ /dev/null @@ -1,2601 +0,0 @@ -package models_test - -import ( - "bytes" - "fmt" - "io" - "math" - "math/rand" - "reflect" - "strconv" - "strings" - "testing" - "time" - - "github.com/influxdata/influxdb/v2/v1/models" -) - -var ( - tags = models.NewTags(map[string]string{"foo": "bar", "apple": "orange", "host": "serverA", "region": "uswest"}) - fields = models.Fields{ - "int64": int64(math.MaxInt64), - "uint32": uint32(math.MaxUint32), - "string": "String field that has a decent length, probably some log message or something", - "boolean": false, - "float64-tiny": float64(math.SmallestNonzeroFloat64), - "float64-large": float64(math.MaxFloat64), - } - maxFloat64 = strconv.FormatFloat(math.MaxFloat64, 'f', 1, 64) - minFloat64 = strconv.FormatFloat(-math.MaxFloat64, 'f', 1, 64) - - sink interface{} -) - -func TestMarshal(t *testing.T) { - got := tags.HashKey() - if exp := ",apple=orange,foo=bar,host=serverA,region=uswest"; string(got) != exp { - t.Log("got: ", string(got)) - t.Log("exp: ", exp) - t.Error("invalid match") - } -} - -func TestMarshalFields(t *testing.T) { - for _, tt := range []struct { - name string - value interface{} - exp string - }{ - { - name: "Float", - value: float64(2), - exp: `value=2`, - }, - { - name: "Integer", - value: int64(2), - exp: `value=2i`, - }, - { - name: "Unsigned", - value: uint64(2), - exp: `value=2u`, - }, - { - name: "String", - value: "foobar", - exp: `value="foobar"`, - }, - { - name: "Boolean", - value: true, - exp: `value=true`, - }, - } { - t.Run(tt.name, func(t *testing.T) { - fields := map[string]interface{}{"value": tt.value} - if have, want := models.Fields(fields).MarshalBinary(), []byte(tt.exp); !bytes.Equal(have, want) { - t.Fatalf("unexpected field output: %s != %s", string(have), string(want)) - } - }) - } -} - -func TestTags_HashKey(t *testing.T) { - tags = models.NewTags(map[string]string{"A FOO": "bar", "APPLE": "orange", "host": "serverA", "region": "uswest"}) - got := tags.HashKey() - if exp := ",A\\ FOO=bar,APPLE=orange,host=serverA,region=uswest"; string(got) != exp { - t.Log("got: ", string(got)) - t.Log("exp: ", exp) - t.Error("invalid match") - } -} - -func BenchmarkMarshal(b *testing.B) { - for i := 0; i < b.N; i++ { - tags.HashKey() - } -} -func TestPoint_Tags(t *testing.T) { - examples := []struct { - Point string - Tags models.Tags - }{ - {`cpu value=1`, models.Tags{}}, - {"cpu,tag0=v0 value=1", models.NewTags(map[string]string{"tag0": "v0"})}, - {"cpu,tag0=v0,tag1=v0 value=1", models.NewTags(map[string]string{"tag0": "v0", "tag1": "v0"})}, - {`cpu,tag0=v\ 0 value=1`, models.NewTags(map[string]string{"tag0": "v 0"})}, - {`cpu,tag0=v\ 0\ 1,tag1=v2 value=1`, models.NewTags(map[string]string{"tag0": "v 0 1", "tag1": "v2"})}, - {`cpu,tag0=\, value=1`, models.NewTags(map[string]string{"tag0": ","})}, - {`cpu,ta\ g0=\, value=1`, models.NewTags(map[string]string{"ta g0": ","})}, - {`cpu,tag0=\,1 value=1`, models.NewTags(map[string]string{"tag0": ",1"})}, - {`cpu,tag0=1\"\",t=k value=1`, models.NewTags(map[string]string{"tag0": `1\"\"`, "t": "k"})}, - } - - for _, example := range examples { - t.Run(example.Point, func(t *testing.T) { - pts, err := models.ParsePointsString(example.Point) - if err 
!= nil {
-				t.Fatal(err)
-			} else if len(pts) != 1 {
-				t.Fatalf("parsed %d points, expected 1", len(pts))
-			}
-
-			// Repeat to test Tags() caching
-			for i := 0; i < 2; i++ {
-				tags := pts[0].Tags()
-				if !reflect.DeepEqual(tags, example.Tags) {
-					t.Fatalf("got %#v (%s), expected %#v", tags, tags.String(), example.Tags)
-				}
-			}
-
-		})
-	}
-}
-
-func TestPoint_StringSize(t *testing.T) {
-	testPoint_cube(t, func(p models.Point) {
-		l := p.StringSize()
-		s := p.String()
-
-		if l != len(s) {
-			t.Errorf("Incorrect length for %q. got %v, exp %v", s, l, len(s))
-		}
-	})
-}
-
-func TestPoint_AppendString(t *testing.T) {
-	testPoint_cube(t, func(p models.Point) {
-		got := p.AppendString(nil)
-		exp := []byte(p.String())
-
-		if !reflect.DeepEqual(exp, got) {
-			t.Errorf("AppendString() didn't match String(): got %v, exp %v", got, exp)
-		}
-	})
-}
-
-func testPoint_cube(t *testing.T, f func(p models.Point)) {
-	// heard of a table-driven test? let's make a cube-driven test...
-	tagList := []models.Tags{nil, {models.NewTag([]byte("foo"), []byte("bar"))}, tags}
-	fieldList := []models.Fields{{"a": 42.0}, {"a": 42, "b": "things"}, fields}
-	timeList := []time.Time{time.Time{}, time.Unix(0, 0), time.Unix(-34526, 0), time.Unix(231845, 0), time.Now()}
-
-	for _, tagSet := range tagList {
-		for _, fieldSet := range fieldList {
-			for _, pointTime := range timeList {
-				p, err := models.NewPoint("test", tagSet, fieldSet, pointTime)
-				if err != nil {
-					t.Errorf("unexpected error creating point: %v", err)
-					continue
-				}
-
-				f(p)
-			}
-		}
-	}
-}
-
-func TestTag_Clone(t *testing.T) {
-	tag := models.NewTag([]byte("key"), []byte("value"))
-
-	c := tag.Clone()
-
-	if &c.Key == &tag.Key || !bytes.Equal(c.Key, tag.Key) {
-		t.Fatalf("key %s should have been a clone of %s", c.Key, tag.Key)
-	}
-
-	if &c.Value == &tag.Value || !bytes.Equal(c.Value, tag.Value) {
-		t.Fatalf("value %s should have been a clone of %s", c.Value, tag.Value)
-	}
-}
-
-func TestTags_Clone(t *testing.T) {
-	tags := models.NewTags(map[string]string{"k1": "v1", "k2": "v2", "k3": "v3"})
-
-	clone := tags.Clone()
-
-	for i := range tags {
-		tag := tags[i]
-		c := clone[i]
-		if &c.Key == &tag.Key || !bytes.Equal(c.Key, tag.Key) {
-			t.Fatalf("key %s should have been a clone of %s", c.Key, tag.Key)
-		}
-
-		if &c.Value == &tag.Value || !bytes.Equal(c.Value, tag.Value) {
-			t.Fatalf("value %s should have been a clone of %s", c.Value, tag.Value)
-		}
-	}
-}
-
-var p models.Point
-
-func BenchmarkNewPoint(b *testing.B) {
-	ts := time.Now()
-	for i := 0; i < b.N; i++ {
-		p, _ = models.NewPoint("measurement", tags, fields, ts)
-	}
-}
-
-func BenchmarkNewPointFromBinary(b *testing.B) {
-	pts, err := models.ParsePointsString("cpu value1=1.0,value2=1.0,value3=3.0,value4=4,value5=\"five\" 1000000000")
-	if err != nil {
-		b.Fatalf("unexpected error ParsePointsString: %v", err)
-	}
-
-	bytes, err := pts[0].MarshalBinary()
-	if err != nil {
-		b.Fatalf("unexpected error MarshalBinary: %v", err)
-	}
-
-	for i := 0; i < b.N; i++ {
-		_, err := models.NewPointFromBytes(bytes)
-		if err != nil {
-			b.Fatalf("unexpected error NewPointFromBytes: %v", err)
-		}
-	}
-}
-
-func BenchmarkParsePointNoTags5000(b *testing.B) {
-	var batch [5000]string
-	for i := 0; i < len(batch); i++ {
-		batch[i] = `cpu value=1i 1000000000`
-	}
-	lines := strings.Join(batch[:], "\n")
-	b.ResetTimer()
-	for i := 0; i < b.N; i++ {
-		models.ParsePoints([]byte(lines))
-		b.SetBytes(int64(len(lines)))
-	}
-}
-
-func BenchmarkParsePointNoTags(b *testing.B) {
-	line := `cpu value=1i 1000000000`
-	for i := 0; i < b.N; i++ {
-		models.ParsePoints([]byte(line))
-		b.SetBytes(int64(len(line)))
-	}
-}
-
-func BenchmarkParsePointWithPrecisionN(b *testing.B) {
-	line := `cpu value=1i 1000000000`
-	defaultTime := time.Now().UTC()
-	for i := 0; i < b.N; i++ {
-		models.ParsePointsWithPrecision([]byte(line), defaultTime, "n")
-		b.SetBytes(int64(len(line)))
-	}
-}
-
-func BenchmarkParsePointWithPrecisionU(b *testing.B) {
-	line := `cpu value=1i 1000000000`
-	defaultTime := time.Now().UTC()
-	for i := 0; i < b.N; i++ {
-		models.ParsePointsWithPrecision([]byte(line), defaultTime, "u")
-		b.SetBytes(int64(len(line)))
-	}
-}
-
-func BenchmarkParsePointsTagsSorted2(b *testing.B) {
-	line := `cpu,host=serverA,region=us-west value=1i 1000000000`
-	for i := 0; i < b.N; i++ {
-		models.ParsePoints([]byte(line))
-		b.SetBytes(int64(len(line)))
-	}
-}
-
-func BenchmarkParsePointsTagsSorted5(b *testing.B) {
-	line := `cpu,env=prod,host=serverA,region=us-west,target=servers,zone=1c value=1i 1000000000`
-	for i := 0; i < b.N; i++ {
-		models.ParsePoints([]byte(line))
-		b.SetBytes(int64(len(line)))
-	}
-}
-
-func BenchmarkParsePointsTagsSorted10(b *testing.B) {
-	line := `cpu,env=prod,host=serverA,region=us-west,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5,target=servers,zone=1c value=1i 1000000000`
-	for i := 0; i < b.N; i++ {
-		models.ParsePoints([]byte(line))
-		b.SetBytes(int64(len(line)))
-	}
-}
-
-func BenchmarkParsePointsTagsUnSorted2(b *testing.B) {
-	line := `cpu,region=us-west,host=serverA value=1i 1000000000`
-	for i := 0; i < b.N; i++ {
-		pt, _ := models.ParsePoints([]byte(line))
-		b.SetBytes(int64(len(line)))
-		pt[0].Key()
-	}
-}
-
-func BenchmarkParsePointsTagsUnSorted5(b *testing.B) {
-	line := `cpu,region=us-west,host=serverA,env=prod,target=servers,zone=1c value=1i 1000000000`
-	for i := 0; i < b.N; i++ {
-		pt, _ := models.ParsePoints([]byte(line))
-		b.SetBytes(int64(len(line)))
-		pt[0].Key()
-	}
-}
-
-func BenchmarkParsePointsTagsUnSorted10(b *testing.B) {
-	line := `cpu,region=us-west,host=serverA,env=prod,target=servers,zone=1c,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5 value=1i 1000000000`
-	for i := 0; i < b.N; i++ {
-		pt, _ := models.ParsePoints([]byte(line))
-		b.SetBytes(int64(len(line)))
-		pt[0].Key()
-	}
-}
-
-func BenchmarkParseKey(b *testing.B) {
-	line := `cpu,region=us-west,host=serverA,env=prod,target=servers,zone=1c,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5`
-	for i := 0; i < b.N; i++ {
-		models.ParseKey([]byte(line))
-	}
-}
-
-// TestPoint wraps a models.Point but also makes available the raw
-// arguments to the Point.
-//
-// This is useful for ensuring that comparisons between results of
-// operations on Points match the expected input data to the Point,
-// since models.Point does not expose the raw input data (e.g., tags)
-// via its API.
-type TestPoint struct {
-	RawFields models.Fields
-	RawTags   models.Tags
-	RawTime   time.Time
-	models.Point
-}
-
-// NewTestPoint returns a new TestPoint.
-//
-// NewTestPoint panics if the arguments do not form a valid models.Point.
-func NewTestPoint(name string, tags models.Tags, fields models.Fields, time time.Time) TestPoint {
-	return TestPoint{
-		RawTags:   tags,
-		RawFields: fields,
-		RawTime:   time,
-		Point:     models.MustNewPoint(name, tags, fields, time),
-	}
-}
-
-func test(t *testing.T, line string, point TestPoint) {
-	pts, err := models.ParsePointsWithPrecision([]byte(line), time.Unix(0, 0), "n")
-	if err != nil {
-		t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, line, err)
-	}
-
-	if exp := 1; len(pts) != exp {
-		t.Fatalf(`ParsePoints("%s") len mismatch. got %d, exp %d`, line, len(pts), exp)
-	}
-
-	if exp := point.Key(); !bytes.Equal(pts[0].Key(), exp) {
-		t.Errorf("ParsePoints(\"%s\") key mismatch.\ngot %v\nexp %v", line, string(pts[0].Key()), string(exp))
-	}
-
-	if exp := len(point.Tags()); len(pts[0].Tags()) != exp {
-		t.Errorf(`ParsePoints("%s") tags mismatch. got %v, exp %v`, line, pts[0].Tags(), exp)
-	}
-
-	for _, tag := range pts[0].Tags() {
-		if !bytes.Equal(tag.Value, point.RawTags.Get(tag.Key)) {
-			t.Errorf(`ParsePoints("%s") tags mismatch. got %s, exp %s`, line, tag.Value, point.RawTags.Get(tag.Key))
-		}
-	}
-
-	for name, value := range point.RawFields {
-		fields, err := pts[0].Fields()
-		if err != nil {
-			t.Fatal(err)
-		}
-		val := fields[name]
-		gotfval, ok := val.(float64)
-
-		if ok && math.IsNaN(gotfval) {
-			expfval, ok := value.(float64)
-			if ok && !math.IsNaN(expfval) {
-				t.Errorf(`ParsePoints("%s") field '%s' mismatch. got NaN, exp %v`, line, name, expfval)
-			}
-		}
-		if !reflect.DeepEqual(val, value) {
-			t.Errorf(`ParsePoints("%s") field '%s' mismatch. got %[3]v (%[3]T), exp %[4]v (%[4]T)`, line, name, val, value)
-		}
-	}
-
-	if !pts[0].Time().Equal(point.Time()) {
-		t.Errorf(`ParsePoints("%s") time mismatch. got %v, exp %v`, line, pts[0].Time(), point.Time())
-	}
-
-	if !strings.HasPrefix(pts[0].String(), line) {
-		t.Errorf("ParsePoints string mismatch.\ngot: %v\nexp: %v", pts[0].String(), line)
-	}
-}
-
-func TestParsePointNoValue(t *testing.T) {
-	pts, err := models.ParsePointsString("")
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, "", err)
-	}
-
-	if exp := 0; len(pts) != exp {
-		t.Errorf(`ParsePoints("%s") len mismatch. got %v, exp %v`, "", len(pts), exp)
-	}
-}
-
-func TestParsePointWhitespaceValue(t *testing.T) {
-	pts, err := models.ParsePointsString(" ")
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, " ", err)
-	}
-
-	if exp := 0; len(pts) != exp {
-		t.Errorf(`ParsePoints("%s") len mismatch. got %v, exp %v`, " ", len(pts), exp)
-	}
-}
-
-func TestParsePointNoFields(t *testing.T) {
-	expectedSuffix := "missing fields"
-	examples := []string{
-		"cpu_load_short,host=server01,region=us-west",
-		"cpu",
-		"cpu,host==",
-		"=",
-	}
-
-	for i, example := range examples {
-		_, err := models.ParsePointsString(example)
-		if err == nil {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example)
-		} else if !strings.HasSuffix(err.Error(), expectedSuffix) {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got %q, exp suffix %q`, i, example, err, expectedSuffix)
-		}
-	}
-}
-
-func TestParsePointNoTimestamp(t *testing.T) {
-	test(t, "cpu value=1", NewTestPoint("cpu", nil, models.Fields{"value": 1.0}, time.Unix(0, 0)))
-}
-
-func TestParsePointMissingQuote(t *testing.T) {
-	expectedSuffix := "unbalanced quotes"
-	examples := []string{
-		`cpu,host=serverA value="test`,
-		`cpu,host=serverA value="test""`,
-	}
-
-	for i, example := range examples {
-		_, err := models.ParsePointsString(example)
-		if err == nil {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example)
-		} else if !strings.HasSuffix(err.Error(), expectedSuffix) {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got %q, exp suffix %q`, i, example, err, expectedSuffix)
-		}
-	}
-}
-
-func TestParsePointMissingTagKey(t *testing.T) {
-	expectedSuffix := "missing tag key"
-	examples := []string{
-		`cpu, value=1`,
-		`cpu,`,
-		`cpu,,,`,
-		`cpu,host=serverA,=us-east value=1i`,
-		`cpu,host=serverAa\,,=us-east value=1i`,
-		`cpu,host=serverA\,,=us-east value=1i`,
-		`cpu, =serverA value=1i`,
-	}
-
-	for i, example := range examples {
-		_, err := models.ParsePointsString(example)
-		if err == nil {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example)
-		} else if !strings.HasSuffix(err.Error(), expectedSuffix) {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got %q, exp suffix %q`, i, example, err, expectedSuffix)
-		}
-	}
-
-	_, err := models.ParsePointsString(`cpu,host=serverA,\ =us-east value=1i`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,\ =us-east value=1i`, err)
-	}
-}
-
-func TestParsePointMissingTagValue(t *testing.T) {
-	expectedSuffix := "missing tag value"
-	examples := []string{
-		`cpu,host`,
-		`cpu,host,`,
-		`cpu,host=`,
-		`cpu,host value=1i`,
-		`cpu,host=serverA,region value=1i`,
-		`cpu,host=serverA,region= value=1i`,
-		`cpu,host=serverA,region=,zone=us-west value=1i`,
-	}
-
-	for i, example := range examples {
-		_, err := models.ParsePointsString(example)
-		if err == nil {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example)
-		} else if !strings.HasSuffix(err.Error(), expectedSuffix) {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got %q, exp suffix %q`, i, example, err, expectedSuffix)
-		}
-	}
-}
-
-func TestParsePointInvalidTagFormat(t *testing.T) {
-	expectedSuffix := "invalid tag format"
-	examples := []string{
-		`cpu,host=f=o,`,
-		`cpu,host=f\==o,`,
-	}
-
-	for i, example := range examples {
-		_, err := models.ParsePointsString(example)
-		if err == nil {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got nil, exp error`, i, example)
-		} else if !strings.HasSuffix(err.Error(), expectedSuffix) {
-			t.Errorf(`[Example %d] ParsePoints("%s") mismatch. got %q, exp suffix %q`, i, example, err, expectedSuffix)
-		}
-	}
-}
-
-func TestParsePointMissingFieldName(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west =`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west =`)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west =123i`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west =123i`)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west a\ =123i`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west a\ =123i`, err)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=123i,=456i`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=123i,=456i`)
-	}
-}
-
-func TestParsePointMissingFieldValue(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=`)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value= 1000000000i`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value= 1000000000i`)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=,value2=1i`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=,value2=1i`)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=server01,region=us-west 1434055562000000000i`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=server01,region=us-west 1434055562000000000i`)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=server01,region=us-west value=1i,b`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=server01,region=us-west value=1i,b`)
-	}
-
-	_, err = models.ParsePointsString(`m f="blah"=123,r 1531703600000000000`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `m f="blah"=123,r 1531703600000000000`)
-	}
-}
-
-func TestParsePointBadNumber(t *testing.T) {
-	for _, tt := range []string{
-		"cpu v=- ",
-		"cpu v=-i ",
-		"cpu v=-. ",
-		"cpu v=. ",
-		"cpu v=1.0i ",
-		"cpu v=1ii ",
-		"cpu v=1a ",
-		"cpu v=-e-e-e ",
-		"cpu v=42+3 ",
-		"cpu v= ",
-		"cpu v=-123u",
-	} {
-		_, err := models.ParsePointsString(tt)
-		if err == nil {
-			t.Errorf("Point %q should be invalid", tt)
-		}
-	}
-}
-
-func TestParsePointMaxInt64(t *testing.T) {
-	// out of range
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9223372036854775808i`)
-	exp := `unable to parse 'cpu,host=serverA,region=us-west value=9223372036854775808i': unable to parse integer 9223372036854775808: strconv.ParseInt: parsing "9223372036854775808": value out of range`
-	if err == nil || (err != nil && err.Error() != exp) {
-		t.Fatalf("Error mismatch:\nexp: %s\ngot: %v", exp, err)
-	}
-
-	// max int
-	p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9223372036854775807i`)
-	if err != nil {
-		t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=9223372036854775807i`, err)
-	}
-	fields, err := p[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if exp, got := int64(9223372036854775807), fields["value"].(int64); exp != got {
-		t.Fatalf("ParsePoints Value mismatch.\nexp: %v\ngot: %v", exp, got)
-	}
-
-	// leading zeros
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=0009223372036854775807i`)
-	if err != nil {
-		t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=0009223372036854775807i`, err)
-	}
-}
-
-func TestParsePointMinInt64(t *testing.T) {
-	// out of range
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-9223372036854775809i`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=-9223372036854775809i`)
-	}
-
-	// min int
-	p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-9223372036854775808i`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-9223372036854775808i`, err)
-	}
-	fields, err := p[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if exp, got := int64(-9223372036854775808), fields["value"].(int64); exp != got {
-		t.Fatalf("ParsePoints Value mismatch.\nexp: %v\ngot: %v", exp, got)
-	}
-
-	// leading zeros
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=-0009223372036854775808i`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-0009223372036854775808i`, err)
-	}
-}
-
-func TestParsePointMaxFloat64(t *testing.T) {
-	// out of range
-	_, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "1"+string(maxFloat64)))
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=...`)
-	}
-
-	// max float
-	p, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, string(maxFloat64)))
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=...`, err)
-	}
-	fields, err := p[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if exp, got := math.MaxFloat64, fields["value"].(float64); exp != got {
-		t.Fatalf("ParsePoints Value mismatch.\nexp: %v\ngot: %v", exp, got)
-	}
-
-	// leading zeros
-	_, err = models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "0000"+string(maxFloat64)))
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=...`, err)
-	}
-}
-
-func TestParsePointMinFloat64(t *testing.T) {
-	// out of range
-	_, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "-1"+string(minFloat64)[1:]))
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=...`)
-	}
-
-	// min float
-	p, err := models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, string(minFloat64)))
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=...`, err)
-	}
-	fields, err := p[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if exp, got := -math.MaxFloat64, fields["value"].(float64); exp != got {
-		t.Fatalf("ParsePoints Value mismatch.\nexp: %v\ngot: %v", exp, got)
-	}
-
-	// leading zeros
-	_, err = models.ParsePointsString(fmt.Sprintf(`cpu,host=serverA,region=us-west value=%s`, "-0000000"+string(minFloat64)[1:]))
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=...`, err)
-	}
-}
-
-func TestParsePointMaxUint64(t *testing.T) {
-	// out of range
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=18446744073709551616u`)
-	exp := `unable to parse 'cpu,host=serverA,region=us-west value=18446744073709551616u': unable to parse unsigned 18446744073709551616: strconv.ParseUint: parsing "18446744073709551616": value out of range`
-	if err == nil || (err != nil && err.Error() != exp) {
-		t.Fatalf("Error mismatch:\nexp: %s\ngot: %v", exp, err)
-	}
-
-	// max uint
-	p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=18446744073709551615u`)
-	if err != nil {
-		t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=18446744073709551615u`, err)
-	}
-	fields, err := p[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if exp, got := uint64(18446744073709551615), fields["value"].(uint64); exp != got {
-		t.Fatalf("ParsePoints Value mismatch.\nexp: %v\ngot: %v", exp, got)
-	}
-
-	// leading zeros
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=00018446744073709551615u`)
-	if err != nil {
-		t.Fatalf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=00018446744073709551615u`, err)
-	}
-}
-
-func TestParsePointMinUint64(t *testing.T) {
-	// out of range
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=--1u`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=--1u`)
-	}
-
-	// min uint
-	p, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=0u`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=0u`, err)
-	}
-	fields, err := p[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if exp, got := uint64(0), fields["value"].(uint64); exp != got {
-		t.Fatalf("ParsePoints Value mismatch.\nexp: %v\ngot: %v", exp, got)
-	}
-
-	// leading zeros
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=0000u`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=0000u`, err)
-	}
-}
-
-func TestParsePointNumberNonNumeric(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=.1a`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=.1a`)
-	}
-}
-
-func TestParsePointNegativeWrongPlace(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=0.-1`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=0.-1`)
-	}
-}
-
-func TestParsePointOnlyNegativeSign(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=-`)
-	}
-}
-
-func TestParsePointFloatMultipleDecimals(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.1.1`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=1.1.1`)
-	}
-}
-
-func TestParsePointInteger(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1i`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1i`, err)
-	}
-}
-
-func TestParsePointNegativeInteger(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1i`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-1i`, err)
-	}
-}
-
-func TestParsePointNegativeFloat(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1.0`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-1.0`, err)
-	}
-}
-
-func TestParsePointFloatNoLeadingDigit(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=.1`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=.1`, err)
-	}
-}
-
-func TestParsePointFloatScientific(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0e4`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1.0e4`, err)
-	}
-
-	pts, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1e4`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1e4`, err)
-	}
-
-	fields, err := pts[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if fields["value"] != 1e4 {
-		t.Errorf(`ParsePoints("%s") field value mismatch. got %v, exp %v`, `cpu,host=serverA,region=us-west value=1e4`, fields["value"], 1e4)
-	}
-}
-
-func TestParsePointFloatScientificUpper(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0E4`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1.0E4`, err)
-	}
-
-	pts, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1E4`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1E4`, err)
-	}
-
-	fields, err := pts[0].Fields()
-	if err != nil {
-		t.Fatal(err)
-	}
-	if fields["value"] != 1e4 {
-		t.Errorf(`ParsePoints("%s") field value mismatch. got %v, exp %v`, `cpu,host=serverA,region=us-west value=1E4`, fields["value"], 1e4)
-	}
-}
-
-func TestParsePointFloatScientificDecimal(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=1.0e-4`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=1.0e-4`, err)
-	}
-}
-
-func TestParsePointFloatNegativeScientific(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=-1.0e-4`)
-	if err != nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got %v, exp nil`, `cpu,host=serverA,region=us-west value=-1.0e-4`, err)
-	}
-}
-
-func TestParsePointBooleanInvalid(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=a`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=a`)
-	}
-}
-
-func TestParsePointScientificIntInvalid(t *testing.T) {
-	_, err := models.ParsePointsString(`cpu,host=serverA,region=us-west value=9ie10`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=9ie10`)
-	}
-
-	_, err = models.ParsePointsString(`cpu,host=serverA,region=us-west value=9e10i`)
-	if err == nil {
-		t.Errorf(`ParsePoints("%s") mismatch. got nil, exp error`, `cpu,host=serverA,region=us-west value=9e10i`)
-	}
-}
-
-func TestParsePointWhitespace(t *testing.T) {
-	examples := []string{
-		`cpu value=1.0 1257894000000000000`,
-		`cpu value=1.0 1257894000000000000`,
-		`cpu value=1.0 1257894000000000000`,
-		`cpu value=1.0 1257894000000000000 `,
-		`cpu value=1.0 1257894000000000000
-`,
-		`cpu value=1.0 1257894000000000000
-`,
-	}
-
-	expPoint := NewTestPoint("cpu", models.Tags{}, models.Fields{"value": 1.0}, time.Unix(0, 1257894000000000000))
-	for i, example := range examples {
-		pts, err := models.ParsePoints([]byte(example))
-		if err != nil {
-			t.Fatalf(`[Example %d] ParsePoints("%s") error. got %v, exp nil`, i, example, err)
-		}
-
-		if got, exp := len(pts), 1; got != exp {
-			t.Fatalf("[Example %d] got %d points, expected %d", i, got, exp)
-		}
-
-		if got, exp := string(pts[0].Name()), string(expPoint.Name()); got != exp {
-			t.Fatalf("[Example %d] got %v measurement, expected %v", i, got, exp)
-		}
-
-		fields, err := pts[0].Fields()
-		if err != nil {
-			t.Fatal(err)
-		}
-		eFields, err := expPoint.Fields()
-		if err != nil {
-			t.Fatal(err)
-		}
-		if got, exp := len(fields), len(eFields); got != exp {
-			t.Fatalf("[Example %d] got %d fields, expected %d", i, got, exp)
-		}
-
-		if got, exp := fields["value"], eFields["value"]; got != exp {
-			t.Fatalf(`[Example %d] got %v for field "value", expected %v`, i, got, exp)
-		}
-
-		if got, exp := pts[0].Time().UnixNano(), expPoint.Time().UnixNano(); got != exp {
-			t.Fatalf(`[Example %d] got %d time, expected %d`, i, got, exp)
-		}
-	}
-}
-
-func TestParsePointUnescape(t *testing.T) {
-	// commas in measurement name
-	test(t, `foo\,bar value=1i`,
-		NewTestPoint(
-			"foo,bar", // comma in the name
-			models.NewTags(map[string]string{}),
-			models.Fields{
-				"value": int64(1),
-			},
-			time.Unix(0, 0)))
-
-	// comma in measurement name with tags
-	test(t, `cpu\,main,regions=east value=1.0`,
-		NewTestPoint(
-			"cpu,main", // comma in the name
-			models.NewTags(map[string]string{
-				"regions": "east",
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// spaces in measurement name
-	test(t, `cpu\ load,region=east value=1.0`,
-		NewTestPoint(
-			"cpu load", // space in the name
-			models.NewTags(map[string]string{
-				"region": "east",
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// escaped equals in measurement name
-	test(t, `cpu\=load,region=east value=1.0`,
-		NewTestPoint(
-			`cpu\=load`, // backslash is literal
-			models.NewTags(map[string]string{
-				"region": "east",
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// unescaped equals in measurement name
-	test(t, `cpu=load,region=east value=1.0`,
-		NewTestPoint(
-			`cpu=load`, // literal equals is fine in measurement name
-			models.NewTags(map[string]string{
-				"region": "east",
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// commas in tag names
-	test(t, `cpu,region\,zone=east value=1.0`,
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{
-				"region,zone": "east", // comma in the tag key
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// spaces in tag name
-	test(t, `cpu,region\ zone=east value=1.0`,
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{
-				"region zone": "east", // space in the tag name
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// backslash with escaped equals in tag name
-	test(t, `cpu,reg\\=ion=east value=1.0`,
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{
-				`reg\=ion`: "east",
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// space as the tag name
-	test(t, `cpu,\ =east value=1.0`,
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{
-				" ": "east", // tag name is a single space
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// commas in tag values
-	test(t, `cpu,regions=east\,west value=1.0`,
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{
-				"regions": "east,west", // comma in the tag value
-			}),
-			models.Fields{
-				"value": 1.0,
-			},
-			time.Unix(0, 0)))
-
-	// backslash literal followed by escaped space
-	test(t, `cpu,regions=\\ east value=1.0`,
-		NewTestPoint(
-			"cpu",
-			models.NewTags(map[string]string{
"regions": `\ east`, - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, 0))) - - // backslash literal followed by escaped space - test(t, `cpu,regions=eas\\ t value=1.0`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "regions": `eas\ t`, - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, 0))) - - // backslash literal followed by trailing space - test(t, `cpu,regions=east\\ value=1.0`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "regions": `east\ `, - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, 0))) - - // spaces in tag values - test(t, `cpu,regions=east\ west value=1.0`, - NewTestPoint("cpu", - models.NewTags(map[string]string{ - "regions": "east west", // comma in the tag value - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, 0))) - - // commas in field keys - test(t, `cpu,regions=east value\,ms=1.0`, - NewTestPoint("cpu", - models.NewTags(map[string]string{ - "regions": "east", - }), - models.Fields{ - "value,ms": 1.0, // comma in the field keys - }, - time.Unix(0, 0))) - - // spaces in field keys - test(t, `cpu,regions=east value\ ms=1.0`, - NewTestPoint("cpu", - models.NewTags(map[string]string{ - "regions": "east", - }), - models.Fields{ - "value ms": 1.0, // comma in the field keys - }, - time.Unix(0, 0))) - - // tag with no value - test(t, `cpu,regions=east value="1"`, - NewTestPoint("cpu", - models.NewTags(map[string]string{ - "regions": "east", - "foobar": "", - }), - models.Fields{ - "value": "1", - }, - time.Unix(0, 0))) - - // commas in field values - test(t, `cpu,regions=east value="1,0"`, - NewTestPoint("cpu", - models.NewTags(map[string]string{ - "regions": "east", - }), - models.Fields{ - "value": "1,0", // comma in the field value - }, - time.Unix(0, 0))) - - // random character escaped - test(t, `cpu,regions=eas\t value=1.0`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "regions": "eas\\t", - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, 0))) - - // backslash literal followed by escaped characters - test(t, `cpu,regions=\\,\,\=east value=1.0`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "regions": `\,,=east`, - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, 0))) - - // field keys using escape char. - test(t, `cpu \a=1i`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "\\a": int64(1), // Left as parsed since it's not a known escape sequence. 
-			},
-			time.Unix(0, 0)))
-
-	// measurement, tag and tag value with equals
-	test(t, `cpu=load,equals\=foo=tag\=value value=1i`,
-		NewTestPoint(
-			"cpu=load", // Not escaped
-			models.NewTags(map[string]string{
-				"equals=foo": "tag=value", // Tag and value unescaped
-			}),
-			models.Fields{
-				"value": int64(1),
-			},
-			time.Unix(0, 0)))
-}
-
-func TestParsePointWithTags(t *testing.T) {
-	test(t,
-		"cpu,host=serverA,region=us-east value=1.0 1000000000",
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{"host": "serverA", "region": "us-east"}),
-			models.Fields{"value": 1.0}, time.Unix(1, 0)))
-}
-
-func TestParsePointWithDuplicateTags(t *testing.T) {
-	for i, tt := range []struct {
-		line string
-		err  string
-	}{
-		{
-			line: `cpu,host=serverA,host=serverB value=1i 1000000000`,
-			err:  `unable to parse 'cpu,host=serverA,host=serverB value=1i 1000000000': duplicate tags`,
-		},
-		{
-			line: `cpu,b=2,b=1,c=3 value=1i 1000000000`,
-			err:  `unable to parse 'cpu,b=2,b=1,c=3 value=1i 1000000000': duplicate tags`,
-		},
-		{
-			line: `cpu,b=2,c=3,b=1 value=1i 1000000000`,
-			err:  `unable to parse 'cpu,b=2,c=3,b=1 value=1i 1000000000': duplicate tags`,
-		},
-	} {
-		_, err := models.ParsePointsString(tt.line)
-		if err == nil || tt.err != err.Error() {
-			t.Errorf("%d. ParsePoint() expected error '%s'. got '%s'", i, tt.err, err)
-		}
-	}
-}
-
-func TestParsePointWithStringField(t *testing.T) {
-	test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo",str2="bar" 1000000000`,
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{
-				"host":   "serverA",
-				"region": "us-east",
-			}),
-			models.Fields{
-				"value": 1.0,
-				"str":   "foo",
-				"str2":  "bar",
-			},
-			time.Unix(1, 0)),
-	)
-
-	test(t, `cpu,host=serverA,region=us-east str="foo \" bar" 1000000000`,
-		NewTestPoint("cpu",
-			models.NewTags(map[string]string{
-				"host":   "serverA",
-				"region": "us-east",
-			}),
-			models.Fields{
-				"str": `foo " bar`,
-			},
-			time.Unix(1, 0)),
-	)
-}
-
-func TestParsePointWithStringWithSpaces(t *testing.T) {
-	test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo bar" 1000000000`,
-		NewTestPoint(
-			"cpu",
-			models.NewTags(map[string]string{
-				"host":   "serverA",
-				"region": "us-east",
-			}),
-			models.Fields{
-				"value": 1.0,
-				"str":   "foo bar", // spaces in string value
-			},
-			time.Unix(1, 0)),
-	)
-}
-
-func TestParsePointWithStringWithNewline(t *testing.T) {
-	test(t, "cpu,host=serverA,region=us-east value=1.0,str=\"foo\nbar\" 1000000000",
-		NewTestPoint(
-			"cpu",
-			models.NewTags(map[string]string{
-				"host":   "serverA",
-				"region": "us-east",
-			}),
-			models.Fields{
-				"value": 1.0,
-				"str":   "foo\nbar", // newline in string value
-			},
-			time.Unix(1, 0)),
-	)
-}
-
-func TestParsePointWithStringWithCommas(t *testing.T) {
-	// escaped comma
-	test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo\,bar" 1000000000`,
-		NewTestPoint(
-			"cpu",
-			models.NewTags(map[string]string{
-				"host":   "serverA",
-				"region": "us-east",
-			}),
-			models.Fields{
-				"value": 1.0,
-				"str":   `foo\,bar`, // commas in string value
-			},
-			time.Unix(1, 0)),
-	)
-
-	// non-escaped comma
-	test(t, `cpu,host=serverA,region=us-east value=1.0,str="foo,bar" 1000000000`,
-		NewTestPoint(
-			"cpu",
-			models.NewTags(map[string]string{
-				"host":   "serverA",
-				"region": "us-east",
-			}),
-			models.Fields{
-				"value": 1.0,
-				"str":   "foo,bar", // commas in string value
-			},
-			time.Unix(1, 0)),
-	)
-
-	// string w/ trailing escape chars
-	test(t, `cpu,host=serverA,region=us-east str="foo\\",str2="bar" 1000000000`,
-		NewTestPoint(
-			"cpu",
-			models.NewTags(map[string]string{
- "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "str": "foo\\", // trailing escape char - "str2": "bar", - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointQuotedMeasurement(t *testing.T) { - // non-escaped comma - test(t, `"cpu",host=serverA,region=us-east value=1.0 1000000000`, - NewTestPoint( - `"cpu"`, - models.NewTags(map[string]string{ - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointQuotedTags(t *testing.T) { - test(t, `cpu,"host"="serverA",region=us-east value=1.0 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - `"host"`: `"serverA"`, - "region": "us-east", - }), - models.Fields{ - "value": 1.0, - }, - time.Unix(1, 0)), - ) -} - -func TestParsePoint_TrailingSlash(t *testing.T) { - _, err := models.ParsePointsString(`a v=1 0\`) - if err == nil { - t.Fatalf("ParsePoints failed: %v", err) - } else if !strings.Contains(err.Error(), "bad timestamp") { - t.Fatalf("ParsePoints unexpected error: %v", err) - } -} - -func TestParsePointsUnbalancedQuotedTags(t *testing.T) { - pts, err := models.ParsePointsString("baz,mytag=\"a x=1 1441103862125\nbaz,mytag=a z=1 1441103862126") - if err != nil { - t.Fatalf("ParsePoints failed: %v", err) - } - - if exp := 2; len(pts) != exp { - t.Fatalf("ParsePoints count mismatch. got %v, exp %v", len(pts), exp) - } - - // Expected " in the tag value - exp := models.MustNewPoint("baz", models.NewTags(map[string]string{"mytag": `"a`}), - models.Fields{"x": float64(1)}, time.Unix(0, 1441103862125)) - - if pts[0].String() != exp.String() { - t.Errorf("Point mismatch:\ngot: %v\nexp: %v", pts[0].String(), exp.String()) - } - - // Expected two points to ensure we did not overscan the line - exp = models.MustNewPoint("baz", models.NewTags(map[string]string{"mytag": `a`}), - models.Fields{"z": float64(1)}, time.Unix(0, 1441103862126)) - - if pts[1].String() != exp.String() { - t.Errorf("Point mismatch:\ngot: %v\nexp: %v", pts[1].String(), exp.String()) - } - -} - -func TestParsePointEscapedStringsAndCommas(t *testing.T) { - // non-escaped comma and quotes - test(t, `cpu,host=serverA,region=us-east value="{Hello\"{,}\" World}" 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "value": `{Hello"{,}" World}`, - }, - time.Unix(1, 0)), - ) - - // escaped comma and quotes - test(t, `cpu,host=serverA,region=us-east value="{Hello\"{\,}\" World}" 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "value": `{Hello"{\,}" World}`, - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointWithStringWithEquals(t *testing.T) { - test(t, `cpu,host=serverA,region=us-east str="foo=bar",value=1.0 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "value": 1.0, - "str": "foo=bar", // spaces in string value - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointWithStringWithBackslash(t *testing.T) { - test(t, `cpu value="test\\\"" 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": `test\"`, - }, - time.Unix(1, 0)), - ) - - test(t, `cpu value="test\\" 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": `test\`, - }, - time.Unix(1, 0)), - ) - - test(t, `cpu value="test\\\"" 
1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": `test\"`, - }, - time.Unix(1, 0)), - ) - - test(t, `cpu value="test\"" 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": `test"`, - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointWithBoolField(t *testing.T) { - test(t, `cpu,host=serverA,region=us-east true=true,t=t,T=T,TRUE=TRUE,True=True,false=false,f=f,F=F,FALSE=FALSE,False=False 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "t": true, - "T": true, - "true": true, - "True": true, - "TRUE": true, - "f": false, - "F": false, - "false": false, - "False": false, - "FALSE": false, - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointUnicodeString(t *testing.T) { - test(t, `cpu,host=serverA,region=us-east value="wè" 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{ - "host": "serverA", - "region": "us-east", - }), - models.Fields{ - "value": "wè", - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointNegativeTimestamp(t *testing.T) { - test(t, `cpu value=1 -1`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, -1)), - ) -} - -func TestParsePointMaxTimestamp(t *testing.T) { - test(t, fmt.Sprintf(`cpu value=1 %d`, models.MaxNanoTime), - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, models.MaxNanoTime)), - ) -} - -func TestParsePointMinTimestamp(t *testing.T) { - test(t, `cpu value=1 -9223372036854775806`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": 1.0, - }, - time.Unix(0, models.MinNanoTime)), - ) -} - -func TestParsePointInvalidTimestamp(t *testing.T) { - examples := []string{ - "cpu value=1 9223372036854775808", - "cpu value=1 -92233720368547758078", - "cpu value=1 -", - "cpu value=1 -/", - "cpu value=1 -1?", - "cpu value=1 1-", - "cpu value=1 9223372036854775807 12", - } - - for i, example := range examples { - _, err := models.ParsePointsString(example) - if err == nil { - t.Fatalf("[Example %d] ParsePoints failed: %v", i, err) - } - } -} - -func TestNewPointFloatWithoutDecimal(t *testing.T) { - test(t, `cpu value=1 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": 1.0, - }, - time.Unix(1, 0)), - ) -} -func TestNewPointNegativeFloat(t *testing.T) { - test(t, `cpu value=-0.64 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": -0.64, - }, - time.Unix(1, 0)), - ) -} - -func TestNewPointFloatNoDecimal(t *testing.T) { - test(t, `cpu value=1. 
1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": 1.0, - }, - time.Unix(1, 0)), - ) -} - -func TestNewPointFloatScientific(t *testing.T) { - test(t, `cpu value=6.632243e+06 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": float64(6632243), - }, - time.Unix(1, 0)), - ) -} - -func TestNewPointLargeInteger(t *testing.T) { - test(t, `cpu value=6632243i 1000000000`, - NewTestPoint( - "cpu", - models.NewTags(map[string]string{}), - models.Fields{ - "value": int64(6632243), // if incorrectly encoded as a float, it would show up as 6.632243e+06 - }, - time.Unix(1, 0)), - ) -} - -func TestParsePointNaN(t *testing.T) { - _, err := models.ParsePointsString("cpu value=NaN 1000000000") - if err == nil { - t.Fatalf("ParsePoints expected error, got nil") - } - - _, err = models.ParsePointsString("cpu value=nAn 1000000000") - if err == nil { - t.Fatalf("ParsePoints expected error, got nil") - } - - _, err = models.ParsePointsString("cpu value=NaN") - if err == nil { - t.Fatalf("ParsePoints expected error, got nil") - } -} - -func TestNewPointLargeNumberOfTags(t *testing.T) { - tags := "" - for i := 0; i < 255; i++ { - tags += fmt.Sprintf(",tag%d=value%d", i, i) - } - - pt, err := models.ParsePointsString(fmt.Sprintf("cpu%s value=1", tags)) - if err != nil { - t.Fatalf("ParsePoints() with max tags failed: %v", err) - } - - if len(pt[0].Tags()) != 255 { - t.Fatalf("expected %d tags, got %d", 255, len(pt[0].Tags())) - } -} - -func TestParsePointIntsFloats(t *testing.T) { - pts, err := models.ParsePoints([]byte(`cpu,host=serverA,region=us-east int=10i,float=11.0,float2=12.1 1000000000`)) - if err != nil { - t.Fatalf(`ParsePoints() failed. got %s`, err) - } - - if exp := 1; len(pts) != exp { - t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp) - } - pt := pts[0] - - fields, err := pt.Fields() - if err != nil { - t.Fatal(err) - } - if _, ok := fields["int"].(int64); !ok { - t.Errorf("ParsePoint() int field mismatch: got %T, exp %T", fields["int"], int64(10)) - } - - if _, ok := fields["float"].(float64); !ok { - t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(11.0)) - } - - if _, ok := fields["float2"].(float64); !ok { - t.Errorf("ParsePoint() float field mismatch: got %T, exp %T", fields["float64"], float64(12.1)) - } -} - -func TestParsePointKeyUnsorted(t *testing.T) { - pts, err := models.ParsePoints([]byte("cpu,last=1,first=2 value=1i")) - if err != nil { - t.Fatalf(`ParsePoints() failed. got %s`, err) - } - - if exp := 1; len(pts) != exp { - t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp) - } - pt := pts[0] - - if exp := "cpu,first=2,last=1"; string(pt.Key()) != exp { - t.Errorf("ParsePoint key not sorted. got %v, exp %v", string(pt.Key()), exp) - } -} - -func TestParsePointToString(t *testing.T) { - line := `cpu,host=serverA,region=us-east bool=false,float=11,float2=12.123,int=10i,str="string val" 1000000000` - pts, err := models.ParsePoints([]byte(line)) - if err != nil { - t.Fatalf(`ParsePoints() failed. 
got %s`, err) - } - if exp := 1; len(pts) != exp { - t.Errorf("ParsePoint() len mismatch: got %v, exp %v", len(pts), exp) - } - pt := pts[0] - - got := pt.String() - if line != got { - t.Errorf("ParsePoint() to string mismatch:\n got %v\n exp %v", got, line) - } - - pt = models.MustNewPoint("cpu", models.NewTags(map[string]string{"host": "serverA", "region": "us-east"}), - models.Fields{"int": 10, "float": float64(11.0), "float2": float64(12.123), "bool": false, "str": "string val"}, - time.Unix(1, 0)) - - got = pt.String() - if line != got { - t.Errorf("NewPoint() to string mismatch:\n got %v\n exp %v", got, line) - } -} - -func TestParsePointsWithPrecision(t *testing.T) { - tests := []struct { - name string - line string - precision string - exp string - }{ - { - name: "nanosecond by default", - line: `cpu,host=serverA,region=us-east value=1.0 946730096789012345`, - precision: "", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - }, - { - name: "nanosecond", - line: `cpu,host=serverA,region=us-east value=1.0 946730096789012345`, - precision: "n", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - }, - { - name: "microsecond", - line: `cpu,host=serverA,region=us-east value=1.0 946730096789012`, - precision: "u", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012000", - }, - { - name: "millisecond", - line: `cpu,host=serverA,region=us-east value=1.0 946730096789`, - precision: "ms", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789000000", - }, - { - name: "second", - line: `cpu,host=serverA,region=us-east value=1.0 946730096`, - precision: "s", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096000000000", - }, - { - name: "minute", - line: `cpu,host=serverA,region=us-east value=1.0 15778834`, - precision: "m", - exp: "cpu,host=serverA,region=us-east value=1.0 946730040000000000", - }, - { - name: "hour", - line: `cpu,host=serverA,region=us-east value=1.0 262980`, - precision: "h", - exp: "cpu,host=serverA,region=us-east value=1.0 946728000000000000", - }, - } - for _, test := range tests { - pts, err := models.ParsePointsWithPrecision([]byte(test.line), time.Now().UTC(), test.precision) - if err != nil { - t.Fatalf(`%s: ParsePoints() failed. 
got %s`, test.name, err) - } - if exp := 1; len(pts) != exp { - t.Errorf("%s: ParsePoint() len mismatch: got %v, exp %v", test.name, len(pts), exp) - } - pt := pts[0] - - got := pt.String() - if got != test.exp { - t.Errorf("%s: ParsePoint() to string mismatch:\n got %v\n exp %v", test.name, got, test.exp) - } - } -} - -func TestParsePointsWithPrecisionNoTime(t *testing.T) { - line := `cpu,host=serverA,region=us-east value=1.0` - tm, _ := time.Parse(time.RFC3339Nano, "2000-01-01T12:34:56.789012345Z") - tests := []struct { - name string - precision string - exp string - }{ - { - name: "no precision", - precision: "", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - }, - { - name: "nanosecond precision", - precision: "n", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - }, - { - name: "microsecond precision", - precision: "u", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012000", - }, - { - name: "millisecond precision", - precision: "ms", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789000000", - }, - { - name: "second precision", - precision: "s", - exp: "cpu,host=serverA,region=us-east value=1.0 946730096000000000", - }, - { - name: "minute precision", - precision: "m", - exp: "cpu,host=serverA,region=us-east value=1.0 946730040000000000", - }, - { - name: "hour precision", - precision: "h", - exp: "cpu,host=serverA,region=us-east value=1.0 946728000000000000", - }, - } - - for _, test := range tests { - pts, err := models.ParsePointsWithPrecision([]byte(line), tm, test.precision) - if err != nil { - t.Fatalf(`%s: ParsePoints() failed. got %s`, test.name, err) - } - if exp := 1; len(pts) != exp { - t.Errorf("%s: ParsePoint() len mismatch: got %v, exp %v", test.name, len(pts), exp) - } - pt := pts[0] - - got := pt.String() - if got != test.exp { - t.Errorf("%s: ParsePoint() to string mismatch:\n got %v\n exp %v", test.name, got, test.exp) - } - } -} - -func TestParsePointsWithPrecisionComments(t *testing.T) { - tests := []struct { - name string - batch string - exp string - lenPoints int - }{ - { - name: "comment only", - batch: `# comment only`, - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - lenPoints: 0, - }, - { - name: "point with comment above", - batch: `# a point is below -cpu,host=serverA,region=us-east value=1.0 946730096789012345`, - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - lenPoints: 1, - }, - { - name: "point with comment below", - batch: `cpu,host=serverA,region=us-east value=1.0 946730096789012345 -# end of points`, - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - lenPoints: 1, - }, - { - name: "indented comment", - batch: ` # a point is below -cpu,host=serverA,region=us-east value=1.0 946730096789012345`, - exp: "cpu,host=serverA,region=us-east value=1.0 946730096789012345", - lenPoints: 1, - }, - } - for _, test := range tests { - pts, err := models.ParsePointsWithPrecision([]byte(test.batch), time.Now().UTC(), "") - if err != nil { - t.Fatalf(`%s: ParsePoints() failed. 
got %s`, test.name, err) - } - pointsLength := len(pts) - if exp := test.lenPoints; pointsLength != exp { - t.Errorf("%s: ParsePoint() len mismatch: got %v, exp %v", test.name, pointsLength, exp) - } - - if pointsLength > 0 { - pt := pts[0] - - got := pt.String() - if got != test.exp { - t.Errorf("%s: ParsePoint() to string mismatch:\n got %v\n exp %v", test.name, got, test.exp) - } - } - } -} - -func TestNewPointEscaped(t *testing.T) { - // commas - pt := models.MustNewPoint("cpu,main", models.NewTags(map[string]string{"tag,bar": "value"}), models.Fields{"name,bar": 1.0}, time.Unix(0, 0)) - if exp := `cpu\,main,tag\,bar=value name\,bar=1 0`; pt.String() != exp { - t.Errorf("NewPoint().String() mismatch.\ngot %v\nexp %v", pt.String(), exp) - } - - // spaces - pt = models.MustNewPoint("cpu main", models.NewTags(map[string]string{"tag bar": "value"}), models.Fields{"name bar": 1.0}, time.Unix(0, 0)) - if exp := `cpu\ main,tag\ bar=value name\ bar=1 0`; pt.String() != exp { - t.Errorf("NewPoint().String() mismatch.\ngot %v\nexp %v", pt.String(), exp) - } - - // equals - pt = models.MustNewPoint("cpu=main", models.NewTags(map[string]string{"tag=bar": "value=foo"}), models.Fields{"name=bar": 1.0}, time.Unix(0, 0)) - if exp := `cpu=main,tag\=bar=value\=foo name\=bar=1 0`; pt.String() != exp { - t.Errorf("NewPoint().String() mismatch.\ngot %v\nexp %v", pt.String(), exp) - } -} - -func TestNewPointWithoutField(t *testing.T) { - _, err := models.NewPoint("cpu", models.NewTags(map[string]string{"tag": "bar"}), models.Fields{}, time.Unix(0, 0)) - if err == nil { - t.Fatalf(`NewPoint() expected error. got nil`) - } -} - -func TestNewPointUnhandledType(t *testing.T) { - // nil value - pt := models.MustNewPoint("cpu", nil, models.Fields{"value": nil}, time.Unix(0, 0)) - if exp := `cpu value= 0`; pt.String() != exp { - t.Errorf("NewPoint().String() mismatch.\ngot %v\nexp %v", pt.String(), exp) - } - - // unsupported type gets stored as string - now := time.Unix(0, 0).UTC() - pt = models.MustNewPoint("cpu", nil, models.Fields{"value": now}, time.Unix(0, 0)) - if exp := `cpu value="1970-01-01 00:00:00 +0000 UTC" 0`; pt.String() != exp { - t.Errorf("NewPoint().String() mismatch.\ngot %v\nexp %v", pt.String(), exp) - } - - fields, err := pt.Fields() - if err != nil { - t.Fatal(err) - } - if exp := "1970-01-01 00:00:00 +0000 UTC"; fields["value"] != exp { - t.Errorf("NewPoint().String() mismatch.\ngot %v\nexp %v", pt.String(), exp) - } -} - -func TestMakeKeyEscaped(t *testing.T) { - if exp, got := `cpu\ load`, models.MakeKey([]byte(`cpu\ load`), models.NewTags(map[string]string{})); string(got) != exp { - t.Errorf("MakeKey() mismatch.\ngot %v\nexp %v", got, exp) - } - - if exp, got := `cpu\ load`, models.MakeKey([]byte(`cpu load`), models.NewTags(map[string]string{})); string(got) != exp { - t.Errorf("MakeKey() mismatch.\ngot %v\nexp %v", got, exp) - } - - if exp, got := `cpu\,load`, models.MakeKey([]byte(`cpu\,load`), models.NewTags(map[string]string{})); string(got) != exp { - t.Errorf("MakeKey() mismatch.\ngot %v\nexp %v", got, exp) - } - - if exp, got := `cpu\,load`, models.MakeKey([]byte(`cpu,load`), models.NewTags(map[string]string{})); string(got) != exp { - t.Errorf("MakeKey() mismatch.\ngot %v\nexp %v", got, exp) - } - -} - -func TestPrecisionString(t *testing.T) { - tags := map[string]interface{}{"value": float64(1)} - tm, _ := time.Parse(time.RFC3339Nano, "2000-01-01T12:34:56.789012345Z") - tests := []struct { - name string - precision string - exp string - }{ - { - name: "no precision", - 
precision: "", - exp: "cpu value=1 946730096789012345", - }, - { - name: "nanosecond precision", - precision: "ns", - exp: "cpu value=1 946730096789012345", - }, - { - name: "microsecond precision", - precision: "u", - exp: "cpu value=1 946730096789012", - }, - { - name: "millisecond precision", - precision: "ms", - exp: "cpu value=1 946730096789", - }, - { - name: "second precision", - precision: "s", - exp: "cpu value=1 946730096", - }, - { - name: "minute precision", - precision: "m", - exp: "cpu value=1 15778834", - }, - { - name: "hour precision", - precision: "h", - exp: "cpu value=1 262980", - }, - } - - for _, test := range tests { - pt := models.MustNewPoint("cpu", nil, tags, tm) - act := pt.PrecisionString(test.precision) - - if act != test.exp { - t.Errorf("%s: PrecisionString() mismatch:\n actual: %v\n exp: %v", - test.name, act, test.exp) - } - } -} - -func TestRoundedString(t *testing.T) { - tags := map[string]interface{}{"value": float64(1)} - tm, _ := time.Parse(time.RFC3339Nano, "2000-01-01T12:34:56.789012345Z") - tests := []struct { - name string - precision time.Duration - exp string - }{ - { - name: "no precision", - precision: time.Duration(0), - exp: "cpu value=1 946730096789012345", - }, - { - name: "nanosecond precision", - precision: time.Nanosecond, - exp: "cpu value=1 946730096789012345", - }, - { - name: "microsecond precision", - precision: time.Microsecond, - exp: "cpu value=1 946730096789012000", - }, - { - name: "millisecond precision", - precision: time.Millisecond, - exp: "cpu value=1 946730096789000000", - }, - { - name: "second precision", - precision: time.Second, - exp: "cpu value=1 946730097000000000", - }, - { - name: "minute precision", - precision: time.Minute, - exp: "cpu value=1 946730100000000000", - }, - { - name: "hour precision", - precision: time.Hour, - exp: "cpu value=1 946731600000000000", - }, - } - - for _, test := range tests { - pt := models.MustNewPoint("cpu", nil, tags, tm) - act := pt.RoundedString(test.precision) - - if act != test.exp { - t.Errorf("%s: RoundedString() mismatch:\n actual: %v\n exp: %v", - test.name, act, test.exp) - } - } -} - -func TestParsePointsStringWithExtraBuffer(t *testing.T) { - b := make([]byte, 70*5000) - buf := bytes.NewBuffer(b) - key := "cpu,host=A,region=uswest" - buf.WriteString(fmt.Sprintf("%s value=%.3f 1\n", key, rand.Float64())) - - points, err := models.ParsePointsString(buf.String()) - if err != nil { - t.Fatalf("failed to write points: %s", err.Error()) - } - - pointKey := string(points[0].Key()) - - if len(key) != len(pointKey) { - t.Fatalf("expected length of both keys are same but got %d and %d", len(key), len(pointKey)) - } - - if key != pointKey { - t.Fatalf("expected both keys are same but got %s and %s", key, pointKey) - } -} - -func TestParsePointsQuotesInFieldKey(t *testing.T) { - buf := `cpu "a=1 -cpu value=2 1` - points, err := models.ParsePointsString(buf) - if err != nil { - t.Fatalf("failed to write points: %s", err.Error()) - } - - fields, err := points[0].Fields() - if err != nil { - t.Fatal(err) - } - value, ok := fields["\"a"] - if !ok { - t.Fatalf("expected to parse field '\"a'") - } - - if value != float64(1) { - t.Fatalf("expected field value to be 1, got %v", value) - } - - // The following input should not parse - buf = `cpu "\, '= "\ v=1.0` - _, err = models.ParsePointsString(buf) - if err == nil { - t.Fatalf("expected parsing failure but got no error") - } -} - -func TestParsePointsQuotesInTags(t *testing.T) { - buf := `t159,label=hey\ "ya a=1i,value=0i 
-t159,label=another a=2i,value=1i 1` - points, err := models.ParsePointsString(buf) - if err != nil { - t.Fatalf("failed to write points: %s", err.Error()) - } - - if len(points) != 2 { - t.Fatalf("expected 2 points, got %d", len(points)) - } -} - -func TestParsePointsBlankLine(t *testing.T) { - buf := `cpu value=1i 1000000000 - -cpu value=2i 2000000000` - points, err := models.ParsePointsString(buf) - if err != nil { - t.Fatalf("failed to write points: %s", err.Error()) - } - - if len(points) != 2 { - t.Fatalf("expected 2 points, got %d", len(points)) - } -} - -func TestNewPointsWithBytesWithCorruptData(t *testing.T) { - corrupted := []byte{0, 0, 0, 3, 102, 111, 111, 0, 0, 0, 4, 61, 34, 65, 34, 1, 0, 0, 0, 14, 206, 86, 119, 24, 32, 72, 233, 168, 2, 148} - p, err := models.NewPointFromBytes(corrupted) - if p != nil || err == nil { - t.Fatalf("NewPointFromBytes: got: (%v, %v), expected: (nil, error)", p, err) - } -} - -func TestNewPointsWithShortBuffer(t *testing.T) { - _, err := models.NewPointFromBytes([]byte{0, 0, 0, 3, 4}) - if err != io.ErrShortBuffer { - t.Fatalf("NewPointFromBytes: got: (%v, %v), expected: (nil, error)", p, err) - } -} - -func TestNewPointsRejectsEmptyFieldNames(t *testing.T) { - if _, err := models.NewPoint("foo", nil, models.Fields{"": 1}, time.Now()); err == nil { - t.Fatalf("new point with empty field name. got: nil, expected: error") - } -} - -func TestNewPointsRejectsMaxKey(t *testing.T) { - var key string - // tsm field key is point key, separator (4 bytes) and field - for i := 0; i < models.MaxKeyLength-len("value")-4; i++ { - key += "a" - } - - // Test max key len - if _, err := models.NewPoint(key, nil, models.Fields{"value": 1, "ok": 2.0}, time.Now()); err != nil { - t.Fatalf("new point with max key. got: %v, expected: nil", err) - } - - if _, err := models.ParsePointsString(fmt.Sprintf("%v value=1,ok=2.0", key)); err != nil { - t.Fatalf("parse point with max key. got: %v, expected: nil", err) - } - - // Test 1 byte over max key len - key += "a" - if _, err := models.NewPoint(key, nil, models.Fields{"value": 1, "ok": 2.0}, time.Now()); err == nil { - t.Fatalf("new point with max key. got: nil, expected: error") - } - - if _, err := models.ParsePointsString(fmt.Sprintf("%v value=1,ok=2.0", key)); err == nil { - t.Fatalf("parse point with max key. 
got: nil, expected: error") - } - -} - -func TestPoint_FieldIterator_Simple(t *testing.T) { - - p, err := models.ParsePoints([]byte(`m v=42i,f=42 36`)) - if err != nil { - t.Fatal(err) - } - - if len(p) != 1 { - t.Fatalf("wrong number of points, got %d, exp %d", len(p), 1) - } - - fi := p[0].FieldIterator() - - if !fi.Next() { - t.Fatal("field iterator terminated before first field") - } - - if fi.Type() != models.Integer { - t.Fatalf("'42i' should be an Integer, got %v", fi.Type()) - } - - iv, err := fi.IntegerValue() - if err != nil { - t.Fatal(err) - } - if exp, got := int64(42), iv; exp != got { - t.Fatalf("'42i' should be %d, got %d", exp, got) - } - - if !fi.Next() { - t.Fatalf("field iterator terminated before second field") - } - - if fi.Type() != models.Float { - t.Fatalf("'42' should be a Float, got %v", fi.Type()) - } - - fv, err := fi.FloatValue() - if err != nil { - t.Fatal(err) - } - if exp, got := 42.0, fv; exp != got { - t.Fatalf("'42' should be %f, got %f", exp, got) - } - - if fi.Next() { - t.Fatal("field iterator didn't terminate") - } -} - -func toFields(fi models.FieldIterator) models.Fields { - m := make(models.Fields) - for fi.Next() { - var v interface{} - var err error - switch fi.Type() { - case models.Float: - v, err = fi.FloatValue() - case models.Integer: - v, err = fi.IntegerValue() - case models.Unsigned: - v, err = fi.UnsignedValue() - case models.String: - v = fi.StringValue() - case models.Boolean: - v, err = fi.BooleanValue() - case models.Empty: - v = nil - default: - panic("unknown type") - } - if err != nil { - panic(err) - } - m[string(fi.FieldKey())] = v - } - return m -} - -func TestPoint_FieldIterator_FieldMap(t *testing.T) { - - points, err := models.ParsePointsString(` -m v=42 -m v=42i -m v="string" -m v=true -m v="string\"with\"escapes" -m v=42i,f=42,g=42.314,u=123u -m a=2i,b=3i,c=true,d="stuff",e=-0.23,f=123.456 -`) - - if err != nil { - t.Fatal("failed to parse test points:", err) - } - - for _, p := range points { - exp, err := p.Fields() - if err != nil { - t.Fatal(err) - } - got := toFields(p.FieldIterator()) - - if !reflect.DeepEqual(got, exp) { - t.Errorf("FieldIterator failed for %#q: got %#v, exp %#v", p.String(), got, exp) - } - } -} - -func TestEscapeStringField(t *testing.T) { - cases := []struct { - in string - expOut string - }{ - {in: "abcdefg", expOut: "abcdefg"}, - {in: `one double quote " .`, expOut: `one double quote \" .`}, - {in: `quote " then backslash \ .`, expOut: `quote \" then backslash \\ .`}, - {in: `backslash \ then quote " .`, expOut: `backslash \\ then quote \" .`}, - } - - for _, c := range cases { - // Unescapes as expected. 
-		got := models.EscapeStringField(c.in)
-		if got != c.expOut {
-			t.Errorf("unexpected result from EscapeStringField(%s)\ngot [%s]\nexp [%s]\n", c.in, got, c.expOut)
-			continue
-		}
-
-		pointLine := fmt.Sprintf(`t s="%s"`, got)
-		test(t, pointLine, NewTestPoint(
-			"t",
-			models.NewTags(nil),
-			models.Fields{"s": c.in},
-			time.Unix(0, 0),
-		))
-	}
-}
-
-func TestParseKeyBytes(t *testing.T) {
-	testCases := []struct {
-		input        string
-		expectedName string
-		expectedTags map[string]string
-	}{
-		{input: "m,k=v", expectedName: "m", expectedTags: map[string]string{"k": "v"}},
-		{input: "m\\ q,k=v", expectedName: "m q", expectedTags: map[string]string{"k": "v"}},
-		{input: "m,k\\ q=v", expectedName: "m", expectedTags: map[string]string{"k q": "v"}},
-		{input: "m\\ q,k\\ q=v", expectedName: "m q", expectedTags: map[string]string{"k q": "v"}},
-	}
-
-	for _, testCase := range testCases {
-		t.Run(testCase.input, func(t *testing.T) {
-			name, tags := models.ParseKeyBytes([]byte(testCase.input))
-			if !bytes.Equal([]byte(testCase.expectedName), name) {
-				t.Errorf("%s produced measurement %s but expected %s", testCase.input, string(name), testCase.expectedName)
-			}
-			if !tags.Equal(models.NewTags(testCase.expectedTags)) {
-				t.Errorf("%s produced tags %s but expected %s", testCase.input, tags.String(), models.NewTags(testCase.expectedTags).String())
-			}
-		})
-	}
-}
-
-func TestParseName(t *testing.T) {
-	testCases := []struct {
-		input        string
-		expectedName string
-	}{
-		{input: "m,k=v", expectedName: "m"},
-		{input: "m\\ q,k=v", expectedName: "m q"},
-	}
-
-	for _, testCase := range testCases {
-		t.Run(testCase.input, func(t *testing.T) {
-			name := models.ParseName([]byte(testCase.input))
-			if !bytes.Equal([]byte(testCase.expectedName), name) {
-				t.Errorf("%s produced measurement %s but expected %s", testCase.input, string(name), testCase.expectedName)
-			}
-		})
-	}
-}
-
-func BenchmarkEscapeStringField_Plain(b *testing.B) {
-	s := "nothing special"
-	for i := 0; i < b.N; i++ {
-		sink = models.EscapeStringField(s)
-	}
-}
-
-func BenchmarkEscapeString_Quotes(b *testing.B) {
-	s := `Hello, "world"`
-	for i := 0; i < b.N; i++ {
-		sink = models.EscapeStringField(s)
-	}
-}
-
-func BenchmarkEscapeString_Backslashes(b *testing.B) {
-	s := `C:\windows\system32`
-	for i := 0; i < b.N; i++ {
-		sink = models.EscapeStringField(s)
-	}
-}
-
-func BenchmarkEscapeString_QuotesAndBackslashes(b *testing.B) {
-	s1 := `a quote " then backslash \ .`
-	s2 := `a backslash \ then quote " .`
-	for i := 0; i < b.N; i++ {
-		sink = [...]string{models.EscapeStringField(s1), models.EscapeStringField(s2)}
-	}
-}
-
-func BenchmarkParseTags(b *testing.B) {
-	tags := []byte("cpu,tag0=value0,tag1=value1,tag2=value2,tag3=value3,tag4=value4,tag5=value5")
-	for i := 0; i < b.N; i++ {
-		models.ParseTags(tags)
-	}
-}
-
-func BenchmarkEscapeMeasurement(b *testing.B) {
-	benchmarks := []struct {
-		m []byte
-	}{
-		{[]byte("this_is_a_test")},
-		{[]byte("this,is,a,test")},
-	}
-
-	for _, bm := range benchmarks {
-		b.Run(string(bm.m), func(b *testing.B) {
-			b.ReportAllocs()
-			for i := 0; i < b.N; i++ {
-				models.EscapeMeasurement(bm.m)
-			}
-		})
-	}
-}
-
-func makeTags(key, val string, n int) models.Tags {
-	tags := make(models.Tags, n)
-	for i := range tags {
-		tags[i].Key = []byte(fmt.Sprintf("%s%03d", key, i))
-		tags[i].Value = []byte(fmt.Sprintf("%s%03d", val, i))
-	}
-	return tags
-}
-
-func BenchmarkTags_HashKey(b *testing.B) {
-	benchmarks := []struct {
-		name string
-		t    models.Tags
-	}{
-		{"5 tags-no esc", makeTags("tag_foo", "val_bar", 5)},
{"25 tags-no esc", makeTags("tag_foo", "val_bar", 25)}, - {"5 tags-esc", makeTags("tag foo", "val bar", 5)}, - {"25 tags-esc", makeTags("tag foo", "val bar", 25)}, - } - for _, bm := range benchmarks { - b.Run(bm.name, func(b *testing.B) { - b.ReportAllocs() - for i := 0; i < b.N; i++ { - bm.t.HashKey() - } - }) - } -} - -func BenchmarkMakeKey(b *testing.B) { - benchmarks := []struct { - m []byte - t models.Tags - }{ - {[]byte("this_is_a_test"), nil}, - {[]byte("this,is,a,test"), nil}, - {[]byte(`this\ is\ a\ test`), nil}, - - {[]byte("this_is_a_test"), makeTags("tag_foo", "val_bar", 8)}, - {[]byte("this,is,a,test"), makeTags("tag_foo", "val_bar", 8)}, - {[]byte("this_is_a_test"), makeTags("tag_foo", "val bar", 8)}, - {[]byte("this,is,a,test"), makeTags("tag_foo", "val bar", 8)}, - } - - for _, bm := range benchmarks { - b.Run(string(bm.m), func(b *testing.B) { - b.ReportAllocs() - for i := 0; i < b.N; i++ { - models.MakeKey(bm.m, bm.t) - } - }) - } -} - -func BenchmarkNewTagsKeyValues(b *testing.B) { - b.Run("sorted", func(b *testing.B) { - b.Run("no dupes", func(b *testing.B) { - kv := [][]byte{[]byte("tag0"), []byte("v0"), []byte("tag1"), []byte("v1"), []byte("tag2"), []byte("v2")} - - b.Run("preallocate", func(b *testing.B) { - t := make(models.Tags, 3) - b.ReportAllocs() - for i := 0; i < b.N; i++ { - _, _ = models.NewTagsKeyValues(t, kv...) - } - }) - - b.Run("allocate", func(b *testing.B) { - b.ReportAllocs() - for i := 0; i < b.N; i++ { - _, _ = models.NewTagsKeyValues(nil, kv...) - } - }) - }) - - b.Run("dupes", func(b *testing.B) { - kv := [][]byte{[]byte("tag0"), []byte("v0"), []byte("tag1"), []byte("v1"), []byte("tag1"), []byte("v1"), []byte("tag2"), []byte("v2"), []byte("tag2"), []byte("v2")} - b.ReportAllocs() - - for i := 0; i < b.N; i++ { - _, _ = models.NewTagsKeyValues(nil, kv...) - } - }) - }) - b.Run("unsorted", func(b *testing.B) { - b.Run("no dupes", func(b *testing.B) { - kv := [][]byte{[]byte("tag1"), []byte("v1"), []byte("tag0"), []byte("v0"), []byte("tag2"), []byte("v2")} - b.ReportAllocs() - - for i := 0; i < b.N; i++ { - _, _ = models.NewTagsKeyValues(nil, kv...) - } - }) - b.Run("dupes", func(b *testing.B) { - kv := [][]byte{[]byte("tag1"), []byte("v1"), []byte("tag2"), []byte("v2"), []byte("tag0"), []byte("v0"), []byte("tag1"), []byte("v1"), []byte("tag2"), []byte("v2")} - b.ReportAllocs() - - for i := 0; i < b.N; i++ { - _, _ = models.NewTagsKeyValues(nil, kv...) - } - }) - }) -} - -func init() { - // Force uint support to be enabled for testing. - models.EnableUintSupport() -} diff --git a/v1/models/rows.go b/v1/models/rows.go deleted file mode 100644 index c087a4882d..0000000000 --- a/v1/models/rows.go +++ /dev/null @@ -1,62 +0,0 @@ -package models - -import ( - "sort" -) - -// Row represents a single row returned from the execution of a statement. -type Row struct { - Name string `json:"name,omitempty"` - Tags map[string]string `json:"tags,omitempty"` - Columns []string `json:"columns,omitempty"` - Values [][]interface{} `json:"values,omitempty"` - Partial bool `json:"partial,omitempty"` -} - -// SameSeries returns true if r contains values for the same series as o. -func (r *Row) SameSeries(o *Row) bool { - return r.tagsHash() == o.tagsHash() && r.Name == o.Name -} - -// tagsHash returns a hash of tag key/value pairs. -func (r *Row) tagsHash() uint64 { - h := NewInlineFNV64a() - keys := r.tagsKeys() - for _, k := range keys { - h.Write([]byte(k)) - h.Write([]byte(r.Tags[k])) - } - return h.Sum64() -} - -// tagKeys returns a sorted list of tag keys. 
-func (r *Row) tagsKeys() []string { - a := make([]string, 0, len(r.Tags)) - for k := range r.Tags { - a = append(a, k) - } - sort.Strings(a) - return a -} - -// Rows represents a collection of rows. Rows implements sort.Interface. -type Rows []*Row - -// Len implements sort.Interface. -func (p Rows) Len() int { return len(p) } - -// Less implements sort.Interface. -func (p Rows) Less(i, j int) bool { - // Sort by name first. - if p[i].Name != p[j].Name { - return p[i].Name < p[j].Name - } - - // Sort by tag set hash. Tags don't have a meaningful sort order so we - // just compute a hash and sort by that instead. This allows the tests - // to receive rows in a predictable order every time. - return p[i].tagsHash() < p[j].tagsHash() -} - -// Swap implements sort.Interface. -func (p Rows) Swap(i, j int) { p[i], p[j] = p[j], p[i] } diff --git a/v1/models/statistic.go b/v1/models/statistic.go deleted file mode 100644 index 553e9d09fb..0000000000 --- a/v1/models/statistic.go +++ /dev/null @@ -1,42 +0,0 @@ -package models - -// Statistic is the representation of a statistic used by the monitoring service. -type Statistic struct { - Name string `json:"name"` - Tags map[string]string `json:"tags"` - Values map[string]interface{} `json:"values"` -} - -// NewStatistic returns an initialized Statistic. -func NewStatistic(name string) Statistic { - return Statistic{ - Name: name, - Tags: make(map[string]string), - Values: make(map[string]interface{}), - } -} - -// StatisticTags is a map that can be merged with others without causing -// mutations to either map. -type StatisticTags map[string]string - -// Merge creates a new map containing the merged contents of tags and t. -// If both tags and the receiver map contain the same key, the value in tags -// is used in the resulting map. -// -// Merge always returns a usable map. -func (t StatisticTags) Merge(tags map[string]string) map[string]string { - // Add everything in tags to the result. - out := make(map[string]string, len(tags)) - for k, v := range tags { - out[k] = v - } - - // Only add values from t that don't appear in tags. 
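// A minimal usage sketch of the precedence documented above (values in the
// tags argument win on key collisions); the literals are illustrative:
//
//	base := models.StatisticTags{"region": "us", "host": "a"}
//	out := base.Merge(map[string]string{"host": "b"})
//	// out == map[string]string{"region": "us", "host": "b"}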
- for k, v := range t { - if _, ok := tags[k]; !ok { - out[k] = v - } - } - return out -} diff --git a/v1/models/statistic_test.go b/v1/models/statistic_test.go deleted file mode 100644 index a6a792d6b5..0000000000 --- a/v1/models/statistic_test.go +++ /dev/null @@ -1,55 +0,0 @@ -package models_test - -import ( - "reflect" - "testing" - - "github.com/influxdata/influxdb/v2/v1/models" -) - -func TestTags_Merge(t *testing.T) { - examples := []struct { - Base map[string]string - Arg map[string]string - Result map[string]string - }{ - { - Base: nil, - Arg: nil, - Result: map[string]string{}, - }, - { - Base: nil, - Arg: map[string]string{"foo": "foo"}, - Result: map[string]string{"foo": "foo"}, - }, - { - Base: map[string]string{"foo": "foo"}, - Arg: nil, - Result: map[string]string{"foo": "foo"}, - }, - { - Base: map[string]string{"foo": "foo"}, - Arg: map[string]string{"bar": "bar"}, - Result: map[string]string{"foo": "foo", "bar": "bar"}, - }, - { - Base: map[string]string{"foo": "foo", "bar": "bar"}, - Arg: map[string]string{"zoo": "zoo"}, - Result: map[string]string{"foo": "foo", "bar": "bar", "zoo": "zoo"}, - }, - { - Base: map[string]string{"foo": "foo", "bar": "bar"}, - Arg: map[string]string{"bar": "newbar"}, - Result: map[string]string{"foo": "foo", "bar": "newbar"}, - }, - } - - for i, example := range examples { - i++ - result := models.StatisticTags(example.Base).Merge(example.Arg) - if got, exp := result, example.Result; !reflect.DeepEqual(got, exp) { - t.Errorf("[Example %d] got %#v, expected %#v", i, got, exp) - } - } -} diff --git a/v1/models/tagkeysset.go b/v1/models/tagkeysset.go deleted file mode 100644 index d165bdce33..0000000000 --- a/v1/models/tagkeysset.go +++ /dev/null @@ -1,156 +0,0 @@ -package models - -import ( - "bytes" - "strings" -) - -// TagKeysSet provides set operations for combining Tags. -type TagKeysSet struct { - i int - keys [2][][]byte - tmp [][]byte -} - -// Clear removes all the elements of TagKeysSet and ensures all internal -// buffers are reset. -func (set *TagKeysSet) Clear() { - set.clear(set.keys[0]) - set.clear(set.keys[1]) - set.clear(set.tmp) - set.i = 0 - set.keys[0] = set.keys[0][:0] -} - -func (set *TagKeysSet) clear(b [][]byte) { - b = b[:cap(b)] - for i := range b { - b[i] = nil - } -} - -// KeysBytes returns the merged keys in lexicographical order. -// The slice is valid until the next call to UnionKeys, UnionBytes or Reset. -func (set *TagKeysSet) KeysBytes() [][]byte { - return set.keys[set.i&1] -} - -// Keys returns a copy of the merged keys in lexicographical order. -func (set *TagKeysSet) Keys() []string { - keys := set.KeysBytes() - s := make([]string, 0, len(keys)) - for i := range keys { - s = append(s, string(keys[i])) - } - return s -} - -func (set *TagKeysSet) String() string { - var s []string - for _, k := range set.KeysBytes() { - s = append(s, string(k)) - } - return strings.Join(s, ",") -} - -// IsSupersetKeys returns true if the TagKeysSet is a superset of all the keys -// contained in other. -func (set *TagKeysSet) IsSupersetKeys(other Tags) bool { - keys := set.keys[set.i&1] - i, j := 0, 0 - for i < len(keys) && j < len(other) { - if cmp := bytes.Compare(keys[i], other[j].Key); cmp > 0 { - return false - } else if cmp == 0 { - j++ - } - i++ - } - - return j == len(other) -} - -// IsSupersetBytes returns true if the TagKeysSet is a superset of all the keys -// in other. -// Other must be lexicographically sorted or the results are undefined. 
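// A hedged sketch of the contract; the single merge-style pass below is
// what makes the sortedness precondition load-bearing:
//
//	var set TagKeysSet
//	set.UnionBytes([][]byte{[]byte("tag0"), []byte("tag3"), []byte("tag5")})
//	_ = set.IsSupersetBytes([][]byte{[]byte("tag0"), []byte("tag5")}) // true
//	_ = set.IsSupersetBytes([][]byte{[]byte("tag5"), []byte("tag0")}) // unsorted: undefined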
-func (set *TagKeysSet) IsSupersetBytes(other [][]byte) bool { - keys := set.keys[set.i&1] - i, j := 0, 0 - for i < len(keys) && j < len(other) { - if cmp := bytes.Compare(keys[i], other[j]); cmp > 0 { - return false - } else if cmp == 0 { - j++ - } - i++ - } - - return j == len(other) -} - -// UnionKeys updates the set so that it is the union of itself and all the -// keys contained in other. -func (set *TagKeysSet) UnionKeys(other Tags) { - if set.IsSupersetKeys(other) { - return - } - - if l := len(other); cap(set.tmp) < l { - set.tmp = make([][]byte, l) - } else { - set.tmp = set.tmp[:l] - } - - for i := range other { - set.tmp[i] = other[i].Key - } - - set.merge(set.tmp) -} - -// UnionBytes updates the set so that it is the union of itself and all the -// keys contained in other. -// Other must be lexicographically sorted or the results are undefined. -func (set *TagKeysSet) UnionBytes(other [][]byte) { - if set.IsSupersetBytes(other) { - return - } - - set.merge(other) -} - -func (set *TagKeysSet) merge(in [][]byte) { - keys := set.keys[set.i&1] - l := len(keys) + len(in) - set.i = (set.i + 1) & 1 - keya := set.keys[set.i&1] - if cap(keya) < l { - keya = make([][]byte, 0, l) - } else { - keya = keya[:0] - } - - i, j := 0, 0 - for i < len(keys) && j < len(in) { - ki, kj := keys[i], in[j] - if cmp := bytes.Compare(ki, kj); cmp < 0 { - i++ - } else if cmp > 0 { - ki = kj - j++ - } else { - i++ - j++ - } - - keya = append(keya, ki) - } - - if i < len(keys) { - keya = append(keya, keys[i:]...) - } else if j < len(in) { - keya = append(keya, in[j:]...) - } - - set.keys[set.i&1] = keya -} diff --git a/v1/models/tagkeysset_test.go b/v1/models/tagkeysset_test.go deleted file mode 100644 index ca3748dffb..0000000000 --- a/v1/models/tagkeysset_test.go +++ /dev/null @@ -1,325 +0,0 @@ -package models_test - -import ( - "bytes" - "math/rand" - "strconv" - "testing" - - "github.com/google/go-cmp/cmp" - "github.com/influxdata/influxdb/v2/v1/models" -) - -func TestTagKeysSet_UnionKeys(t *testing.T) { - tests := []struct { - name string - tags []models.Tags - exp string - }{ - { - name: "mixed", - tags: []models.Tags{ - models.ParseTags([]byte("foo,tag0=v0,tag1=v0,tag2=v0")), - models.ParseTags([]byte("foo,tag0=v0,tag1=v0,tag2=v1")), - models.ParseTags([]byte("foo,tag0=v0")), - models.ParseTags([]byte("foo,tag0=v0,tag3=v0")), - }, - exp: "tag0,tag1,tag2,tag3", - }, - { - name: "mixed 2", - tags: []models.Tags{ - models.ParseTags([]byte("foo,tag0=v0")), - models.ParseTags([]byte("foo,tag0=v0,tag3=v0")), - models.ParseTags([]byte("foo,tag0=v0,tag1=v0,tag2=v0")), - models.ParseTags([]byte("foo,tag0=v0,tag1=v0,tag2=v1")), - }, - exp: "tag0,tag1,tag2,tag3", - }, - { - name: "all different", - tags: []models.Tags{ - models.ParseTags([]byte("foo,tag0=v0")), - models.ParseTags([]byte("foo,tag1=v0")), - models.ParseTags([]byte("foo,tag2=v1")), - models.ParseTags([]byte("foo,tag3=v0")), - }, - exp: "tag0,tag1,tag2,tag3", - }, - { - name: "new tags,verify clear", - tags: []models.Tags{ - models.ParseTags([]byte("foo,tag9=v0")), - models.ParseTags([]byte("foo,tag8=v0")), - }, - exp: "tag8,tag9", - }, - } - - var km models.TagKeysSet - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - km.Clear() - for _, tags := range tt.tags { - km.UnionKeys(tags) - } - - if got := km.String(); !cmp.Equal(got, tt.exp) { - t.Errorf("unexpected keys -got/+exp\n%s", cmp.Diff(got, tt.exp)) - } - }) - } -} - -func TestTagKeysSet_IsSuperset(t *testing.T) { - var km models.TagKeysSet - 
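// The set.merge implementation above is a textbook sorted union; a hedged
// standalone equivalent over strings, for reference only:
//
//	func union(a, b []string) []string {
//		out := make([]string, 0, len(a)+len(b))
//		var i, j int
//		for i < len(a) && j < len(b) {
//			switch {
//			case a[i] < b[j]:
//				out = append(out, a[i])
//				i++
//			case a[i] > b[j]:
//				out = append(out, b[j])
//				j++
//			default: // equal: keep one copy, advance both
//				out = append(out, a[i])
//				i++
//				j++
//			}
//		}
//		// at most one of these tails is non-empty
//		return append(append(out, a[i:]...), b[j:]...)
//	}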
km.UnionBytes(bytes.Split([]byte("tag0,tag3,tag5,tag7"), commaB)) - - tests := []struct { - name string - tags models.Tags - exp bool - }{ - { - tags: models.ParseTags([]byte("foo,tag0=v,tag3=v")), - exp: true, - }, - { - tags: models.ParseTags([]byte("foo,tag3=v")), - exp: true, - }, - { - tags: models.ParseTags([]byte("foo,tag7=v")), - exp: true, - }, - { - tags: models.ParseTags([]byte("foo,tag3=v,tag7=v")), - exp: true, - }, - { - tags: models.ParseTags([]byte("foo,tag0=v,tag3=v,tag5=v,tag7=v")), - exp: true, - }, - { - tags: models.ParseTags([]byte("foo")), - exp: true, - }, - { - tags: models.ParseTags([]byte("foo,tag0=v,tag2=v")), - exp: false, - }, - { - tags: models.ParseTags([]byte("foo,tag1=v")), - exp: false, - }, - { - tags: models.ParseTags([]byte("foo,tag6=v")), - exp: false, - }, - { - tags: models.ParseTags([]byte("foo,tag8=v")), - exp: false, - }, - { - tags: models.ParseTags([]byte("foo,tag0=v,tag3=v,tag5=v,tag8=v")), - exp: false, - }, - { - tags: models.ParseTags([]byte("foo,tag0=v,tag3=v,tag5=v,tag6=v")), - exp: false, - }, - { - tags: models.ParseTags([]byte("foo,tag0=v,tag3=v,tag5=v,tag7=v,tag8=v")), - exp: false, - }, - } - - for _, tt := range tests { - t.Run("tags/"+tt.name, func(t *testing.T) { - if got := km.IsSupersetKeys(tt.tags); got != tt.exp { - t.Errorf("unexpected IsSuperset -got/+exp\n%s", cmp.Diff(got, tt.exp)) - } - }) - } - - for _, tt := range tests { - t.Run("bytes/"+tt.name, func(t *testing.T) { - var keys [][]byte - for i := range tt.tags { - keys = append(keys, tt.tags[i].Key) - } - if got := km.IsSupersetBytes(keys); got != tt.exp { - t.Errorf("unexpected IsSupersetBytes -got/+exp\n%s", cmp.Diff(got, tt.exp)) - } - }) - } -} - -var commaB = []byte(",") - -func TestTagKeysSet_UnionBytes(t *testing.T) { - - tests := []struct { - name string - keys [][][]byte - exp string - }{ - { - name: "mixed", - keys: [][][]byte{ - bytes.Split([]byte("tag0,tag1,tag2"), commaB), - bytes.Split([]byte("tag0,tag1,tag2"), commaB), - bytes.Split([]byte("tag0"), commaB), - bytes.Split([]byte("tag0,tag3"), commaB), - }, - exp: "tag0,tag1,tag2,tag3", - }, - { - name: "mixed 2", - keys: [][][]byte{ - bytes.Split([]byte("tag0"), commaB), - bytes.Split([]byte("tag0,tag3"), commaB), - bytes.Split([]byte("tag0,tag1,tag2"), commaB), - bytes.Split([]byte("tag0,tag1,tag2"), commaB), - }, - exp: "tag0,tag1,tag2,tag3", - }, - { - name: "all different", - keys: [][][]byte{ - bytes.Split([]byte("tag0"), commaB), - bytes.Split([]byte("tag3"), commaB), - bytes.Split([]byte("tag1"), commaB), - bytes.Split([]byte("tag2"), commaB), - }, - exp: "tag0,tag1,tag2,tag3", - }, - { - name: "new tags,verify clear", - keys: [][][]byte{ - bytes.Split([]byte("tag9"), commaB), - bytes.Split([]byte("tag8"), commaB), - }, - exp: "tag8,tag9", - }, - } - - var km models.TagKeysSet - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - km.Clear() - for _, keys := range tt.keys { - km.UnionBytes(keys) - } - - if got := km.String(); !cmp.Equal(got, tt.exp) { - t.Errorf("unexpected keys -got/+exp\n%s", cmp.Diff(got, tt.exp)) - } - }) - } -} - -func BenchmarkTagKeysSet_UnionBytes(b *testing.B) { - keys := [][][]byte{ - bytes.Split([]byte("tag00,tag01,tag02"), commaB), - bytes.Split([]byte("tag00,tag01,tag02"), commaB), - bytes.Split([]byte("tag00,tag01,tag05,tag06,tag10,tag11,tag12,tag13,tag14,tag15"), commaB), - bytes.Split([]byte("tag00"), commaB), - bytes.Split([]byte("tag00,tag03"), commaB), - bytes.Split([]byte("tag01,tag03,tag13,tag14,tag15"), commaB), - 
bytes.Split([]byte("tag04,tag05"), commaB), - } - - rand.Seed(20040409) - - tests := []int{ - 10, - 1000, - 1000000, - } - - for _, n := range tests { - b.Run(strconv.Itoa(n), func(b *testing.B) { - b.ResetTimer() - - var km models.TagKeysSet - for i := 0; i < b.N; i++ { - for j := 0; j < n; j++ { - km.UnionBytes(keys[rand.Int()%len(keys)]) - } - km.Clear() - } - }) - } -} - -type XorShift64Star struct { - state uint64 -} - -func (x *XorShift64Star) Next() uint64 { - x.state ^= x.state >> 12 - x.state ^= x.state << 25 - x.state ^= x.state >> 27 - return x.state * 2685821657736338717 -} - -func BenchmarkTagKeysSet_UnionKeys(b *testing.B) { - tags := []models.Tags{ - models.ParseTags([]byte("foo,tag00=v0,tag01=v0,tag02=v0")), - models.ParseTags([]byte("foo,tag00=v0,tag01=v0,tag02=v0")), - models.ParseTags([]byte("foo,tag00=v0,tag01=v0,tag05=v0,tag06=v0,tag10=v0,tag11=v0,tag12=v0,tag13=v0,tag14=v0,tag15=v0")), - models.ParseTags([]byte("foo,tag00=v0")), - models.ParseTags([]byte("foo,tag00=v0,tag03=v0")), - models.ParseTags([]byte("foo,tag01=v0,tag03=v0,tag13=v0,tag14=v0,tag15=v0")), - models.ParseTags([]byte("foo,tag04=v0,tag05=v0")), - } - - rnd := XorShift64Star{state: 20040409} - - tests := []int{ - 10, - 1000, - 1000000, - } - - for _, n := range tests { - b.Run(strconv.Itoa(n), func(b *testing.B) { - b.ResetTimer() - - var km models.TagKeysSet - for i := 0; i < b.N; i++ { - for j := 0; j < n; j++ { - km.UnionKeys(tags[rnd.Next()%uint64(len(tags))]) - } - km.Clear() - } - }) - } -} - -func BenchmarkTagKeysSet_IsSuperset(b *testing.B) { - var km models.TagKeysSet - km.UnionBytes(bytes.Split([]byte("tag0,tag3,tag5,tag7"), commaB)) - - tests := []struct { - name string - tags models.Tags - }{ - {name: "last/true", tags: models.ParseTags([]byte("foo,tag7=v"))}, - {name: "last/false", tags: models.ParseTags([]byte("foo,tag8=v"))}, - {name: "first_last/true", tags: models.ParseTags([]byte("foo,tag0=v,tag7=v"))}, - {name: "all/true", tags: models.ParseTags([]byte("foo,tag0=v,tag3=v,tag5=v,tag7=v"))}, - {name: "first not last/false", tags: models.ParseTags([]byte("foo,tag0=v,tag8=v"))}, - {name: "all but last/false", tags: models.ParseTags([]byte("foo,tag0=v,tag3=v,tag5=v,tag7=v,tag8=v"))}, - } - - for _, n := range tests { - b.Run(n.name, func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - km.IsSupersetKeys(n.tags) - } - }) - } -} diff --git a/v1/models/time.go b/v1/models/time.go deleted file mode 100644 index 297892c6da..0000000000 --- a/v1/models/time.go +++ /dev/null @@ -1,74 +0,0 @@ -package models - -// Helper time methods since parsing time can easily overflow and we only support a -// specific time range. - -import ( - "fmt" - "math" - "time" -) - -const ( - // MinNanoTime is the minimum time that can be represented. - // - // 1677-09-21 00:12:43.145224194 +0000 UTC - // - // The two lowest minimum integers are used as sentinel values. The - // minimum value needs to be used as a value lower than any other value for - // comparisons and another separate value is needed to act as a sentinel - // default value that is unusable by the user, but usable internally. - // Because these two values need to be used for a special purpose, we do - // not allow users to write points at these two times. - MinNanoTime = int64(math.MinInt64) + 2 - - // MaxNanoTime is the maximum time that can be represented. 
- // - // 2262-04-11 23:47:16.854775806 +0000 UTC - // - // The highest time represented by a nanosecond needs to be used for an - // exclusive range in the shard group, so the maximum time needs to be one - // less than the possible maximum number of nanoseconds representable by an - // int64 so that we don't lose a point at that one time. - MaxNanoTime = int64(math.MaxInt64) - 1 -) - -var ( - minNanoTime = time.Unix(0, MinNanoTime).UTC() - maxNanoTime = time.Unix(0, MaxNanoTime).UTC() - - // ErrTimeOutOfRange gets returned when time is out of the representable range using int64 nanoseconds since the epoch. - ErrTimeOutOfRange = fmt.Errorf("time outside range %d - %d", MinNanoTime, MaxNanoTime) -) - -// SafeCalcTime safely calculates the time given. Will return error if the time is outside the -// supported range. -func SafeCalcTime(timestamp int64, precision string) (time.Time, error) { - mult := GetPrecisionMultiplier(precision) - if t, ok := safeSignedMult(timestamp, mult); ok { - tme := time.Unix(0, t).UTC() - return tme, CheckTime(tme) - } - - return time.Time{}, ErrTimeOutOfRange -} - -// CheckTime checks that a time is within the safe range. -func CheckTime(t time.Time) error { - if t.Before(minNanoTime) || t.After(maxNanoTime) { - return ErrTimeOutOfRange - } - return nil -} - -// Perform the multiplication and check to make sure it didn't overflow. -func safeSignedMult(a, b int64) (int64, bool) { - if a == 0 || b == 0 || a == 1 || b == 1 { - return a * b, true - } - if a == MinNanoTime || b == MaxNanoTime { - return 0, false - } - c := a * b - return c, c/b == a -} diff --git a/v1/models/uint_support.go b/v1/models/uint_support.go deleted file mode 100644 index 18d1ca06e2..0000000000 --- a/v1/models/uint_support.go +++ /dev/null @@ -1,7 +0,0 @@ -// +build uint uint64 - -package models - -func init() { - EnableUintSupport() -} diff --git a/v1/monitor/reporter.go b/v1/monitor/reporter.go index ef0aa6cb00..ac7ec1e25b 100644 --- a/v1/monitor/reporter.go +++ b/v1/monitor/reporter.go @@ -1,6 +1,6 @@ package monitor -import "github.com/influxdata/influxdb/v2/v1/models" +import "github.com/influxdata/influxdb/v2/models" // Reporter is an interface for gathering internal statistics. 
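// Looking back at the time helpers deleted above: safeSignedMult detects
// int64 overflow by re-dividing the product, and SafeCalcTime then bounds
// the result to [MinNanoTime, MaxNanoTime]. A hedged sketch of the guard
// (the zero and one operands are already handled by the early returns):
//
//	c := a * b
//	ok := c/b == a // false when a*b wrapped around int64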
type Reporter interface { diff --git a/v1/monitor/service.go b/v1/monitor/service.go index 9216a4d59a..7f529f8e23 100644 --- a/v1/monitor/service.go +++ b/v1/monitor/service.go @@ -14,7 +14,7 @@ import ( "time" "github.com/influxdata/influxdb/v2/logger" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/monitor/diagnostics" "github.com/influxdata/influxdb/v2/v1/services/meta" "go.uber.org/zap" diff --git a/v1/monitor/service_test.go b/v1/monitor/service_test.go index 0ad393c2d9..bf83d5881f 100644 --- a/v1/monitor/service_test.go +++ b/v1/monitor/service_test.go @@ -11,8 +11,8 @@ import ( "testing" "time" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/toml" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/monitor" "github.com/influxdata/influxdb/v2/v1/services/meta" "go.uber.org/zap" diff --git a/v1/services/meta/data.go b/v1/services/meta/data.go index d02a692001..df765feb27 100644 --- a/v1/services/meta/data.go +++ b/v1/services/meta/data.go @@ -13,8 +13,8 @@ import ( "github.com/gogo/protobuf/proto" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" influxdb "github.com/influxdata/influxdb/v2/v1" - "github.com/influxdata/influxdb/v2/v1/models" internal "github.com/influxdata/influxdb/v2/v1/services/meta/internal" "github.com/influxdata/influxql" ) diff --git a/v1/services/meta/internal/meta.pb.go b/v1/services/meta/internal/meta.pb.go index 403b868317..a294853f9e 100644 --- a/v1/services/meta/internal/meta.pb.go +++ b/v1/services/meta/internal/meta.pb.go @@ -55,9 +55,13 @@ It has these top-level messages: */ package meta -import proto "github.com/gogo/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" + + proto "github.com/gogo/protobuf/proto" + + math "math" +) // Reference imports to suppress errors if they are not otherwise used. 
var _ = proto.Marshal diff --git a/v1/services/storage/predicate_influxql.go b/v1/services/storage/predicate_influxql.go index 7fa273ca67..9853917999 100644 --- a/v1/services/storage/predicate_influxql.go +++ b/v1/services/storage/predicate_influxql.go @@ -1,7 +1,7 @@ package storage import ( - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/v1/services/storage/series_cursor.go b/v1/services/storage/series_cursor.go index e88111537d..b3436d89ec 100644 --- a/v1/services/storage/series_cursor.go +++ b/v1/services/storage/series_cursor.go @@ -6,9 +6,9 @@ import ( "sort" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/storage/reads" "github.com/influxdata/influxdb/v2/storage/reads/datatypes" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" opentracing "github.com/opentracing/opentracing-go" diff --git a/v1/services/storage/series_cursor_test.go b/v1/services/storage/series_cursor_test.go index d10d5a51b5..a3b44be565 100644 --- a/v1/services/storage/series_cursor_test.go +++ b/v1/services/storage/series_cursor_test.go @@ -4,7 +4,7 @@ import ( "testing" "github.com/google/go-cmp/cmp" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/v1/services/storage/source.pb.go b/v1/services/storage/source.pb.go index 186b9bc50c..a5456d73c8 100644 --- a/v1/services/storage/source.pb.go +++ b/v1/services/storage/source.pb.go @@ -12,12 +12,17 @@ */ package storage -import proto "github.com/gogo/protobuf/proto" -import fmt "fmt" -import math "math" -import _ "github.com/gogo/protobuf/gogoproto" +import ( + fmt "fmt" -import io "io" + proto "github.com/gogo/protobuf/proto" + + math "math" + + _ "github.com/gogo/protobuf/gogoproto" + + io "io" +) // Reference imports to suppress errors if they are not otherwise used. 
var _ = proto.Marshal diff --git a/v1/services/storage/store.go b/v1/services/storage/store.go index 44b5479e0d..79924fe938 100644 --- a/v1/services/storage/store.go +++ b/v1/services/storage/store.go @@ -9,9 +9,9 @@ import ( "github.com/gogo/protobuf/proto" "github.com/gogo/protobuf/types" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/storage/reads" "github.com/influxdata/influxdb/v2/storage/reads/datatypes" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/services/meta" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/cursors" diff --git a/v1/tsdb/batcher.go b/v1/tsdb/batcher.go index eb67398cae..4356ff1c8f 100644 --- a/v1/tsdb/batcher.go +++ b/v1/tsdb/batcher.go @@ -5,7 +5,7 @@ import ( "sync/atomic" "time" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) // PointBatcher accepts Points and will emit a batch of those points when either diff --git a/v1/tsdb/batcher_test.go b/v1/tsdb/batcher_test.go index 41769cca7a..11f362174d 100644 --- a/v1/tsdb/batcher_test.go +++ b/v1/tsdb/batcher_test.go @@ -4,7 +4,7 @@ import ( "testing" "time" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/v1/tsdb/cursors/cursor.go b/v1/tsdb/cursors/cursor.go index dfcaabdc32..7876d42edd 100644 --- a/v1/tsdb/cursors/cursor.go +++ b/v1/tsdb/cursors/cursor.go @@ -3,7 +3,7 @@ package cursors import ( "context" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) const DefaultMaxPointsPerBlock = 1000 diff --git a/v1/tsdb/engine.go b/v1/tsdb/engine.go index 42e44c136d..3926150a8f 100644 --- a/v1/tsdb/engine.go +++ b/v1/tsdb/engine.go @@ -12,9 +12,9 @@ import ( "time" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/limiter" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxql" "go.uber.org/zap" ) diff --git a/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go b/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go index 408e8587b5..67c506ed6d 100644 --- a/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go +++ b/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go @@ -10,7 +10,7 @@ import ( "context" "github.com/influxdata/influxdb/v2/influxql/query" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go.tmpl b/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go.tmpl index 1b7da83793..2c76060e09 100644 --- a/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go.tmpl +++ b/v1/tsdb/engine/tsm1/array_cursor_iterator.gen.go.tmpl @@ -3,7 +3,7 @@ package tsm1 import ( "context" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/influxql/query" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/v1/tsdb/engine/tsm1/array_cursor_iterator.go b/v1/tsdb/engine/tsm1/array_cursor_iterator.go index 0af8654939..fe2331fd12 100644 --- a/v1/tsdb/engine/tsm1/array_cursor_iterator.go +++ b/v1/tsdb/engine/tsm1/array_cursor_iterator.go @@ -5,8 +5,8 @@ import ( "fmt" "github.com/influxdata/influxdb/v2/influxql/query" + 
"github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/metrics" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/engine/tsm1/cache.go b/v1/tsdb/engine/tsm1/cache.go index a3e56dde32..364862bd7f 100644 --- a/v1/tsdb/engine/tsm1/cache.go +++ b/v1/tsdb/engine/tsm1/cache.go @@ -8,7 +8,7 @@ import ( "sync/atomic" "time" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" "go.uber.org/zap" diff --git a/v1/tsdb/engine/tsm1/engine.go b/v1/tsdb/engine/tsm1/engine.go index f3aee4e83a..4e7b1ca2a3 100644 --- a/v1/tsdb/engine/tsm1/engine.go +++ b/v1/tsdb/engine/tsm1/engine.go @@ -22,6 +22,7 @@ import ( "github.com/influxdata/influxdb/v2/influxql/query" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bytesutil" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/file" @@ -30,7 +31,6 @@ import ( "github.com/influxdata/influxdb/v2/pkg/radix" intar "github.com/influxdata/influxdb/v2/pkg/tar" "github.com/influxdata/influxdb/v2/pkg/tracing" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" _ "github.com/influxdata/influxdb/v2/v1/tsdb/index" "github.com/influxdata/influxdb/v2/v1/tsdb/index/inmem" diff --git a/v1/tsdb/engine/tsm1/engine_test.go b/v1/tsdb/engine/tsm1/engine_test.go index 06bbd8e165..630ef838bd 100644 --- a/v1/tsdb/engine/tsm1/engine_test.go +++ b/v1/tsdb/engine/tsm1/engine_test.go @@ -22,8 +22,8 @@ import ( "github.com/google/go-cmp/cmp" "github.com/influxdata/influxdb/v2/influxql/query" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/deep" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/engine/tsm1" "github.com/influxdata/influxdb/v2/v1/tsdb/index/inmem" diff --git a/v1/tsdb/engine/tsm1/file_store.go b/v1/tsdb/engine/tsm1/file_store.go index b027df2490..5079155795 100644 --- a/v1/tsdb/engine/tsm1/file_store.go +++ b/v1/tsdb/engine/tsm1/file_store.go @@ -18,10 +18,10 @@ import ( "time" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/file" "github.com/influxdata/influxdb/v2/pkg/limiter" "github.com/influxdata/influxdb/v2/pkg/metrics" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "go.uber.org/zap" ) diff --git a/v1/tsdb/engine/tsm1/file_store_key_iterator_test.go b/v1/tsdb/engine/tsm1/file_store_key_iterator_test.go index f62c4919a4..832c242e1b 100644 --- a/v1/tsdb/engine/tsm1/file_store_key_iterator_test.go +++ b/v1/tsdb/engine/tsm1/file_store_key_iterator_test.go @@ -149,34 +149,36 @@ func (t *mockTSMFile) KeyAt(idx int) ([]byte, byte) { return []byte(t.keys[idx]), BlockFloat64 } -func (*mockTSMFile) Path() string { panic("implement me") } -func (*mockTSMFile) Read(key []byte, t int64) ([]Value, error) { panic("implement me") } -func (*mockTSMFile) ReadAt(entry *IndexEntry, values []Value) ([]Value, error) { panic("implement me") } -func (*mockTSMFile) Entries(key []byte) []IndexEntry { panic("implement me") } -func (*mockTSMFile) ReadEntries(key []byte, entries *[]IndexEntry) []IndexEntry { 
panic("implement me") } -func (*mockTSMFile) ContainsValue(key []byte, t int64) bool { panic("implement me") } -func (*mockTSMFile) Contains(key []byte) bool { panic("implement me") } -func (*mockTSMFile) OverlapsTimeRange(min, max int64) bool { panic("implement me") } -func (*mockTSMFile) OverlapsKeyRange(min, max []byte) bool { panic("implement me") } -func (*mockTSMFile) TimeRange() (int64, int64) { panic("implement me") } -func (*mockTSMFile) TombstoneRange(key []byte) []TimeRange { panic("implement me") } -func (*mockTSMFile) KeyRange() ([]byte, []byte) { panic("implement me") } -func (*mockTSMFile) Type(key []byte) (byte, error) { panic("implement me") } -func (*mockTSMFile) BatchDelete() BatchDeleter { panic("implement me") } -func (*mockTSMFile) Delete(keys [][]byte) error { panic("implement me") } -func (*mockTSMFile) DeleteRange(keys [][]byte, min, max int64) error { panic("implement me") } -func (*mockTSMFile) HasTombstones() bool { panic("implement me") } -func (*mockTSMFile) TombstoneFiles() []FileStat { panic("implement me") } -func (*mockTSMFile) Close() error { panic("implement me") } -func (*mockTSMFile) Size() uint32 { panic("implement me") } -func (*mockTSMFile) Rename(path string) error { panic("implement me") } -func (*mockTSMFile) Remove() error { panic("implement me") } -func (*mockTSMFile) InUse() bool { panic("implement me") } -func (*mockTSMFile) Ref() { panic("implement me") } -func (*mockTSMFile) Unref() { panic("implement me") } -func (*mockTSMFile) Stats() FileStat { panic("implement me") } -func (*mockTSMFile) BlockIterator() *BlockIterator { panic("implement me") } -func (*mockTSMFile) Free() error { panic("implement me") } +func (*mockTSMFile) Path() string { panic("implement me") } +func (*mockTSMFile) Read(key []byte, t int64) ([]Value, error) { panic("implement me") } +func (*mockTSMFile) ReadAt(entry *IndexEntry, values []Value) ([]Value, error) { panic("implement me") } +func (*mockTSMFile) Entries(key []byte) []IndexEntry { panic("implement me") } +func (*mockTSMFile) ReadEntries(key []byte, entries *[]IndexEntry) []IndexEntry { + panic("implement me") +} +func (*mockTSMFile) ContainsValue(key []byte, t int64) bool { panic("implement me") } +func (*mockTSMFile) Contains(key []byte) bool { panic("implement me") } +func (*mockTSMFile) OverlapsTimeRange(min, max int64) bool { panic("implement me") } +func (*mockTSMFile) OverlapsKeyRange(min, max []byte) bool { panic("implement me") } +func (*mockTSMFile) TimeRange() (int64, int64) { panic("implement me") } +func (*mockTSMFile) TombstoneRange(key []byte) []TimeRange { panic("implement me") } +func (*mockTSMFile) KeyRange() ([]byte, []byte) { panic("implement me") } +func (*mockTSMFile) Type(key []byte) (byte, error) { panic("implement me") } +func (*mockTSMFile) BatchDelete() BatchDeleter { panic("implement me") } +func (*mockTSMFile) Delete(keys [][]byte) error { panic("implement me") } +func (*mockTSMFile) DeleteRange(keys [][]byte, min, max int64) error { panic("implement me") } +func (*mockTSMFile) HasTombstones() bool { panic("implement me") } +func (*mockTSMFile) TombstoneFiles() []FileStat { panic("implement me") } +func (*mockTSMFile) Close() error { panic("implement me") } +func (*mockTSMFile) Size() uint32 { panic("implement me") } +func (*mockTSMFile) Rename(path string) error { panic("implement me") } +func (*mockTSMFile) Remove() error { panic("implement me") } +func (*mockTSMFile) InUse() bool { panic("implement me") } +func (*mockTSMFile) Ref() { panic("implement me") } +func (*mockTSMFile) 
Unref() { panic("implement me") } +func (*mockTSMFile) Stats() FileStat { panic("implement me") } +func (*mockTSMFile) BlockIterator() *BlockIterator { panic("implement me") } +func (*mockTSMFile) Free() error { panic("implement me") } func (*mockTSMFile) ReadFloatBlockAt(*IndexEntry, *[]FloatValue) ([]FloatValue, error) { panic("implement me") diff --git a/v1/tsdb/engine/tsm1/ring_test.go b/v1/tsdb/engine/tsm1/ring_test.go index 394de7246e..c8bea3abf1 100644 --- a/v1/tsdb/engine/tsm1/ring_test.go +++ b/v1/tsdb/engine/tsm1/ring_test.go @@ -79,7 +79,9 @@ func benchmarkRingGetPartition(b *testing.B, r *ring, keys int) { } } -func BenchmarkRing_getPartition_100(b *testing.B) { benchmarkRingGetPartition(b, MustNewRing(256), 100) } +func BenchmarkRing_getPartition_100(b *testing.B) { + benchmarkRingGetPartition(b, MustNewRing(256), 100) +} func BenchmarkRing_getPartition_1000(b *testing.B) { benchmarkRingGetPartition(b, MustNewRing(256), 1000) } diff --git a/v1/tsdb/engine/tsm1/wal.go b/v1/tsdb/engine/tsm1/wal.go index 8593e216cb..69b972c9c8 100644 --- a/v1/tsdb/engine/tsm1/wal.go +++ b/v1/tsdb/engine/tsm1/wal.go @@ -18,9 +18,9 @@ import ( "time" "github.com/golang/snappy" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/limiter" "github.com/influxdata/influxdb/v2/pkg/pool" - "github.com/influxdata/influxdb/v2/v1/models" "go.uber.org/zap" ) diff --git a/v1/tsdb/field_validator.go b/v1/tsdb/field_validator.go index 7a800c8db0..ac347af79a 100644 --- a/v1/tsdb/field_validator.go +++ b/v1/tsdb/field_validator.go @@ -4,7 +4,7 @@ import ( "bytes" "fmt" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/guard.go b/v1/tsdb/guard.go index d77d144e63..6060365289 100644 --- a/v1/tsdb/guard.go +++ b/v1/tsdb/guard.go @@ -4,7 +4,7 @@ import ( "bytes" "sync" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/guard_test.go b/v1/tsdb/guard_test.go index 0638faca53..c13cdb141a 100644 --- a/v1/tsdb/guard_test.go +++ b/v1/tsdb/guard_test.go @@ -5,7 +5,7 @@ import ( "time" "github.com/davecgh/go-spew/spew" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/index.go b/v1/tsdb/index.go index 39481dfe81..be020b2ca3 100644 --- a/v1/tsdb/index.go +++ b/v1/tsdb/index.go @@ -10,10 +10,10 @@ import ( "sync" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bytesutil" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/slices" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxql" "go.uber.org/zap" ) diff --git a/v1/tsdb/index/inmem/inmem.go b/v1/tsdb/index/inmem/inmem.go index f9d0ce0eee..446269b037 100644 --- a/v1/tsdb/index/inmem/inmem.go +++ b/v1/tsdb/index/inmem/inmem.go @@ -20,11 +20,11 @@ import ( "unsafe" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bytesutil" "github.com/influxdata/influxdb/v2/pkg/escape" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/estimator/hll" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" "go.uber.org/zap" diff --git 
a/v1/tsdb/index/inmem/inmem_test.go b/v1/tsdb/index/inmem/inmem_test.go index 249b2753f1..18fc032cb2 100644 --- a/v1/tsdb/index/inmem/inmem_test.go +++ b/v1/tsdb/index/inmem/inmem_test.go @@ -6,7 +6,7 @@ import ( "os" "testing" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/inmem" ) diff --git a/v1/tsdb/index/inmem/meta.go b/v1/tsdb/index/inmem/meta.go index 427ef5aab6..a48b2dd8d0 100644 --- a/v1/tsdb/index/inmem/meta.go +++ b/v1/tsdb/index/inmem/meta.go @@ -9,9 +9,9 @@ import ( "unsafe" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bytesutil" "github.com/influxdata/influxdb/v2/pkg/radix" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/index/inmem/meta_test.go b/v1/tsdb/index/inmem/meta_test.go index 99ba8f3f20..62158efd8b 100644 --- a/v1/tsdb/index/inmem/meta_test.go +++ b/v1/tsdb/index/inmem/meta_test.go @@ -9,7 +9,7 @@ import ( "time" "github.com/influxdata/influxdb/v2/influxql/query" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/index/internal/file_set.go b/v1/tsdb/index/internal/file_set.go index 90a311fb13..ea7b9a6666 100644 --- a/v1/tsdb/index/internal/file_set.go +++ b/v1/tsdb/index/internal/file_set.go @@ -1,9 +1,9 @@ package internal import ( + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bloom" "github.com/influxdata/influxdb/v2/pkg/estimator" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" ) diff --git a/v1/tsdb/index/tsi1/file_set_test.go b/v1/tsdb/index/tsi1/file_set_test.go index 92c548fa1b..1115e6c16a 100644 --- a/v1/tsdb/index/tsi1/file_set_test.go +++ b/v1/tsdb/index/tsi1/file_set_test.go @@ -6,7 +6,7 @@ import ( "sort" "testing" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/v1/tsdb/index/tsi1/index.go b/v1/tsdb/index/tsi1/index.go index daea94ea43..6ab5c51f65 100644 --- a/v1/tsdb/index/tsi1/index.go +++ b/v1/tsdb/index/tsi1/index.go @@ -14,10 +14,10 @@ import ( "unsafe" "github.com/cespare/xxhash" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/estimator/hll" "github.com/influxdata/influxdb/v2/pkg/slices" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" "go.uber.org/zap" diff --git a/v1/tsdb/index/tsi1/index_file.go b/v1/tsdb/index/tsi1/index_file.go index d3a42fe6f9..1fe6cd97b5 100644 --- a/v1/tsdb/index/tsi1/index_file.go +++ b/v1/tsdb/index/tsi1/index_file.go @@ -9,10 +9,10 @@ import ( "sync" "unsafe" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/estimator/hll" "github.com/influxdata/influxdb/v2/pkg/mmap" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/v1/tsdb/index/tsi1/index_file_test.go b/v1/tsdb/index/tsi1/index_file_test.go index 0eaafaa515..fd34272858 
100644 --- a/v1/tsdb/index/tsi1/index_file_test.go +++ b/v1/tsdb/index/tsi1/index_file_test.go @@ -6,7 +6,7 @@ import ( "testing" "time" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" ) diff --git a/v1/tsdb/index/tsi1/index_files_test.go b/v1/tsdb/index/tsi1/index_files_test.go index 51807e00f6..ed163dac06 100644 --- a/v1/tsdb/index/tsi1/index_files_test.go +++ b/v1/tsdb/index/tsi1/index_files_test.go @@ -4,7 +4,7 @@ import ( "bytes" "testing" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" ) diff --git a/v1/tsdb/index/tsi1/index_test.go b/v1/tsdb/index/tsi1/index_test.go index 8d5512f66e..d067901600 100644 --- a/v1/tsdb/index/tsi1/index_test.go +++ b/v1/tsdb/index/tsi1/index_test.go @@ -12,7 +12,7 @@ import ( "sync" "testing" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" ) diff --git a/v1/tsdb/index/tsi1/log_file.go b/v1/tsdb/index/tsi1/log_file.go index ba0f0f3a7e..8f4d1105b5 100644 --- a/v1/tsdb/index/tsi1/log_file.go +++ b/v1/tsdb/index/tsi1/log_file.go @@ -14,11 +14,11 @@ import ( "time" "unsafe" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bloom" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/estimator/hll" "github.com/influxdata/influxdb/v2/pkg/mmap" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/v1/tsdb/index/tsi1/log_file_test.go b/v1/tsdb/index/tsi1/log_file_test.go index 21bf5a8461..a3f27c140a 100644 --- a/v1/tsdb/index/tsi1/log_file_test.go +++ b/v1/tsdb/index/tsi1/log_file_test.go @@ -16,8 +16,8 @@ import ( "github.com/influxdata/influxdb/v2/pkg/slices" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bloom" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" ) diff --git a/v1/tsdb/index/tsi1/partition.go b/v1/tsdb/index/tsi1/partition.go index e3c2043559..e48fddbcbd 100644 --- a/v1/tsdb/index/tsi1/partition.go +++ b/v1/tsdb/index/tsi1/partition.go @@ -16,9 +16,9 @@ import ( "unsafe" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bytesutil" "github.com/influxdata/influxdb/v2/pkg/estimator" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxql" "go.uber.org/zap" diff --git a/v1/tsdb/index/tsi1/tsi1_test.go b/v1/tsdb/index/tsi1/tsi1_test.go index cee600c670..0862f5256a 100644 --- a/v1/tsdb/index/tsi1/tsi1_test.go +++ b/v1/tsdb/index/tsi1/tsi1_test.go @@ -8,7 +8,7 @@ import ( "reflect" "testing" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" ) diff --git a/v1/tsdb/index_test.go b/v1/tsdb/index_test.go index 7fd5bdb43f..8357e5f858 100644 --- a/v1/tsdb/index_test.go +++ b/v1/tsdb/index_test.go @@ -12,9 +12,9 @@ import ( "github.com/influxdata/influxdb/v2/influxql/query" "github.com/influxdata/influxdb/v2/logger" + 
"github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/slices" "github.com/influxdata/influxdb/v2/v1/internal" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/inmem" "github.com/influxdata/influxdb/v2/v1/tsdb/index/tsi1" diff --git a/v1/tsdb/internal/meta.pb.go b/v1/tsdb/internal/meta.pb.go index b09b9d74b3..af601763d4 100644 --- a/v1/tsdb/internal/meta.pb.go +++ b/v1/tsdb/internal/meta.pb.go @@ -3,11 +3,15 @@ package tsdb -import proto "github.com/gogo/protobuf/proto" -import fmt "fmt" -import math "math" +import ( + fmt "fmt" -import io "io" + proto "github.com/gogo/protobuf/proto" + + math "math" + + io "io" +) // Reference imports to suppress errors if they are not otherwise used. var _ = proto.Marshal diff --git a/v1/tsdb/meta.go b/v1/tsdb/meta.go index 32767e2a80..5929e141e0 100644 --- a/v1/tsdb/meta.go +++ b/v1/tsdb/meta.go @@ -5,7 +5,7 @@ package tsdb import ( "sort" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" ) // MarshalTags converts a tag set to bytes for use as a lookup key. diff --git a/v1/tsdb/meta_test.go b/v1/tsdb/meta_test.go index 678ed8fe40..dc92e76f4b 100644 --- a/v1/tsdb/meta_test.go +++ b/v1/tsdb/meta_test.go @@ -5,7 +5,7 @@ import ( "fmt" "testing" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" ) diff --git a/v1/tsdb/series_cursor.go b/v1/tsdb/series_cursor.go index bc9605aff8..127fe6b541 100644 --- a/v1/tsdb/series_cursor.go +++ b/v1/tsdb/series_cursor.go @@ -6,7 +6,7 @@ import ( "sort" "sync" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/series_file.go b/v1/tsdb/series_file.go index 2babb64b8a..da7d21701a 100644 --- a/v1/tsdb/series_file.go +++ b/v1/tsdb/series_file.go @@ -12,9 +12,9 @@ import ( "sync" "github.com/cespare/xxhash" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/binaryutil" "github.com/influxdata/influxdb/v2/pkg/limiter" - "github.com/influxdata/influxdb/v2/v1/models" "go.uber.org/zap" "golang.org/x/sync/errgroup" ) diff --git a/v1/tsdb/series_file_test.go b/v1/tsdb/series_file_test.go index 1740ada993..21bfbea8c9 100644 --- a/v1/tsdb/series_file_test.go +++ b/v1/tsdb/series_file_test.go @@ -9,7 +9,7 @@ import ( "testing" "github.com/influxdata/influxdb/v2/logger" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "golang.org/x/sync/errgroup" ) diff --git a/v1/tsdb/series_index.go b/v1/tsdb/series_index.go index e34e1918a4..5914234ef1 100644 --- a/v1/tsdb/series_index.go +++ b/v1/tsdb/series_index.go @@ -7,9 +7,9 @@ import ( "io" "os" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/mmap" "github.com/influxdata/influxdb/v2/pkg/rhh" - "github.com/influxdata/influxdb/v2/v1/models" ) const ( diff --git a/v1/tsdb/series_partition.go b/v1/tsdb/series_partition.go index 07b9e33995..c576c81a2e 100644 --- a/v1/tsdb/series_partition.go +++ b/v1/tsdb/series_partition.go @@ -11,9 +11,9 @@ import ( "sync" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/limiter" "github.com/influxdata/influxdb/v2/pkg/rhh" - "github.com/influxdata/influxdb/v2/v1/models" "go.uber.org/zap" ) diff 
--git a/v1/tsdb/shard.go b/v1/tsdb/shard.go index 4408321292..761c708617 100644 --- a/v1/tsdb/shard.go +++ b/v1/tsdb/shard.go @@ -20,12 +20,12 @@ import ( "github.com/gogo/protobuf/proto" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/bytesutil" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/file" "github.com/influxdata/influxdb/v2/pkg/limiter" "github.com/influxdata/influxdb/v2/pkg/slices" - "github.com/influxdata/influxdb/v2/v1/models" internal "github.com/influxdata/influxdb/v2/v1/tsdb/internal" "github.com/influxdata/influxql" "go.uber.org/zap" diff --git a/v1/tsdb/shard_internal_test.go b/v1/tsdb/shard_internal_test.go index 8313ce5ff9..f8b4abd959 100644 --- a/v1/tsdb/shard_internal_test.go +++ b/v1/tsdb/shard_internal_test.go @@ -15,7 +15,7 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" "github.com/influxdata/influxdb/v2/logger" - "github.com/influxdata/influxdb/v2/v1/models" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxql" ) diff --git a/v1/tsdb/shard_test.go b/v1/tsdb/shard_test.go index 9dbf1096a9..63efb62bf0 100644 --- a/v1/tsdb/shard_test.go +++ b/v1/tsdb/shard_test.go @@ -23,8 +23,8 @@ import ( "github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp/cmpopts" "github.com/influxdata/influxdb/v2/influxql/query" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/deep" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" _ "github.com/influxdata/influxdb/v2/v1/tsdb/engine" _ "github.com/influxdata/influxdb/v2/v1/tsdb/index" diff --git a/v1/tsdb/store.go b/v1/tsdb/store.go index 2630233e44..48075f7fe4 100644 --- a/v1/tsdb/store.go +++ b/v1/tsdb/store.go @@ -18,10 +18,10 @@ import ( "github.com/influxdata/influxdb/v2/influxql/query" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/estimator" "github.com/influxdata/influxdb/v2/pkg/estimator/hll" "github.com/influxdata/influxdb/v2/pkg/limiter" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxql" "go.uber.org/zap" "go.uber.org/zap/zapcore" diff --git a/v1/tsdb/store_test.go b/v1/tsdb/store_test.go index f2e9314f02..891e3c9b9c 100644 --- a/v1/tsdb/store_test.go +++ b/v1/tsdb/store_test.go @@ -21,10 +21,10 @@ import ( "github.com/davecgh/go-spew/spew" "github.com/influxdata/influxdb/v2/influxql/query" "github.com/influxdata/influxdb/v2/logger" + "github.com/influxdata/influxdb/v2/models" "github.com/influxdata/influxdb/v2/pkg/deep" "github.com/influxdata/influxdb/v2/pkg/slices" "github.com/influxdata/influxdb/v2/v1/internal" - "github.com/influxdata/influxdb/v2/v1/models" "github.com/influxdata/influxdb/v2/v1/tsdb" "github.com/influxdata/influxdb/v2/v1/tsdb/index/inmem" "github.com/influxdata/influxql"