build: fix 'go generate' and regenerate the universe (#21089)
parent 7b74bb41fc
commit b1283bc998
@ -1,27 +1,13 @@
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: internal/internal.proto

/*
Package query is a generated protocol buffer package.

It is generated from these files:
	internal/internal.proto

It has these top-level messages:
	Point
	Aux
	IteratorOptions
	Measurements
	Measurement
	Interval
	IteratorStats
	VarRef
*/
package query

import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import (
	fmt "fmt"
	proto "github.com/gogo/protobuf/proto"
	math "math"
)

// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal

@ -32,29 +18,50 @@ var _ = math.Inf
|
|||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
|
||||
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
|
||||
|
||||
type Point struct {
|
||||
Name *string `protobuf:"bytes,1,req,name=Name" json:"Name,omitempty"`
|
||||
Tags *string `protobuf:"bytes,2,req,name=Tags" json:"Tags,omitempty"`
|
||||
Time *int64 `protobuf:"varint,3,req,name=Time" json:"Time,omitempty"`
|
||||
Nil *bool `protobuf:"varint,4,req,name=Nil" json:"Nil,omitempty"`
|
||||
Aux []*Aux `protobuf:"bytes,5,rep,name=Aux" json:"Aux,omitempty"`
|
||||
Aggregated *uint32 `protobuf:"varint,6,opt,name=Aggregated" json:"Aggregated,omitempty"`
|
||||
FloatValue *float64 `protobuf:"fixed64,7,opt,name=FloatValue" json:"FloatValue,omitempty"`
|
||||
IntegerValue *int64 `protobuf:"varint,8,opt,name=IntegerValue" json:"IntegerValue,omitempty"`
|
||||
StringValue *string `protobuf:"bytes,9,opt,name=StringValue" json:"StringValue,omitempty"`
|
||||
BooleanValue *bool `protobuf:"varint,10,opt,name=BooleanValue" json:"BooleanValue,omitempty"`
|
||||
UnsignedValue *uint64 `protobuf:"varint,12,opt,name=UnsignedValue" json:"UnsignedValue,omitempty"`
|
||||
Stats *IteratorStats `protobuf:"bytes,11,opt,name=Stats" json:"Stats,omitempty"`
|
||||
Trace []byte `protobuf:"bytes,13,opt,name=Trace" json:"Trace,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Name *string `protobuf:"bytes,1,req,name=Name" json:"Name,omitempty"`
|
||||
Tags *string `protobuf:"bytes,2,req,name=Tags" json:"Tags,omitempty"`
|
||||
Time *int64 `protobuf:"varint,3,req,name=Time" json:"Time,omitempty"`
|
||||
Nil *bool `protobuf:"varint,4,req,name=Nil" json:"Nil,omitempty"`
|
||||
Aux []*Aux `protobuf:"bytes,5,rep,name=Aux" json:"Aux,omitempty"`
|
||||
Aggregated *uint32 `protobuf:"varint,6,opt,name=Aggregated" json:"Aggregated,omitempty"`
|
||||
FloatValue *float64 `protobuf:"fixed64,7,opt,name=FloatValue" json:"FloatValue,omitempty"`
|
||||
IntegerValue *int64 `protobuf:"varint,8,opt,name=IntegerValue" json:"IntegerValue,omitempty"`
|
||||
StringValue *string `protobuf:"bytes,9,opt,name=StringValue" json:"StringValue,omitempty"`
|
||||
BooleanValue *bool `protobuf:"varint,10,opt,name=BooleanValue" json:"BooleanValue,omitempty"`
|
||||
UnsignedValue *uint64 `protobuf:"varint,12,opt,name=UnsignedValue" json:"UnsignedValue,omitempty"`
|
||||
Stats *IteratorStats `protobuf:"bytes,11,opt,name=Stats" json:"Stats,omitempty"`
|
||||
Trace []byte `protobuf:"bytes,13,opt,name=Trace" json:"Trace,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Point) Reset() { *m = Point{} }
|
||||
func (m *Point) String() string { return proto.CompactTextString(m) }
|
||||
func (*Point) ProtoMessage() {}
|
||||
func (*Point) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{0} }
|
||||
func (m *Point) Reset() { *m = Point{} }
|
||||
func (m *Point) String() string { return proto.CompactTextString(m) }
|
||||
func (*Point) ProtoMessage() {}
|
||||
func (*Point) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{0}
|
||||
}
|
||||
func (m *Point) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Point.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Point) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Point.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Point) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Point.Merge(m, src)
|
||||
}
|
||||
func (m *Point) XXX_Size() int {
|
||||
return xxx_messageInfo_Point.Size(m)
|
||||
}
|
||||
func (m *Point) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Point.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Point proto.InternalMessageInfo
|
||||
|
||||
func (m *Point) GetName() string {
|
||||
if m != nil && m.Name != nil {
|
||||
|
@ -148,19 +155,40 @@ func (m *Point) GetTrace() []byte {
|
|||
}
|
||||
|
||||
type Aux struct {
|
||||
DataType *int32 `protobuf:"varint,1,req,name=DataType" json:"DataType,omitempty"`
|
||||
FloatValue *float64 `protobuf:"fixed64,2,opt,name=FloatValue" json:"FloatValue,omitempty"`
|
||||
IntegerValue *int64 `protobuf:"varint,3,opt,name=IntegerValue" json:"IntegerValue,omitempty"`
|
||||
StringValue *string `protobuf:"bytes,4,opt,name=StringValue" json:"StringValue,omitempty"`
|
||||
BooleanValue *bool `protobuf:"varint,5,opt,name=BooleanValue" json:"BooleanValue,omitempty"`
|
||||
UnsignedValue *uint64 `protobuf:"varint,6,opt,name=UnsignedValue" json:"UnsignedValue,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
DataType *int32 `protobuf:"varint,1,req,name=DataType" json:"DataType,omitempty"`
|
||||
FloatValue *float64 `protobuf:"fixed64,2,opt,name=FloatValue" json:"FloatValue,omitempty"`
|
||||
IntegerValue *int64 `protobuf:"varint,3,opt,name=IntegerValue" json:"IntegerValue,omitempty"`
|
||||
StringValue *string `protobuf:"bytes,4,opt,name=StringValue" json:"StringValue,omitempty"`
|
||||
BooleanValue *bool `protobuf:"varint,5,opt,name=BooleanValue" json:"BooleanValue,omitempty"`
|
||||
UnsignedValue *uint64 `protobuf:"varint,6,opt,name=UnsignedValue" json:"UnsignedValue,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Aux) Reset() { *m = Aux{} }
|
||||
func (m *Aux) String() string { return proto.CompactTextString(m) }
|
||||
func (*Aux) ProtoMessage() {}
|
||||
func (*Aux) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{1} }
|
||||
func (m *Aux) Reset() { *m = Aux{} }
|
||||
func (m *Aux) String() string { return proto.CompactTextString(m) }
|
||||
func (*Aux) ProtoMessage() {}
|
||||
func (*Aux) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{1}
|
||||
}
|
||||
func (m *Aux) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Aux.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Aux) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Aux.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Aux) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Aux.Merge(m, src)
|
||||
}
|
||||
func (m *Aux) XXX_Size() int {
|
||||
return xxx_messageInfo_Aux.Size(m)
|
||||
}
|
||||
func (m *Aux) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Aux.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Aux proto.InternalMessageInfo
|
||||
|
||||
func (m *Aux) GetDataType() int32 {
|
||||
if m != nil && m.DataType != nil {
|
||||
|
@ -205,35 +233,56 @@ func (m *Aux) GetUnsignedValue() uint64 {
|
|||
}
|
||||
|
||||
type IteratorOptions struct {
|
||||
Expr *string `protobuf:"bytes,1,opt,name=Expr" json:"Expr,omitempty"`
|
||||
Aux []string `protobuf:"bytes,2,rep,name=Aux" json:"Aux,omitempty"`
|
||||
Fields []*VarRef `protobuf:"bytes,17,rep,name=Fields" json:"Fields,omitempty"`
|
||||
Sources []*Measurement `protobuf:"bytes,3,rep,name=Sources" json:"Sources,omitempty"`
|
||||
Interval *Interval `protobuf:"bytes,4,opt,name=Interval" json:"Interval,omitempty"`
|
||||
Dimensions []string `protobuf:"bytes,5,rep,name=Dimensions" json:"Dimensions,omitempty"`
|
||||
GroupBy []string `protobuf:"bytes,19,rep,name=GroupBy" json:"GroupBy,omitempty"`
|
||||
Fill *int32 `protobuf:"varint,6,opt,name=Fill" json:"Fill,omitempty"`
|
||||
FillValue *float64 `protobuf:"fixed64,7,opt,name=FillValue" json:"FillValue,omitempty"`
|
||||
Condition *string `protobuf:"bytes,8,opt,name=Condition" json:"Condition,omitempty"`
|
||||
StartTime *int64 `protobuf:"varint,9,opt,name=StartTime" json:"StartTime,omitempty"`
|
||||
EndTime *int64 `protobuf:"varint,10,opt,name=EndTime" json:"EndTime,omitempty"`
|
||||
Location *string `protobuf:"bytes,21,opt,name=Location" json:"Location,omitempty"`
|
||||
Ascending *bool `protobuf:"varint,11,opt,name=Ascending" json:"Ascending,omitempty"`
|
||||
Limit *int64 `protobuf:"varint,12,opt,name=Limit" json:"Limit,omitempty"`
|
||||
Offset *int64 `protobuf:"varint,13,opt,name=Offset" json:"Offset,omitempty"`
|
||||
SLimit *int64 `protobuf:"varint,14,opt,name=SLimit" json:"SLimit,omitempty"`
|
||||
SOffset *int64 `protobuf:"varint,15,opt,name=SOffset" json:"SOffset,omitempty"`
|
||||
StripName *bool `protobuf:"varint,22,opt,name=StripName" json:"StripName,omitempty"`
|
||||
Dedupe *bool `protobuf:"varint,16,opt,name=Dedupe" json:"Dedupe,omitempty"`
|
||||
MaxSeriesN *int64 `protobuf:"varint,18,opt,name=MaxSeriesN" json:"MaxSeriesN,omitempty"`
|
||||
Ordered *bool `protobuf:"varint,20,opt,name=Ordered" json:"Ordered,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Expr *string `protobuf:"bytes,1,opt,name=Expr" json:"Expr,omitempty"`
|
||||
Aux []string `protobuf:"bytes,2,rep,name=Aux" json:"Aux,omitempty"`
|
||||
Fields []*VarRef `protobuf:"bytes,17,rep,name=Fields" json:"Fields,omitempty"`
|
||||
Sources []*Measurement `protobuf:"bytes,3,rep,name=Sources" json:"Sources,omitempty"`
|
||||
Interval *Interval `protobuf:"bytes,4,opt,name=Interval" json:"Interval,omitempty"`
|
||||
Dimensions []string `protobuf:"bytes,5,rep,name=Dimensions" json:"Dimensions,omitempty"`
|
||||
GroupBy []string `protobuf:"bytes,19,rep,name=GroupBy" json:"GroupBy,omitempty"`
|
||||
Fill *int32 `protobuf:"varint,6,opt,name=Fill" json:"Fill,omitempty"`
|
||||
FillValue *float64 `protobuf:"fixed64,7,opt,name=FillValue" json:"FillValue,omitempty"`
|
||||
Condition *string `protobuf:"bytes,8,opt,name=Condition" json:"Condition,omitempty"`
|
||||
StartTime *int64 `protobuf:"varint,9,opt,name=StartTime" json:"StartTime,omitempty"`
|
||||
EndTime *int64 `protobuf:"varint,10,opt,name=EndTime" json:"EndTime,omitempty"`
|
||||
Location *string `protobuf:"bytes,21,opt,name=Location" json:"Location,omitempty"`
|
||||
Ascending *bool `protobuf:"varint,11,opt,name=Ascending" json:"Ascending,omitempty"`
|
||||
Limit *int64 `protobuf:"varint,12,opt,name=Limit" json:"Limit,omitempty"`
|
||||
Offset *int64 `protobuf:"varint,13,opt,name=Offset" json:"Offset,omitempty"`
|
||||
SLimit *int64 `protobuf:"varint,14,opt,name=SLimit" json:"SLimit,omitempty"`
|
||||
SOffset *int64 `protobuf:"varint,15,opt,name=SOffset" json:"SOffset,omitempty"`
|
||||
StripName *bool `protobuf:"varint,22,opt,name=StripName" json:"StripName,omitempty"`
|
||||
Dedupe *bool `protobuf:"varint,16,opt,name=Dedupe" json:"Dedupe,omitempty"`
|
||||
MaxSeriesN *int64 `protobuf:"varint,18,opt,name=MaxSeriesN" json:"MaxSeriesN,omitempty"`
|
||||
Ordered *bool `protobuf:"varint,20,opt,name=Ordered" json:"Ordered,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *IteratorOptions) Reset() { *m = IteratorOptions{} }
|
||||
func (m *IteratorOptions) String() string { return proto.CompactTextString(m) }
|
||||
func (*IteratorOptions) ProtoMessage() {}
|
||||
func (*IteratorOptions) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{2} }
|
||||
func (m *IteratorOptions) Reset() { *m = IteratorOptions{} }
|
||||
func (m *IteratorOptions) String() string { return proto.CompactTextString(m) }
|
||||
func (*IteratorOptions) ProtoMessage() {}
|
||||
func (*IteratorOptions) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{2}
|
||||
}
|
||||
func (m *IteratorOptions) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_IteratorOptions.Unmarshal(m, b)
|
||||
}
|
||||
func (m *IteratorOptions) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_IteratorOptions.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *IteratorOptions) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_IteratorOptions.Merge(m, src)
|
||||
}
|
||||
func (m *IteratorOptions) XXX_Size() int {
|
||||
return xxx_messageInfo_IteratorOptions.Size(m)
|
||||
}
|
||||
func (m *IteratorOptions) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_IteratorOptions.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_IteratorOptions proto.InternalMessageInfo
|
||||
|
||||
func (m *IteratorOptions) GetExpr() string {
|
||||
if m != nil && m.Expr != nil {
|
||||
|
@ -390,14 +439,35 @@ func (m *IteratorOptions) GetOrdered() bool {
|
|||
}
|
||||
|
||||
type Measurements struct {
|
||||
Items []*Measurement `protobuf:"bytes,1,rep,name=Items" json:"Items,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Items []*Measurement `protobuf:"bytes,1,rep,name=Items" json:"Items,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Measurements) Reset() { *m = Measurements{} }
|
||||
func (m *Measurements) String() string { return proto.CompactTextString(m) }
|
||||
func (*Measurements) ProtoMessage() {}
|
||||
func (*Measurements) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{3} }
|
||||
func (m *Measurements) Reset() { *m = Measurements{} }
|
||||
func (m *Measurements) String() string { return proto.CompactTextString(m) }
|
||||
func (*Measurements) ProtoMessage() {}
|
||||
func (*Measurements) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{3}
|
||||
}
|
||||
func (m *Measurements) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Measurements.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Measurements) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Measurements.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Measurements) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Measurements.Merge(m, src)
|
||||
}
|
||||
func (m *Measurements) XXX_Size() int {
|
||||
return xxx_messageInfo_Measurements.Size(m)
|
||||
}
|
||||
func (m *Measurements) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Measurements.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Measurements proto.InternalMessageInfo
|
||||
|
||||
func (m *Measurements) GetItems() []*Measurement {
|
||||
if m != nil {
|
||||
|
@ -407,19 +477,40 @@ func (m *Measurements) GetItems() []*Measurement {
|
|||
}
|
||||
|
||||
type Measurement struct {
|
||||
Database *string `protobuf:"bytes,1,opt,name=Database" json:"Database,omitempty"`
|
||||
RetentionPolicy *string `protobuf:"bytes,2,opt,name=RetentionPolicy" json:"RetentionPolicy,omitempty"`
|
||||
Name *string `protobuf:"bytes,3,opt,name=Name" json:"Name,omitempty"`
|
||||
Regex *string `protobuf:"bytes,4,opt,name=Regex" json:"Regex,omitempty"`
|
||||
IsTarget *bool `protobuf:"varint,5,opt,name=IsTarget" json:"IsTarget,omitempty"`
|
||||
SystemIterator *string `protobuf:"bytes,6,opt,name=SystemIterator" json:"SystemIterator,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Database *string `protobuf:"bytes,1,opt,name=Database" json:"Database,omitempty"`
|
||||
RetentionPolicy *string `protobuf:"bytes,2,opt,name=RetentionPolicy" json:"RetentionPolicy,omitempty"`
|
||||
Name *string `protobuf:"bytes,3,opt,name=Name" json:"Name,omitempty"`
|
||||
Regex *string `protobuf:"bytes,4,opt,name=Regex" json:"Regex,omitempty"`
|
||||
IsTarget *bool `protobuf:"varint,5,opt,name=IsTarget" json:"IsTarget,omitempty"`
|
||||
SystemIterator *string `protobuf:"bytes,6,opt,name=SystemIterator" json:"SystemIterator,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Measurement) Reset() { *m = Measurement{} }
|
||||
func (m *Measurement) String() string { return proto.CompactTextString(m) }
|
||||
func (*Measurement) ProtoMessage() {}
|
||||
func (*Measurement) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{4} }
|
||||
func (m *Measurement) Reset() { *m = Measurement{} }
|
||||
func (m *Measurement) String() string { return proto.CompactTextString(m) }
|
||||
func (*Measurement) ProtoMessage() {}
|
||||
func (*Measurement) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{4}
|
||||
}
|
||||
func (m *Measurement) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Measurement.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Measurement) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Measurement.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Measurement) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Measurement.Merge(m, src)
|
||||
}
|
||||
func (m *Measurement) XXX_Size() int {
|
||||
return xxx_messageInfo_Measurement.Size(m)
|
||||
}
|
||||
func (m *Measurement) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Measurement.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Measurement proto.InternalMessageInfo
|
||||
|
||||
func (m *Measurement) GetDatabase() string {
|
||||
if m != nil && m.Database != nil {
|
||||
|
@ -464,15 +555,36 @@ func (m *Measurement) GetSystemIterator() string {
|
|||
}
|
||||
|
||||
type Interval struct {
|
||||
Duration *int64 `protobuf:"varint,1,opt,name=Duration" json:"Duration,omitempty"`
|
||||
Offset *int64 `protobuf:"varint,2,opt,name=Offset" json:"Offset,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Duration *int64 `protobuf:"varint,1,opt,name=Duration" json:"Duration,omitempty"`
|
||||
Offset *int64 `protobuf:"varint,2,opt,name=Offset" json:"Offset,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Interval) Reset() { *m = Interval{} }
|
||||
func (m *Interval) String() string { return proto.CompactTextString(m) }
|
||||
func (*Interval) ProtoMessage() {}
|
||||
func (*Interval) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{5} }
|
||||
func (m *Interval) Reset() { *m = Interval{} }
|
||||
func (m *Interval) String() string { return proto.CompactTextString(m) }
|
||||
func (*Interval) ProtoMessage() {}
|
||||
func (*Interval) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{5}
|
||||
}
|
||||
func (m *Interval) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Interval.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Interval) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Interval.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Interval) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Interval.Merge(m, src)
|
||||
}
|
||||
func (m *Interval) XXX_Size() int {
|
||||
return xxx_messageInfo_Interval.Size(m)
|
||||
}
|
||||
func (m *Interval) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Interval.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Interval proto.InternalMessageInfo
|
||||
|
||||
func (m *Interval) GetDuration() int64 {
|
||||
if m != nil && m.Duration != nil {
|
||||
|
@ -489,15 +601,36 @@ func (m *Interval) GetOffset() int64 {
|
|||
}
|
||||
|
||||
type IteratorStats struct {
|
||||
SeriesN *int64 `protobuf:"varint,1,opt,name=SeriesN" json:"SeriesN,omitempty"`
|
||||
PointN *int64 `protobuf:"varint,2,opt,name=PointN" json:"PointN,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
SeriesN *int64 `protobuf:"varint,1,opt,name=SeriesN" json:"SeriesN,omitempty"`
|
||||
PointN *int64 `protobuf:"varint,2,opt,name=PointN" json:"PointN,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *IteratorStats) Reset() { *m = IteratorStats{} }
|
||||
func (m *IteratorStats) String() string { return proto.CompactTextString(m) }
|
||||
func (*IteratorStats) ProtoMessage() {}
|
||||
func (*IteratorStats) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{6} }
|
||||
func (m *IteratorStats) Reset() { *m = IteratorStats{} }
|
||||
func (m *IteratorStats) String() string { return proto.CompactTextString(m) }
|
||||
func (*IteratorStats) ProtoMessage() {}
|
||||
func (*IteratorStats) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{6}
|
||||
}
|
||||
func (m *IteratorStats) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_IteratorStats.Unmarshal(m, b)
|
||||
}
|
||||
func (m *IteratorStats) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_IteratorStats.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *IteratorStats) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_IteratorStats.Merge(m, src)
|
||||
}
|
||||
func (m *IteratorStats) XXX_Size() int {
|
||||
return xxx_messageInfo_IteratorStats.Size(m)
|
||||
}
|
||||
func (m *IteratorStats) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_IteratorStats.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_IteratorStats proto.InternalMessageInfo
|
||||
|
||||
func (m *IteratorStats) GetSeriesN() int64 {
|
||||
if m != nil && m.SeriesN != nil {
|
||||
|
@ -514,15 +647,36 @@ func (m *IteratorStats) GetPointN() int64 {
|
|||
}
|
||||
|
||||
type VarRef struct {
|
||||
Val *string `protobuf:"bytes,1,req,name=Val" json:"Val,omitempty"`
|
||||
Type *int32 `protobuf:"varint,2,opt,name=Type" json:"Type,omitempty"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
Val *string `protobuf:"bytes,1,req,name=Val" json:"Val,omitempty"`
|
||||
Type *int32 `protobuf:"varint,2,opt,name=Type" json:"Type,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *VarRef) Reset() { *m = VarRef{} }
|
||||
func (m *VarRef) String() string { return proto.CompactTextString(m) }
|
||||
func (*VarRef) ProtoMessage() {}
|
||||
func (*VarRef) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{7} }
|
||||
func (m *VarRef) Reset() { *m = VarRef{} }
|
||||
func (m *VarRef) String() string { return proto.CompactTextString(m) }
|
||||
func (*VarRef) ProtoMessage() {}
|
||||
func (*VarRef) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_41ca0a4a9dd77d9e, []int{7}
|
||||
}
|
||||
func (m *VarRef) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_VarRef.Unmarshal(m, b)
|
||||
}
|
||||
func (m *VarRef) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_VarRef.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *VarRef) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_VarRef.Merge(m, src)
|
||||
}
|
||||
func (m *VarRef) XXX_Size() int {
|
||||
return xxx_messageInfo_VarRef.Size(m)
|
||||
}
|
||||
func (m *VarRef) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_VarRef.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_VarRef proto.InternalMessageInfo
|
||||
|
||||
func (m *VarRef) GetVal() string {
|
||||
if m != nil && m.Val != nil {
|
||||
|
@ -549,9 +703,9 @@ func init() {
|
|||
proto.RegisterType((*VarRef)(nil), "query.VarRef")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("internal/internal.proto", fileDescriptorInternal) }
|
||||
func init() { proto.RegisterFile("internal/internal.proto", fileDescriptor_41ca0a4a9dd77d9e) }
|
||||
|
||||
var fileDescriptorInternal = []byte{
|
||||
var fileDescriptor_41ca0a4a9dd77d9e = []byte{
|
||||
// 796 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x8c, 0x55, 0x6d, 0x6f, 0xe3, 0x44,
|
||||
0x10, 0x96, 0xe3, 0x3a, 0x8d, 0x27, 0xcd, 0xf5, 0x58, 0x4a, 0x59, 0xa1, 0x13, 0xb2, 0x2c, 0x40,
|
||||
|
|
|
@ -4,6 +4,18 @@ package models
import "strconv"

func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[Integer-0]
	_ = x[Float-1]
	_ = x[Boolean-2]
	_ = x[String-3]
	_ = x[Empty-4]
	_ = x[Unsigned-5]
}

const _FieldType_name = "IntegerFloatBooleanStringEmptyUnsigned"

var _FieldType_index = [...]uint8{0, 7, 12, 19, 25, 30, 38}

File diff suppressed because it is too large
|
@ -9,7 +9,7 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/arrow"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/interval"
|
||||
"github.com/influxdata/flux/interval"
|
||||
"github.com/influxdata/flux/memory"
|
||||
"github.com/influxdata/flux/values"
|
||||
"github.com/influxdata/influxdb/v2"
|
||||
|
@ -136,7 +136,7 @@ func new{{.Name}}WindowTable(
|
|||
}
|
||||
if t.createEmpty {
|
||||
start := int64(bounds.Start)
|
||||
t.windowBounds = window.GetLatestBounds(values.Time(start))
|
||||
t.windowBounds = window.GetLatestBounds(values.Time(start))
|
||||
}
|
||||
t.readTags(tags)
|
||||
t.init(t.advance)
|
||||
|
@ -443,7 +443,7 @@ func new{{.Name}}EmptyWindowSelectorTable(
|
|||
arr: cur.Next(),
|
||||
rangeStart: rangeStart,
|
||||
rangeStop: rangeStop,
|
||||
windowBounds: window.GetLatestBounds(values.Time(rangeStart)),
|
||||
windowBounds: window.GetLatestBounds(values.Time(rangeStart)),
|
||||
window: window,
|
||||
timeColumn: timeColumn,
|
||||
}
|
||||
|
@ -521,7 +521,7 @@ func (t *{{.name}}EmptyWindowSelectorTable) startTimes(builder *array.{{.ArrowTy
|
|||
builder.AppendNull()
|
||||
}
|
||||
|
||||
t.windowBounds = t.window.NextBounds(t.windowBounds)
|
||||
t.windowBounds = t.window.NextBounds(t.windowBounds)
|
||||
|
||||
// If the current array is non-empty and has
|
||||
// been read in its entirety, call Next().
|
||||
|
@ -568,7 +568,7 @@ func (t *{{.name}}EmptyWindowSelectorTable) stopTimes(builder *array.{{.ArrowTyp
|
|||
builder.AppendNull()
|
||||
}
|
||||
|
||||
t.windowBounds = t.window.NextBounds(t.windowBounds)
|
||||
t.windowBounds = t.window.NextBounds(t.windowBounds)
|
||||
|
||||
// If the current array is non-empty and has
|
||||
// been read in its entirety, call Next().
|
||||
|
@ -632,7 +632,7 @@ func (t *{{.name}}EmptyWindowSelectorTable) startStopTimes(builder *array.{{.Arr
|
|||
builder.AppendNull()
|
||||
}
|
||||
|
||||
t.windowBounds = t.window.NextBounds(t.windowBounds)
|
||||
t.windowBounds = t.window.NextBounds(t.windowBounds)
|
||||
|
||||
// If the current array is non-empty and has
|
||||
// been read in its entirety, call Next().
|
||||
|
|
|
@ -5,7 +5,7 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/flux/interval"
|
||||
"github.com/influxdata/flux/interval"
|
||||
"github.com/influxdata/flux/values"
|
||||
"github.com/google/go-cmp/cmp"
|
||||
"github.com/influxdata/influxdb/v2/tsdb/cursors"
|
||||
|
@ -57,11 +57,11 @@ func TestNewWindowAggregateArrayCursorMonths_{{$ColType}}(t *testing.T) {
|
|||
{{range .Aggs}}
|
||||
{{$Agg := .Name}}
|
||||
t.Run("{{$Agg}}", func(t *testing.T) {
|
||||
window, _ := interval.NewWindow(
|
||||
values.MakeDuration(int64(time.Hour), 0, false),
|
||||
values.MakeDuration(int64(time.Hour), 0, false),
|
||||
values.MakeDuration(0, 0, false),
|
||||
)
|
||||
window, _ := interval.NewWindow(
|
||||
values.MakeDuration(int64(time.Hour), 0, false),
|
||||
values.MakeDuration(int64(time.Hour), 0, false),
|
||||
values.MakeDuration(0, 0, false),
|
||||
)
|
||||
|
||||
want := &{{$colType}}Window{{$Agg}}ArrayCursor{
|
||||
{{$ColType}}ArrayCursor: &Mock{{$ColType}}ArrayCursor{},
|
||||
|
@ -87,11 +87,11 @@ func TestNewWindowAggregateArrayCursor_{{$ColType}}(t *testing.T) {
|
|||
{{range .Aggs}}
|
||||
{{$Agg := .Name}}
|
||||
t.Run("{{$Agg}}", func(t *testing.T) {
|
||||
window, _ := interval.NewWindow(
|
||||
values.MakeDuration(0, 1, false),
|
||||
values.MakeDuration(0, 1, false),
|
||||
values.MakeDuration(0, 0, false),
|
||||
)
|
||||
window, _ := interval.NewWindow(
|
||||
values.MakeDuration(0, 1, false),
|
||||
values.MakeDuration(0, 1, false),
|
||||
values.MakeDuration(0, 0, false),
|
||||
)
|
||||
|
||||
want := &{{$colType}}Window{{$Agg}}ArrayCursor{
|
||||
{{$ColType}}ArrayCursor: &Mock{{$ColType}}ArrayCursor{},
|
||||
|
|
|
@ -0,0 +1,259 @@
|
|||
package cursors
|
||||
|
||||
{{range .}}
|
||||
{{- $typename := print .Name "Array" }}
|
||||
{{- $hasType := or (and .Type true) false }}
|
||||
|
||||
type {{ $typename }} struct {
|
||||
Timestamps []int64
|
||||
{{- if $hasType }}
|
||||
Values []{{.Type}}
|
||||
{{- end }}
|
||||
}
|
||||
|
||||
func New{{$typename}}Len(sz int) *{{$typename}} {
|
||||
return &{{$typename}}{
|
||||
Timestamps: make([]int64, sz),
|
||||
{{- if $hasType }}
|
||||
Values: make([]{{.Type}}, sz),
|
||||
{{- end }}
|
||||
}
|
||||
}
|
||||
|
||||
func (a *{{ $typename }}) MinTime() int64 {
|
||||
return a.Timestamps[0]
|
||||
}
|
||||
|
||||
func (a *{{ $typename }}) MaxTime() int64 {
|
||||
return a.Timestamps[len(a.Timestamps)-1]
|
||||
}
|
||||
|
||||
func (a *{{ $typename}}) Len() int {
|
||||
return len(a.Timestamps)
|
||||
}
|
||||
|
||||
// search performs a binary search for UnixNano() v in a
|
||||
// and returns the position, i, where v would be inserted.
|
||||
// An additional check of a.Timestamps[i] == v is necessary
|
||||
// to determine if the value v exists.
|
||||
func (a *{{ $typename }}) search(v int64) int {
|
||||
// Define: f(x) → a.Timestamps[x] < v
|
||||
// Define: f(-1) == true, f(n) == false
|
||||
// Invariant: f(lo-1) == true, f(hi) == false
|
||||
lo := 0
|
||||
hi := a.Len()
|
||||
for lo < hi {
|
||||
mid := int(uint(lo+hi) >> 1)
|
||||
if a.Timestamps[mid] < v {
|
||||
lo = mid + 1 // preserves f(lo-1) == true
|
||||
} else {
|
||||
hi = mid // preserves f(hi) == false
|
||||
}
|
||||
}
|
||||
|
||||
// lo == hi
|
||||
return lo
|
||||
}
|
||||
|
||||
// FindRange returns the positions where min and max would be
|
||||
// inserted into the array. If a[0].UnixNano() > max or
|
||||
// a[len-1].UnixNano() < min then FindRange returns (-1, -1)
|
||||
// indicating the array is outside the domain [min, max]. The values must
|
||||
// be deduplicated and sorted before calling FindRange or the results
|
||||
// are undefined.
|
||||
func (a *{{ $typename }}) FindRange(min, max int64) (int, int) {
|
||||
if a.Len() == 0 || min > max {
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
minVal := a.MinTime()
|
||||
maxVal := a.MaxTime()
|
||||
|
||||
if maxVal < min || minVal > max {
|
||||
return -1, -1
|
||||
}
|
||||
|
||||
return a.search(min), a.search(max)
|
||||
}
|
||||
|
||||
{{- if $hasType }}
|
||||
// Exclude removes the subset of values in [min, max]. The values must
|
||||
// be deduplicated and sorted before calling Exclude or the results are undefined.
|
||||
func (a *{{ $typename }}) Exclude(min, max int64) {
|
||||
rmin, rmax := a.FindRange(min, max)
|
||||
if rmin == -1 && rmax == -1 {
|
||||
return
|
||||
}
|
||||
|
||||
// a.Timestamps[rmin] ≥ min
|
||||
// a.Timestamps[rmax] ≥ max
|
||||
|
||||
if rmax < a.Len() {
|
||||
if a.Timestamps[rmax] == max {
|
||||
rmax++
|
||||
}
|
||||
rest := a.Len()-rmax
|
||||
if rest > 0 {
|
||||
ts := a.Timestamps[:rmin+rest]
|
||||
copy(ts[rmin:], a.Timestamps[rmax:])
|
||||
a.Timestamps = ts
|
||||
|
||||
vs := a.Values[:rmin+rest]
|
||||
copy(vs[rmin:], a.Values[rmax:])
|
||||
a.Values = vs
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
a.Timestamps = a.Timestamps[:rmin]
|
||||
a.Values = a.Values[:rmin]
|
||||
}
|
||||
|
||||
// Include returns the subset values between min and max inclusive. The values must
|
||||
// be deduplicated and sorted before calling Include or the results are undefined.
|
||||
func (a *{{ $typename }}) Include(min, max int64) {
|
||||
rmin, rmax := a.FindRange(min, max)
|
||||
if rmin == -1 && rmax == -1 {
|
||||
a.Timestamps = a.Timestamps[:0]
|
||||
a.Values = a.Values[:0]
|
||||
return
|
||||
}
|
||||
|
||||
// a.Timestamps[rmin] ≥ min
|
||||
// a.Timestamps[rmax] ≥ max
|
||||
|
||||
if rmax < a.Len() && a.Timestamps[rmax] == max {
|
||||
rmax++
|
||||
}
|
||||
|
||||
if rmin > -1 {
|
||||
ts := a.Timestamps[:rmax-rmin]
|
||||
copy(ts, a.Timestamps[rmin:rmax])
|
||||
a.Timestamps = ts
|
||||
vs := a.Values[:rmax-rmin]
|
||||
copy(vs, a.Values[rmin:rmax])
|
||||
a.Values = vs
|
||||
} else {
|
||||
a.Timestamps = a.Timestamps[:rmax]
|
||||
a.Values = a.Values[:rmax]
|
||||
}
|
||||
}
|
||||
|
||||
// Merge overlays b to top of a. If two values conflict with
|
||||
// the same timestamp, b is used. Both a and b must be sorted
|
||||
// in ascending order.
|
||||
func (a *{{ $typename }}) Merge(b *{{ $typename }}) {
|
||||
if a.Len() == 0 {
|
||||
*a = *b
|
||||
return
|
||||
}
|
||||
|
||||
if b.Len() == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
// Normally, both a and b should not contain duplicates. Due to a bug in older versions, it's
|
||||
// possible stored blocks might contain duplicate values. Remove them if they exist before
|
||||
// merging.
|
||||
// a = a.Deduplicate()
|
||||
// b = b.Deduplicate()
|
||||
|
||||
if a.MaxTime() < b.MinTime() {
|
||||
a.Timestamps = append(a.Timestamps, b.Timestamps...)
|
||||
a.Values = append(a.Values, b.Values...)
|
||||
return
|
||||
}
|
||||
|
||||
if b.MaxTime() < a.MinTime() {
|
||||
var tmp {{$typename}}
|
||||
tmp.Timestamps = append(b.Timestamps, a.Timestamps...)
|
||||
tmp.Values = append(b.Values, a.Values...)
|
||||
*a = tmp
|
||||
return
|
||||
}
|
||||
|
||||
out := New{{$typename}}Len(a.Len()+b.Len())
|
||||
i, j, k := 0, 0, 0
|
||||
for i < len(a.Timestamps) && j < len(b.Timestamps) {
|
||||
if a.Timestamps[i] < b.Timestamps[j] {
|
||||
out.Timestamps[k] = a.Timestamps[i]
|
||||
out.Values[k] = a.Values[i]
|
||||
i++
|
||||
} else if a.Timestamps[i] == b.Timestamps[j] {
|
||||
out.Timestamps[k] = b.Timestamps[j]
|
||||
out.Values[k] = b.Values[j]
|
||||
i++
|
||||
j++
|
||||
} else {
|
||||
out.Timestamps[k] = b.Timestamps[j]
|
||||
out.Values[k] = b.Values[j]
|
||||
j++
|
||||
}
|
||||
k++
|
||||
}
|
||||
|
||||
if i < len(a.Timestamps) {
|
||||
n := copy(out.Timestamps[k:], a.Timestamps[i:])
|
||||
copy(out.Values[k:], a.Values[i:])
|
||||
k += n
|
||||
} else if j < len(b.Timestamps) {
|
||||
n := copy(out.Timestamps[k:], b.Timestamps[j:])
|
||||
copy(out.Values[k:], b.Values[j:])
|
||||
k += n
|
||||
}
|
||||
|
||||
a.Timestamps = out.Timestamps[:k]
|
||||
a.Values = out.Values[:k]
|
||||
}
|
||||
{{ else }}
|
||||
// Exclude removes the subset of timestamps in [min, max]. The timestamps must
|
||||
// be deduplicated and sorted before calling Exclude or the results are undefined.
|
||||
func (a *{{ $typename }}) Exclude(min, max int64) {
|
||||
rmin, rmax := a.FindRange(min, max)
|
||||
if rmin == -1 && rmax == -1 {
|
||||
return
|
||||
}
|
||||
|
||||
// a.Timestamps[rmin] ≥ min
|
||||
// a.Timestamps[rmax] ≥ max
|
||||
|
||||
if rmax < a.Len() {
|
||||
if a.Timestamps[rmax] == max {
|
||||
rmax++
|
||||
}
|
||||
rest := a.Len()-rmax
|
||||
if rest > 0 {
|
||||
ts := a.Timestamps[:rmin+rest]
|
||||
copy(ts[rmin:], a.Timestamps[rmax:])
|
||||
a.Timestamps = ts
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
a.Timestamps = a.Timestamps[:rmin]
|
||||
}
|
||||
|
||||
// Contains returns true if values exist between min and max inclusive. The
|
||||
// values must be sorted before calling Contains or the results are undefined.
|
||||
func (a *{{ $typename }}) Contains(min, max int64) bool {
|
||||
rmin, rmax := a.FindRange(min, max)
|
||||
if rmin == -1 && rmax == -1 {
|
||||
return false
|
||||
}
|
||||
|
||||
// a.Timestamps[rmin] ≥ min
|
||||
// a.Timestamps[rmax] ≥ max
|
||||
|
||||
if a.Timestamps[rmin] == min {
|
||||
return true
|
||||
}
|
||||
|
||||
if rmax < a.Len() && a.Timestamps[rmax] == max {
|
||||
return true
|
||||
}
|
||||
|
||||
return rmax-rmin > 0
|
||||
}
|
||||
{{ end }}
|
||||
|
||||
{{ end }}
|
|
@ -0,0 +1,26 @@
[
    {
        "Name":"Float",
        "Type":"float64"
    },
    {
        "Name":"Integer",
        "Type":"int64"
    },
    {
        "Name":"Unsigned",
        "Type":"uint64"
    },
    {
        "Name":"String",
        "Type":"string"
    },
    {
        "Name":"Boolean",
        "Type":"bool"
    },
    {
        "Name":"Timestamp",
        "Type": null
    }
]
File diff suppressed because it is too large
|
@ -1514,9 +1514,11 @@ func (si *ShardInfo) unmarshal(pb *internal.ShardInfo) {
|
|||
si.ID = pb.GetID()
|
||||
|
||||
// If deprecated "OwnerIDs" exists then convert it to "Owners" format.
|
||||
if len(pb.GetOwnerIDs()) > 0 {
|
||||
si.Owners = make([]ShardOwner, len(pb.GetOwnerIDs()))
|
||||
for i, x := range pb.GetOwnerIDs() {
|
||||
//lint:ignore SA1019 we need to check for the presence of the deprecated field so we can convert it
|
||||
oldStyleOwnerIds := pb.GetOwnerIDs()
|
||||
if len(oldStyleOwnerIds) > 0 {
|
||||
si.Owners = make([]ShardOwner, len(oldStyleOwnerIds))
|
||||
for i, x := range oldStyleOwnerIds {
|
||||
si.Owners[i].unmarshal(&internal.ShardOwner{
|
||||
NodeID: proto.Uint64(x),
|
||||
})
|
||||
|
|
File diff suppressed because it is too large
|
@ -1,3 +1,3 @@
package storage

//go:generate protoc -I$GOPATH/src/github.com/influxdata/influxdb/vendor -I. --gogofaster_out=. source.proto
//go:generate protoc -I$GOPATH/src -I. --gogofaster_out=. source.proto
|
|
|
@ -1,27 +1,15 @@
|
|||
// Code generated by protoc-gen-gogo. DO NOT EDIT.
|
||||
// source: source.proto
|
||||
|
||||
/*
|
||||
Package storage is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
source.proto
|
||||
|
||||
It has these top-level messages:
|
||||
ReadSource
|
||||
*/
|
||||
package storage
|
||||
|
||||
import (
|
||||
fmt "fmt"
|
||||
|
||||
proto "github.com/gogo/protobuf/proto"
|
||||
|
||||
math "math"
|
||||
|
||||
_ "github.com/gogo/protobuf/gogoproto"
|
||||
|
||||
proto "github.com/gogo/protobuf/proto"
|
||||
io "io"
|
||||
math "math"
|
||||
math_bits "math/bits"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
|
@ -33,7 +21,7 @@ var _ = math.Inf
|
|||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
|
||||
const _ = proto.GoGoProtoPackageIsVersion3 // please upgrade the proto package
|
||||
|
||||
type ReadSource struct {
|
||||
// Database identifies which database to query.
|
||||
|
@ -42,18 +30,67 @@ type ReadSource struct {
|
|||
RetentionPolicy string `protobuf:"bytes,2,opt,name=retention_policy,json=retentionPolicy,proto3" json:"retention_policy,omitempty"`
|
||||
}
|
||||
|
||||
func (m *ReadSource) Reset() { *m = ReadSource{} }
|
||||
func (m *ReadSource) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadSource) ProtoMessage() {}
|
||||
func (*ReadSource) Descriptor() ([]byte, []int) { return fileDescriptorSource, []int{0} }
|
||||
func (m *ReadSource) Reset() { *m = ReadSource{} }
|
||||
func (m *ReadSource) String() string { return proto.CompactTextString(m) }
|
||||
func (*ReadSource) ProtoMessage() {}
|
||||
func (*ReadSource) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_9229c9b6eb8e6b85, []int{0}
|
||||
}
|
||||
func (m *ReadSource) XXX_Unmarshal(b []byte) error {
|
||||
return m.Unmarshal(b)
|
||||
}
|
||||
func (m *ReadSource) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
if deterministic {
|
||||
return xxx_messageInfo_ReadSource.Marshal(b, m, deterministic)
|
||||
} else {
|
||||
b = b[:cap(b)]
|
||||
n, err := m.MarshalToSizedBuffer(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return b[:n], nil
|
||||
}
|
||||
}
|
||||
func (m *ReadSource) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ReadSource.Merge(m, src)
|
||||
}
|
||||
func (m *ReadSource) XXX_Size() int {
|
||||
return m.Size()
|
||||
}
|
||||
func (m *ReadSource) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ReadSource.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ReadSource proto.InternalMessageInfo
|
||||
|
||||
func init() {
|
||||
proto.RegisterType((*ReadSource)(nil), "com.github.influxdata.influxdb.services.storage.ReadSource")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("source.proto", fileDescriptor_9229c9b6eb8e6b85) }
|
||||
|
||||
var fileDescriptor_9229c9b6eb8e6b85 = []byte{
|
||||
// 219 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x29, 0xce, 0x2f, 0x2d,
|
||||
0x4a, 0x4e, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0x4f, 0xce, 0xcf, 0xd5, 0x4b, 0xcf,
|
||||
0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0xcb, 0xcc, 0x4b, 0xcb, 0x29, 0xad, 0x48, 0x49, 0x2c, 0x49, 0x84,
|
||||
0x31, 0x93, 0xf4, 0x8a, 0x53, 0x8b, 0xca, 0x32, 0x93, 0x53, 0x8b, 0xf5, 0x8a, 0x4b, 0xf2, 0x8b,
|
||||
0x12, 0xd3, 0x53, 0xa5, 0x74, 0xa1, 0x8a, 0x93, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5,
|
||||
0xc1, 0xe6, 0x24, 0x95, 0xa6, 0x81, 0x79, 0x60, 0x0e, 0x98, 0x05, 0x31, 0x5f, 0x29, 0x83, 0x8b,
|
||||
0x2b, 0x28, 0x35, 0x31, 0x25, 0x18, 0x6c, 0xa7, 0x90, 0x14, 0x17, 0x07, 0xc8, 0xf8, 0xa4, 0xc4,
|
||||
0xe2, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x38, 0x5f, 0xc8, 0x8e, 0x4b, 0xa0, 0x28,
|
||||
0xb5, 0x24, 0x35, 0xaf, 0x24, 0x33, 0x3f, 0x2f, 0xbe, 0x20, 0x3f, 0x27, 0x33, 0xb9, 0x52, 0x82,
|
||||
0x09, 0xa4, 0xc6, 0x49, 0xf8, 0xd1, 0x3d, 0x79, 0xfe, 0x20, 0x98, 0x5c, 0x00, 0x58, 0x2a, 0x88,
|
||||
0xbf, 0x08, 0x55, 0xc0, 0x49, 0xf5, 0xc4, 0x43, 0x39, 0x86, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c,
|
||||
0x92, 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x0b, 0x8f, 0xe5, 0x18, 0x6e,
|
||||
0x3c, 0x96, 0x63, 0x88, 0x62, 0x87, 0xba, 0x3f, 0x89, 0x0d, 0xec, 0x2e, 0x63, 0x40, 0x00, 0x00,
|
||||
0x00, 0xff, 0xff, 0x6c, 0xc4, 0xa8, 0x70, 0x07, 0x01, 0x00, 0x00,
|
||||
}
|
||||
|
||||
func (m *ReadSource) Marshal() (dAtA []byte, err error) {
|
||||
size := m.Size()
|
||||
dAtA = make([]byte, size)
|
||||
n, err := m.MarshalTo(dAtA)
|
||||
n, err := m.MarshalToSizedBuffer(dAtA[:size])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -61,53 +98,47 @@ func (m *ReadSource) Marshal() (dAtA []byte, err error) {
|
|||
}
|
||||
|
||||
func (m *ReadSource) MarshalTo(dAtA []byte) (int, error) {
|
||||
var i int
|
||||
size := m.Size()
|
||||
return m.MarshalToSizedBuffer(dAtA[:size])
|
||||
}
|
||||
|
||||
func (m *ReadSource) MarshalToSizedBuffer(dAtA []byte) (int, error) {
|
||||
i := len(dAtA)
|
||||
_ = i
|
||||
var l int
|
||||
_ = l
|
||||
if len(m.Database) > 0 {
|
||||
dAtA[i] = 0xa
|
||||
i++
|
||||
i = encodeVarintSource(dAtA, i, uint64(len(m.Database)))
|
||||
i += copy(dAtA[i:], m.Database)
|
||||
}
|
||||
if len(m.RetentionPolicy) > 0 {
|
||||
dAtA[i] = 0x12
|
||||
i++
|
||||
i -= len(m.RetentionPolicy)
|
||||
copy(dAtA[i:], m.RetentionPolicy)
|
||||
i = encodeVarintSource(dAtA, i, uint64(len(m.RetentionPolicy)))
|
||||
i += copy(dAtA[i:], m.RetentionPolicy)
|
||||
i--
|
||||
dAtA[i] = 0x12
|
||||
}
|
||||
return i, nil
|
||||
if len(m.Database) > 0 {
|
||||
i -= len(m.Database)
|
||||
copy(dAtA[i:], m.Database)
|
||||
i = encodeVarintSource(dAtA, i, uint64(len(m.Database)))
|
||||
i--
|
||||
dAtA[i] = 0xa
|
||||
}
|
||||
return len(dAtA) - i, nil
|
||||
}
|
||||
|
||||
func encodeFixed64Source(dAtA []byte, offset int, v uint64) int {
|
||||
dAtA[offset] = uint8(v)
|
||||
dAtA[offset+1] = uint8(v >> 8)
|
||||
dAtA[offset+2] = uint8(v >> 16)
|
||||
dAtA[offset+3] = uint8(v >> 24)
|
||||
dAtA[offset+4] = uint8(v >> 32)
|
||||
dAtA[offset+5] = uint8(v >> 40)
|
||||
dAtA[offset+6] = uint8(v >> 48)
|
||||
dAtA[offset+7] = uint8(v >> 56)
|
||||
return offset + 8
|
||||
}
|
||||
func encodeFixed32Source(dAtA []byte, offset int, v uint32) int {
|
||||
dAtA[offset] = uint8(v)
|
||||
dAtA[offset+1] = uint8(v >> 8)
|
||||
dAtA[offset+2] = uint8(v >> 16)
|
||||
dAtA[offset+3] = uint8(v >> 24)
|
||||
return offset + 4
|
||||
}
|
||||
func encodeVarintSource(dAtA []byte, offset int, v uint64) int {
|
||||
offset -= sovSource(v)
|
||||
base := offset
|
||||
for v >= 1<<7 {
|
||||
dAtA[offset] = uint8(v&0x7f | 0x80)
|
||||
v >>= 7
|
||||
offset++
|
||||
}
|
||||
dAtA[offset] = uint8(v)
|
||||
return offset + 1
|
||||
return base
|
||||
}
|
||||
func (m *ReadSource) Size() (n int) {
|
||||
if m == nil {
|
||||
return 0
|
||||
}
|
||||
var l int
|
||||
_ = l
|
||||
l = len(m.Database)
|
||||
|
@ -122,14 +153,7 @@ func (m *ReadSource) Size() (n int) {
|
|||
}
|
||||
|
||||
func sovSource(x uint64) (n int) {
|
||||
for {
|
||||
n++
|
||||
x >>= 7
|
||||
if x == 0 {
|
||||
break
|
||||
}
|
||||
}
|
||||
return n
|
||||
return (math_bits.Len64(x|1) + 6) / 7
|
||||
}
|
||||
func sozSource(x uint64) (n int) {
|
||||
return sovSource(uint64((x << 1) ^ uint64((int64(x) >> 63))))
|
||||
|
@ -149,7 +173,7 @@ func (m *ReadSource) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
wire |= (uint64(b) & 0x7F) << shift
|
||||
wire |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -177,7 +201,7 @@ func (m *ReadSource) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
stringLen |= (uint64(b) & 0x7F) << shift
|
||||
stringLen |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -187,6 +211,9 @@ func (m *ReadSource) Unmarshal(dAtA []byte) error {
|
|||
return ErrInvalidLengthSource
|
||||
}
|
||||
postIndex := iNdEx + intStringLen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthSource
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
|
@ -206,7 +233,7 @@ func (m *ReadSource) Unmarshal(dAtA []byte) error {
|
|||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
stringLen |= (uint64(b) & 0x7F) << shift
|
||||
stringLen |= uint64(b&0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
|
@ -216,6 +243,9 @@ func (m *ReadSource) Unmarshal(dAtA []byte) error {
|
|||
return ErrInvalidLengthSource
|
||||
}
|
||||
postIndex := iNdEx + intStringLen
|
||||
if postIndex < 0 {
|
||||
return ErrInvalidLengthSource
|
||||
}
|
||||
if postIndex > l {
|
||||
return io.ErrUnexpectedEOF
|
||||
}
|
||||
|
@ -227,7 +257,7 @@ func (m *ReadSource) Unmarshal(dAtA []byte) error {
|
|||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if skippy < 0 {
|
||||
if (skippy < 0) || (iNdEx+skippy) < 0 {
|
||||
return ErrInvalidLengthSource
|
||||
}
|
||||
if (iNdEx + skippy) > l {
|
||||
|
@ -245,6 +275,7 @@ func (m *ReadSource) Unmarshal(dAtA []byte) error {
|
|||
func skipSource(dAtA []byte) (n int, err error) {
|
||||
l := len(dAtA)
|
||||
iNdEx := 0
|
||||
depth := 0
|
||||
for iNdEx < l {
|
||||
var wire uint64
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
|
@ -276,10 +307,8 @@ func skipSource(dAtA []byte) (n int, err error) {
|
|||
break
|
||||
}
|
||||
}
|
||||
return iNdEx, nil
|
||||
case 1:
|
||||
iNdEx += 8
|
||||
return iNdEx, nil
|
||||
case 2:
|
||||
var length int
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
|
@ -296,73 +325,34 @@ func skipSource(dAtA []byte) (n int, err error) {
|
|||
break
|
||||
}
|
||||
}
|
||||
iNdEx += length
|
||||
if length < 0 {
|
||||
return 0, ErrInvalidLengthSource
|
||||
}
|
||||
return iNdEx, nil
|
||||
iNdEx += length
|
||||
case 3:
|
||||
for {
|
||||
var innerWire uint64
|
||||
var start int = iNdEx
|
||||
for shift := uint(0); ; shift += 7 {
|
||||
if shift >= 64 {
|
||||
return 0, ErrIntOverflowSource
|
||||
}
|
||||
if iNdEx >= l {
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
b := dAtA[iNdEx]
|
||||
iNdEx++
|
||||
innerWire |= (uint64(b) & 0x7F) << shift
|
||||
if b < 0x80 {
|
||||
break
|
||||
}
|
||||
}
|
||||
innerWireType := int(innerWire & 0x7)
|
||||
if innerWireType == 4 {
|
||||
break
|
||||
}
|
||||
next, err := skipSource(dAtA[start:])
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
iNdEx = start + next
|
||||
}
|
||||
return iNdEx, nil
|
||||
depth++
|
||||
case 4:
|
||||
return iNdEx, nil
|
||||
if depth == 0 {
|
||||
return 0, ErrUnexpectedEndOfGroupSource
|
||||
}
|
||||
depth--
|
||||
case 5:
|
||||
iNdEx += 4
|
||||
return iNdEx, nil
|
||||
default:
|
||||
return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
|
||||
}
|
||||
if iNdEx < 0 {
|
||||
return 0, ErrInvalidLengthSource
|
||||
}
|
||||
if depth == 0 {
|
||||
return iNdEx, nil
|
||||
}
|
||||
}
|
||||
panic("unreachable")
|
||||
return 0, io.ErrUnexpectedEOF
|
||||
}
|
||||
|
||||
var (
|
||||
ErrInvalidLengthSource = fmt.Errorf("proto: negative length found during unmarshalling")
|
||||
ErrIntOverflowSource = fmt.Errorf("proto: integer overflow")
|
||||
ErrInvalidLengthSource = fmt.Errorf("proto: negative length found during unmarshaling")
|
||||
ErrIntOverflowSource = fmt.Errorf("proto: integer overflow")
|
||||
ErrUnexpectedEndOfGroupSource = fmt.Errorf("proto: unexpected end of group")
|
||||
)
|
||||
|
||||
func init() { proto.RegisterFile("source.proto", fileDescriptorSource) }
|
||||
|
||||
var fileDescriptorSource = []byte{
|
||||
// 210 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x29, 0xce, 0x2f, 0x2d,
|
||||
0x4a, 0x4e, 0xd5, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xd2, 0x4f, 0xce, 0xcf, 0xd5, 0x4b, 0xcf,
|
||||
0x2c, 0xc9, 0x28, 0x4d, 0xd2, 0xcb, 0xcc, 0x4b, 0xcb, 0x29, 0xad, 0x48, 0x49, 0x2c, 0x49, 0x84,
|
||||
0x31, 0x93, 0xf4, 0x8a, 0x53, 0x8b, 0xca, 0x32, 0x93, 0x53, 0x8b, 0xf5, 0x8a, 0x4b, 0xf2, 0x8b,
|
||||
0x12, 0xd3, 0x53, 0xa5, 0x74, 0xa1, 0x8a, 0x93, 0xf3, 0x73, 0xf5, 0xd3, 0xf3, 0xd3, 0xf3, 0xf5,
|
||||
0xc1, 0xe6, 0x24, 0x95, 0xa6, 0x81, 0x79, 0x60, 0x0e, 0x98, 0x05, 0x31, 0x5f, 0x29, 0x83, 0x8b,
|
||||
0x2b, 0x28, 0x35, 0x31, 0x25, 0x18, 0x6c, 0xa7, 0x90, 0x14, 0x17, 0x07, 0xc8, 0xf8, 0xa4, 0xc4,
|
||||
0xe2, 0x54, 0x09, 0x46, 0x05, 0x46, 0x0d, 0xce, 0x20, 0x38, 0x5f, 0xc8, 0x8e, 0x4b, 0xa0, 0x28,
|
||||
0xb5, 0x24, 0x35, 0xaf, 0x24, 0x33, 0x3f, 0x2f, 0xbe, 0x20, 0x3f, 0x27, 0x33, 0xb9, 0x52, 0x82,
|
||||
0x09, 0xa4, 0xc6, 0x49, 0xf8, 0xd1, 0x3d, 0x79, 0xfe, 0x20, 0x98, 0x5c, 0x00, 0x58, 0x2a, 0x88,
|
||||
0xbf, 0x08, 0x55, 0xc0, 0x49, 0xf6, 0xc4, 0x43, 0x39, 0x86, 0x13, 0x8f, 0xe4, 0x18, 0x2f, 0x3c,
|
||||
0x92, 0x63, 0x7c, 0xf0, 0x48, 0x8e, 0x71, 0xc2, 0x63, 0x39, 0x86, 0x28, 0x76, 0xa8, 0xbb, 0x93,
|
||||
0xd8, 0xc0, 0xee, 0x31, 0x06, 0x04, 0x00, 0x00, 0xff, 0xff, 0x9c, 0x5a, 0xd6, 0xd1, 0xff, 0x00,
|
||||
0x00, 0x00,
|
||||
}
|
||||
|
|