Add static check

Signed-off-by: neza2017 <yefu.chen@zilliz.com>
neza2017 2020-11-30 19:38:23 +08:00 committed by yefu.chen
parent 2358fae703
commit d3858500c6
23 changed files with 253 additions and 107 deletions

@ -175,7 +175,7 @@ func (sa *SegIDAssigner) syncSegments() {
resp, err := sa.masterClient.AssignSegmentID(ctx, req)
if resp.Status.GetErrorCode() != commonpb.ErrorCode_SUCCESS {
log.Panic("GRPC AssignSegmentID Failed")
log.Println("GRPC AssignSegmentID Failed", resp, err)
return
}

@ -473,7 +473,7 @@ const char descriptor_table_protodef_service_5fmsg_2eproto[] PROTOBUF_SECTION_VA
"g\030\002 \001(\t\"z\n\010RowBatch\022\027\n\017collection_name\030\001"
" \001(\t\022\025\n\rpartition_tag\030\002 \001(\t\022+\n\010row_data\030"
"\003 \003(\0132\031.milvus.proto.common.Blob\022\021\n\thash"
"_keys\030\004 \003(\005\"d\n\020PlaceholderValue\022\013\n\003tag\030\001"
"_keys\030\004 \003(\r\"d\n\020PlaceholderValue\022\013\n\003tag\030\001"
" \001(\t\0223\n\004type\030\002 \001(\0162%.milvus.proto.servic"
"e.PlaceholderType\022\016\n\006values\030\003 \003(\014\"P\n\020Pla"
"ceholderGroup\022<\n\014placeholders\030\001 \003(\0132&.mi"
@ -1265,10 +1265,10 @@ const char* RowBatch::_InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::i
} while (::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<::PROTOBUF_NAMESPACE_ID::uint8>(ptr) == 26);
} else goto handle_unusual;
continue;
// repeated int32 hash_keys = 4;
// repeated uint32 hash_keys = 4;
case 4:
if (PROTOBUF_PREDICT_TRUE(static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 34)) {
ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedInt32Parser(mutable_hash_keys(), ptr, ctx);
ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedUInt32Parser(mutable_hash_keys(), ptr, ctx);
CHK_(ptr);
} else if (static_cast<::PROTOBUF_NAMESPACE_ID::uint8>(tag) == 32) {
add_hash_keys(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint(&ptr));
@ -1346,15 +1346,15 @@ bool RowBatch::MergePartialFromCodedStream(
break;
}
// repeated int32 hash_keys = 4;
// repeated uint32 hash_keys = 4;
case 4: {
if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (34 & 0xFF)) {
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadPackedPrimitive<
::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>(
::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>(
input, this->mutable_hash_keys())));
} else if (static_cast< ::PROTOBUF_NAMESPACE_ID::uint8>(tag) == (32 & 0xFF)) {
DO_((::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::ReadRepeatedPrimitiveNoInline<
::PROTOBUF_NAMESPACE_ID::int32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_INT32>(
::PROTOBUF_NAMESPACE_ID::uint32, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32>(
1, 34u, input, this->mutable_hash_keys())));
} else {
goto handle_unusual;
@ -1418,14 +1418,14 @@ void RowBatch::SerializeWithCachedSizes(
output);
}
// repeated int32 hash_keys = 4;
// repeated uint32 hash_keys = 4;
if (this->hash_keys_size() > 0) {
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteTag(4, ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WIRETYPE_LENGTH_DELIMITED, output);
output->WriteVarint32(_hash_keys_cached_byte_size_.load(
std::memory_order_relaxed));
}
for (int i = 0, n = this->hash_keys_size(); i < n; i++) {
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteInt32NoTag(
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteUInt32NoTag(
this->hash_keys(i), output);
}
@ -1472,7 +1472,7 @@ void RowBatch::SerializeWithCachedSizes(
3, this->row_data(static_cast<int>(i)), target);
}
// repeated int32 hash_keys = 4;
// repeated uint32 hash_keys = 4;
if (this->hash_keys_size() > 0) {
target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::WriteTagToArray(
4,
@ -1482,7 +1482,7 @@ void RowBatch::SerializeWithCachedSizes(
_hash_keys_cached_byte_size_.load(std::memory_order_relaxed),
target);
target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
WriteInt32NoTagToArray(this->hash_keys_, target);
WriteUInt32NoTagToArray(this->hash_keys_, target);
}
if (_internal_metadata_.have_unknown_fields()) {
@ -1517,10 +1517,10 @@ size_t RowBatch::ByteSizeLong() const {
}
}
// repeated int32 hash_keys = 4;
// repeated uint32 hash_keys = 4;
{
size_t data_size = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
Int32Size(this->hash_keys_);
UInt32Size(this->hash_keys_);
if (data_size > 0) {
total_size += 1 +
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::Int32Size(

@ -573,15 +573,15 @@ class RowBatch :
const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::milvus::proto::common::Blob >&
row_data() const;
// repeated int32 hash_keys = 4;
// repeated uint32 hash_keys = 4;
int hash_keys_size() const;
void clear_hash_keys();
::PROTOBUF_NAMESPACE_ID::int32 hash_keys(int index) const;
void set_hash_keys(int index, ::PROTOBUF_NAMESPACE_ID::int32 value);
void add_hash_keys(::PROTOBUF_NAMESPACE_ID::int32 value);
const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >&
::PROTOBUF_NAMESPACE_ID::uint32 hash_keys(int index) const;
void set_hash_keys(int index, ::PROTOBUF_NAMESPACE_ID::uint32 value);
void add_hash_keys(::PROTOBUF_NAMESPACE_ID::uint32 value);
const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >&
hash_keys() const;
::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >*
::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >*
mutable_hash_keys();
// string collection_name = 1;
@ -612,7 +612,7 @@ class RowBatch :
::PROTOBUF_NAMESPACE_ID::internal::InternalMetadataWithArena _internal_metadata_;
::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::milvus::proto::common::Blob > row_data_;
::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 > hash_keys_;
::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 > hash_keys_;
mutable std::atomic<int> _hash_keys_cached_byte_size_;
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr collection_name_;
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr partition_tag_;
@ -2777,31 +2777,31 @@ RowBatch::row_data() const {
return row_data_;
}
// repeated int32 hash_keys = 4;
// repeated uint32 hash_keys = 4;
inline int RowBatch::hash_keys_size() const {
return hash_keys_.size();
}
inline void RowBatch::clear_hash_keys() {
hash_keys_.Clear();
}
inline ::PROTOBUF_NAMESPACE_ID::int32 RowBatch::hash_keys(int index) const {
inline ::PROTOBUF_NAMESPACE_ID::uint32 RowBatch::hash_keys(int index) const {
// @@protoc_insertion_point(field_get:milvus.proto.service.RowBatch.hash_keys)
return hash_keys_.Get(index);
}
inline void RowBatch::set_hash_keys(int index, ::PROTOBUF_NAMESPACE_ID::int32 value) {
inline void RowBatch::set_hash_keys(int index, ::PROTOBUF_NAMESPACE_ID::uint32 value) {
hash_keys_.Set(index, value);
// @@protoc_insertion_point(field_set:milvus.proto.service.RowBatch.hash_keys)
}
inline void RowBatch::add_hash_keys(::PROTOBUF_NAMESPACE_ID::int32 value) {
inline void RowBatch::add_hash_keys(::PROTOBUF_NAMESPACE_ID::uint32 value) {
hash_keys_.Add(value);
// @@protoc_insertion_point(field_add:milvus.proto.service.RowBatch.hash_keys)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >&
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >&
RowBatch::hash_keys() const {
// @@protoc_insertion_point(field_list:milvus.proto.service.RowBatch.hash_keys)
return hash_keys_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::int32 >*
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::PROTOBUF_NAMESPACE_ID::uint32 >*
RowBatch::mutable_hash_keys() {
// @@protoc_insertion_point(field_mutable_list:milvus.proto.service.RowBatch.hash_keys)
return &hash_keys_;

@ -110,7 +110,7 @@ func (mt *metaTable) saveCollectionMeta(coll *pb.CollectionMeta) error {
collBytes := proto.MarshalTextString(coll)
mt.collID2Meta[coll.ID] = *coll
mt.collName2ID[coll.Schema.Name] = coll.ID
return mt.client.Save("/collection/"+strconv.FormatInt(coll.ID, 10), string(collBytes))
return mt.client.Save("/collection/"+strconv.FormatInt(coll.ID, 10), collBytes)
}
// metaTable.ddLock.Lock() before call this function
@ -119,7 +119,7 @@ func (mt *metaTable) saveSegmentMeta(seg *pb.SegmentMeta) error {
mt.segID2Meta[seg.SegmentID] = *seg
return mt.client.Save("/segment/"+strconv.FormatInt(seg.SegmentID, 10), string(segBytes))
return mt.client.Save("/segment/"+strconv.FormatInt(seg.SegmentID, 10), segBytes)
}
// metaTable.ddLock.Lock() before call this function
@ -132,7 +132,7 @@ func (mt *metaTable) saveCollectionAndDeleteSegmentsMeta(coll *pb.CollectionMeta
kvs := make(map[string]string)
collStrs := proto.MarshalTextString(coll)
kvs["/collection/"+strconv.FormatInt(coll.ID, 10)] = string(collStrs)
kvs["/collection/"+strconv.FormatInt(coll.ID, 10)] = collStrs
for _, segID := range segIDs {
_, ok := mt.segID2Meta[segID]
@ -152,14 +152,14 @@ func (mt *metaTable) saveCollectionsAndSegmentsMeta(coll *pb.CollectionMeta, seg
kvs := make(map[string]string)
collBytes := proto.MarshalTextString(coll)
kvs["/collection/"+strconv.FormatInt(coll.ID, 10)] = string(collBytes)
kvs["/collection/"+strconv.FormatInt(coll.ID, 10)] = collBytes
mt.collID2Meta[coll.ID] = *coll
mt.collName2ID[coll.Schema.Name] = coll.ID
segBytes := proto.MarshalTextString(seg)
kvs["/segment/"+strconv.FormatInt(seg.SegmentID, 10)] = string(segBytes)
kvs["/segment/"+strconv.FormatInt(seg.SegmentID, 10)] = segBytes
mt.segID2Meta[seg.SegmentID] = *seg
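Note: the string(...) conversions removed in these hunks are redundant because proto.MarshalTextString already returns a string; dropping such no-op conversions is the kind of issue the new static check reports.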

@ -186,7 +186,7 @@ func TestSegmentManager_SegmentStats(t *testing.T) {
baseMsg := msgstream.BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{1},
HashValues: []uint32{1},
}
msg := msgstream.QueryNodeSegStatsMsg{
QueryNodeSegStats: stats,
@ -358,7 +358,7 @@ func TestSegmentManager_RPC(t *testing.T) {
},
},
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{0},
HashValues: []uint32{0},
},
},
},

@ -60,7 +60,7 @@ func (syncMsgProducer *timeSyncMsgProducer) broadcastMsg(barrier TimeTickBarrier
baseMsg := ms.BaseMsg{
BeginTimestamp: timetick,
EndTimestamp: timetick,
HashValues: []int32{0},
HashValues: []uint32{0},
}
timeTickResult := internalPb.TimeTickMsg{
MsgType: internalPb.MsgType_kTimeTick,

@ -69,7 +69,7 @@ func (ttBarrier *softTimeTickBarrier) Start() error {
for _, timetickmsg := range ttmsgs.Msgs {
ttmsg := timetickmsg.(*ms.TimeTickMsg)
oldT, ok := ttBarrier.peer2LastTt[ttmsg.PeerID]
log.Printf("[softTimeTickBarrier] peer(%d)=%d\n", ttmsg.PeerID, ttmsg.Timestamp)
// log.Printf("[softTimeTickBarrier] peer(%d)=%d\n", ttmsg.PeerID, ttmsg.Timestamp)
if !ok {
log.Printf("[softTimeTickBarrier] Warning: peerID %d not exist\n", ttmsg.PeerID)

@ -16,7 +16,7 @@ import (
func getTtMsg(msgType internalPb.MsgType, peerID UniqueID, timeStamp uint64) ms.TsMsg {
baseMsg := ms.BaseMsg{
HashValues: []int32{int32(peerID)},
HashValues: []uint32{uint32(peerID)},
}
timeTickResult := internalPb.TimeTickMsg{
MsgType: internalPb.MsgType_kTimeTick,

@ -148,7 +148,7 @@ func (ms *PulsarMsgStream) Produce(msgPack *MsgPack) error {
bucketValues[index] = channelID
continue
}
bucketValues[index] = hashValue % int32(len(ms.producers))
bucketValues[index] = int32(hashValue % uint32(len(ms.producers)))
}
reBucketValues[channelID] = bucketValues
}
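For orientation, here is a minimal self-contained sketch of the bucketing step this hunk touches: each message's uint32 hash value is reduced modulo the number of producers to pick a bucket. The names pickBucket and producerCount are illustrative only, not part of the msgstream API; the real code also keys buckets per channelID and short-circuits when a channel is pinned.

package main

import "fmt"

// pickBucket maps a uint32 hash value onto one of producerCount producers,
// mirroring the modulo step in PulsarMsgStream.Produce shown above.
func pickBucket(hashValue uint32, producerCount int) int32 {
	return int32(hashValue % uint32(producerCount))
}

func main() {
	for _, hv := range []uint32{1, 3, 4294967295} { // large values are why uint32 matters
		fmt.Printf("hash %d -> producer %d\n", hv, pickBucket(hv, 4))
	}
}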

@ -36,11 +36,11 @@ func repackFunc(msgs []TsMsg, hashKeys [][]int32) (map[int32]*MsgPack, error) {
return result, nil
}
func getTsMsg(msgType MsgType, reqID UniqueID, hashValue int32) TsMsg {
func getTsMsg(msgType MsgType, reqID UniqueID, hashValue uint32) TsMsg {
baseMsg := BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{hashValue},
HashValues: []uint32{hashValue},
}
switch msgType {
case internalPb.MsgType_kInsert:
@ -129,11 +129,11 @@ func getTsMsg(msgType MsgType, reqID UniqueID, hashValue int32) TsMsg {
return nil
}
func getTimeTickMsg(reqID UniqueID, hashValue int32, time uint64) TsMsg {
func getTimeTickMsg(reqID UniqueID, hashValue uint32, time uint64) TsMsg {
baseMsg := BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{hashValue},
HashValues: []uint32{hashValue},
}
timeTickResult := internalPb.TimeTickMsg{
MsgType: internalPb.MsgType_kTimeTick,
@ -369,7 +369,7 @@ func TestStream_PulsarMsgStream_InsertRepackFunc(t *testing.T) {
baseMsg := BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{1, 3},
HashValues: []uint32{1, 3},
}
insertRequest := internalPb.InsertRequest{
@ -422,7 +422,7 @@ func TestStream_PulsarMsgStream_DeleteRepackFunc(t *testing.T) {
baseMsg := BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{1, 3},
HashValues: []uint32{1, 3},
}
deleteRequest := internalPb.DeleteRequest{

@ -11,7 +11,7 @@ type TsMsg interface {
BeginTs() Timestamp
EndTs() Timestamp
Type() MsgType
HashKeys() []int32
HashKeys() []uint32
Marshal(TsMsg) ([]byte, error)
Unmarshal([]byte) (TsMsg, error)
}
@ -19,7 +19,7 @@ type TsMsg interface {
type BaseMsg struct {
BeginTimestamp Timestamp
EndTimestamp Timestamp
HashValues []int32
HashValues []uint32
}
func (bm *BaseMsg) BeginTs() Timestamp {
@ -30,7 +30,7 @@ func (bm *BaseMsg) EndTs() Timestamp {
return bm.EndTimestamp
}
func (bm *BaseMsg) HashKeys() []int32 {
func (bm *BaseMsg) HashKeys() []uint32 {
return bm.HashValues
}

@ -87,11 +87,11 @@ func newRepackFunc(tsMsgs []TsMsg, hashKeys [][]int32) (map[int32]*MsgPack, erro
return result, nil
}
func getInsertTask(reqID UniqueID, hashValue int32) TsMsg {
func getInsertTask(reqID UniqueID, hashValue uint32) TsMsg {
baseMsg := BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{hashValue},
HashValues: []uint32{hashValue},
}
insertRequest := internalPb.InsertRequest{
MsgType: internalPb.MsgType_kInsert,

@ -30,7 +30,7 @@ message RowBatch {
string collection_name = 1;
string partition_tag = 2;
repeated common.Blob row_data = 3;
repeated int32 hash_keys = 4;
repeated uint32 hash_keys = 4;
}
/**

@ -148,7 +148,7 @@ type RowBatch struct {
CollectionName string `protobuf:"bytes,1,opt,name=collection_name,json=collectionName,proto3" json:"collection_name,omitempty"`
PartitionTag string `protobuf:"bytes,2,opt,name=partition_tag,json=partitionTag,proto3" json:"partition_tag,omitempty"`
RowData []*commonpb.Blob `protobuf:"bytes,3,rep,name=row_data,json=rowData,proto3" json:"row_data,omitempty"`
HashKeys []int32 `protobuf:"varint,4,rep,packed,name=hash_keys,json=hashKeys,proto3" json:"hash_keys,omitempty"`
HashKeys []uint32 `protobuf:"varint,4,rep,packed,name=hash_keys,json=hashKeys,proto3" json:"hash_keys,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
@ -200,7 +200,7 @@ func (m *RowBatch) GetRowData() []*commonpb.Blob {
return nil
}
func (m *RowBatch) GetHashKeys() []int32 {
func (m *RowBatch) GetHashKeys() []uint32 {
if m != nil {
return m.HashKeys
}
@ -881,36 +881,36 @@ var fileDescriptor_b4b40b84dd2f74cb = []byte{
0x9d, 0x41, 0x7f, 0x1a, 0xd1, 0x98, 0x18, 0x7b, 0xc4, 0x45, 0x2f, 0xa0, 0x18, 0xf1, 0x51, 0xdf,
0x21, 0x92, 0x98, 0x7a, 0x5d, 0x6f, 0x94, 0xb6, 0x56, 0x9b, 0x73, 0x6d, 0xca, 0xba, 0xd3, 0xf1,
0xf9, 0x00, 0xff, 0x1d, 0xf1, 0xd1, 0x1e, 0x91, 0x04, 0xad, 0xc3, 0xa2, 0x47, 0x84, 0xd7, 0xff,
0x4c, 0xc7, 0xc2, 0xcc, 0xd7, 0xf5, 0xc6, 0x02, 0x2e, 0x26, 0x86, 0x23, 0x3a, 0x16, 0xd6, 0x08,
0xaa, 0xa7, 0x3e, 0xb1, 0xa9, 0xc7, 0x7d, 0x87, 0x46, 0xe7, 0xc4, 0x8f, 0x27, 0x35, 0x69, 0x93,
0x9a, 0xd0, 0x0e, 0xe4, 0xe5, 0x38, 0xa4, 0x2a, 0xa9, 0xca, 0xd6, 0x66, 0xf3, 0xb6, 0xd9, 0x34,
0x67, 0xe2, 0xf4, 0xc6, 0x21, 0xc5, 0xca, 0x05, 0xad, 0x40, 0xe1, 0x32, 0x89, 0x2a, 0x54, 0xc6,
0x06, 0xce, 0x34, 0xeb, 0xd3, 0x1c, 0xf0, 0xdb, 0x88, 0xc7, 0x21, 0x3a, 0x04, 0x23, 0x9c, 0xda,
0x84, 0xa9, 0xa9, 0x1a, 0xff, 0xff, 0x2d, 0x9c, 0x4a, 0x1b, 0xcf, 0xf9, 0x5a, 0x5f, 0x34, 0x58,
0x78, 0x1f, 0xd3, 0x68, 0x7c, 0xf7, 0x19, 0x6c, 0x42, 0x65, 0x6e, 0x06, 0xc2, 0xcc, 0xd5, 0xf5,
0xc6, 0x22, 0x2e, 0xcf, 0x0e, 0x41, 0x24, 0xed, 0x71, 0x84, 0x6f, 0xea, 0x69, 0x7b, 0x1c, 0xe1,
0xa3, 0x67, 0xb0, 0x3c, 0x83, 0xdd, 0x77, 0x93, 0x62, 0xcc, 0x7c, 0x5d, 0x6b, 0x18, 0xb8, 0x1a,
0xde, 0x28, 0xd2, 0xfa, 0x08, 0x95, 0x33, 0x19, 0xb1, 0xc0, 0xc5, 0x54, 0x84, 0x3c, 0x10, 0x14,
0x6d, 0x43, 0x41, 0x48, 0x22, 0x63, 0xa1, 0xf2, 0x2a, 0x6d, 0xad, 0xdf, 0x3a, 0xd4, 0x33, 0xf5,
0x05, 0x67, 0x5f, 0x51, 0x0d, 0x16, 0x54, 0x27, 0xb3, 0x45, 0x49, 0x15, 0xeb, 0x02, 0x8c, 0x0e,
0xe7, 0xfe, 0x23, 0x86, 0x2e, 0x5e, 0x87, 0x26, 0x80, 0xd2, 0xbc, 0x8f, 0x99, 0x90, 0x0f, 0x03,
0x98, 0xee, 0x44, 0xda, 0xe0, 0xeb, 0x9d, 0x18, 0xc0, 0x3f, 0x07, 0x81, 0xa4, 0x2e, 0x8d, 0x1e,
0x1b, 0x43, 0x9f, 0x60, 0x08, 0xa8, 0x65, 0x18, 0x98, 0x04, 0x2e, 0x7d, 0x70, 0xa7, 0x06, 0xd4,
0x65, 0x81, 0xea, 0x94, 0x8e, 0x53, 0x25, 0x59, 0x10, 0x1a, 0x38, 0x6a, 0x41, 0x74, 0x9c, 0x88,
0xd6, 0x77, 0x0d, 0xfe, 0x9d, 0x72, 0xd3, 0x1e, 0x15, 0x76, 0xc4, 0xc2, 0x44, 0xbc, 0x1f, 0xec,
0x2b, 0x28, 0xa4, 0xcc, 0xa7, 0x70, 0x4b, 0x3f, 0x1d, 0x64, 0xca, 0x8a, 0x53, 0xc0, 0x33, 0x65,
0xc0, 0x99, 0x13, 0x6a, 0x03, 0x24, 0x81, 0x98, 0x90, 0xcc, 0x16, 0x19, 0x91, 0xfc, 0x77, 0x2b,
0xee, 0x11, 0x1d, 0xab, 0xdb, 0x3a, 0x25, 0x2c, 0xc2, 0x33, 0x4e, 0xd6, 0x37, 0x0d, 0x6a, 0x13,
0xc6, 0x7c, 0x70, 0x3d, 0x2f, 0x21, 0xaf, 0xce, 0x32, 0xad, 0x66, 0xe3, 0x17, 0xf7, 0x3e, 0x4b,
0xd0, 0x58, 0x39, 0x3c, 0x46, 0x25, 0x47, 0x90, 0x7f, 0xc7, 0xa4, 0xba, 0xea, 0x83, 0xbd, 0x94,
0x72, 0x74, 0x9c, 0x88, 0x68, 0x75, 0x86, 0x6d, 0x73, 0x8a, 0xbb, 0x26, 0x94, 0xba, 0x92, 0x0c,
0x80, 0x47, 0x19, 0xa9, 0xe5, 0x70, 0xa6, 0x59, 0xe7, 0x50, 0x52, 0x9c, 0x83, 0xa9, 0x88, 0x7d,
0x79, 0xbf, 0x66, 0x20, 0xc8, 0x7b, 0x4c, 0x8a, 0x0c, 0x52, 0xc9, 0x4f, 0x5f, 0xc3, 0xd2, 0x0d,
0x76, 0x45, 0x45, 0xc8, 0x77, 0x4f, 0xba, 0xfb, 0xd5, 0xbf, 0xd0, 0x32, 0x94, 0xcf, 0xf7, 0x77,
0x7b, 0x27, 0xb8, 0xdf, 0x39, 0xe8, 0xb6, 0xf1, 0x45, 0xd5, 0x41, 0x55, 0x30, 0x32, 0xd3, 0x9b,
0xe3, 0x93, 0x76, 0xaf, 0x4a, 0x3b, 0xbb, 0x1f, 0xda, 0x2e, 0x93, 0x5e, 0x3c, 0x48, 0x50, 0x5b,
0x57, 0xcc, 0xf7, 0xd9, 0x95, 0xa4, 0xb6, 0xd7, 0x4a, 0x33, 0x7a, 0xee, 0x30, 0x21, 0x23, 0x36,
0x88, 0x25, 0x75, 0x5a, 0x2c, 0x90, 0x34, 0x0a, 0x88, 0xdf, 0x52, 0x69, 0xb6, 0xb2, 0x01, 0x84,
0x83, 0x41, 0x41, 0x19, 0xb6, 0x7f, 0x04, 0x00, 0x00, 0xff, 0xff, 0x33, 0xc8, 0x08, 0xe2, 0xaf,
0x4c, 0xc7, 0xc2, 0xcc, 0xd7, 0xf5, 0x46, 0x19, 0x17, 0x13, 0xc3, 0x11, 0x1d, 0x0b, 0x6b, 0x04,
0xd5, 0x53, 0x9f, 0xd8, 0xd4, 0xe3, 0xbe, 0x43, 0xa3, 0x73, 0xe2, 0xc7, 0x93, 0x9a, 0xb4, 0x49,
0x4d, 0x68, 0x07, 0xf2, 0x72, 0x1c, 0x52, 0x95, 0x54, 0x65, 0x6b, 0xb3, 0x79, 0xdb, 0x6c, 0x9a,
0x33, 0x71, 0x7a, 0xe3, 0x90, 0x62, 0xe5, 0x82, 0x56, 0xa0, 0x70, 0x99, 0x44, 0x15, 0x2a, 0x63,
0x03, 0x67, 0x9a, 0xf5, 0x69, 0x0e, 0xf8, 0x6d, 0xc4, 0xe3, 0x10, 0x1d, 0x82, 0x11, 0x4e, 0x6d,
0xc2, 0xd4, 0x54, 0x8d, 0xff, 0xff, 0x16, 0x4e, 0xa5, 0x8d, 0xe7, 0x7c, 0xad, 0x2f, 0x1a, 0x2c,
0xbc, 0x8f, 0x69, 0x34, 0xbe, 0xfb, 0x0c, 0x36, 0xa1, 0x32, 0x37, 0x03, 0x61, 0xe6, 0xea, 0x7a,
0x63, 0x11, 0x97, 0x67, 0x87, 0x20, 0x92, 0xf6, 0x38, 0xc2, 0x37, 0xf5, 0xb4, 0x3d, 0x8e, 0xf0,
0xd1, 0x33, 0x58, 0x9e, 0xc1, 0xee, 0xbb, 0x49, 0x31, 0x66, 0xbe, 0xae, 0x35, 0x0c, 0x5c, 0x0d,
0x6f, 0x14, 0x69, 0x7d, 0x84, 0xca, 0x99, 0x8c, 0x58, 0xe0, 0x62, 0x2a, 0x42, 0x1e, 0x08, 0x8a,
0xb6, 0xa1, 0x20, 0x24, 0x91, 0xb1, 0x50, 0x79, 0x95, 0xb6, 0xd6, 0x6f, 0x1d, 0xea, 0x99, 0xfa,
0x82, 0xb3, 0xaf, 0xa8, 0x06, 0x0b, 0xaa, 0x93, 0xd9, 0xa2, 0xa4, 0x8a, 0x75, 0x01, 0x46, 0x87,
0x73, 0xff, 0x11, 0x43, 0x17, 0xaf, 0x43, 0x13, 0x40, 0x69, 0xde, 0xc7, 0x4c, 0xc8, 0x87, 0x01,
0x4c, 0x77, 0x22, 0x6d, 0xf0, 0xf5, 0x4e, 0x0c, 0xe0, 0x9f, 0x83, 0x40, 0x52, 0x97, 0x46, 0x8f,
0x8d, 0xa1, 0x4f, 0x30, 0x04, 0xd4, 0x32, 0x0c, 0x4c, 0x02, 0x97, 0x3e, 0xb8, 0x53, 0x03, 0xea,
0xb2, 0x40, 0x75, 0x4a, 0xc7, 0xa9, 0x92, 0x2c, 0x08, 0x0d, 0x1c, 0xb5, 0x20, 0x3a, 0x4e, 0x44,
0xeb, 0xbb, 0x06, 0xff, 0x4e, 0xb9, 0x69, 0x8f, 0x0a, 0x3b, 0x62, 0x61, 0x22, 0xde, 0x0f, 0xf6,
0x15, 0x14, 0x52, 0xe6, 0x53, 0xb8, 0xa5, 0x9f, 0x0e, 0x32, 0x65, 0xc5, 0x29, 0xe0, 0x99, 0x32,
0xe0, 0xcc, 0x09, 0xb5, 0x01, 0x92, 0x40, 0x4c, 0x48, 0x66, 0x8b, 0x8c, 0x48, 0xfe, 0xbb, 0x15,
0xf7, 0x88, 0x8e, 0xd5, 0x6d, 0x9d, 0x12, 0x16, 0xe1, 0x19, 0x27, 0xeb, 0x9b, 0x06, 0xb5, 0x09,
0x63, 0x3e, 0xb8, 0x9e, 0x97, 0x90, 0x57, 0x67, 0x99, 0x56, 0xb3, 0xf1, 0x8b, 0x7b, 0x9f, 0x25,
0x68, 0xac, 0x1c, 0x1e, 0xa3, 0x92, 0x23, 0xc8, 0xbf, 0x63, 0x52, 0x5d, 0xf5, 0xc1, 0x5e, 0x4a,
0x39, 0x3a, 0x4e, 0x44, 0xb4, 0x3a, 0xc3, 0xb6, 0x39, 0xc5, 0x5d, 0x13, 0x4a, 0x5d, 0x49, 0x06,
0xc0, 0xa3, 0x8c, 0xd4, 0x72, 0x38, 0xd3, 0xac, 0x73, 0x28, 0x29, 0xce, 0xc1, 0x54, 0xc4, 0xbe,
0xbc, 0x5f, 0x33, 0x10, 0xe4, 0x3d, 0x26, 0x45, 0x06, 0xa9, 0xe4, 0xa7, 0xaf, 0x61, 0xe9, 0x06,
0xbb, 0xa2, 0x22, 0xe4, 0xbb, 0x27, 0xdd, 0xfd, 0xea, 0x5f, 0x68, 0x19, 0xca, 0xe7, 0xfb, 0xbb,
0xbd, 0x13, 0xdc, 0xef, 0x1c, 0x74, 0xdb, 0xf8, 0xa2, 0xea, 0xa0, 0x2a, 0x18, 0x99, 0xe9, 0xcd,
0xf1, 0x49, 0xbb, 0x57, 0xa5, 0x9d, 0xdd, 0x0f, 0x6d, 0x97, 0x49, 0x2f, 0x1e, 0x24, 0xa8, 0xad,
0x2b, 0xe6, 0xfb, 0xec, 0x4a, 0x52, 0xdb, 0x6b, 0xa5, 0x19, 0x3d, 0x77, 0x98, 0x90, 0x11, 0x1b,
0xc4, 0x92, 0x3a, 0x2d, 0x16, 0x48, 0x1a, 0x05, 0xc4, 0x6f, 0xa9, 0x34, 0x5b, 0xd9, 0x00, 0xc2,
0xc1, 0xa0, 0xa0, 0x0c, 0xdb, 0x3f, 0x02, 0x00, 0x00, 0xff, 0xff, 0xee, 0x08, 0x5d, 0xa4, 0xaf,
0x07, 0x00, 0x00,
}

@ -231,7 +231,7 @@ func TestProxy_Insert(t *testing.T) {
CollectionName: collectionName,
PartitionTag: "haha",
RowData: make([]*commonpb.Blob, 0),
HashKeys: make([]int32, 0),
HashKeys: make([]uint32, 0),
}
wg.Add(1)
@ -281,7 +281,7 @@ func TestProxy_Search(t *testing.T) {
for j := 0; j < 4; j++ {
searchResultMsg := &msgstream.SearchResultMsg{
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{1},
HashValues: []uint32{1},
},
SearchResult: internalpb.SearchResult{
MsgType: internalpb.MsgType_kSearchResult,

@ -7,6 +7,8 @@ import (
"math"
"strconv"
"github.com/zilliztech/milvus-distributed/internal/util/typeutil"
"github.com/golang/protobuf/proto"
"github.com/zilliztech/milvus-distributed/internal/allocator"
@ -102,12 +104,17 @@ func (it *InsertTask) Execute() error {
var rowIDBegin UniqueID
var rowIDEnd UniqueID
if autoID || true {
if it.HashValues == nil || len(it.HashValues) == 0 {
it.HashValues = make([]uint32, 0)
}
rowNums := len(it.BaseInsertTask.RowData)
rowIDBegin, rowIDEnd, _ = it.rowIDAllocator.Alloc(uint32(rowNums))
it.BaseInsertTask.RowIDs = make([]UniqueID, rowNums)
for i := rowIDBegin; i < rowIDEnd; i++ {
offset := i - rowIDBegin
it.BaseInsertTask.RowIDs[offset] = i
hashValue, _ := typeutil.Hash32Int64(i)
it.HashValues = append(it.HashValues, hashValue)
}
}
@ -180,6 +187,14 @@ func (cct *CreateCollectionTask) PreExecute() error {
return err
}
if err := ValidateDuplicatedFieldName(cct.schema.Fields); err != nil {
return err
}
if err := ValidatePrimaryKey(cct.schema); err != nil {
return err
}
// validate field name
for _, field := range cct.schema.Fields {
if err := ValidateFieldName(field.Name); err != nil {
@ -212,6 +227,9 @@ func (cct *CreateCollectionTask) PreExecute() error {
}
}
}
if err := ValidateVectorFieldMetricType(field); err != nil {
return err
}
}
return nil
@ -368,7 +386,7 @@ func (qt *QueryTask) Execute() error {
var tsMsg msgstream.TsMsg = &msgstream.SearchMsg{
SearchRequest: qt.SearchRequest,
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{int32(Params.ProxyID())},
HashValues: []uint32{uint32(Params.ProxyID())},
BeginTimestamp: qt.Timestamp,
EndTimestamp: qt.Timestamp,
},
@ -395,24 +413,16 @@ func (qt *QueryTask) PostExecute() error {
return errors.New("wait to finish failed, timeout")
case searchResults := <-qt.resultBuf:
filterSearchResult := make([]*internalpb.SearchResult, 0)
var filterReason string
for _, partialSearchResult := range searchResults {
if partialSearchResult.Status.ErrorCode == commonpb.ErrorCode_SUCCESS {
filterSearchResult = append(filterSearchResult, partialSearchResult)
} else {
filterReason += partialSearchResult.Status.Reason + "\n"
}
}
rlen := len(filterSearchResult) // query node num
rlen := len(filterSearchResult) // query num
if rlen <= 0 {
qt.result = &servicepb.QueryResult{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UNEXPECTED_ERROR,
Reason: filterReason,
},
}
return errors.New(filterReason)
qt.result = &servicepb.QueryResult{}
return nil
}
n := len(filterSearchResult[0].Hits) // n
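As a rough illustration of the new hash-value assignment in InsertTask.Execute, the sketch below derives one uint32 hash value per allocated row ID. hash32Int64 is a stand-in written with FNV-1a; the actual typeutil.Hash32Int64 helper may use a different hash function.

package main

import (
	"encoding/binary"
	"fmt"
	"hash/fnv"
)

// hash32Int64 reduces an int64 row ID to a uint32 hash value. This is only a
// stand-in for typeutil.Hash32Int64; the real implementation may differ.
func hash32Int64(v int64) uint32 {
	b := make([]byte, 8)
	binary.LittleEndian.PutUint64(b, uint64(v))
	h := fnv.New32a()
	h.Write(b)
	return h.Sum32()
}

func main() {
	// Mimic the Execute() loop above: one hash value per allocated row ID.
	rowIDBegin, rowIDEnd := int64(100), int64(104)
	hashValues := make([]uint32, 0, rowIDEnd-rowIDBegin)
	for i := rowIDBegin; i < rowIDEnd; i++ {
		hashValues = append(hashValues, hash32Int64(i))
	}
	fmt.Println(hashValues)
}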

@ -70,7 +70,7 @@ func (tt *timeTick) tick() error {
msgPack := msgstream.MsgPack{}
timeTickMsg := &msgstream.TimeTickMsg{
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{int32(Params.ProxyID())},
HashValues: []uint32{uint32(Params.ProxyID())},
},
TimeTickMsg: internalpb.TimeTickMsg{
MsgType: internalpb.MsgType_kTimeTick,

@ -5,6 +5,7 @@ import (
"strings"
"github.com/zilliztech/milvus-distributed/internal/errors"
"github.com/zilliztech/milvus-distributed/internal/proto/schemapb"
)
func isAlpha(c uint8) bool {
@ -127,3 +128,55 @@ func ValidateDimension(dim int64, isBinary bool) error {
}
return nil
}
func ValidateVectorFieldMetricType(field *schemapb.FieldSchema) error {
if field.DataType != schemapb.DataType_VECTOR_FLOAT {
return nil
}
for _, params := range field.IndexParams {
if params.Key == "metric_type" {
return nil
}
}
return errors.New("vector float without metric_type")
}
func ValidateDuplicatedFieldName(fields []*schemapb.FieldSchema) error {
names := make(map[string]bool)
for _, field := range fields {
_, ok := names[field.Name]
if ok {
return errors.New("duplicated filed name")
}
names[field.Name] = true
}
return nil
}
func ValidatePrimaryKey(coll *schemapb.CollectionSchema) error {
//no primary key for auto id
if coll.AutoID {
for _, field := range coll.Fields {
if field.IsPrimaryKey {
return errors.Errorf("collection %s is auto id, so filed %s should not defined as primary key", coll.Name, field.Name)
}
}
return nil
}
idx := -1
for i, field := range coll.Fields {
if field.IsPrimaryKey {
if idx != -1 {
return errors.Errorf("there are more than one primary key, filed name = %s, %s", coll.Fields[idx].Name, field.Name)
}
if field.DataType != schemapb.DataType_INT64 {
return errors.Errorf("the data type of primary key should be int64")
}
idx = i
}
}
if idx == -1 {
return errors.Errorf("primay key is undefined")
}
return nil
}
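For reference, a hypothetical convenience wrapper showing how the three checks added above could be combined. This wrapper does not exist in the commit (the actual call sites are in CreateCollectionTask.PreExecute), and it assumes the validators live in the same proxy package:

package proxy

import "github.com/zilliztech/milvus-distributed/internal/proto/schemapb"

// validateSchema runs the schema-level checks introduced in this commit.
// Illustrative only; PreExecute calls the validators directly.
func validateSchema(schema *schemapb.CollectionSchema) error {
	if err := ValidateDuplicatedFieldName(schema.Fields); err != nil {
		return err
	}
	if err := ValidatePrimaryKey(schema); err != nil {
		return err
	}
	for _, field := range schema.Fields {
		if err := ValidateVectorFieldMetricType(field); err != nil {
			return err
		}
	}
	return nil
}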

@ -4,6 +4,8 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/zilliztech/milvus-distributed/internal/proto/commonpb"
"github.com/zilliztech/milvus-distributed/internal/proto/schemapb"
)
func TestValidateCollectionName(t *testing.T) {
@ -95,3 +97,84 @@ func TestValidateDimension(t *testing.T) {
assert.NotNil(t, ValidateDimension(Params.MaxDimension()+1, false))
assert.NotNil(t, ValidateDimension(9, true))
}
func TestValidateVectorFieldMetricType(t *testing.T) {
field1 := &schemapb.FieldSchema{
Name: "",
IsPrimaryKey: false,
Description: "",
DataType: schemapb.DataType_INT64,
TypeParams: nil,
IndexParams: nil,
}
assert.Nil(t, ValidateVectorFieldMetricType(field1))
field1.DataType = schemapb.DataType_VECTOR_FLOAT
assert.NotNil(t, ValidateVectorFieldMetricType(field1))
field1.IndexParams = []*commonpb.KeyValuePair{
&commonpb.KeyValuePair{
Key: "abcdefg",
Value: "",
},
}
assert.NotNil(t, ValidateVectorFieldMetricType(field1))
field1.IndexParams = append(field1.IndexParams, &commonpb.KeyValuePair{
Key: "metric_type",
Value: "",
})
assert.Nil(t, ValidateVectorFieldMetricType(field1))
}
func TestValidateDuplicatedFieldName(t *testing.T) {
fields := []*schemapb.FieldSchema{
{Name: "abc"},
{Name: "def"},
}
assert.Nil(t, ValidateDuplicatedFieldName(fields))
fields = append(fields, &schemapb.FieldSchema{
Name: "abc",
})
assert.NotNil(t, ValidateDuplicatedFieldName(fields))
}
func TestValidatePrimaryKey(t *testing.T) {
coll := schemapb.CollectionSchema{
Name: "coll1",
Description: "",
AutoID: true,
Fields: nil,
}
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: "f1",
IsPrimaryKey: false,
Description: "",
DataType: 0,
TypeParams: nil,
IndexParams: nil,
})
assert.Nil(t, ValidatePrimaryKey(&coll))
pf := &schemapb.FieldSchema{
Name: "f2",
IsPrimaryKey: true,
Description: "",
DataType: 0,
TypeParams: nil,
IndexParams: nil,
}
coll.Fields = append(coll.Fields, pf)
assert.NotNil(t, ValidatePrimaryKey(&coll))
coll.AutoID = false
assert.NotNil(t, ValidatePrimaryKey(&coll))
pf.DataType = schemapb.DataType_BOOL
assert.NotNil(t, ValidatePrimaryKey(&coll))
pf.DataType = schemapb.DataType_INT64
assert.Nil(t, ValidatePrimaryKey(&coll))
coll.Fields = append(coll.Fields, &schemapb.FieldSchema{
Name: "",
IsPrimaryKey: true,
Description: "",
DataType: 0,
TypeParams: nil,
IndexParams: nil,
})
assert.NotNil(t, ValidatePrimaryKey(&coll))
}

@ -129,8 +129,8 @@ func TestDataSyncService_Start(t *testing.T) {
for i := 0; i < msgLength; i++ {
var msg msgstream.TsMsg = &msgstream.InsertMsg{
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{
int32(i), int32(i),
HashValues: []uint32{
uint32(i), uint32(i),
},
},
InsertRequest: internalPb.InsertRequest{
@ -163,7 +163,7 @@ func TestDataSyncService_Start(t *testing.T) {
baseMsg := msgstream.BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{0},
HashValues: []uint32{0},
}
timeTickResult := internalPb.TimeTickMsg{
MsgType: internalPb.MsgType_kTimeTick,

@ -297,7 +297,7 @@ func (ss *searchService) search(msg msgstream.TsMsg) error {
Hits: hits,
}
searchResultMsg := &msgstream.SearchResultMsg{
BaseMsg: msgstream.BaseMsg{HashValues: []int32{0}},
BaseMsg: msgstream.BaseMsg{HashValues: []uint32{0}},
SearchResult: results,
}
err = ss.publishSearchResult(searchResultMsg)
@ -342,7 +342,7 @@ func (ss *searchService) publishFailedSearchResult(msg msgstream.TsMsg) error {
}
tsMsg := &msgstream.SearchResultMsg{
BaseMsg: msgstream.BaseMsg{HashValues: []int32{0}},
BaseMsg: msgstream.BaseMsg{HashValues: []uint32{0}},
SearchResult: results,
}
msgPack.Msgs = append(msgPack.Msgs, tsMsg)

@ -143,7 +143,7 @@ func TestSearch_Search(t *testing.T) {
searchMsg := &msgstream.SearchMsg{
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{0},
HashValues: []uint32{0},
},
SearchRequest: internalpb.SearchRequest{
MsgType: internalpb.MsgType_kSearch,
@ -188,8 +188,8 @@ func TestSearch_Search(t *testing.T) {
var msg msgstream.TsMsg = &msgstream.InsertMsg{
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{
int32(i),
HashValues: []uint32{
uint32(i),
},
},
InsertRequest: internalpb.InsertRequest{
@ -221,7 +221,7 @@ func TestSearch_Search(t *testing.T) {
baseMsg := msgstream.BaseMsg{
BeginTimestamp: 0,
EndTimestamp: 0,
HashValues: []int32{0},
HashValues: []uint32{0},
}
timeTickResult := internalpb.TimeTickMsg{
MsgType: internalpb.MsgType_kTimeTick,

@ -74,7 +74,7 @@ func (sService *statsService) sendSegmentStatistic() {
func (sService *statsService) publicStatistic(statistic *internalpb.QueryNodeSegStats) {
var msg msgstream.TsMsg = &msgstream.QueryNodeSegStatsMsg{
BaseMsg: msgstream.BaseMsg{
HashValues: []int32{0},
HashValues: []uint32{0},
},
QueryNodeSegStats: *statistic,
}