Support Bulk Load in Milvus 2.2 (#18982)

Signed-off-by: Yuchen Gao <yuchen.gao@zilliz.com>

Ten Thousand Leaves 2022-09-26 18:06:54 +08:00 committed by GitHub
parent 0f1c2ea26a
commit b30c9d4f60
78 changed files with 5375 additions and 2781 deletions

common.pb.go

@@ -73,6 +73,9 @@ const (
 	ErrorCode_SegmentNotFound ErrorCode = 47
 	ErrorCode_ForceDeny       ErrorCode = 48
 	ErrorCode_RateLimit       ErrorCode = 49
+	// Service availability.
+	// NA: Not Available.
+	ErrorCode_DataCoordNA ErrorCode = 100
 	// internal error code.
 	ErrorCode_DDRequestRace ErrorCode = 1000
 )
@@ -127,6 +130,7 @@ var ErrorCode_name = map[int32]string{
 	47:   "SegmentNotFound",
 	48:   "ForceDeny",
 	49:   "RateLimit",
+	100:  "DataCoordNA",
 	1000: "DDRequestRace",
 }
@@ -180,6 +184,7 @@ var ErrorCode_value = map[string]int32{
 	"SegmentNotFound": 47,
 	"ForceDeny":       48,
 	"RateLimit":       49,
+	"DataCoordNA":     100,
 	"DDRequestRace":   1000,
 }
@@ -658,36 +663,30 @@ func (ConsistencyLevel) EnumDescriptor() ([]byte, []int) {
 type ImportState int32
 
 const (
-	ImportState_ImportPending      ImportState = 0
-	ImportState_ImportFailed       ImportState = 1
-	ImportState_ImportStarted      ImportState = 2
-	ImportState_ImportDownloaded   ImportState = 3
-	ImportState_ImportParsed       ImportState = 4
-	ImportState_ImportPersisted    ImportState = 5
-	ImportState_ImportCompleted    ImportState = 6
-	ImportState_ImportAllocSegment ImportState = 10
+	ImportState_ImportPending          ImportState = 0
+	ImportState_ImportFailed           ImportState = 1
+	ImportState_ImportStarted          ImportState = 2
+	ImportState_ImportPersisted        ImportState = 5
+	ImportState_ImportCompleted        ImportState = 6
+	ImportState_ImportFailedAndCleaned ImportState = 7
 )
 
 var ImportState_name = map[int32]string{
-	0:  "ImportPending",
-	1:  "ImportFailed",
-	2:  "ImportStarted",
-	3:  "ImportDownloaded",
-	4:  "ImportParsed",
-	5:  "ImportPersisted",
-	6:  "ImportCompleted",
-	10: "ImportAllocSegment",
+	0: "ImportPending",
+	1: "ImportFailed",
+	2: "ImportStarted",
+	5: "ImportPersisted",
+	6: "ImportCompleted",
+	7: "ImportFailedAndCleaned",
 }
 
 var ImportState_value = map[string]int32{
-	"ImportPending":      0,
-	"ImportFailed":       1,
-	"ImportStarted":      2,
-	"ImportDownloaded":   3,
-	"ImportParsed":       4,
-	"ImportPersisted":    5,
-	"ImportCompleted":    6,
-	"ImportAllocSegment": 10,
+	"ImportPending":          0,
+	"ImportFailed":           1,
+	"ImportStarted":          2,
+	"ImportPersisted":        5,
+	"ImportCompleted":        6,
+	"ImportFailedAndCleaned": 7,
 }
 
 func (x ImportState) String() string {
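The regenerated enum reflects the reworked state machine: the intermediate ImportDownloaded, ImportParsed, and ImportAllocSegment states are dropped, and ImportFailedAndCleaned (7) is added as a second terminal failure state. A minimal polling sketch under that reading; getState is a hypothetical stand-in for a GetImportState call, and the import path is assumed:

package main

import (
	"fmt"
	"time"

	"github.com/milvus-io/milvus/internal/proto/commonpb" // assumed import path
)

// waitForImport polls until the task reaches a terminal state:
// Completed on success, Failed or FailedAndCleaned on error.
func waitForImport(getState func() (commonpb.ImportState, error)) error {
	for {
		state, err := getState()
		if err != nil {
			return err
		}
		switch state {
		case commonpb.ImportState_ImportCompleted:
			return nil
		case commonpb.ImportState_ImportFailed,
			commonpb.ImportState_ImportFailedAndCleaned:
			return fmt.Errorf("import ended in state %s", state.String())
		default:
			// Pending, Started, or Persisted: still in flight.
			time.Sleep(2 * time.Second)
		}
	}
}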
@@ -1394,164 +1393,164 @@ func init() {
 func init() { proto.RegisterFile("common.proto", fileDescriptor_555bd8c177793206) }
 
 var fileDescriptor_555bd8c177793206 = []byte{
-	// 2532 bytes of a gzipped FileDescriptorProto
-	[... gzipped descriptor bytes elided ...]
+	// 2531 bytes of a gzipped FileDescriptorProto
+	[... gzipped descriptor bytes elided ...]
 }

milvus.pb.go

@@ -5269,8 +5269,9 @@ type GetImportStateResponse struct {
 	IdList               []int64                  `protobuf:"varint,4,rep,packed,name=id_list,json=idList,proto3" json:"id_list,omitempty"`
 	Infos                []*commonpb.KeyValuePair `protobuf:"bytes,5,rep,name=infos,proto3" json:"infos,omitempty"`
 	Id                   int64                    `protobuf:"varint,6,opt,name=id,proto3" json:"id,omitempty"`
-	DataQueryable        bool                     `protobuf:"varint,7,opt,name=data_queryable,json=dataQueryable,proto3" json:"data_queryable,omitempty"`
-	DataIndexed          bool                     `protobuf:"varint,8,opt,name=data_indexed,json=dataIndexed,proto3" json:"data_indexed,omitempty"`
+	CollectionId         int64                    `protobuf:"varint,7,opt,name=collection_id,json=collectionId,proto3" json:"collection_id,omitempty"`
+	SegmentIds           []int64                  `protobuf:"varint,8,rep,packed,name=segment_ids,json=segmentIds,proto3" json:"segment_ids,omitempty"`
+	CreateTs             int64                    `protobuf:"varint,9,opt,name=create_ts,json=createTs,proto3" json:"create_ts,omitempty"`
 	XXX_NoUnkeyedLiteral struct{}                 `json:"-"`
 	XXX_unrecognized     []byte                   `json:"-"`
 	XXX_sizecache        int32                    `json:"-"`
@@ -5343,21 +5344,30 @@ func (m *GetImportStateResponse) GetId() int64 {
 	return 0
 }
 
-func (m *GetImportStateResponse) GetDataQueryable() bool {
+func (m *GetImportStateResponse) GetCollectionId() int64 {
 	if m != nil {
-		return m.DataQueryable
+		return m.CollectionId
 	}
-	return false
+	return 0
 }
 
-func (m *GetImportStateResponse) GetDataIndexed() bool {
+func (m *GetImportStateResponse) GetSegmentIds() []int64 {
 	if m != nil {
-		return m.DataIndexed
+		return m.SegmentIds
 	}
-	return false
+	return nil
 }
 
+func (m *GetImportStateResponse) GetCreateTs() int64 {
+	if m != nil {
+		return m.CreateTs
+	}
+	return 0
+}
+
 type ListImportTasksRequest struct {
+	CollectionName       string   `protobuf:"bytes,1,opt,name=collection_name,json=collectionName,proto3" json:"collection_name,omitempty"`
+	Limit                int64    `protobuf:"varint,2,opt,name=limit,proto3" json:"limit,omitempty"`
 	XXX_NoUnkeyedLiteral struct{} `json:"-"`
 	XXX_unrecognized     []byte   `json:"-"`
 	XXX_sizecache        int32    `json:"-"`
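The response now reports progress in terms of the owning collection and the segments produced, rather than the old data_queryable/data_indexed booleans. A minimal sketch of consuming the new fields, assuming the 2.2-era import path github.com/milvus-io/milvus/internal/proto/milvuspb; the unit of create_ts is not stated in this hunk, so it is printed raw:

package main

import (
	"fmt"

	"github.com/milvus-io/milvus/internal/proto/milvuspb" // assumed import path
)

// summarizeImport prints the fields that replaced the old booleans.
func summarizeImport(resp *milvuspb.GetImportStateResponse) {
	fmt.Printf("task %d on collection %d\n", resp.GetId(), resp.GetCollectionId())
	fmt.Printf("create_ts: %d\n", resp.GetCreateTs()) // timestamp unit unspecified in this diff
	fmt.Printf("segments (%d): %v\n", len(resp.GetSegmentIds()), resp.GetSegmentIds())
}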
@@ -5388,6 +5398,20 @@ func (m *ListImportTasksRequest) XXX_DiscardUnknown() {
 
 var xxx_messageInfo_ListImportTasksRequest proto.InternalMessageInfo
 
+func (m *ListImportTasksRequest) GetCollectionName() string {
+	if m != nil {
+		return m.CollectionName
+	}
+	return ""
+}
+
+func (m *ListImportTasksRequest) GetLimit() int64 {
+	if m != nil {
+		return m.Limit
+	}
+	return 0
+}
+
 type ListImportTasksResponse struct {
 	Status               *commonpb.Status          `protobuf:"bytes,1,opt,name=status,proto3" json:"status,omitempty"`
 	Tasks                []*GetImportStateResponse `protobuf:"bytes,2,rep,name=tasks,proto3" json:"tasks,omitempty"`
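ListImportTasks pairs the filtered request with a response that embeds one GetImportStateResponse per task. A minimal sketch of calling it through the generated gRPC stub, assuming the standard MilvusServiceClient interface name and method signature; the collection name and limit are illustrative values:

package main

import (
	"context"
	"fmt"

	"github.com/milvus-io/milvus/internal/proto/milvuspb" // assumed import path
)

// listImportTasks fetches up to 100 import tasks for one collection and prints
// a one-line summary per task, using only fields shown in this diff.
func listImportTasks(ctx context.Context, c milvuspb.MilvusServiceClient) error {
	resp, err := c.ListImportTasks(ctx, &milvuspb.ListImportTasksRequest{
		CollectionName: "my_collection", // filter; empty is assumed to mean all collections
		Limit:          100,             // cap on the number of tasks returned
	})
	if err != nil {
		return err
	}
	for _, task := range resp.GetTasks() {
		fmt.Printf("task %d: collection=%d segments=%d\n",
			task.GetId(), task.GetCollectionId(), len(task.GetSegmentIds()))
	}
	return nil
}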
@@ -7083,337 +7107,338 @@ func init() {
 func init() { proto.RegisterFile("milvus.proto", fileDescriptor_02345ba45cc0e303) }
 
 var fileDescriptor_02345ba45cc0e303 = []byte{
-	// 5274 bytes of a gzipped FileDescriptorProto
-	[... gzipped descriptor bytes elided ...]
0xe8, 0x5e, 0x8d, 0xf1, 0x24, 0xad, 0x70, 0x74, 0x11, 0xfe, 0x6a, 0xc2, 0xc6, 0x2e, 0xa5, 0x63,
0x21, 0x37, 0xb2, 0x31, 0xcd, 0x4a, 0xf6, 0x6a, 0x31, 0xb4, 0x26, 0xdc, 0xab, 0x45, 0x2c, 0x30,
0x6a, 0xaf, 0x26, 0x22, 0x37, 0x60, 0x80, 0x7f, 0x56, 0x60, 0x91, 0xdb, 0xb4, 0x88, 0xb7, 0x8e,
0x81, 0x4c, 0xe8, 0x6b, 0xdc, 0xf6, 0x66, 0xa9, 0xed, 0x7d, 0x6a, 0x94, 0xed, 0x8d, 0xf0, 0x3c,
0xc0, 0xf8, 0x9e, 0x87, 0xf2, 0x6d, 0xda, 0xf0, 0x8d, 0xfb, 0x01, 0x6a, 0x42, 0x71, 0x17, 0x7b,
0xbe, 0xe5, 0x3a, 0x5c, 0xc4, 0xc3, 0xcf, 0x0b, 0xe7, 0xa0, 0x14, 0xde, 0x30, 0x44, 0x45, 0xc8,
0x5e, 0xb5, 0xed, 0xc6, 0x14, 0xaa, 0x42, 0x69, 0x9d, 0x5f, 0xa3, 0x6b, 0x28, 0x17, 0x5e, 0x87,
0x59, 0x89, 0xdd, 0x47, 0x33, 0x50, 0xbb, 0x6a, 0x52, 0xef, 0xf2, 0x1d, 0x97, 0x00, 0x1b, 0x53,
0x68, 0x01, 0x90, 0x86, 0xbb, 0xee, 0x2e, 0xad, 0xf8, 0xa6, 0xe7, 0x76, 0x29, 0x5c, 0xb9, 0xf0,
0x34, 0xcc, 0xc9, 0xb0, 0x47, 0x65, 0xc8, 0x53, 0x6a, 0x34, 0xa6, 0x10, 0x40, 0x41, 0xc3, 0xbb,
0xee, 0x3d, 0xdc, 0x50, 0x56, 0xff, 0xe2, 0x02, 0xd4, 0x18, 0xee, 0xfc, 0x3e, 0x3c, 0xd2, 0xa1,
0x91, 0x7c, 0x12, 0x0c, 0x7d, 0x45, 0x7e, 0x62, 0x2a, 0x7f, 0x39, 0xac, 0x35, 0x8a, 0x99, 0xd4,
0x29, 0xf4, 0x3e, 0xd4, 0xe3, 0x8f, 0x68, 0x21, 0x79, 0xf8, 0x58, 0xfa, 0xd2, 0xd6, 0x41, 0x9d,
0xeb, 0x50, 0x8b, 0xbd, 0x7f, 0x85, 0xe4, 0x0b, 0x2c, 0x7b, 0x23, 0xab, 0x25, 0xd7, 0x26, 0xe2,
0x1b, 0x55, 0x0c, 0xfb, 0xf8, 0x83, 0x34, 0x29, 0xd8, 0x4b, 0x5f, 0xad, 0x39, 0x08, 0x7b, 0x03,
0x66, 0x86, 0xde, 0x8b, 0x41, 0x4f, 0xa7, 0x1c, 0x88, 0xc8, 0xdf, 0x95, 0x39, 0x68, 0x88, 0x3d,
0x40, 0xc3, 0x6f, 0x3a, 0xa1, 0x15, 0xf9, 0x0a, 0xa4, 0xbd, 0x72, 0xd5, 0xba, 0x34, 0x76, 0xfd,
0x88, 0x70, 0xdf, 0x56, 0x60, 0x31, 0xe5, 0x69, 0x11, 0x74, 0x39, 0xed, 0x74, 0x6c, 0xc4, 0x43,
0x29, 0xad, 0xe7, 0x0e, 0xd7, 0x28, 0x42, 0xc4, 0x81, 0xe9, 0xc4, 0xcb, 0x1a, 0xe8, 0x62, 0xea,
0x75, 0xe0, 0xe1, 0x67, 0x47, 0x5a, 0x5f, 0x19, 0xaf, 0x72, 0x34, 0xde, 0x07, 0x30, 0x9d, 0x78,
0x56, 0x22, 0x65, 0x3c, 0xf9, 0xe3, 0x13, 0x07, 0x2d, 0xe8, 0xd7, 0xa1, 0x16, 0x7b, 0xff, 0x21,
0x85, 0xe3, 0x65, 0x6f, 0x44, 0x1c, 0xd4, 0xf5, 0x07, 0x50, 0x15, 0x9f, 0x69, 0x40, 0xcb, 0x69,
0xb2, 0x34, 0xd4, 0xf1, 0x61, 0x44, 0x69, 0x70, 0xbd, 0x7a, 0x84, 0x28, 0x0d, 0xdd, 0x48, 0x1f,
0x5f, 0x94, 0x84, 0xfe, 0x47, 0x8a, 0xd2, 0xa1, 0x87, 0xf8, 0xa6, 0x42, 0x8f, 0xe7, 0x25, 0xd7,
0xf7, 0xd1, 0x6a, 0x1a, 0x6f, 0xa6, 0x3f, 0x54, 0xd0, 0xba, 0x7c, 0xa8, 0x36, 0x11, 0x15, 0xef,
0x41, 0x3d, 0x7e, 0x49, 0x3d, 0x85, 0x8a, 0xd2, 0x7b, 0xfd, 0xad, 0x8b, 0x63, 0xd5, 0x8d, 0x06,
0x7b, 0x17, 0x2a, 0xc2, 0x2b, 0x9f, 0xe8, 0xc9, 0x11, 0x7c, 0x2c, 0x3e, 0x79, 0x79, 0x10, 0x25,
0xdf, 0x86, 0x72, 0xf4, 0x38, 0x27, 0x3a, 0x9f, 0xca, 0xbf, 0x87, 0xe9, 0x72, 0x13, 0x60, 0xf0,
0xf2, 0x26, 0x7a, 0x42, 0xda, 0xe7, 0xd0, 0xd3, 0x9c, 0x07, 0x75, 0x1a, 0x4d, 0x9f, 0xdd, 0xe2,
0x19, 0x35, 0x7d, 0xf1, 0x22, 0xda, 0x41, 0xdd, 0xee, 0x40, 0x2d, 0x76, 0xa1, 0x34, 0x4d, 0x84,
0x25, 0x17, 0x7e, 0x5b, 0x17, 0xc6, 0xa9, 0x1a, 0xad, 0xdf, 0x0e, 0xd4, 0x62, 0x97, 0xf9, 0x52,
0x46, 0x92, 0x5d, 0x62, 0x4c, 0x19, 0x49, 0x7a, 0x37, 0x50, 0x9d, 0x42, 0xdf, 0x10, 0xee, 0x0d,
0xc6, 0x2e, 0x69, 0xa2, 0x67, 0x47, 0xf6, 0x23, 0xbb, 0xac, 0xda, 0x5a, 0x3d, 0x4c, 0x93, 0x08,
0x05, 0xce, 0x55, 0x8c, 0xa4, 0xe9, 0x5c, 0x75, 0x98, 0x95, 0xda, 0x84, 0x02, 0xbb, 0x95, 0x87,
0xd4, 0x94, 0xab, 0xb9, 0xc2, 0x95, 0xbd, 0xd6, 0x63, 0xd2, 0x3a, 0xf1, 0x7b, 0x6a, 0xac, 0x53,
0x76, 0x52, 0x9a, 0xd2, 0x69, 0xec, 0x26, 0xd6, 0xb8, 0x9d, 0x6a, 0x50, 0x60, 0x57, 0x43, 0x52,
0x3a, 0x8d, 0xdd, 0x7b, 0x6a, 0x8d, 0xae, 0xc3, 0xf6, 0xbb, 0x53, 0x68, 0x03, 0xf2, 0x34, 0xfc,
0x8c, 0xce, 0x8d, 0xba, 0x6e, 0x30, 0xaa, 0xc7, 0xd8, 0x8d, 0x04, 0x75, 0x0a, 0xdd, 0x85, 0x3c,
0x0d, 0xe0, 0xa5, 0xf4, 0x28, 0xe6, 0xe1, 0xb7, 0x46, 0x56, 0x09, 0x51, 0x34, 0xa1, 0x2a, 0x66,
0xcb, 0xa6, 0x98, 0x2c, 0x49, 0x3e, 0x71, 0x6b, 0x9c, 0x9a, 0xe1, 0x28, 0x4c, 0x8c, 0x06, 0xa1,
0xf8, 0x74, 0x31, 0x1a, 0x0a, 0xf3, 0xa7, 0x8b, 0xd1, 0x70, 0x64, 0x5f, 0x9d, 0x42, 0xbf, 0xa8,
0x40, 0x33, 0x2d, 0x85, 0x13, 0xa5, 0x7a, 0x40, 0xa3, 0xf2, 0x50, 0x5b, 0xcf, 0x1f, 0xb2, 0x55,
0x84, 0xcb, 0xc7, 0x34, 0xee, 0x37, 0x94, 0xb4, 0x79, 0x29, 0xad, 0xbf, 0x94, 0x44, 0xc4, 0xd6,
0x33, 0xe3, 0x37, 0x88, 0xc6, 0xde, 0x82, 0x8a, 0x10, 0x73, 0x4c, 0xd1, 0xbc, 0xc3, 0xc1, 0xd2,
0x94, 0x55, 0x95, 0x84, 0x2f, 0x19, 0x7b, 0xd3, 0x4c, 0xbf, 0x14, 0x66, 0x14, 0x13, 0x07, 0x53,
0xd8, 0x3b, 0x96, 0x28, 0xa8, 0x4e, 0x21, 0x0c, 0x55, 0x31, 0xed, 0x2f, 0x85, 0x1b, 0x25, 0x19,
0x83, 0xad, 0xa7, 0xc6, 0xa8, 0x19, 0x0d, 0xa3, 0x03, 0x0c, 0xd2, 0xee, 0x52, 0x6c, 0xdd, 0x50,
0xe6, 0x5f, 0xeb, 0xc9, 0x03, 0xeb, 0x89, 0x66, 0x5f, 0x48, 0xa4, 0x4b, 0xa1, 0xfe, 0x70, 0xaa,
0xdd, 0x18, 0x7b, 0x91, 0xe1, 0xd4, 0xac, 0x94, 0xbd, 0x48, 0x6a, 0x16, 0x58, 0xeb, 0xd2, 0xd8,
0xf5, 0xa3, 0xf9, 0x7c, 0x04, 0x8d, 0x64, 0x2a, 0x5b, 0xca, 0x1e, 0x37, 0x25, 0xb3, 0xae, 0xf5,
0xf4, 0x98, 0xb5, 0x45, 0x7b, 0x78, 0x6a, 0x18, 0xa7, 0x9f, 0xb1, 0x82, 0x1d, 0x9a, 0x21, 0x35,
0xce, 0xac, 0xc5, 0x64, 0xac, 0x71, 0x66, 0x1d, 0x4b, 0xbd, 0xe2, 0xc6, 0x8b, 0x66, 0x1b, 0xa4,
0x19, 0x2f, 0x31, 0xe9, 0x27, 0xc5, 0xce, 0xc4, 0x33, 0x63, 0x98, 0xfb, 0x19, 0xcf, 0x62, 0x40,
0x17, 0xc6, 0x4a, 0x75, 0x18, 0xe5, 0x7e, 0xca, 0xd3, 0x22, 0xd8, 0xd6, 0x2d, 0x91, 0xa4, 0x91,
0xb2, 0x95, 0x92, 0x67, 0x79, 0xa4, 0x6c, 0xdd, 0x52, 0xf2, 0x3e, 0xa8, 0x60, 0x35, 0x92, 0x11,
0xef, 0xd1, 0x67, 0x21, 0xc9, 0x50, 0xe7, 0xc1, 0xc7, 0x15, 0x8d, 0x64, 0x28, 0x39, 0x65, 0x80,
0x94, 0x88, 0xf3, 0x18, 0x03, 0x24, 0xa3, 0xb0, 0x29, 0x03, 0xa4, 0x04, 0x6b, 0xc7, 0xf0, 0x5d,
0x63, 0xd1, 0xcf, 0x14, 0x53, 0x28, 0x8b, 0x90, 0xa6, 0x98, 0x42, 0x69, 0xe0, 0x96, 0x79, 0xf4,
0x83, 0x20, 0x66, 0x8a, 0x96, 0x1b, 0x8a, 0x72, 0x1e, 0x84, 0xfe, 0x5d, 0x28, 0x85, 0x51, 0x48,
0xf4, 0x78, 0xaa, 0x8b, 0x78, 0x88, 0x0e, 0x3f, 0x80, 0xe9, 0xc4, 0x09, 0x5e, 0x0a, 0x8b, 0xca,
0xa3, 0x90, 0x07, 0xaf, 0x27, 0x0c, 0xe2, 0x55, 0x29, 0x44, 0x18, 0x8a, 0x03, 0xa6, 0xa8, 0xfa,
0xe1, 0xc0, 0x97, 0x38, 0x00, 0x41, 0x6c, 0xe4, 0x00, 0x42, 0xa8, 0x6a, 0xe4, 0x00, 0x62, 0x90,
0x86, 0x71, 0x64, 0xf2, 0x80, 0x32, 0x85, 0x23, 0x53, 0x4e, 0x8b, 0x0f, 0x22, 0xd1, 0x16, 0x54,
0x84, 0x23, 0x6f, 0x34, 0x0a, 0x35, 0xf1, 0xac, 0x3e, 0xc5, 0x55, 0x90, 0x9c, 0x9e, 0xab, 0x53,
0xab, 0x7d, 0xa8, 0x6e, 0x78, 0xee, 0xfd, 0xf0, 0x11, 0xd1, 0x2f, 0xc8, 0xd0, 0x5f, 0x69, 0x43,
0x9d, 0x55, 0xd0, 0xf1, 0xfd, 0x40, 0x77, 0xb7, 0x3e, 0x44, 0xa7, 0x57, 0xd8, 0xbf, 0xe6, 0x58,
0x09, 0xff, 0x35, 0xc7, 0xca, 0x9b, 0x96, 0x8d, 0xef, 0xf2, 0x2c, 0xc8, 0x7f, 0x2f, 0x8e, 0xb8,
0xb9, 0x17, 0x1d, 0x59, 0x6b, 0xfc, 0xbf, 0x83, 0xbc, 0x71, 0x3f, 0xb8, 0xbb, 0xf5, 0xe1, 0xb5,
0xf7, 0x3e, 0x7b, 0xb5, 0x08, 0xf9, 0xd5, 0x95, 0x67, 0x57, 0x9e, 0x81, 0xba, 0x15, 0x55, 0xef,
0x78, 0xbd, 0xf6, 0xb5, 0x0a, 0x6b, 0xb4, 0x41, 0xfa, 0xd9, 0x50, 0x7e, 0x76, 0xb9, 0x63, 0x05,
0x3b, 0xfd, 0x2d, 0xb2, 0x04, 0x97, 0x58, 0xb5, 0xa7, 0x2d, 0x97, 0xff, 0xba, 0x64, 0xf4, 0x2c,
0xfe, 0xb3, 0xb7, 0xf5, 0x7b, 0x8a, 0xb2, 0x55, 0xa0, 0xa3, 0x5f, 0xfe, 0xff, 0x00, 0x00, 0x00,
0xff, 0xff, 0xdf, 0x0f, 0x9c, 0x21, 0x8c, 0x64, 0x00, 0x00,
// 5293 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xdc, 0x7d, 0x4b, 0x6c, 0x1c, 0x47,
0x7a, 0x30, 0x7b, 0xde, 0xf3, 0xcd, 0x83, 0xc3, 0xe2, 0x6b, 0x3c, 0x92, 0x2c, 0xaa, 0x6d, 0xd9,
0xb4, 0xb4, 0xa6, 0x6c, 0xca, 0x8f, 0xb5, 0xec, 0xb5, 0x2d, 0x89, 0xb6, 0x44, 0x58, 0x0f, 0xba,
0x29, 0x7b, 0xb1, 0xbf, 0x7f, 0xa3, 0xd1, 0x9c, 0x2e, 0x0e, 0xdb, 0xea, 0xe9, 0x1e, 0x75, 0xf7,
0x90, 0xa2, 0x73, 0x59, 0x60, 0xb3, 0x8b, 0x0d, 0xb2, 0x59, 0x23, 0xcf, 0x45, 0x0e, 0x79, 0x20,
0xd8, 0x1c, 0x82, 0x6c, 0x82, 0x38, 0x09, 0x10, 0x60, 0x73, 0xc8, 0xdd, 0x48, 0x90, 0xec, 0x21,
0x48, 0x16, 0xc9, 0x71, 0x91, 0x20, 0x87, 0x00, 0x39, 0xe4, 0x96, 0x04, 0x09, 0xea, 0xd1, 0x3d,
0xd5, 0x3d, 0xd5, 0xc3, 0xa1, 0xc6, 0x32, 0x29, 0xf3, 0x34, 0xfd, 0xd5, 0x57, 0x55, 0x5f, 0x7d,
0xf5, 0xbd, 0xaa, 0xea, 0xab, 0x22, 0x54, 0xbb, 0x96, 0xbd, 0xdb, 0xf7, 0x57, 0x7a, 0x9e, 0x1b,
0xb8, 0x68, 0x56, 0xfc, 0x5a, 0x61, 0x1f, 0xad, 0x6a, 0xdb, 0xed, 0x76, 0x5d, 0x87, 0x01, 0x5b,
0x55, 0xbf, 0xbd, 0x83, 0xbb, 0x06, 0xff, 0x5a, 0xea, 0xb8, 0x6e, 0xc7, 0xc6, 0x17, 0xe8, 0xd7,
0x56, 0x7f, 0xfb, 0x82, 0x89, 0xfd, 0xb6, 0x67, 0xf5, 0x02, 0xd7, 0x63, 0x18, 0xea, 0x6f, 0x2b,
0x80, 0xae, 0x7a, 0xd8, 0x08, 0xf0, 0x65, 0xdb, 0x32, 0x7c, 0x0d, 0xdf, 0xeb, 0x63, 0x3f, 0x40,
0xcf, 0x41, 0x6e, 0xcb, 0xf0, 0x71, 0x53, 0x59, 0x52, 0x96, 0x2b, 0xab, 0x27, 0x57, 0x62, 0x1d,
0xf3, 0x0e, 0x6f, 0xfa, 0x9d, 0x2b, 0x86, 0x8f, 0x35, 0x8a, 0x89, 0x16, 0xa1, 0x68, 0x6e, 0xe9,
0x8e, 0xd1, 0xc5, 0xcd, 0xcc, 0x92, 0xb2, 0x5c, 0xd6, 0x0a, 0xe6, 0xd6, 0x2d, 0xa3, 0x8b, 0xd1,
0xd3, 0x30, 0xdd, 0x76, 0x6d, 0x1b, 0xb7, 0x03, 0xcb, 0x75, 0x18, 0x42, 0x96, 0x22, 0xd4, 0x07,
0x60, 0x8a, 0x38, 0x07, 0x79, 0x83, 0xd0, 0xd0, 0xcc, 0xd1, 0x62, 0xf6, 0xa1, 0xfa, 0xd0, 0x58,
0xf3, 0xdc, 0xde, 0xc3, 0xa2, 0x2e, 0xea, 0x34, 0x2b, 0x76, 0xfa, 0x5b, 0x0a, 0xcc, 0x5c, 0xb6,
0x03, 0xec, 0x1d, 0x53, 0xa6, 0xfc, 0x28, 0x03, 0x8b, 0x6c, 0xd6, 0xae, 0x46, 0xe8, 0x47, 0x49,
0xe5, 0x02, 0x14, 0x98, 0xdc, 0x51, 0x32, 0xab, 0x1a, 0xff, 0x42, 0xa7, 0x00, 0xfc, 0x1d, 0xc3,
0x33, 0x7d, 0xdd, 0xe9, 0x77, 0x9b, 0xf9, 0x25, 0x65, 0x39, 0xaf, 0x95, 0x19, 0xe4, 0x56, 0xbf,
0x8b, 0x34, 0x98, 0x69, 0xbb, 0x8e, 0x6f, 0xf9, 0x01, 0x76, 0xda, 0xfb, 0xba, 0x8d, 0x77, 0xb1,
0xdd, 0x2c, 0x2c, 0x29, 0xcb, 0xf5, 0xd5, 0xb3, 0x52, 0xba, 0xaf, 0x0e, 0xb0, 0x6f, 0x10, 0x64,
0xad, 0xd1, 0x4e, 0x40, 0x2e, 0xa1, 0xcf, 0x5e, 0x9f, 0x2e, 0x29, 0x0d, 0xa5, 0xf9, 0xbf, 0xe1,
0x9f, 0xa2, 0xfe, 0x8e, 0x02, 0xf3, 0x44, 0x88, 0x8e, 0x05, 0xb3, 0x42, 0x0a, 0x33, 0x22, 0x85,
0x7f, 0xa0, 0xc0, 0xdc, 0x75, 0xc3, 0x3f, 0x1e, 0xb3, 0x79, 0x0a, 0x20, 0xb0, 0xba, 0x58, 0xf7,
0x03, 0xa3, 0xdb, 0xa3, 0x33, 0x9a, 0xd3, 0xca, 0x04, 0xb2, 0x49, 0x00, 0xea, 0x37, 0xa0, 0x7a,
0xc5, 0x75, 0x6d, 0x0d, 0xfb, 0x3d, 0xd7, 0xf1, 0x31, 0xba, 0x08, 0x05, 0x3f, 0x30, 0x82, 0xbe,
0xcf, 0x89, 0x3c, 0x21, 0x25, 0x72, 0x93, 0xa2, 0x68, 0x1c, 0x95, 0xc8, 0xf5, 0xae, 0x61, 0xf7,
0x19, 0x8d, 0x25, 0x8d, 0x7d, 0xa8, 0x1f, 0x40, 0x7d, 0x33, 0xf0, 0x2c, 0xa7, 0xf3, 0x39, 0x36,
0x5e, 0x0e, 0x1b, 0xff, 0x17, 0x05, 0x1e, 0x5b, 0xa3, 0xf6, 0x6f, 0xeb, 0x98, 0xa8, 0x8d, 0x0a,
0xd5, 0x01, 0x64, 0x7d, 0x8d, 0xb2, 0x3a, 0xab, 0xc5, 0x60, 0x89, 0xc9, 0xc8, 0x27, 0x26, 0x23,
0x14, 0xa6, 0xac, 0x28, 0x4c, 0xdf, 0xcc, 0x43, 0x4b, 0x36, 0xd0, 0x49, 0x58, 0xfa, 0xb5, 0x48,
0xc3, 0x33, 0xb4, 0x52, 0x42, 0x3f, 0xb9, 0xd7, 0x19, 0xf4, 0xb6, 0x49, 0x01, 0x91, 0x21, 0x48,
0x8e, 0x34, 0x2b, 0x19, 0xe9, 0x2a, 0xcc, 0xef, 0x5a, 0x5e, 0xd0, 0x37, 0x6c, 0xbd, 0xbd, 0x63,
0x38, 0x0e, 0xb6, 0x29, 0xef, 0x88, 0xe9, 0xcb, 0x2e, 0x97, 0xb5, 0x59, 0x5e, 0x78, 0x95, 0x95,
0x11, 0x06, 0xfa, 0xe8, 0x05, 0x58, 0xe8, 0xed, 0xec, 0xfb, 0x56, 0x7b, 0xa8, 0x52, 0x9e, 0x56,
0x9a, 0x0b, 0x4b, 0x63, 0xb5, 0xce, 0xc3, 0x4c, 0x9b, 0x5a, 0x4f, 0x53, 0x27, 0x9c, 0x64, 0xac,
0x2d, 0x50, 0xd6, 0x36, 0x78, 0xc1, 0x9d, 0x10, 0x4e, 0xc8, 0x0a, 0x91, 0xfb, 0x41, 0x5b, 0xa8,
0x50, 0xa4, 0x15, 0x66, 0x79, 0xe1, 0x7b, 0x41, 0x7b, 0x50, 0x27, 0x6e, 0xf7, 0x4a, 0x49, 0xbb,
0xd7, 0x84, 0x22, 0xb5, 0xe3, 0xd8, 0x6f, 0x96, 0x29, 0x99, 0xe1, 0x27, 0x5a, 0x87, 0x69, 0x3f,
0x30, 0xbc, 0x40, 0xef, 0xb9, 0xbe, 0x45, 0xf8, 0xe2, 0x37, 0x61, 0x29, 0xbb, 0x5c, 0x59, 0x5d,
0x92, 0x4e, 0xd2, 0x3b, 0x78, 0x7f, 0xcd, 0x08, 0x8c, 0x0d, 0xc3, 0xf2, 0xb4, 0x3a, 0xad, 0xb8,
0x11, 0xd6, 0x93, 0x1b, 0xd7, 0xca, 0x44, 0xc6, 0x55, 0x26, 0xd9, 0x55, 0x99, 0x64, 0xab, 0x7f,
0xa9, 0xc0, 0xfc, 0x0d, 0xd7, 0x30, 0x8f, 0x87, 0x9e, 0x9d, 0x85, 0xba, 0x87, 0x7b, 0xb6, 0xd5,
0x36, 0xc8, 0x7c, 0x6c, 0x61, 0x8f, 0x6a, 0x5a, 0x5e, 0xab, 0x71, 0xe8, 0x2d, 0x0a, 0xbc, 0x54,
0xfc, 0xec, 0xf5, 0x5c, 0x23, 0xdf, 0xcc, 0xaa, 0x3f, 0x50, 0xa0, 0xa9, 0x61, 0x1b, 0x1b, 0xfe,
0xf1, 0x30, 0x14, 0x8c, 0xb2, 0x42, 0x33, 0xab, 0xfe, 0xbb, 0x02, 0x73, 0xd7, 0x70, 0x40, 0x94,
0xd3, 0xf2, 0x03, 0xab, 0x7d, 0xa4, 0xb1, 0xc9, 0xd3, 0x30, 0xdd, 0x33, 0xbc, 0xc0, 0x8a, 0xf0,
0x42, 0x55, 0xad, 0x47, 0x60, 0xa6, 0x6f, 0x17, 0x60, 0xb6, 0xd3, 0x37, 0x3c, 0xc3, 0x09, 0x30,
0x16, 0x14, 0x88, 0x19, 0x33, 0x14, 0x15, 0x45, 0xfa, 0xc3, 0xc6, 0x0b, 0xcd, 0xac, 0xfa, 0x6d,
0x05, 0xe6, 0x13, 0xe3, 0x9d, 0xc4, 0x8a, 0xbd, 0x0c, 0x79, 0xf2, 0xcb, 0x6f, 0x66, 0xa8, 0x52,
0x9d, 0x49, 0x53, 0xaa, 0xf7, 0x89, 0xc3, 0xa0, 0x5a, 0xc5, 0xf0, 0x49, 0x40, 0xf8, 0xf8, 0x35,
0x1c, 0x08, 0xf6, 0xed, 0x38, 0xcc, 0xc0, 0x80, 0x4f, 0x9f, 0x28, 0x70, 0x3a, 0x95, 0xbe, 0x23,
0xe1, 0xd8, 0x7f, 0x2a, 0xb0, 0xb0, 0xb9, 0xe3, 0xee, 0x0d, 0x48, 0x7a, 0x18, 0x9c, 0x8a, 0x7b,
0xc7, 0x6c, 0xc2, 0x3b, 0xa2, 0xe7, 0x21, 0x17, 0xec, 0xf7, 0x30, 0x55, 0xf7, 0xfa, 0xea, 0xa9,
0x15, 0xc9, 0xfa, 0x69, 0x85, 0x10, 0x79, 0x67, 0xbf, 0x87, 0x35, 0x8a, 0x8a, 0x9e, 0x81, 0x46,
0x82, 0xf7, 0xa1, 0x2f, 0x99, 0x8e, 0x33, 0xdf, 0x0f, 0x7d, 0x6f, 0x4e, 0xf4, 0xbd, 0xff, 0x91,
0x81, 0xc5, 0xa1, 0x61, 0x4f, 0x32, 0x01, 0x32, 0x7a, 0x32, 0x52, 0x7a, 0x88, 0x99, 0x13, 0x50,
0x2d, 0x93, 0x2c, 0x6a, 0xb2, 0xcb, 0x59, 0xad, 0x26, 0xb8, 0x59, 0xd3, 0x47, 0xcf, 0x02, 0x1a,
0xf2, 0x7e, 0x4c, 0x73, 0x73, 0xda, 0x4c, 0xd2, 0xfd, 0x51, 0x17, 0x2b, 0xf5, 0x7f, 0x8c, 0x2d,
0x39, 0x6d, 0x4e, 0xe2, 0x00, 0x7d, 0xf4, 0x3c, 0xcc, 0x59, 0xce, 0x4d, 0xdc, 0x75, 0xbd, 0x7d,
0xbd, 0x87, 0xbd, 0x36, 0x76, 0x02, 0xa3, 0x83, 0xfd, 0x66, 0x81, 0x52, 0x34, 0x1b, 0x96, 0x6d,
0x0c, 0x8a, 0xd0, 0x4b, 0xb0, 0x78, 0xaf, 0x8f, 0xbd, 0x7d, 0xdd, 0xc7, 0xde, 0xae, 0xd5, 0xc6,
0xba, 0xb1, 0x6b, 0x58, 0xb6, 0xb1, 0x65, 0xe3, 0x66, 0x71, 0x29, 0xbb, 0x5c, 0xd2, 0xe6, 0x69,
0xf1, 0x26, 0x2b, 0xbd, 0x1c, 0x16, 0xaa, 0x7f, 0xa6, 0xc0, 0x02, 0x5b, 0x0c, 0x6d, 0x84, 0x66,
0xe7, 0x88, 0x9d, 0x4d, 0xdc, 0x2a, 0xf2, 0xa5, 0x5b, 0x2d, 0x66, 0x14, 0xd5, 0x4f, 0x15, 0x98,
0x23, 0x6b, 0x92, 0x47, 0x89, 0xe6, 0x3f, 0x51, 0x60, 0xf6, 0xba, 0xe1, 0x3f, 0x4a, 0x24, 0xff,
0x13, 0x0f, 0x44, 0x22, 0x9a, 0x1f, 0x0d, 0x8f, 0x39, 0x1c, 0xb1, 0xe4, 0x25, 0x11, 0x8b, 0xfa,
0x17, 0x83, 0x40, 0xe5, 0xd1, 0x1a, 0xa0, 0xfa, 0x63, 0x05, 0x4e, 0x5d, 0xc3, 0x41, 0x44, 0xf5,
0xf1, 0x88, 0x68, 0xc6, 0x14, 0xaa, 0xef, 0xb3, 0x68, 0x40, 0x4a, 0xfc, 0x91, 0x38, 0xdb, 0x5f,
0xcc, 0xc0, 0x3c, 0xf1, 0x3a, 0xc7, 0x43, 0x08, 0xc6, 0x59, 0xd6, 0x4a, 0x04, 0x25, 0x2f, 0xd5,
0x84, 0xd0, 0x85, 0x17, 0xc6, 0x76, 0xe1, 0xea, 0x9f, 0x66, 0x58, 0xe8, 0x21, 0x72, 0x63, 0x92,
0x69, 0x91, 0xd0, 0x9a, 0x91, 0xd2, 0xaa, 0x42, 0x35, 0x82, 0xac, 0xaf, 0x85, 0xee, 0x37, 0x06,
0x3b, 0xae, 0xde, 0x57, 0xfd, 0x9e, 0x02, 0x0b, 0xe1, 0xa6, 0xc1, 0x26, 0xee, 0x74, 0xb1, 0x13,
0x3c, 0xb8, 0x0c, 0x25, 0x25, 0x20, 0x23, 0x91, 0x80, 0x93, 0x50, 0xf6, 0x59, 0x3f, 0xd1, 0x7e,
0xc0, 0x00, 0xa0, 0xfe, 0x95, 0x02, 0x8b, 0x43, 0xe4, 0x4c, 0x32, 0x89, 0x4d, 0x28, 0x5a, 0x8e,
0x89, 0xef, 0x47, 0xd4, 0x84, 0x9f, 0xa4, 0x64, 0xab, 0x6f, 0xd9, 0x66, 0x44, 0x46, 0xf8, 0x89,
0xce, 0x40, 0x15, 0x3b, 0x24, 0xc6, 0xd0, 0x29, 0x2e, 0x15, 0xe4, 0x92, 0x56, 0x61, 0xb0, 0x75,
0x02, 0x22, 0x95, 0xb7, 0x2d, 0x4c, 0x2b, 0xe7, 0x59, 0x65, 0xfe, 0xa9, 0xfe, 0x92, 0x02, 0xb3,
0x44, 0x0a, 0x39, 0xf5, 0xfe, 0xc3, 0xe5, 0xe6, 0x12, 0x54, 0x04, 0x31, 0xe3, 0x03, 0x11, 0x41,
0xea, 0x5d, 0x98, 0x8b, 0x93, 0x33, 0x09, 0x37, 0x1f, 0x07, 0x88, 0xe6, 0x8a, 0x69, 0x43, 0x56,
0x13, 0x20, 0xea, 0xaf, 0x67, 0xc2, 0x63, 0x05, 0xca, 0xa6, 0x23, 0xde, 0xcd, 0xa4, 0x53, 0x22,
0xda, 0xf3, 0x32, 0x85, 0xd0, 0xe2, 0x35, 0xa8, 0xe2, 0xfb, 0x81, 0x67, 0xe8, 0x3d, 0xc3, 0x33,
0xba, 0x4c, 0xad, 0xc6, 0x32, 0xbd, 0x15, 0x5a, 0x6d, 0x83, 0xd6, 0x22, 0x9d, 0x50, 0x11, 0x61,
0x9d, 0x14, 0x58, 0x27, 0x14, 0x32, 0x58, 0xa7, 0x55, 0x9a, 0x59, 0xf5, 0x27, 0x24, 0xea, 0xe3,
0x62, 0x7d, 0xdc, 0x39, 0x13, 0x1f, 0x53, 0x5e, 0x3a, 0xa6, 0x6a, 0x33, 0xab, 0xfe, 0xbe, 0x02,
0x0d, 0x3a, 0x96, 0x35, 0x7e, 0xb8, 0x64, 0xb9, 0x4e, 0xa2, 0xb2, 0x92, 0xa8, 0x3c, 0x42, 0x1b,
0x5f, 0x81, 0x02, 0x9f, 0x89, 0xec, 0xb8, 0x33, 0xc1, 0x2b, 0x1c, 0x30, 0x1e, 0xf5, 0xf7, 0x14,
0x98, 0x4f, 0xf0, 0x7e, 0x12, 0x15, 0xb8, 0x03, 0x88, 0x8d, 0xd0, 0x1c, 0x0c, 0x3b, 0xf4, 0xdc,
0x67, 0xa5, 0x6e, 0x2a, 0xc9, 0x24, 0x6d, 0xc6, 0x4a, 0x40, 0x7c, 0xf5, 0xa7, 0x0a, 0x9c, 0xbc,
0x86, 0x03, 0x8a, 0x7a, 0x85, 0x98, 0xa1, 0x0d, 0xcf, 0xed, 0x78, 0xd8, 0xf7, 0xbf, 0x04, 0x82,
0xf2, 0x1b, 0x2c, 0xe6, 0x93, 0x8d, 0x6d, 0x92, 0x89, 0x38, 0x03, 0x55, 0xda, 0x19, 0x36, 0x75,
0xcf, 0xdd, 0xf3, 0xb9, 0x40, 0x55, 0x38, 0x4c, 0x73, 0xf7, 0xa8, 0x64, 0x04, 0x6e, 0x60, 0xd8,
0x0c, 0x81, 0x3b, 0x1b, 0x0a, 0x21, 0xc5, 0x54, 0x2b, 0x43, 0xc2, 0x48, 0xe3, 0xf8, 0x4b, 0xc0,
0xec, 0x1f, 0xb2, 0x9d, 0x33, 0x71, 0x4c, 0x93, 0x30, 0xf9, 0x45, 0x16, 0x9a, 0xb2, 0x51, 0xd5,
0x57, 0x4f, 0x4b, 0xeb, 0x08, 0x9d, 0x31, 0x6c, 0x74, 0x1a, 0x2a, 0xdb, 0x86, 0x65, 0xeb, 0x1e,
0x36, 0x7c, 0xd7, 0xe1, 0x23, 0x06, 0x02, 0xd2, 0x28, 0x44, 0xfd, 0x1b, 0x85, 0x9d, 0xef, 0x7e,
0x19, 0x8c, 0x61, 0xad, 0x99, 0x55, 0x7f, 0x94, 0x81, 0xda, 0xba, 0xe3, 0x63, 0x2f, 0x38, 0xfe,
0xeb, 0x18, 0xf4, 0x06, 0x54, 0xe8, 0x08, 0x7d, 0xdd, 0x34, 0x02, 0x83, 0xbb, 0xbe, 0xc7, 0xa5,
0x27, 0x3b, 0x6f, 0x13, 0xbc, 0x35, 0x23, 0x30, 0x34, 0xc6, 0x26, 0x9f, 0xfc, 0x46, 0x27, 0xa0,
0xbc, 0x63, 0xf8, 0x3b, 0xfa, 0x5d, 0xbc, 0xcf, 0x82, 0xcb, 0x9a, 0x56, 0x22, 0x80, 0x77, 0xf0,
0xbe, 0x8f, 0x1e, 0x83, 0x92, 0xd3, 0xef, 0x32, 0x95, 0x2b, 0x2e, 0x29, 0xcb, 0x35, 0xad, 0xe8,
0xf4, 0xbb, 0x44, 0xe1, 0x18, 0xbb, 0x4a, 0xcd, 0xac, 0xfa, 0xd7, 0x19, 0xa8, 0xdf, 0xec, 0x93,
0xe5, 0x13, 0x3d, 0xa0, 0xea, 0xdb, 0xc1, 0x83, 0x89, 0xe7, 0x39, 0xc8, 0xb2, 0x40, 0x84, 0xd4,
0x68, 0x4a, 0x47, 0xb0, 0xbe, 0xe6, 0x6b, 0x04, 0x89, 0x1e, 0xce, 0xf4, 0xdb, 0x6d, 0x1e, 0xd3,
0x65, 0x29, 0xd5, 0x65, 0x02, 0x61, 0x11, 0xdd, 0x09, 0x28, 0x63, 0xcf, 0x8b, 0x22, 0x3e, 0x3a,
0x26, 0xec, 0x79, 0xac, 0x50, 0x85, 0xaa, 0xd1, 0xbe, 0xeb, 0xb8, 0x7b, 0x36, 0x36, 0x3b, 0xd8,
0xa4, 0x82, 0x50, 0xd2, 0x62, 0x30, 0x26, 0x2a, 0x44, 0x02, 0xf4, 0xb6, 0x13, 0xd0, 0x58, 0x20,
0x4b, 0x44, 0x85, 0x40, 0xae, 0x3a, 0x01, 0x29, 0x36, 0xb1, 0x8d, 0x03, 0x4c, 0x8b, 0x8b, 0xac,
0x98, 0x41, 0x78, 0x71, 0xbf, 0x17, 0xd5, 0x2e, 0xb1, 0x62, 0x06, 0x21, 0xc5, 0x27, 0xa1, 0x3c,
0xd8, 0x40, 0x2f, 0x0f, 0xf6, 0x3b, 0x29, 0x40, 0xfd, 0x99, 0x02, 0xb5, 0x35, 0xda, 0xd4, 0x23,
0x20, 0x7d, 0x08, 0x72, 0xf8, 0x7e, 0xcf, 0xe3, 0xca, 0x44, 0x7f, 0x8f, 0x14, 0x28, 0x26, 0x35,
0xe5, 0x66, 0x56, 0xfd, 0x4e, 0x0e, 0x6a, 0x9b, 0xd8, 0xf0, 0xda, 0x3b, 0x8f, 0xc4, 0x66, 0x4e,
0x03, 0xb2, 0xa6, 0x6f, 0xf3, 0x71, 0x92, 0x9f, 0xe8, 0x3c, 0xcc, 0xf4, 0x6c, 0xa3, 0x8d, 0x77,
0x5c, 0xdb, 0xc4, 0x9e, 0xde, 0xf1, 0xdc, 0x3e, 0x3b, 0x80, 0xac, 0x6a, 0x0d, 0xa1, 0xe0, 0x1a,
0x81, 0xa3, 0x97, 0xa1, 0x64, 0xfa, 0xb6, 0x4e, 0x57, 0xc1, 0x45, 0x6a, 0x7d, 0xe5, 0xe3, 0x5b,
0xf3, 0x6d, 0xba, 0x08, 0x2e, 0x9a, 0xec, 0x07, 0x7a, 0x02, 0x6a, 0x6e, 0x3f, 0xe8, 0xf5, 0x03,
0x9d, 0xa9, 0x6c, 0xb3, 0x44, 0xc9, 0xab, 0x32, 0x20, 0xd5, 0x68, 0x1f, 0xbd, 0x0d, 0x35, 0x9f,
0xb2, 0x32, 0x0c, 0x80, 0xcb, 0xe3, 0x86, 0x5d, 0x55, 0x56, 0x8f, 0x47, 0xc0, 0xcf, 0x40, 0x23,
0xf0, 0x8c, 0x5d, 0x6c, 0x0b, 0x07, 0x3c, 0x40, 0xe5, 0x73, 0x9a, 0xc1, 0x07, 0xa7, 0xa3, 0x29,
0xc7, 0x41, 0x95, 0xb4, 0xe3, 0x20, 0x54, 0x87, 0x8c, 0x73, 0x8f, 0x9e, 0x34, 0x66, 0xb5, 0x8c,
0x73, 0x8f, 0x09, 0x42, 0xbd, 0x99, 0x55, 0xdf, 0x81, 0xdc, 0x75, 0x2b, 0xa0, 0x1c, 0x26, 0xea,
0xaf, 0xd0, 0x75, 0x08, 0x55, 0xf2, 0xc7, 0xa0, 0xe4, 0xb9, 0x7b, 0xcc, 0xae, 0x91, 0x98, 0xac,
0xaa, 0x15, 0x3d, 0x77, 0x8f, 0x1a, 0x2d, 0x9a, 0xac, 0xe2, 0x7a, 0x98, 0x45, 0x98, 0x19, 0x8d,
0x7f, 0xa9, 0x7f, 0xac, 0x0c, 0xa4, 0x8a, 0x58, 0x22, 0xff, 0xc1, 0x4c, 0xd1, 0x1b, 0x50, 0xf4,
0x58, 0xfd, 0x91, 0x47, 0xe5, 0x62, 0x4f, 0xd4, 0xae, 0x86, 0xb5, 0xc6, 0x16, 0x40, 0xb2, 0xc2,
0xac, 0xbe, 0x6d, 0xf7, 0xfd, 0x87, 0xa1, 0x05, 0xb2, 0x63, 0x87, 0xac, 0xfc, 0x18, 0x84, 0xce,
0xc6, 0xf4, 0x52, 0x56, 0xfd, 0xaf, 0x1c, 0xd4, 0x38, 0x3d, 0x93, 0x84, 0x1a, 0xa9, 0x34, 0x6d,
0x42, 0x85, 0xf4, 0xad, 0xfb, 0xb8, 0x13, 0xee, 0xae, 0x54, 0x56, 0x57, 0xa5, 0xa1, 0x76, 0x8c,
0x0c, 0x9a, 0x96, 0xb0, 0x49, 0x2b, 0xbd, 0xe5, 0x04, 0xde, 0xbe, 0x06, 0xed, 0x08, 0x80, 0xda,
0x30, 0xb3, 0x4d, 0x90, 0x75, 0xb1, 0xe9, 0x1c, 0x6d, 0xfa, 0xe5, 0x31, 0x9a, 0xa6, 0x5f, 0xc9,
0xf6, 0xa7, 0xb7, 0xe3, 0x50, 0xf4, 0x21, 0x9b, 0x52, 0xdd, 0xc7, 0x06, 0xd7, 0x0f, 0xee, 0x6c,
0x5f, 0x1c, 0x9b, 0x7a, 0x83, 0x29, 0x10, 0xeb, 0xa0, 0xd6, 0x16, 0x61, 0xad, 0x0f, 0x61, 0x3a,
0x41, 0x02, 0xd1, 0x88, 0xbb, 0x78, 0x9f, 0x2f, 0xbc, 0xc8, 0x4f, 0xf4, 0x82, 0x98, 0x14, 0x93,
0xe6, 0xe6, 0x6f, 0xb8, 0x4e, 0xe7, 0xb2, 0xe7, 0x19, 0xfb, 0x3c, 0x69, 0xe6, 0x52, 0xe6, 0xab,
0x4a, 0x6b, 0x0b, 0xe6, 0x64, 0xc3, 0xfc, 0x5c, 0xfb, 0x78, 0x13, 0xd0, 0xf0, 0x38, 0x25, 0x3d,
0xc4, 0x52, 0x7b, 0xb2, 0x42, 0x0b, 0xea, 0x27, 0x59, 0xa8, 0xbe, 0xdb, 0xc7, 0xde, 0xfe, 0x51,
0xfa, 0x84, 0xd0, 0xa7, 0xe5, 0x04, 0x9f, 0x36, 0x64, 0x86, 0xf3, 0x12, 0x33, 0x2c, 0x71, 0x26,
0x05, 0xa9, 0x33, 0x91, 0xd9, 0xd9, 0xe2, 0xa1, 0xec, 0x6c, 0x29, 0xd5, 0xce, 0xae, 0x41, 0x95,
0x9d, 0xc0, 0x1d, 0xd6, 0x15, 0x54, 0x68, 0x35, 0xe6, 0x09, 0x98, 0x3d, 0x68, 0x34, 0xb3, 0xea,
0x1f, 0x29, 0xd1, 0x8c, 0x4c, 0x64, 0x4f, 0x63, 0x41, 0x6a, 0xe6, 0xd0, 0x41, 0xea, 0xd8, 0xf6,
0xf4, 0x53, 0x05, 0xca, 0xef, 0xe3, 0x76, 0xe0, 0x7a, 0x44, 0x67, 0x25, 0xd5, 0x94, 0x31, 0x56,
0x0e, 0x99, 0xe4, 0xca, 0xe1, 0x22, 0x94, 0x2c, 0x53, 0x37, 0x88, 0xc0, 0xd3, 0x7e, 0x47, 0xc5,
0xa7, 0x45, 0xcb, 0xa4, 0x9a, 0x31, 0xfe, 0x39, 0xca, 0x0f, 0x14, 0xa8, 0x32, 0x9a, 0x7d, 0x56,
0xf3, 0x55, 0xa1, 0x3b, 0x45, 0xa6, 0x85, 0xfc, 0x23, 0x1a, 0xe8, 0xf5, 0xa9, 0x41, 0xb7, 0x97,
0x01, 0x08, 0x93, 0x79, 0x75, 0xa6, 0xc4, 0x4b, 0x52, 0x6a, 0x59, 0x75, 0xca, 0xf0, 0xeb, 0x53,
0x5a, 0x99, 0xd4, 0xa2, 0x4d, 0x5c, 0x29, 0x42, 0x9e, 0xd6, 0x56, 0xff, 0x5b, 0x81, 0xd9, 0xab,
0x86, 0xdd, 0x5e, 0xb3, 0xfc, 0xc0, 0x70, 0xda, 0x13, 0x44, 0xa4, 0x97, 0xa0, 0xe8, 0xf6, 0x74,
0x1b, 0x6f, 0x07, 0x9c, 0xa4, 0x33, 0x23, 0x46, 0xc4, 0xd8, 0xa0, 0x15, 0xdc, 0xde, 0x0d, 0xbc,
0x1d, 0xa0, 0xd7, 0xa0, 0xe4, 0xf6, 0x74, 0xcf, 0xea, 0xec, 0x04, 0x9c, 0xfb, 0x63, 0x54, 0x2e,
0xba, 0x3d, 0x8d, 0xd4, 0x10, 0x36, 0xa3, 0x72, 0x87, 0xdc, 0x8c, 0x52, 0x7f, 0x32, 0x34, 0xfc,
0x09, 0x74, 0xe0, 0x12, 0x94, 0x2c, 0x27, 0xd0, 0x4d, 0xcb, 0x0f, 0x59, 0x70, 0x4a, 0x2e, 0x43,
0x4e, 0x40, 0x47, 0x40, 0xe7, 0xd4, 0x09, 0x48, 0xdf, 0xe8, 0x4d, 0x80, 0x6d, 0xdb, 0x35, 0x78,
0x6d, 0xc6, 0x83, 0xd3, 0x72, 0xf5, 0x21, 0x68, 0x61, 0xfd, 0x32, 0xad, 0x44, 0x5a, 0x18, 0x4c,
0xe9, 0xdf, 0x2a, 0x30, 0xbf, 0x81, 0x3d, 0x96, 0x13, 0x16, 0xf0, 0x9d, 0xe4, 0x75, 0x67, 0xdb,
0x8d, 0x6f, 0xe6, 0x2b, 0x89, 0xcd, 0xfc, 0xcf, 0x67, 0x03, 0x3b, 0xb6, 0x9e, 0x64, 0x47, 0x4a,
0xe1, 0x7a, 0x32, 0x3c, 0x38, 0x63, 0x0b, 0xf3, 0x7a, 0xca, 0x34, 0x71, 0x7a, 0xc5, 0xfd, 0x09,
0xf5, 0x57, 0x59, 0xde, 0x8c, 0x74, 0x50, 0x0f, 0x2e, 0xb0, 0x0b, 0xc0, 0x1d, 0x47, 0xc2, 0x8d,
0x3c, 0x05, 0x09, 0xdb, 0x91, 0x62, 0x88, 0x7e, 0x53, 0x81, 0xa5, 0x74, 0xaa, 0x26, 0x89, 0xad,
0xde, 0x84, 0xbc, 0xe5, 0x6c, 0xbb, 0xe1, 0x3e, 0xe5, 0x39, 0xa9, 0x2e, 0xc8, 0xfb, 0x65, 0x15,
0xd5, 0xbf, 0xcb, 0x40, 0xe3, 0x5d, 0x96, 0x87, 0xf1, 0x85, 0x4f, 0x7f, 0x17, 0x77, 0x75, 0xdf,
0xfa, 0x18, 0x87, 0xd3, 0xdf, 0xc5, 0xdd, 0x4d, 0xeb, 0x63, 0x1c, 0x93, 0x8c, 0x7c, 0x5c, 0x32,
0x46, 0x6f, 0xcc, 0x8b, 0xfb, 0xd0, 0xc5, 0xf8, 0x3e, 0xf4, 0x02, 0x14, 0x1c, 0xd7, 0xc4, 0xeb,
0x6b, 0x7c, 0x0d, 0xce, 0xbf, 0x06, 0xa2, 0x56, 0x3e, 0x9c, 0xa8, 0x91, 0xae, 0x68, 0x13, 0x26,
0x4b, 0xe9, 0x24, 0x34, 0xb2, 0x4f, 0xf5, 0xfb, 0x0a, 0xb4, 0xae, 0xe1, 0x20, 0xc9, 0xd5, 0xa3,
0x93, 0xbf, 0x4f, 0x14, 0x38, 0x21, 0x25, 0x68, 0x12, 0xd1, 0x7b, 0x35, 0x2e, 0x7a, 0xf2, 0x2d,
0xf2, 0xa1, 0x2e, 0xb9, 0xd4, 0x3d, 0x0f, 0xd5, 0xb5, 0x7e, 0xb7, 0x1b, 0xc5, 0x76, 0x67, 0xa0,
0xea, 0xb1, 0x9f, 0x6c, 0x5d, 0xcc, 0x3c, 0x73, 0x85, 0xc3, 0xc8, 0xea, 0x57, 0x3d, 0x0f, 0x35,
0x5e, 0x85, 0x53, 0xdd, 0x82, 0x92, 0xc7, 0x7f, 0x73, 0xfc, 0xe8, 0x5b, 0x9d, 0x87, 0x59, 0x0d,
0x77, 0x88, 0xd0, 0x7b, 0x37, 0x2c, 0xe7, 0x2e, 0xef, 0x46, 0xfd, 0x96, 0x02, 0x73, 0x71, 0x38,
0x6f, 0xeb, 0x25, 0x28, 0x1a, 0xa6, 0xe9, 0x61, 0xdf, 0x1f, 0x39, 0x2d, 0x97, 0x19, 0x8e, 0x16,
0x22, 0x0b, 0x9c, 0xcb, 0x8c, 0xcd, 0x39, 0x55, 0x87, 0x99, 0x6b, 0x38, 0xb8, 0x89, 0x03, 0x6f,
0xa2, 0xf4, 0x88, 0x26, 0x59, 0x98, 0xd2, 0xca, 0x5c, 0x2c, 0xc2, 0x4f, 0xf5, 0x7b, 0x0a, 0x20,
0xb1, 0x87, 0x49, 0xa6, 0x59, 0xe4, 0x72, 0x26, 0xce, 0x65, 0x96, 0xa0, 0xd6, 0xed, 0xb9, 0x0e,
0x76, 0x02, 0x31, 0x10, 0xab, 0x45, 0x50, 0x2a, 0x7e, 0x3f, 0x53, 0x00, 0xdd, 0x70, 0x0d, 0xf3,
0x8a, 0x61, 0x4f, 0x16, 0x38, 0x9c, 0x02, 0xf0, 0xbd, 0xb6, 0xce, 0xf5, 0x38, 0xc3, 0xed, 0x92,
0xd7, 0xbe, 0xc5, 0x54, 0xf9, 0x34, 0x54, 0x4c, 0x3f, 0xe0, 0xc5, 0xe1, 0x69, 0x3d, 0x98, 0x7e,
0xc0, 0xca, 0x69, 0x9e, 0x38, 0x59, 0xb1, 0x61, 0x53, 0x17, 0x0e, 0x3b, 0x73, 0x14, 0xad, 0xc1,
0x0a, 0x36, 0x23, 0xb8, 0x44, 0xb9, 0xf2, 0xe9, 0x39, 0x9b, 0x33, 0xcd, 0xbc, 0xba, 0x0d, 0x8b,
0x37, 0x0d, 0xa7, 0x6f, 0xd8, 0x57, 0xdd, 0x6e, 0xcf, 0x88, 0xe5, 0x18, 0x27, 0x2d, 0xa6, 0x22,
0xb1, 0x98, 0x8f, 0xb3, 0xd4, 0x47, 0x16, 0xf4, 0xd3, 0xc1, 0xe5, 0x34, 0x01, 0xc2, 0xfa, 0x29,
0x36, 0x15, 0xd5, 0x87, 0xe6, 0x70, 0x3f, 0x93, 0x4c, 0x31, 0xa5, 0x2e, 0x6c, 0x4a, 0xb4, 0xe7,
0x03, 0x98, 0xfa, 0x06, 0x3c, 0x46, 0xf3, 0x51, 0x43, 0x50, 0xec, 0x58, 0x25, 0xd9, 0x80, 0x22,
0x69, 0xe0, 0x0f, 0x33, 0xd4, 0x28, 0x0e, 0xb5, 0x30, 0x09, 0xe1, 0x97, 0xe2, 0x87, 0x18, 0x4f,
0xa6, 0xa4, 0xc1, 0xc7, 0x7b, 0xe4, 0xe6, 0x7b, 0x19, 0xa6, 0xf1, 0x7d, 0xdc, 0xee, 0x07, 0x96,
0xd3, 0xd9, 0xb0, 0x0d, 0xe7, 0x96, 0xcb, 0x9d, 0x54, 0x12, 0x8c, 0x9e, 0x84, 0x1a, 0x99, 0x06,
0xb7, 0x1f, 0x70, 0x3c, 0xe6, 0xad, 0xe2, 0x40, 0xd2, 0x1e, 0x19, 0xaf, 0x8d, 0x03, 0x6c, 0x72,
0x3c, 0xe6, 0xba, 0x92, 0x60, 0xc2, 0xad, 0x6d, 0xc3, 0xb2, 0x23, 0x34, 0xb6, 0xa3, 0x1c, 0x83,
0x0d, 0xb1, 0x9b, 0x80, 0xfd, 0xc3, 0xb0, 0xfb, 0x1f, 0x94, 0x04, 0xbb, 0x79, 0x0b, 0x47, 0xc5,
0xee, 0xeb, 0x00, 0x5d, 0xec, 0x75, 0xf0, 0x3a, 0x75, 0x19, 0x6c, 0xab, 0x67, 0x59, 0xea, 0x32,
0x06, 0x0d, 0xdc, 0x0c, 0x2b, 0x68, 0x42, 0x5d, 0xf5, 0x1a, 0xcc, 0x4a, 0x50, 0x88, 0x35, 0xf4,
0xdd, 0xbe, 0xd7, 0xc6, 0xe1, 0xb6, 0x61, 0xf8, 0x49, 0xbc, 0x67, 0x60, 0x78, 0x1d, 0x1c, 0x70,
0xc1, 0xe6, 0x5f, 0xea, 0x4b, 0xf4, 0x90, 0x90, 0xee, 0x84, 0xc4, 0xa4, 0x39, 0x9e, 0x0b, 0xa1,
0x0c, 0xe5, 0x42, 0x6c, 0xd3, 0x83, 0x38, 0xb1, 0xde, 0x84, 0x79, 0x2c, 0x74, 0x77, 0x09, 0x9b,
0xfc, 0xea, 0x54, 0xf8, 0xa9, 0xfe, 0x8f, 0x02, 0xb5, 0xf5, 0x6e, 0xcf, 0x1d, 0x1c, 0x3d, 0x8d,
0xbd, 0x84, 0x1d, 0xde, 0xb1, 0xcf, 0xc8, 0x76, 0xec, 0x9f, 0x80, 0x5a, 0xfc, 0x92, 0x0d, 0xdb,
0x11, 0xac, 0xb6, 0xc5, 0xcb, 0x35, 0x27, 0xa0, 0xec, 0xb9, 0x7b, 0x3a, 0x31, 0xc0, 0x26, 0xcf,
0x98, 0x29, 0x79, 0xee, 0x1e, 0x31, 0xcb, 0x26, 0x9a, 0x83, 0xfc, 0xb6, 0x65, 0x47, 0xc9, 0x5e,
0xec, 0x03, 0xbd, 0x4a, 0x16, 0x78, 0xec, 0xfc, 0xbc, 0x30, 0xee, 0x3a, 0x2b, 0xac, 0xc1, 0xec,
0x1c, 0x6a, 0x2a, 0xea, 0x07, 0x50, 0x0f, 0x87, 0x3f, 0xe1, 0xe5, 0xb1, 0xc0, 0xf0, 0xef, 0x86,
0x59, 0x2d, 0xec, 0x43, 0x3d, 0xcf, 0x4e, 0x53, 0x69, 0xfb, 0xb1, 0xd9, 0x47, 0x90, 0x23, 0x18,
0x5c, 0xa9, 0xe8, 0x6f, 0xf5, 0x5f, 0x33, 0xb0, 0x90, 0xc4, 0x9e, 0x84, 0xa4, 0x97, 0xe2, 0x8a,
0x24, 0xbf, 0x0b, 0x24, 0xf6, 0xc6, 0x95, 0x88, 0x4f, 0x45, 0xdb, 0xed, 0x3b, 0x01, 0xb7, 0x56,
0x64, 0x2a, 0xae, 0x92, 0x6f, 0xb4, 0x08, 0x45, 0xcb, 0xd4, 0x6d, 0xb2, 0x28, 0x64, 0x2e, 0xad,
0x60, 0x99, 0x37, 0xc8, 0x82, 0xf1, 0xe5, 0x30, 0x50, 0x1b, 0x3b, 0x15, 0x86, 0xe1, 0xa3, 0x3a,
0x64, 0x2c, 0x93, 0x9b, 0xa7, 0x8c, 0x65, 0x52, 0x71, 0x11, 0xf3, 0xd1, 0x79, 0x88, 0x2d, 0xba,
0x31, 0x93, 0x38, 0x61, 0xae, 0x2b, 0x34, 0x63, 0xbd, 0x14, 0x57, 0x1f, 0x93, 0xca, 0x13, 0xcb,
0x71, 0xd3, 0x03, 0x9f, 0x06, 0xdd, 0x59, 0xad, 0xc4, 0x00, 0x77, 0x7c, 0xf5, 0xeb, 0xb0, 0x40,
0x68, 0x66, 0x63, 0xbf, 0x43, 0x66, 0xea, 0xd0, 0xb2, 0x3f, 0x07, 0x79, 0xdb, 0xea, 0x5a, 0xa1,
0xb6, 0xb3, 0x0f, 0xf5, 0x97, 0x15, 0x58, 0x1c, 0x6a, 0x79, 0x92, 0x39, 0xbc, 0x2c, 0x8a, 0x55,
0x65, 0xf5, 0xbc, 0xd4, 0x96, 0xc9, 0x85, 0x26, 0x94, 0xc1, 0x5f, 0x63, 0x61, 0x9a, 0xc6, 0x52,
0x80, 0x1f, 0x72, 0x42, 0xd9, 0x32, 0x34, 0xf6, 0xac, 0x60, 0x47, 0xa7, 0xb7, 0xd6, 0x68, 0x8c,
0xc4, 0x12, 0x27, 0x4a, 0x5a, 0x9d, 0xc0, 0x37, 0x09, 0x98, 0xc4, 0x49, 0xbe, 0xfa, 0x5d, 0x05,
0x66, 0x63, 0x64, 0x4d, 0xc2, 0xa6, 0xd7, 0x48, 0xf8, 0xc8, 0x1a, 0xe2, 0x9c, 0x5a, 0x92, 0x72,
0x8a, 0xf7, 0x46, 0xad, 0x7d, 0x54, 0x43, 0xfd, 0xa9, 0x02, 0x15, 0xa1, 0x84, 0xac, 0x4b, 0x79,
0xd9, 0x60, 0x5d, 0x1a, 0x01, 0xc6, 0x62, 0xc3, 0x13, 0x30, 0xb0, 0x81, 0xc2, 0x95, 0x0a, 0x21,
0xa7, 0xd3, 0xf4, 0xd1, 0x75, 0xa8, 0x33, 0x36, 0x45, 0xa4, 0x4b, 0xb7, 0x8b, 0xa2, 0x6c, 0x55,
0xc3, 0x33, 0x39, 0x95, 0x5a, 0xcd, 0x17, 0xbe, 0xd8, 0x99, 0xb9, 0x6b, 0x62, 0xda, 0x53, 0x7e,
0x68, 0x95, 0x58, 0x15, 0xab, 0x92, 0x48, 0xdb, 0xc6, 0x86, 0x89, 0xbd, 0x68, 0x6c, 0xd1, 0x37,
0xd1, 0x2a, 0xf6, 0x5b, 0x27, 0x2b, 0x0f, 0x6e, 0xcd, 0x81, 0x81, 0xc8, 0xa2, 0x04, 0x3d, 0x05,
0xd3, 0x66, 0x37, 0x76, 0x65, 0x32, 0x8c, 0xc5, 0xcd, 0xae, 0x70, 0x57, 0x32, 0x46, 0x50, 0x2e,
0x4e, 0xd0, 0xb7, 0x07, 0x97, 0xd0, 0x3d, 0x6c, 0x62, 0x27, 0xb0, 0x0c, 0xfb, 0xc1, 0x65, 0xb2,
0x05, 0xa5, 0xbe, 0x8f, 0x3d, 0xc1, 0xf9, 0x44, 0xdf, 0xa4, 0xac, 0x67, 0xf8, 0xfe, 0x9e, 0xeb,
0x99, 0x9c, 0xca, 0xe8, 0x7b, 0x44, 0x82, 0x2c, 0xbb, 0xb8, 0x2c, 0x4f, 0x90, 0x7d, 0x09, 0x16,
0xbb, 0xae, 0x69, 0x6d, 0x5b, 0xb2, 0xbc, 0x5a, 0x52, 0x6d, 0x3e, 0x2c, 0x8e, 0xd5, 0x0b, 0xaf,
0xfc, 0xcc, 0x8a, 0x57, 0x7e, 0x7e, 0x98, 0x81, 0xc5, 0xf7, 0x7a, 0xe6, 0x17, 0xc0, 0x87, 0x25,
0xa8, 0xb8, 0xb6, 0xb9, 0x11, 0x67, 0x85, 0x08, 0x22, 0x18, 0x0e, 0xde, 0x8b, 0x30, 0xd8, 0x31,
0x84, 0x08, 0x1a, 0x99, 0x50, 0xfc, 0x40, 0xfc, 0x2a, 0x8c, 0xe2, 0x57, 0xf9, 0xb3, 0xd7, 0x0b,
0xa5, 0x4c, 0x63, 0xae, 0x99, 0x51, 0x7f, 0x0e, 0x16, 0x59, 0x6a, 0xc2, 0x43, 0xe6, 0x52, 0x38,
0x47, 0xf3, 0xe2, 0x1c, 0x7d, 0x04, 0xf3, 0xc4, 0x9a, 0x93, 0xae, 0xdf, 0xf3, 0xb1, 0x37, 0xa1,
0x91, 0x3a, 0x09, 0xe5, 0xb0, 0xb7, 0x30, 0x15, 0x7c, 0x00, 0x50, 0xff, 0x3f, 0xcc, 0x25, 0xfa,
0x7a, 0xc0, 0x51, 0x86, 0x23, 0x59, 0x10, 0x47, 0xb2, 0x04, 0xa0, 0xb9, 0x36, 0x7e, 0xcb, 0x09,
0xac, 0x60, 0x9f, 0x44, 0x1f, 0x82, 0x6b, 0xa3, 0xbf, 0x09, 0x06, 0xe9, 0x77, 0x04, 0xc6, 0xaf,
0x28, 0x30, 0xc3, 0x34, 0x97, 0x34, 0xf5, 0xe0, 0xb3, 0xf0, 0x32, 0x14, 0x30, 0xed, 0x85, 0xef,
0x66, 0x9c, 0x96, 0x9b, 0xea, 0x88, 0x5c, 0x8d, 0xa3, 0x4b, 0xd5, 0x28, 0x80, 0xe9, 0x35, 0xcf,
0xed, 0x4d, 0x46, 0x11, 0x8d, 0x78, 0x6c, 0x2c, 0xc6, 0xb0, 0x25, 0x02, 0xb8, 0x95, 0x26, 0x18,
0x7f, 0xaf, 0xc0, 0xc2, 0xed, 0x1e, 0xf6, 0x8c, 0x00, 0x13, 0xa6, 0x4d, 0xd6, 0xfb, 0x28, 0xdd,
0x8d, 0x51, 0x96, 0x8d, 0x53, 0x86, 0x5e, 0x8b, 0xdd, 0x53, 0x94, 0xaf, 0x73, 0x12, 0x54, 0x0e,
0xee, 0x3b, 0x84, 0xe3, 0x5a, 0x14, 0xc7, 0xf5, 0x63, 0x05, 0x66, 0x36, 0x31, 0xf1, 0x63, 0x93,
0x0d, 0xe9, 0x22, 0xe4, 0x08, 0x95, 0xe3, 0x4e, 0x30, 0x45, 0x46, 0xe7, 0x60, 0xc6, 0x72, 0xda,
0x76, 0xdf, 0xc4, 0x3a, 0x19, 0xbf, 0x4e, 0xc2, 0x43, 0x1e, 0x3c, 0x4c, 0xf3, 0x02, 0x32, 0x0c,
0xe2, 0xa2, 0xa5, 0x32, 0x7e, 0x9f, 0xc9, 0x78, 0x94, 0x10, 0xc6, 0x48, 0x50, 0x0e, 0x43, 0xc2,
0x8b, 0x90, 0x27, 0x5d, 0x87, 0x41, 0x84, 0xbc, 0xd6, 0x40, 0x4d, 0x34, 0x86, 0xad, 0xfe, 0xbc,
0x02, 0x48, 0x64, 0xdb, 0x24, 0x56, 0xe2, 0x15, 0x31, 0x11, 0x24, 0x3b, 0x92, 0x74, 0x36, 0xd2,
0x28, 0x05, 0x44, 0xfd, 0x34, 0x9a, 0x3d, 0x3a, 0xdd, 0x93, 0xcc, 0x1e, 0x19, 0xd7, 0xc8, 0xd9,
0x13, 0x98, 0x40, 0x91, 0xc5, 0xd9, 0xa3, 0x12, 0x2b, 0x99, 0x3d, 0x42, 0x33, 0x9d, 0x3d, 0x6e,
0xdf, 0x9b, 0xcd, 0x0c, 0x99, 0x34, 0x46, 0x6c, 0x38, 0x69, 0xb4, 0x67, 0xe5, 0x30, 0x3d, 0xbf,
0x08, 0x79, 0xd2, 0xe3, 0xc1, 0xfc, 0x0a, 0x27, 0x8d, 0x62, 0x0b, 0x93, 0xc6, 0x09, 0x78, 0xf8,
0x93, 0x36, 0x18, 0xe9, 0x60, 0xd2, 0x54, 0xa8, 0xde, 0xde, 0xfa, 0x08, 0xb7, 0x83, 0x11, 0x96,
0xf7, 0x2c, 0x4c, 0x6f, 0x78, 0xd6, 0xae, 0x65, 0xe3, 0xce, 0x28, 0x13, 0xfe, 0x5d, 0x05, 0x6a,
0xd7, 0x3c, 0xc3, 0x09, 0xdc, 0xd0, 0x8c, 0x3f, 0x10, 0x3f, 0xaf, 0x40, 0xb9, 0x17, 0xf6, 0xc6,
0x65, 0xe0, 0x49, 0xf9, 0x89, 0x4f, 0x9c, 0x26, 0x6d, 0x50, 0x4d, 0x7d, 0x1f, 0xe6, 0x28, 0x25,
0x49, 0xb2, 0x5f, 0x87, 0x12, 0x35, 0xe6, 0x16, 0xdf, 0x40, 0xa9, 0xac, 0xaa, 0xf2, 0x25, 0x8d,
0x38, 0x0c, 0x2d, 0xaa, 0xa3, 0xfe, 0xb3, 0x02, 0x15, 0x5a, 0x36, 0x18, 0xe0, 0xe1, 0xb5, 0xfc,
0x15, 0x28, 0xb8, 0x94, 0xe5, 0x23, 0x0f, 0x86, 0xc5, 0x59, 0xd1, 0x78, 0x05, 0x12, 0x21, 0xb3,
0x5f, 0xa2, 0x45, 0x06, 0x06, 0xe2, 0x36, 0xb9, 0xd8, 0x61, 0xb4, 0x53, 0xb3, 0x3c, 0xde, 0xf8,
0xc2, 0x2a, 0x74, 0xad, 0xc6, 0x64, 0x92, 0x22, 0x3c, 0xb8, 0x0a, 0x7f, 0x35, 0xe1, 0x63, 0x97,
0xd2, 0xa9, 0x90, 0x3b, 0xd9, 0x98, 0x65, 0x25, 0x6b, 0xb5, 0x18, 0x59, 0x13, 0xae, 0xd5, 0x22,
0x11, 0x18, 0xb5, 0x56, 0x13, 0x89, 0x1b, 0x08, 0xc0, 0x3f, 0x2a, 0xb0, 0xc8, 0x7d, 0x5a, 0x24,
0x5b, 0x47, 0xc0, 0x26, 0xf4, 0x35, 0xee, 0x7b, 0xb3, 0xd4, 0xf7, 0x3e, 0x33, 0xca, 0xf7, 0x46,
0x74, 0x1e, 0xe0, 0x7c, 0xcf, 0x42, 0xf9, 0x26, 0xad, 0xf8, 0xd6, 0xfd, 0x00, 0x35, 0xa1, 0xb8,
0x8b, 0x3d, 0xdf, 0x72, 0x1d, 0xae, 0xe2, 0xe1, 0xe7, 0xb9, 0x33, 0x50, 0x0a, 0x6f, 0x2e, 0xa2,
0x22, 0x64, 0x2f, 0xdb, 0x76, 0x63, 0x0a, 0x55, 0xa1, 0xb4, 0xce, 0xaf, 0xe7, 0x35, 0x94, 0x73,
0x6f, 0xc2, 0xac, 0xc4, 0xef, 0xa3, 0x19, 0xa8, 0x5d, 0x36, 0x69, 0x74, 0x79, 0xc7, 0x25, 0xc0,
0xc6, 0x14, 0x5a, 0x00, 0xa4, 0xe1, 0xae, 0xbb, 0x4b, 0x11, 0xdf, 0xf6, 0xdc, 0x2e, 0x85, 0x2b,
0xe7, 0x9e, 0x85, 0x39, 0x19, 0xf5, 0xa8, 0x0c, 0x79, 0xca, 0x8d, 0xc6, 0x14, 0x02, 0x28, 0x68,
0x78, 0xd7, 0xbd, 0x8b, 0x1b, 0xca, 0xea, 0x9f, 0x9f, 0x83, 0x1a, 0xa3, 0x9d, 0xdf, 0xb3, 0x47,
0x3a, 0x34, 0x92, 0x4f, 0x8d, 0xa1, 0xaf, 0xc8, 0x77, 0x62, 0xe5, 0x2f, 0x92, 0xb5, 0x46, 0x09,
0x93, 0x3a, 0x85, 0x3e, 0x80, 0x7a, 0xfc, 0x71, 0x2e, 0x24, 0x3f, 0x96, 0x96, 0xbe, 0xe0, 0x75,
0x50, 0xe3, 0x3a, 0xd4, 0x62, 0xef, 0x6a, 0x21, 0xf9, 0x04, 0xcb, 0xde, 0xde, 0x6a, 0xc9, 0xad,
0x89, 0xf8, 0xf6, 0x15, 0xa3, 0x3e, 0xfe, 0xd0, 0x4d, 0x0a, 0xf5, 0xd2, 0xd7, 0x70, 0x0e, 0xa2,
0xde, 0x80, 0x99, 0xa1, 0x77, 0x68, 0xd0, 0xb3, 0x29, 0x1b, 0x22, 0xf2, 0xf7, 0x6a, 0x0e, 0xea,
0x62, 0x0f, 0xd0, 0xf0, 0x5b, 0x51, 0x68, 0x45, 0x3e, 0x03, 0x69, 0xaf, 0x67, 0xb5, 0x2e, 0x8c,
0x8d, 0x1f, 0x31, 0xee, 0x3b, 0x0a, 0x2c, 0xa6, 0x3c, 0x59, 0x82, 0x2e, 0xa6, 0xed, 0x8e, 0x8d,
0x78, 0x80, 0xa5, 0xf5, 0xc2, 0xe1, 0x2a, 0x45, 0x84, 0x38, 0x30, 0x9d, 0x78, 0xb1, 0x03, 0x9d,
0x4f, 0xbd, 0x66, 0x3c, 0xfc, 0x9c, 0x49, 0xeb, 0x2b, 0xe3, 0x21, 0x47, 0xfd, 0x7d, 0x08, 0xd3,
0x89, 0xe7, 0x2a, 0x52, 0xfa, 0x93, 0x3f, 0x6a, 0x71, 0xd0, 0x84, 0x7e, 0x03, 0x6a, 0xb1, 0x77,
0x25, 0x52, 0x24, 0x5e, 0xf6, 0xf6, 0xc4, 0x41, 0x4d, 0x7f, 0x08, 0x55, 0xf1, 0xf9, 0x07, 0xb4,
0x9c, 0xa6, 0x4b, 0x43, 0x0d, 0x1f, 0x46, 0x95, 0x06, 0xd7, 0xb6, 0x47, 0xa8, 0xd2, 0xd0, 0x4d,
0xf7, 0xf1, 0x55, 0x49, 0x68, 0x7f, 0xa4, 0x2a, 0x1d, 0xba, 0x8b, 0x6f, 0x29, 0x74, 0xdb, 0x5f,
0xf2, 0x2c, 0x00, 0x5a, 0x4d, 0x93, 0xcd, 0xf4, 0x07, 0x10, 0x5a, 0x17, 0x0f, 0x55, 0x27, 0xe2,
0xe2, 0x5d, 0xa8, 0xc7, 0x2f, 0xbf, 0xa7, 0x70, 0x51, 0xfa, 0x5e, 0x40, 0xeb, 0xfc, 0x58, 0xb8,
0x51, 0x67, 0xef, 0x41, 0x45, 0x78, 0x3d, 0x14, 0x3d, 0x3d, 0x42, 0x8e, 0xc5, 0xa7, 0x34, 0x0f,
0xe2, 0xe4, 0xbb, 0x50, 0x8e, 0x1e, 0xfd, 0x44, 0x67, 0x53, 0xe5, 0xf7, 0x30, 0x4d, 0x6e, 0x02,
0x0c, 0x5e, 0xf4, 0x44, 0x4f, 0x49, 0xdb, 0x1c, 0x7a, 0xf2, 0xf3, 0xa0, 0x46, 0xa3, 0xe1, 0xb3,
0xdb, 0x41, 0xa3, 0x86, 0x2f, 0x5e, 0x70, 0x3b, 0xa8, 0xd9, 0x1d, 0xa8, 0xc5, 0x2e, 0xaa, 0xa6,
0xa9, 0xb0, 0xe4, 0x22, 0x71, 0xeb, 0xdc, 0x38, 0xa8, 0xd1, 0xfc, 0xed, 0x40, 0x2d, 0x76, 0x49,
0x30, 0xa5, 0x27, 0xd9, 0xe5, 0xc8, 0x94, 0x9e, 0xa4, 0x77, 0x0e, 0xd5, 0x29, 0xf4, 0x4d, 0xe1,
0x3e, 0x62, 0xec, 0xf2, 0x27, 0x7a, 0x7e, 0x64, 0x3b, 0xb2, 0x4b, 0xb0, 0xad, 0xd5, 0xc3, 0x54,
0x89, 0x48, 0xe0, 0x52, 0xc5, 0x58, 0x9a, 0x2e, 0x55, 0x87, 0x99, 0xa9, 0x4d, 0x28, 0xb0, 0xdb,
0x7e, 0x48, 0x4d, 0xb9, 0xf2, 0x2b, 0x5c, 0x05, 0x6c, 0x3d, 0x21, 0xc5, 0x89, 0xdf, 0x7f, 0x63,
0x8d, 0xb2, 0x9d, 0xd2, 0x94, 0x46, 0x63, 0x37, 0xbc, 0xc6, 0x6d, 0x54, 0x83, 0x02, 0xbb, 0x72,
0x92, 0xd2, 0x68, 0xec, 0x3e, 0x55, 0x6b, 0x34, 0x0e, 0x5b, 0xef, 0x4e, 0xa1, 0x0d, 0xc8, 0xd3,
0x63, 0x6d, 0x74, 0x66, 0xd4, 0x35, 0x86, 0x51, 0x2d, 0xc6, 0x6e, 0x3a, 0xa8, 0x53, 0xe8, 0x36,
0xe4, 0x69, 0x0a, 0x58, 0x4a, 0x8b, 0x62, 0x7e, 0x7f, 0x6b, 0x24, 0x4a, 0x48, 0xa2, 0x09, 0x55,
0x31, 0x0b, 0x37, 0xc5, 0x65, 0x49, 0xf2, 0x94, 0x5b, 0xe3, 0x60, 0x86, 0xbd, 0x30, 0x35, 0x1a,
0x1c, 0xf1, 0xa7, 0xab, 0xd1, 0x50, 0xfa, 0x40, 0xba, 0x1a, 0x0d, 0x67, 0x0c, 0xa8, 0x53, 0xe8,
0x17, 0x14, 0x68, 0xa6, 0xa5, 0x86, 0xa2, 0xd4, 0x08, 0x68, 0x54, 0x7e, 0x6b, 0xeb, 0xc5, 0x43,
0xd6, 0x8a, 0x68, 0xf9, 0x98, 0x9e, 0xfb, 0x0d, 0x25, 0x83, 0x5e, 0x48, 0x6b, 0x2f, 0x25, 0xc1,
0xb1, 0xf5, 0xdc, 0xf8, 0x15, 0xa2, 0xbe, 0xb7, 0xa0, 0x22, 0x9c, 0x39, 0xa6, 0x58, 0xde, 0xe1,
0xc3, 0xd2, 0x94, 0x59, 0x95, 0x1c, 0x5f, 0x32, 0xf1, 0xa6, 0x19, 0x84, 0x29, 0xc2, 0x28, 0x26,
0x24, 0xa6, 0x88, 0x77, 0x2c, 0x01, 0x51, 0x9d, 0x42, 0x18, 0xaa, 0x62, 0x3a, 0x61, 0x8a, 0x34,
0x4a, 0x32, 0x11, 0x5b, 0xcf, 0x8c, 0x81, 0x19, 0x75, 0xa3, 0x03, 0x0c, 0xd2, 0xf9, 0x52, 0x7c,
0xdd, 0x50, 0x46, 0x61, 0xeb, 0xe9, 0x03, 0xf1, 0x44, 0xb7, 0x2f, 0x24, 0xe8, 0xa5, 0x70, 0x7f,
0x38, 0x85, 0x6f, 0x8c, 0xb5, 0xc8, 0x70, 0xca, 0x57, 0xca, 0x5a, 0x24, 0x35, 0xbb, 0xac, 0x75,
0x61, 0x6c, 0xfc, 0x68, 0x3c, 0xf7, 0xa0, 0x91, 0x4c, 0x91, 0x4b, 0x59, 0xe3, 0xa6, 0x64, 0xec,
0xb5, 0x9e, 0x1d, 0x13, 0x5b, 0xf4, 0x87, 0x27, 0x86, 0x69, 0xfa, 0xba, 0x15, 0xec, 0xd0, 0xcc,
0xab, 0x71, 0x46, 0x2d, 0x26, 0x79, 0x8d, 0x33, 0xea, 0x58, 0x4a, 0x17, 0x77, 0x5e, 0x34, 0xdb,
0x20, 0xcd, 0x79, 0x89, 0xc9, 0x44, 0x29, 0x7e, 0x26, 0x9e, 0x71, 0xc3, 0xc2, 0xcf, 0x78, 0x16,
0x03, 0x3a, 0x37, 0x56, 0xaa, 0xc3, 0xa8, 0xf0, 0x53, 0x9e, 0x16, 0xc1, 0x96, 0x6e, 0x89, 0x24,
0x8d, 0x94, 0xa5, 0x94, 0x3c, 0x49, 0x24, 0x65, 0xe9, 0x96, 0x92, 0xf7, 0x41, 0x15, 0xab, 0x91,
0x3c, 0xf1, 0x1e, 0xbd, 0x17, 0x92, 0x3c, 0xea, 0x3c, 0x78, 0xbb, 0xa2, 0x91, 0x3c, 0x4a, 0x4e,
0xe9, 0x20, 0xe5, 0xc4, 0x79, 0x8c, 0x0e, 0x92, 0xa7, 0xb0, 0x29, 0x1d, 0xa4, 0x1c, 0xd6, 0x8e,
0x11, 0xbb, 0xc6, 0x4e, 0x3f, 0x53, 0x5c, 0xa1, 0xec, 0x84, 0x34, 0xc5, 0x15, 0x4a, 0x0f, 0x6e,
0x59, 0x44, 0x3f, 0x38, 0xc4, 0x4c, 0xb1, 0x72, 0x43, 0xa7, 0x9c, 0x07, 0x91, 0x7f, 0x1b, 0x4a,
0xe1, 0x29, 0x24, 0x7a, 0x32, 0x35, 0x44, 0x3c, 0x44, 0x83, 0x1f, 0xc2, 0x74, 0x62, 0x07, 0x2f,
0x45, 0x44, 0xe5, 0xa7, 0x90, 0x07, 0xcf, 0x27, 0x0c, 0xce, 0xab, 0x52, 0x98, 0x30, 0x74, 0x0e,
0x98, 0x62, 0xea, 0x87, 0x0f, 0xbe, 0xc4, 0x0e, 0x08, 0x61, 0x23, 0x3b, 0x10, 0x8e, 0xaa, 0x46,
0x76, 0x20, 0x1e, 0xd2, 0x30, 0x89, 0x4c, 0x6e, 0x50, 0xa6, 0x48, 0x64, 0xca, 0x6e, 0xf1, 0x41,
0x2c, 0xda, 0x82, 0x8a, 0xb0, 0xe5, 0x8d, 0x46, 0x91, 0x26, 0xee, 0xd5, 0xa7, 0x84, 0x0a, 0x92,
0xdd, 0x73, 0x75, 0x6a, 0xb5, 0x0f, 0xd5, 0x0d, 0xcf, 0xbd, 0x1f, 0x3e, 0x4e, 0xfa, 0x05, 0x39,
0xfa, 0x4b, 0x6d, 0xa8, 0x33, 0x04, 0x1d, 0xdf, 0x0f, 0x74, 0x77, 0xeb, 0x23, 0x74, 0x72, 0x85,
0xfd, 0xcb, 0x8f, 0x95, 0xf0, 0x5f, 0x7e, 0xac, 0xbc, 0x6d, 0xd9, 0xf8, 0x36, 0xcf, 0xae, 0xfc,
0xb7, 0xe2, 0x88, 0x1b, 0x81, 0xd1, 0x96, 0xb5, 0xc6, 0xff, 0xeb, 0xc8, 0x5b, 0xf7, 0x83, 0xdb,
0x5b, 0x1f, 0x5d, 0x79, 0xff, 0xb3, 0xd7, 0x8b, 0x90, 0x5f, 0x5d, 0x79, 0x7e, 0xe5, 0x39, 0xa8,
0x5b, 0x11, 0x7a, 0xc7, 0xeb, 0xb5, 0xaf, 0x54, 0x58, 0xa5, 0x0d, 0xd2, 0xce, 0x86, 0xf2, 0xff,
0x96, 0x3b, 0x56, 0xb0, 0xd3, 0xdf, 0x22, 0x53, 0x70, 0x81, 0xa1, 0x3d, 0x6b, 0xb9, 0xfc, 0xd7,
0x05, 0xa3, 0x67, 0xf1, 0x9f, 0xbd, 0xad, 0xdf, 0x55, 0x94, 0xad, 0x02, 0xed, 0xfd, 0xe2, 0xff,
0x05, 0x00, 0x00, 0xff, 0xff, 0xe6, 0x9c, 0xc8, 0xa5, 0xe4, 0x64, 0x00, 0x00,
}
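// Orientation only -- not part of the generated file. The byte slice above is
// a gzip-compressed wire encoding of a descriptor.FileDescriptorProto. A
// minimal standalone sketch of decoding such a blob (the package and helper
// names are hypothetical, and the golang/protobuf v1 API is assumed):
//
//	package descriptorsketch
//
//	import (
//		"bytes"
//		"compress/gzip"
//		"io/ioutil"
//
//		"github.com/golang/protobuf/proto"
//		descriptor "github.com/golang/protobuf/protoc-gen-go/descriptor"
//	)
//
//	// decodeFileDescriptor gunzips and unmarshals a generated descriptor blob,
//	// e.g. fileDescriptor_555bd8c177793206 when called inside its package.
//	func decodeFileDescriptor(gz []byte) (*descriptor.FileDescriptorProto, error) {
//		r, err := gzip.NewReader(bytes.NewReader(gz))
//		if err != nil {
//			return nil, err
//		}
//		raw, err := ioutil.ReadAll(r)
//		if err != nil {
//			return nil, err
//		}
//		fd := &descriptor.FileDescriptorProto{}
//		if err := proto.Unmarshal(raw, fd); err != nil {
//			return nil, err
//		}
//		return fd, nil
//	}
//
// Decoding common.proto's blob this way yields a FileDescriptorProto whose
// GetName() is "common.proto".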
// Reference imports to suppress errors if they are not otherwise used.

View File

@ -96,14 +96,6 @@ rootCoord:
# seconds (24 hours).
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importTaskRetention: 86400
# (in seconds) During index building phase of an import task, Milvus will check the building status of a task's
# segments' indices every `importIndexCheckInterval` seconds. Default 300 seconds (5 minutes).
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importIndexCheckInterval: 300
# (in seconds) Maximum time to wait before pushing flushed segments online (make them searchable) during importing.
# Default 1200 seconds (20 minutes).
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importIndexWaitLimit: 1200
# Related configuration of proxy, used to validate client requests and reduce the returned results.
proxy:
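
The knobs removed above followed a common pattern: poll a condition every `importIndexCheckInterval` seconds and give up after `importIndexWaitLimit` seconds. A minimal Go sketch of that interval/limit pattern (the `waitFor` helper and the `checkIndexBuilt` stand-in are hypothetical, not code from this PR):

package main

import (
	"errors"
	"fmt"
	"time"
)

// waitFor polls check() every checkInterval until it returns true or
// waitLimit elapses; this mirrors the semantics the removed YAML knobs
// used to configure.
func waitFor(check func() bool, checkInterval, waitLimit time.Duration) error {
	deadline := time.Now().Add(waitLimit)
	for time.Now().Before(deadline) {
		if check() {
			return nil
		}
		time.Sleep(checkInterval)
	}
	return errors.New("wait limit exceeded")
}

func main() {
	start := time.Now()
	checkIndexBuilt := func() bool { return time.Since(start) > 30*time.Millisecond }
	// Scaled-down stand-ins for importIndexCheckInterval / importIndexWaitLimit.
	fmt.Println(waitFor(checkIndexBuilt, 10*time.Millisecond, time.Second)) // <nil>
}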

View File

@ -132,22 +132,6 @@ rootCoord:
# seconds (24 hours).
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importTaskRetention: 86400
# (in seconds) Check an import task's segment loading state in queryNodes every `importSegmentStateCheckInterval`
# seconds. Default 10 seconds.
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importSegmentStateCheckInterval: 10
# (in seconds) Maximum time to wait for segments in a single import task to be loaded in queryNodes.
# Default 60 seconds (1 minute).
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importSegmentStateWaitLimit: 60
# (in seconds) Check the building status of a task's segments' indices every `importIndexCheckInterval` seconds.
# Default 10 seconds.
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importIndexCheckInterval: 10
# (in seconds) Maximum time to wait for indices to be built on a single import task's segments.
# Default 600 seconds (10 minutes).
# Note: If default value is to be changed, change also the default in: internal/util/paramtable/component_param.go
importIndexWaitLimit: 600
# Related configuration of proxy, used to validate client requests and reduce the returned results.
proxy:
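
Of the import settings shown here, only importTaskRetention (86400 seconds, i.e. 24 hours) survives the change. A small sketch of what a retention check driven by that value amounts to (the expired helper is hypothetical, not code from this PR):

package main

import (
	"fmt"
	"time"
)

// importTaskRetention mirrors the YAML default above: import task records
// older than this become eligible for cleanup.
const importTaskRetention = 86400 * time.Second

// expired is a hypothetical helper illustrating the retention arithmetic.
func expired(createdAt, now time.Time) bool {
	return now.Sub(createdAt) > importTaskRetention
}

func main() {
	created := time.Now().Add(-25 * time.Hour)
	fmt.Println(expired(created, time.Now())) // true: 25h exceeds the 24h retention
}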

View File

@ -361,7 +361,7 @@ const char descriptor_table_protodef_common_2eproto[] PROTOBUF_SECTION_VARIABLE(
"2\037.milvus.proto.common.ObjectType\022>\n\020obj"
"ect_privilege\030\002 \001(\0162$.milvus.proto.commo"
"n.ObjectPrivilege\022\031\n\021object_name_index\030\003"
" \001(\005\022\032\n\022object_name_indexs\030\004 \001(\005*\361\010\n\tErr"
" \001(\005\022\032\n\022object_name_indexs\030\004 \001(\005*\202\t\n\tErr"
"orCode\022\013\n\007Success\020\000\022\023\n\017UnexpectedError\020\001"
"\022\021\n\rConnectFailed\020\002\022\024\n\020PermissionDenied\020"
"\003\022\027\n\023CollectionNotExists\020\004\022\023\n\017IllegalArg"
@ -389,88 +389,88 @@ const char descriptor_table_protodef_common_2eproto[] PROTOBUF_SECTION_VARIABLE(
"!\n\035RefreshPolicyInfoCacheFailure\020+\022\025\n\021Li"
"stPolicyFailure\020,\022\022\n\016NotShardLeader\020-\022\026\n"
"\022NoReplicaAvailable\020.\022\023\n\017SegmentNotFound"
"\020/\022\r\n\tForceDeny\0200\022\r\n\tRateLimit\0201\022\022\n\rDDRe"
"questRace\020\350\007*c\n\nIndexState\022\022\n\016IndexState"
"None\020\000\022\014\n\010Unissued\020\001\022\016\n\nInProgress\020\002\022\014\n\010"
"Finished\020\003\022\n\n\006Failed\020\004\022\t\n\005Retry\020\005*\202\001\n\014Se"
"gmentState\022\024\n\020SegmentStateNone\020\000\022\014\n\010NotE"
"xist\020\001\022\013\n\007Growing\020\002\022\n\n\006Sealed\020\003\022\013\n\007Flush"
"ed\020\004\022\014\n\010Flushing\020\005\022\013\n\007Dropped\020\006\022\r\n\tImpor"
"ting\020\007*>\n\017PlaceholderType\022\010\n\004None\020\000\022\020\n\014B"
"inaryVector\020d\022\017\n\013FloatVector\020e*\370\014\n\007MsgTy"
"pe\022\r\n\tUndefined\020\000\022\024\n\020CreateCollection\020d\022"
"\022\n\016DropCollection\020e\022\021\n\rHasCollection\020f\022\026"
"\n\022DescribeCollection\020g\022\023\n\017ShowCollection"
"s\020h\022\024\n\020GetSystemConfigs\020i\022\022\n\016LoadCollect"
"ion\020j\022\025\n\021ReleaseCollection\020k\022\017\n\013CreateAl"
"ias\020l\022\r\n\tDropAlias\020m\022\016\n\nAlterAlias\020n\022\024\n\017"
"CreatePartition\020\310\001\022\022\n\rDropPartition\020\311\001\022\021"
"\n\014HasPartition\020\312\001\022\026\n\021DescribePartition\020\313"
"\001\022\023\n\016ShowPartitions\020\314\001\022\023\n\016LoadPartitions"
"\020\315\001\022\026\n\021ReleasePartitions\020\316\001\022\021\n\014ShowSegme"
"nts\020\372\001\022\024\n\017DescribeSegment\020\373\001\022\021\n\014LoadSegm"
"ents\020\374\001\022\024\n\017ReleaseSegments\020\375\001\022\024\n\017Handoff"
"Segments\020\376\001\022\030\n\023LoadBalanceSegments\020\377\001\022\025\n"
"\020DescribeSegments\020\200\002\022\020\n\013CreateIndex\020\254\002\022\022"
"\n\rDescribeIndex\020\255\002\022\016\n\tDropIndex\020\256\002\022\013\n\006In"
"sert\020\220\003\022\013\n\006Delete\020\221\003\022\n\n\005Flush\020\222\003\022\027\n\022Rese"
"ndSegmentStats\020\223\003\022\013\n\006Search\020\364\003\022\021\n\014Search"
"Result\020\365\003\022\022\n\rGetIndexState\020\366\003\022\032\n\025GetInde"
"xBuildProgress\020\367\003\022\034\n\027GetCollectionStatis"
"tics\020\370\003\022\033\n\026GetPartitionStatistics\020\371\003\022\r\n\010"
"Retrieve\020\372\003\022\023\n\016RetrieveResult\020\373\003\022\024\n\017Watc"
"hDmChannels\020\374\003\022\025\n\020RemoveDmChannels\020\375\003\022\027\n"
"\022WatchQueryChannels\020\376\003\022\030\n\023RemoveQueryCha"
"nnels\020\377\003\022\035\n\030SealedSegmentsChangeInfo\020\200\004\022"
"\027\n\022WatchDeltaChannels\020\201\004\022\024\n\017GetShardLead"
"ers\020\202\004\022\020\n\013GetReplicas\020\203\004\022\023\n\016UnsubDmChann"
"el\020\204\004\022\024\n\017GetDistribution\020\205\004\022\025\n\020SyncDistr"
"ibution\020\206\004\022\020\n\013SegmentInfo\020\330\004\022\017\n\nSystemIn"
"fo\020\331\004\022\024\n\017GetRecoveryInfo\020\332\004\022\024\n\017GetSegmen"
"tState\020\333\004\022\r\n\010TimeTick\020\260\t\022\023\n\016QueryNodeSta"
"ts\020\261\t\022\016\n\tLoadIndex\020\262\t\022\016\n\tRequestID\020\263\t\022\017\n"
"\nRequestTSO\020\264\t\022\024\n\017AllocateSegment\020\265\t\022\026\n\021"
"SegmentStatistics\020\266\t\022\025\n\020SegmentFlushDone"
"\020\267\t\022\017\n\nDataNodeTt\020\270\t\022\025\n\020CreateCredential"
"\020\334\013\022\022\n\rGetCredential\020\335\013\022\025\n\020DeleteCredent"
"ial\020\336\013\022\025\n\020UpdateCredential\020\337\013\022\026\n\021ListCre"
"dUsernames\020\340\013\022\017\n\nCreateRole\020\300\014\022\r\n\010DropRo"
"le\020\301\014\022\024\n\017OperateUserRole\020\302\014\022\017\n\nSelectRol"
"e\020\303\014\022\017\n\nSelectUser\020\304\014\022\023\n\016SelectResource\020"
"\305\014\022\025\n\020OperatePrivilege\020\306\014\022\020\n\013SelectGrant"
"\020\307\014\022\033\n\026RefreshPolicyInfoCache\020\310\014\022\017\n\nList"
"Policy\020\311\014*\"\n\007DslType\022\007\n\003Dsl\020\000\022\016\n\nBoolExp"
"rV1\020\001*B\n\017CompactionState\022\021\n\rUndefiedStat"
"e\020\000\022\r\n\tExecuting\020\001\022\r\n\tCompleted\020\002*X\n\020Con"
"sistencyLevel\022\n\n\006Strong\020\000\022\013\n\007Session\020\001\022\013"
"\n\007Bounded\020\002\022\016\n\nEventually\020\003\022\016\n\nCustomize"
"d\020\004*\257\001\n\013ImportState\022\021\n\rImportPending\020\000\022\020"
"\n\014ImportFailed\020\001\022\021\n\rImportStarted\020\002\022\024\n\020I"
"mportDownloaded\020\003\022\020\n\014ImportParsed\020\004\022\023\n\017I"
"mportPersisted\020\005\022\023\n\017ImportCompleted\020\006\022\026\n"
"\022ImportAllocSegment\020\n*2\n\nObjectType\022\016\n\nC"
"ollection\020\000\022\n\n\006Global\020\001\022\010\n\004User\020\002*\206\005\n\017Ob"
"jectPrivilege\022\020\n\014PrivilegeAll\020\000\022\035\n\031Privi"
"legeCreateCollection\020\001\022\033\n\027PrivilegeDropC"
"ollection\020\002\022\037\n\033PrivilegeDescribeCollecti"
"on\020\003\022\034\n\030PrivilegeShowCollections\020\004\022\021\n\rPr"
"ivilegeLoad\020\005\022\024\n\020PrivilegeRelease\020\006\022\027\n\023P"
"rivilegeCompaction\020\007\022\023\n\017PrivilegeInsert\020"
"\010\022\023\n\017PrivilegeDelete\020\t\022\032\n\026PrivilegeGetSt"
"atistics\020\n\022\030\n\024PrivilegeCreateIndex\020\013\022\030\n\024"
"PrivilegeIndexDetail\020\014\022\026\n\022PrivilegeDropI"
"ndex\020\r\022\023\n\017PrivilegeSearch\020\016\022\022\n\016Privilege"
"Flush\020\017\022\022\n\016PrivilegeQuery\020\020\022\030\n\024Privilege"
"LoadBalance\020\021\022\023\n\017PrivilegeImport\020\022\022\034\n\030Pr"
"ivilegeCreateOwnership\020\023\022\027\n\023PrivilegeUpd"
"ateUser\020\024\022\032\n\026PrivilegeDropOwnership\020\025\022\034\n"
"\030PrivilegeSelectOwnership\020\026\022\034\n\030Privilege"
"ManageOwnership\020\027\022\027\n\023PrivilegeSelectUser"
"\020\030:^\n\021privilege_ext_obj\022\037.google.protobu"
"f.MessageOptions\030\351\007 \001(\0132!.milvus.proto.c"
"ommon.PrivilegeExtBL\n\016io.milvus.grpcB\013Co"
"mmonProtoP\001Z(github.com/milvus-io/milvus"
"/api/commonpb\240\001\001b\006proto3"
"\020/\022\r\n\tForceDeny\0200\022\r\n\tRateLimit\0201\022\017\n\013Data"
"CoordNA\020d\022\022\n\rDDRequestRace\020\350\007*c\n\nIndexSt"
"ate\022\022\n\016IndexStateNone\020\000\022\014\n\010Unissued\020\001\022\016\n"
"\nInProgress\020\002\022\014\n\010Finished\020\003\022\n\n\006Failed\020\004\022"
"\t\n\005Retry\020\005*\202\001\n\014SegmentState\022\024\n\020SegmentSt"
"ateNone\020\000\022\014\n\010NotExist\020\001\022\013\n\007Growing\020\002\022\n\n\006"
"Sealed\020\003\022\013\n\007Flushed\020\004\022\014\n\010Flushing\020\005\022\013\n\007D"
"ropped\020\006\022\r\n\tImporting\020\007*>\n\017PlaceholderTy"
"pe\022\010\n\004None\020\000\022\020\n\014BinaryVector\020d\022\017\n\013FloatV"
"ector\020e*\370\014\n\007MsgType\022\r\n\tUndefined\020\000\022\024\n\020Cr"
"eateCollection\020d\022\022\n\016DropCollection\020e\022\021\n\r"
"HasCollection\020f\022\026\n\022DescribeCollection\020g\022"
"\023\n\017ShowCollections\020h\022\024\n\020GetSystemConfigs"
"\020i\022\022\n\016LoadCollection\020j\022\025\n\021ReleaseCollect"
"ion\020k\022\017\n\013CreateAlias\020l\022\r\n\tDropAlias\020m\022\016\n"
"\nAlterAlias\020n\022\024\n\017CreatePartition\020\310\001\022\022\n\rD"
"ropPartition\020\311\001\022\021\n\014HasPartition\020\312\001\022\026\n\021De"
"scribePartition\020\313\001\022\023\n\016ShowPartitions\020\314\001\022"
"\023\n\016LoadPartitions\020\315\001\022\026\n\021ReleasePartition"
"s\020\316\001\022\021\n\014ShowSegments\020\372\001\022\024\n\017DescribeSegme"
"nt\020\373\001\022\021\n\014LoadSegments\020\374\001\022\024\n\017ReleaseSegme"
"nts\020\375\001\022\024\n\017HandoffSegments\020\376\001\022\030\n\023LoadBala"
"nceSegments\020\377\001\022\025\n\020DescribeSegments\020\200\002\022\020\n"
"\013CreateIndex\020\254\002\022\022\n\rDescribeIndex\020\255\002\022\016\n\tD"
"ropIndex\020\256\002\022\013\n\006Insert\020\220\003\022\013\n\006Delete\020\221\003\022\n\n"
"\005Flush\020\222\003\022\027\n\022ResendSegmentStats\020\223\003\022\013\n\006Se"
"arch\020\364\003\022\021\n\014SearchResult\020\365\003\022\022\n\rGetIndexSt"
"ate\020\366\003\022\032\n\025GetIndexBuildProgress\020\367\003\022\034\n\027Ge"
"tCollectionStatistics\020\370\003\022\033\n\026GetPartition"
"Statistics\020\371\003\022\r\n\010Retrieve\020\372\003\022\023\n\016Retrieve"
"Result\020\373\003\022\024\n\017WatchDmChannels\020\374\003\022\025\n\020Remov"
"eDmChannels\020\375\003\022\027\n\022WatchQueryChannels\020\376\003\022"
"\030\n\023RemoveQueryChannels\020\377\003\022\035\n\030SealedSegme"
"ntsChangeInfo\020\200\004\022\027\n\022WatchDeltaChannels\020\201"
"\004\022\024\n\017GetShardLeaders\020\202\004\022\020\n\013GetReplicas\020\203"
"\004\022\023\n\016UnsubDmChannel\020\204\004\022\024\n\017GetDistributio"
"n\020\205\004\022\025\n\020SyncDistribution\020\206\004\022\020\n\013SegmentIn"
"fo\020\330\004\022\017\n\nSystemInfo\020\331\004\022\024\n\017GetRecoveryInf"
"o\020\332\004\022\024\n\017GetSegmentState\020\333\004\022\r\n\010TimeTick\020\260"
"\t\022\023\n\016QueryNodeStats\020\261\t\022\016\n\tLoadIndex\020\262\t\022\016"
"\n\tRequestID\020\263\t\022\017\n\nRequestTSO\020\264\t\022\024\n\017Alloc"
"ateSegment\020\265\t\022\026\n\021SegmentStatistics\020\266\t\022\025\n"
"\020SegmentFlushDone\020\267\t\022\017\n\nDataNodeTt\020\270\t\022\025\n"
"\020CreateCredential\020\334\013\022\022\n\rGetCredential\020\335\013"
"\022\025\n\020DeleteCredential\020\336\013\022\025\n\020UpdateCredent"
"ial\020\337\013\022\026\n\021ListCredUsernames\020\340\013\022\017\n\nCreate"
"Role\020\300\014\022\r\n\010DropRole\020\301\014\022\024\n\017OperateUserRol"
"e\020\302\014\022\017\n\nSelectRole\020\303\014\022\017\n\nSelectUser\020\304\014\022\023"
"\n\016SelectResource\020\305\014\022\025\n\020OperatePrivilege\020"
"\306\014\022\020\n\013SelectGrant\020\307\014\022\033\n\026RefreshPolicyInf"
"oCache\020\310\014\022\017\n\nListPolicy\020\311\014*\"\n\007DslType\022\007\n"
"\003Dsl\020\000\022\016\n\nBoolExprV1\020\001*B\n\017CompactionStat"
"e\022\021\n\rUndefiedState\020\000\022\r\n\tExecuting\020\001\022\r\n\tC"
"ompleted\020\002*X\n\020ConsistencyLevel\022\n\n\006Strong"
"\020\000\022\013\n\007Session\020\001\022\013\n\007Bounded\020\002\022\016\n\nEventual"
"ly\020\003\022\016\n\nCustomized\020\004*\213\001\n\013ImportState\022\021\n\r"
"ImportPending\020\000\022\020\n\014ImportFailed\020\001\022\021\n\rImp"
"ortStarted\020\002\022\023\n\017ImportPersisted\020\005\022\023\n\017Imp"
"ortCompleted\020\006\022\032\n\026ImportFailedAndCleaned"
"\020\007*2\n\nObjectType\022\016\n\nCollection\020\000\022\n\n\006Glob"
"al\020\001\022\010\n\004User\020\002*\206\005\n\017ObjectPrivilege\022\020\n\014Pr"
"ivilegeAll\020\000\022\035\n\031PrivilegeCreateCollectio"
"n\020\001\022\033\n\027PrivilegeDropCollection\020\002\022\037\n\033Priv"
"ilegeDescribeCollection\020\003\022\034\n\030PrivilegeSh"
"owCollections\020\004\022\021\n\rPrivilegeLoad\020\005\022\024\n\020Pr"
"ivilegeRelease\020\006\022\027\n\023PrivilegeCompaction\020"
"\007\022\023\n\017PrivilegeInsert\020\010\022\023\n\017PrivilegeDelet"
"e\020\t\022\032\n\026PrivilegeGetStatistics\020\n\022\030\n\024Privi"
"legeCreateIndex\020\013\022\030\n\024PrivilegeIndexDetai"
"l\020\014\022\026\n\022PrivilegeDropIndex\020\r\022\023\n\017Privilege"
"Search\020\016\022\022\n\016PrivilegeFlush\020\017\022\022\n\016Privileg"
"eQuery\020\020\022\030\n\024PrivilegeLoadBalance\020\021\022\023\n\017Pr"
"ivilegeImport\020\022\022\034\n\030PrivilegeCreateOwners"
"hip\020\023\022\027\n\023PrivilegeUpdateUser\020\024\022\032\n\026Privil"
"egeDropOwnership\020\025\022\034\n\030PrivilegeSelectOwn"
"ership\020\026\022\034\n\030PrivilegeManageOwnership\020\027\022\027"
"\n\023PrivilegeSelectUser\020\030:^\n\021privilege_ext"
"_obj\022\037.google.protobuf.MessageOptions\030\351\007"
" \001(\0132!.milvus.proto.common.PrivilegeExtB"
"L\n\016io.milvus.grpcB\013CommonProtoP\001Z(github"
".com/milvus-io/milvus/api/commonpb\240\001\001b\006p"
"roto3"
;
static const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable*const descriptor_table_common_2eproto_deps[1] = {
&::descriptor_table_google_2fprotobuf_2fdescriptor_2eproto,
@ -491,7 +491,7 @@ static ::PROTOBUF_NAMESPACE_ID::internal::SCCInfoBase*const descriptor_table_com
static ::PROTOBUF_NAMESPACE_ID::internal::once_flag descriptor_table_common_2eproto_once;
static bool descriptor_table_common_2eproto_initialized = false;
const ::PROTOBUF_NAMESPACE_ID::internal::DescriptorTable descriptor_table_common_2eproto = {
&descriptor_table_common_2eproto_initialized, descriptor_table_protodef_common_2eproto, "common.proto", 5264,
&descriptor_table_common_2eproto_initialized, descriptor_table_protodef_common_2eproto, "common.proto", 5245,
&descriptor_table_common_2eproto_once, descriptor_table_common_2eproto_sccs, descriptor_table_common_2eproto_deps, 11, 1,
schemas, file_default_instances, TableStruct_common_2eproto::offsets,
file_level_metadata_common_2eproto, 11, file_level_enum_descriptors_common_2eproto, file_level_service_descriptors_common_2eproto,
@ -557,6 +557,7 @@ bool ErrorCode_IsValid(int value) {
case 47:
case 48:
case 49:
case 100:
case 1000:
return true;
default:
@ -764,11 +765,9 @@ bool ImportState_IsValid(int value) {
case 0:
case 1:
case 2:
case 3:
case 4:
case 5:
case 6:
case 10:
case 7:
return true;
default:
return false;

View File

@ -162,6 +162,7 @@ enum ErrorCode : int {
SegmentNotFound = 47,
ForceDeny = 48,
RateLimit = 49,
DataCoordNA = 100,
DDRequestRace = 1000,
ErrorCode_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(),
ErrorCode_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max()
@ -457,17 +458,15 @@ enum ImportState : int {
ImportPending = 0,
ImportFailed = 1,
ImportStarted = 2,
ImportDownloaded = 3,
ImportParsed = 4,
ImportPersisted = 5,
ImportCompleted = 6,
ImportAllocSegment = 10,
ImportFailedAndCleaned = 7,
ImportState_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::min(),
ImportState_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::PROTOBUF_NAMESPACE_ID::int32>::max()
};
bool ImportState_IsValid(int value);
constexpr ImportState ImportState_MIN = ImportPending;
constexpr ImportState ImportState_MAX = ImportAllocSegment;
constexpr ImportState ImportState_MAX = ImportFailedAndCleaned;
constexpr int ImportState_ARRAYSIZE = ImportState_MAX + 1;
const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* ImportState_descriptor();
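
The import state machine is trimmed in this hunk: ImportDownloaded, ImportParsed and ImportAllocSegment are removed, and ImportFailedAndCleaned = 7 is added as a terminal state alongside ImportCompleted and ImportFailed. Below is a minimal Go sketch of a terminal-state check under the new values; the ImportState mirror and the isTerminal helper are illustrative only, not part of this commit.

package main

import "fmt"

// Mirrors the trimmed ImportState enum above; values must match the proto.
type ImportState int32

const (
	ImportPending          ImportState = 0
	ImportFailed           ImportState = 1
	ImportStarted          ImportState = 2
	ImportPersisted        ImportState = 5
	ImportCompleted        ImportState = 6
	ImportFailedAndCleaned ImportState = 7
)

// isTerminal is a hypothetical helper: once a task is completed, failed, or
// failed-and-cleaned, no further state transitions are expected.
func isTerminal(s ImportState) bool {
	switch s {
	case ImportCompleted, ImportFailed, ImportFailedAndCleaned:
		return true
	default:
		return false
	}
}

func main() {
	for _, s := range []ImportState{ImportPending, ImportPersisted, ImportFailedAndCleaned} {
		fmt.Printf("state=%d terminal=%v\n", s, isTerminal(s))
	}
}

A client polling import progress could stop once such a check reports true.
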

View File

@ -149,11 +149,6 @@ func (c *Cluster) ReCollectSegmentStats(ctx context.Context, nodeID int64) {
c.sessionManager.ReCollectSegmentStats(ctx, nodeID)
}
// AddSegment triggers an AddSegment call from the session manager.
func (c *Cluster) AddSegment(ctx context.Context, nodeID int64, req *datapb.AddSegmentRequest) {
c.sessionManager.AddSegment(ctx, nodeID, req)
}
// GetSessions returns all sessions
func (c *Cluster) GetSessions() []*Session {
return c.sessionManager.GetSessions()

View File

@ -23,7 +23,6 @@ import (
"time"
"github.com/golang/protobuf/proto"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/kv"
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
"github.com/milvus-io/milvus/internal/proto/datapb"
@ -641,74 +640,3 @@ func TestCluster_ReCollectSegmentStats(t *testing.T) {
time.Sleep(500 * time.Millisecond)
})
}
func TestCluster_AddSegment(t *testing.T) {
kv := getMetaKv(t)
defer func() {
kv.RemoveWithPrefix("")
kv.Close()
}()
t.Run("add segment succeed", func(t *testing.T) {
ctx, cancel := context.WithCancel(context.TODO())
defer cancel()
var mockSessionCreator = func(ctx context.Context, addr string) (types.DataNode, error) {
return newMockDataNodeClient(1, nil)
}
sessionManager := NewSessionManager(withSessionCreator(mockSessionCreator))
channelManager, err := NewChannelManager(kv, newMockHandler())
assert.Nil(t, err)
cluster := NewCluster(sessionManager, channelManager)
defer cluster.Close()
addr := "localhost:8080"
info := &NodeInfo{
Address: addr,
NodeID: 1,
}
nodes := []*NodeInfo{info}
err = cluster.Startup(ctx, nodes)
assert.Nil(t, err)
err = cluster.Watch("chan-1", 1)
assert.NoError(t, err)
assert.NotPanics(t, func() {
cluster.AddSegment(ctx, 1, &datapb.AddSegmentRequest{
Base: &commonpb.MsgBase{
SourceID: 0,
},
})
})
time.Sleep(500 * time.Millisecond)
})
t.Run("add segment failed", func(t *testing.T) {
ctx, cancel := context.WithCancel(context.TODO())
defer cancel()
sessionManager := NewSessionManager()
channelManager, err := NewChannelManager(kv, newMockHandler())
assert.Nil(t, err)
cluster := NewCluster(sessionManager, channelManager)
defer cluster.Close()
addr := "localhost:8080"
info := &NodeInfo{
Address: addr,
NodeID: 1,
}
nodes := []*NodeInfo{info}
err = cluster.Startup(ctx, nodes)
assert.Nil(t, err)
err = cluster.Watch("chan-1", 1)
assert.NoError(t, err)
assert.NotPanics(t, func() {
cluster.AddSegment(ctx, 1, &datapb.AddSegmentRequest{
Base: &commonpb.MsgBase{
SourceID: 0,
},
})
})
time.Sleep(500 * time.Millisecond)
})
}

View File

@ -284,7 +284,8 @@ func (t *compactionTrigger) handleGlobalSignal(signal *compactionSignal) {
return (signal.collectionID == 0 || segment.CollectionID == signal.collectionID) &&
isSegmentHealthy(segment) &&
isFlush(segment) &&
!segment.isCompacting // not compacting now
!segment.isCompacting && // not compacting now
!segment.GetIsImporting() // not importing now
}) // m is a list of chanPartSegments, i.e. segments grouped by channel and partition
for _, group := range m {
@ -535,7 +536,8 @@ func (t *compactionTrigger) getCandidateSegments(channel string, partitionID Uni
!isFlush(s) ||
s.GetInsertChannel() != channel ||
s.GetPartitionID() != partitionID ||
s.isCompacting {
s.isCompacting ||
s.GetIsImporting() {
continue
}
res = append(res, s)
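
Both the global-signal filter above and getCandidateSegments now exclude segments whose isImporting flag is set, on top of the existing flushed/compacting checks, so a bulk-load segment is never pulled into a compaction plan mid-import. A minimal sketch of the combined predicate, with a stripped-down stand-in for SegmentInfo:

package main

import "fmt"

// segment is a stripped-down stand-in for datacoord's SegmentInfo.
type segment struct {
	flushed     bool
	compacting  bool
	isImporting bool
}

// compactable mirrors the candidate filter: only flushed segments that are
// neither compacting nor importing may enter a compaction plan.
func compactable(s segment) bool {
	return s.flushed && !s.compacting && !s.isImporting
}

func main() {
	segs := []segment{
		{flushed: true},                    // eligible
		{flushed: true, compacting: true},  // busy compacting
		{flushed: true, isImporting: true}, // bulk load in flight
	}
	for i, s := range segs {
		fmt.Printf("segment %d compactable=%v\n", i, compactable(s))
	}
}
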

View File

@ -21,16 +21,15 @@ import (
"sync"
"time"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/samber/lo"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/minio/minio-go/v7"
"github.com/samber/lo"
"go.uber.org/zap"
)
@ -65,10 +64,7 @@ type garbageCollector struct {
}
// newGarbageCollector creates a garbage collector with the given meta and option
func newGarbageCollector(meta *meta,
segRefer *SegmentReferenceManager,
indexCoord types.IndexCoord,
opt GcOption) *garbageCollector {
func newGarbageCollector(meta *meta, segRefer *SegmentReferenceManager, indexCoord types.IndexCoord, opt GcOption) *garbageCollector {
log.Info("GC with option", zap.Bool("enabled", opt.enabled), zap.Duration("interval", opt.checkInterval),
zap.Duration("missingTolerance", opt.missingTolerance), zap.Duration("dropTolerance", opt.dropTolerance))
return &garbageCollector{

View File

@ -175,7 +175,7 @@ func Test_garbageCollector_scan(t *testing.T) {
gc.close()
})
t.Run("hit, no gc", func(t *testing.T) {
segment := buildSegment(1, 10, 100, "ch")
segment := buildSegment(1, 10, 100, "ch", false)
segment.State = commonpb.SegmentState_Flushed
segment.Binlogs = []*datapb.FieldBinlog{getFieldBinlogPaths(0, inserts[0])}
segment.Statslogs = []*datapb.FieldBinlog{getFieldBinlogPaths(0, stats[0])}
@ -201,7 +201,7 @@ func Test_garbageCollector_scan(t *testing.T) {
})
t.Run("dropped gc one", func(t *testing.T) {
segment := buildSegment(1, 10, 100, "ch")
segment := buildSegment(1, 10, 100, "ch", false)
segment.State = commonpb.SegmentState_Dropped
segment.DroppedAt = uint64(time.Now().Add(-time.Hour).UnixNano())
segment.Binlogs = []*datapb.FieldBinlog{getFieldBinlogPaths(0, inserts[0])}
@ -237,6 +237,27 @@ func Test_garbageCollector_scan(t *testing.T) {
gc.start()
gc.scan()
gc.clearEtcd()
// Bad paths shall remain, since datacoord cannot tell whether a file is garbage when its path is invalid
validateMinioPrefixElements(t, cli.Client, bucketName, path.Join(rootPath, insertLogPrefix), inserts[1:2])
validateMinioPrefixElements(t, cli.Client, bucketName, path.Join(rootPath, statsLogPrefix), stats[1:2])
validateMinioPrefixElements(t, cli.Client, bucketName, path.Join(rootPath, deltaLogPrefix), delta[1:2])
validateMinioPrefixElements(t, cli.Client, bucketName, path.Join(rootPath, `indexes`), others)
gc.close()
})
t.Run("list object with error", func(t *testing.T) {
gc := newGarbageCollector(meta, segRefer, indexCoord, GcOption{
cli: cli,
enabled: true,
checkInterval: time.Minute * 30,
missingTolerance: 0,
dropTolerance: 0,
})
gc.start()
gc.scan()
// Bad paths shall remain, since datacoord cannot tell whether a file is garbage when its path is invalid
validateMinioPrefixElements(t, cli.Client, bucketName, path.Join(rootPath, insertLogPrefix), inserts[1:2])
validateMinioPrefixElements(t, cli.Client, bucketName, path.Join(rootPath, statsLogPrefix), stats[1:2])

View File

@ -76,6 +76,10 @@ func (h *ServerHandler) GetVChanPositions(channel *channel, partitionID UniqueID
(s.GetStartPosition() == nil && s.GetDmlPosition() == nil) {
continue
}
if s.GetIsImporting() {
// Skip bulk load segments.
continue
}
segmentInfos[s.GetID()] = s
if s.GetState() == commonpb.SegmentState_Dropped {
droppedIDs.Insert(s.GetID())

View File

@ -217,30 +217,63 @@ func (m *meta) GetAllSegment(segID UniqueID) *SegmentInfo {
}
// SetState setting segment with provided ID state
func (m *meta) SetState(segmentID UniqueID, state commonpb.SegmentState) error {
func (m *meta) SetState(segmentID UniqueID, targetState commonpb.SegmentState) error {
m.Lock()
defer m.Unlock()
curSegInfo := m.segments.GetSegment(segmentID)
if curSegInfo == nil {
// TODO: Should return error instead.
return nil
}
// Persist segment updates first.
clonedSegment := curSegInfo.Clone()
clonedSegment.State = targetState
oldState := curSegInfo.GetState()
m.segments.SetState(segmentID, state)
curSegInfo = m.segments.GetSegment(segmentID)
if curSegInfo != nil && isSegmentHealthy(curSegInfo) {
err := m.catalog.AlterSegments(m.ctx, []*datapb.SegmentInfo{curSegInfo.SegmentInfo})
if err == nil {
metrics.DataCoordNumSegments.WithLabelValues(oldState.String()).Dec()
metrics.DataCoordNumSegments.WithLabelValues(state.String()).Inc()
if state == commonpb.SegmentState_Flushed {
metrics.DataCoordNumStoredRows.WithLabelValues().Add(float64(curSegInfo.GetNumOfRows()))
metrics.DataCoordNumStoredRowsCounter.WithLabelValues().Add(float64(curSegInfo.GetNumOfRows()))
} else if oldState == commonpb.SegmentState_Flushed {
metrics.DataCoordNumStoredRows.WithLabelValues().Sub(float64(curSegInfo.GetNumOfRows()))
}
if clonedSegment != nil && isSegmentHealthy(clonedSegment) {
if err := m.catalog.AlterSegments(m.ctx, []*datapb.SegmentInfo{clonedSegment.SegmentInfo}); err != nil {
log.Error("failed to alter segments",
zap.Int64("segment ID", segmentID),
zap.Any("target state", targetState),
zap.Error(err))
return err
}
metrics.DataCoordNumSegments.WithLabelValues(oldState.String()).Dec()
metrics.DataCoordNumSegments.WithLabelValues(targetState.String()).Inc()
if targetState == commonpb.SegmentState_Flushed {
metrics.DataCoordNumStoredRows.WithLabelValues().Add(float64(curSegInfo.GetNumOfRows()))
metrics.DataCoordNumStoredRowsCounter.WithLabelValues().Add(float64(curSegInfo.GetNumOfRows()))
} else if oldState == commonpb.SegmentState_Flushed {
metrics.DataCoordNumStoredRows.WithLabelValues().Sub(float64(curSegInfo.GetNumOfRows()))
}
return err
}
// Update in-memory meta.
m.segments.SetState(segmentID, targetState)
return nil
}
// UnsetIsImporting removes the `isImporting` flag of a segment.
func (m *meta) UnsetIsImporting(segmentID UniqueID) error {
m.Lock()
defer m.Unlock()
curSegInfo := m.segments.GetSegment(segmentID)
if curSegInfo == nil {
return fmt.Errorf("segment not found %d", segmentID)
}
// Persist segment updates first.
clonedSegment := curSegInfo.Clone()
clonedSegment.IsImporting = false
if isSegmentHealthy(clonedSegment) {
log.Info("unsetting isImport state of segment",
zap.Int64("segment ID", segmentID))
if err := m.catalog.AlterSegments(m.ctx, []*datapb.SegmentInfo{clonedSegment.SegmentInfo}); err != nil {
log.Error("failed to unset segment isImporting state",
zap.Int64("segment ID", segmentID),
zap.Error(err))
return err
}
}
// Update in-memory meta.
m.segments.SetIsImporting(segmentID, false)
return nil
}
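
SetState and UnsetIsImporting now share a persist-first discipline: clone the in-memory record, apply the change to the clone, write the clone through the catalog, and only commit to in-memory meta once the write succeeds, so a failed etcd write never leaves memory ahead of storage. A minimal sketch of that ordering, assuming a hypothetical store interface in place of the catalog:

package main

import (
	"errors"
	"fmt"
	"sync"
)

type segInfo struct {
	ID          int64
	IsImporting bool
}

// store is a hypothetical stand-in for the datacoord catalog.
type store interface {
	AlterSegment(s segInfo) error
}

type meta struct {
	sync.Mutex
	catalog  store
	segments map[int64]segInfo
}

// UnsetIsImporting persists the cloned update first, then commits it in memory.
func (m *meta) UnsetIsImporting(id int64) error {
	m.Lock()
	defer m.Unlock()
	cur, ok := m.segments[id]
	if !ok {
		return fmt.Errorf("segment not found %d", id)
	}
	cloned := cur // value copy acts as the clone
	cloned.IsImporting = false
	if err := m.catalog.AlterSegment(cloned); err != nil {
		return err // in-memory state is untouched on failure
	}
	m.segments[id] = cloned
	return nil
}

type memStore struct{ fail bool }

func (s memStore) AlterSegment(segInfo) error {
	if s.fail {
		return errors.New("etcd write failed")
	}
	return nil
}

func main() {
	m := &meta{catalog: memStore{}, segments: map[int64]segInfo{1: {ID: 1, IsImporting: true}}}
	err := m.UnsetIsImporting(1)
	fmt.Println(err, m.segments[1].IsImporting) // <nil> false
}

On a catalog failure the map is untouched, which is exactly the property the reordering above buys.
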
@ -259,12 +292,15 @@ func (m *meta) UpdateFlushSegmentsInfo(
m.Lock()
defer m.Unlock()
log.Info("update flush segments info", zap.Int64("segmentId", segmentID),
log.Info("update flush segments info",
zap.Int64("segmentId", segmentID),
zap.Int("binlog", len(binlogs)),
zap.Int("statslog", len(statslogs)),
zap.Int("deltalogs", len(deltalogs)),
zap.Int("stats log", len(statslogs)),
zap.Int("delta logs", len(deltalogs)),
zap.Bool("flushed", flushed),
zap.Bool("dropped", dropped),
zap.Any("check points", checkpoints),
zap.Any("start position", startPositions),
zap.Bool("importing", importing))
segment := m.segments.GetSegment(segmentID)
if importing {
@ -705,11 +741,23 @@ func (m *meta) SelectSegments(selector SegmentInfoSelector) []*SegmentInfo {
func (m *meta) AddAllocation(segmentID UniqueID, allocation *Allocation) error {
m.Lock()
defer m.Unlock()
m.segments.AddAllocation(segmentID, allocation)
if segInfo := m.segments.GetSegment(segmentID); segInfo != nil {
// update segment LastExpireTime
return m.catalog.AlterSegments(m.ctx, []*datapb.SegmentInfo{segInfo.SegmentInfo})
curSegInfo := m.segments.GetSegment(segmentID)
if curSegInfo == nil {
// TODO: Error handling.
return nil
}
// Persist segment updates first.
clonedSegment := curSegInfo.Clone(AddAllocation(allocation))
if clonedSegment != nil && isSegmentHealthy(clonedSegment) {
if err := m.catalog.AlterSegments(m.ctx, []*datapb.SegmentInfo{clonedSegment.SegmentInfo}); err != nil {
log.Error("failed to add allocation for segment",
zap.Int64("segment ID", segmentID),
zap.Error(err))
return err
}
}
// Update in-memory meta.
m.segments.AddAllocation(segmentID, allocation)
return nil
}
@ -745,6 +793,14 @@ func (m *meta) SetSegmentCompacting(segmentID UniqueID, compacting bool) {
m.segments.SetIsCompacting(segmentID, compacting)
}
// SetSegmentIsImporting sets the importing state for a segment.
func (m *meta) SetSegmentIsImporting(segmentID UniqueID, importing bool) {
m.Lock()
defer m.Unlock()
m.segments.SetIsImporting(segmentID, importing)
}
func (m *meta) CompleteMergeCompaction(compactionLogs []*datapb.CompactionSegmentBinlogs, result *datapb.CompactionResult) error {
m.Lock()
defer m.Unlock()
@ -919,7 +975,7 @@ func (m *meta) updateDeltalogs(origin []*datapb.FieldBinlog, removes []*datapb.F
}
// buildSegment is a utility function that composes a datapb.SegmentInfo struct from the provided info
func buildSegment(collectionID UniqueID, partitionID UniqueID, segmentID UniqueID, channelName string) *SegmentInfo {
func buildSegment(collectionID UniqueID, partitionID UniqueID, segmentID UniqueID, channelName string, isImporting bool) *SegmentInfo {
info := &datapb.SegmentInfo{
ID: segmentID,
CollectionID: collectionID,
@ -927,6 +983,7 @@ func buildSegment(collectionID UniqueID, partitionID UniqueID, segmentID UniqueI
InsertChannel: channelName,
NumOfRows: 0,
State: commonpb.SegmentState_Growing,
IsImporting: isImporting,
}
return NewSegmentInfo(info)
}

View File

@ -274,13 +274,13 @@ func TestMeta_Basic(t *testing.T) {
// create seg0 for partition0, seg0/seg1 for partition1
segID0_0, err := mockAllocator.allocID(ctx)
assert.Nil(t, err)
segInfo0_0 := buildSegment(collID, partID0, segID0_0, channelName)
segInfo0_0 := buildSegment(collID, partID0, segID0_0, channelName, true)
segID1_0, err := mockAllocator.allocID(ctx)
assert.Nil(t, err)
segInfo1_0 := buildSegment(collID, partID1, segID1_0, channelName)
segInfo1_0 := buildSegment(collID, partID1, segID1_0, channelName, false)
segID1_1, err := mockAllocator.allocID(ctx)
assert.Nil(t, err)
segInfo1_1 := buildSegment(collID, partID1, segID1_1, channelName)
segInfo1_1 := buildSegment(collID, partID1, segID1_1, channelName, false)
// check AddSegment
err = meta.AddSegment(segInfo0_0)
@ -329,6 +329,28 @@ func TestMeta_Basic(t *testing.T) {
info0_0 = meta.GetSegment(segID0_0)
assert.NotNil(t, info0_0)
assert.EqualValues(t, commonpb.SegmentState_Flushed, info0_0.State)
info0_0 = meta.GetSegment(segID0_0)
assert.NotNil(t, info0_0)
assert.Equal(t, true, info0_0.GetIsImporting())
err = meta.UnsetIsImporting(segID0_0)
assert.NoError(t, err)
info0_0 = meta.GetSegment(segID0_0)
assert.NotNil(t, info0_0)
assert.Equal(t, false, info0_0.GetIsImporting())
// UnsetIsImporting on a segment that does not exist.
err = meta.UnsetIsImporting(segID1_0)
assert.Error(t, err)
info1_1 := meta.GetSegment(segID1_1)
assert.NotNil(t, info1_1)
assert.Equal(t, false, info1_1.GetIsImporting())
err = meta.UnsetIsImporting(segID1_1)
assert.NoError(t, err)
info1_1 = meta.GetSegment(segID1_1)
assert.NotNil(t, info1_1)
assert.Equal(t, false, info1_1.GetIsImporting())
})
t.Run("Test segment with kv fails", func(t *testing.T) {
@ -366,7 +388,7 @@ func TestMeta_Basic(t *testing.T) {
// add seg1 with 100 rows
segID0, err := mockAllocator.allocID(ctx)
assert.Nil(t, err)
segInfo0 := buildSegment(collID, partID0, segID0, channelName)
segInfo0 := buildSegment(collID, partID0, segID0, channelName, false)
segInfo0.NumOfRows = rowCount0
err = meta.AddSegment(segInfo0)
assert.Nil(t, err)
@ -374,7 +396,7 @@ func TestMeta_Basic(t *testing.T) {
// add seg2 with 300 rows
segID1, err := mockAllocator.allocID(ctx)
assert.Nil(t, err)
segInfo1 := buildSegment(collID, partID0, segID1, channelName)
segInfo1 := buildSegment(collID, partID0, segID1, channelName, false)
segInfo1.NumOfRows = rowCount1
err = meta.AddSegment(segInfo1)
assert.Nil(t, err)
@ -717,6 +739,55 @@ func Test_meta_SetSegmentCompacting(t *testing.T) {
}
}
func Test_meta_SetSegmentImporting(t *testing.T) {
type fields struct {
client kv.TxnKV
segments *SegmentsInfo
}
type args struct {
segmentID UniqueID
importing bool
}
tests := []struct {
name string
fields fields
args args
}{
{
"test set segment importing",
fields{
memkv.NewMemoryKV(),
&SegmentsInfo{
map[int64]*SegmentInfo{
1: {
SegmentInfo: &datapb.SegmentInfo{
ID: 1,
State: commonpb.SegmentState_Flushed,
IsImporting: false,
},
},
},
},
},
args{
segmentID: 1,
importing: true,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
m := &meta{
catalog: &datacoord.Catalog{Txn: tt.fields.client},
segments: tt.fields.segments,
}
m.SetSegmentIsImporting(tt.args.segmentID, tt.args.importing)
segment := m.GetSegment(tt.args.segmentID)
assert.Equal(t, tt.args.importing, segment.GetIsImporting())
})
}
}
func Test_meta_GetSegmentsOfCollection(t *testing.T) {
type fields struct {
segments *SegmentsInfo

View File

@ -118,11 +118,12 @@ func newTestSchema() *schemapb.CollectionSchema {
}
type mockDataNodeClient struct {
id int64
state internalpb.StateCode
ch chan interface{}
compactionStateResp *datapb.CompactionStateResponse
compactionResp *commonpb.Status
id int64
state internalpb.StateCode
ch chan interface{}
compactionStateResp *datapb.CompactionStateResponse
addImportSegmentResp *datapb.AddImportSegmentResponse
compactionResp *commonpb.Status
}
func newMockDataNodeClient(id int64, ch chan interface{}) (*mockDataNodeClient, error) {
@ -130,6 +131,11 @@ func newMockDataNodeClient(id int64, ch chan interface{}) (*mockDataNodeClient,
id: id,
state: internalpb.StateCode_Initializing,
ch: ch,
addImportSegmentResp: &datapb.AddImportSegmentResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
},
}, nil
}
@ -242,8 +248,8 @@ func (c *mockDataNodeClient) Import(ctx context.Context, in *datapb.ImportTaskRe
return &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, nil
}
func (c *mockDataNodeClient) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
return &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, nil
func (c *mockDataNodeClient) AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error) {
return c.addImportSegmentResp, nil
}
func (c *mockDataNodeClient) SyncSegments(ctx context.Context, req *datapb.SyncSegmentsRequest) (*commonpb.Status, error) {

View File

@ -109,6 +109,13 @@ func (s *SegmentsInfo) SetState(segmentID UniqueID, state commonpb.SegmentState)
}
}
// SetIsImporting sets the import status for a segment.
func (s *SegmentsInfo) SetIsImporting(segmentID UniqueID, isImporting bool) {
if segment, ok := s.segments[segmentID]; ok {
s.segments[segmentID] = segment.Clone(SetIsImporting(isImporting))
}
}
// SetDmlPosition sets DmlPosition info (checkpoint for recovery) for SegmentInfo with provided segmentID
// if SegmentInfo not found, do nothing
func (s *SegmentsInfo) SetDmlPosition(segmentID UniqueID, pos *internalpb.MsgPosition) {
@ -179,7 +186,7 @@ func (s *SegmentsInfo) AddSegmentBinlogs(segmentID UniqueID, field2Binlogs map[U
}
}
// SetIsCompacting sets compactino status for segment
// SetIsCompacting sets compaction status for segment
func (s *SegmentsInfo) SetIsCompacting(segmentID UniqueID, isCompacting bool) {
if segment, ok := s.segments[segmentID]; ok {
s.segments[segmentID] = segment.ShadowClone(SetIsCompacting(isCompacting))
@ -246,6 +253,13 @@ func SetState(state commonpb.SegmentState) SegmentInfoOption {
}
}
// SetIsImporting is the option to set import state for segment info.
func SetIsImporting(isImporting bool) SegmentInfoOption {
return func(segment *SegmentInfo) {
segment.IsImporting = isImporting
}
}
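
SetIsImporting joins the existing SegmentInfoOption family: a setter never mutates the shared SegmentInfo in place, it stores a clone with the option applied (Clone for deep fields, ShadowClone where a shallow copy suffices). A minimal sketch of the clone-plus-functional-options pattern, with simplified types:

package main

import "fmt"

type SegmentInfo struct {
	ID          int64
	IsImporting bool
}

// Option mutates a freshly cloned SegmentInfo, mirroring SegmentInfoOption.
type Option func(*SegmentInfo)

func SetIsImporting(v bool) Option {
	return func(s *SegmentInfo) { s.IsImporting = v }
}

// Clone copies the segment and applies the options to the copy only.
func (s *SegmentInfo) Clone(opts ...Option) *SegmentInfo {
	c := *s
	for _, opt := range opts {
		opt(&c)
	}
	return &c
}

func main() {
	orig := &SegmentInfo{ID: 1}
	updated := orig.Clone(SetIsImporting(true))
	fmt.Println(orig.IsImporting, updated.IsImporting) // false true
}
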
// SetDmlPosition is the option to set dml position for segment info
func SetDmlPosition(pos *internalpb.MsgPosition) SegmentInfoOption {
return func(segment *SegmentInfo) {

View File

@ -26,7 +26,6 @@ import (
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/trace"
"github.com/milvus-io/milvus/internal/util/tsoutil"
@ -302,25 +301,6 @@ func (s *SegmentManager) allocSegmentForImport(ctx context.Context, collectionID
log.Error("RootCoord client not set")
return nil, errors.New("RootCoord client not set")
}
status, err := s.rcc.ReportImport(context.Background(), &rootcoordpb.ImportResult{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
TaskId: importTaskID,
DatanodeId: Params.DataNodeCfg.GetNodeID(),
State: commonpb.ImportState_ImportAllocSegment,
Segments: []int64{segment.GetID()},
})
if err != nil {
log.Error("failed to report import on new segment", zap.Error(err))
return nil, err
}
if status.GetErrorCode() != commonpb.ErrorCode_Success {
log.Error("failed to report import on new segment", zap.String("reason", status.GetReason()))
return nil, fmt.Errorf("failed to report import on new segment: %s", status.GetReason())
}
log.Info("successfully report import the new segment",
zap.Int64("segment ID", segment.GetID()))
allocation.ExpireTime = expireTs
allocation.SegmentID = segment.GetID()
@ -375,6 +355,9 @@ func (s *SegmentManager) openNewSegment(ctx context.Context, collectionID Unique
MaxRowNum: int64(maxNumOfRows),
LastExpireTime: 0,
}
if segmentState == commonpb.SegmentState_Importing {
segmentInfo.IsImporting = true
}
segment := NewSegmentInfo(segmentInfo)
if err := s.meta.AddSegment(segment); err != nil {
log.Error("failed to add segment to DataCoord", zap.Error(err))

View File

@ -133,7 +133,6 @@ type Server struct {
dnEventCh <-chan *sessionutil.SessionEvent
//icEventCh <-chan *sessionutil.SessionEvent
qcEventCh <-chan *sessionutil.SessionEvent
rcEventCh <-chan *sessionutil.SessionEvent
dataNodeCreator dataNodeCreatorFunc
rootCoordClientCreator rootCoordCreatorFunc
@ -443,16 +442,6 @@ func (s *Server) initServiceDiscovery() error {
}
s.qcEventCh = s.session.WatchServices(typeutil.QueryCoordRole, qcRevision+1, nil)
rcSessions, rcRevision, err := s.session.GetSessions(typeutil.RootCoordRole)
if err != nil {
log.Error("DataCoord get RootCoord session failed", zap.Error(err))
return err
}
for _, session := range rcSessions {
serverIDs = append(serverIDs, session.ServerID)
}
s.rcEventCh = s.session.WatchServices(typeutil.RootCoordRole, rcRevision+1, nil)
s.segReferManager, err = NewSegmentReferenceManager(s.kvClient, serverIDs)
return err
}
@ -736,12 +725,6 @@ func (s *Server) watchService(ctx context.Context) {
return
}
s.processSessionEvent(ctx, "QueryCoord", event)
case event, ok := <-s.rcEventCh:
if !ok {
s.stopServiceWatch()
return
}
s.processSessionEvent(ctx, "RootCoord", event)
}
}
}

View File

@ -30,17 +30,6 @@ import (
"testing"
"time"
"github.com/milvus-io/milvus/internal/mocks"
"github.com/milvus-io/milvus/internal/util/funcutil"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/minio/minio-go/v7"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/api/schemapb"
@ -48,15 +37,26 @@ import (
"github.com/milvus-io/milvus/internal/kv"
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/mocks"
"github.com/milvus-io/milvus/internal/mq/msgstream"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/indexpb"
"github.com/milvus-io/milvus/internal/proto/internalpb"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/internal/util/etcd"
"github.com/milvus-io/milvus/internal/util/funcutil"
"github.com/milvus-io/milvus/internal/util/metricsinfo"
"github.com/milvus-io/milvus/internal/util/sessionutil"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/minio/minio-go/v7"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
)
func TestMain(m *testing.M) {
@ -166,7 +166,7 @@ func TestAssignSegmentID(t *testing.T) {
t.Run("assign segment with invalid collection", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
svr.rootCoordClient = &mockDescribeCollRoot{
svr.rootCoordClient = &mockRootCoord{
RootCoord: svr.rootCoordClient,
collID: collID,
}
@ -193,12 +193,12 @@ func TestAssignSegmentID(t *testing.T) {
})
}
type mockDescribeCollRoot struct {
type mockRootCoord struct {
types.RootCoord
collID UniqueID
}
func (r *mockDescribeCollRoot) DescribeCollection(ctx context.Context, req *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error) {
func (r *mockRootCoord) DescribeCollection(ctx context.Context, req *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error) {
if req.CollectionID != r.collID {
return &milvuspb.DescribeCollectionResponse{
Status: &commonpb.Status{
@ -210,6 +210,13 @@ func (r *mockDescribeCollRoot) DescribeCollection(ctx context.Context, req *milv
return r.RootCoord.DescribeCollection(ctx, req)
}
func (r *mockRootCoord) ReportImport(context.Context, *rootcoordpb.ImportResult) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: "something bad",
}, nil
}
func TestFlush(t *testing.T) {
req := &datapb.FlushRequest{
Base: &commonpb.MsgBase{
@ -815,12 +822,10 @@ func TestServer_watchQueryCoord(t *testing.T) {
dnCh := make(chan *sessionutil.SessionEvent)
//icCh := make(chan *sessionutil.SessionEvent)
qcCh := make(chan *sessionutil.SessionEvent)
rcCh := make(chan *sessionutil.SessionEvent)
svr.dnEventCh = dnCh
//svr.icEventCh = icCh
svr.qcEventCh = qcCh
svr.rcEventCh = rcCh
segRefer, err := NewSegmentReferenceManager(etcdKV, nil)
assert.NoError(t, err)
@ -862,69 +867,6 @@ func TestServer_watchQueryCoord(t *testing.T) {
assert.True(t, closed)
}
func TestServer_watchRootCoord(t *testing.T) {
Params.Init()
etcdCli, err := etcd.GetEtcdClient(&Params.EtcdCfg)
assert.Nil(t, err)
etcdKV := etcdkv.NewEtcdKV(etcdCli, Params.EtcdCfg.MetaRootPath)
assert.NotNil(t, etcdKV)
factory := dependency.NewDefaultFactory(true)
svr := CreateServer(context.TODO(), factory)
svr.session = &sessionutil.Session{
TriggerKill: true,
}
svr.kvClient = etcdKV
dnCh := make(chan *sessionutil.SessionEvent)
//icCh := make(chan *sessionutil.SessionEvent)
qcCh := make(chan *sessionutil.SessionEvent)
rcCh := make(chan *sessionutil.SessionEvent)
svr.dnEventCh = dnCh
//svr.icEventCh = icCh
svr.qcEventCh = qcCh
svr.rcEventCh = rcCh
segRefer, err := NewSegmentReferenceManager(etcdKV, nil)
assert.NoError(t, err)
assert.NotNil(t, segRefer)
svr.segReferManager = segRefer
sc := make(chan os.Signal, 1)
signal.Notify(sc, syscall.SIGINT)
defer signal.Reset(syscall.SIGINT)
closed := false
sigQuit := make(chan struct{}, 1)
svr.serverLoopWg.Add(1)
go func() {
svr.watchService(context.Background())
}()
go func() {
<-sc
closed = true
sigQuit <- struct{}{}
}()
rcCh <- &sessionutil.SessionEvent{
EventType: sessionutil.SessionAddEvent,
Session: &sessionutil.Session{
ServerID: 3,
},
}
rcCh <- &sessionutil.SessionEvent{
EventType: sessionutil.SessionDelEvent,
Session: &sessionutil.Session{
ServerID: 3,
},
}
close(rcCh)
<-sigQuit
svr.serverLoopWg.Wait()
assert.True(t, closed)
}
func TestServer_ShowConfigurations(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
@ -2732,10 +2674,14 @@ func TestDataCoordServer_SetSegmentState(t *testing.T) {
}
func TestDataCoord_Import(t *testing.T) {
storage.CheckBucketRetryAttempts = 2
t.Run("normal case", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
svr.sessionManager.AddSession(&NodeInfo{
NodeID: 0,
Address: "localhost:8080",
})
err := svr.channelManager.AddNode(0)
assert.Nil(t, err)
err = svr.channelManager.Watch(&channel{Name: "ch1", CollectionID: 0})
@ -2749,12 +2695,11 @@ func TestDataCoord_Import(t *testing.T) {
})
assert.Nil(t, err)
assert.EqualValues(t, commonpb.ErrorCode_Success, resp.Status.GetErrorCode())
etcd.StopEtcdServer()
closeTestServer(t, svr)
})
t.Run("no free node", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
err := svr.channelManager.AddNode(0)
assert.Nil(t, err)
@ -2770,13 +2715,12 @@ func TestDataCoord_Import(t *testing.T) {
})
assert.Nil(t, err)
assert.EqualValues(t, commonpb.ErrorCode_UnexpectedError, resp.Status.GetErrorCode())
etcd.StopEtcdServer()
closeTestServer(t, svr)
})
t.Run("no datanode available", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
Params.MinioCfg.Address = "minio:9000"
resp, err := svr.Import(svr.ctx, &datapb.ImportTaskRequest{
ImportTask: &datapb.ImportTask{
CollectionId: 100,
@ -2785,7 +2729,7 @@ func TestDataCoord_Import(t *testing.T) {
})
assert.Nil(t, err)
assert.EqualValues(t, commonpb.ErrorCode_UnexpectedError, resp.Status.GetErrorCode())
etcd.StopEtcdServer()
closeTestServer(t, svr)
})
t.Run("with closed server", func(t *testing.T) {
@ -2805,7 +2749,6 @@ func TestDataCoord_Import(t *testing.T) {
t.Run("test update segment stat", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
status, err := svr.UpdateSegmentStatistics(context.TODO(), &datapb.UpdateSegmentStatisticsRequest{
Stats: []*datapb.SegmentStats{{
@ -2815,6 +2758,7 @@ func TestDataCoord_Import(t *testing.T) {
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, status.GetErrorCode())
closeTestServer(t, svr)
})
t.Run("test update segment stat w/ closed server", func(t *testing.T) {
@ -2856,22 +2800,45 @@ func TestDataCoord_Import(t *testing.T) {
})
}
func TestDataCoord_AddSegment(t *testing.T) {
func TestDataCoord_SaveImportSegment(t *testing.T) {
t.Run("test add segment", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
seg := buildSegment(100, 100, 100, "ch1", false)
svr.meta.AddSegment(seg)
svr.sessionManager.AddSession(&NodeInfo{
NodeID: 110,
Address: "localhost:8080",
})
svr.indexCoord.(*mocks.MockIndexCoord).EXPECT().GetIndexInfos(mock.Anything, mock.Anything).Return(nil, nil)
err := svr.channelManager.AddNode(110)
assert.Nil(t, err)
err = svr.channelManager.Watch(&channel{Name: "ch1", CollectionID: 100})
assert.Nil(t, err)
status, err := svr.AddSegment(context.TODO(), &datapb.AddSegmentRequest{
status, err := svr.SaveImportSegment(context.TODO(), &datapb.SaveImportSegmentRequest{
SegmentId: 100,
ChannelName: "ch1",
CollectionId: 100,
PartitionId: 100,
RowNum: int64(1),
SaveBinlogPathReq: &datapb.SaveBinlogPathsRequest{
Base: &commonpb.MsgBase{
SourceID: Params.DataNodeCfg.GetNodeID(),
},
SegmentID: 100,
CollectionID: 100,
Importing: true,
StartPositions: []*datapb.SegmentStartPosition{
{
StartPosition: &internalpb.MsgPosition{
ChannelName: "ch1",
Timestamp: 1,
},
SegmentID: 100,
},
},
},
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, status.GetErrorCode())
@ -2886,7 +2853,7 @@ func TestDataCoord_AddSegment(t *testing.T) {
err = svr.channelManager.Watch(&channel{Name: "ch1", CollectionID: 100})
assert.Nil(t, err)
status, err := svr.AddSegment(context.TODO(), &datapb.AddSegmentRequest{
status, err := svr.SaveImportSegment(context.TODO(), &datapb.SaveImportSegmentRequest{
SegmentId: 100,
ChannelName: "non-channel",
CollectionId: 100,
@ -2901,12 +2868,57 @@ func TestDataCoord_AddSegment(t *testing.T) {
svr := newTestServer(t, nil)
closeTestServer(t, svr)
status, err := svr.AddSegment(context.TODO(), &datapb.AddSegmentRequest{})
status, err := svr.SaveImportSegment(context.TODO(), &datapb.SaveImportSegmentRequest{})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_DataCoordNA, status.GetErrorCode())
})
}
func TestDataCoord_UnsetIsImportingState(t *testing.T) {
t.Run("normal case", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
seg := buildSegment(100, 100, 100, "ch1", false)
svr.meta.AddSegment(seg)
status, err := svr.UnsetIsImportingState(context.Background(), &datapb.UnsetIsImportingStateRequest{
SegmentIds: []int64{100},
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, status.GetErrorCode())
// Trying to unset state of a segment that does not exist.
status, err = svr.UnsetIsImportingState(context.Background(), &datapb.UnsetIsImportingStateRequest{
SegmentIds: []int64{999},
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_UnexpectedError, status.GetErrorCode())
})
}
func TestDataCoord_MarkSegmentsDropped(t *testing.T) {
t.Run("normal case", func(t *testing.T) {
svr := newTestServer(t, nil)
defer closeTestServer(t, svr)
seg := buildSegment(100, 100, 100, "ch1", false)
svr.meta.AddSegment(seg)
status, err := svr.MarkSegmentsDropped(context.Background(), &datapb.MarkSegmentsDroppedRequest{
SegmentIds: []int64{100},
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, status.GetErrorCode())
// Trying to mark a segment that does not exist as dropped.
status, err = svr.MarkSegmentsDropped(context.Background(), &datapb.MarkSegmentsDroppedRequest{
SegmentIds: []int64{999},
})
assert.NoError(t, err)
// Returning success as SetState will succeed even if the segment does not exist. This should probably be fixed.
assert.Equal(t, commonpb.ErrorCode_Success, status.GetErrorCode())
})
}
// https://github.com/milvus-io/milvus/issues/15659
func TestIssue15659(t *testing.T) {
ctx, cancel := context.WithCancel(context.Background())
@ -3104,18 +3116,17 @@ func Test_newChunkManagerFactory(t *testing.T) {
getCheckBucketFn = getCheckBucketFnBak
}()
Params.MinioCfg.Address = "minio:9000"
t.Run("ok", func(t *testing.T) {
storageCli, err := server.newChunkManagerFactory()
assert.NotNil(t, storageCli)
assert.NoError(t, err)
})
t.Run("iam_ok", func(t *testing.T) {
Params.CommonCfg.StorageType = "minio"
Params.MinioCfg.UseIAM = true
storageCli, err := server.newChunkManagerFactory()
assert.Nil(t, storageCli)
assert.Error(t, err)
assert.Contains(t, err.Error(), "404 Not Found")
})
t.Run("local storage init", func(t *testing.T) {
Params.CommonCfg.StorageType = "local"
@ -3151,4 +3162,12 @@ func Test_initGarbageCollection(t *testing.T) {
assert.NoError(t, err)
server.initGarbageCollection(storageCli)
})
t.Run("err_minio_bad_address", func(t *testing.T) {
Params.CommonCfg.StorageType = "minio"
Params.MinioCfg.Address = "host:9000:bad"
storageCli, err := server.newChunkManagerFactory()
assert.Nil(t, storageCli)
assert.Error(t, err)
assert.Contains(t, err.Error(), "too many colons in address")
})
}

View File

@ -439,7 +439,7 @@ func (s *Server) SaveBinlogPaths(ctx context.Context, req *datapb.SaveBinlogPath
s.segmentManager.DropSegment(ctx, req.SegmentID)
s.flushCh <- req.SegmentID
if Params.DataCoordCfg.EnableCompaction {
if !req.Importing && Params.DataCoordCfg.EnableCompaction {
cctx, cancel := context.WithTimeout(s.ctx, 5*time.Second)
defer cancel()
@ -625,7 +625,6 @@ func (s *Server) GetRecoveryInfo(ctx context.Context, req *datapb.GetRecoveryInf
log.Debug("datacoord append channelInfo in GetRecoveryInfo",
zap.Any("channelInfo", channelInfo),
)
flushedIDs.Insert(channelInfo.GetFlushedSegmentIds()...)
}
@ -642,9 +641,14 @@ func (s *Server) GetRecoveryInfo(ctx context.Context, req *datapb.GetRecoveryInf
resp.Status.Reason = errMsg
return resp, nil
}
// Skip segments that are neither flushing, flushed, nor dropped.
if segment.State != commonpb.SegmentState_Flushed && segment.State != commonpb.SegmentState_Flushing && segment.State != commonpb.SegmentState_Dropped {
continue
}
// Also skip bulk load segments.
if segment.GetIsImporting() {
continue
}
segment2InsertChannel[segment.ID] = segment.InsertChannel
binlogs := segment.GetBinlogs()
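
GetRecoveryInfo now keeps only flushing, flushed, or dropped segments and additionally skips bulk-load segments, since an importing segment has no usable recovery position yet. A small sketch of that eligibility check, with local stand-ins for the state enum:

package main

import "fmt"

type State int

const (
	Growing State = iota
	Flushing
	Flushed
	Dropped
)

// recoverable mirrors the GetRecoveryInfo filter: only segments in a
// recovery-relevant state, and not mid-import, contribute binlogs.
func recoverable(state State, isImporting bool) bool {
	switch state {
	case Flushing, Flushed, Dropped:
		return !isImporting
	default:
		return false
	}
}

func main() {
	fmt.Println(recoverable(Flushed, false)) // true
	fmt.Println(recoverable(Flushed, true))  // false: bulk load in flight
	fmt.Println(recoverable(Growing, false)) // false
}
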
@ -1090,11 +1094,12 @@ func (s *Server) Import(ctx context.Context, itr *datapb.ImportTaskRequest) (*da
return resp, nil
}
nodes := s.channelManager.store.GetNodes()
nodes := s.sessionManager.getLiveNodeIDs()
if len(nodes) == 0 {
log.Error("import failed as all DataNodes are offline")
return resp, nil
}
log.Info("available DataNodes are", zap.Int64s("node ID", nodes))
avaNodes := getDiff(nodes, itr.GetWorkingNodes())
if len(avaNodes) > 0 {
@ -1211,8 +1216,9 @@ func (s *Server) ReleaseSegmentLock(ctx context.Context, req *datapb.ReleaseSegm
return resp, nil
}
func (s *Server) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
log.Info("DataCoord putting segment to the right DataNode",
// SaveImportSegment saves the segment binlog paths and hands the segment over to the DataNode it belongs to, as a flushed segment.
func (s *Server) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
log.Info("DataCoord putting segment to the right DataNode and saving binlog path",
zap.Int64("segment ID", req.GetSegmentId()),
zap.Int64("collection ID", req.GetCollectionId()),
zap.Int64("partition ID", req.GetPartitionId()),
@ -1224,16 +1230,104 @@ func (s *Server) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest)
}
if s.isClosed() {
log.Warn("failed to add segment for closed server")
errResp.ErrorCode = commonpb.ErrorCode_DataCoordNA
errResp.Reason = msgDataCoordIsUnhealthy(Params.DataCoordCfg.GetNodeID())
return errResp, nil
}
// Look for the DataNode that watches the channel.
ok, nodeID := s.channelManager.getNodeIDByChannelName(req.GetChannelName())
if !ok {
log.Error("no DataNode found for channel", zap.String("channel name", req.GetChannelName()))
errResp.Reason = fmt.Sprint("no DataNode found for channel ", req.GetChannelName())
return errResp, nil
}
s.cluster.AddSegment(s.ctx, nodeID, req)
// Call DataNode to add the new segment to its own flow graph.
cli, err := s.sessionManager.getClient(ctx, nodeID)
if err != nil {
log.Error("failed to get DataNode client for SaveImportSegment",
zap.Int64("DataNode ID", nodeID),
zap.Error(err))
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
}, nil
}
resp, err := cli.AddImportSegment(ctx,
&datapb.AddImportSegmentRequest{
Base: &commonpb.MsgBase{
SourceID: Params.DataNodeCfg.GetNodeID(),
Timestamp: req.GetBase().GetTimestamp(),
},
SegmentId: req.GetSegmentId(),
ChannelName: req.GetChannelName(),
CollectionId: req.GetCollectionId(),
PartitionId: req.GetPartitionId(),
RowNum: req.GetRowNum(),
StatsLog: req.GetSaveBinlogPathReq().GetField2StatslogPaths(),
})
if err := VerifyResponse(resp.GetStatus(), err); err != nil {
log.Error("failed to add segment", zap.Int64("DataNode ID", nodeID), zap.Error(err))
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
}, nil
}
log.Info("succeed to add segment", zap.Int64("DataNode ID", nodeID), zap.Any("add segment req", req))
// Fill in start position message ID.
req.SaveBinlogPathReq.StartPositions[0].StartPosition.MsgID = resp.GetChannelPos()
// Start saving bin log paths.
rsp, err := s.SaveBinlogPaths(context.Background(), req.GetSaveBinlogPathReq())
if err := VerifyResponse(rsp, err); err != nil {
log.Error("failed to SaveBinlogPaths", zap.Error(err))
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
}, nil
}
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
// UnsetIsImportingState unsets the isImporting states of the given segments.
// An error status is returned, and the error logged, if any segment fails to update.
func (s *Server) UnsetIsImportingState(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
log.Info("unsetting isImporting state of segments",
zap.Int64s("segments", req.GetSegmentIds()))
failure := false
for _, segID := range req.GetSegmentIds() {
if err := s.meta.UnsetIsImporting(segID); err != nil {
// Fail-open.
log.Error("failed to unset segment is importing state", zap.Int64("segment ID", segID))
failure = true
}
}
if failure {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
}, nil
}
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
// MarkSegmentsDropped marks the given segments as `Dropped`.
// An error status is returned, and the error logged, if any segment fails to be marked.
func (s *Server) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
log.Info("marking segments dropped",
zap.Int64s("segments", req.GetSegmentIds()))
failure := false
for _, segID := range req.GetSegmentIds() {
if err := s.meta.SetState(segID, commonpb.SegmentState_Dropped); err != nil {
// Fail-open.
log.Error("failed to set segment state as dropped", zap.Int64("segment ID", segID))
failure = true
}
}
if failure {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
}, nil
}
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
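
UnsetIsImportingState and MarkSegmentsDropped share a fail-open loop: every segment is attempted, per-segment failures are only logged, and one aggregate error status is returned if anything failed. A minimal sketch of that aggregation:

package main

import (
	"errors"
	"fmt"
)

// applyAll attempts fn on every ID (fail-open) and reports whether any failed.
func applyAll(ids []int64, fn func(int64) error) error {
	failed := false
	for _, id := range ids {
		if err := fn(id); err != nil {
			fmt.Printf("segment %d: %v\n", id, err) // log and keep going
			failed = true
		}
	}
	if failed {
		return errors.New("one or more segments failed to update")
	}
	return nil
}

func main() {
	err := applyAll([]int64{100, 999}, func(id int64) error {
		if id == 999 {
			return errors.New("segment not found")
		}
		return nil
	})
	fmt.Println(err)
}
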

View File

@ -31,7 +31,7 @@ import (
)
const (
flushTimeout = 5 * time.Second
flushTimeout = 15 * time.Second
// TODO: evaluate and update import timeout.
importTimeout = 3 * time.Hour
reCollectTimeout = 5 * time.Second
@ -264,29 +264,6 @@ func (c *SessionManager) GetCompactionState() map[int64]*datapb.CompactionStateR
return rst
}
// AddSegment calls DataNode with ID == `nodeID` to put the segment into this node.
func (c *SessionManager) AddSegment(ctx context.Context, nodeID int64, req *datapb.AddSegmentRequest) {
go c.execAddSegment(ctx, nodeID, req)
}
func (c *SessionManager) execAddSegment(ctx context.Context, nodeID int64, req *datapb.AddSegmentRequest) {
cli, err := c.getClient(ctx, nodeID)
if err != nil {
log.Warn("failed to get client for AddSegment", zap.Int64("DataNode ID", nodeID), zap.Error(err))
return
}
ctx, cancel := context.WithTimeout(ctx, addSegmentTimeout)
defer cancel()
req.Base.SourceID = Params.DataCoordCfg.GetNodeID()
resp, err := cli.AddSegment(ctx, req)
if err := VerifyResponse(resp, err); err != nil {
log.Warn("failed to add segment", zap.Int64("DataNode ID", nodeID), zap.Error(err))
return
}
log.Info("success to add segment", zap.Int64("DataNode ID", nodeID), zap.Any("add segment req", req))
}
func (c *SessionManager) getClient(ctx context.Context, nodeID int64) (types.DataNode, error) {
c.sessions.RLock()
session, ok := c.sessions.data[nodeID]

View File

@ -23,13 +23,12 @@ import (
"strconv"
"time"
"github.com/milvus-io/milvus/internal/util/metautil"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/etcdpb"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/util/metautil"
"go.uber.org/zap"
"golang.org/x/sync/errgroup"
)

View File

@ -58,7 +58,15 @@ func TestCompactionTaskInnerMethods(t *testing.T) {
_, _, _, err = task.getSegmentMeta(100)
assert.Error(t, err)
err = replica.addNewSegment(100, 1, 10, "a", new(internalpb.MsgPosition), nil)
err = replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 100,
collID: 1,
partitionID: 10,
channelName: "a",
startPos: new(internalpb.MsgPosition),
endPos: nil,
})
require.NoError(t, err)
collID, partID, meta, err := task.getSegmentMeta(100)

View File

@ -34,13 +34,7 @@ import (
"syscall"
"time"
"github.com/milvus-io/milvus/internal/util/metautil"
"github.com/golang/protobuf/proto"
v3rpc "go.etcd.io/etcd/api/v3/v3rpc/rpctypes"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/api/schemapb"
@ -59,12 +53,16 @@ import (
"github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/internal/util/importutil"
"github.com/milvus-io/milvus/internal/util/logutil"
"github.com/milvus-io/milvus/internal/util/metautil"
"github.com/milvus-io/milvus/internal/util/metricsinfo"
"github.com/milvus-io/milvus/internal/util/paramtable"
"github.com/milvus-io/milvus/internal/util/retry"
"github.com/milvus-io/milvus/internal/util/sessionutil"
"github.com/milvus-io/milvus/internal/util/timerecord"
"github.com/milvus-io/milvus/internal/util/typeutil"
v3rpc "go.etcd.io/etcd/api/v3/v3rpc/rpctypes"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
)
const (
@ -81,6 +79,8 @@ const (
ConnectEtcdMaxRetryTime = 100
)
var getFlowGraphServiceAttempts = uint(50)
// makes sure DataNode implements types.DataNode
var _ types.DataNode = (*DataNode)(nil)
@ -959,9 +959,17 @@ func (node *DataNode) Import(ctx context.Context, req *datapb.ImportTaskRequest)
AutoIds: make([]int64, 0),
RowCount: 0,
}
// func to report import state to rootcoord
reportFunc := func(res *rootcoordpb.ImportResult) error {
_, err := node.rootCoord.ReportImport(ctx, res)
return err
status, err := node.rootCoord.ReportImport(ctx, res)
if err != nil {
log.Error("fail to report import state to root coord", zap.Error(err))
return err
}
if status != nil && status.ErrorCode != commonpb.ErrorCode_Success {
return errors.New(status.GetReason())
}
return nil
}
if !node.isHealthy() {
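
reportFunc above now surfaces two failure modes: the RPC error itself and a non-Success status returned by ReportImport. A minimal sketch of that status-to-error conversion, with hypothetical stand-in types:

package main

import (
	"errors"
	"fmt"
)

type status struct {
	Success bool
	Reason  string
}

// report wraps a raw RPC so callers see one error covering both the
// transport failure and a non-success application status.
func report(call func() (*status, error)) error {
	st, err := call()
	if err != nil {
		return err // transport-level failure
	}
	if st != nil && !st.Success {
		return errors.New(st.Reason) // application-level failure
	}
	return nil
}

func main() {
	err := report(func() (*status, error) {
		return &status{Success: false, Reason: "something bad"}, nil
	})
	fmt.Println(err) // something bad
}
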
@ -974,7 +982,10 @@ func (node *DataNode) Import(ctx context.Context, req *datapb.ImportTaskRequest)
msg := msgDataNodeIsUnhealthy(Params.DataNodeCfg.GetNodeID())
importResult.State = commonpb.ImportState_ImportFailed
importResult.Infos = append(importResult.Infos, &commonpb.KeyValuePair{Key: "failed_reason", Value: msg})
reportFunc(importResult)
reportErr := reportFunc(importResult)
if reportErr != nil {
log.Warn("fail to report import state to root coord", zap.Error(reportErr))
}
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: msg,
@ -997,7 +1008,9 @@ func (node *DataNode) Import(ctx context.Context, req *datapb.ImportTaskRequest)
log.Warn(msg)
importResult.State = commonpb.ImportState_ImportFailed
importResult.Infos = append(importResult.Infos, &commonpb.KeyValuePair{Key: "failed_reason", Value: msg})
reportFunc(importResult)
if reportErr := reportFunc(importResult); reportErr != nil {
log.Warn("fail to report import state to root coord", zap.Error(reportErr))
}
if err != nil {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
@ -1014,7 +1027,10 @@ func (node *DataNode) Import(ctx context.Context, req *datapb.ImportTaskRequest)
if err != nil {
importResult.State = commonpb.ImportState_ImportFailed
importResult.Infos = append(importResult.Infos, &commonpb.KeyValuePair{Key: "failed_reason", Value: err.Error()})
reportFunc(importResult)
reportErr := reportFunc(importResult)
if reportErr != nil {
log.Warn("fail to report import state to root coord", zap.Error(err))
}
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: err.Error(),
@ -1029,7 +1045,10 @@ func (node *DataNode) Import(ctx context.Context, req *datapb.ImportTaskRequest)
if err != nil {
importResult.State = commonpb.ImportState_ImportFailed
importResult.Infos = append(importResult.Infos, &commonpb.KeyValuePair{Key: "failed_reason", Value: err.Error()})
reportFunc(importResult)
reportErr := reportFunc(importResult)
if reportErr != nil {
log.Warn("fail to report import state to root coord", zap.Error(err))
}
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: err.Error(),
@ -1042,8 +1061,8 @@ func (node *DataNode) Import(ctx context.Context, req *datapb.ImportTaskRequest)
return resp, nil
}
// AddSegment adds the segment to the current DataNode.
func (node *DataNode) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
// AddImportSegment adds the import segment to the current DataNode.
func (node *DataNode) AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error) {
log.Info("adding segment to DataNode flow graph",
zap.Int64("segment ID", req.GetSegmentId()),
zap.Int64("collection ID", req.GetCollectionId()),
@ -1051,248 +1070,185 @@ func (node *DataNode) AddSegment(ctx context.Context, req *datapb.AddSegmentRequ
zap.String("channel name", req.GetChannelName()),
zap.Int64("# of rows", req.GetRowNum()))
// Fetch the flow graph on the given v-channel.
ds, ok := node.flowgraphManager.getFlowgraphService(req.GetChannelName())
if !ok {
var ds *dataSyncService
// Retry in case the channel hasn't been watched yet.
err := retry.Do(ctx, func() error {
var ok bool
ds, ok = node.flowgraphManager.getFlowgraphService(req.GetChannelName())
if !ok {
return errors.New("channel not found")
}
return nil
}, retry.Attempts(getFlowGraphServiceAttempts))
if err != nil {
log.Error("channel not found in current DataNode",
zap.String("channel name", req.GetChannelName()),
zap.Int64("node ID", Params.DataNodeCfg.GetNodeID()))
return &commonpb.Status{
// TODO: Add specific error code.
ErrorCode: commonpb.ErrorCode_UnexpectedError,
return &datapb.AddImportSegmentResponse{
Status: &commonpb.Status{
// TODO: Add specific error code.
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: "channel not found in current DataNode",
},
}, nil
}
// Get the current dml channel position ID, which will be used as the segment's start and end positions.
posID, err := ds.getChannelLatestMsgID(context.Background(), req.GetChannelName())
if err != nil {
return &datapb.AddImportSegmentResponse{
Status: &commonpb.Status{
// TODO: Add specific error code.
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: "failed to get channel position",
},
}, nil
}
// Add the new segment to the replica.
if !ds.replica.hasSegment(req.GetSegmentId(), true) {
log.Info("add a new segment to replica")
err := ds.replica.addNewSegment(req.GetSegmentId(),
req.GetCollectionId(),
req.GetPartitionId(),
req.GetChannelName(),
&internalpb.MsgPosition{
ChannelName: req.GetChannelName(),
},
&internalpb.MsgPosition{
ChannelName: req.GetChannelName(),
})
if err != nil {
log.Info("adding a new segment to replica",
zap.Int64("segment ID", req.GetSegmentId()))
// Add the segment as a flushed segment, with `importing` set to true so that the replica
// also records the extra segment info that comes with a `SegmentType_Flushed` segment.
if err := ds.replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: req.GetSegmentId(),
collID: req.GetCollectionId(),
partitionID: req.GetPartitionId(),
channelName: req.GetChannelName(),
numOfRows: req.GetRowNum(),
statsBinLogs: req.GetStatsLog(),
startPos: &internalpb.MsgPosition{
ChannelName: req.GetChannelName(),
MsgID: posID,
Timestamp: req.GetBase().GetTimestamp(),
},
endPos: &internalpb.MsgPosition{
ChannelName: req.GetChannelName(),
MsgID: posID,
Timestamp: req.GetBase().GetTimestamp(),
},
recoverTs: req.GetBase().GetTimestamp(),
importing: true,
}); err != nil {
log.Error("failed to add segment to flow graph",
zap.Error(err))
return &commonpb.Status{
// TODO: Add specific error code.
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: err.Error(),
return &datapb.AddImportSegmentResponse{
Status: &commonpb.Status{
// TODO: Add specific error code.
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: err.Error(),
},
}, nil
}
}
// Update # of rows of the given segment.
ds.replica.updateStatistics(req.GetSegmentId(), req.GetRowNum())
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
ds.flushingSegCache.Remove(req.GetSegmentId())
return &datapb.AddImportSegmentResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
ChannelPos: posID,
}, nil
}
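
AddImportSegment no longer gives up when the flowgraph for the channel is missing; it retries (up to getFlowGraphServiceAttempts attempts) because the DataNode may still be in the middle of watching the channel, then records the latest channel position and registers the segment as flushed with importing set. The real code uses the internal retry package; below is a plain-loop sketch of the same bounded-retry shape:

package main

import (
	"context"
	"errors"
	"fmt"
	"time"
)

// retryDo retries fn up to attempts times with a fixed pause,
// a plain-loop stand-in for retry.Do(ctx, fn, retry.Attempts(n)).
func retryDo(ctx context.Context, attempts int, pause time.Duration, fn func() error) error {
	var err error
	for i := 0; i < attempts; i++ {
		if err = fn(); err == nil {
			return nil
		}
		select {
		case <-ctx.Done():
			return ctx.Err()
		case <-time.After(pause):
		}
	}
	return err
}

func main() {
	ready := 0
	err := retryDo(context.Background(), 5, time.Millisecond, func() error {
		ready++
		if ready < 3 {
			return errors.New("channel not found") // flowgraph not watched yet
		}
		return nil
	})
	fmt.Println(err, ready) // <nil> 3
}
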
func importFlushReqFunc(node *DataNode, req *datapb.ImportTaskRequest, res *rootcoordpb.ImportResult, schema *schemapb.CollectionSchema, ts Timestamp) importutil.ImportFlushFunc {
return func(fields map[storage.FieldID]storage.FieldData, shardNum int) error {
if shardNum >= len(req.GetImportTask().GetChannelNames()) {
log.Error("import task returns invalid shard number",
zap.Int("shard num", shardNum),
zap.Int("# of channels", len(req.GetImportTask().GetChannelNames())),
zap.Strings("channel names", req.GetImportTask().GetChannelNames()),
return func(fields map[storage.FieldID]storage.FieldData, shardID int) error {
chNames := req.GetImportTask().GetChannelNames()
importTaskID := req.GetImportTask().GetTaskId()
if shardID >= len(chNames) {
log.Error("import task returns invalid shard ID",
zap.Int64("task ID", importTaskID),
zap.Int("shard ID", shardID),
zap.Int("# of channels", len(chNames)),
zap.Strings("channel names", chNames),
)
return fmt.Errorf("syncSegmentID Failed: invalid shard number %d", shardNum)
return fmt.Errorf("syncSegmentID Failed: invalid shard ID %d", shardID)
}
tr := timerecord.NewTimeRecorder("import callback function")
defer tr.Elapse("finished")
// use the first field's row count as the segment row count;
// all fields have the same row count, as verified by ImportWrapper
var rowNum int
for _, field := range fields {
rowNum = field.RowNum()
break
}
// ask DataCoord to alloc a new segment
log.Info("import task flush segment", zap.Any("ChannelNames", req.ImportTask.ChannelNames), zap.Int("shardNum", shardNum))
segReqs := []*datapb.SegmentIDRequest{
{
ChannelName: req.ImportTask.ChannelNames[shardNum],
Count: uint32(rowNum),
CollectionID: req.GetImportTask().GetCollectionId(),
PartitionID: req.GetImportTask().GetPartitionId(),
IsImport: true,
},
}
segmentIDReq := &datapb.AssignSegmentIDRequest{
NodeID: 0,
PeerRole: typeutil.ProxyRole,
SegmentIDRequests: segReqs,
}
colID := req.GetImportTask().GetCollectionId()
partID := req.GetImportTask().GetPartitionId()
segmentIDReq := composeAssignSegmentIDRequest(rowNum, shardID, chNames, colID, partID)
targetChName := segmentIDReq.GetSegmentIDRequests()[0].GetChannelName()
log.Info("target channel for the import task",
zap.Int64("task ID", importTaskID),
zap.String("target channel name", targetChName))
resp, err := node.dataCoord.AssignSegmentID(context.Background(), segmentIDReq)
if err != nil {
return fmt.Errorf("syncSegmentID Failed:%w", err)
}
if resp.Status.ErrorCode != commonpb.ErrorCode_Success {
return fmt.Errorf("syncSegmentID Failed:%s", resp.Status.Reason)
}
segmentID := resp.SegIDAssignments[0].SegID
// TODO: this code block is long and tedious, maybe split it into separate functions.
tsFieldData := make([]int64, rowNum)
for i := range tsFieldData {
tsFieldData[i] = int64(ts)
}
fields[common.TimeStampField] = &storage.Int64FieldData{
Data: tsFieldData,
NumRows: []int64{int64(rowNum)},
}
if status, _ := node.dataCoord.UpdateSegmentStatistics(context.TODO(), &datapb.UpdateSegmentStatisticsRequest{
Stats: []*datapb.SegmentStats{{
SegmentID: segmentID,
NumRows: int64(rowNum),
}},
}); status.GetErrorCode() != commonpb.ErrorCode_Success {
// TODO: reportImport the failure.
return fmt.Errorf(status.GetReason())
}
data := BufferData{buffer: &InsertData{
Data: fields,
}}
meta := &etcdpb.CollectionMeta{
ID: req.GetImportTask().GetCollectionId(),
Schema: schema,
}
inCodec := storage.NewInsertCodec(meta)
binLogs, statsBinlogs, err := inCodec.Serialize(req.GetImportTask().GetPartitionId(), segmentID, data.buffer)
fieldInsert, fieldStats, err := createBinLogs(rowNum, schema, ts, fields, node, segmentID, colID, partID)
if err != nil {
return err
}
var alloc allocatorInterface = newAllocator(node.rootCoord)
start, _, err := alloc.allocIDBatch(uint32(len(binLogs)))
if err != nil {
return err
}
field2Insert := make(map[UniqueID]*datapb.Binlog, len(binLogs))
kvs := make(map[string][]byte, len(binLogs))
field2Logidx := make(map[UniqueID]UniqueID, len(binLogs))
for idx, blob := range binLogs {
fieldID, err := strconv.ParseInt(blob.GetKey(), 10, 64)
if err != nil {
log.Error("Flush failed ... cannot parse string to fieldID ..", zap.Error(err))
return err
}
logidx := start + int64(idx)
// no error raise if alloc=false
k := metautil.JoinIDPath(req.GetImportTask().GetCollectionId(), req.GetImportTask().GetPartitionId(), segmentID, fieldID, logidx)
key := path.Join(node.chunkManager.RootPath(), common.SegmentInsertLogPath, k)
kvs[key] = blob.Value[:]
field2Insert[fieldID] = &datapb.Binlog{
EntriesNum: data.size,
TimestampFrom: 0, //TODO
TimestampTo: 0, //TODO,
LogPath: key,
LogSize: int64(len(blob.Value)),
}
field2Logidx[fieldID] = logidx
}
field2Stats := make(map[UniqueID]*datapb.Binlog)
// write stats binlog
for _, blob := range statsBinlogs {
fieldID, err := strconv.ParseInt(blob.GetKey(), 10, 64)
if err != nil {
log.Error("Flush failed ... cannot parse string to fieldID ..", zap.Error(err))
return err
}
logidx := field2Logidx[fieldID]
// no error raise if alloc=false
k := metautil.JoinIDPath(req.GetImportTask().GetCollectionId(), req.GetImportTask().GetPartitionId(), segmentID, fieldID, logidx)
key := path.Join(node.chunkManager.RootPath(), common.SegmentStatslogPath, k)
kvs[key] = blob.Value
field2Stats[fieldID] = &datapb.Binlog{
EntriesNum: 0,
TimestampFrom: 0, //TODO
TimestampTo: 0, //TODO,
LogPath: key,
LogSize: int64(len(blob.Value)),
}
}
err = node.chunkManager.MultiWrite(kvs)
if err != nil {
return err
}
var (
fieldInsert []*datapb.FieldBinlog
fieldStats []*datapb.FieldBinlog
)
for k, v := range field2Insert {
fieldInsert = append(fieldInsert, &datapb.FieldBinlog{FieldID: k, Binlogs: []*datapb.Binlog{v}})
}
for k, v := range field2Stats {
fieldStats = append(fieldStats, &datapb.FieldBinlog{FieldID: k, Binlogs: []*datapb.Binlog{v}})
}
log.Info("now adding segment to the correct DataNode flow graph")
// Ask DataCoord to add segment to the corresponding DataNode flow graph.
node.dataCoord.AddSegment(context.Background(), &datapb.AddSegmentRequest{
Base: &commonpb.MsgBase{
SourceID: Params.DataNodeCfg.GetNodeID(),
},
SegmentId: segmentID,
ChannelName: segReqs[0].GetChannelName(),
CollectionId: req.GetImportTask().GetCollectionId(),
PartitionId: req.GetImportTask().GetPartitionId(),
RowNum: int64(rowNum),
})
binlogReq := &datapb.SaveBinlogPathsRequest{
Base: &commonpb.MsgBase{
MsgType: 0, //TODO msg type
MsgID: 0, //TODO msg id
Timestamp: 0, //TODO time stamp
SourceID: Params.DataNodeCfg.GetNodeID(),
},
SegmentID: segmentID,
CollectionID: req.GetImportTask().GetCollectionId(),
Field2BinlogPaths: fieldInsert,
Field2StatslogPaths: fieldStats,
Importing: true,
}
log.Info("adding segment to the correct DataNode flow graph and saving binlog paths",
zap.Int64("segment ID", segmentID),
zap.Uint64("ts", ts))
err = retry.Do(context.Background(), func() error {
rsp, err := node.dataCoord.SaveBinlogPaths(context.Background(), binlogReq)
// should be network issue, return error and retry
// Ask DataCoord to save binlog path and add segment to the corresponding DataNode flow graph.
resp, err := node.dataCoord.SaveImportSegment(context.Background(), &datapb.SaveImportSegmentRequest{
Base: &commonpb.MsgBase{
SourceID: Params.DataNodeCfg.GetNodeID(),
// Pass current timestamp downstream.
Timestamp: ts,
},
SegmentId: segmentID,
ChannelName: targetChName,
CollectionId: req.GetImportTask().GetCollectionId(),
PartitionId: req.GetImportTask().GetPartitionId(),
RowNum: int64(rowNum),
SaveBinlogPathReq: &datapb.SaveBinlogPathsRequest{
Base: &commonpb.MsgBase{
MsgType: 0,
MsgID: 0,
Timestamp: ts,
SourceID: Params.DataNodeCfg.GetNodeID(),
},
SegmentID: segmentID,
CollectionID: req.GetImportTask().GetCollectionId(),
Field2BinlogPaths: fieldInsert,
Field2StatslogPaths: fieldStats,
// Set start positions of a SaveBinlogPathRequest explicitly.
StartPositions: []*datapb.SegmentStartPosition{
{
StartPosition: &internalpb.MsgPosition{
ChannelName: targetChName,
Timestamp: ts,
},
SegmentID: segmentID,
},
},
Importing: true,
},
})
// Retry on transport errors and on DataCoord-not-available; any other failure is unrecoverable and returns immediately.
if err != nil {
return err
}
// TODO should retry only when datacoord status is unhealthy
if rsp.ErrorCode != commonpb.ErrorCode_Success {
return fmt.Errorf("data service save bin log path failed, reason = %s", rsp.Reason)
if resp.ErrorCode != commonpb.ErrorCode_Success && resp.ErrorCode != commonpb.ErrorCode_DataCoordNA {
return retry.Unrecoverable(fmt.Errorf("failed to save import segment, reason = %s", resp.Reason))
} else if resp.ErrorCode == commonpb.ErrorCode_DataCoordNA {
return fmt.Errorf("failed to save import segment: %s", resp.GetReason())
}
return nil
})
if err != nil {
log.Warn("failed to SaveBinlogPaths", zap.Error(err))
log.Warn("failed to save import segment", zap.Error(err))
return err
}
log.Info("segment imported and persisted", zap.Int64("segmentID", segmentID))
res.Segments = append(res.Segments, segmentID)
res.RowCount += int64(rowNum)
@ -1300,6 +1256,139 @@ func importFlushReqFunc(node *DataNode, req *datapb.ImportTaskRequest, res *root
}
}
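// The retry.Do call above separates transient from permanent failures: a
// transport error or an ErrorCode_DataCoordNA status yields a plain error,
// which retry.Do retries, while any other non-Success status is wrapped in
// retry.Unrecoverable and aborts immediately. A minimal sketch of the same
// pattern, with saveOnce standing in for the SaveImportSegment RPC (saveOnce
// is a hypothetical helper, shown for illustration only):
//
//	err := retry.Do(ctx, func() error {
//		status, err := saveOnce(ctx)
//		if err != nil {
//			return err // likely a network issue, worth retrying
//		}
//		if status.GetErrorCode() == commonpb.ErrorCode_DataCoordNA {
//			return errors.New(status.GetReason()) // DataCoord busy, retry
//		}
//		if status.GetErrorCode() != commonpb.ErrorCode_Success {
//			return retry.Unrecoverable(errors.New(status.GetReason()))
//		}
//		return nil
//	})

// composeAssignSegmentIDRequest builds the AssignSegmentIDRequest that asks
// DataCoord to allocate a new import segment of rowNum rows on the channel
// backing the given shard.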
func composeAssignSegmentIDRequest(rowNum int, shardID int, chNames []string,
collID int64, partID int64) *datapb.AssignSegmentIDRequest {
// rowNum is the row count shared by all fields, as verified by ImportWrapper.
// Ask DataCoord to alloc a new segment on the shard's channel.
log.Info("import task flush segment",
zap.Any("channel names", chNames),
zap.Int("shard ID", shardID))
segReqs := []*datapb.SegmentIDRequest{
{
ChannelName: chNames[shardID],
Count: uint32(rowNum),
CollectionID: collID,
PartitionID: partID,
IsImport: true,
},
}
segmentIDReq := &datapb.AssignSegmentIDRequest{
NodeID: 0,
PeerRole: typeutil.ProxyRole,
SegmentIDRequests: segReqs,
}
return segmentIDReq
}
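// createBinLogs persists the imported field data of a single segment. It
// appends a timestamp field (every row stamped with ts), reports the row
// count to DataCoord via UpdateSegmentStatistics, serializes the buffer into
// insert and stats binlogs with an InsertCodec, allocates one log ID per blob
// from RootCoord, writes all blobs through the chunk manager, and returns the
// insert and stats binlog paths grouped by field.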
func createBinLogs(rowNum int, schema *schemapb.CollectionSchema, ts Timestamp,
fields map[storage.FieldID]storage.FieldData, node *DataNode, segmentID, colID, partID UniqueID) ([]*datapb.FieldBinlog, []*datapb.FieldBinlog, error) {
tsFieldData := make([]int64, rowNum)
for i := range tsFieldData {
tsFieldData[i] = int64(ts)
}
fields[common.TimeStampField] = &storage.Int64FieldData{
Data: tsFieldData,
NumRows: []int64{int64(rowNum)},
}
if status, _ := node.dataCoord.UpdateSegmentStatistics(context.TODO(), &datapb.UpdateSegmentStatisticsRequest{
Stats: []*datapb.SegmentStats{{
SegmentID: segmentID,
NumRows: int64(rowNum),
}},
}); status.GetErrorCode() != commonpb.ErrorCode_Success {
return nil, nil, fmt.Errorf("%s", status.GetReason())
}
data := BufferData{buffer: &InsertData{
Data: fields,
}}
data.updateSize(int64(rowNum))
meta := &etcdpb.CollectionMeta{
ID: colID,
Schema: schema,
}
binLogs, statsBinLogs, err := storage.NewInsertCodec(meta).Serialize(partID, segmentID, data.buffer)
if err != nil {
return nil, nil, err
}
var alloc allocatorInterface = newAllocator(node.rootCoord)
start, _, err := alloc.allocIDBatch(uint32(len(binLogs)))
if err != nil {
return nil, nil, err
}
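// One unique log ID is allocated per insert binlog blob; the same ID is later
// reused through field2Logidx for that field's stats binlog, so an insert log
// and its stats log share the same (collection, partition, segment, field,
// log) ID path under their respective root directories.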
field2Insert := make(map[UniqueID]*datapb.Binlog, len(binLogs))
kvs := make(map[string][]byte, len(binLogs))
field2Logidx := make(map[UniqueID]UniqueID, len(binLogs))
for idx, blob := range binLogs {
fieldID, err := strconv.ParseInt(blob.GetKey(), 10, 64)
if err != nil {
log.Error("Flush failed ... cannot parse string to fieldID ..", zap.Error(err))
return nil, nil, err
}
logidx := start + int64(idx)
// no error is raised when alloc is false
k := metautil.JoinIDPath(colID, partID, segmentID, fieldID, logidx)
key := path.Join(node.chunkManager.RootPath(), common.SegmentInsertLogPath, k)
kvs[key] = blob.Value[:]
field2Insert[fieldID] = &datapb.Binlog{
EntriesNum: data.size,
TimestampFrom: ts,
TimestampTo: ts,
LogPath: key,
LogSize: int64(len(blob.Value)),
}
field2Logidx[fieldID] = logidx
}
field2Stats := make(map[UniqueID]*datapb.Binlog)
// write stats binlog
for _, blob := range statsBinLogs {
fieldID, err := strconv.ParseInt(blob.GetKey(), 10, 64)
if err != nil {
log.Error("Flush failed ... cannot parse string to fieldID ..", zap.Error(err))
return nil, nil, err
}
logidx := field2Logidx[fieldID]
// no error is raised when alloc is false
k := metautil.JoinIDPath(colID, partID, segmentID, fieldID, logidx)
key := path.Join(node.chunkManager.RootPath(), common.SegmentStatslogPath, k)
kvs[key] = blob.Value
field2Stats[fieldID] = &datapb.Binlog{
EntriesNum: data.size,
TimestampFrom: ts,
TimestampTo: ts,
LogPath: key,
LogSize: int64(len(blob.Value)),
}
}
err = node.chunkManager.MultiWrite(kvs)
if err != nil {
return nil, nil, err
}
var (
fieldInsert []*datapb.FieldBinlog
fieldStats []*datapb.FieldBinlog
)
for k, v := range field2Insert {
fieldInsert = append(fieldInsert, &datapb.FieldBinlog{FieldID: k, Binlogs: []*datapb.Binlog{v}})
}
for k, v := range field2Stats {
fieldStats = append(fieldStats, &datapb.FieldBinlog{FieldID: k, Binlogs: []*datapb.Binlog{v}})
}
return fieldInsert, fieldStats, nil
}
func logDupFlush(cID, segID int64) {
log.Info("segment is already being flushed, ignoring flush request",
zap.Int64("collection ID", cID),


@ -28,26 +28,23 @@ import (
"testing"
"time"
"github.com/golang/protobuf/proto"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/api/schemapb"
"github.com/milvus-io/milvus/internal/common"
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/mq/msgstream"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/api/schemapb"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/internalpb"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/internal/util/etcd"
"github.com/milvus-io/milvus/internal/util/metricsinfo"
"github.com/milvus-io/milvus/internal/util/sessionutil"
"github.com/golang/protobuf/proto"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
@ -217,7 +214,15 @@ func TestDataNode(t *testing.T) {
fgservice, ok := node1.flowgraphManager.getFlowgraphService(dmChannelName)
assert.True(t, ok)
err = fgservice.replica.addNewSegment(0, 1, 1, dmChannelName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = fgservice.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 0,
collID: 1,
partitionID: 1,
channelName: dmChannelName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
assert.Nil(t, err)
req := &datapb.FlushSegmentsRequest{
@ -425,6 +430,28 @@ func TestDataNode(t *testing.T) {
]
}`)
chName1 := "fake-by-dev-rootcoord-dml-testimport-1"
chName2 := "fake-by-dev-rootcoord-dml-testimport-2"
err := node.flowgraphManager.addAndStart(node, &datapb.VchannelInfo{
CollectionID: 100,
ChannelName: chName1,
UnflushedSegmentIds: []int64{},
FlushedSegmentIds: []int64{},
})
require.Nil(t, err)
err = node.flowgraphManager.addAndStart(node, &datapb.VchannelInfo{
CollectionID: 100,
ChannelName: chName2,
UnflushedSegmentIds: []int64{},
FlushedSegmentIds: []int64{},
})
require.Nil(t, err)
_, ok := node.flowgraphManager.getFlowgraphService(chName1)
assert.True(t, ok)
_, ok = node.flowgraphManager.getFlowgraphService(chName2)
assert.True(t, ok)
filePath := "import/rows_1.json"
err = node.chunkManager.Write(filePath, content)
assert.NoError(t, err)
@ -432,11 +459,31 @@ func TestDataNode(t *testing.T) {
ImportTask: &datapb.ImportTask{
CollectionId: 100,
PartitionId: 100,
ChannelNames: []string{"ch1", "ch2"},
ChannelNames: []string{chName1, chName2},
Files: []string{filePath},
RowBased: true,
},
}
node.rootCoord.(*RootCoordFactory).ReportImportErr = true
_, err = node.Import(context.WithValue(ctx, ctxKey{}, ""), req)
assert.NoError(t, err)
node.rootCoord.(*RootCoordFactory).ReportImportErr = false
node.rootCoord.(*RootCoordFactory).ReportImportNotSuccess = true
_, err = node.Import(context.WithValue(ctx, ctxKey{}, ""), req)
assert.NoError(t, err)
node.rootCoord.(*RootCoordFactory).ReportImportNotSuccess = false
node.dataCoord.(*DataCoordFactory).AddSegmentError = true
_, err = node.Import(context.WithValue(ctx, ctxKey{}, ""), req)
assert.NoError(t, err)
node.dataCoord.(*DataCoordFactory).AddSegmentError = false
node.dataCoord.(*DataCoordFactory).AddSegmentNotSuccess = true
_, err = node.Import(context.WithValue(ctx, ctxKey{}, ""), req)
assert.NoError(t, err)
node.dataCoord.(*DataCoordFactory).AddSegmentNotSuccess = false
stat, err := node.Import(context.WithValue(ctx, ctxKey{}, ""), req)
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, stat.GetErrorCode())
@ -736,7 +783,7 @@ func TestDataNode_AddSegment(t *testing.T) {
_, ok = node.flowgraphManager.getFlowgraphService(chName2)
assert.True(t, ok)
stat, err := node.AddSegment(context.WithValue(ctx, ctxKey{}, ""), &datapb.AddSegmentRequest{
stat, err := node.AddImportSegment(context.WithValue(ctx, ctxKey{}, ""), &datapb.AddImportSegmentRequest{
SegmentId: 100,
CollectionId: 100,
PartitionId: 100,
@ -744,10 +791,12 @@ func TestDataNode_AddSegment(t *testing.T) {
RowNum: 500,
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, stat.GetErrorCode())
assert.Equal(t, "", stat.GetReason())
assert.Equal(t, commonpb.ErrorCode_Success, stat.GetStatus().GetErrorCode())
assert.Equal(t, "", stat.GetStatus().GetReason())
assert.NotEqual(t, nil, stat.GetChannelPos())
stat, err = node.AddSegment(context.WithValue(ctx, ctxKey{}, ""), &datapb.AddSegmentRequest{
getFlowGraphServiceAttempts = 3
stat, err = node.AddImportSegment(context.WithValue(ctx, ctxKey{}, ""), &datapb.AddImportSegmentRequest{
SegmentId: 100,
CollectionId: 100,
PartitionId: 100,
@ -755,7 +804,7 @@ func TestDataNode_AddSegment(t *testing.T) {
RowNum: 500,
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_UnexpectedError, stat.GetErrorCode())
assert.Equal(t, commonpb.ErrorCode_UnexpectedError, stat.GetStatus().GetErrorCode())
})
}
@ -1062,11 +1111,35 @@ func TestDataNode_ResendSegmentStats(t *testing.T) {
fgService, ok := node.flowgraphManager.getFlowgraphService(dmChannelName)
assert.True(t, ok)
err = fgService.replica.addNewSegment(0, 1, 1, dmChannelName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = fgService.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 0,
collID: 1,
partitionID: 1,
channelName: dmChannelName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
assert.Nil(t, err)
err = fgService.replica.addNewSegment(1, 1, 2, dmChannelName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = fgService.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: 1,
partitionID: 2,
channelName: dmChannelName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
assert.Nil(t, err)
err = fgService.replica.addNewSegment(2, 1, 3, dmChannelName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = fgService.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 2,
collID: 1,
partitionID: 3,
channelName: dmChannelName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
assert.Nil(t, err)
req := &datapb.ResendSegmentStatsRequest{


@ -32,6 +32,7 @@ import (
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/concurrency"
"github.com/milvus-io/milvus/internal/util/flowgraph"
"github.com/milvus-io/milvus/internal/util/funcutil"
)
// dataSyncService controls a flowgraph for a specific collection
@ -210,8 +211,16 @@ func (dsService *dataSyncService) initNodes(vchanInfo *datapb.VchannelInfo) erro
// avoid closure capture iteration variable
segment := us
future := dsService.ioPool.Submit(func() (interface{}, error) {
if err := dsService.replica.addNormalSegment(segment.GetID(), segment.GetCollectionID(), segment.GetPartitionID(), segment.GetInsertChannel(),
segment.GetNumOfRows(), segment.GetStatslogs(), cp, vchanInfo.GetSeekPosition().GetTimestamp()); err != nil {
if err := dsService.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: segment.GetID(),
collID: segment.CollectionID,
partitionID: segment.PartitionID,
channelName: segment.GetInsertChannel(),
numOfRows: segment.GetNumOfRows(),
statsBinLogs: segment.Statslogs,
cp: cp,
recoverTs: vchanInfo.GetSeekPosition().GetTimestamp()}); err != nil {
return nil, err
}
return nil, nil
@ -238,8 +247,16 @@ func (dsService *dataSyncService) initNodes(vchanInfo *datapb.VchannelInfo) erro
// avoid closure capture iteration variable
segment := fs
future := dsService.ioPool.Submit(func() (interface{}, error) {
if err := dsService.replica.addFlushedSegment(segment.GetID(), segment.GetCollectionID(), segment.GetPartitionID(), segment.GetInsertChannel(),
segment.GetNumOfRows(), segment.GetStatslogs(), vchanInfo.GetSeekPosition().GetTimestamp()); err != nil {
if err := dsService.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: segment.GetID(),
collID: segment.CollectionID,
partitionID: segment.PartitionID,
channelName: segment.GetInsertChannel(),
numOfRows: segment.GetNumOfRows(),
statsBinLogs: segment.Statslogs,
recoverTs: vchanInfo.GetSeekPosition().GetTimestamp(),
}); err != nil {
return nil, err
}
return nil, nil
@ -372,3 +389,23 @@ func (dsService *dataSyncService) getSegmentInfos(segmentIDs []int64) ([]*datapb
}
return infoResp.Infos, nil
}
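// getChannelLatestMsgID returns the serialized latest MsgID of the physical
// channel backing the given virtual channel. The import flow uses it as the
// start and end position of a newly added import segment.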
func (dsService *dataSyncService) getChannelLatestMsgID(ctx context.Context, channelName string) ([]byte, error) {
pChannelName := funcutil.ToPhysicalChannel(channelName)
log.Info("ddNode convert vChannel to pChannel",
zap.String("vChannelName", channelName),
zap.String("pChannelName", pChannelName),
)
dmlStream, err := dsService.msFactory.NewMsgStream(ctx)
if err != nil {
return nil, err
}
defer dmlStream.Close()
dmlStream.AsConsumer([]string{pChannelName}, channelName)
id, err := dmlStream.GetLatestMsgID(pChannelName)
if err != nil {
return nil, err
}
return id.Serialize(), nil
}


@ -139,6 +139,11 @@ func TestDataSyncService_newDataSyncService(te *testing.T) {
0, 0, "", 0,
0, 0, "", 0,
"replica nil"},
{true, false, &mockMsgStreamFactory{true, true},
1, "by-dev-rootcoord-dml-test_v1",
1, 1, "by-dev-rootcoord-dml-test_v1", 0,
1, 2, "by-dev-rootcoord-dml-test_v1", 0,
"add un-flushed and flushed segments"},
}
cm := storage.NewLocalChunkManager(storage.RootPath(dataSyncServiceTestDir))
defer cm.RemoveWithPrefix("")
@ -419,13 +424,42 @@ func TestClearGlobalFlushingCache(t *testing.T) {
flushingSegCache: cache,
}
err = replica.addNewSegment(1, 1, 1, "", &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: 1,
partitionID: 1,
channelName: "",
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{}})
assert.NoError(t, err)
err = replica.addFlushedSegment(2, 1, 1, "", 0, nil, 0)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: 2,
collID: 1,
partitionID: 1,
channelName: "",
numOfRows: 0,
statsBinLogs: nil,
recoverTs: 0,
})
assert.NoError(t, err)
err = replica.addNormalSegment(3, 1, 1, "", 0, nil, nil, 0)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 3,
collID: 1,
partitionID: 1,
channelName: "",
numOfRows: 0,
statsBinLogs: nil,
cp: nil,
recoverTs: 0,
})
assert.NoError(t, err)
cache.checkOrCache(1)
@ -439,3 +473,28 @@ func TestClearGlobalFlushingCache(t *testing.T) {
assert.False(t, cache.checkIfCached(3))
assert.True(t, cache.checkIfCached(4))
}
func TestGetChannelLatestMsgID(t *testing.T) {
delay := time.Now().Add(ctxTimeInMillisecond * time.Millisecond)
ctx, cancel := context.WithDeadline(context.Background(), delay)
defer cancel()
factory := dependency.NewDefaultFactory(true)
dataCoord := &DataCoordFactory{}
dsService := &dataSyncService{
dataCoord: dataCoord,
msFactory: factory,
}
dmlChannelName := "fake-by-dev-rootcoord-dml-channel_12345v0"
insertStream, _ := factory.NewMsgStream(ctx)
insertStream.AsProducer([]string{dmlChannelName})
insertStream.Start()
id, err := dsService.getChannelLatestMsgID(ctx, dmlChannelName)
assert.NoError(t, err)
assert.NotNil(t, id)
}


@ -476,8 +476,16 @@ func (ibNode *insertBufferNode) updateSegStatesInReplica(insertMsgs []*msgstream
partitionID := msg.GetPartitionID()
if !ibNode.replica.hasSegment(currentSegID, true) {
err = ibNode.replica.addNewSegment(currentSegID, collID, partitionID, msg.GetShardName(),
startPos, endPos)
err = ibNode.replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: currentSegID,
collID: collID,
partitionID: partitionID,
channelName: msg.GetShardName(),
startPos: startPos,
endPos: endPos,
})
if err != nil {
log.Error("add segment wrong",
zap.Int64("segID", currentSegID),


@ -24,25 +24,21 @@ import (
"testing"
"time"
"github.com/milvus-io/milvus/internal/util/retry"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/internal/mq/msgstream"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/flowgraph"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/api/schemapb"
"github.com/milvus-io/milvus/internal/mq/msgstream"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/etcdpb"
"github.com/milvus-io/milvus/internal/proto/internalpb"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/types"
"github.com/milvus-io/milvus/internal/util/dependency"
"github.com/milvus-io/milvus/internal/util/flowgraph"
"github.com/milvus-io/milvus/internal/util/retry"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
var insertNodeTestDir = "/tmp/milvus_test/insert_node"
@ -82,8 +78,16 @@ func TestFlowGraphInsertBufferNodeCreate(t *testing.T) {
replica, err := newReplica(ctx, mockRootCoord, cm, collMeta.ID)
assert.Nil(t, err)
err = replica.addNewSegment(1, collMeta.ID, 0, insertChannelName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: collMeta.ID,
partitionID: 0,
channelName: insertChannelName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
require.NoError(t, err)
factory := dependency.NewDefaultFactory(true)
@ -170,7 +174,16 @@ func TestFlowGraphInsertBufferNode_Operate(t *testing.T) {
replica, err := newReplica(ctx, mockRootCoord, cm, collMeta.ID)
assert.Nil(t, err)
err = replica.addNewSegment(1, collMeta.ID, 0, insertChannelName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: collMeta.ID,
partitionID: 0,
channelName: insertChannelName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
require.NoError(t, err)
factory := dependency.NewDefaultFactory(true)
@ -922,8 +935,16 @@ func TestInsertBufferNode_bufferInsertMsg(t *testing.T) {
replica, err := newReplica(ctx, mockRootCoord, cm, collMeta.ID)
assert.Nil(t, err)
err = replica.addNewSegment(1, collMeta.ID, 0, insertChannelName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{Timestamp: 101})
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: collMeta.ID,
partitionID: 0,
channelName: insertChannelName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{Timestamp: 101},
})
require.NoError(t, err)
factory := dependency.NewDefaultFactory(true)


@ -94,7 +94,15 @@ func TestFlowGraphManager(t *testing.T) {
assert.True(t, fm.exist(vchanName))
fg, ok := fm.getFlowgraphService(vchanName)
require.True(t, ok)
err = fg.replica.addNewSegment(100, 1, 10, vchanName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = fg.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 100,
collID: 1,
partitionID: 10,
channelName: vchanName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
require.NoError(t, err)
tests := []struct {
@ -136,7 +144,15 @@ func TestFlowGraphManager(t *testing.T) {
fg, ok := fm.getFlowgraphService(vchanName)
require.True(t, ok)
err = fg.replica.addNewSegment(100, 1, 10, vchanName, &internalpb.MsgPosition{}, &internalpb.MsgPosition{})
err = fg.replica.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 100,
collID: 1,
partitionID: 10,
channelName: vchanName,
startPos: &internalpb.MsgPosition{},
endPos: &internalpb.MsgPosition{},
})
require.NoError(t, err)
tests := []struct {


@ -614,11 +614,17 @@ func TestDropVirtualChannelFunc(t *testing.T) {
}
dropFunc := dropVirtualChannelFunc(dsService, retry.Attempts(1))
t.Run("normal run", func(t *testing.T) {
replica.addNewSegment(2, 1, 10, "vchan_01", &internalpb.MsgPosition{
ChannelName: "vchan_01",
MsgID: []byte{1, 2, 3},
Timestamp: 10,
}, nil)
replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 2,
collID: 1,
partitionID: 10,
channelName: "vchan_01", startPos: &internalpb.MsgPosition{
ChannelName: "vchan_01",
MsgID: []byte{1, 2, 3},
Timestamp: 10,
}, endPos: nil})
assert.NotPanics(t, func() {
dropFunc([]*segmentFlushPack{
{


@ -54,6 +54,26 @@ import (
const ctxTimeInMillisecond = 5000
const debug = false
// segID2SegInfo supplies canned segment infos for DataCoordFactory.GetSegmentInfo, as used in data_sync_service_test.go.
var segID2SegInfo = map[int64]*datapb.SegmentInfo{
1: {
ID: 1,
CollectionID: 1,
PartitionID: 1,
InsertChannel: "by-dev-rootcoord-dml-test_v1",
},
2: {
ID: 2,
CollectionID: 1,
InsertChannel: "by-dev-rootcoord-dml-test_v1",
},
3: {
ID: 3,
CollectionID: 1,
InsertChannel: "by-dev-rootcoord-dml-test_v1",
},
}
var emptyFlushAndDropFunc flushAndDropFunc = func(_ []*segmentFlushPack) {}
func newIDLEDataNodeMock(ctx context.Context, pkType schemapb.DataType) *DataNode {
@ -161,6 +181,9 @@ type RootCoordFactory struct {
collectionName string
collectionID UniqueID
pkType schemapb.DataType
ReportImportErr bool
ReportImportNotSuccess bool
}
type DataCoordFactory struct {
@ -177,6 +200,9 @@ type DataCoordFactory struct {
GetSegmentInfosError bool
GetSegmentInfosNotSuccess bool
AddSegmentError bool
AddSegmentNotSuccess bool
}
func (ds *DataCoordFactory) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) {
@ -227,7 +253,19 @@ func (ds *DataCoordFactory) UpdateSegmentStatistics(ctx context.Context, req *da
}, nil
}
func (ds *DataCoordFactory) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
func (ds *DataCoordFactory) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
func (ds *DataCoordFactory) UnsetIsImportingState(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
func (ds *DataCoordFactory) MarkSegmentsDropped(context.Context, *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
@ -247,9 +285,13 @@ func (ds *DataCoordFactory) GetSegmentInfo(ctx context.Context, req *datapb.GetS
}
var segmentInfos []*datapb.SegmentInfo
for _, segmentID := range req.SegmentIDs {
segmentInfos = append(segmentInfos, &datapb.SegmentInfo{
ID: segmentID,
})
if segInfo, ok := segID2SegInfo[segmentID]; ok {
segmentInfos = append(segmentInfos, segInfo)
} else {
segmentInfos = append(segmentInfos, &datapb.SegmentInfo{
ID: segmentID,
})
}
}
return &datapb.GetSegmentInfoResponse{
Status: &commonpb.Status{
@ -976,6 +1018,16 @@ func (m *RootCoordFactory) ReportImport(ctx context.Context, req *rootcoordpb.Im
return nil, fmt.Errorf("injected error")
}
}
if m.ReportImportErr {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, fmt.Errorf("mock error")
}
if m.ReportImportNotSuccess {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
}, nil
}
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil


@ -61,11 +61,9 @@ type Replica interface {
listAllSegmentIDs() []UniqueID
listNotFlushedSegmentIDs() []UniqueID
addSegment(req addSegmentReq) error
listPartitionSegments(partID UniqueID) []UniqueID
addNewSegment(segID, collID, partitionID UniqueID, channelName string, startPos, endPos *internalpb.MsgPosition) error
addNormalSegment(segID, collID, partitionID UniqueID, channelName string, numOfRows int64, statsBinlog []*datapb.FieldBinlog, cp *segmentCheckPoint, recoverTs Timestamp) error
filterSegments(channelName string, partitionID UniqueID) []*Segment
addFlushedSegment(segID, collID, partitionID UniqueID, channelName string, numOfRows int64, statsBinlog []*datapb.FieldBinlog, recoverTs Timestamp) error
listNewSegmentsStartPositions() []*datapb.SegmentStartPosition
listSegmentsCheckPoints() map[UniqueID]segmentCheckPoint
updateSegmentEndPosition(segID UniqueID, endPos *internalpb.MsgPosition)
@ -121,6 +119,18 @@ type SegmentReplica struct {
chunkManager storage.ChunkManager
}
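// addSegmentReq bundles every parameter accepted by SegmentReplica.addSegment.
// segType selects the target bucket (New/Normal/Flushed); startPos and endPos
// are consulted for new or importing segments, cp only for normal segments,
// and statsBinLogs together with recoverTs drive PK bloom filter recovery.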
type addSegmentReq struct {
segType datapb.SegmentType
segID, collID, partitionID UniqueID
channelName string
numOfRows int64
startPos, endPos *internalpb.MsgPosition
statsBinLogs []*datapb.FieldBinlog
cp *segmentCheckPoint
recoverTs Timestamp
importing bool
}
func (s *Segment) updatePk(pk primaryKey) error {
if s.minPK == nil {
s.minPK = pk
@ -327,49 +337,76 @@ func (replica *SegmentReplica) initSegmentBloomFilter(s *Segment) error {
return nil
}
// addNewSegment adds a *New* and *NotFlushed* new segment. Before add, please make sure there's no
// such segment by `hasSegment`
func (replica *SegmentReplica) addNewSegment(segID, collID, partitionID UniqueID, channelName string,
startPos, endPos *internalpb.MsgPosition) error {
log := log.With(
zap.Int64("segment ID", segID),
zap.Int64("collection ID", collID),
zap.Int64("partition ID", partitionID),
zap.String("channel name", channelName))
if collID != replica.collectionID {
log.Warn("Mismatch collection",
zap.Int64("expected collectionID", replica.collectionID))
return fmt.Errorf("mismatch collection, ID=%d", collID)
// addSegment adds the segment to the current replica. Segments can be added as *new*, *normal* or *flushed*.
// Make sure to verify `replica.hasSegment(segID)` == false before calling `replica.addSegment()`.
func (replica *SegmentReplica) addSegment(req addSegmentReq) error {
if req.collID != replica.collectionID {
log.Warn("collection mismatch",
zap.Int64("current collection ID", req.collID),
zap.Int64("expected collection ID", replica.collectionID))
return fmt.Errorf("mismatch collection, ID=%d", req.collID)
}
log.Info("Add new segment")
log.Info("adding segment",
zap.String("segment type", req.segType.String()),
zap.Int64("segment ID", req.segID),
zap.Int64("collection ID", req.collID),
zap.Int64("partition ID", req.partitionID),
zap.String("channel name", req.channelName),
zap.Any("start position", req.startPos),
zap.Any("end position", req.endPos),
zap.Any("checkpoints", req.cp),
zap.Uint64("recover ts", req.recoverTs),
zap.Bool("importing", req.importing),
)
seg := &Segment{
collectionID: collID,
partitionID: partitionID,
segmentID: segID,
channelName: channelName,
checkPoint: segmentCheckPoint{0, *startPos},
startPos: startPos,
endPos: endPos,
collectionID: req.collID,
partitionID: req.partitionID,
segmentID: req.segID,
channelName: req.channelName,
numRows: req.numOfRows, // 0 if segType == NEW
}
err := replica.initSegmentBloomFilter(seg)
if req.importing || req.segType == datapb.SegmentType_New {
seg.checkPoint = segmentCheckPoint{0, *req.startPos}
seg.startPos = req.startPos
seg.endPos = req.endPos
}
if req.segType == datapb.SegmentType_Normal {
if req.cp != nil {
seg.checkPoint = *req.cp
seg.endPos = &req.cp.pos
}
}
// Set up bloom filter.
err := replica.initPKBloomFilter(seg, req.statsBinLogs, req.recoverTs)
if err != nil {
log.Warn("failed to addNewSegment, init segment bf returns error", zap.Error(err))
log.Error("failed to init bloom filter",
zap.Int64("segment ID", req.segID),
zap.Error(err))
return err
}
seg.isNew.Store(true)
seg.isFlushed.Store(false)
// `isNew` and `isFlushed` are kept for debugging purposes only.
if req.segType == datapb.SegmentType_New {
seg.isNew.Store(true)
} else {
seg.isNew.Store(false)
}
if req.segType == datapb.SegmentType_Flushed {
seg.isFlushed.Store(true)
} else {
seg.isFlushed.Store(false)
}
replica.segMu.Lock()
defer replica.segMu.Unlock()
replica.newSegments[segID] = seg
metrics.DataNodeNumUnflushedSegments.WithLabelValues(fmt.Sprint(Params.DataNodeCfg.GetNodeID())).Inc()
if req.segType == datapb.SegmentType_New {
replica.newSegments[req.segID] = seg
} else if req.segType == datapb.SegmentType_Normal {
replica.normalSegments[req.segID] = seg
} else if req.segType == datapb.SegmentType_Flushed {
replica.flushedSegments[req.segID] = seg
}
replica.segMu.Unlock()
if req.segType == datapb.SegmentType_New || req.segType == datapb.SegmentType_Normal {
metrics.DataNodeNumUnflushedSegments.WithLabelValues(fmt.Sprint(Params.DataNodeCfg.GetNodeID())).Inc()
}
return nil
}
@ -414,92 +451,6 @@ func (replica *SegmentReplica) filterSegments(channelName string, partitionID Un
return results
}
// addNormalSegment adds a *NotNew* and *NotFlushed* segment. Before add, please make sure there's no
// such segment by `hasSegment`
func (replica *SegmentReplica) addNormalSegment(segID, collID, partitionID UniqueID, channelName string, numOfRows int64, statsBinlogs []*datapb.FieldBinlog, cp *segmentCheckPoint, recoverTs Timestamp) error {
log := log.With(
zap.Int64("segment ID", segID),
zap.Int64("collection ID", collID),
zap.Int64("partition ID", partitionID),
zap.String("channel name", channelName))
if collID != replica.collectionID {
log.Warn("Mismatch collection",
zap.Int64("expected collectionID", replica.collectionID))
return fmt.Errorf("mismatch collection, ID=%d", collID)
}
log.Info("Add Normal segment")
seg := &Segment{
collectionID: collID,
partitionID: partitionID,
segmentID: segID,
channelName: channelName,
numRows: numOfRows,
}
if cp != nil {
seg.checkPoint = *cp
seg.endPos = &cp.pos
}
err := replica.initPKBloomFilter(seg, statsBinlogs, recoverTs)
if err != nil {
return err
}
seg.isNew.Store(false)
seg.isFlushed.Store(false)
replica.segMu.Lock()
replica.normalSegments[segID] = seg
replica.segMu.Unlock()
metrics.DataNodeNumUnflushedSegments.WithLabelValues(fmt.Sprint(Params.DataNodeCfg.GetNodeID())).Inc()
return nil
}
// addFlushedSegment adds a *Flushed* segment. Before add, please make sure there's no
// such segment by `hasSegment`
func (replica *SegmentReplica) addFlushedSegment(segID, collID, partitionID UniqueID, channelName string, numOfRows int64, statsBinlogs []*datapb.FieldBinlog, recoverTs Timestamp) error {
log := log.With(
zap.Int64("segment ID", segID),
zap.Int64("collection ID", collID),
zap.Int64("partition ID", partitionID),
zap.String("channel name", channelName))
if collID != replica.collectionID {
log.Warn("Mismatch collection",
zap.Int64("expected collectionID", replica.collectionID))
return fmt.Errorf("mismatch collection, ID=%d", collID)
}
log.Info("Add Flushed segment")
seg := &Segment{
collectionID: collID,
partitionID: partitionID,
segmentID: segID,
channelName: channelName,
numRows: numOfRows,
}
err := replica.initPKBloomFilter(seg, statsBinlogs, recoverTs)
if err != nil {
return err
}
seg.isNew.Store(false)
seg.isFlushed.Store(true)
replica.segMu.Lock()
replica.flushedSegments[segID] = seg
replica.segMu.Unlock()
return nil
}
func (replica *SegmentReplica) initPKBloomFilter(s *Segment, statsBinlogs []*datapb.FieldBinlog, ts Timestamp) error {
log := log.With(zap.Int64("segmentID", s.segmentID))
log.Info("begin to init pk bloom filter", zap.Int("stats bin logs", len(statsBinlogs)))
@ -956,9 +907,10 @@ func (replica *SegmentReplica) listNotFlushedSegmentIDs() []UniqueID {
// getSegmentStatslog returns the segment statslog for the provided segment id.
func (replica *SegmentReplica) getSegmentStatslog(segID UniqueID) ([]byte, error) {
replica.segMu.RLock()
defer replica.segMu.RUnlock()
colID := replica.getCollectionID()
replica.segMu.RUnlock()
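// Release segMu before fetching the schema: getCollectionSchema may issue an
// RPC to RootCoord, and holding the lock across that call could block every
// other segment operation on this replica.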
schema, err := replica.getCollectionSchema(replica.collectionID, 0)
schema, err := replica.getCollectionSchema(colID, 0)
if err != nil {
return nil, err
}


@ -236,8 +236,16 @@ func TestSegmentReplica(t *testing.T) {
t.Run("Test coll mot match", func(t *testing.T) {
replica, err := newReplica(context.Background(), rc, cm, collID)
assert.Nil(t, err)
err = replica.addNewSegment(1, collID+1, 0, "", nil, nil)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: collID + 1,
partitionID: 0,
channelName: "",
startPos: nil,
endPos: nil,
})
assert.NotNil(t, err)
})
@ -373,8 +381,16 @@ func TestSegmentReplica_InterfaceMethod(t *testing.T) {
sr, err := newReplica(context.Background(), rc, cm, test.replicaCollID)
assert.Nil(t, err)
require.False(t, sr.hasSegment(test.inSegID, true))
err = sr.addNewSegment(test.inSegID,
test.inCollID, 1, "", test.instartPos, &internalpb.MsgPosition{})
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: test.inSegID,
collID: test.inCollID,
partitionID: 1,
channelName: "",
startPos: test.instartPos,
endPos: &internalpb.MsgPosition{},
})
if test.isValidCase {
assert.NoError(t, err)
assert.True(t, sr.hasSegment(test.inSegID, true))
@ -409,7 +425,18 @@ func TestSegmentReplica_InterfaceMethod(t *testing.T) {
sr, err := newReplica(context.Background(), rc, &mockDataCM{}, test.replicaCollID)
assert.Nil(t, err)
require.False(t, sr.hasSegment(test.inSegID, true))
err = sr.addNormalSegment(test.inSegID, test.inCollID, 1, "", 0, []*datapb.FieldBinlog{getSimpleFieldBinlog()}, &segmentCheckPoint{}, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: test.inSegID,
collID: test.inCollID,
partitionID: 1,
channelName: "",
numOfRows: 0,
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
cp: &segmentCheckPoint{},
recoverTs: 0,
})
if test.isValidCase {
assert.NoError(t, err)
assert.True(t, sr.hasSegment(test.inSegID, true))
@ -429,7 +456,18 @@ func TestSegmentReplica_InterfaceMethod(t *testing.T) {
segID := int64(101)
require.False(t, sr.hasSegment(segID, true))
assert.NotPanics(t, func() {
err = sr.addNormalSegment(segID, 1, 10, "empty_dml_chan", 0, []*datapb.FieldBinlog{}, nil, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: segID,
collID: 1,
partitionID: 10,
channelName: "empty_dml_chan",
numOfRows: 0,
statsBinLogs: []*datapb.FieldBinlog{},
cp: nil,
recoverTs: 0,
})
assert.NoError(t, err)
})
})
@ -668,9 +706,30 @@ func TestSegmentReplica_InterfaceMethod(t *testing.T) {
cpPos := &internalpb.MsgPosition{ChannelName: "insert-01", Timestamp: Timestamp(10)}
cp := &segmentCheckPoint{int64(10), *cpPos}
err = sr.addNormalSegment(1, 1, 2, "insert-01", int64(10), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, cp, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(10),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
cp: cp,
recoverTs: 0,
})
assert.NotNil(t, err)
err = sr.addFlushedSegment(1, 1, 2, "insert-01", int64(0), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(0),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
recoverTs: 0,
})
assert.NotNil(t, err)
})
@ -681,9 +740,30 @@ func TestSegmentReplica_InterfaceMethod(t *testing.T) {
cpPos := &internalpb.MsgPosition{ChannelName: "insert-01", Timestamp: Timestamp(10)}
cp := &segmentCheckPoint{int64(10), *cpPos}
err = sr.addNormalSegment(1, 1, 2, "insert-01", int64(10), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, cp, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(10),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
cp: cp,
recoverTs: 0,
})
assert.NotNil(t, err)
err = sr.addFlushedSegment(1, 1, 2, "insert-01", int64(0), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(0),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
recoverTs: 0,
})
assert.NotNil(t, err)
})
@ -694,9 +774,30 @@ func TestSegmentReplica_InterfaceMethod(t *testing.T) {
cpPos := &internalpb.MsgPosition{ChannelName: "insert-01", Timestamp: Timestamp(10)}
cp := &segmentCheckPoint{int64(10), *cpPos}
err = sr.addNormalSegment(1, 1, 2, "insert-01", int64(10), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, cp, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(10),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
cp: cp,
recoverTs: 0,
})
assert.NotNil(t, err)
err = sr.addFlushedSegment(1, 1, 2, "insert-01", int64(0), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, 0)
err = sr.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(0),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
recoverTs: 0,
})
assert.NotNil(t, err)
})
@ -795,7 +896,16 @@ func TestInnerFunctionSegment(t *testing.T) {
startPos := &internalpb.MsgPosition{ChannelName: "insert-01", Timestamp: Timestamp(100)}
endPos := &internalpb.MsgPosition{ChannelName: "insert-01", Timestamp: Timestamp(200)}
err = replica.addNewSegment(0, 1, 2, "insert-01", startPos, endPos)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 0,
collID: 1,
partitionID: 2,
channelName: "insert-01",
startPos: startPos,
endPos: endPos,
})
assert.NoError(t, err)
assert.True(t, replica.hasSegment(0, true))
assert.Equal(t, 1, len(replica.newSegments))
@ -820,7 +930,18 @@ func TestInnerFunctionSegment(t *testing.T) {
cpPos := &internalpb.MsgPosition{ChannelName: "insert-01", Timestamp: Timestamp(10)}
cp := &segmentCheckPoint{int64(10), *cpPos}
err = replica.addNormalSegment(1, 1, 2, "insert-01", int64(10), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, cp, 0)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(10),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
cp: cp,
recoverTs: 0,
})
assert.NoError(t, err)
assert.True(t, replica.hasSegment(1, true))
assert.Equal(t, 1, len(replica.normalSegments))
@ -837,7 +958,18 @@ func TestInnerFunctionSegment(t *testing.T) {
assert.False(t, seg.isNew.Load().(bool))
assert.False(t, seg.isFlushed.Load().(bool))
err = replica.addNormalSegment(1, 100000, 2, "invalid", int64(0), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, &segmentCheckPoint{}, 0)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 1,
collID: 100000,
partitionID: 2,
channelName: "invalid",
numOfRows: int64(0),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
cp: &segmentCheckPoint{},
recoverTs: 0,
})
assert.Error(t, err)
replica.updateStatistics(1, 10)
@ -872,7 +1004,17 @@ func TestInnerFunctionSegment(t *testing.T) {
replica.updateSegmentCheckPoint(1)
assert.Equal(t, int64(20), replica.normalSegments[UniqueID(1)].checkPoint.numRows)
err = replica.addFlushedSegment(1, 1, 2, "insert-01", int64(0), []*datapb.FieldBinlog{getSimpleFieldBinlog()}, 0)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: 1,
collID: 1,
partitionID: 2,
channelName: "insert-01",
numOfRows: int64(0),
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
recoverTs: 0,
})
assert.Nil(t, err)
totalSegments := replica.filterSegments("insert-01", common.InvalidPartitionID)
@ -968,9 +1110,29 @@ func TestReplica_UpdatePKRange(t *testing.T) {
assert.Nil(t, err)
replica.chunkManager = &mockDataCM{}
err = replica.addNewSegment(1, collID, partID, chanName, startPos, endPos)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: collID,
partitionID: partID,
channelName: chanName,
startPos: startPos,
endPos: endPos,
})
assert.Nil(t, err)
err = replica.addNormalSegment(2, collID, partID, chanName, 100, []*datapb.FieldBinlog{getSimpleFieldBinlog()}, cp, 0)
err = replica.addSegment(
addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 2,
collID: collID,
partitionID: partID,
channelName: chanName,
numOfRows: 100,
statsBinLogs: []*datapb.FieldBinlog{getSimpleFieldBinlog()},
cp: cp,
recoverTs: 0,
})
assert.Nil(t, err)
segNew := replica.newSegments[1]
@ -1031,11 +1193,38 @@ func (s *SegmentReplicaSuite) TearDownSuite() {
func (s *SegmentReplicaSuite) SetupTest() {
var err error
err = s.sr.addNewSegment(1, s.collID, s.partID, s.vchanName, &internalpb.MsgPosition{}, nil)
err = s.sr.addSegment(addSegmentReq{
segType: datapb.SegmentType_New,
segID: 1,
collID: s.collID,
partitionID: s.partID,
channelName: s.vchanName,
startPos: &internalpb.MsgPosition{},
endPos: nil,
})
s.Require().NoError(err)
err = s.sr.addNormalSegment(2, s.collID, s.partID, s.vchanName, 10, nil, nil, 0)
err = s.sr.addSegment(addSegmentReq{
segType: datapb.SegmentType_Normal,
segID: 2,
collID: s.collID,
partitionID: s.partID,
channelName: s.vchanName,
numOfRows: 10,
statsBinLogs: nil,
cp: nil,
recoverTs: 0,
})
s.Require().NoError(err)
err = s.sr.addFlushedSegment(3, s.collID, s.partID, s.vchanName, 10, nil, 0)
err = s.sr.addSegment(addSegmentReq{
segType: datapb.SegmentType_Flushed,
segID: 3,
collID: s.collID,
partitionID: s.partID,
channelName: s.vchanName,
numOfRows: 10,
statsBinLogs: nil,
recoverTs: 0,
})
s.Require().NoError(err)
}


@ -569,13 +569,39 @@ func (c *Client) ReleaseSegmentLock(ctx context.Context, req *datapb.ReleaseSegm
return ret.(*commonpb.Status), err
}
// AddSegment is the DataCoord client side code for AddSegment call.
func (c *Client) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
// SaveImportSegment is the DataCoord client side code for SaveImportSegment call.
func (c *Client) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
ret, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) {
if !funcutil.CheckCtxValid(ctx) {
return nil, ctx.Err()
}
return client.(datapb.DataCoordClient).AddSegment(ctx, req)
return client.(datapb.DataCoordClient).SaveImportSegment(ctx, req)
})
if err != nil || ret == nil {
return nil, err
}
return ret.(*commonpb.Status), err
}
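// UnsetIsImportingState is the DataCoord client side code for UnsetIsImportingState call.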
func (c *Client) UnsetIsImportingState(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
ret, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) {
if !funcutil.CheckCtxValid(ctx) {
return nil, ctx.Err()
}
return client.(datapb.DataCoordClient).UnsetIsImportingState(ctx, req)
})
if err != nil || ret == nil {
return nil, err
}
return ret.(*commonpb.Status), err
}
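// MarkSegmentsDropped is the DataCoord client side code for MarkSegmentsDropped call.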
func (c *Client) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
ret, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) {
if !funcutil.CheckCtxValid(ctx) {
return nil, ctx.Err()
}
return client.(datapb.DataCoordClient).MarkSegmentsDropped(ctx, req)
})
if err != nil || ret == nil {
return nil, err


@ -133,11 +133,17 @@ func Test_NewClient(t *testing.T) {
r26, err := client.ReleaseSegmentLock(ctx, nil)
retCheck(retNotNil, r26, err)
r27, err := client.AddSegment(ctx, nil)
r27, err := client.SaveImportSegment(ctx, nil)
retCheck(retNotNil, r27, err)
r28, err := client.ShowConfigurations(ctx, nil)
retCheck(retNotNil, r28, err)
r29, err := client.UnsetIsImportingState(ctx, nil)
retCheck(retNotNil, r29, err)
r30, err := client.MarkSegmentsDropped(ctx, nil)
retCheck(retNotNil, r30, err)
r31, err := client.ShowConfigurations(ctx, nil)
retCheck(retNotNil, r31, err)
}
client.grpcClient = &mock.GRPCClientBase{


@ -377,6 +377,18 @@ func (s *Server) ReleaseSegmentLock(ctx context.Context, req *datapb.ReleaseSegm
return s.dataCoord.ReleaseSegmentLock(ctx, req)
}
func (s *Server) AddSegment(ctx context.Context, request *datapb.AddSegmentRequest) (*commonpb.Status, error) {
return s.dataCoord.AddSegment(ctx, request)
// SaveImportSegment saves the import segment binlog paths data and then looks for the right DataNode to add the
// segment to that DataNode.
func (s *Server) SaveImportSegment(ctx context.Context, request *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
return s.dataCoord.SaveImportSegment(ctx, request)
}
// UnsetIsImportingState is the distributed caller of UnsetIsImportingState.
func (s *Server) UnsetIsImportingState(ctx context.Context, request *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return s.dataCoord.UnsetIsImportingState(ctx, request)
}
// MarkSegmentsDropped is the distributed caller of MarkSegmentsDropped.
func (s *Server) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
return s.dataCoord.MarkSegmentsDropped(ctx, req)
}


@ -33,37 +33,39 @@ import (
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
type MockDataCoord struct {
states *internalpb.ComponentStates
status *commonpb.Status
err error
initErr error
startErr error
stopErr error
regErr error
strResp *milvuspb.StringResponse
infoResp *datapb.GetSegmentInfoResponse
flushResp *datapb.FlushResponse
assignResp *datapb.AssignSegmentIDResponse
segStateResp *datapb.GetSegmentStatesResponse
binResp *datapb.GetInsertBinlogPathsResponse
colStatResp *datapb.GetCollectionStatisticsResponse
partStatResp *datapb.GetPartitionStatisticsResponse
recoverResp *datapb.GetRecoveryInfoResponse
flushSegResp *datapb.GetFlushedSegmentsResponse
configResp *internalpb.ShowConfigurationsResponse
metricResp *milvuspb.GetMetricsResponse
compactionStateResp *milvuspb.GetCompactionStateResponse
manualCompactionResp *milvuspb.ManualCompactionResponse
compactionPlansResp *milvuspb.GetCompactionPlansResponse
watchChannelsResp *datapb.WatchChannelsResponse
getFlushStateResp *milvuspb.GetFlushStateResponse
dropVChanResp *datapb.DropVirtualChannelResponse
setSegmentStateResp *datapb.SetSegmentStateResponse
importResp *datapb.ImportTaskResponse
updateSegStatResp *commonpb.Status
acquireSegLockResp *commonpb.Status
releaseSegLockResp *commonpb.Status
addSegmentResp *commonpb.Status
states *internalpb.ComponentStates
status *commonpb.Status
err error
initErr error
startErr error
stopErr error
regErr error
strResp *milvuspb.StringResponse
infoResp *datapb.GetSegmentInfoResponse
flushResp *datapb.FlushResponse
assignResp *datapb.AssignSegmentIDResponse
segStateResp *datapb.GetSegmentStatesResponse
binResp *datapb.GetInsertBinlogPathsResponse
colStatResp *datapb.GetCollectionStatisticsResponse
partStatResp *datapb.GetPartitionStatisticsResponse
recoverResp *datapb.GetRecoveryInfoResponse
flushSegResp *datapb.GetFlushedSegmentsResponse
configResp *internalpb.ShowConfigurationsResponse
metricResp *milvuspb.GetMetricsResponse
compactionStateResp *milvuspb.GetCompactionStateResponse
manualCompactionResp *milvuspb.ManualCompactionResponse
compactionPlansResp *milvuspb.GetCompactionPlansResponse
watchChannelsResp *datapb.WatchChannelsResponse
getFlushStateResp *milvuspb.GetFlushStateResponse
dropVChanResp *datapb.DropVirtualChannelResponse
setSegmentStateResp *datapb.SetSegmentStateResponse
importResp *datapb.ImportTaskResponse
updateSegStatResp *commonpb.Status
acquireSegLockResp *commonpb.Status
releaseSegLockResp *commonpb.Status
addSegmentResp *commonpb.Status
unsetIsImportingStateResp *commonpb.Status
markSegmentsDroppedResp *commonpb.Status
}
func (m *MockDataCoord) Init() error {
@ -200,10 +202,18 @@ func (m *MockDataCoord) ReleaseSegmentLock(ctx context.Context, req *datapb.Rele
return m.releaseSegLockResp, m.err
}
func (m *MockDataCoord) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
func (m *MockDataCoord) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
return m.addSegmentResp, m.err
}
func (m *MockDataCoord) UnsetIsImportingState(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return m.unsetIsImportingStateResp, m.err
}
func (m *MockDataCoord) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
return m.markSegmentsDroppedResp, m.err
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
func Test_NewServer(t *testing.T) {
ctx := context.Background()
@ -471,13 +481,35 @@ func Test_NewServer(t *testing.T) {
assert.NotNil(t, resp)
})
t.Run("add segment", func(t *testing.T) {
t.Run("save import segment", func(t *testing.T) {
server.dataCoord = &MockDataCoord{
addSegmentResp: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
resp, err := server.AddSegment(ctx, nil)
resp, err := server.SaveImportSegment(ctx, nil)
assert.Nil(t, err)
assert.NotNil(t, resp)
})
t.Run("unset isImporting state", func(t *testing.T) {
server.dataCoord = &MockDataCoord{
unsetIsImportingStateResp: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
resp, err := server.UnsetIsImportingState(ctx, nil)
assert.Nil(t, err)
assert.NotNil(t, resp)
})
t.Run("mark segments dropped", func(t *testing.T) {
server.dataCoord = &MockDataCoord{
markSegmentsDroppedResp: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
resp, err := server.MarkSegmentsDropped(ctx, nil)
assert.Nil(t, err)
assert.NotNil(t, resp)
})


@ -245,18 +245,18 @@ func (c *Client) ResendSegmentStats(ctx context.Context, req *datapb.ResendSegme
return ret.(*datapb.ResendSegmentStatsResponse), err
}
// AddSegment is the DataNode client side code for AddSegment call.
func (c *Client) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
// AddImportSegment is the DataNode client side code for AddImportSegment call.
func (c *Client) AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error) {
ret, err := c.grpcClient.ReCall(ctx, func(client interface{}) (interface{}, error) {
if !funcutil.CheckCtxValid(ctx) {
return nil, ctx.Err()
}
return client.(datapb.DataNodeClient).AddSegment(ctx, req)
return client.(datapb.DataNodeClient).AddImportSegment(ctx, req)
})
if err != nil || ret == nil {
return nil, err
}
return ret.(*commonpb.Status), err
return ret.(*datapb.AddImportSegmentResponse), err
}
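As a usage illustration (not part of the patch): a minimal sketch of calling the renamed client method, assuming an initialized *Client from this package; the IDs and channel name are hypothetical, and the request fields mirror the AddImportSegmentRequest message defined later in this patch.
// Sketch only. `cli` is assumed to be a *Client built by this package;
// the IDs and channel name are illustrative.
func addImportSegmentExample(ctx context.Context, cli *Client) error {
	resp, err := cli.AddImportSegment(ctx, &datapb.AddImportSegmentRequest{
		SegmentId:    42,              // hypothetical segment ID
		ChannelName:  "dml-channel-0", // hypothetical vchannel
		CollectionId: 1,
		PartitionId:  2,
		RowNum:       1000,
	})
	if err != nil {
		return err // ReCall already retried; treat this as a hard failure
	}
	// channel_pos reports the position from which the channel should replay.
	_ = resp.GetChannelPos()
	return nil
}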
// SyncSegments is the DataNode client side code for SyncSegments call.

View File

@ -83,7 +83,7 @@ func Test_NewClient(t *testing.T) {
r8, err := client.ResendSegmentStats(ctx, nil)
retCheck(retNotNil, r8, err)
r9, err := client.AddSegment(ctx, nil)
r9, err := client.AddImportSegment(ctx, nil)
retCheck(retNotNil, r9, err)
r10, err := client.ShowConfigurations(ctx, nil)

View File

@ -378,8 +378,8 @@ func (s *Server) ResendSegmentStats(ctx context.Context, request *datapb.ResendS
return s.datanode.ResendSegmentStats(ctx, request)
}
func (s *Server) AddSegment(ctx context.Context, request *datapb.AddSegmentRequest) (*commonpb.Status, error) {
return s.datanode.AddSegment(ctx, request)
func (s *Server) AddImportSegment(ctx context.Context, request *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error) {
return s.datanode.AddImportSegment(ctx, request)
}
func (s *Server) SyncSegments(ctx context.Context, request *datapb.SyncSegmentsRequest) (*commonpb.Status, error) {

View File

@ -36,19 +36,20 @@ import (
type MockDataNode struct {
nodeID typeutil.UniqueID
stateCode internalpb.StateCode
states *internalpb.ComponentStates
status *commonpb.Status
err error
initErr error
startErr error
stopErr error
regErr error
strResp *milvuspb.StringResponse
configResp *internalpb.ShowConfigurationsResponse
metricResp *milvuspb.GetMetricsResponse
resendResp *datapb.ResendSegmentStatsResponse
compactionResp *datapb.CompactionStateResponse
stateCode internalpb.StateCode
states *internalpb.ComponentStates
status *commonpb.Status
err error
initErr error
startErr error
stopErr error
regErr error
strResp *milvuspb.StringResponse
configResp *internalpb.ShowConfigurationsResponse
metricResp *milvuspb.GetMetricsResponse
resendResp *datapb.ResendSegmentStatsResponse
addImportSegmentResp *datapb.AddImportSegmentResponse
compactionResp *datapb.CompactionStateResponse
}
func (m *MockDataNode) Init() error {
@ -130,8 +131,8 @@ func (m *MockDataNode) ResendSegmentStats(ctx context.Context, req *datapb.Resen
return m.resendResp, m.err
}
func (m *MockDataNode) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
return m.status, m.err
func (m *MockDataNode) AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error) {
return m.addImportSegmentResp, m.err
}
func (m *MockDataNode) SyncSegments(ctx context.Context, req *datapb.SyncSegmentsRequest) (*commonpb.Status, error) {
@ -303,8 +304,13 @@ func Test_NewServer(t *testing.T) {
t.Run("add segment", func(t *testing.T) {
server.datanode = &MockDataNode{
status: &commonpb.Status{},
addImportSegmentResp: &datapb.AddImportSegmentResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
},
}
resp, err := server.AddSegment(ctx, nil)
resp, err := server.AddImportSegment(ctx, nil)
assert.Nil(t, err)
assert.NotNil(t, resp)
})

View File

@ -29,24 +29,10 @@ import (
"sync"
"time"
"google.golang.org/grpc/credentials"
grpc_auth "github.com/grpc-ecosystem/go-grpc-middleware/auth"
grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"github.com/gin-gonic/gin"
grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware"
grpc_auth "github.com/grpc-ecosystem/go-grpc-middleware/auth"
ot "github.com/grpc-ecosystem/go-grpc-middleware/tracing/opentracing"
"github.com/opentracing/opentracing-go"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
"google.golang.org/grpc"
"google.golang.org/grpc/health/grpc_health_v1"
"google.golang.org/grpc/keepalive"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
dcc "github.com/milvus-io/milvus/internal/distributed/datacoord/client"
@ -66,6 +52,15 @@ import (
"github.com/milvus-io/milvus/internal/util/paramtable"
"github.com/milvus-io/milvus/internal/util/trace"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/opentracing/opentracing-go"
clientv3 "go.etcd.io/etcd/client/v3"
"go.uber.org/zap"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/credentials"
"google.golang.org/grpc/health/grpc_health_v1"
"google.golang.org/grpc/keepalive"
"google.golang.org/grpc/status"
)
var Params paramtable.GrpcServerConfig

View File

@ -466,7 +466,15 @@ func (m *MockDataCoord) Flush(ctx context.Context, req *datapb.FlushRequest) (*d
return nil, nil
}
func (m *MockDataCoord) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
func (m *MockDataCoord) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
return nil, nil
}
func (m *MockDataCoord) UnsetIsImportingState(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return nil, nil
}
func (m *MockDataCoord) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
return nil, nil
}

View File

@ -410,7 +410,6 @@ func Test_NewClient(t *testing.T) {
rTimeout, err := client.ListPolicy(shortCtx, nil)
retCheck(rTimeout, err)
}
// clean up
err = client.Stop()
assert.Nil(t, err)

View File

@ -17,7 +17,9 @@
package kv
import (
"errors"
"strings"
"sync"
"github.com/milvus-io/milvus/internal/log"
clientv3 "go.etcd.io/etcd/client/v3"
@ -25,12 +27,13 @@ import (
)
type MockBaseKV struct {
InMemKv map[string]string
InMemKv sync.Map
}
func (m *MockBaseKV) Load(key string) (string, error) {
if val, ok := m.InMemKv[key]; ok {
return val, nil
log.Debug("doing load", zap.String("key", key))
if val, ok := m.InMemKv.Load(key); ok {
return val.(string), nil
}
return "", nil
}
@ -44,7 +47,9 @@ func (m *MockBaseKV) LoadWithPrefix(key string) ([]string, []string, error) {
}
func (m *MockBaseKV) Save(key string, value string) error {
panic("not implemented") // TODO: Implement
m.InMemKv.Store(key, value)
log.Debug("doing Save", zap.String("key", key))
return nil
}
func (m *MockBaseKV) MultiSave(kvs map[string]string) error {
@ -52,7 +57,9 @@ func (m *MockBaseKV) MultiSave(kvs map[string]string) error {
}
func (m *MockBaseKV) Remove(key string) error {
panic("not implemented") // TODO: Implement
m.InMemKv.Delete(key)
log.Debug("doing Remove", zap.String("key", key))
return nil
}
func (m *MockBaseKV) MultiRemove(keys []string) error {
@ -85,6 +92,9 @@ func (m *MockTxnKV) MultiSaveAndRemoveWithPrefix(saves map[string]string, remova
type MockMetaKV struct {
MockTxnKV
LoadWithPrefixMockErr bool
SaveMockErr bool
}
func (m *MockMetaKV) GetPath(key string) string {
@ -92,14 +102,18 @@ func (m *MockMetaKV) GetPath(key string) string {
}
func (m *MockMetaKV) LoadWithPrefix(prefix string) ([]string, []string, error) {
keys := make([]string, 0, len(m.InMemKv))
values := make([]string, 0, len(m.InMemKv))
for k, v := range m.InMemKv {
if strings.HasPrefix(k, prefix) {
keys = append(keys, k)
values = append(values, v)
}
if m.LoadWithPrefixMockErr {
return nil, nil, errors.New("mock err")
}
keys := make([]string, 0)
values := make([]string, 0)
m.InMemKv.Range(func(key, value interface{}) bool {
if strings.HasPrefix(key.(string), prefix) {
keys = append(keys, key.(string))
values = append(values, value.(string))
}
return true
})
return keys, values, nil
}
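Switching the mock from a plain map to sync.Map makes it safe for the concurrent loops the import-manager tests run. A minimal sketch of the property this buys, assuming callers hit the mock from several goroutines (fmt and sync imports assumed):
// Sketch: concurrent Saves no longer race; a plain map would trip Go's race
// detector here. MockMetaKV's zero value is usable because sync.Map needs
// no initialization.
kv := &MockMetaKV{}
var wg sync.WaitGroup
for i := 0; i < 8; i++ {
	wg.Add(1)
	go func(n int) {
		defer wg.Done()
		_ = kv.Save(fmt.Sprintf("key-%d", n), "value")
	}(i)
}
wg.Wait()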
@ -128,13 +142,16 @@ func (m *MockMetaKV) WatchWithRevision(key string, revision int64) clientv3.Watc
}
func (m *MockMetaKV) SaveWithLease(key, value string, id clientv3.LeaseID) error {
m.InMemKv[key] = value
m.InMemKv.Store(key, value)
log.Debug("Doing SaveWithLease", zap.String("key", key))
return nil
}
func (m *MockMetaKV) SaveWithIgnoreLease(key, value string) error {
m.InMemKv[key] = value
if m.SaveMockErr {
return errors.New("mock error")
}
m.InMemKv.Store(key, value)
log.Debug("Doing SaveWithIgnoreLease", zap.String("key", key))
return nil
}

View File

@ -17,6 +17,7 @@
package kv
import (
"sync"
"testing"
"github.com/stretchr/testify/assert"
@ -28,7 +29,7 @@ const testValue = "value"
func TestMockKV_MetaKV(t *testing.T) {
mockKv := &MockMetaKV{}
mockKv.InMemKv = make(map[string]string)
mockKv.InMemKv = sync.Map{}
var err error
value, err := mockKv.Load(testKey)
@ -42,17 +43,13 @@ func TestMockKV_MetaKV(t *testing.T) {
_, _, err = mockKv.LoadWithPrefix(testKey)
assert.NoError(t, err)
assert.Panics(t, func() {
mockKv.Save(testKey, testValue)
})
assert.NoError(t, mockKv.Save(testKey, testValue))
assert.Panics(t, func() {
mockKv.MultiSave(map[string]string{testKey: testValue})
})
assert.Panics(t, func() {
mockKv.Remove(testKey)
})
assert.NoError(t, mockKv.Remove(testKey))
assert.Panics(t, func() {
mockKv.MultiRemove([]string{testKey})

View File

@ -76,53 +76,6 @@ func (_c *DataCoord_AcquireSegmentLock_Call) Return(_a0 *commonpb.Status, _a1 er
return _c
}
// AddSegment provides a mock function with given fields: ctx, req
func (_m *DataCoord) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
ret := _m.Called(ctx, req)
var r0 *commonpb.Status
if rf, ok := ret.Get(0).(func(context.Context, *datapb.AddSegmentRequest) *commonpb.Status); ok {
r0 = rf(ctx, req)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*commonpb.Status)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, *datapb.AddSegmentRequest) error); ok {
r1 = rf(ctx, req)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// DataCoord_AddSegment_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddSegment'
type DataCoord_AddSegment_Call struct {
*mock.Call
}
// AddSegment is a helper method to define mock.On call
// - ctx context.Context
// - req *datapb.AddSegmentRequest
func (_e *DataCoord_Expecter) AddSegment(ctx interface{}, req interface{}) *DataCoord_AddSegment_Call {
return &DataCoord_AddSegment_Call{Call: _e.mock.On("AddSegment", ctx, req)}
}
func (_c *DataCoord_AddSegment_Call) Run(run func(ctx context.Context, req *datapb.AddSegmentRequest)) *DataCoord_AddSegment_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context), args[1].(*datapb.AddSegmentRequest))
})
return _c
}
func (_c *DataCoord_AddSegment_Call) Return(_a0 *commonpb.Status, _a1 error) *DataCoord_AddSegment_Call {
_c.Call.Return(_a0, _a1)
return _c
}
// AssignSegmentID provides a mock function with given fields: ctx, req
func (_m *DataCoord) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) {
ret := _m.Called(ctx, req)
@ -1095,6 +1048,53 @@ func (_c *DataCoord_ManualCompaction_Call) Return(_a0 *milvuspb.ManualCompaction
return _c
}
// MarkSegmentsDropped provides a mock function with given fields: ctx, req
func (_m *DataCoord) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
ret := _m.Called(ctx, req)
var r0 *commonpb.Status
if rf, ok := ret.Get(0).(func(context.Context, *datapb.MarkSegmentsDroppedRequest) *commonpb.Status); ok {
r0 = rf(ctx, req)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*commonpb.Status)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, *datapb.MarkSegmentsDroppedRequest) error); ok {
r1 = rf(ctx, req)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// DataCoord_MarkSegmentsDropped_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'MarkSegmentsDropped'
type DataCoord_MarkSegmentsDropped_Call struct {
*mock.Call
}
// MarkSegmentsDropped is a helper method to define mock.On call
// - ctx context.Context
// - req *datapb.MarkSegmentsDroppedRequest
func (_e *DataCoord_Expecter) MarkSegmentsDropped(ctx interface{}, req interface{}) *DataCoord_MarkSegmentsDropped_Call {
return &DataCoord_MarkSegmentsDropped_Call{Call: _e.mock.On("MarkSegmentsDropped", ctx, req)}
}
func (_c *DataCoord_MarkSegmentsDropped_Call) Run(run func(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest)) *DataCoord_MarkSegmentsDropped_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context), args[1].(*datapb.MarkSegmentsDroppedRequest))
})
return _c
}
func (_c *DataCoord_MarkSegmentsDropped_Call) Return(_a0 *commonpb.Status, _a1 error) *DataCoord_MarkSegmentsDropped_Call {
_c.Call.Return(_a0, _a1)
return _c
}
// Register provides a mock function with given fields:
func (_m *DataCoord) Register() error {
ret := _m.Called()
@ -1225,6 +1225,53 @@ func (_c *DataCoord_SaveBinlogPaths_Call) Return(_a0 *commonpb.Status, _a1 error
return _c
}
// SaveImportSegment provides a mock function with given fields: ctx, req
func (_m *DataCoord) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
ret := _m.Called(ctx, req)
var r0 *commonpb.Status
if rf, ok := ret.Get(0).(func(context.Context, *datapb.SaveImportSegmentRequest) *commonpb.Status); ok {
r0 = rf(ctx, req)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*commonpb.Status)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, *datapb.SaveImportSegmentRequest) error); ok {
r1 = rf(ctx, req)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// DataCoord_SaveImportSegment_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'SaveImportSegment'
type DataCoord_SaveImportSegment_Call struct {
*mock.Call
}
// SaveImportSegment is a helper method to define mock.On call
// - ctx context.Context
// - req *datapb.SaveImportSegmentRequest
func (_e *DataCoord_Expecter) SaveImportSegment(ctx interface{}, req interface{}) *DataCoord_SaveImportSegment_Call {
return &DataCoord_SaveImportSegment_Call{Call: _e.mock.On("SaveImportSegment", ctx, req)}
}
func (_c *DataCoord_SaveImportSegment_Call) Run(run func(ctx context.Context, req *datapb.SaveImportSegmentRequest)) *DataCoord_SaveImportSegment_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context), args[1].(*datapb.SaveImportSegmentRequest))
})
return _c
}
func (_c *DataCoord_SaveImportSegment_Call) Return(_a0 *commonpb.Status, _a1 error) *DataCoord_SaveImportSegment_Call {
_c.Call.Return(_a0, _a1)
return _c
}
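For readers unfamiliar with the generated expecter pattern, a hedged sketch of how a test might program this mock; the `mocks` package path and variable names are assumptions:
// Sketch: wiring the generated expecter in a test.
dc := &mocks.DataCoord{}
dc.EXPECT().
	SaveImportSegment(mock.Anything, mock.Anything).
	Return(&commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, nil)
status, err := dc.SaveImportSegment(context.Background(), &datapb.SaveImportSegmentRequest{})
// status carries ErrorCode_Success and err is nil, per the Return above.
_, _ = status, err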
// SetSegmentState provides a mock function with given fields: ctx, req
func (_m *DataCoord) SetSegmentState(ctx context.Context, req *datapb.SetSegmentStateRequest) (*datapb.SetSegmentStateResponse, error) {
ret := _m.Called(ctx, req)
@ -1391,6 +1438,53 @@ func (_c *DataCoord_Stop_Call) Return(_a0 error) *DataCoord_Stop_Call {
return _c
}
// UnsetIsImportingState provides a mock function with given fields: ctx, req
func (_m *DataCoord) UnsetIsImportingState(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
ret := _m.Called(ctx, req)
var r0 *commonpb.Status
if rf, ok := ret.Get(0).(func(context.Context, *datapb.UnsetIsImportingStateRequest) *commonpb.Status); ok {
r0 = rf(ctx, req)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*commonpb.Status)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, *datapb.UnsetIsImportingStateRequest) error); ok {
r1 = rf(ctx, req)
} else {
r1 = ret.Error(1)
}
return r0, r1
}
// DataCoord_UnsetIsImportingState_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'UnsetIsImportingState'
type DataCoord_UnsetIsImportingState_Call struct {
*mock.Call
}
// UnsetIsImportingState is a helper method to define mock.On call
// - ctx context.Context
// - req *datapb.UnsetIsImportingStateRequest
func (_e *DataCoord_Expecter) UnsetIsImportingState(ctx interface{}, req interface{}) *DataCoord_UnsetIsImportingState_Call {
return &DataCoord_UnsetIsImportingState_Call{Call: _e.mock.On("UnsetIsImportingState", ctx, req)}
}
func (_c *DataCoord_UnsetIsImportingState_Call) Run(run func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest)) *DataCoord_UnsetIsImportingState_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context), args[1].(*datapb.UnsetIsImportingStateRequest))
})
return _c
}
func (_c *DataCoord_UnsetIsImportingState_Call) Return(_a0 *commonpb.Status, _a1 error) *DataCoord_UnsetIsImportingState_Call {
_c.Call.Return(_a0, _a1)
return _c
}
// UpdateSegmentStatistics provides a mock function with given fields: ctx, req
func (_m *DataCoord) UpdateSegmentStatistics(ctx context.Context, req *datapb.UpdateSegmentStatisticsRequest) (*commonpb.Status, error) {
ret := _m.Called(ctx, req)

View File

@ -29,21 +29,21 @@ func (_m *DataNode) EXPECT() *DataNode_Expecter {
return &DataNode_Expecter{mock: &_m.Mock}
}
// AddSegment provides a mock function with given fields: ctx, req
func (_m *DataNode) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
// AddImportSegment provides a mock function with given fields: ctx, req
func (_m *DataNode) AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error) {
ret := _m.Called(ctx, req)
var r0 *commonpb.Status
if rf, ok := ret.Get(0).(func(context.Context, *datapb.AddSegmentRequest) *commonpb.Status); ok {
var r0 *datapb.AddImportSegmentResponse
if rf, ok := ret.Get(0).(func(context.Context, *datapb.AddImportSegmentRequest) *datapb.AddImportSegmentResponse); ok {
r0 = rf(ctx, req)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*commonpb.Status)
r0 = ret.Get(0).(*datapb.AddImportSegmentResponse)
}
}
var r1 error
if rf, ok := ret.Get(1).(func(context.Context, *datapb.AddSegmentRequest) error); ok {
if rf, ok := ret.Get(1).(func(context.Context, *datapb.AddImportSegmentRequest) error); ok {
r1 = rf(ctx, req)
} else {
r1 = ret.Error(1)
@ -52,26 +52,26 @@ func (_m *DataNode) AddSegment(ctx context.Context, req *datapb.AddSegmentReques
return r0, r1
}
// DataNode_AddSegment_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddSegment'
type DataNode_AddSegment_Call struct {
// DataNode_AddImportSegment_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'AddImportSegment'
type DataNode_AddImportSegment_Call struct {
*mock.Call
}
// AddSegment is a helper method to define mock.On call
// AddImportSegment is a helper method to define mock.On call
// - ctx context.Context
// - req *datapb.AddSegmentRequest
func (_e *DataNode_Expecter) AddSegment(ctx interface{}, req interface{}) *DataNode_AddSegment_Call {
return &DataNode_AddSegment_Call{Call: _e.mock.On("AddSegment", ctx, req)}
// - req *datapb.AddImportSegmentRequest
func (_e *DataNode_Expecter) AddImportSegment(ctx interface{}, req interface{}) *DataNode_AddImportSegment_Call {
return &DataNode_AddImportSegment_Call{Call: _e.mock.On("AddImportSegment", ctx, req)}
}
func (_c *DataNode_AddSegment_Call) Run(run func(ctx context.Context, req *datapb.AddSegmentRequest)) *DataNode_AddSegment_Call {
func (_c *DataNode_AddImportSegment_Call) Run(run func(ctx context.Context, req *datapb.AddImportSegmentRequest)) *DataNode_AddImportSegment_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context), args[1].(*datapb.AddSegmentRequest))
run(args[0].(context.Context), args[1].(*datapb.AddImportSegmentRequest))
})
return _c
}
func (_c *DataNode_AddSegment_Call) Return(_a0 *commonpb.Status, _a1 error) *DataNode_AddSegment_Call {
func (_c *DataNode_AddImportSegment_Call) Return(_a0 *datapb.AddImportSegmentResponse, _a1 error) *DataNode_AddImportSegment_Call {
_c.Call.Return(_a0, _a1)
return _c
}

View File

@ -61,6 +61,10 @@ enum ErrorCode {
ForceDeny = 48;
RateLimit = 49;
// Service availability.
// NA: Not Available.
DataCoordNA = 100;
// internal error code.
DDRequestRace = 1000;
}
@ -269,14 +273,12 @@ enum ConsistencyLevel {
}
enum ImportState {
ImportPending = 0;
ImportFailed = 1;
ImportStarted = 2;
ImportDownloaded = 3;
ImportParsed = 4;
ImportPersisted = 5;
ImportCompleted = 6;
ImportAllocSegment = 10;
ImportPending = 0; // the task is in the pending list of rootCoord, waiting to be executed
ImportFailed = 1; // the task failed for some reason; see the detailed reason in GetImportStateResponse.infos
ImportStarted = 2; // the task has been sent to a DataNode to execute
ImportPersisted = 5; // all data files have been parsed and the data has been persisted
ImportCompleted = 6; // all indexes have been built successfully and the segments can be compacted as normal
ImportFailedAndCleaned = 7; // the task failed and all segments it generated have been cleaned up
}
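The remaining states trace a linear lifecycle (Pending → Started → Persisted → Completed), with the two failure states terminal. A hedged Go sketch of a terminal-state check using the generated constants; the function name is illustrative:
// Sketch: true once an import task can no longer make progress.
func importTaskDone(s commonpb.ImportState) bool {
	switch s {
	case commonpb.ImportState_ImportCompleted,
		commonpb.ImportState_ImportFailed,
		commonpb.ImportState_ImportFailedAndCleaned:
		return true
	default:
		return false
	}
}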
enum ObjectType {

View File

@ -12,6 +12,12 @@ import "schema.proto";
// TODO: import google/protobuf/empty.proto
message Empty {}
enum SegmentType {
New = 0;
Normal = 1;
Flushed = 2;
}
service DataCoord {
rpc GetComponentStates(internal.GetComponentStatesRequest) returns (internal.ComponentStates) {}
rpc GetTimeTickChannel(internal.GetTimeTickChannelRequest) returns(milvus.StringResponse) {}
@ -53,7 +59,9 @@ service DataCoord {
rpc AcquireSegmentLock(AcquireSegmentLockRequest) returns (common.Status) {}
rpc ReleaseSegmentLock(ReleaseSegmentLockRequest) returns (common.Status) {}
rpc AddSegment(AddSegmentRequest) returns(common.Status) {}
rpc SaveImportSegment(SaveImportSegmentRequest) returns(common.Status) {}
rpc UnsetIsImportingState(UnsetIsImportingStateRequest) returns(common.Status) {}
rpc MarkSegmentsDropped(MarkSegmentsDroppedRequest) returns(common.Status) {}
}
service DataNode {
@ -76,7 +84,7 @@ service DataNode {
rpc ResendSegmentStats(ResendSegmentStatsRequest) returns(ResendSegmentStatsResponse) {}
rpc AddSegment(AddSegmentRequest) returns(common.Status) {}
rpc AddImportSegment(AddImportSegmentRequest) returns(AddImportSegmentResponse) {}
}
message FlushRequest {
@ -530,8 +538,8 @@ message ImportTaskInfo {
repeated string files = 9; // A list of files to import.
int64 create_ts = 10; // Timestamp when the import task is created.
ImportTaskState state = 11; // State of the import task.
bool data_queryable = 12; // A flag indicating whether import data are queryable (i.e. loaded in query nodes)
bool data_indexed = 13; // A flag indicating whether import data are indexed.
string collection_name = 12; // Collection name for the import task.
string partition_name = 13; // Partition name for the import task.
}
message ImportTaskResponse {
@ -559,13 +567,40 @@ message ResendSegmentStatsResponse {
repeated int64 seg_resent = 2;
}
message AddSegmentRequest {
message AddImportSegmentRequest {
common.MsgBase base = 1;
int64 segment_id = 2;
string channel_name = 3;
int64 collection_id = 4;
int64 partition_id = 5;
int64 row_num = 6;
repeated FieldBinlog stats_log = 7;
}
message AddImportSegmentResponse {
common.Status status = 1;
bytes channel_pos = 2;
}
message SaveImportSegmentRequest {
common.MsgBase base = 1;
int64 segment_id = 2;
string channel_name = 3;
int64 collection_id = 4;
int64 partition_id = 5;
int64 row_num = 6;
SaveBinlogPathsRequest save_binlog_path_req = 7;
bytes dml_position_id = 8;
}
message UnsetIsImportingStateRequest {
common.MsgBase base = 1;
repeated int64 segment_ids = 2; // IDs of segments whose `isImport` states need to be unset.
}
message MarkSegmentsDroppedRequest {
common.MsgBase base = 1;
repeated int64 segment_ids = 2; // IDs of segments that need to be marked as `dropped`.
}
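A rough sketch of the sequence these messages imply (inferred from the message shapes, not stated in this diff): SaveImportSegment records a freshly imported segment with DataCoord, and UnsetIsImportingState later clears its importing flag once indexes are built. The helper name is hypothetical; `dc` is assumed to be a generated datapb.DataCoordClient.
// Sketch only.
func finishImportedSegment(ctx context.Context, dc datapb.DataCoordClient,
	req *datapb.SaveImportSegmentRequest) error {
	if _, err := dc.SaveImportSegment(ctx, req); err != nil {
		return err
	}
	// After index building, make the segment visible and compactable again.
	_, err := dc.UnsetIsImportingState(ctx, &datapb.UnsetIsImportingStateRequest{
		SegmentIds: []int64{req.GetSegmentId()},
	})
	return err
}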
message SegmentReferenceLock {

File diff suppressed because it is too large

View File

@ -968,11 +968,14 @@ message GetImportStateResponse {
repeated int64 id_list = 4; // auto generated ids if the primary key is autoid
repeated common.KeyValuePair infos = 5; // more information about the task, progress percent, file path, failed reason, etc.
int64 id = 6; // id of an import task
bool data_queryable = 7; // A flag indicating whether import data are queryable (i.e. loaded in query nodes)
bool data_indexed = 8; // A flag indicating whether import data are indexed.
int64 collection_id = 7; // collection ID of the import task.
repeated int64 segment_ids = 8; // a list of segment IDs created by the import task.
int64 create_ts = 9; // timestamp when the import task is created.
}
message ListImportTasksRequest {
string collection_name = 1; // target collection, list all tasks if the name is empty
int64 limit = 2; // maximum number of tasks returned, list all tasks if the value is 0
}
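A hedged sketch of how an SDK-style caller might poll the new fields; the client variable, error formatting, and polling interval are assumptions (fmt and time imports assumed):
// Sketch: poll until the task reaches a terminal state.
func waitForImport(ctx context.Context, c milvuspb.MilvusServiceClient, taskID int64) error {
	for {
		resp, err := c.GetImportState(ctx, &milvuspb.GetImportStateRequest{Task: taskID})
		if err != nil {
			return err
		}
		switch resp.GetState() {
		case commonpb.ImportState_ImportCompleted:
			return nil // segments listed in resp.GetSegmentIds() are ready
		case commonpb.ImportState_ImportFailed,
			commonpb.ImportState_ImportFailedAndCleaned:
			return fmt.Errorf("import task %d failed: %v", taskID, resp.GetInfos())
		}
		time.Sleep(2 * time.Second) // arbitrary interval
	}
}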
message ListImportTasksResponse {

View File

@ -114,7 +114,15 @@ func (coord *DataCoordMock) Flush(ctx context.Context, req *datapb.FlushRequest)
panic("implement me")
}
func (coord *DataCoordMock) AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error) {
func (coord *DataCoordMock) SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error) {
panic("implement me")
}
func (coord *DataCoordMock) UnsetIsImportingState(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
panic("implement me")
}
func (coord *DataCoordMock) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
panic("implement me")
}

View File

@ -23,31 +23,27 @@ import (
"os"
"strconv"
"github.com/milvus-io/milvus/internal/util/errorutil"
"github.com/milvus-io/milvus/internal/util"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
"github.com/golang/protobuf/proto"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/metrics"
"github.com/milvus-io/milvus/internal/mq/msgstream"
"github.com/golang/protobuf/proto"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/internalpb"
"github.com/milvus-io/milvus/internal/proto/proxypb"
"github.com/milvus-io/milvus/internal/proto/querypb"
"github.com/milvus-io/milvus/internal/util"
"github.com/milvus-io/milvus/internal/util/crypto"
"github.com/milvus-io/milvus/internal/util/errorutil"
"github.com/milvus-io/milvus/internal/util/logutil"
"github.com/milvus-io/milvus/internal/util/metricsinfo"
"github.com/milvus-io/milvus/internal/util/timerecord"
"github.com/milvus-io/milvus/internal/util/trace"
"github.com/milvus-io/milvus/internal/util/typeutil"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
)
const moduleName = "Proxy"
@ -3802,30 +3798,6 @@ func (node *Proxy) Import(ctx context.Context, req *milvuspb.ImportRequest) (*mi
resp.Status = unhealthyStatus()
return resp, nil
}
// Get collection ID and then channel names.
collID, err := globalMetaCache.GetCollectionID(ctx, req.GetCollectionName())
if err != nil {
log.Error("collection ID not found",
zap.String("collection name", req.GetCollectionName()),
zap.Error(err))
resp.Status.ErrorCode = commonpb.ErrorCode_UnexpectedError
resp.Status.Reason = err.Error()
return resp, nil
}
chNames, err := node.chMgr.getVChannels(collID)
if err != nil {
log.Error("failed to get virtual channels",
zap.Error(err),
zap.String("collection", req.GetCollectionName()),
zap.Int64("collection_id", collID))
resp.Status.ErrorCode = commonpb.ErrorCode_UnexpectedError
resp.Status.Reason = err.Error()
return resp, nil
}
req.ChannelNames = chNames
if req.GetPartitionName() == "" {
req.PartitionName = Params.CommonCfg.DefaultPartitionName
}
// Call rootCoord to finish import.
respFromRC, err := node.rootCoord.Import(ctx, req)
if err != nil {
@ -3837,7 +3809,7 @@ func (node *Proxy) Import(ctx context.Context, req *milvuspb.ImportRequest) (*mi
return respFromRC, nil
}
// GetImportState checks import task state from datanode
// GetImportState checks import task state from RootCoord.
func (node *Proxy) GetImportState(ctx context.Context, req *milvuspb.GetImportStateRequest) (*milvuspb.GetImportStateResponse, error) {
log.Info("received get import state request", zap.Int64("taskID", req.GetTask()))
resp := &milvuspb.GetImportStateResponse{}

View File

@ -1657,12 +1657,14 @@ func TestProxy(t *testing.T) {
defer wg.Done()
req := &milvuspb.ImportRequest{
CollectionName: collectionName,
Files: []string{"f1", "f2", "f3"},
Files: []string{"f1.json", "f2.json", "f3.csv"},
}
proxy.stateCode.Store(internalpb.StateCode_Healthy)
resp, err := proxy.Import(context.TODO(), req)
assert.EqualValues(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
assert.Nil(t, err)
// Wait a bit for the import task to start.
time.Sleep(2 * time.Second)
})
wg.Add(1)
@ -3722,44 +3724,6 @@ func TestProxy_Import(t *testing.T) {
assert.EqualValues(t, unhealthyStatus(), resp.GetStatus())
})
wg.Add(1)
t.Run("collection not found", func(t *testing.T) {
defer wg.Done()
proxy := &Proxy{}
proxy.UpdateStateCode(internalpb.StateCode_Healthy)
cache := newMockCache()
cache.setGetIDFunc(func(ctx context.Context, collectionName string) (typeutil.UniqueID, error) {
return 0, errors.New("mock")
})
globalMetaCache = cache
req := &milvuspb.ImportRequest{
CollectionName: "dummy",
}
resp, err := proxy.Import(context.TODO(), req)
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
})
wg.Add(1)
t.Run("failed to get virtual channels", func(t *testing.T) {
defer wg.Done()
proxy := &Proxy{}
proxy.UpdateStateCode(internalpb.StateCode_Healthy)
cache := newMockCache()
globalMetaCache = cache
chMgr := newMockChannelsMgr()
chMgr.getVChannelsFuncType = func(collectionID UniqueID) ([]vChan, error) {
return nil, errors.New("mock")
}
proxy.chMgr = chMgr
req := &milvuspb.ImportRequest{
CollectionName: "dummy",
}
resp, err := proxy.Import(context.TODO(), req)
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
})
wg.Add(1)
t.Run("rootcoord fail", func(t *testing.T) {
defer wg.Done()

View File

@ -34,9 +34,12 @@ type Broker interface {
ReleaseSegRefLock(ctx context.Context, taskID int64, segIDs []int64) error
Flush(ctx context.Context, cID int64, segIDs []int64) error
Import(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error)
UnsetIsImportingState(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error)
MarkSegmentsDropped(context.Context, *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error)
DropCollectionIndex(ctx context.Context, collID UniqueID, partIDs []UniqueID) error
GetSegmentIndexState(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error)
DescribeIndex(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error)
}
type ServerBroker struct {
@ -170,7 +173,7 @@ func (b *ServerBroker) Flush(ctx context.Context, cID int64, segIDs []int64) err
if err != nil {
return errors.New("failed to call flush to data coordinator: " + err.Error())
}
if resp.Status.ErrorCode != commonpb.ErrorCode_Success {
if resp.GetStatus().GetErrorCode() != commonpb.ErrorCode_Success {
return errors.New(resp.GetStatus().GetReason())
}
log.Info("flush on collection succeed", zap.Int64("collection ID", cID))
@ -181,6 +184,14 @@ func (b *ServerBroker) Import(ctx context.Context, req *datapb.ImportTaskRequest
return b.s.dataCoord.Import(ctx, req)
}
func (b *ServerBroker) UnsetIsImportingState(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return b.s.dataCoord.UnsetIsImportingState(ctx, req)
}
func (b *ServerBroker) MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error) {
return b.s.dataCoord.MarkSegmentsDropped(ctx, req)
}
func (b *ServerBroker) DropCollectionIndex(ctx context.Context, collID UniqueID, partIDs []UniqueID) error {
rsp, err := b.s.indexCoord.DropIndex(ctx, &indexpb.DropIndexRequest{
CollectionID: collID,
@ -211,3 +222,9 @@ func (b *ServerBroker) GetSegmentIndexState(ctx context.Context, collID UniqueID
return resp.GetStates(), nil
}
func (b *ServerBroker) DescribeIndex(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) {
return b.s.indexCoord.DescribeIndex(ctx, &indexpb.DescribeIndexRequest{
CollectionID: colID,
})
}

View File

@ -3,19 +3,29 @@ package rootcoord
import (
"context"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/indexpb"
"go.uber.org/zap"
)
type GetCollectionNameFunc func(collID, partitionID UniqueID) (string, string, error)
type IDAllocator func(count uint32) (UniqueID, UniqueID, error)
type ImportFunc func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse
type ImportFunc func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error)
type MarkSegmentsDroppedFunc func(ctx context.Context, segIDs []int64) (*commonpb.Status, error)
type DescribeIndexFunc func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error)
type GetSegmentIndexStateFunc func(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error)
type UnsetIsImportingStateFunc func(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error)
type ImportFactory interface {
NewGetCollectionNameFunc() GetCollectionNameFunc
NewIDAllocator() IDAllocator
NewImportFunc() ImportFunc
NewMarkSegmentsDroppedFunc() MarkSegmentsDroppedFunc
NewDescribeIndexFunc() DescribeIndexFunc
NewGetSegmentIndexStateFunc() GetSegmentIndexStateFunc
NewUnsetIsImportingStateFunc() UnsetIsImportingStateFunc
}
type ImportFactoryImpl struct {
@ -34,6 +44,22 @@ func (f ImportFactoryImpl) NewImportFunc() ImportFunc {
return ImportFuncWithCore(f.c)
}
func (f ImportFactoryImpl) NewMarkSegmentsDroppedFunc() MarkSegmentsDroppedFunc {
return MarkSegmentsDroppedWithCore(f.c)
}
func (f ImportFactoryImpl) NewDescribeIndexFunc() DescribeIndexFunc {
return DescribeIndexWithCore(f.c)
}
func (f ImportFactoryImpl) NewGetSegmentIndexStateFunc() GetSegmentIndexStateFunc {
return GetSegmentIndexStateWithCore(f.c)
}
func (f ImportFactoryImpl) NewUnsetIsImportingStateFunc() UnsetIsImportingStateFunc {
return UnsetIsImportingStateWithCore(f.c)
}
func NewImportFactory(c *Core) ImportFactory {
return &ImportFactoryImpl{c: c}
}
@ -63,9 +89,33 @@ func IDAllocatorWithCore(c *Core) IDAllocator {
}
func ImportFuncWithCore(c *Core) ImportFunc {
return func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
// TODO: better to handle error here.
resp, _ := c.broker.Import(ctx, req)
return resp
return func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return c.broker.Import(ctx, req)
}
}
func MarkSegmentsDroppedWithCore(c *Core) MarkSegmentsDroppedFunc {
return func(ctx context.Context, segIDs []int64) (*commonpb.Status, error) {
return c.broker.MarkSegmentsDropped(ctx, &datapb.MarkSegmentsDroppedRequest{
SegmentIds: segIDs,
})
}
}
func DescribeIndexWithCore(c *Core) DescribeIndexFunc {
return func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) {
return c.broker.DescribeIndex(ctx, colID)
}
}
func GetSegmentIndexStateWithCore(c *Core) GetSegmentIndexStateFunc {
return func(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error) {
return c.broker.GetSegmentIndexState(ctx, collID, indexName, segIDs)
}
}
func UnsetIsImportingStateWithCore(c *Core) UnsetIsImportingStateFunc {
return func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return c.broker.UnsetIsImportingState(ctx, req)
}
}
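These function types exist so the import manager can be unit-tested with hand-written closures instead of a live Core. A hedged sketch of the production wiring, assuming newImportManager keeps the argument order the tests in this patch use; c, ctx, and metaKV are assumed to be in scope:
// Sketch: wiring Core-backed callbacks into the import manager.
f := NewImportFactory(c)
mgr := newImportManager(ctx, metaKV,
	f.NewIDAllocator(),
	f.NewImportFunc(),
	f.NewMarkSegmentsDroppedFunc(),
	f.NewGetCollectionNameFunc(),
	f.NewDescribeIndexFunc(),
	f.NewGetSegmentIndexStateFunc(),
	f.NewUnsetIsImportingStateFunc())
// Tests instead pass nil or stub closures for the callbacks they don't exercise.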

File diff suppressed because it is too large

View File

@ -29,6 +29,7 @@ import (
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/internal/kv"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/indexpb"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/stretchr/testify/assert"
@ -49,54 +50,70 @@ func TestImportManager_NewImportManager(t *testing.T) {
return globalCount, 0, nil
}
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
Params.RootCoordCfg.ImportTaskExpiration = 100
Params.RootCoordCfg.ImportTaskExpiration = 50
Params.RootCoordCfg.ImportTaskRetention = 200
checkPendingTasksInterval = 100
expireOldTasksInterval = 100
cleanUpLoopInterval = 100
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = make(map[string]string)
mockKv.InMemKv = sync.Map{}
ti1 := &datapb.ImportTaskInfo{
Id: 100,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 100,
}
ti2 := &datapb.ImportTaskInfo{
Id: 200,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
},
CreateTs: time.Now().Unix() - 100,
}
taskInfo1, err := proto.Marshal(ti1)
assert.NoError(t, err)
taskInfo2, err := proto.Marshal(ti2)
assert.NoError(t, err)
mockKv.SaveWithLease(BuildImportTaskKey(1), "value", 1)
mockKv.SaveWithLease(BuildImportTaskKey(2), string(taskInfo1), 2)
mockKv.SaveWithLease(BuildImportTaskKey(3), string(taskInfo2), 3)
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
mockKv.Save(BuildImportTaskKey(1), "value")
mockKv.Save(BuildImportTaskKey(100), string(taskInfo1))
mockKv.Save(BuildImportTaskKey(200), string(taskInfo2))
mockCallImportServiceErr := false
callImportServiceFn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
if mockCallImportServiceErr {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}, errors.New("mock err")
}
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
}, nil
}
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
time.Sleep(1 * time.Second)
var wg sync.WaitGroup
wg.Add(1)
t.Run("working task expired", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, fn, nil)
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
assert.NotNil(t, mgr)
mgr.init(ctx)
_, err := mgr.loadFromTaskStore(true)
assert.NoError(t, err)
var wgLoop sync.WaitGroup
wgLoop.Add(2)
mgr.expireOldTasksLoop(&wgLoop, func(ctx context.Context, int64 int64, int64s []int64) error {
return nil
})
mgr.sendOutTasks(ctx)
assert.Equal(t, 1, len(mgr.workingTasks))
mgr.cleanupLoop(&wgLoop)
assert.Equal(t, 0, len(mgr.workingTasks))
mgr.sendOutTasksLoop(&wgLoop)
wgLoop.Wait()
})
@ -106,38 +123,99 @@ func TestImportManager_NewImportManager(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Nanosecond)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, fn, nil)
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
assert.NotNil(t, mgr)
mgr.init(context.TODO())
var wgLoop sync.WaitGroup
wgLoop.Add(2)
mgr.expireOldTasksLoop(&wgLoop, func(ctx context.Context, int64 int64, int64s []int64) error {
return nil
})
mgr.cleanupLoop(&wgLoop)
mgr.sendOutTasksLoop(&wgLoop)
wgLoop.Wait()
})
wg.Add(1)
t.Run("importManager init fail because of loadFromTaskStore fail", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Nanosecond)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
mockKv.LoadWithPrefixMockErr = true
defer func() {
mockKv.LoadWithPrefixMockErr = false
}()
assert.NotNil(t, mgr)
assert.Panics(t, func() {
mgr.init(context.TODO())
})
})
wg.Add(1)
t.Run("sendOutTasks fail", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Nanosecond)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
mockKv.SaveMockErr = true
defer func() {
mockKv.SaveMockErr = false
}()
assert.NotNil(t, mgr)
mgr.init(context.TODO())
})
wg.Add(1)
t.Run("sendOutTasks fail", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 1*time.Nanosecond)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
assert.NotNil(t, mgr)
mgr.init(context.TODO())
func() {
mockKv.SaveMockErr = true
defer func() {
mockKv.SaveMockErr = false
}()
mgr.sendOutTasks(context.TODO())
}()
func() {
mockCallImportServiceErr = true
defer func() {
mockCallImportServiceErr = false
}()
mgr.sendOutTasks(context.TODO())
}()
})
wg.Add(1)
t.Run("pending task expired", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, fn, nil)
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
assert.NotNil(t, mgr)
mgr.pendingTasks = append(mgr.pendingTasks, &datapb.ImportTaskInfo{
Id: 300,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 10,
CreateTs: time.Now().Unix() + 1,
})
mgr.loadFromTaskStore()
mgr.pendingTasks = append(mgr.pendingTasks, &datapb.ImportTaskInfo{
Id: 400,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 100,
})
_, err := mgr.loadFromTaskStore(true)
assert.NoError(t, err)
var wgLoop sync.WaitGroup
wgLoop.Add(2)
mgr.expireOldTasksLoop(&wgLoop, func(ctx context.Context, int64 int64, int64s []int64) error {
return nil
})
assert.Equal(t, 2, len(mgr.pendingTasks))
mgr.cleanupLoop(&wgLoop)
assert.Equal(t, 1, len(mgr.pendingTasks))
mgr.sendOutTasksLoop(&wgLoop)
wgLoop.Wait()
})
@ -147,22 +225,263 @@ func TestImportManager_NewImportManager(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, fn, nil)
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
assert.NotNil(t, mgr)
mgr.init(ctx)
var wgLoop sync.WaitGroup
wgLoop.Add(2)
mgr.expireOldTasksLoop(&wgLoop, func(ctx context.Context, int64 int64, int64s []int64) error {
return nil
})
mgr.cleanupLoop(&wgLoop)
mgr.sendOutTasksLoop(&wgLoop)
time.Sleep(500 * time.Millisecond)
time.Sleep(100 * time.Millisecond)
wgLoop.Wait()
})
wg.Wait()
}
func TestImportManager_TestEtcdCleanUp(t *testing.T) {
var countLock sync.RWMutex
var globalCount = typeutil.UniqueID(0)
var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) {
countLock.Lock()
defer countLock.Unlock()
globalCount++
return globalCount, 0, nil
}
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
Params.RootCoordCfg.ImportTaskExpiration = 50
Params.RootCoordCfg.ImportTaskRetention = 200
checkPendingTasksInterval = 100
cleanUpLoopInterval = 100
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = sync.Map{}
ti1 := &datapb.ImportTaskInfo{
Id: 100,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 500,
}
ti2 := &datapb.ImportTaskInfo{
Id: 200,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
},
CreateTs: time.Now().Unix() - 500,
}
ti3 := &datapb.ImportTaskInfo{
Id: 300,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
},
CreateTs: time.Now().Unix() - 100,
}
taskInfo3, err := proto.Marshal(ti3)
assert.NoError(t, err)
taskInfo1, err := proto.Marshal(ti1)
assert.NoError(t, err)
taskInfo2, err := proto.Marshal(ti2)
assert.NoError(t, err)
mockKv.Save(BuildImportTaskKey(100), string(taskInfo1))
mockKv.Save(BuildImportTaskKey(200), string(taskInfo2))
mockKv.Save(BuildImportTaskKey(300), string(taskInfo3))
mockCallImportServiceErr := false
callImportServiceFn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
if mockCallImportServiceErr {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}, errors.New("mock err")
}
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}, nil
}
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
assert.NotNil(t, mgr)
_, err = mgr.loadFromTaskStore(true)
assert.NoError(t, err)
var wgLoop sync.WaitGroup
wgLoop.Add(2)
keys, _, _ := mockKv.LoadWithPrefix("")
// All 3 tasks are stored in Etcd.
assert.Equal(t, 3, len(keys))
mgr.cleanupLoop(&wgLoop)
keys, _, _ = mockKv.LoadWithPrefix("")
// Tasks 100 and 200 have passed the retention period and are cleaned up.
assert.Equal(t, 1, len(keys))
mgr.sendOutTasksLoop(&wgLoop)
}
func TestImportManager_TestFlipTaskStateLoop(t *testing.T) {
var countLock sync.RWMutex
var globalCount = typeutil.UniqueID(0)
var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) {
countLock.Lock()
defer countLock.Unlock()
globalCount++
return globalCount, 0, nil
}
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
Params.RootCoordCfg.ImportTaskExpiration = 50
Params.RootCoordCfg.ImportTaskRetention = 200
checkPendingTasksInterval = 100
cleanUpLoopInterval = 100
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = sync.Map{}
ti1 := &datapb.ImportTaskInfo{
Id: 100,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 100,
}
ti2 := &datapb.ImportTaskInfo{
Id: 200,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
Segments: []int64{201, 202, 203},
},
CreateTs: time.Now().Unix() - 100,
}
taskInfo1, err := proto.Marshal(ti1)
assert.NoError(t, err)
taskInfo2, err := proto.Marshal(ti2)
assert.NoError(t, err)
mockKv.Save(BuildImportTaskKey(1), "value")
mockKv.Save(BuildImportTaskKey(100), string(taskInfo1))
mockKv.Save(BuildImportTaskKey(200), string(taskInfo2))
mockCallImportServiceErr := false
callImportServiceFn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
if mockCallImportServiceErr {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}, errors.New("mock err")
}
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}, nil
}
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
callDescribeIndex := func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) {
return &indexpb.DescribeIndexResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
IndexInfos: []*indexpb.IndexInfo{
{},
},
}, nil
}
callGetSegmentIndexState := func(ctx context.Context, collID UniqueID, indexName string,
segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error) {
return []*indexpb.SegmentIndexState{
{
SegmentID: 200,
State: commonpb.IndexState_Finished,
},
{
SegmentID: 201,
State: commonpb.IndexState_Finished,
},
{
SegmentID: 202,
State: commonpb.IndexState_Finished,
},
}, nil
}
callUnsetIsImportingState := func(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
flipTaskStateInterval = 50
var wg sync.WaitGroup
wg.Add(1)
t.Run("normal case", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped,
nil, callDescribeIndex, callGetSegmentIndexState, callUnsetIsImportingState)
assert.NotNil(t, mgr)
var wgLoop sync.WaitGroup
wgLoop.Add(1)
mgr.flipTaskStateLoop(&wgLoop)
wgLoop.Wait()
time.Sleep(100 * time.Millisecond)
})
wg.Add(1)
t.Run("describe index fail", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
callDescribeIndex = func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) {
return &indexpb.DescribeIndexResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
},
}, nil
}
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped,
nil, callDescribeIndex, callGetSegmentIndexState, callUnsetIsImportingState)
assert.NotNil(t, mgr)
var wgLoop sync.WaitGroup
wgLoop.Add(1)
mgr.flipTaskStateLoop(&wgLoop)
wgLoop.Wait()
time.Sleep(100 * time.Millisecond)
})
wg.Add(1)
t.Run("describe index with index not exist", func(t *testing.T) {
defer wg.Done()
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
callDescribeIndex = func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) {
return &indexpb.DescribeIndexResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_IndexNotExist,
},
}, nil
}
mgr := newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped,
nil, callDescribeIndex, callGetSegmentIndexState, callUnsetIsImportingState)
assert.NotNil(t, mgr)
var wgLoop sync.WaitGroup
wgLoop.Add(1)
mgr.flipTaskStateLoop(&wgLoop)
wgLoop.Wait()
time.Sleep(100 * time.Millisecond)
})
wg.Wait()
}
func TestImportManager_ImportJob(t *testing.T) {
var countLock sync.RWMutex
var globalCount = typeutil.UniqueID(0)
@ -176,8 +495,13 @@ func TestImportManager_ImportJob(t *testing.T) {
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
colID := int64(100)
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = make(map[string]string)
mgr := newImportManager(context.TODO(), mockKv, idAlloc, nil, nil)
mockKv.InMemKv = sync.Map{}
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, nil, callMarkSegmentsDropped, nil, nil, nil, nil)
resp := mgr.importJob(context.TODO(), nil, colID, 0)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
@ -204,60 +528,60 @@ func TestImportManager_ImportJob(t *testing.T) {
},
}
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
},
}
}, nil
}
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
resp = mgr.importJob(context.TODO(), rowReq, colID, 0)
assert.Equal(t, len(rowReq.Files), len(mgr.pendingTasks))
assert.Equal(t, 0, len(mgr.workingTasks))
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
resp = mgr.importJob(context.TODO(), colReq, colID, 0)
assert.Equal(t, 1, len(mgr.pendingTasks))
assert.Equal(t, 0, len(mgr.workingTasks))
fn = func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
fn = func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
}, nil
}
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
resp = mgr.importJob(context.TODO(), rowReq, colID, 0)
assert.Equal(t, 0, len(mgr.pendingTasks))
assert.Equal(t, len(rowReq.Files), len(mgr.workingTasks))
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
resp = mgr.importJob(context.TODO(), colReq, colID, 0)
assert.Equal(t, 0, len(mgr.pendingTasks))
assert.Equal(t, 1, len(mgr.workingTasks))
count := 0
fn = func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
fn = func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
if count >= 2 {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
},
}
}, nil
}
count++
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
}, nil
}
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
resp = mgr.importJob(context.TODO(), rowReq, colID, 0)
assert.Equal(t, len(rowReq.Files)-2, len(mgr.pendingTasks))
assert.Equal(t, 2, len(mgr.workingTasks))
@ -282,7 +606,7 @@ func TestImportManager_AllDataNodesBusy(t *testing.T) {
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
colID := int64(100)
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = make(map[string]string)
mockKv.InMemKv = sync.Map{}
rowReq := &milvuspb.ImportRequest{
CollectionName: "c1",
PartitionName: "p1",
@ -304,7 +628,7 @@ func TestImportManager_AllDataNodesBusy(t *testing.T) {
dnList := []int64{1, 2, 3}
count := 0
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
if count < len(dnList) {
count++
return &datapb.ImportTaskResponse{
@ -312,28 +636,33 @@ func TestImportManager_AllDataNodesBusy(t *testing.T) {
ErrorCode: commonpb.ErrorCode_Success,
},
DatanodeId: dnList[count-1],
}
}, nil
}
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
},
}
}, nil
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
mgr.importJob(context.TODO(), rowReq, colID, 0)
assert.Equal(t, 0, len(mgr.pendingTasks))
assert.Equal(t, len(rowReq.Files), len(mgr.workingTasks))
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
mgr.importJob(context.TODO(), rowReq, colID, 0)
assert.Equal(t, len(rowReq.Files), len(mgr.pendingTasks))
assert.Equal(t, 0, len(mgr.workingTasks))
// Reset count.
count = 0
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
mgr = newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
mgr.importJob(context.TODO(), colReq, colID, 0)
assert.Equal(t, 0, len(mgr.pendingTasks))
assert.Equal(t, 1, len(mgr.workingTasks))
@ -364,13 +693,13 @@ func TestImportManager_TaskState(t *testing.T) {
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
colID := int64(100)
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = make(map[string]string)
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
mockKv.InMemKv = sync.Map{}
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
}, nil
}
rowReq := &milvuspb.ImportRequest{
@ -380,19 +709,25 @@ func TestImportManager_TaskState(t *testing.T) {
Files: []string{"f1", "f2", "f3"},
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
mgr.importJob(context.TODO(), rowReq, colID, 0)
state := &rootcoordpb.ImportResult{
info := &rootcoordpb.ImportResult{
TaskId: 10000,
}
_, err := mgr.updateTaskState(state)
_, err := mgr.updateTaskInfo(info)
assert.NotNil(t, err)
state = &rootcoordpb.ImportResult{
info = &rootcoordpb.ImportResult{
TaskId: 2,
RowCount: 1000,
State: commonpb.ImportState_ImportCompleted,
State: commonpb.ImportState_ImportPersisted,
Infos: []*commonpb.KeyValuePair{
{
Key: "key1",
@ -404,15 +739,14 @@ func TestImportManager_TaskState(t *testing.T) {
},
},
}
ti, err := mgr.updateTaskState(state)
ti, err := mgr.updateTaskInfo(info)
assert.NoError(t, err)
assert.Equal(t, int64(2), ti.GetId())
assert.Equal(t, int64(100), ti.GetCollectionId())
assert.Equal(t, int64(0), ti.GetPartitionId())
assert.Equal(t, true, ti.GetRowBased())
assert.Equal(t, []string{"f2"}, ti.GetFiles())
assert.Equal(t, commonpb.ImportState_ImportCompleted, ti.GetState().GetStateCode())
assert.Equal(t, commonpb.ImportState_ImportPersisted, ti.GetState().GetStateCode())
assert.Equal(t, int64(1000), ti.GetState().GetRowCount())
resp := mgr.getTaskState(10000)
@ -420,11 +754,34 @@ func TestImportManager_TaskState(t *testing.T) {
resp = mgr.getTaskState(2)
assert.Equal(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
assert.Equal(t, commonpb.ImportState_ImportCompleted, resp.State)
assert.Equal(t, commonpb.ImportState_ImportPersisted, resp.State)
resp = mgr.getTaskState(1)
assert.Equal(t, commonpb.ErrorCode_Success, resp.Status.ErrorCode)
assert.Equal(t, commonpb.ImportState_ImportPending, resp.State)
assert.Equal(t, commonpb.ImportState_ImportStarted, resp.State)
info = &rootcoordpb.ImportResult{
TaskId: 1,
RowCount: 1000,
State: commonpb.ImportState_ImportFailed,
Infos: []*commonpb.KeyValuePair{
{
Key: "key1",
Value: "value1",
},
{
Key: "failed_reason",
Value: "some_reason",
},
},
}
newTaskInfo, err := mgr.updateTaskInfo(info)
assert.NoError(t, err)
assert.Equal(t, commonpb.ImportState_ImportFailed, newTaskInfo.GetState().GetStateCode())
newTaskInfo, err = mgr.updateTaskInfo(info)
assert.Error(t, err)
assert.Nil(t, newTaskInfo)
}
func TestImportManager_AllocFail(t *testing.T) {
@ -434,13 +791,13 @@ func TestImportManager_AllocFail(t *testing.T) {
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
colID := int64(100)
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = make(map[string]string)
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
mockKv.InMemKv = sync.Map{}
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
}, nil
}
rowReq := &milvuspb.ImportRequest{
@ -450,7 +807,12 @@ func TestImportManager_AllocFail(t *testing.T) {
Files: []string{"f1", "f2", "f3"},
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
mgr.importJob(context.TODO(), rowReq, colID, 0)
}
@ -468,15 +830,15 @@ func TestImportManager_ListAllTasks(t *testing.T) {
Params.RootCoordCfg.ImportTaskSubPath = "test_import_task"
colID := int64(100)
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = make(map[string]string)
mockKv.InMemKv = sync.Map{}
// reject some tasks so that 3 tasks are left in the pending list
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
fn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
},
}
}, nil
}
rowReq := &milvuspb.ImportRequest{
@ -485,11 +847,15 @@ func TestImportManager_ListAllTasks(t *testing.T) {
RowBased: true,
Files: []string{"f1", "f2", "f3"},
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, nil)
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
mgr := newImportManager(context.TODO(), mockKv, idAlloc, fn, callMarkSegmentsDropped, nil, nil, nil, nil)
mgr.importJob(context.TODO(), rowReq, colID, 0)
tasks := mgr.listAllTasks()
tasks := mgr.listAllTasks("", 100)
assert.Equal(t, len(rowReq.Files), len(tasks))
resp := mgr.getTaskState(1)
@ -498,17 +864,21 @@ func TestImportManager_ListAllTasks(t *testing.T) {
assert.Equal(t, int64(1), resp.Id)
// accept tasks to working list
mgr.callImportService = func(ctx context.Context, req *datapb.ImportTaskRequest) *datapb.ImportTaskResponse {
mgr.callImportService = func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}
}, nil
}
mgr.importJob(context.TODO(), rowReq, colID, 0)
tasks = mgr.listAllTasks()
tasks = mgr.listAllTasks("", 100)
assert.Equal(t, len(rowReq.Files)*2, len(tasks))
tasks = mgr.listAllTasks("", 1)
assert.Equal(t, 1, len(tasks))
tasks = mgr.listAllTasks("bad-collection-name", 1)
assert.Equal(t, 0, len(tasks))
// the task IDs must be 1, 2, 3, 4, 5, 6 (sequence not guaranteed)
ids := make(map[int64]struct{})
@ -521,23 +891,30 @@ func TestImportManager_ListAllTasks(t *testing.T) {
assert.Equal(t, 0, len(ids))
}
func TestImportManager_getCollectionPartitionName(t *testing.T) {
func TestImportManager_setCollectionPartitionName(t *testing.T) {
mgr := &importManager{
getCollectionName: func(collID, partitionID typeutil.UniqueID) (string, string, error) {
return "c1", "p1", nil
if collID == 1 && partitionID == 2 {
return "c1", "p1", nil
}
return "", "", errors.New("Error")
},
}
task := &datapb.ImportTaskInfo{
CollectionId: 1,
PartitionId: 2,
info := &datapb.ImportTaskInfo{
Id: 100,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportStarted,
},
CreateTs: time.Now().Unix() - 100,
}
resp := &milvuspb.GetImportStateResponse{
Infos: make([]*commonpb.KeyValuePair, 0),
}
mgr.getCollectionPartitionName(task, resp)
assert.Equal(t, "c1", resp.Infos[0].Value)
assert.Equal(t, "p1", resp.Infos[1].Value)
err := mgr.setCollectionPartitionName(1, 2, info)
assert.Nil(t, err)
assert.Equal(t, "c1", info.GetCollectionName())
assert.Equal(t, "p1", info.GetPartitionName())
err = mgr.setCollectionPartitionName(0, 0, info)
assert.Error(t, err)
}
func TestImportManager_rearrangeTasks(t *testing.T) {

View File

@ -78,6 +78,7 @@ type IMetaTable interface {
ListCollections(ctx context.Context, ts Timestamp) ([]*model.Collection, error)
ListAbnormalCollections(ctx context.Context, ts Timestamp) ([]*model.Collection, error)
ListCollectionPhysicalChannels() map[typeutil.UniqueID][]string
GetCollectionVirtualChannels(colID int64) []string
AddPartition(ctx context.Context, partition *model.Partition) error
ChangePartitionState(ctx context.Context, collectionID UniqueID, partitionID UniqueID, state pb.PartitionState, ts Timestamp) error
RemovePartition(ctx context.Context, collectionID UniqueID, partitionID UniqueID, ts Timestamp) error
@ -360,6 +361,18 @@ func (mt *MetaTable) ListCollectionPhysicalChannels() map[typeutil.UniqueID][]st
return chanMap
}
// GetCollectionVirtualChannels returns virtual channels of a given collection.
func (mt *MetaTable) GetCollectionVirtualChannels(colID int64) []string {
mt.ddLock.RLock()
defer mt.ddLock.RUnlock()
for id, collInfo := range mt.collID2Meta {
if id == colID {
return common.CloneStringList(collInfo.VirtualChannelNames)
}
}
return nil
}
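// Illustrative usage sketch (not part of this change; the variable names are
// hypothetical): the method returns a defensive copy, so a caller may assign
// the result to a request and mutate it safely.
//
//	req := &milvuspb.ImportRequest{CollectionName: "c1"}
//	req.ChannelNames = mt.GetCollectionVirtualChannels(colID) // cloned, safe to modify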
func (mt *MetaTable) AddPartition(ctx context.Context, partition *model.Partition) error {
mt.ddLock.Lock()
defer mt.ddLock.Unlock()
@ -568,7 +581,7 @@ func (mt *MetaTable) ListAliasesByID(collID UniqueID) []string {
// GetCollectionNameByID serves bulk load. TODO: why doesn't this accept ts?
func (mt *MetaTable) GetCollectionNameByID(collID UniqueID) (string, error) {
mt.ddLock.RUnlock()
mt.ddLock.RLock()
defer mt.ddLock.RUnlock()
coll, ok := mt.collID2Meta[collID]
@ -581,7 +594,7 @@ func (mt *MetaTable) GetCollectionNameByID(collID UniqueID) (string, error) {
// GetPartitionNameByID serves bulk load.
func (mt *MetaTable) GetPartitionNameByID(collID UniqueID, partitionID UniqueID, ts Timestamp) (string, error) {
mt.ddLock.RUnlock()
mt.ddLock.RLock()
defer mt.ddLock.RUnlock()
coll, ok := mt.collID2Meta[collID]

View File

@ -38,20 +38,23 @@ const (
type mockMetaTable struct {
IMetaTable
ListCollectionsFunc func(ctx context.Context, ts Timestamp) ([]*model.Collection, error)
AddCollectionFunc func(ctx context.Context, coll *model.Collection) error
GetCollectionByNameFunc func(ctx context.Context, collectionName string, ts Timestamp) (*model.Collection, error)
GetCollectionByIDFunc func(ctx context.Context, collectionID UniqueID, ts Timestamp) (*model.Collection, error)
ChangeCollectionStateFunc func(ctx context.Context, collectionID UniqueID, state pb.CollectionState, ts Timestamp) error
RemoveCollectionFunc func(ctx context.Context, collectionID UniqueID, ts Timestamp) error
AddPartitionFunc func(ctx context.Context, partition *model.Partition) error
ChangePartitionStateFunc func(ctx context.Context, collectionID UniqueID, partitionID UniqueID, state pb.PartitionState, ts Timestamp) error
RemovePartitionFunc func(ctx context.Context, collectionID UniqueID, partitionID UniqueID, ts Timestamp) error
CreateAliasFunc func(ctx context.Context, alias string, collectionName string, ts Timestamp) error
AlterAliasFunc func(ctx context.Context, alias string, collectionName string, ts Timestamp) error
DropAliasFunc func(ctx context.Context, alias string, ts Timestamp) error
IsAliasFunc func(name string) bool
ListAliasesByIDFunc func(collID UniqueID) []string
ListCollectionsFunc func(ctx context.Context, ts Timestamp) ([]*model.Collection, error)
AddCollectionFunc func(ctx context.Context, coll *model.Collection) error
GetCollectionByNameFunc func(ctx context.Context, collectionName string, ts Timestamp) (*model.Collection, error)
GetCollectionByIDFunc func(ctx context.Context, collectionID UniqueID, ts Timestamp) (*model.Collection, error)
ChangeCollectionStateFunc func(ctx context.Context, collectionID UniqueID, state pb.CollectionState, ts Timestamp) error
RemoveCollectionFunc func(ctx context.Context, collectionID UniqueID, ts Timestamp) error
AddPartitionFunc func(ctx context.Context, partition *model.Partition) error
ChangePartitionStateFunc func(ctx context.Context, collectionID UniqueID, partitionID UniqueID, state pb.PartitionState, ts Timestamp) error
RemovePartitionFunc func(ctx context.Context, collectionID UniqueID, partitionID UniqueID, ts Timestamp) error
CreateAliasFunc func(ctx context.Context, alias string, collectionName string, ts Timestamp) error
AlterAliasFunc func(ctx context.Context, alias string, collectionName string, ts Timestamp) error
DropAliasFunc func(ctx context.Context, alias string, ts Timestamp) error
IsAliasFunc func(name string) bool
ListAliasesByIDFunc func(collID UniqueID) []string
GetCollectionIDByNameFunc func(name string) (UniqueID, error)
GetPartitionByNameFunc func(collID UniqueID, partitionName string, ts Timestamp) (UniqueID, error)
GetCollectionVirtualChannelsFunc func(colID int64) []string
}
func (m mockMetaTable) ListCollections(ctx context.Context, ts Timestamp) ([]*model.Collection, error) {
@ -110,6 +113,18 @@ func (m mockMetaTable) ListAliasesByID(collID UniqueID) []string {
return m.ListAliasesByIDFunc(collID)
}
func (m mockMetaTable) GetCollectionIDByName(name string) (UniqueID, error) {
return m.GetCollectionIDByNameFunc(name)
}
func (m mockMetaTable) GetPartitionByName(collID UniqueID, partitionName string, ts Timestamp) (UniqueID, error) {
return m.GetPartitionByNameFunc(collID, partitionName, ts)
}
func (m mockMetaTable) GetCollectionVirtualChannels(colID int64) []string {
return m.GetCollectionVirtualChannelsFunc(colID)
}
func newMockMetaTable() *mockMetaTable {
return &mockMetaTable{}
}
@ -139,12 +154,13 @@ func (m mockIndexCoord) DropIndex(ctx context.Context, req *indexpb.DropIndexReq
type mockDataCoord struct {
types.DataCoord
GetComponentStatesFunc func(ctx context.Context) (*internalpb.ComponentStates, error)
WatchChannelsFunc func(ctx context.Context, req *datapb.WatchChannelsRequest) (*datapb.WatchChannelsResponse, error)
AcquireSegmentLockFunc func(ctx context.Context, req *datapb.AcquireSegmentLockRequest) (*commonpb.Status, error)
ReleaseSegmentLockFunc func(ctx context.Context, req *datapb.ReleaseSegmentLockRequest) (*commonpb.Status, error)
FlushFunc func(ctx context.Context, req *datapb.FlushRequest) (*datapb.FlushResponse, error)
ImportFunc func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error)
GetComponentStatesFunc func(ctx context.Context) (*internalpb.ComponentStates, error)
WatchChannelsFunc func(ctx context.Context, req *datapb.WatchChannelsRequest) (*datapb.WatchChannelsResponse, error)
AcquireSegmentLockFunc func(ctx context.Context, req *datapb.AcquireSegmentLockRequest) (*commonpb.Status, error)
ReleaseSegmentLockFunc func(ctx context.Context, req *datapb.ReleaseSegmentLockRequest) (*commonpb.Status, error)
FlushFunc func(ctx context.Context, req *datapb.FlushRequest) (*datapb.FlushResponse, error)
ImportFunc func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error)
UnsetIsImportingStateFunc func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error)
}
func newMockDataCoord() *mockDataCoord {
@ -175,6 +191,10 @@ func (m *mockDataCoord) Import(ctx context.Context, req *datapb.ImportTaskReques
return m.ImportFunc(ctx, req)
}
func (m *mockDataCoord) UnsetIsImportingState(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return m.UnsetIsImportingStateFunc(ctx, req)
}
type mockQueryCoord struct {
types.QueryCoord
GetSegmentInfoFunc func(ctx context.Context, req *querypb.GetSegmentInfoRequest) (*querypb.GetSegmentInfoResponse, error)
@ -580,6 +600,9 @@ func withInvalidDataCoord() Opt {
dc.ImportFunc = func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
return nil, errors.New("error mock Import")
}
dc.UnsetIsImportingStateFunc = func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return nil, errors.New("error mock UnsetIsImportingState")
}
return withDataCoord(dc)
}
@ -612,6 +635,12 @@ func withFailedDataCoord() Opt {
Status: failStatus(commonpb.ErrorCode_UnexpectedError, "mock import error"),
}, nil
}
dc.UnsetIsImportingStateFunc = func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: "mock UnsetIsImportingState error",
}, nil
}
return withDataCoord(dc)
}
@ -644,6 +673,9 @@ func withValidDataCoord() Opt {
Status: succStatus(),
}, nil
}
dc.UnsetIsImportingStateFunc = func(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error) {
return succStatus(), nil
}
return withDataCoord(dc)
}
@ -753,7 +785,9 @@ type mockBroker struct {
FlushFunc func(ctx context.Context, cID int64, segIDs []int64) error
ImportFunc func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error)
DropCollectionIndexFunc func(ctx context.Context, collID UniqueID, partIDs []UniqueID) error
DescribeIndexFunc func(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error)
GetSegmentIndexStateFunc func(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error)
}
func newMockBroker() *mockBroker {
@ -776,6 +810,14 @@ func (b mockBroker) DropCollectionIndex(ctx context.Context, collID UniqueID, pa
return b.DropCollectionIndexFunc(ctx, collID, partIDs)
}
func (b mockBroker) DescribeIndex(ctx context.Context, colID UniqueID) (*indexpb.DescribeIndexResponse, error) {
return b.DescribeIndexFunc(ctx, colID)
}
func (b mockBroker) GetSegmentIndexState(ctx context.Context, collID UniqueID, indexName string, segIDs []UniqueID) ([]*indexpb.SegmentIndexState, error) {
return b.GetSegmentIndexStateFunc(ctx, collID, indexName, segIDs)
}
func withBroker(b Broker) Opt {
return func(c *Core) {
c.broker = b

View File

@ -290,6 +290,22 @@ func (_m *IMetaTable) GetCollectionNameByID(collID int64) (string, error) {
return r0, r1
}
// GetCollectionVirtualChannels provides a mock function with given fields: colID
func (_m *IMetaTable) GetCollectionVirtualChannels(colID int64) []string {
ret := _m.Called(colID)
var r0 []string
if rf, ok := ret.Get(0).(func(int64) []string); ok {
r0 = rf(colID)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).([]string)
}
}
return r0
}
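// Illustrative usage sketch (assumes the standard mockery/testify pattern;
// the channel name is made up): a test programs the generated method with
// On/Return, and the typed branch above hands back the stubbed slice.
//
//	metaTable := &mocks.IMetaTable{}
//	metaTable.On("GetCollectionVirtualChannels", int64(100)).
//		Return([]string{"by-dev-dml_0_100v0"})
//	chans := metaTable.GetCollectionVirtualChannels(100) // -> []string{"by-dev-dml_0_100v0"}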
// GetCredential provides a mock function with given fields: username
func (_m *IMetaTable) GetCredential(username string) (*internalpb.CredentialInfo, error) {
ret := _m.Called(username)

View File

@ -22,33 +22,27 @@ import (
"fmt"
"math/rand"
"os"
"strconv"
"sync"
"sync/atomic"
"syscall"
"time"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/api/schemapb"
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
"github.com/milvus-io/milvus/internal/metastore/db/rootcoord"
pb "github.com/milvus-io/milvus/internal/proto/etcdpb"
"github.com/milvus-io/milvus/internal/allocator"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/kv"
etcdkv "github.com/milvus-io/milvus/internal/kv/etcd"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/metastore"
"github.com/milvus-io/milvus/internal/metastore/db/dao"
"github.com/milvus-io/milvus/internal/metastore/db/dbcore"
"github.com/milvus-io/milvus/internal/metastore/db/rootcoord"
kvmetestore "github.com/milvus-io/milvus/internal/metastore/kv/rootcoord"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/internal/allocator"
"github.com/milvus-io/milvus/internal/common"
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/metastore/model"
"github.com/milvus-io/milvus/internal/metrics"
pb "github.com/milvus-io/milvus/internal/proto/etcdpb"
"github.com/milvus-io/milvus/internal/proto/internalpb"
"github.com/milvus-io/milvus/internal/proto/proxypb"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
@ -412,7 +406,11 @@ func (c *Core) initImportManager() error {
impTaskKv,
f.NewIDAllocator(),
f.NewImportFunc(),
f.NewMarkSegmentsDroppedFunc(),
f.NewGetCollectionNameFunc(),
f.NewDescribeIndexFunc(),
f.NewGetSegmentIndexStateFunc(),
f.NewUnsetIsImportingStateFunc(),
)
c.importManager.init(c.ctx)
@ -612,12 +610,15 @@ func (c *Core) startInternal() error {
panic(err)
}
c.wg.Add(5)
go c.tsLoop()
c.wg.Add(6)
go c.startTimeTickLoop()
go c.tsLoop()
go c.chanTimeTick.startWatch(&c.wg)
go c.importManager.expireOldTasksLoop(&c.wg, c.broker.ReleaseSegRefLock)
go c.importManager.cleanupLoop(&c.wg)
go c.importManager.sendOutTasksLoop(&c.wg)
go c.importManager.flipTaskStateLoop(&c.wg)
Params.RootCoordCfg.CreatedTime = time.Now()
Params.RootCoordCfg.UpdatedTime = time.Now()
if Params.QuotaConfig.QuotaAndLimitsEnabled {
go c.quotaCenter.run()
@ -1478,6 +1479,10 @@ func (c *Core) Import(ctx context.Context, req *milvuspb.ImportRequest) (*milvus
zap.Error(err))
return nil, err
}
req.ChannelNames = c.meta.GetCollectionVirtualChannels(cID)
if req.GetPartitionName() == "" {
req.PartitionName = Params.CommonCfg.DefaultPartitionName
}
var pID UniqueID
if pID, err = c.meta.GetPartitionByName(cID, req.GetPartitionName(), typeutil.MaxTimestamp); err != nil {
log.Error("failed to get partition ID from its name",
@ -1489,12 +1494,13 @@ func (c *Core) Import(ctx context.Context, req *milvuspb.ImportRequest) (*milvus
zap.String("collection name", req.GetCollectionName()),
zap.Int64("collection ID", cID),
zap.String("partition name", req.GetPartitionName()),
zap.Strings("virtual channel names", req.GetChannelNames()),
zap.Int64("partition ID", pID),
zap.Int("# of files = ", len(req.GetFiles())),
zap.Bool("row-based", req.GetRowBased()),
)
resp := c.importManager.importJob(ctx, req, cID, pID)
return resp, nil
importJobResp := c.importManager.importJob(ctx, req, cID, pID)
return importJobResp, nil
}
// GetImportState returns the current state of an import task.
@ -1519,7 +1525,7 @@ func (c *Core) ListImportTasks(ctx context.Context, req *milvuspb.ListImportTask
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
Tasks: c.importManager.listAllTasks(),
Tasks: c.importManager.listAllTasks(req.GetCollectionName(), req.GetLimit()),
}
return resp, nil
}
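// Client-side sketch (illustrative; the collection name is hypothetical): the
// request now carries a collection filter and a result cap, both forwarded to
// listAllTasks above. An empty CollectionName lists tasks across all collections.
//
//	resp, err := rootCoord.ListImportTasks(ctx, &milvuspb.ListImportTasksRequest{
//		CollectionName: "my_collection",
//		Limit:          100,
//	})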
@ -1532,27 +1538,22 @@ func (c *Core) ReportImport(ctx context.Context, ir *rootcoordpb.ImportResult) (
if code, ok := c.checkHealthy(); !ok {
return failStatus(commonpb.ErrorCode_UnexpectedError, "StateCode="+internalpb.StateCode_name[int32(code)]), nil
}
// Special case for ImportState_ImportAllocSegment state, where we shall only add segment ref lock and do no other
// operations.
// TODO: This is inelegant and must get re-structured.
if ir.GetState() == commonpb.ImportState_ImportAllocSegment {
// Lock the segments, so we don't lose track of them when compaction happens.
// Note that these locks will be unlocked in c.postImportPersistLoop() -> checkSegmentLoadedLoop().
if err := c.broker.AddSegRefLock(ctx, ir.GetTaskId(), ir.GetSegments()); err != nil {
log.Error("failed to acquire segment ref lock", zap.Error(err))
// If setting ImportState_ImportCompleted, simply update the state and return directly.
if ir.GetState() == commonpb.ImportState_ImportCompleted {
if err := c.importManager.setImportTaskState(ir.GetTaskId(), commonpb.ImportState_ImportCompleted); err != nil {
errMsg := "failed to set import task as ImportState_ImportCompleted"
log.Error(errMsg, zap.Error(err))
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: fmt.Sprintf("failed to acquire segment ref lock %s", err.Error()),
Reason: fmt.Sprintf("%s %s", errMsg, err.Error()),
}, nil
}
// Update task store with new segments.
c.importManager.appendTaskSegments(ir.GetTaskId(), ir.GetSegments())
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
// Upon receiving ReportImport request, update the related task's state in task store.
ti, err := c.importManager.updateTaskState(ir)
ti, err := c.importManager.updateTaskInfo(ir)
if err != nil {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UpdateImportTaskFailure,
@ -1566,200 +1567,48 @@ func (c *Core) ReportImport(ctx context.Context, ir *rootcoordpb.ImportResult) (
c.importManager.busyNodesLock.Lock()
defer c.importManager.busyNodesLock.Unlock()
delete(c.importManager.busyNodes, ir.GetDatanodeId())
log.Info("DataNode is no longer busy",
log.Info("a DataNode is no longer busy after processing task",
zap.Int64("dataNode ID", ir.GetDatanodeId()),
zap.Int64("task ID", ir.GetTaskId()))
}()
c.importManager.sendOutTasks(c.importManager.ctx)
err := c.importManager.sendOutTasks(c.importManager.ctx)
if err != nil {
log.Error("fail to send out import task to datanodes")
}
}
// If the task failed, send the task to an idle DataNode
if ir.GetState() == commonpb.ImportState_ImportFailed {
// Release segments when task fails.
log.Info("task failed, release segment ref locks")
err := retry.Do(ctx, func() error {
return c.broker.ReleaseSegRefLock(ctx, ir.GetTaskId(), ir.GetSegments())
}, retry.Attempts(100))
if err != nil {
log.Error("failed to release lock, about to panic!")
panic(err)
}
// When a DataNode failed importing, remove this DataNode from the busy node list and send out import tasks again.
log.Info("an import task has failed, marking DataNode available and resending import task",
zap.Int64("task ID", ir.GetTaskId()))
resendTaskFunc()
}
// So much for reporting, unless the task just reached `ImportPersisted` state.
if ir.GetState() != commonpb.ImportState_ImportPersisted {
log.Debug("non import-persisted state received, return immediately",
} else if ir.GetState() != commonpb.ImportState_ImportPersisted {
log.Debug("unexpected import task state reported, return immediately (this should not happen)",
zap.Any("task ID", ir.GetTaskId()),
zap.Any("import state", ir.GetState()))
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
// Look up collection name on collection ID.
var colName string
var colMeta *model.Collection
if colMeta, err = c.meta.GetCollectionByID(ctx, ti.GetCollectionId(), typeutil.MaxTimestamp); err != nil {
log.Error("failed to get collection name",
zap.Int64("collection ID", ti.GetCollectionId()),
zap.Error(err))
// In some unexpected cases, the user drops the collection while the bulk load task is still in the pending list, and the DataNode becomes idle.
// If we returned directly, the pending tasks would remain in the pending list, so we call resendTaskFunc() to push the next pending task to an idle DataNode.
resendTaskFunc()
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_CollectionNameNotFound,
Reason: "failed to get collection name for collection ID" + strconv.FormatInt(ti.GetCollectionId(), 10),
}, nil
} else {
// Here ir.GetState() == commonpb.ImportState_ImportPersisted
// When a DataNode finishes importing, remove this DataNode from the busy node list and send out import tasks again.
resendTaskFunc()
// Flush all import data segments.
if err := c.broker.Flush(ctx, ti.GetCollectionId(), ir.GetSegments()); err != nil {
log.Error("failed to call Flush on bulk load segments",
zap.Int64("task ID", ir.GetTaskId()))
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
Reason: err.Error(),
}, nil
}
}
colName = colMeta.Name
// When the DataNode has finished its work, remove it from the busy node list and send out import tasks again.
resendTaskFunc()
// Flush all import data segments.
c.broker.Flush(ctx, ti.GetCollectionId(), ir.GetSegments())
// Check if data are "queryable" and if indices are built on all segments.
go c.postImportPersistLoop(c.ctx, ir.GetTaskId(), ti.GetCollectionId(), colName, ir.GetSegments())
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
// CountCompleteIndex checks the indexing status of the given segments.
// It returns an error if one occurs. It also returns a boolean indicating whether indexing is done (or whether no
// index is needed).
func (c *Core) CountCompleteIndex(ctx context.Context, collectionName string, collectionID UniqueID,
allSegmentIDs []UniqueID) (bool, error) {
// Note: Index name is always Params.CommonCfg.DefaultIndexName in current Milvus designs as of today.
indexName := Params.CommonCfg.DefaultIndexName
states, err := c.broker.GetSegmentIndexState(ctx, collectionID, indexName, allSegmentIDs)
if err != nil {
log.Error("failed to get index state in checkSegmentIndexStates", zap.Error(err))
return false, err
}
// Count the # of segments with finished index.
ct := 0
for _, s := range states {
if s.State == commonpb.IndexState_Finished {
ct++
}
}
log.Info("segment indexing state checked",
//zap.Int64s("segments checked", seg2Check),
//zap.Int("# of checked segment", len(seg2Check)),
zap.Int("# of segments with complete index", ct),
zap.String("collection name", collectionName),
zap.Int64("collection ID", collectionID),
)
return len(allSegmentIDs) == ct, nil
}
func (c *Core) postImportPersistLoop(ctx context.Context, taskID int64, colID int64, colName string, segIDs []UniqueID) {
// Loop and check if segments are loaded in queryNodes.
c.wg.Add(1)
go c.checkSegmentLoadedLoop(ctx, taskID, colID, segIDs)
// Check if collection has any indexed fields. If so, start a loop to check segments' index states.
if _, err := c.meta.GetCollectionByID(ctx, colID, typeutil.MaxTimestamp); err != nil {
log.Error("failed to find meta for collection",
zap.Int64("collection ID", colID),
zap.Error(err))
} else {
log.Info("start checking index state", zap.Int64("collection ID", colID))
c.wg.Add(1)
go c.checkCompleteIndexLoop(ctx, taskID, colID, colName, segIDs)
}
}
// checkSegmentLoadedLoop loops and checks if all segments in `segIDs` are loaded in queryNodes.
func (c *Core) checkSegmentLoadedLoop(ctx context.Context, taskID int64, colID int64, segIDs []UniqueID) {
defer c.wg.Done()
ticker := time.NewTicker(time.Duration(Params.RootCoordCfg.ImportSegmentStateCheckInterval*1000) * time.Millisecond)
defer ticker.Stop()
expireTicker := time.NewTicker(time.Duration(Params.RootCoordCfg.ImportSegmentStateWaitLimit*1000) * time.Millisecond)
defer expireTicker.Stop()
defer func() {
log.Info("we are done checking segment loading state, release segment ref locks")
err := retry.Do(ctx, func() error {
return c.broker.ReleaseSegRefLock(ctx, taskID, segIDs)
}, retry.Attempts(100))
if err != nil {
log.Error("failed to release lock, about to panic!")
panic(err)
}
}()
for {
select {
case <-c.ctx.Done():
log.Info("(in check segment loaded loop) context done, exiting checkSegmentLoadedLoop")
return
case <-ticker.C:
resp, err := c.broker.GetQuerySegmentInfo(ctx, colID, segIDs)
log.Debug("(in check segment loaded loop)",
zap.Int64("task ID", taskID),
zap.Int64("collection ID", colID),
zap.Int64s("segment IDs expected", segIDs),
zap.Int("# of segments found", len(resp.GetInfos())))
if err != nil {
log.Warn("(in check segment loaded loop) failed to call get segment info on queryCoord",
zap.Int64("task ID", taskID),
zap.Int64("collection ID", colID),
zap.Int64s("segment IDs", segIDs))
} else if len(resp.GetInfos()) == len(segIDs) {
// Check if all segment info are loaded in queryNodes.
log.Info("(in check segment loaded loop) all import data segments loaded in queryNodes",
zap.Int64("task ID", taskID),
zap.Int64("collection ID", colID),
zap.Int64s("segment IDs", segIDs))
c.importManager.setTaskDataQueryable(taskID)
return
}
case <-expireTicker.C:
log.Warn("(in check segment loaded loop) segments still not loaded after max wait time",
zap.Int64("task ID", taskID),
zap.Int64("collection ID", colID),
zap.Int64s("segment IDs", segIDs))
return
}
}
}
// checkCompleteIndexLoop loops and checks if all indices are built for an import task's segments.
func (c *Core) checkCompleteIndexLoop(ctx context.Context, taskID int64, colID int64, colName string, segIDs []UniqueID) {
defer c.wg.Done()
ticker := time.NewTicker(time.Duration(Params.RootCoordCfg.ImportIndexCheckInterval*1000) * time.Millisecond)
defer ticker.Stop()
expireTicker := time.NewTicker(time.Duration(Params.RootCoordCfg.ImportIndexWaitLimit*1000) * time.Millisecond)
defer expireTicker.Stop()
for {
select {
case <-c.ctx.Done():
log.Info("(in check complete index loop) context done, exiting checkCompleteIndexLoop")
return
case <-ticker.C:
if done, err := c.CountCompleteIndex(ctx, colName, colID, segIDs); err == nil && done {
log.Info("(in check complete index loop) indices are built or no index needed",
zap.Int64("task ID", taskID))
c.importManager.setTaskDataIndexed(taskID)
return
} else if err != nil {
log.Error("(in check complete index loop) an error occurs",
zap.Error(err))
}
case <-expireTicker.C:
log.Warn("(in check complete index loop) indexing is taken too long",
zap.Int64("task ID", taskID),
zap.Int64("collection ID", colID),
zap.Int64s("segment IDs", segIDs))
return
}
}
}
// ExpireCredCache invalidates the credential cache.
func (c *Core) ExpireCredCache(ctx context.Context, username string) error {
req := proxypb.InvalidateCredCacheRequest{

View File

@ -4,33 +4,29 @@ import (
"context"
"errors"
"math/rand"
"sync"
"testing"
"time"
mockrootcoord "github.com/milvus-io/milvus/internal/rootcoord/mocks"
pb "github.com/milvus-io/milvus/internal/proto/etcdpb"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/golang/protobuf/proto"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/milvus-io/milvus/internal/allocator"
"github.com/milvus-io/milvus/internal/kv"
"github.com/milvus-io/milvus/internal/metastore/model"
"github.com/stretchr/testify/mock"
"github.com/milvus-io/milvus/internal/proto/datapb"
"github.com/milvus-io/milvus/internal/proto/etcdpb"
pb "github.com/milvus-io/milvus/internal/proto/etcdpb"
"github.com/milvus-io/milvus/internal/proto/internalpb"
"github.com/milvus-io/milvus/internal/proto/proxypb"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
mockrootcoord "github.com/milvus-io/milvus/internal/rootcoord/mocks"
"github.com/milvus-io/milvus/internal/util/funcutil"
"github.com/milvus-io/milvus/internal/util/metricsinfo"
"github.com/milvus-io/milvus/internal/util/sessionutil"
"github.com/milvus-io/milvus/internal/util/funcutil"
"github.com/milvus-io/milvus/internal/proto/internalpb"
"github.com/milvus-io/milvus/internal/allocator"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
"github.com/milvus-io/milvus/api/commonpb"
"github.com/milvus-io/milvus/internal/util/typeutil"
"github.com/stretchr/testify/assert"
"github.com/milvus-io/milvus/api/milvuspb"
"github.com/stretchr/testify/mock"
)
func TestRootCoord_CreateCollection(t *testing.T) {
@ -877,23 +873,341 @@ func TestRootCoord_GetMetrics(t *testing.T) {
}
func TestCore_Import(t *testing.T) {
meta := newMockMetaTable()
meta.AddCollectionFunc = func(ctx context.Context, coll *model.Collection) error {
return nil
}
meta.ChangeCollectionStateFunc = func(ctx context.Context, collectionID UniqueID, state etcdpb.CollectionState, ts Timestamp) error {
return nil
}
t.Run("not healthy", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withAbnormalCode())
resp, err := c.Import(ctx, &milvuspb.ImportRequest{})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
})
t.Run("bad collection name", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode(),
withMeta(meta))
meta.GetCollectionIDByNameFunc = func(name string) (UniqueID, error) {
return 0, errors.New("error mock GetCollectionIDByName")
}
_, err := c.Import(ctx, &milvuspb.ImportRequest{
CollectionName: "a-bad-name",
})
assert.Error(t, err)
})
t.Run("bad partition name", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode(),
withMeta(meta))
meta.GetCollectionIDByNameFunc = func(name string) (UniqueID, error) {
return 100, nil
}
meta.GetCollectionVirtualChannelsFunc = func(colID int64) []string {
return []string{"ch-1", "ch-2"}
}
meta.GetPartitionByNameFunc = func(collID UniqueID, partitionName string, ts Timestamp) (UniqueID, error) {
return 0, errors.New("mock GetPartitionByNameFunc error")
}
_, err := c.Import(ctx, &milvuspb.ImportRequest{
CollectionName: "a-good-name",
})
assert.Error(t, err)
})
t.Run("normal case", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode(),
withMeta(meta))
meta.GetCollectionIDByNameFunc = func(name string) (UniqueID, error) {
return 100, nil
}
meta.GetCollectionVirtualChannelsFunc = func(colID int64) []string {
return []string{"ch-1", "ch-2"}
}
meta.GetPartitionByNameFunc = func(collID UniqueID, partitionName string, ts Timestamp) (UniqueID, error) {
return 101, nil
}
_, err := c.Import(ctx, &milvuspb.ImportRequest{
CollectionName: "a-good-name",
})
assert.NoError(t, err)
})
}
func TestCore_GetImportState(t *testing.T) {
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = sync.Map{}
ti1 := &datapb.ImportTaskInfo{
Id: 100,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 100,
}
ti2 := &datapb.ImportTaskInfo{
Id: 200,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
},
CreateTs: time.Now().Unix() - 100,
}
taskInfo1, err := proto.Marshal(ti1)
assert.NoError(t, err)
taskInfo2, err := proto.Marshal(ti2)
assert.NoError(t, err)
mockKv.Save(BuildImportTaskKey(1), "value")
mockKv.Save(BuildImportTaskKey(100), string(taskInfo1))
mockKv.Save(BuildImportTaskKey(200), string(taskInfo2))
t.Run("not healthy", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withAbnormalCode())
resp, err := c.GetImportState(ctx, &milvuspb.GetImportStateRequest{
Task: 100,
})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
})
t.Run("normal case", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode())
c.importManager = newImportManager(ctx, mockKv, nil, nil, nil, nil, nil, nil, nil)
resp, err := c.GetImportState(ctx, &milvuspb.GetImportStateRequest{
Task: 100,
})
assert.NoError(t, err)
assert.Equal(t, int64(100), resp.GetId())
assert.NotEqual(t, 0, resp.GetCreateTs())
assert.Equal(t, commonpb.ImportState_ImportPending, resp.GetState())
assert.Equal(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
})
}
func TestCore_ListImportTasks(t *testing.T) {
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = sync.Map{}
ti1 := &datapb.ImportTaskInfo{
Id: 100,
CollectionName: "collection-A",
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 100,
}
ti2 := &datapb.ImportTaskInfo{
Id: 200,
CollectionName: "collection-A",
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
},
CreateTs: time.Now().Unix() - 100,
}
ti3 := &datapb.ImportTaskInfo{
Id: 300,
CollectionName: "collection-B",
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
},
CreateTs: time.Now().Unix() - 100,
}
taskInfo1, err := proto.Marshal(ti1)
assert.NoError(t, err)
taskInfo2, err := proto.Marshal(ti2)
assert.NoError(t, err)
taskInfo3, err := proto.Marshal(ti3)
assert.NoError(t, err)
mockKv.Save(BuildImportTaskKey(1), "value")
mockKv.Save(BuildImportTaskKey(100), string(taskInfo1))
mockKv.Save(BuildImportTaskKey(200), string(taskInfo2))
mockKv.Save(BuildImportTaskKey(300), string(taskInfo3))
t.Run("not healthy", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withAbnormalCode())
resp, err := c.ListImportTasks(ctx, &milvuspb.ListImportTasksRequest{})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
})
t.Run("normal case", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode())
c.importManager = newImportManager(ctx, mockKv, nil, nil, nil, nil, nil, nil, nil)
resp, err := c.ListImportTasks(ctx, &milvuspb.ListImportTasksRequest{})
assert.NoError(t, err)
assert.Equal(t, 3, len(resp.GetTasks()))
assert.Equal(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode())
})
}
func TestCore_ReportImport(t *testing.T) {
Params.RootCoordCfg.ImportTaskSubPath = "importtask"
var countLock sync.RWMutex
var globalCount = typeutil.UniqueID(0)
var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) {
countLock.Lock()
defer countLock.Unlock()
globalCount++
return globalCount, 0, nil
}
mockKv := &kv.MockMetaKV{}
mockKv.InMemKv = sync.Map{}
ti1 := &datapb.ImportTaskInfo{
Id: 100,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPending,
},
CreateTs: time.Now().Unix() - 100,
}
ti2 := &datapb.ImportTaskInfo{
Id: 200,
State: &datapb.ImportTaskState{
StateCode: commonpb.ImportState_ImportPersisted,
},
CreateTs: time.Now().Unix() - 100,
}
taskInfo1, err := proto.Marshal(ti1)
assert.NoError(t, err)
taskInfo2, err := proto.Marshal(ti2)
assert.NoError(t, err)
mockKv.Save(BuildImportTaskKey(1), "value")
mockKv.Save(BuildImportTaskKey(100), string(taskInfo1))
mockKv.Save(BuildImportTaskKey(200), string(taskInfo2))
ticker := newRocksMqTtSynchronizer()
meta := newMockMetaTable()
meta.GetCollectionByNameFunc = func(ctx context.Context, collectionName string, ts Timestamp) (*model.Collection, error) {
return nil, errors.New("error mock GetCollectionByName")
}
meta.AddCollectionFunc = func(ctx context.Context, coll *model.Collection) error {
return nil
}
meta.ChangeCollectionStateFunc = func(ctx context.Context, collectionID UniqueID, state etcdpb.CollectionState, ts Timestamp) error {
return nil
}
dc := newMockDataCoord()
dc.GetComponentStatesFunc = func(ctx context.Context) (*internalpb.ComponentStates, error) {
return &internalpb.ComponentStates{
State: &internalpb.ComponentInfo{
NodeID: TestRootCoordID,
StateCode: internalpb.StateCode_Healthy,
},
SubcomponentStates: nil,
Status: succStatus(),
}, nil
}
dc.WatchChannelsFunc = func(ctx context.Context, req *datapb.WatchChannelsRequest) (*datapb.WatchChannelsResponse, error) {
return &datapb.WatchChannelsResponse{Status: succStatus()}, nil
}
dc.FlushFunc = func(ctx context.Context, req *datapb.FlushRequest) (*datapb.FlushResponse, error) {
return &datapb.FlushResponse{Status: succStatus()}, nil
}
mockCallImportServiceErr := false
callImportServiceFn := func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error) {
if mockCallImportServiceErr {
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}, errors.New("mock err")
}
return &datapb.ImportTaskResponse{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
}, nil
}
callMarkSegmentsDropped := func(ctx context.Context, segIDs []typeutil.UniqueID) (*commonpb.Status, error) {
return &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
}, nil
}
t.Run("not healthy", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withAbnormalCode())
resp, err := c.ReportImport(ctx, &rootcoordpb.ImportResult{})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
})
t.Run("report complete import", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode())
c.importManager = newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
resp, err := c.ReportImport(ctx, &rootcoordpb.ImportResult{
TaskId: 100,
State: commonpb.ImportState_ImportCompleted,
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
// Change the state back.
err = c.importManager.setImportTaskState(100, commonpb.ImportState_ImportPending)
assert.NoError(t, err)
})
t.Run("report complete import with task not found", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode())
c.importManager = newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
resp, err := c.ReportImport(ctx, &rootcoordpb.ImportResult{
TaskId: 101,
State: commonpb.ImportState_ImportCompleted,
})
assert.NoError(t, err)
assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
})
t.Run("report import started state", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(withHealthyCode())
c.importManager = newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
c.importManager.loadFromTaskStore(true)
c.importManager.sendOutTasks(ctx)
resp, err := c.ReportImport(ctx, &rootcoordpb.ImportResult{
TaskId: 100,
State: commonpb.ImportState_ImportStarted,
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
// Change the state back.
err = c.importManager.setImportTaskState(100, commonpb.ImportState_ImportPending)
assert.NoError(t, err)
})
t.Run("report persisted import", func(t *testing.T) {
ctx := context.Background()
c := newTestCore(
withHealthyCode(),
withValidIDAllocator(),
withMeta(meta),
withTtSynchronizer(ticker),
withDataCoord(dc))
c.broker = newServerBroker(c)
c.importManager = newImportManager(ctx, mockKv, idAlloc, callImportServiceFn, callMarkSegmentsDropped, nil, nil, nil, nil)
c.importManager.loadFromTaskStore(true)
c.importManager.sendOutTasks(ctx)
resp, err := c.ReportImport(ctx, &rootcoordpb.ImportResult{
TaskId: 100,
State: commonpb.ImportState_ImportPersisted,
})
assert.NoError(t, err)
assert.Equal(t, commonpb.ErrorCode_Success, resp.GetErrorCode())
// Change the state back.
err = c.importManager.setImportTaskState(100, commonpb.ImportState_ImportPending)
assert.NoError(t, err)
})
}
func TestCore_Rbac(t *testing.T) {

View File

@ -36,6 +36,8 @@ import (
"golang.org/x/exp/mmap"
)
var CheckBucketRetryAttempts uint = 20
// MinioChunkManager is responsible for reading and writing data stored in MinIO.
type MinioChunkManager struct {
*minio.Client
@ -95,7 +97,7 @@ func newMinioChunkManagerWithConfig(ctx context.Context, c *config) (*MinioChunk
}
return nil
}
err = retry.Do(ctx, checkBucketFn, retry.Attempts(20))
err = retry.Do(ctx, checkBucketFn, retry.Attempts(CheckBucketRetryAttempts))
if err != nil {
return nil, err
}
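// Test-side sketch (assumption: the variable is addressed as
// storage.CheckBucketRetryAttempts from outside the package): exporting the
// attempt count lets tests fail fast instead of retrying 20 times against an
// unreachable MinIO endpoint.
//
//	old := storage.CheckBucketRetryAttempts
//	storage.CheckBucketRetryAttempts = 1 // shrink the retry budget for tests
//	defer func() { storage.CheckBucketRetryAttempts = old }()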

View File

@ -96,8 +96,8 @@ type DataNode interface {
// It returns a list of segments to be sent.
ResendSegmentStats(ctx context.Context, req *datapb.ResendSegmentStatsRequest) (*datapb.ResendSegmentStatsResponse, error)
// AddSegment puts the given segment to current DataNode's flow graph.
AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error)
// AddImportSegment puts the given import segment into the current DataNode's flow graph.
AddImportSegment(ctx context.Context, req *datapb.AddImportSegmentRequest) (*datapb.AddImportSegmentResponse, error)
}
// DataNodeComponent is used by grpc server of DataNode
@ -306,9 +306,15 @@ type DataCoord interface {
AcquireSegmentLock(ctx context.Context, req *datapb.AcquireSegmentLockRequest) (*commonpb.Status, error)
ReleaseSegmentLock(ctx context.Context, req *datapb.ReleaseSegmentLockRequest) (*commonpb.Status, error)
// AddSegment looks for the right DataNode given channel name, and triggers AddSegment call on that DataNode to
// add the segment into this DataNode.
AddSegment(ctx context.Context, req *datapb.AddSegmentRequest) (*commonpb.Status, error)
// SaveImportSegment saves the import segment binlog paths data and then looks for the right DataNode to add the
// segment to that DataNode.
SaveImportSegment(ctx context.Context, req *datapb.SaveImportSegmentRequest) (*commonpb.Status, error)
// UnsetIsImportingState unsets the `isImport` state of the given segments so that they can get compacted normally.
UnsetIsImportingState(ctx context.Context, req *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error)
// MarkSegmentsDropped marks the given segments as `dropped` state.
MarkSegmentsDropped(ctx context.Context, req *datapb.MarkSegmentsDroppedRequest) (*commonpb.Status, error)
}
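// Hedged sketch of the intended call order, inferred from the method comments
// above (field names on the request messages are assumptions, not verbatim
// API): after import data is persisted and flushed, RootCoord clears the
// importing flag so the segments can be compacted; on failure it marks the
// segments dropped.
//
//	if importSucceeded {
//		_, _ = dataCoord.UnsetIsImportingState(ctx, &datapb.UnsetIsImportingStateRequest{
//			SegmentIds: segIDs, // assumed field name
//		})
//	} else {
//		_, _ = dataCoord.MarkSegmentsDropped(ctx, &datapb.MarkSegmentsDroppedRequest{
//			SegmentIds: segIDs, // assumed field name
//		})
//	}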
// DataCoordComponent defines the interface of DataCoord component.

View File

@ -19,6 +19,7 @@ import (
"github.com/milvus-io/milvus/internal/log"
"github.com/milvus-io/milvus/internal/proto/rootcoordpb"
"github.com/milvus-io/milvus/internal/storage"
"github.com/milvus-io/milvus/internal/util/retry"
"github.com/milvus-io/milvus/internal/util/timerecord"
"github.com/milvus-io/milvus/internal/util/typeutil"
)
@ -171,57 +172,13 @@ func (p *ImportWrapper) Import(filePaths []string, rowBased bool, onlyValidate b
log.Info("import wrapper: row-based file ", zap.Any("filePath", filePath), zap.Any("fileType", fileType))
if fileType == JSONFileExt {
err := func() error {
tr := timerecord.NewTimeRecorder("json row-based parser: " + filePath)
// for minio storage, chunkManager will download file into local memory
// for local storage, chunkManager opens the file directly
file, err := p.chunkManager.Reader(filePath)
if err != nil {
return err
}
defer file.Close()
tr.Record("open reader")
// report file process state
p.importResult.State = commonpb.ImportState_ImportDownloaded
p.reportFunc(p.importResult)
// parse file
reader := bufio.NewReader(file)
parser := NewJSONParser(p.ctx, p.collectionSchema)
var consumer *JSONRowConsumer
if !onlyValidate {
flushFunc := func(fields map[storage.FieldID]storage.FieldData, shardNum int) error {
p.printFieldsDataInfo(fields, "import wrapper: prepare to flush segment", filePaths)
return p.callFlushFunc(fields, shardNum)
}
consumer = NewJSONRowConsumer(p.collectionSchema, p.rowIDAllocator, p.shardNum, p.segmentSize, flushFunc)
}
validator := NewJSONRowValidator(p.collectionSchema, consumer)
err = parser.ParseRows(reader, validator)
if err != nil {
return err
}
// for row-based files, auto-id is generated within JSONRowConsumer
if consumer != nil {
p.importResult.AutoIds = append(p.importResult.AutoIds, consumer.IDRange()...)
}
// report file process state
p.importResult.State = commonpb.ImportState_ImportParsed
p.reportFunc(p.importResult)
tr.Record("parsed")
return nil
}()
err = p.parseRowBasedJSON(filePath, onlyValidate)
if err != nil {
log.Error("import error: "+err.Error(), zap.String("filePath", filePath))
return err
}
}
// no need for an else branch here, since fileValidation() already does this check
}
} else {
// parse and consume column-based files
@ -269,103 +226,24 @@ func (p *ImportWrapper) Import(filePaths []string, rowBased bool, onlyValidate b
// parse/validate/consume data
for i := 0; i < len(filePaths); i++ {
filePath := filePaths[i]
fileName, fileType := getFileNameAndExt(filePath)
_, fileType := getFileNameAndExt(filePath)
log.Info("import wrapper: column-based file ", zap.Any("filePath", filePath), zap.Any("fileType", fileType))
if fileType == JSONFileExt {
err := func() error {
tr := timerecord.NewTimeRecorder("json column-based parser: " + filePath)
// for minio storage, chunkManager will download file into local memory
// for local storage, chunkManager opens the file directly
file, err := p.chunkManager.Reader(filePath)
if err != nil {
return err
}
defer file.Close()
tr.Record("open reader")
// report file process state
p.importResult.State = commonpb.ImportState_ImportDownloaded
p.reportFunc(p.importResult)
// parse file
reader := bufio.NewReader(file)
parser := NewJSONParser(p.ctx, p.collectionSchema)
var consumer *JSONColumnConsumer
if !onlyValidate {
consumer = NewJSONColumnConsumer(p.collectionSchema, combineFunc)
}
validator := NewJSONColumnValidator(p.collectionSchema, consumer)
err = parser.ParseColumns(reader, validator)
if err != nil {
return err
}
// report file process state
p.importResult.State = commonpb.ImportState_ImportParsed
p.reportFunc(p.importResult)
tr.Record("parsed")
return nil
}()
err = p.parseColumnBasedJSON(filePath, onlyValidate, combineFunc)
if err != nil {
log.Error("import error: "+err.Error(), zap.String("filePath", filePath))
return err
}
} else if fileType == NumpyFileExt {
err := func() error {
tr := timerecord.NewTimeRecorder("numpy parser: " + filePath)
// for minio storage, chunkManager will download file into local memory
// for local storage, chunkManager opens the file directly
file, err := p.chunkManager.Reader(filePath)
if err != nil {
return err
}
defer file.Close()
tr.Record("open reader")
// report file process state
p.importResult.State = commonpb.ImportState_ImportDownloaded
p.reportFunc(p.importResult)
var id storage.FieldID
for _, field := range p.collectionSchema.Fields {
if field.GetName() == fileName {
id = field.GetFieldID()
}
}
// the numpy parser returns a storage.FieldData; construct a map[storage.FieldID]storage.FieldData here so it can be combined
flushFunc := func(field storage.FieldData) error {
fields := make(map[storage.FieldID]storage.FieldData)
fields[id] = field
return combineFunc(fields)
}
// for a numpy file, the file name (without extension) is taken as the field name
parser := NewNumpyParser(p.ctx, p.collectionSchema, flushFunc)
err = parser.Parse(file, fileName, onlyValidate)
if err != nil {
return err
}
// report file process state
p.importResult.State = commonpb.ImportState_ImportParsed
p.reportFunc(p.importResult)
tr.Record("parsed")
return nil
}()
err = p.parseColumnBasedNumpy(filePath, onlyValidate, combineFunc)
if err != nil {
log.Error("import error: "+err.Error(), zap.String("filePath", filePath))
return err
}
}
// no need for an else branch here, since fileValidation() already does this check
}
// split fields data into segments
@ -379,7 +257,143 @@ func (p *ImportWrapper) Import(filePaths []string, rowBased bool, onlyValidate b
debug.FreeOSMemory()
// report file process state
p.importResult.State = commonpb.ImportState_ImportPersisted
return p.reportFunc(p.importResult)
// the persisted-state report is important; retry multiple times so the task does not fail merely because of a transient network error
reportErr := retry.Do(p.ctx, func() error {
return p.reportFunc(p.importResult)
}, retry.Attempts(10))
if reportErr != nil {
log.Warn("fail to report import state to root coord", zap.Error(err))
return reportErr
}
return nil
}
func (p *ImportWrapper) parseRowBasedJSON(filePath string, onlyValidate bool) error {
tr := timerecord.NewTimeRecorder("json row-based parser: " + filePath)
// for minio storage, chunkManager will download file into local memory
// for local storage, chunkManager opens the file directly
file, err := p.chunkManager.Reader(filePath)
if err != nil {
return err
}
defer file.Close()
// parse file
reader := bufio.NewReader(file)
parser := NewJSONParser(p.ctx, p.collectionSchema)
var consumer *JSONRowConsumer
if !onlyValidate {
flushFunc := func(fields map[storage.FieldID]storage.FieldData, shardID int) error {
var filePaths = []string{filePath}
p.printFieldsDataInfo(fields, "import wrapper: prepare to flush segment", filePaths)
return p.callFlushFunc(fields, shardID)
}
consumer, err = NewJSONRowConsumer(p.collectionSchema, p.rowIDAllocator, p.shardNum, p.segmentSize, flushFunc)
if err != nil {
return err
}
}
validator, err := NewJSONRowValidator(p.collectionSchema, consumer)
if err != nil {
return err
}
err = parser.ParseRows(reader, validator)
if err != nil {
return err
}
// for row-based files, auto-id is generated within JSONRowConsumer
if consumer != nil {
p.importResult.AutoIds = append(p.importResult.AutoIds, consumer.IDRange()...)
}
tr.Elapse("parsed")
return nil
}
func (p *ImportWrapper) parseColumnBasedJSON(filePath string, onlyValidate bool,
combineFunc func(fields map[storage.FieldID]storage.FieldData) error) error {
tr := timerecord.NewTimeRecorder("json column-based parser: " + filePath)
// for minio storage, chunkManager will download file into local memory
// for local storage, chunkManager opens the file directly
file, err := p.chunkManager.Reader(filePath)
if err != nil {
return err
}
defer file.Close()
// parse file
reader := bufio.NewReader(file)
parser := NewJSONParser(p.ctx, p.collectionSchema)
var consumer *JSONColumnConsumer
if !onlyValidate {
consumer, err = NewJSONColumnConsumer(p.collectionSchema, combineFunc)
if err != nil {
return err
}
}
validator, err := NewJSONColumnValidator(p.collectionSchema, consumer)
if err != nil {
return err
}
err = parser.ParseColumns(reader, validator)
if err != nil {
return err
}
tr.Elapse("parsed")
return nil
}
func (p *ImportWrapper) parseColumnBasedNumpy(filePath string, onlyValidate bool,
combineFunc func(fields map[storage.FieldID]storage.FieldData) error) error {
tr := timerecord.NewTimeRecorder("numpy parser: " + filePath)
fileName, _ := getFileNameAndExt(filePath)
// for minio storage, chunkManager will download file into local memory
// for local storage, chunkManager opens the file directly
file, err := p.chunkManager.Reader(filePath)
if err != nil {
return err
}
defer file.Close()
var id storage.FieldID
var found = false
for _, field := range p.collectionSchema.Fields {
if field.GetName() == fileName {
id = field.GetFieldID()
found = true
break
}
}
// if the numpy file name does not map to a field name, ignore it
if !found {
return nil
}
// the numpy parser returns a storage.FieldData; construct a map[storage.FieldID]storage.FieldData here so it can be combined
flushFunc := func(field storage.FieldData) error {
fields := make(map[storage.FieldID]storage.FieldData)
fields[id] = field
return combineFunc(fields)
}
// for a numpy file, the file name (without extension) is taken as the field name
parser := NewNumpyParser(p.ctx, p.collectionSchema, flushFunc)
err = parser.Parse(file, fileName, onlyValidate)
if err != nil {
return err
}
tr.Elapse("parsed")
return nil
}
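// Illustrative example (paths are hypothetical): because the file name
// (without extension) is taken as the field name, a column-based numpy import
// for fields "field_int64" and "field_float_vector" would be laid out as:
//
//	files := []string{
//		"/tmp/import/field_int64.npy",        // -> field "field_int64"
//		"/tmp/import/field_float_vector.npy", // -> field "field_float_vector"
//	}
//	err := wrapper.Import(files, false /*rowBased*/, false /*onlyValidate*/)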
func (p *ImportWrapper) appendFunc(schema *schemapb.FieldSchema) func(src storage.FieldData, n int, target storage.FieldData) error {
@ -544,11 +558,11 @@ func (p *ImportWrapper) splitFieldsData(fieldsData map[storage.FieldID]storage.F
appendFunctions := make(map[string]func(src storage.FieldData, n int, target storage.FieldData) error)
for i := 0; i < len(p.collectionSchema.Fields); i++ {
schema := p.collectionSchema.Fields[i]
appendFunc := p.appendFunc(schema)
if appendFunc == nil {
appendFuncErr := p.appendFunc(schema)
if appendFuncErr == nil {
return errors.New("import error: unsupported field data type")
}
appendFunctions[schema.GetName()] = appendFunc
appendFunctions[schema.GetName()] = appendFuncErr
}
// split data into segments

View File

@ -5,6 +5,7 @@ import (
"bytes"
"context"
"encoding/json"
"errors"
"strconv"
"testing"
"time"
@ -473,6 +474,7 @@ func Test_ImportColumnBased_numpy(t *testing.T) {
err = cm.Write(filePath, content)
assert.NoError(t, err)
importResult.State = commonpb.ImportState_ImportStarted
wrapper = NewImportWrapper(ctx, sampleSchema(), 2, 1, idAllocator, cm, flushFunc, importResult, reportFunc)
files = make([]string, 0)
files = append(files, filePath)
@ -802,3 +804,238 @@ func Test_FileValidation(t *testing.T) {
err = wrapper.fileValidation(files[:], false)
assert.NotNil(t, err)
}
func Test_ReportImportFailRowBased(t *testing.T) {
f := dependency.NewDefaultFactory(true)
ctx := context.Background()
cm, err := f.NewPersistentStorageChunkManager(ctx)
assert.NoError(t, err)
idAllocator := newIDAllocator(ctx, t)
content := []byte(`{
"rows":[
{"field_bool": true, "field_int8": 10, "field_int16": 101, "field_int32": 1001, "field_int64": 10001, "field_float": 3.14, "field_double": 1.56, "field_string": "hello world", "field_binary_vector": [254, 0], "field_float_vector": [1.1, 1.2, 1.3, 1.4]},
{"field_bool": false, "field_int8": 11, "field_int16": 102, "field_int32": 1002, "field_int64": 10002, "field_float": 3.15, "field_double": 2.56, "field_string": "hello world", "field_binary_vector": [253, 0], "field_float_vector": [2.1, 2.2, 2.3, 2.4]},
{"field_bool": true, "field_int8": 12, "field_int16": 103, "field_int32": 1003, "field_int64": 10003, "field_float": 3.16, "field_double": 3.56, "field_string": "hello world", "field_binary_vector": [252, 0], "field_float_vector": [3.1, 3.2, 3.3, 3.4]},
{"field_bool": false, "field_int8": 13, "field_int16": 104, "field_int32": 1004, "field_int64": 10004, "field_float": 3.17, "field_double": 4.56, "field_string": "hello world", "field_binary_vector": [251, 0], "field_float_vector": [4.1, 4.2, 4.3, 4.4]},
{"field_bool": true, "field_int8": 14, "field_int16": 105, "field_int32": 1005, "field_int64": 10005, "field_float": 3.18, "field_double": 5.56, "field_string": "hello world", "field_binary_vector": [250, 0], "field_float_vector": [5.1, 5.2, 5.3, 5.4]}
]
}`)
filePath := TempFilesPath + "rows_1.json"
err = cm.Write(filePath, content)
assert.NoError(t, err)
defer cm.RemoveWithPrefix("")
rowCount := 0
flushFunc := func(fields map[storage.FieldID]storage.FieldData, shardNum int) error {
count := 0
for _, data := range fields {
assert.Less(t, 0, data.RowNum())
if count == 0 {
count = data.RowNum()
} else {
assert.Equal(t, count, data.RowNum())
}
}
rowCount += count
return nil
}
// the import itself succeeds; the report callback is replaced below to force a failure
importResult := &rootcoordpb.ImportResult{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
TaskId: 1,
DatanodeId: 1,
State: commonpb.ImportState_ImportStarted,
Segments: make([]int64, 0),
AutoIds: make([]int64, 0),
RowCount: 0,
}
reportFunc := func(res *rootcoordpb.ImportResult) error {
return nil
}
wrapper := NewImportWrapper(ctx, sampleSchema(), 2, 1, idAllocator, cm, flushFunc, importResult, reportFunc)
files := make([]string, 0)
files = append(files, filePath)
wrapper.reportFunc = func(res *rootcoordpb.ImportResult) error {
return errors.New("mock error")
}
err = wrapper.Import(files, true, false)
assert.NotNil(t, err)
assert.Equal(t, 5, rowCount)
assert.Equal(t, commonpb.ImportState_ImportPersisted, importResult.State)
}
func Test_ReportImportFailColumnBased_json(t *testing.T) {
f := dependency.NewDefaultFactory(true)
ctx := context.Background()
cm, err := f.NewPersistentStorageChunkManager(ctx)
assert.NoError(t, err)
defer cm.RemoveWithPrefix("")
idAllocator := newIDAllocator(ctx, t)
content := []byte(`{
"field_bool": [true, false, true, true, true],
"field_int8": [10, 11, 12, 13, 14],
"field_int16": [100, 101, 102, 103, 104],
"field_int32": [1000, 1001, 1002, 1003, 1004],
"field_int64": [10000, 10001, 10002, 10003, 10004],
"field_float": [3.14, 3.15, 3.16, 3.17, 3.18],
"field_double": [5.1, 5.2, 5.3, 5.4, 5.5],
"field_string": ["a", "b", "c", "d", "e"],
"field_binary_vector": [
[254, 1],
[253, 2],
[252, 3],
[251, 4],
[250, 5]
],
"field_float_vector": [
[1.1, 1.2, 1.3, 1.4],
[2.1, 2.2, 2.3, 2.4],
[3.1, 3.2, 3.3, 3.4],
[4.1, 4.2, 4.3, 4.4],
[5.1, 5.2, 5.3, 5.4]
]
}`)
filePath := TempFilesPath + "columns_1.json"
err = cm.Write(filePath, content)
assert.NoError(t, err)
rowCount := 0
flushFunc := func(fields map[storage.FieldID]storage.FieldData, shardNum int) error {
count := 0
for _, data := range fields {
assert.Less(t, 0, data.RowNum())
if count == 0 {
count = data.RowNum()
} else {
assert.Equal(t, count, data.RowNum())
}
}
rowCount += count
return nil
}
// the import itself succeeds; the report callback is replaced below to force a failure
importResult := &rootcoordpb.ImportResult{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
TaskId: 1,
DatanodeId: 1,
State: commonpb.ImportState_ImportStarted,
Segments: make([]int64, 0),
AutoIds: make([]int64, 0),
RowCount: 0,
}
reportFunc := func(res *rootcoordpb.ImportResult) error {
return nil
}
wrapper := NewImportWrapper(ctx, sampleSchema(), 2, 1, idAllocator, cm, flushFunc, importResult, reportFunc)
files := make([]string, 0)
files = append(files, filePath)
wrapper.reportFunc = func(res *rootcoordpb.ImportResult) error {
return errors.New("mock error")
}
err = wrapper.Import(files, false, false)
assert.NotNil(t, err)
assert.Equal(t, 5, rowCount)
assert.Equal(t, commonpb.ImportState_ImportPersisted, importResult.State)
}
func Test_ReportImportFailColumnBased_numpy(t *testing.T) {
f := dependency.NewDefaultFactory(true)
ctx := context.Background()
cm, err := f.NewPersistentStorageChunkManager(ctx)
assert.NoError(t, err)
defer cm.RemoveWithPrefix("")
idAllocator := newIDAllocator(ctx, t)
content := []byte(`{
"field_bool": [true, false, true, true, true],
"field_int8": [10, 11, 12, 13, 14],
"field_int16": [100, 101, 102, 103, 104],
"field_int32": [1000, 1001, 1002, 1003, 1004],
"field_int64": [10000, 10001, 10002, 10003, 10004],
"field_float": [3.14, 3.15, 3.16, 3.17, 3.18],
"field_double": [5.1, 5.2, 5.3, 5.4, 5.5],
"field_string": ["a", "b", "c", "d", "e"]
}`)
files := make([]string, 0)
filePath := TempFilesPath + "scalar_fields.json"
err = cm.Write(filePath, content)
assert.NoError(t, err)
files = append(files, filePath)
filePath = TempFilesPath + "field_binary_vector.npy"
bin := [][2]uint8{{1, 2}, {3, 4}, {5, 6}, {7, 8}, {9, 10}}
content, err = CreateNumpyData(bin)
assert.Nil(t, err)
log.Debug("content", zap.Any("c", content))
err = cm.Write(filePath, content)
assert.NoError(t, err)
files = append(files, filePath)
filePath = TempFilesPath + "field_float_vector.npy"
flo := [][4]float32{{1, 2, 3, 4}, {3, 4, 5, 6}, {5, 6, 7, 8}, {7, 8, 9, 10}, {9, 10, 11, 12}}
content, err = CreateNumpyData(flo)
assert.Nil(t, err)
log.Debug("content", zap.Any("c", content))
err = cm.Write(filePath, content)
assert.NoError(t, err)
files = append(files, filePath)
rowCount := 0
flushFunc := func(fields map[storage.FieldID]storage.FieldData, shardNum int) error {
count := 0
for _, data := range fields {
assert.Less(t, 0, data.RowNum())
if count == 0 {
count = data.RowNum()
} else {
assert.Equal(t, count, data.RowNum())
}
}
rowCount += count
return nil
}
// the import itself succeeds; the report callback is replaced below to force a failure
importResult := &rootcoordpb.ImportResult{
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_Success,
},
TaskId: 1,
DatanodeId: 1,
State: commonpb.ImportState_ImportStarted,
Segments: make([]int64, 0),
AutoIds: make([]int64, 0),
RowCount: 0,
}
reportFunc := func(res *rootcoordpb.ImportResult) error {
return nil
}
schema := sampleSchema()
schema.Fields[4].AutoID = true
wrapper := NewImportWrapper(ctx, schema, 2, 1, idAllocator, cm, flushFunc, importResult, reportFunc)
wrapper.reportFunc = func(res *rootcoordpb.ImportResult) error {
return errors.New("mock error")
}
err = wrapper.Import(files, false, false)
assert.NotNil(t, err)
assert.Equal(t, 5, rowCount)
assert.Equal(t, commonpb.ImportState_ImportPersisted, importResult.State)
}

View File

@ -262,15 +262,18 @@ type JSONRowValidator struct {
rowCounter int64 // how many rows have been validated
}
func NewJSONRowValidator(collectionSchema *schemapb.CollectionSchema, downstream JSONRowHandler) *JSONRowValidator {
func NewJSONRowValidator(collectionSchema *schemapb.CollectionSchema, downstream JSONRowHandler) (*JSONRowValidator, error) {
v := &JSONRowValidator{
validators: make(map[storage.FieldID]*Validator),
downstream: downstream,
rowCounter: 0,
}
initValidators(collectionSchema, v.validators)
return v
err := initValidators(collectionSchema, v.validators)
if err != nil {
log.Error("JSON column validator: failed to initialize json row-based validator", zap.Error(err))
return nil, err
}
return v, nil
}
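With this change the parser-side constructors report failure through an explicit error instead of a bare nil pointer, so every caller now follows the standard Go two-value pattern, as the import wrapper earlier in this change already demonstrates:
validator, err := NewJSONRowValidator(collectionSchema, consumer)
if err != nil {
return err
}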
func (v *JSONRowValidator) ValidateCount() int64 {
@ -326,15 +329,18 @@ type JSONColumnValidator struct {
rowCounter map[string]int64 // row count of each field
}
func NewJSONColumnValidator(schema *schemapb.CollectionSchema, downstream JSONColumnHandler) *JSONColumnValidator {
func NewJSONColumnValidator(schema *schemapb.CollectionSchema, downstream JSONColumnHandler) (*JSONColumnValidator, error) {
v := &JSONColumnValidator{
validators: make(map[storage.FieldID]*Validator),
downstream: downstream,
rowCounter: make(map[string]int64),
}
initValidators(schema, v.validators)
return v
err := initValidators(schema, v.validators)
if err != nil {
log.Error("JSON column validator: fail to initialize json column-based validator", zap.Error(err))
return nil, err
}
return v, nil
}
func (v *JSONColumnValidator) ValidateCount() map[string]int64 {
@ -390,7 +396,7 @@ func (v *JSONColumnValidator) Handle(columns map[storage.FieldID][]interface{})
return nil
}
type ImportFlushFunc func(fields map[storage.FieldID]storage.FieldData, shardNum int) error
type ImportFlushFunc func(fields map[storage.FieldID]storage.FieldData, shardID int) error
// consumer for the row-based JSON format
type JSONRowConsumer struct {
@ -483,10 +489,10 @@ func initSegmentData(collectionSchema *schemapb.CollectionSchema) map[storage.Fi
}
func NewJSONRowConsumer(collectionSchema *schemapb.CollectionSchema, idAlloc *allocator.IDAllocator, shardNum int32, segmentSize int64,
flushFunc ImportFlushFunc) *JSONRowConsumer {
flushFunc ImportFlushFunc) (*JSONRowConsumer, error) {
if collectionSchema == nil {
log.Error("JSON row consumer: collection schema is nil")
return nil
return nil, errors.New("collection schema is nil")
}
v := &JSONRowConsumer{
@ -501,13 +507,18 @@ func NewJSONRowConsumer(collectionSchema *schemapb.CollectionSchema, idAlloc *al
callFlushFunc: flushFunc,
}
initValidators(collectionSchema, v.validators)
err := initValidators(collectionSchema, v.validators)
if err != nil {
log.Error("JSON row consumer: fail to initialize json row-based consumer", zap.Error(err))
return nil, errors.New("fail to initialize json row-based consumer")
}
v.segmentsData = make([]map[storage.FieldID]storage.FieldData, 0, shardNum)
for i := 0; i < int(shardNum); i++ {
segmentData := initSegmentData(collectionSchema)
if segmentData == nil {
return nil
log.Error("JSON row consumer: fail to initialize in-memory segment data", zap.Int32("shardNum", shardNum))
return nil, errors.New("fail to initialize in-memory segment data")
}
v.segmentsData = append(v.segmentsData, segmentData)
}
@ -522,15 +533,15 @@ func NewJSONRowConsumer(collectionSchema *schemapb.CollectionSchema, idAlloc *al
// primary key not found
if v.primaryKey == -1 {
log.Error("JSON row consumer: collection schema has no primary key")
return nil
return nil, errors.New("collection schema has no primary key")
}
// the primary key is auto-generated, so an ID allocator is required
if v.validators[v.primaryKey].autoID && idAlloc == nil {
log.Error("JSON row consumer: ID allocator is nil")
return nil
return nil, errors.New(" ID allocator is nil")
}
return v
return v, nil
}
func (v *JSONRowConsumer) IDRange() []int64 {
@ -672,9 +683,10 @@ type JSONColumnConsumer struct {
callFlushFunc ColumnFlushFunc // callback function to flush a segment
}
func NewJSONColumnConsumer(collectionSchema *schemapb.CollectionSchema, flushFunc ColumnFlushFunc) *JSONColumnConsumer {
func NewJSONColumnConsumer(collectionSchema *schemapb.CollectionSchema, flushFunc ColumnFlushFunc) (*JSONColumnConsumer, error) {
if collectionSchema == nil {
return nil
log.Error("JSON column consumer: collection schema is nil")
return nil, errors.New("collection schema is nil")
}
v := &JSONColumnConsumer{
@ -682,8 +694,16 @@ func NewJSONColumnConsumer(collectionSchema *schemapb.CollectionSchema, flushFun
validators: make(map[storage.FieldID]*Validator),
callFlushFunc: flushFunc,
}
initValidators(collectionSchema, v.validators)
err := initValidators(collectionSchema, v.validators)
if err != nil {
log.Error("JSON column consumer: fail to initialize validator", zap.Error(err))
return nil, errors.New("fail to initialize validator")
}
v.fieldsData = initSegmentData(collectionSchema)
if v.fieldsData == nil {
log.Error("JSON column consumer: fail to initialize in-memory segment data")
return nil, errors.New("fail to initialize in-memory segment data")
}
for i := 0; i < len(collectionSchema.Fields); i++ {
schema := collectionSchema.Fields[i]
@ -693,7 +713,7 @@ func NewJSONColumnConsumer(collectionSchema *schemapb.CollectionSchema, flushFun
}
}
return v
return v, nil
}
func (v *JSONColumnConsumer) flush() error {

View File

@ -185,8 +185,11 @@ func Test_JSONRowValidator(t *testing.T) {
"rows":[]
}`)
validator := NewJSONRowValidator(schema, nil)
err := parser.ParseRows(reader, validator)
validator, err := NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
assert.Equal(t, int64(0), validator.ValidateCount())
@ -246,8 +249,11 @@ func Test_JSONColumnValidator(t *testing.T) {
"field_float_vector": []
}`)
validator := NewJSONColumnValidator(schema, nil)
err := parser.ParseColumns(reader, validator)
validator, err := NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
for _, count := range validator.rowCounter {
assert.Equal(t, int64(0), count)
@ -267,7 +273,10 @@ func Test_JSONColumnValidator(t *testing.T) {
"field_float_vector": []
}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
@ -286,7 +295,10 @@ func Test_JSONColumnValidator(t *testing.T) {
"field_float_vector": [[1.1, 1.2, 1.3, 1.4]]
}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
@ -332,11 +344,15 @@ func Test_JSONRowConsumer(t *testing.T) {
return nil
}
consumer := NewJSONRowConsumer(schema, idAllocator, shardNum, 1, consumeFunc)
consumer, err := NewJSONRowConsumer(schema, idAllocator, shardNum, 1, consumeFunc)
assert.NotNil(t, consumer)
assert.Nil(t, err)
validator := NewJSONRowValidator(schema, consumer)
err := parser.ParseRows(reader, validator)
validator, err := NewJSONRowValidator(schema, consumer)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.Nil(t, err)
assert.Equal(t, int64(5), validator.ValidateCount())
@ -454,11 +470,15 @@ func Test_JSONRowConsumerStringKey(t *testing.T) {
return nil
}
consumer := NewJSONRowConsumer(schema, idAllocator, shardNum, 1, consumeFunc)
consumer, err := NewJSONRowConsumer(schema, idAllocator, shardNum, 1, consumeFunc)
assert.NotNil(t, consumer)
assert.Nil(t, err)
validator := NewJSONRowValidator(schema, consumer)
err := parser.ParseRows(reader, validator)
validator, err := NewJSONRowValidator(schema, consumer)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.Nil(t, err)
assert.Equal(t, int64(10), validator.ValidateCount())
@ -516,11 +536,15 @@ func Test_JSONColumnConsumer(t *testing.T) {
return nil
}
consumer := NewJSONColumnConsumer(schema, consumeFunc)
consumer, err := NewJSONColumnConsumer(schema, consumeFunc)
assert.NotNil(t, consumer)
assert.Nil(t, err)
validator := NewJSONColumnValidator(schema, consumer)
err := parser.ParseColumns(reader, validator)
validator, err := NewJSONColumnValidator(schema, consumer)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.Nil(t, err)
for _, count := range validator.ValidateCount() {
assert.Equal(t, int64(5), count)

View File

@ -216,7 +216,10 @@ func Test_ParserRows(t *testing.T) {
err := parser.ParseRows(reader, nil)
assert.NotNil(t, err)
validator := NewJSONRowValidator(schema, nil)
validator, err := NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.Nil(t, err)
assert.Equal(t, int64(5), validator.ValidateCount())
@ -224,50 +227,74 @@ func Test_ParserRows(t *testing.T) {
reader = strings.NewReader(`{
"dummy":[]
}`)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{
"rows":
}`)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{
"rows": [}
}`)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{
"rows": {}
}`)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{
"rows": [[]]
}`)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`[]`)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{}`)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(``)
validator = NewJSONRowValidator(schema, nil)
validator, err = NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.NotNil(t, err)
}
@ -309,7 +336,10 @@ func Test_ParserColumns(t *testing.T) {
err := parser.ParseColumns(reader, nil)
assert.NotNil(t, err)
validator := NewJSONColumnValidator(schema, nil)
validator, err := NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.Nil(t, err)
counter := validator.ValidateCount()
@ -321,50 +351,74 @@ func Test_ParserColumns(t *testing.T) {
"field_int8": [10, 11, 12, 13, 14],
"dummy":[1, 2, 3]
}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.Nil(t, err)
reader = strings.NewReader(`{
"dummy":[1, 2, 3]
}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{
"field_bool":
}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{
"field_bool":{}
}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{
"field_bool":[}
}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`[]`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(`{}`)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
reader = strings.NewReader(``)
validator = NewJSONColumnValidator(schema, nil)
validator, err = NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.NotNil(t, err)
}
@ -462,8 +516,11 @@ func Test_ParserRowsStringKey(t *testing.T) {
]
}`)
validator := NewJSONRowValidator(schema, nil)
err := parser.ParseRows(reader, validator)
validator, err := NewJSONRowValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseRows(reader, validator)
assert.Nil(t, err)
assert.Equal(t, int64(10), validator.ValidateCount())
}
@ -495,7 +552,10 @@ func Test_ParserColumnsStrKey(t *testing.T) {
err := parser.ParseColumns(reader, nil)
assert.NotNil(t, err)
validator := NewJSONColumnValidator(schema, nil)
validator, err := NewJSONColumnValidator(schema, nil)
assert.NotNil(t, validator)
assert.Nil(t, err)
err = parser.ParseColumns(reader, validator)
assert.Nil(t, err)
counter := validator.ValidateCount()

View File

@ -182,7 +182,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "bool.npy"
data := []bool{true, false, true, false}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)
@ -241,7 +242,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "uint8.npy"
data := []uint8{1, 2, 3, 4, 5, 6}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)
@ -276,7 +278,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "int8.npy"
data := []int8{1, 2, 3, 4, 5, 6}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)
@ -306,7 +309,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "int16.npy"
data := []int16{1, 2, 3, 4, 5, 6}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)
@ -336,7 +340,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "int32.npy"
data := []int32{1, 2, 3, 4, 5, 6}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)
@ -366,7 +371,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "int64.npy"
data := []int64{1, 2, 3, 4, 5, 6}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)
@ -396,7 +402,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "float.npy"
data := []float32{1, 2, 3, 4, 5, 6}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)
@ -426,7 +433,8 @@ func Test_Read(t *testing.T) {
{
filePath := TempFilesPath + "double.npy"
data := []float64{1, 2, 3, 4, 5, 6}
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
file, err := os.Open(filePath)
assert.Nil(t, err)

View File

@ -83,7 +83,8 @@ func Test_Validate(t *testing.T) {
func() {
filePath := TempFilesPath + "scalar_1.npy"
data1 := []float64{0, 1, 2, 3, 4, 5}
CreateNumpyFile(filePath, data1)
err := CreateNumpyFile(filePath, data1)
assert.Nil(t, err)
file1, err := os.Open(filePath)
assert.Nil(t, err)
@ -102,7 +103,8 @@ func Test_Validate(t *testing.T) {
// data type mismatch
filePath = TempFilesPath + "scalar_2.npy"
data2 := []int64{0, 1, 2, 3, 4, 5}
CreateNumpyFile(filePath, data2)
err = CreateNumpyFile(filePath, data2)
assert.Nil(t, err)
file2, err := os.Open(filePath)
assert.Nil(t, err)
@ -117,7 +119,8 @@ func Test_Validate(t *testing.T) {
// shape mismatch
filePath = TempFilesPath + "scalar_2.npy"
data3 := [][2]float64{{1, 1}}
CreateNumpyFile(filePath, data3)
err = CreateNumpyFile(filePath, data3)
assert.Nil(t, err)
file3, err := os.Open(filePath)
assert.Nil(t, err)
@ -134,7 +137,8 @@ func Test_Validate(t *testing.T) {
func() {
filePath := TempFilesPath + "binary_vector_1.npy"
data1 := [][2]uint8{{0, 1}, {2, 3}, {4, 5}}
CreateNumpyFile(filePath, data1)
err := CreateNumpyFile(filePath, data1)
assert.Nil(t, err)
file1, err := os.Open(filePath)
assert.Nil(t, err)
@ -150,7 +154,8 @@ func Test_Validate(t *testing.T) {
// data type mismatch
filePath = TempFilesPath + "binary_vector_2.npy"
data2 := [][2]uint16{{0, 1}, {2, 3}, {4, 5}}
CreateNumpyFile(filePath, data2)
err = CreateNumpyFile(filePath, data2)
assert.Nil(t, err)
file2, err := os.Open(filePath)
assert.Nil(t, err)
@ -165,7 +170,8 @@ func Test_Validate(t *testing.T) {
// shape mismatch
filePath = TempFilesPath + "binary_vector_3.npy"
data3 := []uint8{1, 2, 3}
CreateNumpyFile(filePath, data3)
err = CreateNumpyFile(filePath, data3)
assert.Nil(t, err)
file3, err := os.Open(filePath)
assert.Nil(t, err)
@ -180,7 +186,8 @@ func Test_Validate(t *testing.T) {
// shape[1] mismatch
filePath = TempFilesPath + "binary_vector_4.npy"
data4 := [][3]uint8{{0, 1, 2}, {2, 3, 4}, {4, 5, 6}}
CreateNumpyFile(filePath, data4)
err = CreateNumpyFile(filePath, data4)
assert.Nil(t, err)
file4, err := os.Open(filePath)
assert.Nil(t, err)
@ -211,7 +218,8 @@ func Test_Validate(t *testing.T) {
func() {
filePath := TempFilesPath + "float_vector.npy"
data1 := [][4]float32{{0, 0, 0, 0}, {1, 1, 1, 1}, {2, 2, 2, 2}, {3, 3, 3, 3}}
CreateNumpyFile(filePath, data1)
err := CreateNumpyFile(filePath, data1)
assert.Nil(t, err)
file1, err := os.Open(filePath)
assert.Nil(t, err)
@ -227,7 +235,8 @@ func Test_Validate(t *testing.T) {
// data type mismatch
filePath = TempFilesPath + "float_vector_2.npy"
data2 := [][4]int32{{0, 1, 2, 3}}
CreateNumpyFile(filePath, data2)
err = CreateNumpyFile(filePath, data2)
assert.Nil(t, err)
file2, err := os.Open(filePath)
assert.Nil(t, err)
@ -242,7 +251,8 @@ func Test_Validate(t *testing.T) {
// shape mismatch
filePath = TempFilesPath + "float_vector_3.npy"
data3 := []float32{1, 2, 3}
CreateNumpyFile(filePath, data3)
err = CreateNumpyFile(filePath, data3)
assert.Nil(t, err)
file3, err := os.Open(filePath)
assert.Nil(t, err)
@ -257,7 +267,8 @@ func Test_Validate(t *testing.T) {
// shape[1] mismatch
filePath = TempFilesPath + "float_vector_4.npy"
data4 := [][3]float32{{0, 0, 0}, {1, 1, 1}}
CreateNumpyFile(filePath, data4)
err = CreateNumpyFile(filePath, data4)
assert.Nil(t, err)
file4, err := os.Open(filePath)
assert.Nil(t, err)
@ -296,7 +307,8 @@ func Test_Parse(t *testing.T) {
checkFunc := func(data interface{}, fieldName string, callback func(field storage.FieldData) error) {
filePath := TempFilesPath + fieldName + ".npy"
CreateNumpyFile(filePath, data)
err := CreateNumpyFile(filePath, data)
assert.Nil(t, err)
func() {
file, err := os.Open(filePath)
@ -510,7 +522,8 @@ func Test_Parse_perf(t *testing.T) {
}
filePath := TempFilesPath + "perf.npy"
CreateNumpyFile(filePath, data)
err = CreateNumpyFile(filePath, data)
assert.Nil(t, err)
tr.Record("generate large numpy file " + filePath)

View File

@ -145,6 +145,14 @@ func (m *GrpcDataCoordClient) ReleaseSegmentLock(ctx context.Context, req *datap
return &commonpb.Status{}, m.Err
}
func (m *GrpcDataCoordClient) AddSegment(ctx context.Context, in *datapb.AddSegmentRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
func (m *GrpcDataCoordClient) SaveImportSegment(ctx context.Context, in *datapb.SaveImportSegmentRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
return &commonpb.Status{}, m.Err
}
func (m *GrpcDataCoordClient) UnsetIsImportingState(context.Context, *datapb.UnsetIsImportingStateRequest, ...grpc.CallOption) (*commonpb.Status, error) {
return &commonpb.Status{}, m.Err
}
func (m *GrpcDataCoordClient) MarkSegmentsDropped(context.Context, *datapb.MarkSegmentsDroppedRequest, ...grpc.CallOption) (*commonpb.Status, error) {
return &commonpb.Status{}, m.Err
}
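These stubs let coordinator tests drive the new import RPCs without a live DataCoord: whatever the mock's Err field holds is echoed back by every method. A usage sketch, assuming the Err field implied by the method bodies above; the empty request literal is illustrative:
client := &GrpcDataCoordClient{Err: nil}
status, err := client.SaveImportSegment(context.Background(), &datapb.SaveImportSegmentRequest{})
// err is nil and status is an empty commonpb.Status until Err is set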

View File

@ -73,8 +73,8 @@ func (m *GrpcDataNodeClient) ResendSegmentStats(ctx context.Context, req *datapb
return &datapb.ResendSegmentStatsResponse{}, m.Err
}
func (m *GrpcDataNodeClient) AddSegment(ctx context.Context, in *datapb.AddSegmentRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {
return &commonpb.Status{}, m.Err
func (m *GrpcDataNodeClient) AddImportSegment(ctx context.Context, in *datapb.AddImportSegmentRequest, opts ...grpc.CallOption) (*datapb.AddImportSegmentResponse, error) {
return &datapb.AddImportSegmentResponse{}, m.Err
}
func (m *GrpcDataNodeClient) SyncSegments(ctx context.Context, in *datapb.SyncSegmentsRequest, opts ...grpc.CallOption) (*commonpb.Status, error) {

View File

@ -392,15 +392,11 @@ type rootCoordConfig struct {
Address string
Port int
DmlChannelNum int64
MaxPartitionNum int64
MinSegmentSizeToEnableIndex int64
ImportTaskExpiration float64
ImportTaskRetention float64
ImportSegmentStateCheckInterval float64
ImportSegmentStateWaitLimit float64
ImportIndexCheckInterval float64
ImportIndexWaitLimit float64
DmlChannelNum int64
MaxPartitionNum int64
MinSegmentSizeToEnableIndex int64
ImportTaskExpiration float64
ImportTaskRetention float64
// --- ETCD Path ---
ImportTaskSubPath string
@ -416,10 +412,6 @@ func (p *rootCoordConfig) init(base *BaseTable) {
p.MinSegmentSizeToEnableIndex = p.Base.ParseInt64WithDefault("rootCoord.minSegmentSizeToEnableIndex", 1024)
p.ImportTaskExpiration = p.Base.ParseFloatWithDefault("rootCoord.importTaskExpiration", 15*60)
p.ImportTaskRetention = p.Base.ParseFloatWithDefault("rootCoord.importTaskRetention", 24*60*60)
p.ImportSegmentStateCheckInterval = p.Base.ParseFloatWithDefault("rootCoord.importSegmentStateCheckInterval", 10)
p.ImportSegmentStateWaitLimit = p.Base.ParseFloatWithDefault("rootCoord.importSegmentStateWaitLimit", 60)
p.ImportIndexCheckInterval = p.Base.ParseFloatWithDefault("rootCoord.importIndexCheckInterval", 10)
p.ImportIndexWaitLimit = p.Base.ParseFloatWithDefault("rootCoord.importIndexWaitLimit", 10*60)
p.ImportTaskSubPath = "importtask"
}

View File

@ -124,13 +124,7 @@ func TestComponentParam(t *testing.T) {
assert.NotEqual(t, Params.MinSegmentSizeToEnableIndex, 0)
t.Logf("master MinSegmentSizeToEnableIndex = %d", Params.MinSegmentSizeToEnableIndex)
assert.NotEqual(t, Params.ImportTaskExpiration, 0)
t.Logf("master ImportTaskExpiration = %f", Params.ImportTaskExpiration)
assert.NotEqual(t, Params.ImportTaskRetention, 0)
t.Logf("master ImportTaskRetention = %f", Params.ImportTaskRetention)
assert.NotEqual(t, Params.ImportIndexCheckInterval, 0)
t.Logf("master ImportIndexCheckInterval = %f", Params.ImportIndexCheckInterval)
assert.NotEqual(t, Params.ImportIndexWaitLimit, 0)
t.Logf("master ImportIndexWaitLimit = %f", Params.ImportIndexWaitLimit)
Params.CreatedTime = time.Now()
Params.UpdatedTime = time.Now()