fix: [restful v2] search results get cut when nq > 1 (#33339)

issue: #33099 #32837 #32419

1. len(search result) may be nq * topk; we need to return all of them rather than only topk
2. the code in the restful response payload is kept consistent with the milvus error code

Signed-off-by: PowderLi <min.li@zilliz.com>
pull/33400/head
PowderLi 2024-05-27 14:25:41 +08:00 committed by GitHub
parent cb99e3db34
commit ee73e62202
15 changed files with 316 additions and 241 deletions
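To make the two points above concrete, here is a minimal client-side sketch in Go of what they mean for callers of the v2 search endpoint: success is now signalled by code 0 (commonpb.ErrorCode_Success) rather than the HTTP status 200, and with nq query vectors the data array may hold up to nq * limit rows. The URL, port, and struct layout below are illustrative assumptions only and are not taken from this patch.

// Minimal sketch, not part of this patch: a Go caller checking a RESTful v2
// search response after this change. Endpoint URL, port, and payload are assumptions.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

type searchResp struct {
	Code    int32                    `json:"code"`    // milvus error code; 0 == commonpb.ErrorCode_Success
	Message string                   `json:"message"` // set when Code != 0
	Data    []map[string]interface{} `json:"data"`    // up to nq * limit rows, no longer cut to limit
}

func main() {
	// two query vectors (nq = 2) with limit 3 may return up to 6 rows
	body := []byte(`{"collectionName": "book", "data": [[0.1, 0.2], [0.3, 0.4]], "limit": 3}`)
	resp, err := http.Post("http://localhost:19530/v2/vectordb/entities/search", // assumed endpoint
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var r searchResp
	if err := json.NewDecoder(resp.Body).Decode(&r); err != nil {
		panic(err)
	}
	if r.Code != 0 { // 0, not http.StatusOK (200), now means success
		fmt.Printf("search failed: code=%d, message=%s\n", r.Code, r.Message)
		return
	}
	fmt.Printf("got %d hits\n", len(r.Data))
}

The tests and the Python REST client in this PR make the corresponding switch from checking code == 200 to code == 0.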


@ -408,7 +408,7 @@ func (h *HandlersV2) getCollectionDetails(ctx context.Context, c *gin.Context, a
if coll.Properties == nil {
coll.Properties = []*commonpb.KeyValuePair{}
}
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{
HTTPCollectionName: coll.CollectionName,
HTTPCollectionID: coll.CollectionID,
HTTPReturnDescription: coll.Schema.Description,
@ -458,7 +458,7 @@ func (h *HandlersV2) getCollectionLoadState(ctx context.Context, c *gin.Context,
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: merr.Code(err), HTTPReturnMessage: err.Error()})
return resp, err
} else if resp.(*milvuspb.GetLoadStateResponse).State == commonpb.LoadState_LoadStateNotLoad {
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{
HTTPReturnLoadState: resp.(*milvuspb.GetLoadStateResponse).State.String(),
}})
return resp, err
@ -483,7 +483,7 @@ func (h *HandlersV2) getCollectionLoadState(ctx context.Context, c *gin.Context,
if progress >= 100 {
state = commonpb.LoadState_LoadStateLoaded.String()
}
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{
HTTPReturnLoadState: state,
HTTPReturnLoadProgress: progress,
}, HTTPReturnMessage: errMessage})
@ -587,7 +587,7 @@ func (h *HandlersV2) query(ctx context.Context, c *gin.Context, anyReq any, dbNa
})
} else {
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: outputData,
HTTPReturnCost: proxy.GetCostValue(queryResp.GetStatus()),
})
@ -634,7 +634,7 @@ func (h *HandlersV2) get(ctx context.Context, c *gin.Context, anyReq any, dbName
})
} else {
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: outputData,
HTTPReturnCost: proxy.GetCostValue(queryResp.GetStatus()),
})
@ -721,20 +721,20 @@ func (h *HandlersV2) insert(ctx context.Context, c *gin.Context, anyReq any, dbN
allowJS, _ := strconv.ParseBool(c.Request.Header.Get(HTTPHeaderAllowInt64))
if allowJS {
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: gin.H{"insertCount": insertResp.InsertCnt, "insertIds": insertResp.IDs.IdField.(*schemapb.IDs_IntId).IntId.Data},
HTTPReturnCost: cost,
})
} else {
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: gin.H{"insertCount": insertResp.InsertCnt, "insertIds": formatInt64(insertResp.IDs.IdField.(*schemapb.IDs_IntId).IntId.Data)},
HTTPReturnCost: cost,
})
}
case *schemapb.IDs_StrId:
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: gin.H{"insertCount": insertResp.InsertCnt, "insertIds": insertResp.IDs.IdField.(*schemapb.IDs_StrId).StrId.Data},
HTTPReturnCost: cost,
})
@ -796,20 +796,20 @@ func (h *HandlersV2) upsert(ctx context.Context, c *gin.Context, anyReq any, dbN
allowJS, _ := strconv.ParseBool(c.Request.Header.Get(HTTPHeaderAllowInt64))
if allowJS {
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: gin.H{"upsertCount": upsertResp.UpsertCnt, "upsertIds": upsertResp.IDs.IdField.(*schemapb.IDs_IntId).IntId.Data},
HTTPReturnCost: cost,
})
} else {
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: gin.H{"upsertCount": upsertResp.UpsertCnt, "upsertIds": formatInt64(upsertResp.IDs.IdField.(*schemapb.IDs_IntId).IntId.Data)},
HTTPReturnCost: cost,
})
}
case *schemapb.IDs_StrId:
c.JSON(http.StatusOK, gin.H{
HTTPReturnCode: http.StatusOK,
HTTPReturnCode: commonpb.ErrorCode_Success,
HTTPReturnData: gin.H{"upsertCount": upsertResp.UpsertCnt, "upsertIds": upsertResp.IDs.IdField.(*schemapb.IDs_StrId).StrId.Data},
HTTPReturnCost: cost,
})
@ -935,10 +935,10 @@ func (h *HandlersV2) search(ctx context.Context, c *gin.Context, anyReq any, dbN
searchResp := resp.(*milvuspb.SearchResults)
cost := proxy.GetCostValue(searchResp.GetStatus())
if searchResp.Results.TopK == int64(0) {
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: []interface{}{}, HTTPReturnCost: cost})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: []interface{}{}, HTTPReturnCost: cost})
} else {
allowJS, _ := strconv.ParseBool(c.Request.Header.Get(HTTPHeaderAllowInt64))
outputData, err := buildQueryResp(searchResp.Results.TopK, searchResp.Results.OutputFields, searchResp.Results.FieldsData, searchResp.Results.Ids, searchResp.Results.Scores, allowJS)
outputData, err := buildQueryResp(0, searchResp.Results.OutputFields, searchResp.Results.FieldsData, searchResp.Results.Ids, searchResp.Results.Scores, allowJS)
if err != nil {
log.Ctx(ctx).Warn("high level restful api, fail to deal with search result", zap.Any("result", searchResp.Results), zap.Error(err))
c.JSON(http.StatusOK, gin.H{
@ -946,7 +946,7 @@ func (h *HandlersV2) search(ctx context.Context, c *gin.Context, anyReq any, dbN
HTTPReturnMessage: merr.ErrInvalidSearchResult.Error() + ", error: " + err.Error(),
})
} else {
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: outputData, HTTPReturnCost: cost})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: outputData, HTTPReturnCost: cost})
}
}
}
@ -1013,7 +1013,7 @@ func (h *HandlersV2) advancedSearch(ctx context.Context, c *gin.Context, anyReq
searchResp := resp.(*milvuspb.SearchResults)
cost := proxy.GetCostValue(searchResp.GetStatus())
if searchResp.Results.TopK == int64(0) {
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: []interface{}{}, HTTPReturnCost: cost})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: []interface{}{}, HTTPReturnCost: cost})
} else {
allowJS, _ := strconv.ParseBool(c.Request.Header.Get(HTTPHeaderAllowInt64))
outputData, err := buildQueryResp(0, searchResp.Results.OutputFields, searchResp.Results.FieldsData, searchResp.Results.Ids, searchResp.Results.Scores, allowJS)
@ -1024,7 +1024,7 @@ func (h *HandlersV2) advancedSearch(ctx context.Context, c *gin.Context, anyReq
HTTPReturnMessage: merr.ErrInvalidSearchResult.Error() + ", error: " + err.Error(),
})
} else {
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: outputData, HTTPReturnCost: cost})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: outputData, HTTPReturnCost: cost})
}
}
}
@ -1534,7 +1534,7 @@ func (h *HandlersV2) describeRole(ctx context.Context, c *gin.Context, anyReq an
}
privileges = append(privileges, privilege)
}
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: privileges})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: privileges})
}
return resp, err
}
@ -1664,7 +1664,7 @@ func (h *HandlersV2) describeIndex(ctx context.Context, c *gin.Context, anyReq a
}
indexInfos = append(indexInfos, indexInfo)
}
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: indexInfos})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: indexInfos})
}
return resp, err
}
@ -1738,7 +1738,7 @@ func (h *HandlersV2) describeAlias(ctx context.Context, c *gin.Context, anyReq a
})
if err == nil {
response := resp.(*milvuspb.DescribeAliasResponse)
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{
HTTPDbName: response.DbName,
HTTPCollectionName: response.Collection,
HTTPAliasName: response.Alias,
@ -1834,7 +1834,7 @@ func (h *HandlersV2) listImportJob(ctx context.Context, c *gin.Context, anyReq a
records = append(records, jobDetail)
}
returnData["records"] = records
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: returnData})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: returnData})
}
return resp, err
}
@ -1871,7 +1871,7 @@ func (h *HandlersV2) createImportJob(ctx context.Context, c *gin.Context, anyReq
if err == nil {
returnData := make(map[string]interface{})
returnData["jobId"] = resp.(*internalpb.ImportResponse).GetJobID()
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: returnData})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: returnData})
}
return resp, err
}
@ -1927,7 +1927,7 @@ func (h *HandlersV2) getImportJobProcess(ctx context.Context, c *gin.Context, an
}
returnData["fileSize"] = totalFileSize
returnData["details"] = details
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: returnData})
c.JSON(http.StatusOK, gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: returnData})
}
return resp, err
}


@ -647,11 +647,9 @@ func TestCreateCollection(t *testing.T) {
returnBody := &ReturnErrMsg{}
err := json.Unmarshal(w.Body.Bytes(), returnBody)
assert.Nil(t, err)
assert.Equal(t, testcase.errCode, returnBody.Code)
if testcase.errCode != 0 {
assert.Equal(t, testcase.errCode, returnBody.Code)
assert.Equal(t, testcase.errMsg, returnBody.Message)
} else {
assert.Equal(t, int32(200), returnBody.Code)
}
})
}
@ -742,7 +740,15 @@ func TestMethodGet(t *testing.T) {
}, nil).Twice()
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{Status: commonErrorStatus}, nil).Once()
mp.EXPECT().GetLoadState(mock.Anything, mock.Anything).Return(&milvuspb.GetLoadStateResponse{Status: commonErrorStatus}, nil).Once()
mp.EXPECT().GetLoadState(mock.Anything, mock.Anything).Return(&DefaultLoadStateResp, nil).Times(3)
mp.EXPECT().GetLoadState(mock.Anything, mock.Anything).Return(&DefaultLoadStateResp, nil).Times(4)
mp.EXPECT().GetLoadState(mock.Anything, mock.Anything).Return(&milvuspb.GetLoadStateResponse{
Status: &StatusSuccess,
State: commonpb.LoadState_LoadStateNotExist,
}, nil).Once()
mp.EXPECT().GetLoadState(mock.Anything, mock.Anything).Return(&milvuspb.GetLoadStateResponse{
Status: &StatusSuccess,
State: commonpb.LoadState_LoadStateNotLoad,
}, nil).Once()
mp.EXPECT().DescribeIndex(mock.Anything, mock.Anything).Return(&milvuspb.DescribeIndexResponse{Status: commonErrorStatus}, nil).Once()
mp.EXPECT().DescribeIndex(mock.Anything, mock.Anything).Return(&DefaultDescIndexesReqp, nil).Times(3)
mp.EXPECT().DescribeIndex(mock.Anything, mock.Anything).Return(nil, merr.WrapErrIndexNotFoundForCollection(DefaultCollectionName)).Once()
@ -765,6 +771,10 @@ func TestMethodGet(t *testing.T) {
Status: commonSuccessStatus,
Progress: int64(77),
}, nil).Once()
mp.EXPECT().GetLoadingProgress(mock.Anything, mock.Anything).Return(&milvuspb.GetLoadingProgressResponse{
Status: commonSuccessStatus,
Progress: int64(100),
}, nil).Once()
mp.EXPECT().GetLoadingProgress(mock.Anything, mock.Anything).Return(&milvuspb.GetLoadingProgressResponse{Status: commonErrorStatus}, nil).Once()
mp.EXPECT().ShowPartitions(mock.Anything, mock.Anything).Return(&milvuspb.ShowPartitionsResponse{
Status: &StatusSuccess,
@ -865,6 +875,17 @@ func TestMethodGet(t *testing.T) {
queryTestCases = append(queryTestCases, rawTestCase{
path: versionalV2(CollectionCategory, LoadStateAction),
})
queryTestCases = append(queryTestCases, rawTestCase{
path: versionalV2(CollectionCategory, LoadStateAction),
})
queryTestCases = append(queryTestCases, rawTestCase{
path: versionalV2(CollectionCategory, LoadStateAction),
errCode: 100,
errMsg: "collection not found[collection=book]",
})
queryTestCases = append(queryTestCases, rawTestCase{
path: versionalV2(CollectionCategory, LoadStateAction),
})
queryTestCases = append(queryTestCases, rawTestCase{
path: versionalV2(PartitionCategory, ListAction),
})
@ -906,7 +927,7 @@ func TestMethodGet(t *testing.T) {
})
for _, testcase := range queryTestCases {
t.Run("query", func(t *testing.T) {
t.Run(testcase.path, func(t *testing.T) {
bodyReader := bytes.NewReader([]byte(`{` +
`"collectionName": "` + DefaultCollectionName + `",` +
`"partitionName": "` + DefaultPartitionName + `",` +
@ -922,11 +943,9 @@ func TestMethodGet(t *testing.T) {
returnBody := &ReturnErrMsg{}
err := json.Unmarshal(w.Body.Bytes(), returnBody)
assert.Nil(t, err)
assert.Equal(t, testcase.errCode, returnBody.Code)
if testcase.errCode != 0 {
assert.Equal(t, testcase.errCode, returnBody.Code)
assert.Equal(t, testcase.errMsg, returnBody.Message)
} else {
assert.Equal(t, int32(http.StatusOK), returnBody.Code)
}
fmt.Println(w.Body.String())
})
@ -973,7 +992,7 @@ func TestMethodDelete(t *testing.T) {
path: versionalV2(AliasCategory, DropAction),
})
for _, testcase := range queryTestCases {
t.Run("query", func(t *testing.T) {
t.Run(testcase.path, func(t *testing.T) {
bodyReader := bytes.NewReader([]byte(`{"collectionName": "` + DefaultCollectionName + `", "partitionName": "` + DefaultPartitionName +
`", "userName": "` + util.UserRoot + `", "roleName": "` + util.RoleAdmin + `", "indexName": "` + DefaultIndexName + `", "aliasName": "` + DefaultAliasName + `"}`))
req := httptest.NewRequest(http.MethodPost, testcase.path, bodyReader)
@ -983,11 +1002,9 @@ func TestMethodDelete(t *testing.T) {
returnBody := &ReturnErrMsg{}
err := json.Unmarshal(w.Body.Bytes(), returnBody)
assert.Nil(t, err)
assert.Equal(t, testcase.errCode, returnBody.Code)
if testcase.errCode != 0 {
assert.Equal(t, testcase.errCode, returnBody.Code)
assert.Equal(t, testcase.errMsg, returnBody.Message)
} else {
assert.Equal(t, int32(http.StatusOK), returnBody.Code)
}
fmt.Println(w.Body.String())
})
@ -1104,7 +1121,7 @@ func TestMethodPost(t *testing.T) {
})
for _, testcase := range queryTestCases {
t.Run("query", func(t *testing.T) {
t.Run(testcase.path, func(t *testing.T) {
bodyReader := bytes.NewReader([]byte(`{` +
`"collectionName": "` + DefaultCollectionName + `", "newCollectionName": "test", "newDbName": "",` +
`"partitionName": "` + DefaultPartitionName + `", "partitionNames": ["` + DefaultPartitionName + `"],` +
@ -1123,11 +1140,9 @@ func TestMethodPost(t *testing.T) {
returnBody := &ReturnErrMsg{}
err := json.Unmarshal(w.Body.Bytes(), returnBody)
assert.Nil(t, err)
assert.Equal(t, testcase.errCode, returnBody.Code)
if testcase.errCode != 0 {
assert.Equal(t, testcase.errCode, returnBody.Code)
assert.Equal(t, testcase.errMsg, returnBody.Message)
} else {
assert.Equal(t, int32(http.StatusOK), returnBody.Code)
}
fmt.Println(w.Body.String())
})
@ -1222,7 +1237,7 @@ func TestDML(t *testing.T) {
})
for _, testcase := range queryTestCases {
t.Run("query", func(t *testing.T) {
t.Run(testcase.path, func(t *testing.T) {
bodyReader := bytes.NewReader(testcase.requestBody)
req := httptest.NewRequest(http.MethodPost, versionalV2(EntityCategory, testcase.path), bodyReader)
w := httptest.NewRecorder()
@ -1231,11 +1246,51 @@ func TestDML(t *testing.T) {
returnBody := &ReturnErrMsg{}
err := json.Unmarshal(w.Body.Bytes(), returnBody)
assert.Nil(t, err)
assert.Equal(t, testcase.errCode, returnBody.Code)
if testcase.errCode != 0 {
assert.Equal(t, testcase.errMsg, returnBody.Message)
}
fmt.Println(w.Body.String())
})
}
}
func TestAllowInt64(t *testing.T) {
paramtable.Init()
mp := mocks.NewMockProxy(t)
testEngine := initHTTPServerV2(mp, false)
queryTestCases := []requestBodyTestCase{}
queryTestCases = append(queryTestCases, requestBodyTestCase{
path: InsertAction,
requestBody: []byte(`{"collectionName": "book", "data": [{"book_id": 0, "word_count": 0, "book_intro": [0.11825, 0.6]}]}`),
})
queryTestCases = append(queryTestCases, requestBodyTestCase{
path: UpsertAction,
requestBody: []byte(`{"collectionName": "book", "data": [{"book_id": 0, "word_count": 0, "book_intro": [0.11825, 0.6]}]}`),
})
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}, nil).Twice()
mp.EXPECT().Insert(mock.Anything, mock.Anything).Return(&milvuspb.MutationResult{Status: commonSuccessStatus, InsertCnt: int64(0), IDs: &schemapb.IDs{IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{}}}}}, nil).Once()
mp.EXPECT().Upsert(mock.Anything, mock.Anything).Return(&milvuspb.MutationResult{Status: commonSuccessStatus, UpsertCnt: int64(0), IDs: &schemapb.IDs{IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{}}}}}, nil).Once()
for _, testcase := range queryTestCases {
t.Run(testcase.path, func(t *testing.T) {
bodyReader := bytes.NewReader(testcase.requestBody)
req := httptest.NewRequest(http.MethodPost, versionalV2(EntityCategory, testcase.path), bodyReader)
req.Header.Set(HTTPHeaderAllowInt64, "true")
w := httptest.NewRecorder()
testEngine.ServeHTTP(w, req)
assert.Equal(t, http.StatusOK, w.Code)
returnBody := &ReturnErrMsg{}
err := json.Unmarshal(w.Body.Bytes(), returnBody)
assert.Nil(t, err)
assert.Equal(t, testcase.errCode, returnBody.Code)
if testcase.errCode != 0 {
assert.Equal(t, testcase.errCode, returnBody.Code)
assert.Equal(t, testcase.errMsg, returnBody.Message)
} else {
assert.Equal(t, int32(http.StatusOK), returnBody.Code)
}
fmt.Println(w.Body.String())
})
@ -1244,18 +1299,33 @@ func TestDML(t *testing.T) {
func TestSearchV2(t *testing.T) {
paramtable.Init()
outputFields := []string{FieldBookID, FieldWordCount, "author", "date"}
mp := mocks.NewMockProxy(t)
mp.EXPECT().DescribeCollection(mock.Anything, mock.Anything).Return(&milvuspb.DescribeCollectionResponse{
CollectionName: DefaultCollectionName,
Schema: generateCollectionSchema(schemapb.DataType_Int64),
ShardsNum: ShardNumDefault,
Status: &StatusSuccess,
}, nil).Times(10)
}, nil).Times(12)
mp.EXPECT().Search(mock.Anything, mock.Anything).Return(&milvuspb.SearchResults{Status: commonSuccessStatus, Results: &schemapb.SearchResultData{
TopK: int64(3),
OutputFields: outputFields,
FieldsData: generateFieldData(),
Ids: generateIDs(schemapb.DataType_Int64, 3),
Scores: DefaultScores,
}}, nil).Once()
mp.EXPECT().Search(mock.Anything, mock.Anything).Return(&milvuspb.SearchResults{Status: commonSuccessStatus, Results: &schemapb.SearchResultData{TopK: int64(0)}}, nil).Times(3)
mp.EXPECT().Search(mock.Anything, mock.Anything).Return(&milvuspb.SearchResults{Status: &commonpb.Status{
ErrorCode: 1700, // ErrFieldNotFound
Reason: "groupBy field not found in schema: field not found[field=test]",
}}, nil).Once()
mp.EXPECT().HybridSearch(mock.Anything, mock.Anything).Return(&milvuspb.SearchResults{Status: commonSuccessStatus, Results: &schemapb.SearchResultData{
TopK: int64(3),
OutputFields: outputFields,
FieldsData: generateFieldData(),
Ids: generateIDs(schemapb.DataType_Int64, 3),
Scores: DefaultScores,
}}, nil).Once()
mp.EXPECT().HybridSearch(mock.Anything, mock.Anything).Return(&milvuspb.SearchResults{Status: commonSuccessStatus, Results: &schemapb.SearchResultData{TopK: int64(0)}}, nil).Times(3)
collSchema := generateCollectionSchema(schemapb.DataType_Int64)
binaryVectorField := generateVectorFieldSchema(schemapb.DataType_BinaryVector)
@ -1283,6 +1353,10 @@ func TestSearchV2(t *testing.T) {
path: SearchAction,
requestBody: []byte(`{"collectionName": "book", "data": [[0.1, 0.2]], "filter": "book_id in [2, 4, 6, 8]", "limit": 4, "outputFields": ["word_count"]}`),
})
queryTestCases = append(queryTestCases, requestBodyTestCase{
path: SearchAction,
requestBody: []byte(`{"collectionName": "book", "data": [[0.1, 0.2]], "filter": "book_id in [2, 4, 6, 8]", "limit": 4, "outputFields": ["word_count"]}`),
})
queryTestCases = append(queryTestCases, requestBodyTestCase{
path: SearchAction,
requestBody: []byte(`{"collectionName": "book", "data": [[0.1, 0.2]], "filter": "book_id in [2, 4, 6, 8]", "limit": 4, "outputFields": ["word_count"], "params": {"radius":0.9}}`),
@ -1313,6 +1387,10 @@ func TestSearchV2(t *testing.T) {
path: AdvancedSearchAction,
requestBody: []byte(`{"collectionName": "hello_milvus", "search": [{"data": [[0.1, 0.2]], "annsField": "book_intro", "metricType": "L2", "limit": 3}, {"data": [[0.1, 0.2]], "annsField": "book_intro", "metricType": "L2", "limit": 3}], "rerank": {"strategy": "weighted", "params": {"weights": [0.9, 0.8]}}}`),
})
queryTestCases = append(queryTestCases, requestBodyTestCase{
path: AdvancedSearchAction,
requestBody: []byte(`{"collectionName": "hello_milvus", "search": [{"data": [[0.1, 0.2]], "annsField": "book_intro", "metricType": "L2", "limit": 3}, {"data": [[0.1, 0.2]], "annsField": "book_intro", "metricType": "L2", "limit": 3}], "rerank": {"strategy": "weighted", "params": {"weights": [0.9, 0.8]}}}`),
})
queryTestCases = append(queryTestCases, requestBodyTestCase{
path: HybridSearchAction,
requestBody: []byte(`{"collectionName": "hello_milvus", "search": [{"data": [[0.1, 0.2]], "annsField": "book_intro", "metricType": "L2", "limit": 3}, {"data": [[0.1, 0.2]], "annsField": "book_intro", "metricType": "L2", "limit": 3}], "rerank": {"strategy": "weighted", "params": {"weights": [0.9, 0.8]}}}`),
@ -1410,7 +1488,7 @@ func TestSearchV2(t *testing.T) {
})
for _, testcase := range queryTestCases {
t.Run("search", func(t *testing.T) {
t.Run(testcase.path, func(t *testing.T) {
bodyReader := bytes.NewReader(testcase.requestBody)
req := httptest.NewRequest(http.MethodPost, versionalV2(EntityCategory, testcase.path), bodyReader)
w := httptest.NewRecorder()
@ -1419,11 +1497,9 @@ func TestSearchV2(t *testing.T) {
returnBody := &ReturnErrMsg{}
err := json.Unmarshal(w.Body.Bytes(), returnBody)
assert.Nil(t, err)
assert.Equal(t, testcase.errCode, returnBody.Code)
if testcase.errCode != 0 {
assert.Equal(t, testcase.errCode, returnBody.Code)
assert.Equal(t, testcase.errMsg, returnBody.Message)
} else {
assert.Equal(t, int32(http.StatusOK), returnBody.Code)
}
fmt.Println(w.Body.String())
})


@ -1,7 +1,6 @@
package httpserver
import (
"net/http"
"strconv"
"github.com/gin-gonic/gin"
@ -357,14 +356,14 @@ func (req *AliasCollectionReq) GetAliasName() string {
}
func wrapperReturnHas(has bool) gin.H {
return gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{HTTPReturnHas: has}}
return gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{HTTPReturnHas: has}}
}
func wrapperReturnList(names []string) gin.H {
if names == nil {
return gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: []string{}}
return gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: []string{}}
}
return gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: names}
return gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: names}
}
func wrapperReturnRowCount(pairs []*commonpb.KeyValuePair) gin.H {
@ -376,15 +375,15 @@ func wrapperReturnRowCount(pairs []*commonpb.KeyValuePair) gin.H {
}
rowCount, err := strconv.ParseInt(rowCountValue, 10, 64)
if err != nil {
return gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{HTTPReturnRowCount: rowCountValue}}
return gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{HTTPReturnRowCount: rowCountValue}}
}
return gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{HTTPReturnRowCount: rowCount}}
return gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{HTTPReturnRowCount: rowCount}}
}
func wrapperReturnDefault() gin.H {
return gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{}}
return gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{}}
}
func wrapperReturnDefaultWithCost(cost int) gin.H {
return gin.H{HTTPReturnCode: http.StatusOK, HTTPReturnData: gin.H{}, HTTPReturnCost: cost}
return gin.H{HTTPReturnCode: commonpb.ErrorCode_Success, HTTPReturnData: gin.H{}, HTTPReturnCost: cost}
}


@ -1084,7 +1084,7 @@ func buildQueryResp(rowsNum int64, needFields []string, fieldDataList []*schemap
var queryResp []map[string]interface{}
columnNum := len(fieldDataList)
if rowsNum == int64(0) {
if rowsNum == int64(0) { // always
if columnNum > 0 {
switch fieldDataList[0].Type {
case schemapb.DataType_Bool:


@ -15,7 +15,7 @@ def logger_request_response(response, url, tt, headers, data, str_data, str_resp
data = data[:1000] + "..." + data[-1000:]
try:
if response.status_code == 200:
if ('code' in response.json() and response.json()["code"] == 200) or (
if ('code' in response.json() and response.json()["code"] == 0) or (
'Code' in response.json() and response.json()["Code"] == 0):
logger.debug(
f"\nmethod: {method}, \nurl: {url}, \ncost time: {tt}, \nheader: {headers}, \npayload: {str_data}, \nresponse: {str_response}")
@ -612,7 +612,7 @@ class RoleClient(Requests):
url = f'{self.endpoint}/v2/vectordb/roles/create'
response = self.post(url, headers=self.update_headers(), data=payload)
res = response.json()
if res["code"] == 200:
if res["code"] == 0:
self.role_names.append(payload["roleName"])
return res


@ -80,7 +80,7 @@ class TestBase(Base):
"vectorField": "vector",
}
rsp = self.collection_client.collection_create(schema_payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
self.wait_collection_load_completed(collection_name)
batch_size = batch_size
batch = nb // batch_size
@ -97,7 +97,7 @@ class TestBase(Base):
body_size = sys.getsizeof(json.dumps(payload))
logger.debug(f"body size: {body_size / 1024 / 1024} MB")
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
if return_insert_id:
insert_ids.extend(rsp['data']['insertIds'])
# insert remainder data
@ -109,7 +109,7 @@ class TestBase(Base):
"data": data
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
if return_insert_id:
insert_ids.extend(rsp['data']['insertIds'])
if return_insert_id:


@ -38,7 +38,7 @@ class TestAliasE2E(TestBase):
"aliasName": alias_name
}
rsp = self.alias_client.create_alias(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# list alias after create
rsp = self.alias_client.list_alias()
assert alias_name in rsp['data']


@ -33,14 +33,14 @@ class TestCreateCollection(TestBase):
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert rsp['data']['autoId'] is False
assert rsp['data']['enableDynamicField'] is True
@ -68,10 +68,10 @@ class TestCreateCollection(TestBase):
if id_type == "VarChar":
collection_payload["params"] = {"max_length": "256"}
rsp = self.collection_client.collection_create(collection_payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
fields = [f["name"] for f in rsp['data']['fields']]
assert primary_field in fields
@ -113,7 +113,7 @@ class TestCreateCollection(TestBase):
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
@ -131,7 +131,7 @@ class TestCreateCollection(TestBase):
for d in rsp["data"]["properties"]:
if d["key"] == "collection.ttl.seconds":
ttl_seconds_actual = int(d["value"])
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['enableDynamicField'] == False
assert rsp['data']['collectionName'] == name
assert rsp['data']['shardsNum'] == num_shards
@ -178,7 +178,7 @@ class TestCreateCollection(TestBase):
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
@ -196,7 +196,7 @@ class TestCreateCollection(TestBase):
for d in rsp["data"]["properties"]:
if d["key"] == "collection.ttl.seconds":
ttl_seconds_actual = int(d["value"])
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert rsp['data']['shardsNum'] == num_shards
assert rsp['data']['partitionsNum'] == num_partitions
@ -235,7 +235,7 @@ class TestCreateCollection(TestBase):
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
@ -244,7 +244,7 @@ class TestCreateCollection(TestBase):
logger.info(f"schema: {c.schema}")
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert rsp['data']['autoId'] == auto_id
assert c.schema.auto_id == auto_id
@ -288,7 +288,7 @@ class TestCreateCollection(TestBase):
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
@ -296,7 +296,7 @@ class TestCreateCollection(TestBase):
# describe collection
time.sleep(10)
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
# assert index created
indexes = rsp['data']['indexes']
@ -339,7 +339,7 @@ class TestCreateCollection(TestBase):
# describe collection
time.sleep(10)
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
# assert index created
indexes = rsp['data']['indexes']
@ -375,7 +375,7 @@ class TestCreateCollection(TestBase):
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
@ -383,7 +383,7 @@ class TestCreateCollection(TestBase):
# describe collection
time.sleep(10)
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
# assert index created
indexes = rsp['data']['indexes']
@ -426,7 +426,7 @@ class TestCreateCollection(TestBase):
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
@ -435,7 +435,7 @@ class TestCreateCollection(TestBase):
logger.info(f"schema: {c.schema}")
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert len(rsp['data']['fields']) == len(c.schema.fields)
@ -472,7 +472,7 @@ class TestCreateCollection(TestBase):
}
logging.info(f"create collection {name} with payload: {payload}")
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
@ -481,7 +481,7 @@ class TestCreateCollection(TestBase):
logger.info(f"schema: {c.schema}")
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert len(rsp['data']['fields']) == len(c.schema.fields)
@ -518,7 +518,7 @@ class TestCreateCollection(TestBase):
time.sleep(10)
success_cnt = 0
for rsp in concurrent_rsp:
if rsp["code"] == 200:
if rsp['code'] == 0:
success_cnt += 1
logger.info(concurrent_rsp)
assert success_cnt == 10
@ -527,7 +527,7 @@ class TestCreateCollection(TestBase):
assert name in all_collections
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
def test_create_collections_concurrent_with_different_param(self):
@ -565,7 +565,7 @@ class TestCreateCollection(TestBase):
time.sleep(10)
success_cnt = 0
for rsp in concurrent_rsp:
if rsp["code"] == 200:
if rsp['code'] == 0:
success_cnt += 1
logger.info(concurrent_rsp)
assert success_cnt == 1
@ -574,7 +574,7 @@ class TestCreateCollection(TestBase):
assert name in all_collections
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
@ -659,7 +659,7 @@ class TestHasCollections(TestBase):
}
time.sleep(1)
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
name_list.append(name)
rsp = client.collection_list()
all_collections = rsp['data']
@ -706,11 +706,11 @@ class TestGetCollectionStats(TestBase):
}
time.sleep(1)
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# describe collection
client.collection_describe(collection_name=name)
rsp = client.collection_stats(collection_name=name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['rowCount'] == 0
# insert data
nb = 3000
@ -746,7 +746,7 @@ class TestLoadReleaseCollection(TestBase):
}
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# create index before load
index_params = [{"fieldName": "book_intro", "indexName": "book_intro_vector", "metricType": "L2"}]
payload = {
@ -762,7 +762,7 @@ class TestLoadReleaseCollection(TestBase):
# describe collection
client.collection_describe(collection_name=name)
rsp = client.collection_load(collection_name=name)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_load_state(collection_name=name)
assert rsp['data']['loadState'] in ["LoadStateLoaded", "LoadStateLoading"]
time.sleep(5)
@ -793,11 +793,11 @@ class TestGetCollectionLoadState(TestBase):
"dimension": dim,
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# describe collection
client.collection_describe(collection_name=name)
rsp = client.collection_load_state(collection_name=name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['loadState'] in ["LoadStateNotLoad", "LoadStateLoading"]
# insert data
nb = 3000
@ -835,7 +835,7 @@ class TestListCollections(TestBase):
}
time.sleep(1)
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
name_list.append(name)
rsp = client.collection_list()
all_collections = rsp['data']
@ -863,7 +863,7 @@ class TestListCollectionsNegative(TestBase):
}
time.sleep(1)
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
name_list.append(name)
client = self.collection_client
client.api_key = "illegal_api_key"
@ -889,13 +889,13 @@ class TestDescribeCollection(TestBase):
"metricType": "L2"
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert rsp['data']['autoId'] is False
assert rsp['data']['enableDynamicField'] is True
@ -936,7 +936,7 @@ class TestDescribeCollection(TestBase):
assert name in all_collections
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
for field in rsp['data']['fields']:
@ -964,7 +964,7 @@ class TestDescribeCollectionNegative(TestBase):
"dimension": dim,
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
@ -987,7 +987,7 @@ class TestDescribeCollectionNegative(TestBase):
"dimension": dim,
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
@ -1017,7 +1017,7 @@ class TestDropCollection(TestBase):
"metricType": "L2"
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
clo_list.append(name)
rsp = self.collection_client.collection_list()
all_collections = rsp['data']
@ -1029,7 +1029,7 @@ class TestDropCollection(TestBase):
"collectionName": name,
}
rsp = self.collection_client.collection_drop(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_list()
all_collections = rsp['data']
for name in clo_list:
@ -1052,7 +1052,7 @@ class TestDropCollectionNegative(TestBase):
"dimension": dim,
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
@ -1081,7 +1081,7 @@ class TestDropCollectionNegative(TestBase):
"dimension": dim,
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
@ -1091,7 +1091,7 @@ class TestDropCollectionNegative(TestBase):
"collectionName": invalid_name,
}
rsp = client.collection_drop(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
@pytest.mark.L0
@ -1112,7 +1112,7 @@ class TestRenameCollection(TestBase):
"dimension": dim,
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
@ -1122,7 +1122,7 @@ class TestRenameCollection(TestBase):
"newCollectionName": new_name,
}
rsp = client.collection_rename(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert new_name in all_collections


@ -76,13 +76,13 @@ class TestCreateIndex(TestBase):
if index_type == "AUTOINDEX":
payload["indexParams"][0]["params"] = {"index_type": "AUTOINDEX"}
rsp = self.index_client.index_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
time.sleep(10)
# list index, expect not empty
rsp = self.index_client.index_list(collection_name=name)
# describe index
rsp = self.index_client.index_describe(collection_name=name, index_name="book_intro_vector")
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == len(payload['indexParams'])
expected_index = sorted(payload['indexParams'], key=lambda x: x['fieldName'])
actual_index = sorted(rsp['data'], key=lambda x: x['fieldName'])
@ -99,7 +99,7 @@ class TestCreateIndex(TestBase):
"indexName": actual_index[i]['indexName']
}
rsp = self.index_client.index_drop(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# list index, expect empty
rsp = self.index_client.index_list(collection_name=name)
assert rsp['data'] == []
@ -156,13 +156,13 @@ class TestCreateIndex(TestBase):
"params": {"index_type": "INVERTED"}}]
}
rsp = self.index_client.index_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
time.sleep(10)
# list index, expect not empty
rsp = self.index_client.index_list(collection_name=name)
# describe index
rsp = self.index_client.index_describe(collection_name=name, index_name="word_count_vector")
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == len(payload['indexParams'])
expected_index = sorted(payload['indexParams'], key=lambda x: x['fieldName'])
actual_index = sorted(rsp['data'], key=lambda x: x['fieldName'])
@ -226,13 +226,13 @@ class TestCreateIndex(TestBase):
if index_type == "BIN_IVF_FLAT":
payload["indexParams"][0]["params"]["nlist"] = "16384"
rsp = self.index_client.index_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
time.sleep(10)
# list index, expect not empty
rsp = self.index_client.index_list(collection_name=name)
# describe index
rsp = self.index_client.index_describe(collection_name=name, index_name=index_name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == len(payload['indexParams'])
expected_index = sorted(payload['indexParams'], key=lambda x: x['fieldName'])
actual_index = sorted(rsp['data'], key=lambda x: x['fieldName'])


@ -114,7 +114,7 @@ class TestCreateImportJob(TestBase):
"outputFields": ["*"],
}
rsp = self.vector_client.vector_query(payload)
assert rsp["code"] == 200
assert rsp['code'] == 0
@pytest.mark.parametrize("insert_num", [5000])
@pytest.mark.parametrize("import_task_num", [1])
@ -205,7 +205,7 @@ class TestCreateImportJob(TestBase):
"outputFields": ["*"],
}
rsp = self.vector_client.vector_query(payload)
assert rsp["code"] == 200
assert rsp['code'] == 0
@pytest.mark.parametrize("insert_num", [5000])
@pytest.mark.parametrize("import_task_num", [1])
@ -306,7 +306,7 @@ class TestCreateImportJob(TestBase):
"outputFields": ["*"],
}
rsp = self.vector_client.vector_query(payload)
assert rsp["code"] == 200
assert rsp['code'] == 0
def test_job_import_multi_json_file(self):
# create collection
@ -720,15 +720,15 @@ class TestCreateImportJob(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# create restore collection
restore_collection_name = f"{name}_restore"
payload["collectionName"] = restore_collection_name
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -772,7 +772,7 @@ class TestCreateImportJob(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# flush data to generate binlog file
c = Collection(name)
@ -781,7 +781,7 @@ class TestCreateImportJob(TestBase):
# query data to make sure the data is inserted
rsp = self.vector_client.vector_query({"collectionName": name, "filter": "user_id > 0", "limit": 50})
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 50
# get collection id
c = Collection(name)
@ -802,7 +802,7 @@ class TestCreateImportJob(TestBase):
if is_partition_key:
payload["partitionName"] = "_default_0"
rsp = self.import_job_client.create_import_jobs(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# list import job
payload = {
"collectionName": restore_collection_name,
@ -1528,7 +1528,7 @@ class TestCreateImportJobNegative(TestBase):
"outputFields": ["*"],
}
rsp = self.vector_client.vector_query(payload)
assert rsp["code"] == 200
assert rsp['code'] == 0
@pytest.mark.L1


@ -36,13 +36,13 @@ class TestPartitionE2E(TestBase):
{"fieldName": "book_intro", "indexName": "book_intro_vector", "metricType": f"{metric_type}"}]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = client.collection_list()
all_collections = rsp['data']
assert name in all_collections
# describe collection
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
# insert data to default partition
data = []
@ -59,11 +59,11 @@ class TestPartitionE2E(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# create partition
partition_name = "test_partition"
rsp = self.partition_client.partition_create(collection_name=name, partition_name=partition_name)
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data to partition
data = []
for j in range(3000, 6000):
@ -80,45 +80,45 @@ class TestPartitionE2E(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# create partition again
rsp = self.partition_client.partition_create(collection_name=name, partition_name=partition_name)
# list partitions
rsp = self.partition_client.partition_list(collection_name=name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert partition_name in rsp['data']
# has partition
rsp = self.partition_client.partition_has(collection_name=name, partition_name=partition_name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']["has"] is True
# flush and get partition statistics
c = Collection(name=name)
c.flush()
rsp = self.partition_client.partition_stats(collection_name=name, partition_name=partition_name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['rowCount'] == 3000
# release partition
rsp = self.partition_client.partition_release(collection_name=name, partition_names=[partition_name])
assert rsp['code'] == 200
assert rsp['code'] == 0
# release partition again
rsp = self.partition_client.partition_release(collection_name=name, partition_names=[partition_name])
assert rsp['code'] == 200
assert rsp['code'] == 0
# load partition
rsp = self.partition_client.partition_load(collection_name=name, partition_names=[partition_name])
assert rsp['code'] == 200
assert rsp['code'] == 0
# load partition again
rsp = self.partition_client.partition_load(collection_name=name, partition_names=[partition_name])
assert rsp['code'] == 200
assert rsp['code'] == 0
# drop partition when it is loaded
rsp = self.partition_client.partition_drop(collection_name=name, partition_name=partition_name)
assert rsp['code'] == 65535
# drop partition after release
rsp = self.partition_client.partition_release(collection_name=name, partition_names=[partition_name])
rsp = self.partition_client.partition_drop(collection_name=name, partition_name=partition_name)
assert rsp['code'] == 200
assert rsp['code'] == 0
# has partition
rsp = self.partition_client.partition_has(collection_name=name, partition_name=partition_name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']["has"] is False


@ -37,7 +37,7 @@ class TestRestfulSdkCompatibility(TestBase):
all_collections = rsp['data']
assert name in all_collections
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert rsp['data']['enableDynamicField'] == enable_dynamic
assert rsp['data']['load'] == "LoadStateNotLoad"
@ -57,7 +57,7 @@ class TestRestfulSdkCompatibility(TestBase):
"metricType": metric_type,
}
rsp = client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
collection = Collection(name=name)
logger.info(collection.schema)
field_names = [field.name for field in collection.schema.fields]
@ -89,7 +89,7 @@ class TestRestfulSdkCompatibility(TestBase):
all_collections = rsp['data']
assert name in all_collections
rsp = client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['collectionName'] == name
assert len(rsp['data']['indexes']) == 1 and rsp['data']['indexes'][0]['metricType'] == metric_type
@ -160,7 +160,7 @@ class TestRestfulSdkCompatibility(TestBase):
"data": data,
}
rsp = client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
assert len(rsp['data']["insertIds"]) == nb
@ -196,7 +196,7 @@ class TestRestfulSdkCompatibility(TestBase):
}
# search data by restful
rsp = client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 10
def test_collection_create_by_sdk_query_vector_by_restful(self):
@ -230,7 +230,7 @@ class TestRestfulSdkCompatibility(TestBase):
}
# query data by restful
rsp = client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 10
def test_collection_create_by_restful_search_vector_by_sdk(self):


@ -43,7 +43,7 @@ class TestRoleE2E(TestBase):
assert role_name in rsp['data']
# describe role
rsp = self.role_client.role_describe(role_name)
assert rsp['code'] == 200
assert rsp['code'] == 0
# grant privilege to role
payload = {
"roleName": role_name,
@ -52,7 +52,7 @@ class TestRoleE2E(TestBase):
"privilege": "CreateCollection"
}
rsp = self.role_client.role_grant(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# describe role after grant
rsp = self.role_client.role_describe(role_name)
privileges = []


@ -56,7 +56,7 @@ class TestUserE2E(TestBase):
"newPassword": new_password
}
rsp = self.user_client.user_password_update(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# drop user
payload = {
"userName": user_name
@ -124,7 +124,7 @@ class TestUserE2E(TestBase):
}
self.collection_client.api_key = f"{user_name}:{password}"
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
@pytest.mark.L1
@ -158,7 +158,7 @@ class TestUserNegative(TestBase):
for i in range(2):
rsp = self.user_client.user_create(payload)
if i == 0:
assert rsp['code'] == 200
assert rsp['code'] == 0
else:
assert rsp['code'] == 65535
assert "user already exists" in rsp['message']


@ -33,10 +33,10 @@ class TestInsertVector(TestBase):
"metricType": "L2"
}
rsp = self.collection_client.collection_create(collection_payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = get_data_by_payload(collection_payload, nb)
@ -47,7 +47,7 @@ class TestInsertVector(TestBase):
body_size = sys.getsizeof(json.dumps(payload))
logger.info(f"body size: {body_size / 1024 / 1024} MB")
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
@pytest.mark.parametrize("insert_round", [1])
@ -92,10 +92,10 @@ class TestInsertVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -139,11 +139,11 @@ class TestInsertVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# query data to make sure the data is inserted
rsp = self.vector_client.vector_query({"collectionName": name, "filter": "user_id > 0", "limit": 50})
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 50
@pytest.mark.parametrize("insert_round", [1])
@ -187,10 +187,10 @@ class TestInsertVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -224,7 +224,7 @@ class TestInsertVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
c = Collection(name)
res = c.query(
@ -235,7 +235,7 @@ class TestInsertVector(TestBase):
logger.info(f"res: {res}")
# query data to make sure the data is inserted
rsp = self.vector_client.vector_query({"collectionName": name, "filter": "user_id > 0", "limit": 50})
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 50
@pytest.mark.parametrize("insert_round", [1])
@ -280,10 +280,10 @@ class TestInsertVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
json_value = [
1,
1.0,
@ -336,11 +336,11 @@ class TestInsertVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# query data to make sure the data is inserted
rsp = self.vector_client.vector_query({"collectionName": name, "filter": "user_id > 0", "limit": 50})
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 50
@ -359,9 +359,9 @@ class TestInsertVectorNegative(TestBase):
"dimension": dim,
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
nb = 10
data = [
@ -393,9 +393,9 @@ class TestInsertVectorNegative(TestBase):
"dimension": dim,
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
nb = 100
data = get_data_by_payload(payload, nb)
@ -421,9 +421,9 @@ class TestInsertVectorNegative(TestBase):
"dimension": dim,
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
nb = 10
data = get_data_by_payload(payload, nb)
@ -449,9 +449,9 @@ class TestInsertVectorNegative(TestBase):
"dimension": dim,
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
nb = 1
data = [
@ -493,10 +493,10 @@ class TestUpsertVector(TestBase):
"indexParams": [{"fieldName": "text_emb", "indexName": "text_emb_index", "metricType": "L2"}]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -516,7 +516,7 @@ class TestUpsertVector(TestBase):
body_size = sys.getsizeof(json.dumps(payload))
logger.info(f"body size: {body_size / 1024 / 1024} MB")
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
c = Collection(name)
c.flush()
@ -575,10 +575,10 @@ class TestUpsertVector(TestBase):
"indexParams": [{"fieldName": "text_emb", "indexName": "text_emb_index", "metricType": "L2"}]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
ids = []
# insert data
for i in range(insert_round):
@ -599,7 +599,7 @@ class TestUpsertVector(TestBase):
body_size = sys.getsizeof(json.dumps(payload))
logger.info(f"body size: {body_size / 1024 / 1024} MB")
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
ids.extend(rsp['data']['insertIds'])
c = Collection(name)
@ -682,10 +682,10 @@ class TestSearchVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -719,7 +719,7 @@ class TestSearchVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# search data
payload = {
@ -739,7 +739,7 @@ class TestSearchVector(TestBase):
"limit": 100,
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# assert no dup user_id
user_ids = [r["user_id"]for r in rsp['data']]
assert len(user_ids) == len(set(user_ids))
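
A minimal sketch (editor-added, not part of the diff) of the convention these assertions now rely on: the v2 RESTful payload reports success as code 0 (mirroring commonpb.ErrorCode_Success), and a search issued with several query vectors (nq > 1) returns the whole flattened result list, which may hold up to nq * limit rows. The vector_client below is assumed to expose the same vector_search(payload) helper used by the tests above; payload keys mirror those already in this file.

def check_multi_vector_search(vector_client, collection_name, query_vectors, limit=100):
    # payload keys mirror the search payloads used in the tests above
    payload = {
        "collectionName": collection_name,
        "data": query_vectors,   # nq query vectors submitted in one request
        "limit": limit,
        "outputFields": ["*"],
    }
    rsp = vector_client.vector_search(payload)
    # success is reported as 0, not as the HTTP status 200
    assert rsp["code"] == 0
    # the flattened result list may contain up to nq * limit entries,
    # so the whole list is validated rather than a single topk slice
    assert len(rsp["data"]) <= len(query_vectors) * limit
    return rsp["data"]
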
@ -776,10 +776,10 @@ class TestSearchVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -807,7 +807,7 @@ class TestSearchVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# search data
payload = {
@ -826,7 +826,7 @@ class TestSearchVector(TestBase):
"limit": 100,
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 100
@ -864,10 +864,10 @@ class TestSearchVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -896,7 +896,7 @@ class TestSearchVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# search data
payload = {
@ -915,7 +915,7 @@ class TestSearchVector(TestBase):
if groupingField:
payload["groupingField"] = groupingField
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
@ -952,10 +952,10 @@ class TestSearchVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -983,7 +983,7 @@ class TestSearchVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# flush data
c = Collection(name)
@ -1008,7 +1008,7 @@ class TestSearchVector(TestBase):
"limit": 100,
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 100
@pytest.mark.parametrize("metric_type", ["IP", "L2", "COSINE"])
@ -1028,7 +1028,7 @@ class TestSearchVector(TestBase):
"data": [vector_to_search],
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
limit = int(payload.get("limit", 100))
@ -1068,7 +1068,7 @@ class TestSearchVector(TestBase):
if sum_limit_offset > max_search_sum_limit_offset:
assert rsp['code'] == 65535
return
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
limit = int(payload.get("limit", 100))
@ -1109,7 +1109,7 @@ class TestSearchVector(TestBase):
if offset + limit > constant.MAX_SUM_OFFSET_AND_LIMIT:
assert rsp['code'] == 90126
return
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
assert len(res) == limit
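
As the surrounding assertions show, failure paths keep their Milvus error codes (for example 65535 or 90126) while success is now 0. A hedged sketch of a small helper a test could use to centralize that check; the "message" key is an assumption based on the handler's HTTPReturnMessage field, not something asserted in this diff.

def assert_rest_success(rsp):
    # success in the v2 RESTful payload is code 0; anything else carries a
    # Milvus error code (e.g. 65535, 90126) and, assumedly, a "message" field
    assert rsp.get("code") == 0, (
        f"request failed: code={rsp.get('code')}, message={rsp.get('message')}"
    )
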
@ -1142,7 +1142,7 @@ class TestSearchVector(TestBase):
"offset": 0,
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
assert len(res) <= limit
@ -1183,7 +1183,7 @@ class TestSearchVector(TestBase):
"offset": 0,
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
assert len(res) <= limit
@ -1230,7 +1230,7 @@ class TestSearchVector(TestBase):
"offset": 0,
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
assert len(res) <= limit
@ -1355,10 +1355,10 @@ class TestAdvancedSearchVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -1389,7 +1389,7 @@ class TestAdvancedSearchVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# advanced search data
@ -1420,7 +1420,7 @@ class TestAdvancedSearchVector(TestBase):
}
rsp = self.vector_client.vector_advanced_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 10
@ -1463,10 +1463,10 @@ class TestHybridSearchVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -1497,7 +1497,7 @@ class TestHybridSearchVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# advanced search data
@ -1528,7 +1528,7 @@ class TestHybridSearchVector(TestBase):
}
rsp = self.vector_client.vector_hybrid_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 10
@ -1579,10 +1579,10 @@ class TestQueryVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -1626,7 +1626,7 @@ class TestQueryVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# query data to make sure the data is inserted
# 1. query for int64
@ -1637,7 +1637,7 @@ class TestQueryVector(TestBase):
"outputFields": ["*"]
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 50
# 2. query for varchar
@ -1648,7 +1648,7 @@ class TestQueryVector(TestBase):
"outputFields": ["*"]
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 50
# 3. query for json
@ -1712,10 +1712,10 @@ class TestQueryVector(TestBase):
]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -1749,7 +1749,7 @@ class TestQueryVector(TestBase):
"data": data,
}
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
c = Collection(name)
res = c.query(
@ -1760,7 +1760,7 @@ class TestQueryVector(TestBase):
logger.info(f"res: {res}")
# query data to make sure the data is inserted
rsp = self.vector_client.vector_query({"collectionName": name, "filter": "user_id > 0", "limit": 50})
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 50
@pytest.mark.parametrize("expr", ["10+20 <= uid < 20+30", "uid in [1,2,3,4]",
@ -1797,7 +1797,7 @@ class TestQueryVector(TestBase):
output_fields.remove("vector")
time.sleep(5)
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
for r in res:
@ -1821,7 +1821,7 @@ class TestQueryVector(TestBase):
"outputFields": ["count(*)"]
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data'][0]['count(*)'] == 3000
@pytest.mark.xfail(reason="query by id is not supported")
@ -1837,7 +1837,7 @@ class TestQueryVector(TestBase):
"id": insert_ids,
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
@pytest.mark.parametrize("filter_expr", ["name > \"placeholder\"", "name like \"placeholder%\""])
@pytest.mark.parametrize("include_output_fields", [True, False])
@ -1872,7 +1872,7 @@ class TestQueryVector(TestBase):
if not include_output_fields:
payload.pop("outputFields")
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
assert len(res) <= limit
@ -1920,7 +1920,7 @@ class TestQueryVector(TestBase):
if sum_of_limit_offset > max_sum_of_limit_offset:
assert rsp['code'] == 1
return
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
assert len(res) <= limit
@ -1975,7 +1975,7 @@ class TestGetVector(TestBase):
"data": [vector_to_search],
}
rsp = self.vector_client.vector_search(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
limit = int(payload.get("limit", 100))
@ -1988,7 +1988,7 @@ class TestGetVector(TestBase):
"id": ids[0],
}
rsp = self.vector_client.vector_get(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {res}")
logger.info(f"res: {len(res)}")
@ -2015,7 +2015,7 @@ class TestGetVector(TestBase):
"filter": f"uid in {uids}",
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
ids = []
@ -2039,7 +2039,7 @@ class TestGetVector(TestBase):
"id": id_to_get
}
rsp = self.vector_client.vector_get(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
if isinstance(id_to_get, list):
if include_invalid_id:
@ -2077,7 +2077,7 @@ class TestDeleteVector(TestBase):
"id": insert_ids,
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
@pytest.mark.parametrize("id_field_type", ["list", "one"])
def test_delete_vector_by_pk_field_ids(self, id_field_type):
@ -2103,7 +2103,7 @@ class TestDeleteVector(TestBase):
"filter": f"id == {id_to_delete}"
}
rsp = self.vector_client.vector_delete(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
# verify data deleted by get
payload = {
"collectionName": name,
@ -2130,7 +2130,7 @@ class TestDeleteVector(TestBase):
"filter": f"uid in {uids}",
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
ids = []
@ -2158,7 +2158,7 @@ class TestDeleteVector(TestBase):
}
rsp = self.vector_client.vector_delete(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
logger.info(f"delete res: {rsp}")
# verify data deleted
@ -2170,7 +2170,7 @@ class TestDeleteVector(TestBase):
}
time.sleep(5)
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert len(rsp['data']) == 0
def test_delete_vector_by_custom_pk_field(self):
@ -2192,10 +2192,10 @@ class TestDeleteVector(TestBase):
"indexParams": [{"fieldName": "text_emb", "indexName": "text_emb_index", "metricType": "L2"}]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
pk_values = []
# insert data
for i in range(insert_round):
@ -2217,7 +2217,7 @@ class TestDeleteVector(TestBase):
body_size = sys.getsizeof(json.dumps(payload))
logger.info(f"body size: {body_size / 1024 / 1024} MB")
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# query data before delete
c = Collection(name)
@ -2255,10 +2255,10 @@ class TestDeleteVector(TestBase):
"indexParams": [{"fieldName": "text_emb", "indexName": "text_emb_index", "metricType": "L2"}]
}
rsp = self.collection_client.collection_create(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
rsp = self.collection_client.collection_describe(name)
logger.info(f"rsp: {rsp}")
assert rsp['code'] == 200
assert rsp['code'] == 0
# insert data
for i in range(insert_round):
data = []
@ -2277,7 +2277,7 @@ class TestDeleteVector(TestBase):
body_size = sys.getsizeof(json.dumps(payload))
logger.info(f"body size: {body_size / 1024 / 1024} MB")
rsp = self.vector_client.vector_insert(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
assert rsp['data']['insertCount'] == nb
# query data before delete
c = Collection(name)
@ -2313,7 +2313,7 @@ class TestDeleteVector(TestBase):
"outputFields": ["id", "uid"]
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
id_list = [r['uid'] for r in res]
@ -2327,7 +2327,7 @@ class TestDeleteVector(TestBase):
"outputFields": ["id", "uid"]
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
num_before_delete = len(res)
logger.info(f"res: {len(res)}")
@ -2371,7 +2371,7 @@ class TestDeleteVectorNegative(TestBase):
"filter": f"uid in {uids}",
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
ids = []
@ -2408,7 +2408,7 @@ class TestDeleteVectorNegative(TestBase):
"outputFields": ["id", "uid"]
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
id_list = [r['id'] for r in res]
@ -2422,7 +2422,7 @@ class TestDeleteVectorNegative(TestBase):
"outputFields": ["id", "uid"]
}
rsp = self.vector_client.vector_query(payload)
assert rsp['code'] == 200
assert rsp['code'] == 0
res = rsp['data']
logger.info(f"res: {len(res)}")
# delete data