test: add part of milvus client test cases (#30222)

issue: #30221

Signed-off-by: binbin lv <binbin.lv@zilliz.com>
binbin 2024-01-26 09:25:01 +08:00 committed by GitHub
parent 116d0f20b8
commit a847d56ac0
13 changed files with 4620 additions and 30 deletions


@@ -125,9 +125,9 @@ class TestcaseBase(Base):
Public methods that can be used for test cases.
"""
def _connect(self, enable_high_level_api=False):
def _connect(self, enable_milvus_client_api=False):
""" Add a connection and create the connect """
if enable_high_level_api:
if enable_milvus_client_api:
if cf.param_info.param_uri:
uri = cf.param_info.param_uri
else:
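This hunk only renames the flag (enable_high_level_api -> enable_milvus_client_api). For context, a minimal, hedged sketch of how the test files added in this commit drive it; it assumes the module-level client_w, cf and default_dim names those files define and is illustrative, not part of the diff:

    # inside a TestcaseBase subclass test method
    client = self._connect(enable_milvus_client_api=True)       # MilvusClient-style connection
    collection_name = cf.gen_unique_str("sketch")                # hypothetical name prefix
    client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
    assert client_w.has_collection(client, collection_name)[0]  # wrapper returns (res, check_result)
    client_w.drop_collection(client, collection_name)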


@@ -26,6 +26,18 @@ class HighLevelApiWrapper:
def __init__(self, active_trace=False):
self.active_trace = active_trace
@trace()
def create_schema(self, client, timeout=None, check_task=None,
check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.create_schema], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
**kwargs).run()
return res, check_result
@trace()
def create_collection(self, client, collection_name, dimension, timeout=None, check_task=None,
check_items=None, **kwargs):
@@ -39,6 +51,18 @@ class HighLevelApiWrapper:
**kwargs).run()
return res, check_result
def has_collection(self, client, collection_name, timeout=None, check_task=None,
check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.has_collection, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@trace()
def insert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
@@ -51,6 +75,18 @@ class HighLevelApiWrapper:
**kwargs).run()
return res, check_result
@trace()
def upsert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.upsert, collection_name, data], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name, data=data,
**kwargs).run()
return res, check_result
@trace()
def search(self, client, collection_name, data, limit=10, filter=None, output_fields=None, search_params=None,
timeout=None, check_task=None, check_items=None, **kwargs):
@@ -66,16 +102,14 @@ class HighLevelApiWrapper:
return res, check_result
@trace()
def query(self, client, collection_name, filter=None, output_fields=None,
timeout=None, check_task=None, check_items=None, **kwargs):
def query(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.query, collection_name, filter, output_fields], **kwargs)
res, check = api_request([client.query, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name, filter=filter,
output_fields=output_fields,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@@ -106,14 +140,14 @@ class HighLevelApiWrapper:
return res, check_result
@trace()
def delete(self, client, collection_name, pks, timeout=None, check_task=None, check_items=None, **kwargs):
def delete(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.delete, collection_name, pks], **kwargs)
res, check = api_request([client.delete, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name, pks=pks,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@@ -161,3 +195,319 @@ class HighLevelApiWrapper:
**kwargs).run()
return res, check_result
@trace()
def list_partitions(self, client, collection_name, check_task=None, check_items=None, **kwargs):
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.list_partitions, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@trace()
def list_indexes(self, client, collection_name, check_task=None, check_items=None, **kwargs):
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.list_indexes, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@trace()
def get_load_state(self, client, collection_name, check_task=None, check_items=None, **kwargs):
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.get_load_state, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@trace()
def prepare_index_params(self, client, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.prepare_index_params], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check,
**kwargs).run()
return res, check_result
@trace()
def load_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.load_collection, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name, **kwargs).run()
return res, check_result
@trace()
def release_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.release_collection, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name, **kwargs).run()
return res, check_result
@trace()
def load_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.load_partitions, collection_name, partition_names], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
partition_names=partition_names,
**kwargs).run()
return res, check_result
@trace()
def release_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.release_partitions, collection_name, partition_names], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
partition_names=partition_names,
**kwargs).run()
return res, check_result
@trace()
def rename_collection(self, client, old_name, new_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.rename_collection, old_name, new_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
old_name=old_name,
new_name=new_name,
**kwargs).run()
return res, check_result
@trace()
def use_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.use_database, db_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
db_name=db_name,
**kwargs).run()
return res, check_result
@trace()
def create_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.create_partition, collection_name, partition_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
partition_name=partition_name,
**kwargs).run()
return res, check_result
@trace()
def list_partitions(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.list_partitions, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@trace()
def drop_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.drop_partition, collection_name, partition_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
partition_name=partition_name,
**kwargs).run()
return res, check_result
@trace()
def has_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.has_partition, collection_name, partition_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
partition_name=partition_name,
**kwargs).run()
return res, check_result
@trace()
def get_partition_stats(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.get_partition_stats, collection_name, partition_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
partition_name=partition_name,
**kwargs).run()
return res, check_result
@trace()
def prepare_index_params(self, client, check_task=None, check_items=None, **kwargs):
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.prepare_index_params], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
**kwargs).run()
return res, check_result
@trace()
def create_index(self, client, collection_name, index_params, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.create_index, collection_name, index_params], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
index_params=index_params,
**kwargs).run()
return res, check_result
@trace()
def drop_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.drop_index, collection_name, index_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
index_name=index_name,
**kwargs).run()
return res, check_result
@trace()
def describe_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.describe_index, collection_name, index_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
index_name=index_name,
**kwargs).run()
return res, check_result
@trace()
def list_indexes(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.list_indexes, collection_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
**kwargs).run()
return res, check_result
@trace()
def create_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.create_alias, collection_name, alias], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
alias=alias,
**kwargs).run()
return res, check_result
@trace()
def drop_alias(self, client, alias, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.drop_alias, alias], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
alias=alias,
**kwargs).run()
return res, check_result
@trace()
def alter_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.alter_alias, collection_name, alias], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
collection_name=collection_name,
alias=alias,
**kwargs).run()
return res, check_result
@trace()
def describe_alias(self, client, alias, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.describe_alias, alias], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
alias=alias,
**kwargs).run()
return res, check_result
@trace()
def list_aliases(self, client, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.list_aliases], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
**kwargs).run()
return res, check_result
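Every wrapper method above follows the same pattern: route the call through api_request, then run ResponseChecker with the caller's check_task/check_items and return the (res, check_result) pair. A hedged usage sketch mirroring the tests added below (client, collection_name, ct and CheckTasks are assumed to be in scope; the error values are illustrative only):

    # success path: tests usually take the raw result via index [0]
    partitions = client_w.list_partitions(client, collection_name)[0]

    # expected-error path: pass CheckTasks.err_res plus the expected code/message fragment
    error = {ct.err_code: 1100, ct.err_msg: "Invalid collection name"}
    client_w.create_alias(client, "12-s", "sketch_alias",
                          check_task=CheckTasks.err_res, check_items=error)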


@@ -232,7 +232,7 @@ class ResponseChecker:
if check_items.get("dim", None) is not None:
assert res["fields"][1]["params"]["dim"] == check_items.get("dim")
assert res["fields"][0]["is_primary"] is True
assert res["fields"][0]["field_id"] == 100 and res["fields"][0]["type"] == 5
assert res["fields"][0]["field_id"] == 100 and res["fields"][0]["type"] in [5, 21]
assert res["fields"][1]["field_id"] == 101 and res["fields"][1]["type"] == 101
return True
@@ -310,12 +310,12 @@ class ResponseChecker:
assert len(search_res) == check_items["nq"]
else:
log.info("search_results_check: Numbers of query searched is correct")
enable_high_level_api = check_items.get("enable_high_level_api", False)
enable_milvus_client_api = check_items.get("enable_milvus_client_api", False)
log.debug(search_res)
for hits in search_res:
searched_original_vectors = []
ids = []
if enable_high_level_api:
if enable_milvus_client_api:
for hit in hits:
ids.append(hit['id'])
else:


@@ -0,0 +1,500 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_alias"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientAliasInvalid(TestcaseBase):
""" Test case of alias interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_create_alias_invalid_collection_name(self, collection_name):
"""
target: test create alias with invalid collection name
method: create alias using an invalid collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
alias = cf.gen_unique_str("collection_alias")
# 2. create alias
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.create_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_alias_collection_name_over_max_length(self):
"""
target: test create alias with collection name over max length
method: create alias on a collection name longer than 255 characters
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
alias = cf.gen_unique_str("collection_alias")
collection_name = "a".join("a" for i in range(256))
# 2. create alias
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.create_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_alias_not_exist_collection(self):
"""
target: test create alias on a non-existent collection
method: create alias for a collection that does not exist
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
alias = cf.gen_unique_str("collection_alias")
collection_name = "not_exist_collection_alias"
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default][collection={collection_name}]"}
client_w.create_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("alias", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_create_alias_invalid_alias_name(self, alias):
"""
target: test create alias with invalid alias name
method: create collection, then create an alias with an invalid name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.create_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_alias_name_over_max_length(self):
"""
target: test create alias with alias name over max length
method: create collection, then create an alias longer than 255 characters
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
alias = "a".join("a" for i in range(256))
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.create_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_alias_same_collection_name(self):
"""
target: test create alias with the same name as the collection
method: create collection, then create an alias identical to the collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
error = {ct.err_code: 1601, ct.err_msg: f"alias and collection name conflict[database=default]"
f"[alias={collection_name}]"}
client_w.create_alias(client, collection_name, collection_name,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_same_alias_diff_collections(self):
"""
target: test create same alias to different collections
method: create two collections, create an alias on the first, then create the same alias on the second
expected: raise exception for the second creation
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
collection_name_1 = cf.gen_unique_str(prefix)
alias = cf.gen_unique_str("collection_alias")
# 1. create collection and alias
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.create_alias(client, collection_name, alias)
# 2. create another collection and same alias
client_w.create_collection(client, collection_name_1, default_dim, consistency_level="Strong")
error = {ct.err_code: 1602, ct.err_msg: f"{alias} is alias to another collection: "
f"{collection_name}: alias already exist[database=default]"
f"[alias={alias}]"}
client_w.create_alias(client, collection_name_1, alias,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_drop_alias_not_existed(self):
"""
target: test drop an alias that does not exist
method: drop a non-existent alias
expected: drop returns without error
"""
client = self._connect(enable_milvus_client_api=True)
alias = cf.gen_unique_str("not_existed_alias")
client_w.drop_alias(client, alias)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("alias_name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_drop_alias_invalid_alias_name(self, alias_name):
"""
target: test drop alias with invalid alias name
method: drop an alias with an invalid name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {alias_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.drop_alias(client, alias_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_drop_alias_over_max_length(self):
"""
target: test drop alias with name over max length
method: drop an alias longer than 255 characters
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
alias = "a".join("a" for i in range(256))
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {alias}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.drop_alias(client, alias,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_alter_alias_invalid_collection_name(self, collection_name):
"""
target: test alter alias with invalid collection name
method: alter alias using an invalid collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
alias = cf.gen_unique_str("collection_alias")
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.alter_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_alter_alias_collection_name_over_max_length(self):
"""
target: test alter alias with collection name over max length
method: alter alias on a collection name longer than 255 characters
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
alias = cf.gen_unique_str("collection_alias")
collection_name = "a".join("a" for i in range(256))
# 2. create alias
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.alter_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_alter_alias_not_exist_collection(self):
"""
target: test alter alias on a non-existent collection
method: alter alias for a collection that does not exist
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
alias = cf.gen_unique_str("collection_alias")
collection_name = cf.gen_unique_str("not_exist_collection_alias")
# 2. create alias
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default][collection={collection_name}]"}
client_w.alter_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("alias", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_alter_alias_invalid_alias_name(self, alias):
"""
target: test alter alias with invalid alias name
method: create collection, then alter an alias with an invalid name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.alter_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_alter_alias_name_over_max_length(self):
"""
target: test alter alias with alias name over max length
method: create collection, then alter an alias longer than 255 characters
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
alias = "a".join("a" for i in range(256))
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.alter_alias(client, collection_name, alias,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_alter_alias_same_collection_name(self):
"""
target: test alter alias with the same name as the collection
method: create collection, then alter an alias identical to the collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
error = {ct.err_code: 1601, ct.err_msg: f"alias and collection name conflict[database=default]"
f"[alias={collection_name}"}
client_w.alter_alias(client, collection_name, collection_name,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_alter_non_exists_alias(self):
"""
target: test alter an alias that does not exist
method: create collection and an alias, then alter an alias that was never created
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
alias = cf.gen_unique_str("collection_alias")
another_alias = cf.gen_unique_str("collection_alias_another")
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
client_w.create_alias(client, collection_name, alias)
# 3. alter alias
error = {ct.err_code: 1600, ct.err_msg: f"alias not found[database=default][alias={collection_name}]"}
client_w.alter_alias(client, collection_name, another_alias,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
class TestMilvusClientAliasValid(TestcaseBase):
""" Test case of alias interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_alias_search_query(self):
"""
target: test search and query through a collection alias
method: create collection, create alias, insert via the alias, then search and query via the alias
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
alias = "collection_alias"
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create alias
client_w.create_alias(client, collection_name, alias)
collection_name = alias
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# client_w.flush(client, collection_name)
# assert client_w.num_entities(client, collection_name)[0] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 4. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.release_collection(client, collection_name)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1891, 1892")
def test_milvus_client_alias_default(self):
"""
target: test alias (high level api) normal case
method: create connection, collection, partition, alias, and assert collection
is equal to alias according to partitions
expected: create alias successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
partition_name = cf.gen_unique_str("partition")
alias = cf.gen_unique_str("collection_alias")
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.create_partition(client, collection_name, partition_name)
partition_name_list = client_w.list_partitions(client, collection_name)[0]
# 2. create alias
client_w.create_alias(client, collection_name, alias)
client_w.describe_alias(client, alias)
# 3. list alias
aliases = client_w.list_aliases(client)[0]
# assert alias in aliases
# 4. assert collection is equal to alias according to partitions
partition_name_list_alias = client_w.list_partitions(client, alias)[0]
assert partition_name_list == partition_name_list_alias
# 5. drop alias
client_w.drop_alias(client, alias)
aliases = client_w.list_aliases(client)[0]
# assert alias not in aliases
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_alter_alias_default(self):
"""
target: test alter alias (high level api)
method: create connection, collection, partition, alias, and assert collection
is equal to alias according to partitions
expected: alter alias successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
another_collection_name = cf.gen_unique_str(prefix)
partition_name = cf.gen_unique_str("partition")
alias = cf.gen_unique_str("collection_alias")
another_alias = cf.gen_unique_str("collection_alias_another")
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.create_partition(client, collection_name, partition_name)
partition_name_list = client_w.list_partitions(client, collection_name)[0]
client_w.create_collection(client, another_collection_name, default_dim, consistency_level="Strong")
client_w.create_alias(client, another_collection_name, another_alias)
# 2. create alias
client_w.create_alias(client, collection_name, alias)
# 3. alter alias
client_w.alter_alias(client, collection_name, another_alias)
client_w.describe_alias(client, alias)
# 3. list alias
aliases = client_w.list_aliases(client)[0]
# assert alias in aliases
# assert another_alias in aliases
# 4. assert collection is equal to alias according to partitions
partition_name_list_alias = client_w.list_partitions(client, another_alias)[0]
assert partition_name_list == partition_name_list_alias
client_w.drop_collection(client, collection_name)


@@ -0,0 +1,871 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from pymilvus import DataType
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_collection"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientCollectionInvalid(TestcaseBase):
""" Test case of collection interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_collection_invalid_collection_name(self, collection_name):
"""
target: test fast create collection with invalid collection name
method: create collection with an invalid name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
# 1. create collection
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.create_collection(client, collection_name, default_dim,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_collection_name_over_max_length(self):
"""
target: test fast create collection with name over max length
method: create collection with a name longer than 255 characters
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
# 1. create collection
collection_name = "a".join("a" for i in range(256))
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.create_collection(client, collection_name, default_dim,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_collection_name_empty(self):
"""
target: test fast create collection with empty collection name
method: create collection with a whitespace-only name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
# 1. create collection
collection_name = " "
error = {ct.err_code: 0, ct.err_msg: "collection name should not be empty: invalid parameter"}
client_w.create_collection(client, collection_name, default_dim,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("dim", [ct.min_dim-1, ct.max_dim+1])
def test_milvus_client_collection_invalid_dim(self, dim):
"""
target: test fast create collection with invalid dimension
method: create collection with dimension out of the supported range
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535, ct.err_msg: f"invalid dimension: {dim}. should be in range 1 ~ 32768"}
client_w.create_collection(client, collection_name, dim,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="pymilvus issue 1554")
def test_milvus_client_collection_invalid_primary_field(self):
"""
target: test high level api: client.create_collection
method: create collection with invalid primary field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"}
client_w.create_collection(client, collection_name, default_dim, id_type="invalid",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_collection_string_auto_id(self):
"""
target: test high level api: client.create_collection
method: create collection with auto id on string primary key
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for varChar "
f"field of collection {collection_name}"}
client_w.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_same_collection_different_params(self):
"""
target: test high level api: client.create_collection
method: create
expected: 1. Successfully to create collection with same params
2. Report errors for creating collection with same name and different params
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. create collection with same params
client_w.create_collection(client, collection_name, default_dim)
# 3. create collection with same name and different params
error = {ct.err_code: 1, ct.err_msg: f"create duplicate collection with different parameters, "
f"collection: {collection_name}"}
client_w.create_collection(client, collection_name, default_dim+1,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="pymilvus issue 1872")
@pytest.mark.parametrize("metric_type", [1, " ", "invalid"])
def test_milvus_client_collection_invalid_metric_type(self, metric_type):
"""
target: test high level api: client.create_collection
method: create collection with invalid metric type
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535,
ct.err_msg: "metric type not found or not supported, supported: [L2 IP COSINE HAMMING JACCARD]"}
client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="pymilvus issue 1864")
def test_milvus_client_collection_invalid_schema_field_name(self):
"""
target: test high level api: client.create_collection
method: create collection with invalid schema field name
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
schema = client_w.create_schema(client, enable_dynamic_field=False)[0]
schema.add_field("%$#", DataType.VARCHAR, max_length=64,
is_primary=True, auto_id = False)
schema.add_field("embeddings", DataType.FLOAT_VECTOR, dim=128)
# 1. create collection
error = {ct.err_code: 65535,
ct.err_msg: "metric type not found or not supported, supported: [L2 IP COSINE HAMMING JACCARD]"}
client_w.create_collection(client, collection_name, schema=schema,
check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientCollectionValid(TestcaseBase):
""" Test case of collection interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2", "IP"])
def metric_type(self, request):
yield request.param
@pytest.fixture(scope="function", params=["int", "string"])
def id_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.xfail(reason="pymilvus issue 1871")
@pytest.mark.parametrize("dim", [ct.min_dim, default_dim, ct.max_dim])
def test_milvus_client_collection_fast_creation_default(self, dim):
"""
target: test fast create collection normal case
method: create collection
expected: create collection with default schema, index, and load successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, dim)
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": dim,
"consistency_level": 0})
index = client_w.list_indexes(client, collection_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
client_w.load_partitions(client, collection_name, "_default")
client_w.release_partitions(client, collection_name, "_default")
if client_w.has_collection(client, collection_name)[0]:
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("dim", [ct.min_dim, default_dim, ct.max_dim])
def test_milvus_client_collection_fast_creation_all_params(self, dim, metric_type, id_type, auto_id):
"""
target: test fast create collection normal case
method: create collection
expected: create collection with default schema, index, and load successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
max_length = 100
# 1. create collection
client_w.create_collection(client, collection_name, dim, id_type=id_type, metric_type=metric_type,
auto_id=auto_id, max_length=max_length)
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": dim,
"consistency_level": 0})
index = client_w.list_indexes(client, collection_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
client_w.release_collection(client, collection_name)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.skip(reason="pymilvus issue 1864")
def test_milvus_client_collection_self_creation_default(self):
"""
target: test create collection with a self-defined schema
method: create collection with a custom schema and index params
expected: create collection and load successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
schema = client_w.create_schema(client, enable_dynamic_field=False)[0]
schema.add_field("id_string", DataType.VARCHAR, max_length=64, is_primary=True, auto_id = False)
schema.add_field("embeddings", DataType.FLOAT_VECTOR, dim=128)
schema.add_field("title", DataType.VARCHAR, max_length=64, is_partition_key=True)
schema.add_field("array_field", DataType.Array, max_capacity=12,
element_type_params={"type": DataType.VARCHAR, "max_length": 64})
index_params = client_w.prepare_index_params()
index_params.add_index("embeddings", metric_type="cosine")
index_params.add_index("title")
client_w.create_collection(client, collection_name, schema=schema, index_params=index_params)
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": 128,
"consistency_level": 0})
index = client_w.list_indexes(client, collection_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
if client_w.has_collection(client, collection_name)[0]:
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_array_insert_search(self):
"""
target: test search (high level api) normal case
method: create connection, collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = client_w.list_collections(client)[0]
assert collection_name in collections
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{
default_primary_key_field_name: i,
default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0,
default_int32_array_field_name: [i, i+1, i+2],
default_string_array_field_name: [str(i), str(i + 1), str(i + 2)]
} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="issue 25110")
def test_milvus_client_search_query_string(self):
"""
target: test search (high level api) for string primary key
method: create connection, collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length)
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"auto_id": auto_id})
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
client_w.flush(client, collection_name)
assert client_w.num_entities(client, collection_name)[0] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
# 4. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_search_different_metric_types_not_specifying_in_search_params(self, metric_type, auto_id):
"""
target: test search (high level api) normal case
method: create connection, collection, insert and search
expected: search successfully with limit(topK)
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id,
consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
if auto_id:
for row in rows:
row.pop(default_primary_key_field_name)
client_w.insert(client, collection_name, rows)
# 3. search
vectors_to_search = rng.random((1, default_dim))
# search_params = {"metric_type": metric_type}
client_w.search(client, collection_name, vectors_to_search, limit=default_limit,
output_fields=[default_primary_key_field_name],
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip("pymilvus issue #1866")
def test_milvus_client_search_different_metric_types_specifying_in_search_params(self, metric_type, auto_id):
"""
target: test search (high level api) normal case
method: create connection, collection, insert and search
expected: search successfully with limit(topK)
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id,
consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
if auto_id:
for row in rows:
row.pop(default_primary_key_field_name)
client_w.insert(client, collection_name, rows)
# 3. search
vectors_to_search = rng.random((1, default_dim))
search_params = {"metric_type": metric_type}
client_w.search(client, collection_name, vectors_to_search, limit=default_limit,
search_params=search_params,
output_fields=[default_primary_key_field_name],
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_ids(self):
"""
target: test delete (high level api)
method: create connection, collection, insert, delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. delete
delete_num = 3
client_w.delete(client, collection_name, ids=[i for i in range(delete_num)])
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
for insert_id in range(delete_num):
if insert_id in insert_ids:
insert_ids.remove(insert_id)
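# only default_nb - delete_num entities remain after the delete, so that is the most the search can return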
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
# 5. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_filters(self):
"""
target: test delete (high level api)
method: create connection, collection, insert, delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. delete
delete_num = 3
client_w.delete(client, collection_name, filter=f"id < {delete_num}")
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
for insert_id in range(delete_num):
if insert_id in insert_ids:
insert_ids.remove(insert_id)
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
# 5. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_collection_rename_collection(self):
"""
target: test rename collection (high level api) normal case
method: create collection, rename it, then describe, load and release the renamed collection
expected: rename successfully and the old name is no longer accessible
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
collections = client_w.list_collections(client)[0]
assert collection_name in collections
old_name = collection_name
new_name = collection_name + "new"
client_w.rename_collection(client, old_name, new_name)
collections = client_w.list_collections(client)[0]
assert new_name in collections
assert old_name not in collections
client_w.describe_collection(client, new_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": new_name,
"dim": default_dim,
"consistency_level": 0})
index = client_w.list_indexes(client, new_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
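# the old name should no longer resolve after the rename, so loading partitions of old_name is expected to fail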
error = {ct.err_code: 100, ct.err_msg: f"collection not found"}
client_w.load_partitions(client, old_name, "_default",
check_task=CheckTasks.err_res, check_items=error)
client_w.load_partitions(client, new_name, "_default")
client_w.release_partitions(client, new_name, "_default")
if client_w.has_collection(client, new_name)[0]:
client_w.drop_collection(client, new_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="db not ready")
def test_milvus_client_collection_rename_collection_target_db(self):
"""
target: test rename collection into another database
method: create collection, then rename it with target_db specified
expected: rename successfully into the target database
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
collections = client_w.list_collections(client)[0]
assert collection_name in collections
db_name = "new_db"
client_w.use_database(client, db_name)
old_name = collection_name
new_name = collection_name + "new"
client_w.rename_collection(client, old_name, new_name, target_db=db_name)
collections = client_w.list_collections(client)[0]
assert new_name in collections
assert old_name not in collections
client_w.describe_collection(client, new_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": new_name,
"dim": default_dim,
"consistency_level": 0})
index = client_w.list_indexes(client, new_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
error = {ct.err_code: 100, ct.err_msg: f"collection not found"}
client_w.load_partitions(client, old_name, "_default",
check_task=CheckTasks.err_res, check_items=error)
client_w.load_partitions(client, new_name, "_default")
client_w.release_partitions(client, new_name, "_default")
if client_w.has_collection(client, new_name)[0]:
client_w.drop_collection(client, new_name)
class TestMilvusClientDropCollectionInvalid(TestcaseBase):
""" Test case of search interface """
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_drop_collection_invalid_collection_name(self, name):
"""
target: test drop collection with invalid collection name
method: drop collection using an invalid collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. collection name can only "
f"contain numbers, letters and underscores: invalid parameter"}
client_w.drop_collection(client, name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_drop_collection_not_existed(self):
"""
target: test drop collection that does not exist
method: drop a non-existent collection
expected: drop successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = "nonexisted"
client_w.drop_collection(client, collection_name)
class TestMilvusClientDescribeCollectionInvalid(TestcaseBase):
""" Test case of search interface """
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_describe_collection_invalid_collection_name(self, name):
"""
target: test describe collection with invalid collection name
method: describe collection using an invalid collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. collection name can only "
f"contain numbers, letters and underscores: invalid parameter"}
client_w.describe_collection(client, name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_describe_collection_not_existed(self):
"""
target: test describe collection that does not exist
method: describe a non-existent collection
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = "nonexisted"
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default][collection=nonexisted]"}
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_describe_collection_deleted_collection(self):
"""
target: test describe collection that has been dropped
method: create collection, drop it, then describe it
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
client_w.drop_collection(client, collection_name)
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default]"}
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientHasCollectionInvalid(TestcaseBase):
""" Test case of search interface """
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_has_collection_invalid_collection_name(self, name):
"""
target: test has_collection with invalid collection name
method: call has_collection using an invalid collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. collection name can only "
f"contain numbers, letters and underscores: invalid parameter"}
client_w.has_collection(client, name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_has_collection_not_existed(self):
"""
target: test has_collection on a collection that does not exist
method: call has_collection on a non-existent collection
expected: return False
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = "nonexisted"
result = client_w.has_collection(client, collection_name)[0]
assert result is False
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_has_collection_deleted_collection(self):
"""
target: test has_collection on a dropped collection
method: create collection, drop it, then call has_collection
expected: return False
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
client_w.drop_collection(client, collection_name)
result = client_w.has_collection(client, collection_name)[0]
assert result is False
class TestMilvusClientRenameCollectionInValid(TestcaseBase):
""" Test case of search interface """
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_rename_collection_invalid_collection_name(self, name):
"""
target: test rename collection with invalid collection name
method: rename a collection using an invalid old name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=1][collection={name}]"}
client_w.rename_collection(client, name, "new_collection",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_rename_collection_not_existed_collection(self):
"""
target: test rename collection that does not exist
method: rename a non-existent collection
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = "nonexisted"
error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=1][collection={collection_name}]"}
client_w.rename_collection(client, collection_name, "new_collection",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_rename_collection_duplicated_collection(self):
"""
target: test rename collection to an existing collection name
method: create collection, then rename it to its own name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
error = {ct.err_code: 65535, ct.err_msg: f"duplicated new collection name default:{collection_name}"
f"with other collection name or alias"}
client_w.rename_collection(client, collection_name, collection_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_rename_deleted_collection(self):
"""
target: test rename collection that has been dropped
method: create collection, drop it, then rename it
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
client_w.drop_collection(client, collection_name)
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default]"}
client_w.rename_collection(client, collection_name, "new_collection",
check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientRenameCollectionValid(TestcaseBase):
""" Test case of search interface """
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_rename_collection_multiple_times(self):
"""
target: test rename collection multiple times
method: create collection, then rename it back and forth several times
expected: rename successfully each time
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 2. rename the collection back and forth several times
new_name = "new_name_rename"
client_w.create_collection(client, collection_name, default_dim)
times = 3
for _ in range(times):
client_w.rename_collection(client, collection_name, new_name)
client_w.rename_collection(client, new_name, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_rename_collection_deleted_collection(self):
"""
target: test rename collection to the name of a dropped collection
method: create two collections, drop one, then rename the other to the dropped name
expected: rename successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
another_collection_name = cf.gen_unique_str("another_collection")
# 1. create 2 collections
client_w.create_collection(client, collection_name, default_dim)
client_w.create_collection(client, another_collection_name, default_dim)
# 2. drop one collection
client_w.drop_collection(client, another_collection_name)
# 3. rename to dropped collection
client_w.rename_collection(client, collection_name, another_collection_name)

View File

@ -0,0 +1,268 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_delete"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientDeleteInvalid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_filters_and_ids(self):
"""
target: test delete (high level api) with ids and filters
method: create connection, collection, insert, delete, and search
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. delete
delete_num = 3
client_w.delete(client, collection_name, ids=[i for i in range(delete_num)], filter=f"id < {delete_num}",
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
"err_msg": "Ambiguous filter parameter, "
"only one deletion condition can be specified."})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1869")
def test_milvus_client_delete_with_invalid_id_type(self):
"""
target: test delete (high level api) with invalid ids type
method: create connection, collection, then delete with an invalid ids value
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. delete
client_w.delete(client, collection_name, ids=0,
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
"err_msg": "expr cannot be empty"})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1870")
def test_milvus_client_delete_with_not_all_required_params(self):
"""
target: test delete (high level api) without ids or filter
method: create connection, collection, then delete without any deletion condition
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. delete
client_w.delete(client, collection_name,
check_task=CheckTasks.err_res,
check_items={"err_code": 1,
"err_msg": "expr cannot be empty"})
class TestMilvusClientDeleteValid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_ids(self):
"""
target: test delete (high level api)
method: create connection, collection, insert, delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. delete
delete_num = 3
client_w.delete(client, collection_name, ids=[i for i in range(delete_num)])
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
for insert_id in range(delete_num):
if insert_id in insert_ids:
insert_ids.remove(insert_id)
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
# 5. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_filters(self):
"""
target: test delete (high level api)
method: create connection, collection, insert, delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. delete
delete_num = 3
client_w.delete(client, collection_name, filter=f"id < {delete_num}")
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
for insert_id in range(delete_num):
if insert_id in insert_ids:
insert_ids.remove(insert_id)
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
# 5. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_filters_partition(self):
"""
target: test delete (high level api)
method: create connection, collection, insert, delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. get partition lists
partition_names = client_w.list_partitions(client, collection_name)
# 4. delete
delete_num = 3
client_w.delete(client, collection_name, filter=f"id < {delete_num}", partition_names=partition_names)
# 5. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
for insert_id in range(delete_num):
if insert_id in insert_ids:
insert_ids.remove(insert_id)
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
# 6. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)

View File

@ -0,0 +1,615 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_index"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_multiple_vector_field_name = "vector_new"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientIndexInvalid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_index_invalid_collection_name(self, name):
"""
target: test create index with invalid collection name
method: create collection, then create index using an invalid collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector")
# 3. create index
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. the first character of a collection "
f"name must be an underscore or letter: invalid parameter"}
client_w.create_index(client, name, index_params,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("name", ["a".join("a" for i in range(256))])
def test_milvus_client_index_collection_name_over_max_length(self, name):
"""
target: test create index with a collection name over max length
method: create collection, then create index using an over-length collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector")
# 3. create index
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. the length of a collection name "
f"must be less than 255 characters: invalid parameter"}
client_w.create_index(client, name, index_params,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_index_not_exist_collection_name(self):
"""
target: test create index on a collection that does not exist
method: create collection, then create index using a non-existent collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
not_existed_collection_name = cf.gen_unique_str("not_existed_collection")
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector")
# 3. create index
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default][collection=not_existed]"}
client_w.create_index(client, not_existed_collection_name, index_params,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="pymilvus issue 1885")
@pytest.mark.parametrize("index", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
def test_milvus_client_index_invalid_index_type(self, index):
"""
target: test create index with invalid index type
method: create collection, then create index with an invalid index type
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type=index)
# 3. create index
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default][collection=not_existed]"}
client_w.create_index(client, collection_name, index_params,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="pymilvus issue 1885")
@pytest.mark.parametrize("metric", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
def test_milvus_client_index_invalid_metric_type(self, metric):
"""
target: test create index with invalid metric type
method: create collection, then create index with an invalid metric type
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", metric_type = metric)
# 3. create index
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not "
f"found[database=default][collection=not_existed]"}
client_w.create_index(client, collection_name, index_params,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_index_drop_index_before_release(self):
"""
target: test drop index before releasing the collection
method: create collection, then drop the index without releasing the collection first
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
error = {ct.err_code: 65535, ct.err_msg: f"index cannot be dropped, collection is loaded, "
f"please release it first"}
client_w.drop_index(client, collection_name, "vector",
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="pymilvus issue 1886")
def test_milvus_client_index_multiple_indexes_one_field(self):
"""
target: test create multiple indexes on one field
method: create collection, create an index, then create a different index on the same field
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type="HNSW", metric_type="IP")
# 3. create index
client_w.create_index(client, collection_name, index_params)
# 4. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name="vector", index_type="IVF_FLAT", metric_type="L2")
error = {ct.err_code: 1100, ct.err_msg: f""}
# 5. create another index
client_w.create_index(client, collection_name, index_params,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="pymilvus issue 1886")
def test_milvus_client_create_diff_index_without_release(self):
"""
target: test create a different index without releasing the collection
method: create collection, create an index, then create a different index without release
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type="HNSW", metric_type="L2")
# 3. create index
client_w.create_index(client, collection_name, index_params)
client_w.drop_collection(client, collection_name)
class TestMilvusClientIndexValid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2", "IP"])
def metric_type(self, request):
yield request.param
@pytest.fixture(scope="function", params=["TRIE", "STL_SORT", "AUTOINDEX"])
def scalar_index(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_index_params[:7]))
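# pairs the first seven index types from common_type with their corresponding default build params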
def test_milvus_client_index_default(self, index, params, metric_type):
"""
target: test create index (high level api) normal case
method: create connection, collection, index, insert, and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
res = client_w.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type)
# 3. create index
client_w.create_index(client, collection_name, index_params)
# 4. create same index twice
client_w.create_index(client, collection_name, index_params)
# 5. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 6. load collection
client_w.load_collection(client, collection_name)
# 7. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 8. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="pymilvus issue 1884")
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_index_params[:7]))
def test_milvus_client_index_with_params(self, index, params, metric_type):
"""
target: test create index with params (high level api) normal case
method: create connection, collection, index with params, insert, and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
res = client_w.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type=index, params=params,metric_type = metric_type)
# 3. create index
client_w.create_index(client, collection_name, index_params)
# 4. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 5. load collection
client_w.load_collection(client, collection_name)
# 6. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 7. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_index_params[:7]))
def test_milvus_client_index_after_insert(self, index, params, metric_type):
"""
target: test create index after insert (high level api) normal case
method: create connection, collection, insert, index, and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 3. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type)
# 4. create index
client_w.create_index(client, collection_name, index_params)
# 5. load collection
client_w.load_collection(client, collection_name)
# 6. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 7. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_index_auto_index(self, scalar_index, metric_type):
"""
target: test AUTOINDEX and scalar index (high level api) normal case
method: create connection, collection, index, insert, and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
res = client_w.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index = "AUTOINDEX"
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type)
index_params.add_index(field_name="id", index_type=scalar_index, metric_type=metric_type)
# 3. create index
client_w.create_index(client, collection_name, index_params)
# 4. drop index
client_w.drop_index(client, collection_name, "vector")
client_w.drop_index(client, collection_name, "id")
# 5. create index
client_w.create_index(client, collection_name, index_params)
# 6. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 7. load collection
client_w.load_collection(client, collection_name)
# 8. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 9. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_index_multiple_vectors(self, scalar_index, metric_type):
"""
target: test index with multiple vector fields (high level api) normal case
method: create connection, collection, index, insert, and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
res = client_w.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index = "AUTOINDEX"
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type)
index_params.add_index(field_name="id", index_type=scalar_index, metric_type=metric_type)
# 3. create index
client_w.create_index(client, collection_name, index_params)
# 4. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i),
default_multiple_vector_field_name: list(rng.random((1, default_dim))[0])} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 5. load collection
client_w.load_collection(client, collection_name)
# 6. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 7. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_index_params[:7]))
def test_milvus_client_index_drop_create_same_index(self, index, params, metric_type):
"""
target: test drop and recreate the same index (high level api)
method: create connection, collection, index, drop index, recreate it, insert, and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
res = client_w.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type)
# 3. create index
client_w.create_index(client, collection_name, index_params)
# 4. drop index
client_w.drop_index(client, collection_name, "vector")
# 4. create same index twice
client_w.create_index(client, collection_name, index_params)
# 5. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 6. load collection
client_w.load_collection(client, collection_name)
# 7. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 8. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_index_params[:7]))
def test_milvus_client_index_drop_create_different_index(self, index, params, metric_type):
"""
target: test drop an index and create a different one (high level api)
method: create connection, collection, index, drop index, create a different index, insert, and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
client_w.release_collection(client, collection_name)
client_w.drop_index(client, collection_name, "vector")
res = client_w.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index_params = client_w.prepare_index_params(client)[0]
index_params.add_index(field_name = "vector", metric_type = metric_type)
# 3. create index
client_w.create_index(client, collection_name, index_params)
# 4. drop index
client_w.drop_index(client, collection_name, "vector")
# 4. create different index
index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type)
client_w.create_index(client, collection_name, index_params)
# 5. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 6. load collection
client_w.load_collection(client, collection_name)
# 7. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 8. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)

View File

@ -0,0 +1,997 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_insert"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientInsertInvalid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1883")
def test_milvus_client_insert_column_data(self):
"""
target: test insert column data
method: create connection, collection, insert and search
expected: raise error
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
vectors = [[random.random() for _ in range(default_dim)] for _ in range(default_nb)]
data = [[i for i in range(default_nb)], vectors]
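# column-style data (one list per field) is expected to be rejected, since the milvus client insert takes row dicts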
error = {ct.err_code: 1, ct.err_msg: "Unexpected error, message=<'list' object has no attribute 'items'"}
client_w.insert(client, collection_name, data,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_empty_collection_name(self):
"""
target: test insert with an empty collection name
method: insert rows using an empty collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = ""
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"`collection_name` value {collection_name} is illegal"}
client_w.insert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_insert_invalid_collection_name(self, collection_name):
"""
target: test insert with invalid collection name
method: insert rows using an invalid collection name
expected: raise exception
"""
client = self._connect(enable_milvus_client_api=True)
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.insert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_collection_name_over_max_length(self):
"""
target: test milvus client: insert with collection name over max length
method: insert rows into a collection whose name exceeds 255 characters
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
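# 256 "a"s joined by "a" yields a 511-character name, well past the 255-character limit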
collection_name = "a".join("a" for i in range(256))
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.insert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_not_exist_collection_name(self):
"""
target: test milvus client: insert into a non-existent collection
method: insert rows into a collection that does not exist
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str("insert_not_exist")
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not found"
f"[database=default][collection={collection_name}]"}
client_w.insert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1894")
@pytest.mark.parametrize("data", ["12-s", "12 s", "(mn)", "中文", "%$#", " "])
def test_milvus_client_insert_data_invalid_type(self, data):
"""
target: test milvus client: insert with invalid data type
method: insert data that is not a list of row dicts
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
error = {ct.err_code: 1, ct.err_msg: f"None rows, please provide valid row data."}
client_w.insert(client, collection_name, data,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1895")
def test_milvus_client_insert_data_empty(self):
"""
target: test milvus client: insert with empty data
method: insert an empty string as data
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
error = {ct.err_code: 1, ct.err_msg: f"None rows, please provide valid row data."}
client_w.insert(client, collection_name, data="",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_data_vector_field_missing(self):
"""
target: test milvus client: insert data with vector field missing
method: insert rows without the vector field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i,
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Field vector don't match in entities[0]"}
client_w.insert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_data_id_field_missing(self):
"""
target: test milvus client: insert data with primary key field missing
method: insert rows without the id field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Field id don't match in entities[0]"}
client_w.insert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_data_extra_field(self):
"""
target: test milvus client: insert a field not defined in the schema
method: insert rows containing an extra field when enable_dynamic_field is False
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, enable_dynamic_field=False)
# 2. insert
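# the float and varchar fields below are not in the fast-create schema; with dynamic field disabled they are rejected as unexpected fields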
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Attempt to insert an unexpected field "
f"to collection without enabling dynamic field"}
client_w.insert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_data_dim_not_match(self):
"""
target: test milvus client: insert data with mismatched dim
method: insert vectors whose dim does not match the collection dim
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
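# vectors are generated with default_dim + 1 so they do not match the collection dim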
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim+1))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Collection field dim is {default_dim}, "
f"but entities field dim is {default_dim+1}"}
client_w.insert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_not_matched_data(self):
"""
target: test milvus client: insert data that does not match the defined schema
method: insert string to int primary field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
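# string primary keys against the default int64 "id" field, so the data type does not match the schema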
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"The Input data type is inconsistent with defined schema, "
f"please check it."}
client_w.insert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("partition_name", ["12-s", "12 s", "(mn)", "中文", "%$#", " "])
def test_milvus_client_insert_invalid_partition_name(self, partition_name):
"""
target: test milvus client: insert with invalid partition name
method: insert rows with an invalid partition name
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}. The first character of "
f"a partition name must be an underscore or letter."}
client_w.insert(client, collection_name, data=rows, partition_name=partition_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_not_exist_partition_name(self):
"""
target: test milvus client: insert into a non-existent partition
method: insert rows with a partition name that does not exist
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
partition_name = cf.gen_unique_str("partition_not_exist")
error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"}
client_w.insert(client, collection_name, data=rows, partition_name=partition_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_insert_collection_partition_not_match(self):
"""
target: test milvus client: insert into a partition that belongs to another collection
method: insert rows specifying a partition created in a different collection
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
another_collection_name = cf.gen_unique_str(prefix + "another")
partition_name = cf.gen_unique_str("partition")
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
client_w.create_collection(client, another_collection_name, default_dim)
client_w.create_partition(client, another_collection_name, partition_name)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"}
client_w.insert(client, collection_name, data=rows, partition_name=partition_name,
check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientInsertValid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
def test_milvus_client_insert_default(self):
"""
target: test insert (high level api) normal case
method: create connection, collection, insert and search
expected: insert/search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
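# wrapper methods return (result, check_result); [0] picks the raw pymilvus result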
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"consistency_level": 0})
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
results = client_w.insert(client, collection_name, rows)[0]
assert results['insert_count'] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 4. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.release_collection(client, collection_name)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_insert_different_fields(self):
"""
target: test insert rows with different fields
method: insert rows, then insert rows carrying a new dynamic field
expected: insert and search successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"consistency_level": 0})
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
results = client_w.insert(client, collection_name, rows)[0]
assert results['insert_count'] == default_nb
# 3. insert diff fields
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, "new_diff_str_field": str(i)} for i in range(default_nb)]
results = client_w.insert(client, collection_name, rows)[0]
assert results['insert_count'] == default_nb
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_insert_empty_data(self):
"""
target: test insert empty data
method: insert an empty list of rows
expected: insert succeeds with insert_count 0
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rows = []
results = client_w.insert(client, collection_name, rows)[0]
assert results['insert_count'] == 0
# 3. search
rng = np.random.default_rng(seed=19530)
vectors_to_search = rng.random((1, default_dim))
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": [],
"limit": 0})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_partition(self):
"""
target: test insert into a specified partition
method: create collection and partition, insert into the partition, then search
expected: insert and search successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
partition_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create partition
client_w.create_partition(client, collection_name, partition_name)
partitions = client_w.list_partitions(client, collection_name)[0]
assert partition_name in partitions
index = client_w.list_indexes(client, collection_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
# 3. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
results = client_w.insert(client, collection_name, rows, partition_name=partition_name)[0]
assert results['insert_count'] == default_nb
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# partition_number = client_w.get_partition_stats(client, collection_name, "_default")[0]
# assert partition_number == default_nb
# partition_number = client_w.get_partition_stats(client, collection_name, partition_name)[0]
# assert partition_number[0]['value'] == 0
if client_w.has_partition(client, collection_name, partition_name)[0]:
client_w.release_partitions(client, collection_name, partition_name)
client_w.drop_partition(client, collection_name, partition_name)
if client_w.has_collection(client, collection_name)[0]:
client_w.drop_collection(client, collection_name)
class TestMilvusClientUpsertInvalid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1883")
def test_milvus_client_upsert_column_data(self):
"""
target: test upsert column data
method: create connection, collection, then upsert column-style data
expected: raise error
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
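# column-style payload (a list of columns) instead of the list of row dicts the milvus client expects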
vectors = [[random.random() for _ in range(default_dim)] for _ in range(default_nb)]
data = [[i for i in range(default_nb)], vectors]
error = {ct.err_code: 1, ct.err_msg: "Unexpected error, message=<'list' object has no attribute 'items'"}
client_w.upsert(client, collection_name, data,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_empty_collection_name(self):
"""
target: test milvus client: upsert with empty collection name
method: upsert rows with collection_name set to an empty string
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = ""
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"`collection_name` value {collection_name} is illegal"}
client_w.upsert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_upsert_invalid_collection_name(self, collection_name):
"""
target: test milvus client: upsert with invalid collection name
method: upsert rows into a collection with an invalid name
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a "
f"collection name must be an underscore or letter: invalid parameter"}
client_w.upsert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_collection_name_over_max_length(self):
"""
target: test milvus client: upsert with collection name over max length
method: upsert rows into a collection whose name exceeds 255 characters
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
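# 256 "a"s joined by "a" yields a 511-character name, well past the 255-character limit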
collection_name = "a".join("a" for i in range(256))
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.upsert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_not_exist_collection_name(self):
"""
target: test milvus client: upsert into a non-existent collection
method: upsert rows into a collection that does not exist
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str("insert_not_exist")
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not found"
f"[database=default][collection={collection_name}]"}
client_w.upsert(client, collection_name, rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1894")
@pytest.mark.parametrize("data", ["12-s", "12 s", "(mn)", "中文", "%$#", " "])
def test_milvus_client_upsert_data_invalid_type(self, data):
"""
target: test milvus client: upsert with invalid data type
method: upsert data that is not a list of row dicts
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
error = {ct.err_code: 1, ct.err_msg: f"None rows, please provide valid row data."}
client_w.upsert(client, collection_name, data,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.xfail(reason="pymilvus issue 1895")
def test_milvus_client_upsert_data_empty(self):
"""
target: test milvus client: upsert with empty data
method: upsert an empty string as data
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
error = {ct.err_code: 1, ct.err_msg: f"None rows, please provide valid row data."}
client_w.upsert(client, collection_name, data="",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_data_vector_field_missing(self):
"""
target: test milvus client: upsert data with vector field missing
method: upsert rows without the vector field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i,
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Field vector don't match in entities[0]"}
client_w.upsert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_data_id_field_missing(self):
"""
target: test milvus client: upsert data with primary key field missing
method: upsert rows without the id field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Field id don't match in entities[0]"}
client_w.upsert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_data_extra_field(self):
"""
target: test milvus client: upsert a field not defined in the schema
method: upsert rows containing an extra field when enable_dynamic_field is False
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, enable_dynamic_field=False)
# 2. insert
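# the float and varchar fields below are not in the fast-create schema; with dynamic field disabled they are rejected as unexpected fields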
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Attempt to insert an unexpected field "
f"to collection without enabling dynamic field"}
client_w.upsert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_data_dim_not_match(self):
"""
target: test milvus client: insert extra field than schema
method: insert extra field than schema when enable_dynamic_field is False
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
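# vectors are generated with default_dim + 1 so they do not match the collection dim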
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim+1))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"Collection field dim is {default_dim}, "
f"but entities field dim is {default_dim+1}"}
client_w.upsert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_not_matched_data(self):
"""
target: test milvus client: upsert data that does not match the defined schema
method: upsert string values to an int primary field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
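# string primary keys against the default int64 "id" field, so the data type does not match the schema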
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 1, ct.err_msg: f"The Input data type is inconsistent with defined schema, "
f"please check it."}
client_w.upsert(client, collection_name, data=rows,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("partition_name", ["12-s", "12 s", "(mn)", "中文", "%$#", " "])
def test_milvus_client_upsert_invalid_partition_name(self, partition_name):
"""
target: test milvus client: insert extra field than schema
method: insert extra field than schema when enable_dynamic_field is False
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}. The first character of "
f"a partition name must be an underscore or letter."}
client_w.upsert(client, collection_name, data=rows, partition_name=partition_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_upsert_not_exist_partition_name(self):
"""
target: test milvus client: insert extra field than schema
method: insert extra field than schema when enable_dynamic_field is False
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
partition_name = cf.gen_unique_str("partition_not_exist")
error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"}
client_w.upsert(client, collection_name, data=rows, partition_name=partition_name,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_upsert_collection_partition_not_match(self):
"""
target: test milvus client: insert extra field than schema
method: insert extra field than schema when enable_dynamic_field is False
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
another_collection_name = cf.gen_unique_str(prefix + "another")
partition_name = cf.gen_unique_str("partition")
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
client_w.create_collection(client, another_collection_name, default_dim)
client_w.create_partition(client, another_collection_name, partition_name)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"}
client_w.upsert(client, collection_name, data=rows, partition_name=partition_name,
check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientUpsertValid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
def test_milvus_client_upsert_default(self):
"""
target: test upsert (high level api) normal case
method: create connection, collection, upsert and search
expected: upsert/search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"consistency_level": 0})
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
results = client_w.upsert(client, collection_name, rows)[0]
assert results['upsert_count'] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 4. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.release_collection(client, collection_name)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_upsert_empty_data(self):
"""
target: test upsert empty data
method: upsert an empty list of rows
expected: upsert succeeds with upsert_count 0
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rows = []
results = client_w.upsert(client, collection_name, rows)[0]
assert results['upsert_count'] == 0
# 3. search
rng = np.random.default_rng(seed=19530)
vectors_to_search = rng.random((1, default_dim))
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": [],
"limit": 0})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_upsert_partition(self):
"""
target: test upsert into specified partitions
method: create collection and partition, upsert into the default and the new partition, then search
expected: upsert and search successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
partition_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create partition
client_w.create_partition(client, collection_name, partition_name)
partitions = client_w.list_partitions(client, collection_name)[0]
assert partition_name in partitions
index = client_w.list_indexes(client, collection_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
# 3. upsert to default partition
results = client_w.upsert(client, collection_name, rows, partition_name=partitions[0])[0]
assert results['upsert_count'] == default_nb
# 4. upsert to non-default partition
results = client_w.upsert(client, collection_name, rows, partition_name=partition_name)[0]
assert results['upsert_count'] == default_nb
# 5. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# partition_number = client_w.get_partition_stats(client, collection_name, "_default")[0]
# assert partition_number == default_nb
# partition_number = client_w.get_partition_stats(client, collection_name, partition_name)[0]
# assert partition_number[0]['value'] == 0
if client_w.has_partition(client, collection_name, partition_name)[0]:
client_w.release_partitions(client, collection_name, partition_name)
client_w.drop_partition(client, collection_name, partition_name)
if client_w.has_collection(client, collection_name)[0]:
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_insert_upsert(self):
"""
target: test insert followed by upsert in a partition
method: create collection and partition, insert rows, then upsert rows with a different dynamic field
expected: insert/upsert and search successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
partition_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create partition
client_w.create_partition(client, collection_name, partition_name)
partitions = client_w.list_partitions(client, collection_name)[0]
assert partition_name in partitions
index = client_w.list_indexes(client, collection_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
# 3. insert and upsert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
results = client_w.insert(client, collection_name, rows, partition_name=partition_name)[0]
assert results['insert_count'] == default_nb
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, "new_diff_str_field": str(i)} for i in range(default_nb)]
results = client_w.upsert(client, collection_name, rows, partition_name=partition_name)[0]
assert results['upsert_count'] == default_nb
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
if client_w.has_partition(client, collection_name, partition_name)[0]:
client_w.release_partitions(client, collection_name, partition_name)
client_w.drop_partition(client, collection_name, partition_name)
if client_w.has_collection(client, collection_name)[0]:
client_w.drop_collection(client, collection_name)


@ -0,0 +1,270 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from pymilvus import DataType
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_partition"
partition_prefix = "milvus_client_api_partition"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientPartitionInvalid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"])
def test_milvus_client_collection_invalid_collection_name(self, collection_name):
"""
target: test fast create collection with invalid collection name
method: create collection with an invalid name
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
# 1. create collection
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. should be in range 1 ~ 32768"}
client_w.create_collection(client, collection_name, default_dim,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_collection_name_over_max_length(self):
"""
target: test fast create collection with name over max length
method: create collection with a name longer than 255 characters
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
# 1. create collection
collection_name = "a".join("a" for i in range(256))
error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. "
f"the length of a collection name must be less than 255 characters: "
f"invalid parameter"}
client_w.create_collection(client, collection_name, default_dim,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_collection_name_empty(self):
"""
target: test fast create collection with empty name
method: create collection with a blank name
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
# 1. create collection
collection_name = " "
error = {ct.err_code: 0, ct.err_msg: "collection name should not be empty: invalid parameter"}
client_w.create_collection(client, collection_name, default_dim,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("dim", [ct.min_dim-1, ct.max_dim+1])
def test_milvus_client_collection_invalid_dim(self, dim):
"""
target: test fast create collection with invalid dim
method: create collection with dim out of the supported range
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535, ct.err_msg: f"invalid dimension: {dim}. should be in range 1 ~ 32768"}
client_w.create_collection(client, collection_name, dim,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="pymilvus issue 1554")
def test_milvus_client_collection_invalid_primary_field(self):
"""
target: test high level api: client.create_collection
method: create collection with invalid primary field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"}
client_w.create_collection(client, collection_name, default_dim, id_type="invalid",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_collection_string_auto_id(self):
"""
target: test high level api: client.create_collection
method: create collection with auto id on string primary key
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for varChar "
f"field of collection {collection_name}"}
client_w.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_same_collection_different_params(self):
"""
target: test high level api: client.create_collection
method: create the same collection twice with the same params, then with different params
expected: 1. Successfully create the collection with the same params
2. Report errors when creating a collection with the same name but different params
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. create collection with same params
client_w.create_collection(client, collection_name, default_dim)
# 3. create collection with same name and different params
error = {ct.err_code: 1, ct.err_msg: f"create duplicate collection with different parameters, "
f"collection: {collection_name}"}
client_w.create_collection(client, collection_name, default_dim+1,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="pymilvus issue 1872")
@pytest.mark.parametrize("metric_type", [1, " ", "invalid"])
def test_milvus_client_collection_invalid_metric_type(self, metric_type):
"""
target: test high level api: client.create_collection
method: create collection with an invalid metric type
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535,
ct.err_msg: "metric type not found or not supported, supported: [L2 IP COSINE HAMMING JACCARD]"}
client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type,
check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientPartitionValid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2", "IP"])
def metric_type(self, request):
yield request.param
@pytest.fixture(scope="function", params=["int", "string"])
def id_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.skip(reason="pymilvus issue 1880")
def test_milvus_client_partition_default(self):
"""
target: test create partition (high level api) normal case
method: create collection and partition, insert, search and query
expected: run successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
partition_name = cf.gen_unique_str(partition_prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. create partition
client_w.create_partition(client, collection_name, partition_name)
partitions = client_w.list_partitions(client, collection_name)[0]
assert partition_name in partitions
index = client_w.list_indexes(client, collection_name)[0]
assert index == ['vector']
# load_state = client_w.get_load_state(collection_name)[0]
# 3. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
partition_names=partitions,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 5. query
res = client_w.query(client, collection_name, filter=default_search_exp,
output_fields=["vector"], partition_names=partitions,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})[0]
assert set(res[0].keys()) == {"ids", "vector"}
partition_number = client_w.get_partition_stats(client, collection_name, "_default")[0]
assert partition_number == default_nb
partition_number = client_w.get_partition_stats(client, collection_name, partition_name)[0]
assert partition_number[0]['value'] == 0
if client_w.has_partition(client, collection_name, partition_name)[0]:
client_w.release_partitions(client, collection_name, partition_name)
client_w.drop_partition(client, collection_name, partition_name)
if client_w.has_collection(client, collection_name)[0]:
client_w.drop_collection(client, collection_name)


@ -0,0 +1,240 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_query"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientQueryInvalid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_query_not_all_required_params(self):
"""
target: test query without filter or ids
method: create collection, insert, then query with neither ids nor filter
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"consistency_level": 0})
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 3. query without filter or ids
error = {ct.err_code: 65535, ct.err_msg: f"empty expression should be used with limit"}
client_w.query(client, collection_name,
check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientQueryValid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_query_default(self):
"""
target: test query (high level api) normal case
method: create connection, collection, insert, then query by ids and by filter
expected: query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 3. query using ids
client_w.query(client, collection_name, ids=[i for i in range(default_nb)],
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
# 4. query using filter
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_query_output_fields(self):
"""
target: test query (high level api) normal case
method: create connection, collection, insert, then query with specified output fields
expected: query successfully with the expected output fields
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 3. query using ids
client_w.query(client, collection_name, ids=[i for i in range(default_nb)],
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
# 4. query using filter
res = client_w.query(client, collection_name, filter=default_search_exp,
output_fields=[default_primary_key_field_name, default_float_field_name,
default_string_field_name, default_vector_field_name],
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})[0]
assert set(res[0].keys()) == {default_primary_key_field_name, default_vector_field_name,
default_float_field_name, default_string_field_name}
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_query_output_fields_all(self):
"""
target: test query (high level api) with output_fields=["*"]
method: create connection, collection, insert and query with all output fields
expected: query successfully and return all fields
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 3. query using ids
client_w.query(client, collection_name, ids=[i for i in range(default_nb)],
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
# 4. query using filter
res = client_w.query(client, collection_name, filter=default_search_exp,
output_fields=["*"],
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})[0]
assert set(res[0].keys()) == {default_primary_key_field_name, default_vector_field_name,
default_float_field_name, default_string_field_name}
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_query_limit(self):
"""
target: test query (high level api) with limit
method: create connection, collection, insert and query with limit
expected: query successfully and return no more than limit entities
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# 3. query using ids
limit = 5
client_w.query(client, collection_name, ids=[i for i in range(default_nb)],
limit=limit,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[:limit],
"with_vec": True,
"primary_field": default_primary_key_field_name[:limit]})
# 4. query using filter
client_w.query(client, collection_name, filter=default_search_exp,
limit=limit,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[:limit],
"with_vec": True,
"primary_field": default_primary_key_field_name[:limit]})[0]
client_w.drop_collection(client, collection_name)

View File

@ -0,0 +1,477 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
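# single wrapper instance shared by all milvus client test cases in this module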
client_w = HighLevelApiWrapper()
prefix = "milvus_client_api_search"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientSearchInvalid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are invalid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="pymilvus issue 1554")
def test_milvus_client_collection_invalid_primary_field(self):
"""
target: test high level api: client.create_collection
method: create collection with invalid primary field
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"}
client_w.create_collection(client, collection_name, default_dim, id_type="invalid",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_collection_string_auto_id(self):
"""
target: test high level api: client.create_collection
method: create collection with auto id on string primary key
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for varChar "
f"field of collection {collection_name}"}
client_w.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True,
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_create_same_collection_different_params(self):
"""
target: test high level api: client.create_collection
method: create the same collection twice with identical params, then once more with a different dim
expected: 1. Successfully create the collection with the same params
2. Report an error for creating a collection with the same name and different params
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. create collection with same params
client_w.create_collection(client, collection_name, default_dim)
# 3. create collection with same name and different params
error = {ct.err_code: 1, ct.err_msg: f"create duplicate collection with different parameters, "
f"collection: {collection_name}"}
client_w.create_collection(client, collection_name, default_dim+1,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_collection_invalid_metric_type(self):
"""
target: test high level api: client.create_collection
method: create collection with an invalid metric type
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535,
ct.err_msg: "metric type not found or not supported, supported: [L2 IP COSINE HAMMING JACCARD]"}
client_w.create_collection(client, collection_name, default_dim, metric_type="invalid",
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_search_not_consistent_metric_type(self, metric_type):
"""
target: test search with a metric type inconsistent with that of the index (default is IP)
method: create connection, collection, insert and search with an inconsistent metric type
expected: Raise exception
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim)
# 2. search
rng = np.random.default_rng(seed=19530)
vectors_to_search = rng.random((1, default_dim))
search_params = {"metric_type": metric_type}
error = {ct.err_code: 1100,
ct.err_msg: f"metric type not match: invalid parameter[expected=IP][actual={metric_type}]"}
client_w.search(client, collection_name, vectors_to_search, limit=default_limit,
search_params=search_params,
check_task=CheckTasks.err_res, check_items=error)
client_w.drop_collection(client, collection_name)
class TestMilvusClientSearchValid(TestcaseBase):
""" Test case of search interface """
@pytest.fixture(scope="function", params=[False, True])
def auto_id(self, request):
yield request.param
@pytest.fixture(scope="function", params=["COSINE", "L2"])
def metric_type(self, request):
yield request.param
"""
******************************************************************
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_search_query_default(self):
"""
target: test search (high level api) normal case
method: create connection, collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"consistency_level": 0})
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
# client_w.flush(client, collection_name)
# assert client_w.num_entities(client, collection_name)[0] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 4. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.release_collection(client, collection_name)
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_rename_search_query_default(self):
"""
target: test search and query after renaming the collection (high level api)
method: create connection, collection, rename collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = client_w.list_collections(client)[0]
assert collection_name in collections
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"consistency_level": 0})
old_name = collection_name
new_name = collection_name + "new"
client_w.rename_collection(client, old_name, new_name)
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, new_name, rows)
# client_w.flush(client, collection_name)
# assert client_w.num_entities(client, collection_name)[0] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, new_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 4. query
client_w.query(client, new_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.release_collection(client, new_name)
client_w.drop_collection(client, new_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_array_insert_search(self):
"""
target: test search on a collection with array fields (high level api)
method: create connection, collection with array fields, insert and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
collections = client_w.list_collections(client)[0]
assert collection_name in collections
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{
default_primary_key_field_name: i,
default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0,
default_int32_array_field_name: [i, i+1, i+2],
default_string_array_field_name: [str(i), str(i + 1), str(i + 2)]
} for i in range(default_nb)]
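# each row carries an int32 array and a string array field in addition to the vector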
client_w.insert(client, collection_name, rows)
# 3. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason="issue 25110")
def test_milvus_client_search_query_string(self):
"""
target: test search (high level api) for string primary key
method: create connection, collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length)
client_w.describe_collection(client, collection_name,
check_task=CheckTasks.check_describe_collection_property,
check_items={"collection_name": collection_name,
"dim": default_dim,
"auto_id": auto_id})
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
client_w.insert(client, collection_name, rows)
client_w.flush(client, collection_name)
assert client_w.num_entities(client, collection_name)[0] == default_nb
# 3. search
vectors_to_search = rng.random((1, default_dim))
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
# 4. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
def test_milvus_client_search_different_metric_types_not_specifying_in_search_params(self, metric_type, auto_id):
"""
target: test search with different metric types, without specifying the metric type in search params
method: create connection, collection with the given metric type, insert and search
expected: search successfully with limit(topK)
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id,
consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
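# when auto_id is enabled the server generates primary keys, so drop the client-supplied ids before insert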
if auto_id:
for row in rows:
row.pop(default_primary_key_field_name)
client_w.insert(client, collection_name, rows)
# 3. search
vectors_to_search = rng.random((1, default_dim))
# search_params = {"metric_type": metric_type}
client_w.search(client, collection_name, vectors_to_search, limit=default_limit,
output_fields=[default_primary_key_field_name],
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip("pymilvus issue #1866")
def test_milvus_client_search_different_metric_types_specifying_in_search_params(self, metric_type, auto_id):
"""
target: test search with different metric types, specifying the metric type in search params
method: create connection, collection with the given metric type, insert and search
expected: search successfully with limit(topK)
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id,
consistency_level="Strong")
# 2. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
if auto_id:
for row in rows:
row.pop(default_primary_key_field_name)
client_w.insert(client, collection_name, rows)
# 3. search
vectors_to_search = rng.random((1, default_dim))
search_params = {"metric_type": metric_type}
client_w.search(client, collection_name, vectors_to_search, limit=default_limit,
search_params=search_params,
output_fields=[default_primary_key_field_name],
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_ids(self):
"""
target: test delete (high level api)
method: create connection, collection, insert, delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. delete
delete_num = 3
client_w.delete(client, collection_name, ids=[i for i in range(delete_num)])
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
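# drop the deleted primary keys from the expected search ids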
for insert_id in range(delete_num):
if insert_id in insert_ids:
insert_ids.remove(insert_id)
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
# 5. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L1)
def test_milvus_client_delete_with_filters(self):
"""
target: test delete (high level api)
method: create connection, collection, insert, delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
# 2. insert
default_nb = 1000
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
pks = client_w.insert(client, collection_name, rows)[0]
# 3. delete
delete_num = 3
client_w.delete(client, collection_name, filter=f"id < {delete_num}")
# 4. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
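# the filter removes ids [0, delete_num), so exclude them from the expected search ids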
for insert_id in range(delete_num):
if insert_id in insert_ids:
insert_ids.remove(insert_id)
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})
# 5. query
client_w.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows[delete_num:],
"with_vec": True,
"primary_field": default_primary_key_field_name})
client_w.drop_collection(client, collection_name)

View File

@ -4305,6 +4305,8 @@ class TestCollectionMultipleVectorValid(TestcaseBase):
schema = cf.gen_collection_schema(fields=int_fields, auto_id=auto_id, shards_num=shards_num)
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.check_collection_property,
check_items={exp_name: c_name, exp_schema: schema})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_key", [ct.default_int64_field_name, ct.default_string_field_name])

View File

@ -71,7 +71,7 @@ class TestHighLevelApi(TestcaseBase):
method: create collection with invalid primary field
expected: Raise exception
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"}
@ -85,7 +85,7 @@ class TestHighLevelApi(TestcaseBase):
method: create collection with auto id on string primary key
expected: Raise exception
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for varChar "
@ -101,7 +101,7 @@ class TestHighLevelApi(TestcaseBase):
expected: 1. Successfully to create collection with same params
2. Report errors for creating collection with same name and different params
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
@ -121,7 +121,7 @@ class TestHighLevelApi(TestcaseBase):
method: create collection with auto id on string primary key
expected: Raise exception
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
error = {ct.err_code: 65535,
@ -136,7 +136,7 @@ class TestHighLevelApi(TestcaseBase):
method: create connection, collection, insert and search with not consistent metric type
expected: Raise exception
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
@ -164,7 +164,7 @@ class TestHighLevelApi(TestcaseBase):
method: create connection, collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
@ -186,7 +186,7 @@ class TestHighLevelApi(TestcaseBase):
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_high_level_api": True,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
@ -205,7 +205,7 @@ class TestHighLevelApi(TestcaseBase):
method: create connection, collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
@ -227,7 +227,7 @@ class TestHighLevelApi(TestcaseBase):
insert_ids = [i for i in range(default_nb)]
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_high_level_api": True,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
@ -240,7 +240,7 @@ class TestHighLevelApi(TestcaseBase):
method: create connection, collection, insert and search
expected: search/query successfully
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, id_type="string",
@ -260,7 +260,7 @@ class TestHighLevelApi(TestcaseBase):
vectors_to_search = rng.random((1, default_dim))
client_w.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_high_level_api": True,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
# 4. query
@ -278,7 +278,7 @@ class TestHighLevelApi(TestcaseBase):
method: create connection, collection, insert and search
expected: search successfully with limit(topK)
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type,
@ -299,7 +299,7 @@ class TestHighLevelApi(TestcaseBase):
search_params=search_params,
output_fields=[default_primary_key_field_name],
check_task=CheckTasks.check_search_results,
check_items={"enable_high_level_api": True,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"limit": default_limit})
client_w.drop_collection(client, collection_name)
@ -311,7 +311,7 @@ class TestHighLevelApi(TestcaseBase):
method: create connection, collection, insert delete, and search
expected: search/query successfully without deleted data
"""
client = self._connect(enable_high_level_api=True)
client = self._connect(enable_milvus_client_api=True)
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
@ -323,10 +323,10 @@ class TestHighLevelApi(TestcaseBase):
client_w.insert(client, collection_name, rows)[0]
pks = [i for i in range(default_nb)]
# 3. get first primary key
first_pk_data = client_w.get(client, collection_name, pks[0:1])
first_pk_data = client_w.get(client, collection_name, ids=pks[0:1])
# 4. delete
delete_num = 3
client_w.delete(client, collection_name, pks[0:delete_num])
client_w.delete(client, collection_name, ids=pks[0:delete_num])
# 5. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
@ -336,7 +336,7 @@ class TestHighLevelApi(TestcaseBase):
limit = default_nb - delete_num
client_w.search(client, collection_name, vectors_to_search, limit=default_nb,
check_task=CheckTasks.check_search_results,
check_items={"enable_high_level_api": True,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": limit})