test: [CP] add alter tests (#38659)

related issue: https://github.com/milvus-io/milvus/issues/38471

---------

Signed-off-by: yanliang567 <yanliang.qiao@zilliz.com>
pull/38676/head
yanliang567 2024-12-23 19:42:49 +08:00 committed by GitHub
parent d718bbb4de
commit 6884319d03
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
8 changed files with 567 additions and 29 deletions

View File

@ -53,13 +53,16 @@ class HighLevelApiWrapper:
return res, check_result return res, check_result
@trace() @trace()
def create_collection(self, client, collection_name, dimension, timeout=None, check_task=None, def create_collection(self, client, collection_name, dimension=None, primary_field_name='id',
id_type='int', vector_field_name='vector', metric_type='COSINE',
auto_id=False, schema=None, index_params=None, timeout=None, check_task=None,
check_items=None, **kwargs): check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name func_name = sys._getframe().f_code.co_name
res, check = api_request([client.create_collection, collection_name, dimension], **kwargs) res, check = api_request([client.create_collection, collection_name, dimension, primary_field_name,
id_type, vector_field_name, metric_type, auto_id, timeout, schema,
index_params], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check, check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name, dimension=dimension, collection_name=collection_name, dimension=dimension,
**kwargs).run() **kwargs).run()
@ -85,7 +88,6 @@ class HighLevelApiWrapper:
func_name = sys._getframe().f_code.co_name func_name = sys._getframe().f_code.co_name
res, check = api_request([client.insert, collection_name, data], **kwargs) res, check = api_request([client.insert, collection_name, data], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check, check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name, data=data,
**kwargs).run() **kwargs).run()
return res, check_result return res, check_result
@ -219,9 +221,9 @@ class HighLevelApiWrapper:
return res, check_result return res, check_result
@trace() @trace()
def list_indexes(self, client, collection_name, check_task=None, check_items=None, **kwargs): def list_indexes(self, client, collection_name, field_name=None, check_task=None, check_items=None, **kwargs):
func_name = sys._getframe().f_code.co_name func_name = sys._getframe().f_code.co_name
res, check = api_request([client.list_indexes, collection_name], **kwargs) res, check = api_request([client.list_indexes, collection_name, field_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_items, check, check_result = ResponseChecker(res, func_name, check_task, check_items, check,
collection_name=collection_name, collection_name=collection_name,
**kwargs).run() **kwargs).run()
@ -313,19 +315,6 @@ class HighLevelApiWrapper:
**kwargs).run() **kwargs).run()
return res, check_result return res, check_result
@trace()
def use_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout
kwargs.update({"timeout": timeout})
func_name = sys._getframe().f_code.co_name
res, check = api_request([client.use_database, db_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task,
check_items, check,
db_name=db_name,
**kwargs).run()
return res, check_result
@trace() @trace()
def create_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): def create_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs):
timeout = TIMEOUT if timeout is None else timeout timeout = TIMEOUT if timeout is None else timeout
@ -533,10 +522,7 @@ class HighLevelApiWrapper:
func_name = sys._getframe().f_code.co_name func_name = sys._getframe().f_code.co_name
res, check = api_request([client.using_database, db_name], **kwargs) res, check = api_request([client.using_database, db_name], **kwargs)
check_result = ResponseChecker(res, func_name, check_task, check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
check_items, check,
db_name=db_name,
**kwargs).run()
return res, check_result return res, check_result
def create_user(self, user_name, password, timeout=None, check_task=None, check_items=None, **kwargs): def create_user(self, user_name, password, timeout=None, check_task=None, check_items=None, **kwargs):
@ -678,3 +664,128 @@ class HighLevelApiWrapper:
role_name=role_name, object_type=object_type, privilege=privilege, role_name=role_name, object_type=object_type, privilege=privilege,
object_name=object_name, db_name=db_name, **kwargs).run() object_name=object_name, db_name=db_name, **kwargs).run()
return res, check_result return res, check_result
@trace()
def alter_index_properties(self, client, collection_name, index_name, properties, timeout=None,
                           check_task=None, check_items=None, **kwargs):
    """Wrap client.alter_index_properties and run the configured response check."""
    # Fall back to the module default timeout and forward it by keyword.
    kwargs["timeout"] = TIMEOUT if timeout is None else timeout
    func_name = sys._getframe().f_code.co_name
    call_args = [client.alter_index_properties, collection_name, index_name, properties]
    res, check = api_request(call_args, **kwargs)
    checked = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, checked
@trace()
def drop_index_properties(self, client, collection_name, index_name, property_keys, timeout=None,
                          check_task=None, check_items=None, **kwargs):
    """Wrap client.drop_index_properties and run the configured response check."""
    if timeout is None:
        timeout = TIMEOUT
    kwargs.update({"timeout": timeout})
    func_name = sys._getframe().f_code.co_name
    res, check = api_request([client.drop_index_properties, collection_name, index_name, property_keys],
                             **kwargs)
    # Return the raw response together with the checker verdict.
    return res, ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
@trace()
def alter_collection_properties(self, client, collection_name, properties, timeout=None,
                                check_task=None, check_items=None, **kwargs):
    """Wrap client.alter_collection_properties and run the configured response check."""
    kwargs["timeout"] = TIMEOUT if timeout is None else timeout
    func_name = sys._getframe().f_code.co_name
    target = [client.alter_collection_properties, collection_name, properties]
    res, check = api_request(target, **kwargs)
    check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, check_result
@trace()
def drop_collection_properties(self, client, collection_name, property_keys, timeout=None,
                               check_task=None, check_items=None, **kwargs):
    """Wrap client.drop_collection_properties and run the configured response check."""
    timeout = TIMEOUT if timeout is None else timeout
    # Consistency fix: forward timeout via kwargs like the sibling wrappers
    # (alter_index_properties etc.) instead of as a positional argument in the
    # api_request list; the client accepts timeout by keyword either way.
    kwargs.update({"timeout": timeout})
    func_name = sys._getframe().f_code.co_name
    res, check = api_request([client.drop_collection_properties, collection_name, property_keys],
                             **kwargs)
    check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, check_result
@trace()
def alter_collection_field(self, client, collection_name, field_name, field_params, timeout=None,
                           check_task=None, check_items=None, **kwargs):
    """Wrap client.alter_collection_field and run the configured response check."""
    timeout = TIMEOUT if timeout is None else timeout
    # Consistency fix: forward timeout via kwargs like the sibling wrappers
    # instead of appending it to the positional api_request arg list.
    kwargs.update({"timeout": timeout})
    func_name = sys._getframe().f_code.co_name
    res, check = api_request([client.alter_collection_field, collection_name, field_name, field_params],
                             **kwargs)
    check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, check_result
@trace()
def alter_database_properties(self, client, db_name, properties, timeout=None,
                              check_task=None, check_items=None, **kwargs):
    """Wrap client.alter_database_properties and run the configured response check."""
    if timeout is None:
        timeout = TIMEOUT
    kwargs["timeout"] = timeout
    func_name = sys._getframe().f_code.co_name
    res, check = api_request([client.alter_database_properties, db_name, properties], **kwargs)
    checked = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, checked
@trace()
def drop_database_properties(self, client, db_name, property_keys, timeout=None,
                             check_task=None, check_items=None, **kwargs):
    """Wrap client.drop_database_properties and run the configured response check."""
    kwargs.update({"timeout": TIMEOUT if timeout is None else timeout})
    func_name = sys._getframe().f_code.co_name
    call_args = [client.drop_database_properties, db_name, property_keys]
    res, check = api_request(call_args, **kwargs)
    return res, ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
@trace()
def create_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs):
    """Wrap client.create_database and run the configured response check."""
    # Extra options such as `properties` travel through **kwargs to the client call.
    kwargs["timeout"] = TIMEOUT if timeout is None else timeout
    func_name = sys._getframe().f_code.co_name
    res, check = api_request([client.create_database, db_name], **kwargs)
    checked = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, checked
@trace()
def describe_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs):
    """Wrap client.describe_database and run the configured response check."""
    if timeout is None:
        timeout = TIMEOUT
    kwargs.update({"timeout": timeout})
    func_name = sys._getframe().f_code.co_name
    res, check = api_request([client.describe_database, db_name], **kwargs)
    check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, check_result
@trace()
def drop_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs):
    """Wrap client.drop_database and run the configured response check."""
    kwargs["timeout"] = TIMEOUT if timeout is None else timeout
    func_name = sys._getframe().f_code.co_name
    target = [client.drop_database, db_name]
    res, check = api_request(target, **kwargs)
    return res, ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
@trace()
def list_databases(self, client, timeout=None, check_task=None, check_items=None, **kwargs):
    """Wrap client.list_databases and run the configured response check."""
    # No positional arguments beyond the bound client method itself.
    kwargs.update({"timeout": TIMEOUT if timeout is None else timeout})
    func_name = sys._getframe().f_code.co_name
    res, check = api_request([client.list_databases], **kwargs)
    checked = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run()
    return res, checked

View File

@ -103,6 +103,9 @@ class ResponseChecker:
elif self.check_task == CheckTasks.check_describe_collection_property: elif self.check_task == CheckTasks.check_describe_collection_property:
# describe collection interface(high level api) response check # describe collection interface(high level api) response check
result = self.check_describe_collection_property(self.response, self.func_name, self.check_items) result = self.check_describe_collection_property(self.response, self.func_name, self.check_items)
elif self.check_task == CheckTasks.check_collection_fields_properties:
# check field properties in describe collection response
result = self.check_collection_fields_properties(self.response, self.func_name, self.check_items)
elif self.check_task == CheckTasks.check_insert_result: elif self.check_task == CheckTasks.check_insert_result:
# check `insert` interface response # check `insert` interface response
@ -257,6 +260,32 @@ class ResponseChecker:
return True return True
@staticmethod
def check_collection_fields_properties(res, func_name, check_items):
"""
According to the check_items to check collection field properties of res, which return from func_name
:param res: actual response of client.describe_collection()
:type res: Collection
:param func_name: describe_collection
:type func_name: str
:param check_items: which field properties expected to be checked, like max_length etc.
:type check_items: dict, {field_name: {field_properties}, ...}
"""
exp_func_name = "describe_collection"
if func_name != exp_func_name:
log.warning("The function name is {} rather than {}".format(func_name, exp_func_name))
if len(check_items) == 0:
raise Exception("No expect values found in the check task")
if check_items.get("collection_name", None) is not None:
assert res["collection_name"] == check_items.get("collection_name")
for key in check_items.keys():
for field in res["fields"]:
if field["name"] == key:
assert field['params'].items() >= check_items[key].items()
return True
@staticmethod @staticmethod
def check_partition_property(partition, func_name, check_items): def check_partition_property(partition, func_name, check_items):
exp_func_name = "init_partition" exp_func_name = "init_partition"

View File

@ -282,6 +282,7 @@ class CheckTasks:
check_rg_property = "check_resource_group_property" check_rg_property = "check_resource_group_property"
check_describe_collection_property = "check_describe_collection_property" check_describe_collection_property = "check_describe_collection_property"
check_insert_result = "check_insert_result" check_insert_result = "check_insert_result"
check_collection_fields_properties = "check_collection_fields_properties"
class BulkLoadStates: class BulkLoadStates:

View File

@ -0,0 +1,363 @@
import multiprocessing
import numbers
import random
import numpy
import threading
import pytest
import pandas as pd
import decimal
from decimal import Decimal, getcontext
from time import sleep
import heapq
from base.client_base import TestcaseBase
from utils.util_log import test_log as log
from common import common_func as cf
from common import common_type as ct
from common.common_type import CaseLabel, CheckTasks
from utils.util_pymilvus import *
from common.constants import *
from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY
from base.high_level_api_wrapper import HighLevelApiWrapper
# Shared wrapper instance around the MilvusClient (high-level) API
client_w = HighLevelApiWrapper()

# Naming / sizing defaults reused across the alter tests
prefix = "milvus_client_api_index"
epsilon = ct.epsilon
default_nb = ct.default_nb
default_nb_medium = ct.default_nb_medium
default_nq = ct.default_nq
default_dim = ct.default_dim
default_limit = ct.default_limit
# Commonly used filter expressions
default_search_exp = "id >= 0"
exp_res = "exp_res"
default_search_string_exp = "varchar >= \"0\""
default_search_mix_exp = "int64 >= 0 && varchar >= \"0\""
default_invaild_string_exp = "varchar >= 0"
default_json_search_exp = "json_field[\"number\"] >= 0"
perfix_expr = 'varchar like "0%"'
# Default field names and search parameters for quick-setup collections
default_search_field = ct.default_float_vec_field_name
default_search_params = ct.default_search_params
default_primary_key_field_name = "id"
default_vector_field_name = "vector"
default_multiple_vector_field_name = "vector_new"
default_float_field_name = ct.default_float_field_name
default_bool_field_name = ct.default_bool_field_name
default_string_field_name = ct.default_string_field_name
default_int32_array_field_name = ct.default_int32_array_field_name
default_string_array_field_name = ct.default_string_array_field_name
class TestMilvusClientAlterIndex(TestcaseBase):
    """End-to-end checks for MilvusClient alter_index_properties / drop_index_properties."""

    @pytest.mark.tags(CaseLabel.L0)
    def test_milvus_client_alter_index_default(self):
        """
        target: test alter index
        method: 1. alter index after load
                   verify alter fail
                2. alter index after release
                   verify alter successfully
                3. drop index properties after load
                   verify drop fail
                4. drop index properties after release
                   verify drop successfully
        expected: alter successfully
        """
        client = self._connect(enable_milvus_client_api=True)
        collection_name = cf.gen_unique_str(prefix)
        # quick-setup collection creates an auto index on the default vector field
        client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
        idx_names, _ = client_w.list_indexes(client, collection_name, field_name=default_vector_field_name)
        client_w.load_collection(client, collection_name)
        res1 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0]
        # a freshly created index carries no mmap.enabled property
        assert res1.get('mmap.enabled', None) is None
        error = {ct.err_code: 999,
                 ct.err_msg: "can't alter index on loaded collection, please release the collection first"}
        # 1. alter index after load
        client_w.alter_index_properties(client, collection_name, idx_names[0], properties={"mmap.enabled": True},
                                        check_task=CheckTasks.err_res, check_items=error)
        client_w.drop_index_properties(client, collection_name, idx_names[0], property_keys=["mmap.enabled"],
                                       check_task=CheckTasks.err_res, check_items=error)
        client_w.release_collection(client, collection_name)
        # 2. alter index after release
        client_w.alter_index_properties(client, collection_name, idx_names[0], properties={"mmap.enabled": True})
        res2 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0]
        # NOTE(review): the server appears to return property values as strings,
        # hence the comparison against 'True' rather than True — confirm
        assert res2.get('mmap.enabled', None) == 'True'
        client_w.drop_index_properties(client, collection_name, idx_names[0], property_keys=["mmap.enabled"])
        res3 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0]
        assert res3.get('mmap.enabled', None) is None

    @pytest.mark.tags(CaseLabel.L1)
    def test_milvus_client_alter_index_unsupported_properties(self):
        """
        target: test alter index with unsupported properties
        method: 1. alter index with unsupported properties
        expected: raise exception
        """
        client = self._connect(enable_milvus_client_api=True)
        collection_name = cf.gen_unique_str(prefix)
        # 1. create collection
        schema = client_w.create_schema(client, enable_dynamic_field=False)[0]
        dim = 32
        pk_field_name = 'id_string'
        vector_field_name = 'embeddings'
        str_field_name = 'title'
        max_length = 16
        schema.add_field(pk_field_name, DataType.VARCHAR, max_length=max_length, is_primary=True, auto_id=False)
        schema.add_field(vector_field_name, DataType.FLOAT_VECTOR, dim=dim, mmap_enabled=True)
        schema.add_field(str_field_name, DataType.VARCHAR, max_length=max_length, mmap_enabled=True)
        index_params = client_w.prepare_index_params(client)[0]
        index_params.add_index(field_name=vector_field_name, metric_type="COSINE",
                               index_type="HNSW", params={"M": 16, "efConstruction": 100, "mmap.enabled": True})
        index_params.add_index(field_name=str_field_name)
        client_w.create_collection(client, collection_name, schema=schema, index_params=index_params)
        client_w.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties,
                                     check_items={str_field_name: {"max_length": max_length, "mmap_enabled": True},
                                                  vector_field_name: {"mmap_enabled": True}})
        client_w.release_collection(client, collection_name)
        properties = client_w.describe_index(client, collection_name, index_name=vector_field_name)[0]
        # every index property except mmap.enabled is expected to be rejected on alter
        for p in properties.items():
            if p[0] not in ["mmap.enabled"]:
                log.debug(f"try to alter index property: {p[0]}")
                error = {ct.err_code: 1, ct.err_msg: f"{p[0]} is not a configable index proptery"}
                # bump numbers, replace strings — any change should be refused
                new_value = p[1] + 1 if isinstance(p[1], numbers.Number) else "new_value"
                client_w.alter_index_properties(client, collection_name, vector_field_name,
                                                properties={p[0]: new_value},
                                                check_task=CheckTasks.err_res, check_items=error)

    @pytest.mark.tags(CaseLabel.L1)
    def test_milvus_client_alter_index_unsupported_value(self):
        """
        target: test alter index mmap.enabled with unsupported values
        method: alter index mmap.enabled with non-boolean values
        expected: raise exception
        """
        client = self._connect(enable_milvus_client_api=True)
        collection_name = cf.gen_unique_str(prefix)
        client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
        idx_names, _ = client_w.list_indexes(client, collection_name, field_name=default_vector_field_name)
        client_w.release_collection(client, collection_name)
        res1 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0]
        assert res1.get('mmap.enabled', None) is None
        # only true/false (case-insensitive booleans) are accepted for mmap.enabled
        unsupported_values = [None, [], '', 20, ' ', 0.01, "new_value"]
        for value in unsupported_values:
            error = {ct.err_code: 1, ct.err_msg: f"invalid mmap.enabled value: {value}, expected: true, false"}
            client_w.alter_index_properties(client, collection_name, idx_names[0],
                                            properties={"mmap.enabled": value},
                                            check_task=CheckTasks.err_res, check_items=error)
class TestMilvusClientAlterCollection(TestcaseBase):
    """End-to-end checks for MilvusClient alter/drop collection properties."""

    @pytest.mark.tags(CaseLabel.L0)
    def test_milvus_client_alter_collection_default(self):
        """
        target: test alter collection
        method:
            1. alter collection properties after load
               verify alter fails for lazyload.enabled and mmap.enabled;
               collection.ttl.seconds can still be dropped
            2. alter collection properties after release
               verify alter successfully
            3. drop collection properties after release
               verify drop successfully
        expected: alter successfully
        """
        client = self._connect(enable_milvus_client_api=True)
        collection_name = cf.gen_unique_str(prefix)
        client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
        client_w.load_collection(client, collection_name)
        res1 = client_w.describe_collection(client, collection_name)[0]
        assert res1.get('properties', None) == {}
        # 1. alter collection properties after load (the collection is already
        # loaded above; the previous duplicate load_collection call was removed)
        error = {ct.err_code: 999,
                 ct.err_msg: "can not alter mmap properties if collection loaded"}
        client_w.alter_collection_properties(client, collection_name, properties={"mmap.enabled": True},
                                             check_task=CheckTasks.err_res, check_items=error)
        client_w.alter_collection_properties(client, collection_name, properties={"lazyload.enabled": True},
                                             check_task=CheckTasks.err_res, check_items=error)
        error = {ct.err_code: 999,
                 ct.err_msg: "can not delete mmap properties if collection loaded"}
        client_w.drop_collection_properties(client, collection_name, property_keys=["mmap.enabled"],
                                            check_task=CheckTasks.err_res, check_items=error)
        client_w.drop_collection_properties(client, collection_name, property_keys=["lazyload.enabled"],
                                            check_task=CheckTasks.err_res, check_items=error)
        res3 = client_w.describe_collection(client, collection_name)[0]
        assert res3.get('properties', None) == {}
        client_w.drop_collection_properties(client, collection_name, property_keys=["collection.ttl.seconds"])
        # Bug fix: re-fetch the description after dropping the property; the old
        # code re-asserted on the stale `res3` snapshot, which could never fail.
        res3 = client_w.describe_collection(client, collection_name)[0]
        assert res3.get('properties', None) == {}
        # 2. alter collection properties after release
        client_w.release_collection(client, collection_name)
        client_w.alter_collection_properties(client, collection_name, properties={"mmap.enabled": True})
        res2 = client_w.describe_collection(client, collection_name)[0]
        # property values come back as strings
        assert res2.get('properties', None) == {'mmap.enabled': 'True'}
        client_w.alter_collection_properties(client, collection_name,
                                             properties={"collection.ttl.seconds": 100, "lazyload.enabled": True})
        res2 = client_w.describe_collection(client, collection_name)[0]
        assert res2.get('properties', None) == {'mmap.enabled': 'True',
                                                'collection.ttl.seconds': '100', 'lazyload.enabled': 'True'}
        client_w.drop_collection_properties(client, collection_name,
                                            property_keys=["mmap.enabled", "lazyload.enabled",
                                                           "collection.ttl.seconds"])
        res3 = client_w.describe_collection(client, collection_name)[0]
        assert res3.get('properties', None) == {}
class TestMilvusClientAlterCollectionField(TestcaseBase):
    """End-to-end checks for MilvusClient alter_collection_field (field-level properties)."""

    @pytest.mark.tags(CaseLabel.L0)
    def test_milvus_client_alter_collection_field_default(self):
        """
        target: test alter collection field before load
        method: alter varchar field max length
        expected: alter successfully
        """
        client = self._connect(enable_milvus_client_api=True)
        collection_name = cf.gen_unique_str(prefix)
        # 1. create collection with varchar pk, vector, varchar and json fields
        schema = client_w.create_schema(client, enable_dynamic_field=False)[0]
        dim = 32
        pk_field_name = 'id_string'
        vector_field_name = 'embeddings'
        str_field_name = 'title'
        json_field_name = 'json_field'
        max_length = 16
        schema.add_field(pk_field_name, DataType.VARCHAR, max_length=max_length, is_primary=True, auto_id=False)
        schema.add_field(vector_field_name, DataType.FLOAT_VECTOR, dim=dim, mmap_enabled=True)
        schema.add_field(str_field_name, DataType.VARCHAR, max_length=max_length, mmap_enabled=True)
        schema.add_field(json_field_name, DataType.JSON, mmap_enabled=False)
        index_params = client_w.prepare_index_params(client)[0]
        index_params.add_index(field_name=vector_field_name, metric_type="COSINE",
                               index_type="IVF_FLAT", params={"nlist": 128})
        index_params.add_index(field_name=str_field_name)
        client_w.create_collection(client, collection_name, schema=schema, index_params=index_params)
        # verify the initial per-field properties via describe_collection
        client_w.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties,
                                     check_items={str_field_name: {"max_length": max_length, "mmap_enabled": True},
                                                  vector_field_name: {"mmap_enabled": True},
                                                  json_field_name: {"mmap_enabled": False}})
        # fixed seed for reproducible vector data
        rng = np.random.default_rng(seed=19530)
        rows = [{
            pk_field_name: f'id_{i}',
            vector_field_name: list(rng.random((1, dim))[0]),
            str_field_name: cf.gen_str_by_length(max_length),
            json_field_name: {"number": i}
        } for i in range(default_nb)]
        client_w.insert(client, collection_name, rows)
        # 1. alter collection field before load
        client_w.release_collection(client, collection_name)
        new_max_length = max_length//2
        # TODO: use one format of mmap_enabled after #38443 fixed
        client_w.alter_collection_field(client, collection_name, field_name=str_field_name,
                                        field_params={"max_length": new_max_length, "mmap.enabled": False})
        client_w.alter_collection_field(client, collection_name, field_name=pk_field_name,
                                        field_params={"max_length": new_max_length})
        client_w.alter_collection_field(client, collection_name, field_name=json_field_name,
                                        field_params={"mmap.enabled": True})
        client_w.alter_collection_field(client, collection_name, field_name=vector_field_name,
                                        field_params={"mmap.enabled": False})
        # max_length is only meaningful for string fields
        error = {ct.err_code: 999, ct.err_msg: f"can not modify the maxlength for non-string types"}
        client_w.alter_collection_field(client, collection_name, field_name=vector_field_name,
                                        field_params={"max_length": new_max_length},
                                        check_task=CheckTasks.err_res, check_items=error)
        client_w.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties,
                                     check_items={str_field_name: {"max_length": new_max_length, "mmap_enabled": False},
                                                  vector_field_name: {"mmap_enabled": False},
                                                  json_field_name: {"mmap_enabled": True}})
        # verify that cannot insert data with the old max_length
        for alter_field in [pk_field_name, str_field_name]:
            error = {ct.err_code: 999, ct.err_msg: f"length of varchar field {alter_field} exceeds max length"}
            rows = [{
                pk_field_name: cf.gen_str_by_length(max_length) if alter_field == pk_field_name else f'id_{i}',
                vector_field_name: list(rng.random((1, dim))[0]),
                str_field_name: cf.gen_str_by_length(max_length) if alter_field == str_field_name else f'title_{i}',
                json_field_name: {"number": i}
            } for i in range(default_nb, default_nb+10)]
            client_w.insert(client, collection_name, rows, check_task=CheckTasks.err_res, check_items=error)
        # verify that can insert data with the new max_length
        rows = [{
            pk_field_name: f"new_{cf.gen_str_by_length(new_max_length-4)}",
            vector_field_name: list(rng.random((1, dim))[0]),
            str_field_name: cf.gen_str_by_length(new_max_length),
            json_field_name: {"number": i}
        } for i in range(default_nb, default_nb+10)]
        client_w.insert(client, collection_name, rows)
        # 2. alter collection field after load
        client_w.load_collection(client, collection_name)
        error = {ct.err_code: 999,
                 ct.err_msg: "can not alter collection field properties if collection loaded"}
        client_w.alter_collection_field(client, collection_name, field_name=str_field_name,
                                        field_params={"max_length": max_length, "mmap.enabled": True},
                                        check_task=CheckTasks.err_res, check_items=error)
        client_w.alter_collection_field(client, collection_name, field_name=vector_field_name,
                                        field_params={"mmap.enabled": True},
                                        check_task=CheckTasks.err_res, check_items=error)
        # NOTE(review): altering max_length alone appears to be allowed even when
        # loaded — confirm this is the intended server behavior
        client_w.alter_collection_field(client, collection_name, field_name=pk_field_name,
                                        field_params={"max_length": max_length})
        res = client_w.query(client, collection_name, filter=f"{pk_field_name} in ['id_10', 'id_20']",
                             output_fields=["*"])[0]
        assert (len(res)) == 2
        res = client_w.query(client, collection_name, filter=f"{pk_field_name} like 'new_%'",
                             output_fields=["*"])[0]
        assert(len(res)) == 10
class TestMilvusClientAlterDatabase(TestcaseBase):
    """End-to-end checks for MilvusClient alter/drop database properties."""

    @pytest.mark.tags(CaseLabel.L0)
    # @pytest.mark.skip("reason: need to fix #38469, #38471")
    def test_milvus_client_alter_database_default(self):
        """
        target: test alter database
        method:
            1. alter database properties before load
               alter successfully
            2. alter database properties after load
               alter successfully
        expected: alter successfully
        """
        client = self._connect(enable_milvus_client_api=True)
        collection_name = cf.gen_unique_str(prefix)
        client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong")
        client_w.release_collection(client, collection_name)
        default_db = 'default'
        res1 = client_w.describe_database(client, db_name=default_db)[0]
        # reset: a clean 'default' db description is assumed to carry exactly one
        # key (its name); drop any leftover properties from earlier runs
        if len(res1.keys()) != 1:
            client_w.drop_database_properties(client, db_name=default_db, property_keys=res1.keys())
        assert len(client_w.describe_database(client, default_db)[0].keys()) == 1
        # run the same scenario once with the collection loaded and once without
        for need_load in [True, False]:
            if need_load:
                log.debug("alter database after load collection")
                client_w.load_collection(client, collection_name)
            # 1. alter default database properties before load
            properties = {"key1": 1, "key2": "value2", "key3": [1, 2, 3], }
            client_w.alter_database_properties(client, db_name=default_db, properties=properties)
            res1 = client_w.describe_database(client, db_name=default_db)[0]
            # assert res1.properties.items() >= properties.items()
            assert len(res1.keys()) == 4
            my_db = cf.gen_unique_str(prefix)
            client_w.create_database(client, my_db, properties=properties)
            res1 = client_w.describe_database(client, db_name=my_db)[0]
            # assert res1.properties.items() >= properties.items()
            assert len(res1.keys()) == 4
            properties = {"key1": 2, "key2": "value3", "key3": [1, 2, 3], 'key4': 0.123}
            client_w.alter_database_properties(client, db_name=my_db, properties=properties)
            res1 = client_w.describe_database(client, db_name=my_db)[0]
            # assert res1.properties.items() >= properties.items()
            assert len(res1.keys()) == 5
            # drop the default database properties
            client_w.drop_database_properties(client, db_name=default_db, property_keys=["key1", "key2"])
            res1 = client_w.describe_database(client, db_name=default_db)[0]
            assert len(res1.keys()) == 2
            # dropping a non-existent key is expected to be a no-op, not an error
            client_w.drop_database_properties(client, db_name=default_db, property_keys=["key3", "key_non_exist"])
            res1 = client_w.describe_database(client, db_name=default_db)[0]
            assert len(res1.keys()) == 1
            # drop the user database
            client_w.drop_database(client, my_db)

View File

@ -601,7 +601,7 @@ class TestMilvusClientCollectionValid(TestcaseBase):
collections = client_w.list_collections(client)[0] collections = client_w.list_collections(client)[0]
assert collection_name in collections assert collection_name in collections
db_name = "new_db" db_name = "new_db"
client_w.use_database(client, db_name) client_w.using_database(client, db_name)
old_name = collection_name old_name = collection_name
new_name = collection_name + "new" new_name = collection_name + "new"
client_w.rename_collection(client, old_name, new_name, target_db=db_name) client_w.rename_collection(client, old_name, new_name, target_db=db_name)

View File

@ -27,8 +27,8 @@ pytest-parallel
pytest-random-order pytest-random-order
# pymilvus # pymilvus
pymilvus==2.5.1rc14 pymilvus==2.5.1rc25
pymilvus[bulk_writer]==2.5.1rc14 pymilvus[bulk_writer]==2.5.1rc25
# for customize config test # for customize config test

View File

@ -431,9 +431,9 @@ class TestQueryParams(TestcaseBase):
""" """
# 1. initialize with data # 1. initialize with data
nb = 2000 nb = 2000
collection_w, _vectors, _, insert_ids = self.init_collection_general(prefix, True, nb, collection_w, _vectors, _, insert_ids = \
enable_dynamic_field=enable_dynamic_field)[ self.init_collection_general(prefix, True, nb,
0:4] enable_dynamic_field=enable_dynamic_field)[0:4]
# filter result with expression in collection # filter result with expression in collection
_vectors = _vectors[0] _vectors = _vectors[0]

View File

@ -10955,6 +10955,40 @@ class TestCollectionHybridSearchValid(TestcaseBase):
"ids": insert_ids, "ids": insert_ids,
"limit": default_limit})[0] "limit": default_limit})[0]
@pytest.mark.tags(CaseLabel.L1)
def test_hybrid_search_normal_expr(self):
    """
    target: test hybrid search normal case
    method: create connection, collection, insert and search
    expected: hybrid search successfully with search param templates
    """
    # 1. initialize collection with data
    nq = 10
    collection_w, _, _, insert_ids, time_stamp = self.init_collection_general(prefix, True)[0:5]
    # 2. extract vector field name
    vector_name_list = cf.extract_vector_field_name_list(collection_w)
    vector_name_list.append(ct.default_float_vec_field_name)
    # 3. prepare search params
    req_list = []
    # NOTE(review): a single weight is supplied while req_list is built from
    # vector_name_list — assumes the list resolves to one field; confirm
    weights = [1]
    vectors = cf.gen_vectors_based_on_vector_type(nq, default_dim, "FLOAT_VECTOR")
    # 4. get hybrid search req list
    for i in range(len(vector_name_list)):
        search_param = {
            "data": vectors,
            "anns_field": vector_name_list[i],
            "param": {"metric_type": "COSINE"},
            "limit": default_limit,
            # expression template: the {value_0} placeholder is bound via expr_params
            "expr": "int64 > {value_0}",
            "expr_params": {"value_0": 0}
        }
        req = AnnSearchRequest(**search_param)
        req_list.append(req)
    # 5. hybrid search
    collection_w.hybrid_search(req_list, WeightedRanker(*weights), default_limit,
                               check_task=CheckTasks.check_search_results,
                               check_items={"nq": nq, "ids": insert_ids, "limit": default_limit})
@pytest.mark.tags(CaseLabel.L1) @pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="issue 32288") @pytest.mark.skip(reason="issue 32288")
@pytest.mark.parametrize("nq", [0, 16385]) @pytest.mark.parametrize("nq", [0, 16385])