test: Update get index params method (#40244)

Related issue: #40156
Remove the old get-index-params helper (`gen_simple_index`) and update the new one (`get_index_params_params`), switching the tests over to it.

---------

Signed-off-by: yanliang567 <yanliang.qiao@zilliz.com>
pull/40293/head
yanliang567 2025-03-03 14:40:05 +08:00 committed by GitHub
parent b046d44065
commit 2f7cce11a3
11 changed files with 78 additions and 384 deletions
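In short: tests no longer pull index configurations from the removed `gen_simple_index()` fixture; they look up the default build params for a given index type with `cf.get_index_params_params()` and assemble the index dict inline. A minimal sketch of that pattern, assuming the suite's `common_func`/`common_type` layout shown in the diffs below (the `build_index_params` wrapper name is illustrative, not part of this change):

# Sketch of the updated pattern used throughout the tests below; the wrapper
# name is illustrative only, the helpers and constants come from this commit.
from common import common_func as cf
from common import common_type as ct

def build_index_params(index_type: str, metric_type: str = "L2") -> dict:
    # get_index_params_params() returns a copy of the default build params
    # for the given index type, e.g. {"nlist": 128} for IVF_SQ8.
    params = cf.get_index_params_params(index_type)
    return {"index_type": index_type, "metric_type": metric_type, "params": params}

# Usage in a test (collection wrapper comes from the suite's base classes):
# collection_w.create_index(ct.default_float_vec_field_name, build_index_params("HNSW"))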


@@ -2219,21 +2219,6 @@ def gen_default_binary_list_data(nb=ct.default_nb, dim=ct.default_dim):
return data, binary_raw_values
def gen_simple_index():
index_params = []
for i in range(len(ct.all_index_types)):
if ct.all_index_types[i] in ct.binary_support:
continue
elif ct.all_index_types[i] in ct.sparse_support:
continue
elif ct.all_index_types[i] in ct.gpu_support:
continue
dic = {"index_type": ct.all_index_types[i], "metric_type": "L2"}
dic.update({"params": ct.default_all_indexes_params[i]})
index_params.append(dic)
return index_params
def gen_autoindex_params():
index_params = [
{},
@@ -2778,12 +2763,14 @@ def index_to_dict(index):
def get_index_params_params(index_type):
"""get default params of index params by index type"""
return ct.default_all_indexes_params[ct.all_index_types.index(index_type)].copy()
params = ct.default_all_indexes_params[ct.all_index_types.index(index_type)].copy()
return params
def get_search_params_params(index_type):
"""get default params of search params by index type"""
return ct.default_all_search_params_params[ct.all_index_types.index(index_type)].copy()
params = ct.default_all_search_params_params[ct.all_index_types.index(index_type)].copy()
return params
def assert_json_contains(expr, list_data):
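For reference, a small hedged example of how the two helpers above pair up in the tests that follow (the IVF_SQ8 build-params value matches the constants diff below; the search-params value is an assumption for illustration):

# The helpers return per-index-type copies, so callers can tweak the dicts
# freely without mutating the shared defaults in common_type.py (hence .copy()).
from common import common_func as cf

build_params = cf.get_index_params_params("IVF_SQ8")    # {"nlist": 128} per the constants diff below
search_params = {"params": cf.get_search_params_params("IVF_SQ8")}  # e.g. an nprobe setting (assumed)
index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": build_params}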


@@ -251,11 +251,11 @@ default_sparse_inverted_index = {"index_type": "SPARSE_INVERTED_INDEX", "metric_
"params": {"drop_ratio_build": 0.2}}
default_text_sparse_inverted_index = {"index_type": "SPARSE_INVERTED_INDEX", "metric_type": "BM25",
"params": {"drop_ratio_build": 0.2, "bm25_k1": 1.5, "bm25_b": 0.75,}}
default_search_params = {"params": default_all_search_params_params[2].copy()}
default_search_ip_params = {"metric_type": "IP", "params": default_all_search_params_params[2].copy()}
default_search_params = {"params": {"nlist": 128}}
default_search_ip_params = {"metric_type": "IP", "params": {"nlist": 128}}
default_search_binary_params = {"metric_type": "JACCARD", "params": {"nprobe": 32}}
default_index = {"index_type": "IVF_SQ8", "metric_type": default_L0_metric, "params": default_all_indexes_params[2].copy()}
default_binary_index = {"index_type": "BIN_IVF_FLAT", "metric_type": "JACCARD", "params": default_all_indexes_params[8].copy()}
default_index = {"index_type": "IVF_SQ8", "metric_type": default_L0_metric, "params": {"nlist": 128}}
default_binary_index = {"index_type": "BIN_IVF_FLAT", "metric_type": "JACCARD", "params": {"nlist": 64}}
default_diskann_index = {"index_type": "DISKANN", "metric_type": default_L0_metric, "params": {}}
default_diskann_search_params = {"params": {"search_list": 30}}
default_sparse_search_params = {"metric_type": "IP", "params": {"drop_ratio_search": "0.2"}}


@@ -234,11 +234,6 @@ def initialize_env(request):
param_info.prepare_param_info(host, port, handler, replica_num, user, password, secure, uri, token)
@pytest.fixture(params=cf.gen_simple_index())
def get_index_param(request):
yield request.param
# TODO: construct invalid index params for all index types
@pytest.fixture(params=[{"metric_type": "L3", "index_type": "IVF_FLAT"},
{"metric_type": "L2", "index_type": "IVF_FLAT", "params": {"nlist": -1}}])


@@ -1,94 +0,0 @@
# import datetime
# import pytest
#
# from base.client_base import TestcaseBase
# from common import common_func as cf
# from common import common_type as ct
# from common.common_type import CaseLabel
# from utils.util_log import test_log as log
# from pymilvus import utility
#
#
# rounds = 100
# per_nb = 100000
# default_field_name = ct.default_float_vec_field_name
# default_index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": {"nlist": 64}}
#
#
# class TestLoad(TestcaseBase):
# """ Test case of end to end"""
# @pytest.mark.tags(CaseLabel.L3)
# def test_load_default(self):
# name = 'load_test_collection_1'
# name2 = 'load_test_collection_2'
# # create
# # collection_w = self.init_collection_wrap(name=name)
# # collection_w2 = self.init_collection_wrap(name=name2)
# # assert collection_w.name == name
#
# for i in range(50):
# name = f"load_collection2_{i}"
# self.init_collection_wrap(name=name)
# log.debug(f"total collections: {len(utility.list_collections())}")
#
# # # insert
# # data = cf.gen_default_list_data(per_nb)
# # log.debug(f"data len: {len(data[0])}")
# # for i in range(rounds):
# # t0 = datetime.datetime.now()
# # ins_res, res = collection_w.insert(data, timeout=180)
# # tt = datetime.datetime.now() - t0
# # log.debug(f"round{i} insert: {len(ins_res.primary_keys)} entities in {tt}s")
# # assert res # and per_nb == len(ins_res.primary_keys)
# #
# # t0 = datetime.datetime.now()
# # ins_res2, res = collection_w2.insert(data, timeout=180)
# # tt = datetime.datetime.now() - t0
# # log.debug(f"round{i} insert2: {len(ins_res2.primary_keys)} entities in {tt}s")
# # assert res
# #
# # # flush
# # t0 = datetime.datetime.now()
# # log.debug(f"current collection num_entities: {collection_w.num_entities}")
# # tt = datetime.datetime.now() - t0
# # log.debug(f"round{i} flush in {tt}")
# #
# # t0 = datetime.datetime.now()
# # log.debug(f"current collection2 num_entities: {collection_w2.num_entities}")
# # tt = datetime.datetime.now() - t0
# # log.debug(f"round{i} flush2 in {tt}")
#
# # index, res = collection_w.create_index(default_field_name, default_all_indexes_params, timeout=60)
# # assert res
#
# # # search
# # collection_w.load()
# # search_vectors = cf.gen_vectors(1, ct.default_dim)
# # t0 = datetime.datetime.now()
# # res_1, _ = collection_w.search(data=search_vectors,
# # anns_field=ct.default_float_vec_field_name,
# # param={"nprobe": 16}, limit=1)
# # tt = datetime.datetime.now() - t0
# # log.debug(f"assert search: {tt}")
# # assert len(res_1) == 1
# # # collection_w.release()
# #
# # # index
# # collection_w.insert(cf.gen_default_dataframe_data(nb=5000))
# # assert collection_w.num_entities == len(data[0]) + 5000
# # _index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": {"nlist": 64}}
# # t0 = datetime.datetime.now()
# # index, _ = collection_w.create_index(field_name=ct.default_float_vec_field_name,
# # index_params=_index_params,
# # name=cf.gen_unique_str())
# # tt = datetime.datetime.now() - t0
# # log.debug(f"assert index: {tt}")
# # assert len(collection_w.indexes) == 1
# #
# # # query
# # term_expr = f'{ct.default_int64_field_name} in [3001,4001,4999,2999]'
# # t0 = datetime.datetime.now()
# # res, _ = collection_w.query(term_expr)
# # tt = datetime.datetime.now() - t0
# # log.debug(f"assert query: {tt}")
# # assert len(res) == 4


@@ -238,62 +238,9 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
# The following are valid base cases
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip("https://github.com/milvus-io/pymilvus/issues/1886")
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_all_indexes_params[:7]))
def test_milvus_client_index_default(self, index, params, metric_type):
"""
target: test index normal case
method: create connection, collection, create index, insert and search
expected: index/search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
self.release_collection(client, collection_name)
self.drop_index(client, collection_name, "vector")
res = self.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index_params = self.prepare_index_params(client)[0]
index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type)
# 3. create index
self.create_index(client, collection_name, index_params)
# 4. create same index twice
self.create_index(client, collection_name, index_params)
# 5. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
self.insert(client, collection_name, rows)
# 6. load collection
self.load_collection(client, collection_name)
# 7. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
self.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 8. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_all_indexes_params[:7]))
def test_milvus_client_index_with_params(self, index, params, metric_type):
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("index", ct.all_index_types[:7])
def test_milvus_client_index_with_params(self, index, metric_type):
"""
target: test index with user defined params
method: create connection, collection, index, insert and search
@@ -309,6 +256,7 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
assert res == []
# 2. prepare index params
index_params = self.prepare_index_params(client)[0]
params = cf.get_index_params_params(index_type=index)
index_params.add_index(field_name="vector", index_type=index, params=params, metric_type=metric_type)
# 3. create index
self.create_index(client, collection_name, index_params)
@@ -337,10 +285,8 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_all_indexes_params[:7]))
def test_milvus_client_index_after_insert(self, index, params, metric_type):
@pytest.mark.parametrize("index", ct.all_index_types[:7])
def test_milvus_client_index_after_insert(self, index, metric_type):
"""
target: test index after insert
method: create connection, collection, insert, index and search
@@ -359,6 +305,7 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
self.insert(client, collection_name, rows)
# 3. prepare index params
index_params = self.prepare_index_params(client)[0]
params = cf.get_index_params_params(index)
index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type, params=params)
# 4. create index
self.create_index(client, collection_name, index_params)
@@ -640,10 +587,7 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_all_indexes_params[:7]))
def test_milvus_client_index_drop_create_same_index(self, index, params, metric_type):
def test_milvus_client_index_drop_create_same_index(self):
"""
target: test index after drop and create same index twice
method: create connection, collection, create/drop/create index, insert and search
@@ -659,7 +603,7 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
assert res == []
# 2. prepare index params
index_params = self.prepare_index_params(client)[0]
index_params.add_index(field_name="vector", index_type=index, params=params, metric_type=metric_type)
index_params.add_index(field_name="vector", index_type="HNSW", metric_type="L2")
# 3. create index
self.create_index(client, collection_name, index_params)
# 4. drop index
@@ -688,56 +632,4 @@ class TestMilvusClientIndexValid(TestMilvusClientV2Base):
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
self.drop_collection(client, collection_name)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params",
zip(ct.all_index_types[:7],
ct.default_all_indexes_params[:7]))
def test_milvus_client_index_drop_create_different_index(self, index, params, metric_type):
"""
target: test index after drop and create different index twice
method: create connection, collection, create/drop/create index, insert and search
expected: index create/drop and search/query successfully
"""
client = self._client()
collection_name = cf.gen_unique_str(prefix)
# 1. create collection
self.create_collection(client, collection_name, default_dim, consistency_level="Strong")
self.release_collection(client, collection_name)
self.drop_index(client, collection_name, "vector")
res = self.list_indexes(client, collection_name)[0]
assert res == []
# 2. prepare index params
index_params = self.prepare_index_params(client)[0]
index_params.add_index(field_name="vector", metric_type=metric_type)
# 3. create index
self.create_index(client, collection_name, index_params)
# 4. drop index
self.drop_index(client, collection_name, "vector")
# 4. create different index
index_params.add_index(field_name="vector", index_type=index, params=params, metric_type=metric_type)
self.create_index(client, collection_name, index_params)
# 5. insert
rng = np.random.default_rng(seed=19530)
rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]),
default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)]
self.insert(client, collection_name, rows)
# 6. load collection
self.load_collection(client, collection_name)
# 7. search
vectors_to_search = rng.random((1, default_dim))
insert_ids = [i for i in range(default_nb)]
self.search(client, collection_name, vectors_to_search,
check_task=CheckTasks.check_search_results,
check_items={"enable_milvus_client_api": True,
"nq": len(vectors_to_search),
"ids": insert_ids,
"limit": default_limit})
# 8. query
self.query(client, collection_name, filter=default_search_exp,
check_task=CheckTasks.check_query_results,
check_items={exp_res: rows,
"with_vec": True,
"primary_field": default_primary_key_field_name})
self.drop_collection(client, collection_name)
self.drop_collection(client, collection_name)


@@ -3249,14 +3249,6 @@ class TestLoadPartition(TestcaseBase):
The following cases are used to test `load_collection` function
******************************************************************
"""
@pytest.fixture(
scope="function",
params=gen_simple_index()
)
def get_simple_index(self, request, connect):
return request.param
@pytest.fixture(
scope="function",
params=gen_binary_index()


@@ -424,17 +424,9 @@ class TestNewIndexBase(TestcaseBase):
The following cases are used to test `create_index` function
******************************************************************
"""
@pytest.fixture(
scope="function",
params=gen_simple_index()
)
def get_simple_index(self, request):
log.info(request.param)
return copy.deepcopy(request.param)
@pytest.mark.tags(CaseLabel.L1)
def test_create_index_new(self, get_simple_index):
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("index_type", ct.all_index_types[0:7])
def test_create_index_default(self, index_type):
"""
target: test create index interface
method: create collection and add entities in it, create index
@@ -442,34 +434,15 @@ class TestNewIndexBase(TestcaseBase):
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name, shards_num=1)
data = cf.gen_default_list_data(nb=5000)
data = cf.gen_default_list_data(nb=1500)
collection_w.insert(data=data)
log.debug(collection_w.num_entities)
if get_simple_index["index_type"] != "FLAT":
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason="The scenario in this case is not existed for each RPC is limited to 64 MB")
def test_annoy_index(self):
# The strange thing is that the indexnode crash is only reproduced when nb is 50000 and dim is 512
nb = 50000
dim = 512
fields = [cf.gen_int64_field(), cf.gen_float_vec_field(dim=dim)]
schema = cf.gen_collection_schema(fields, primary_field=ct.default_int64_field_name)
collection_w = self.init_collection_wrap(name=cf.gen_unique_str(), schema=schema)
# use python random to generate the data as usual doesn't reproduce
data = [[i for i in range(nb)], np.random.random([nb, dim]).tolist()]
collection_w.insert(data)
log.debug(collection_w.num_entities)
index_params = {"index_type": "ANNOY", "metric_type": "IP", "params": {"n_trees": 10}}
index_wrapper = ApiIndexWrapper()
index, _ = index_wrapper.init_index(collection_w.collection, ct.default_float_vec_field_name, index_params)
assert index.params == index_params
params = cf.get_index_params_params(index_type)
index_params = {"index_type": index_type, "metric_type": "L2", "params": params}
collection_w.create_index(ct.default_float_vec_field_name, index_params=index_params,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
@pytest.mark.tags(CaseLabel.L1)
def test_create_index_non_existed_field(self):
@@ -535,7 +508,7 @@ class TestNewIndexBase(TestcaseBase):
self.connection_wrap.remove_connection(ct.default_alias)
res_list, _ = self.connection_wrap.list_connections()
assert ct.default_alias not in res_list
collection_w.create_index(ct.default_float_vec_field_name, ct.default_all_indexes_params,
collection_w.create_index(ct.default_float_vec_field_name, ct.default_index,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 999, ct.err_msg: "should create connection first"})
@@ -583,20 +556,6 @@ class TestNewIndexBase(TestcaseBase):
for t in threads:
t.join()
@pytest.mark.tags(CaseLabel.L1)
def test_create_index_insert_flush(self, get_simple_index):
"""
target: test create index
method: create collection and create index, add entities in it
expected: create index ok, and count correct
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
data = cf.gen_default_list_data(default_nb)
collection_w.insert(data=data)
assert collection_w.num_entities == default_nb
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index)
@pytest.mark.tags(CaseLabel.L1)
def test_create_same_index_repeatedly(self):
"""
@@ -795,27 +754,8 @@ class TestNewIndexBase(TestcaseBase):
The following cases are used to test `drop_index` function
******************************************************************
"""
@pytest.mark.tags(CaseLabel.L0)
def test_drop_index(self, get_simple_index):
"""
target: test drop index interface
method: create collection and add entities in it, create index, call drop index
expected: return code 0, and default index param
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
data = cf.gen_default_list_data()
collection_w.insert(data=data)
if get_simple_index["index_type"] != "FLAT":
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
@pytest.mark.tags(CaseLabel.L2)
def test_drop_index_repeatedly(self, get_simple_index):
def test_drop_index_repeatedly(self):
"""
target: test drop index repeatedly
method: create index, call drop index, and drop again
@@ -823,14 +763,15 @@ class TestNewIndexBase(TestcaseBase):
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
if get_simple_index["index_type"] != "FLAT":
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
params = cf.get_index_params_params("HNSW")
index_params = {"index_type": "HNSW", "metric_type": "L2", "params": params}
collection_w.create_index(ct.default_float_vec_field_name, index_params,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
@pytest.mark.tags(CaseLabel.L2)
def test_drop_index_without_connect(self):
@@ -849,7 +790,7 @@ class TestNewIndexBase(TestcaseBase):
check_items={ct.err_code: 999, ct.err_msg: "should create connection first."})
@pytest.mark.tags(CaseLabel.L2)
def test_create_drop_index_repeatedly(self, get_simple_index):
def test_create_drop_index_repeatedly(self):
"""
target: test create / drop index repeatedly, use the same index params
method: create index, drop index, four times
@@ -857,13 +798,14 @@ class TestNewIndexBase(TestcaseBase):
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
if get_simple_index["index_type"] != "FLAT":
for i in range(4):
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
params = cf.get_index_params_params("HNSW")
index_params = {"index_type": "HNSW", "metric_type": "L2", "params": params}
for i in range(4):
collection_w.create_index(ct.default_float_vec_field_name, index_params,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
@pytest.mark.tags(CaseLabel.L0)
def test_create_PQ_without_nbits(self):
@@ -948,8 +890,8 @@ class TestNewIndexBase(TestcaseBase):
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index, params", zip(ct.all_index_types[:6], ct.default_all_indexes_params[:6]))
def test_drop_mmap_index(self, index, params):
@pytest.mark.parametrize("index", ct.all_index_types[:6])
def test_drop_mmap_index(self, index):
"""
target: disabling and re-enabling mmap for index
method: disabling and re-enabling mmap for index
@@ -957,6 +899,7 @@ class TestNewIndexBase(TestcaseBase):
"""
self._connect()
collection_w = self.init_collection_general(prefix, insert_data=True, is_index=False)[0]
params = cf.get_index_params_params(index)
default_index = {"index_type": index, "params": params, "metric_type": "L2"}
collection_w.create_index(field_name, default_index, index_name=f"mmap_index_{index}")
collection_w.alter_index(f"mmap_index_{index}", {'mmap.enabled': True})


@@ -766,9 +766,7 @@ class TestPartitionOperations(TestcaseBase):
assert not collection_w.has_partition(partition_name)[0]
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("data", [cf.gen_default_list_data(nb=3000)])
@pytest.mark.parametrize("index_param", cf.gen_simple_index())
def test_partition_drop_indexed_partition(self, data, index_param):
def test_partition_drop_indexed_partition(self):
"""
target: verify drop an indexed partition
method: 1. create a partition
@@ -786,11 +784,14 @@ class TestPartitionOperations(TestcaseBase):
assert collection_w.has_partition(partition_name)[0]
# insert data to partition
data = cf.gen_default_list_data(nb=3000)
ins_res, _ = partition_w.insert(data)
assert len(ins_res.primary_keys) == len(data[0])
# create index of collection
collection_w.create_index(ct.default_float_vec_field_name, index_param)
params = cf.get_index_params_params("IVF_SQ8")
index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": params}
collection_w.create_index(ct.default_float_vec_field_name, index_params)
# drop partition
partition_w.drop()
@@ -1022,9 +1023,7 @@ class TestPartitionOperations(TestcaseBase):
pass
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("data", [cf.gen_default_list_data(nb=3000)])
@pytest.mark.parametrize("index_param", cf.gen_simple_index())
def test_partition_delete_indexed_data(self, data, index_param):
def test_partition_delete_indexed_data(self):
"""
target: verify delete entities with an expression condition from an indexed partition
method: 1. create collection
@@ -1039,7 +1038,9 @@ class TestPartitionOperations(TestcaseBase):
collection_w = self.init_collection_wrap()
# create index of collection
collection_w.create_index(ct.default_float_vec_field_name, index_param)
params = cf.get_index_params_params("IVF_SQ8")
index_params = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": params}
collection_w.create_index(ct.default_float_vec_field_name, index_params)
# create partition
partition_name = cf.gen_unique_str(prefix)
@@ -1047,6 +1048,7 @@ class TestPartitionOperations(TestcaseBase):
assert collection_w.has_partition(partition_name)[0]
# insert data to partition
data = cf.gen_default_list_data(nb=3000)
ins_res, _ = partition_w.insert(data)
assert len(ins_res.primary_keys) == len(data[0])


@@ -107,7 +107,7 @@ class TestPartitionKeyParams(TestcaseBase):
# flush
collection_w.flush()
# build index
collection_w.create_index(field_name=vector_field.name, index_params=ct.default_index)
collection_w.create_index(field_name=vector_field.name, index_params=ct.default_flat_index)
if index_on_par_key_field:
collection_w.create_index(field_name=par_key_field, index_params={})
# load


@@ -4795,33 +4795,6 @@ class TestCollectionSearch(TestcaseBase):
assert res.get(ct.default_bool_field_name) is False
assert res.get(ct.default_string_field_name) == "abc"
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index", ct.all_index_types[1:4])
def test_search_repeatedly_ivf_index_same_limit(self, index):
"""
target: test create collection repeatedly
method: search twice, check the results is the same
expected: search results are as expected
"""
nb = 5000
limit = 30
# 1. create a collection
collection_w = self.init_collection_general(prefix, True, nb, is_index=False)[0]
# 2. insert data again
params = cf.get_index_params_params(index)
index_params = {"metric_type": "COSINE", "index_type": index, "params": params}
collection_w.create_index(default_search_field, index_params)
# 3. search with param ignore_growing=True
collection_w.load()
search_params = cf.gen_search_param(index, "COSINE")[0]
vector = [[random.random() for _ in range(default_dim)] for _ in range(default_nq)]
res1 = collection_w.search(vector[:default_nq], default_search_field, search_params, limit)[0]
res2 = collection_w.search(vector[:default_nq], default_search_field, search_params, limit)[0]
for i in range(default_nq):
assert res1[i].ids == res2[i].ids
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("index", ct.all_index_types[1:4])
def test_search_repeatedly_ivf_index_different_limit(self, index):
@@ -4848,6 +4821,10 @@ class TestCollectionSearch(TestcaseBase):
res2 = collection_w.search(vector, default_search_field, search_params, limit * 2)[0]
for i in range(default_nq):
assert res1[i].ids == res2[i].ids[:limit]
# search again with the previous limit
res3 = collection_w.search(vector, default_search_field, search_params, limit)[0]
for i in range(default_nq):
assert res1[i].ids == res3[i].ids
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("metrics", ct.binary_metrics[:2])


@@ -761,17 +761,17 @@ def gen_index():
return index_params
def gen_simple_index():
index_params = []
for i in range(len(all_index_types)):
if all_index_types[i] in binary_support():
continue
if all_index_types[i] in gpu_support():
continue
dic = {"index_type": all_index_types[i], "metric_type": "L2"}
dic.update({"params": default_index_params[i]})
index_params.append(dic)
return index_params
# def gen_simple_index():
# index_params = []
# for i in range(len(all_index_types)):
# if all_index_types[i] in binary_support():
# continue
# if all_index_types[i] in gpu_support():
# continue
# dic = {"index_type": all_index_types[i], "metric_type": "L2"}
# dic.update({"params": default_index_params[i]})
# index_params.append(dic)
# return index_params
def gen_binary_index():