mirror of https://github.com/milvus-io/milvus.git
test: Add index names tests for multiple vectors support (#33250)
Related issue: https://github.com/milvus-io/milvus/issues/32653
1. Update index name tests
2. Remove some time.sleep calls
Signed-off-by: yanliang567 <yanliang.qiao@zilliz.com>
pull/33231/head
parent
155cb40fc9
commit
00cd88e164
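The tests changed below exercise the index-name rules for collections with multiple vector fields: the default index name equals the field name, at most one index is allowed per field, and an index name cannot be reused on a different field. The following is a minimal illustrative sketch of that behavior and is not part of the commit; it assumes a locally running Milvus and the pymilvus Collection API, and the names "demo_index_names", "vec_a", "vec_b", and "my_index" are made up for the example.

# Hedged sketch of the index-name rules asserted by the tests in this commit.
from pymilvus import connections, Collection, CollectionSchema, FieldSchema, DataType

connections.connect(host="localhost", port="19530")  # assumes a local Milvus instance

fields = [
    FieldSchema("pk", DataType.INT64, is_primary=True),
    FieldSchema("vec_a", DataType.FLOAT_VECTOR, dim=64),
    FieldSchema("vec_b", DataType.FLOAT_VECTOR, dim=32),
]
coll = Collection("demo_index_names", CollectionSchema(fields))  # hypothetical collection name

index_params = {"index_type": "IVF_FLAT", "metric_type": "L2", "params": {"nlist": 128}}

# An explicitly named index on one vector field succeeds; created without a name,
# the default index name equals the field name.
coll.create_index("vec_a", index_params, index_name="my_index")

# Re-using the same index name on a different field is rejected by the server
# (the tests below expect "at most one distinct index is allowed per field").
try:
    coll.create_index("vec_b", index_params, index_name="my_index")
except Exception as e:
    print("expected failure:", e)

In the updated tests, creating the index again with the same name and the same parameters is accepted, while the same name with different parameters is rejected.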
@ -226,7 +226,6 @@ class ApiCollectionWrapper:
@trace()
def query(self, expr, output_fields=None, partition_names=None, timeout=None, check_task=None, check_items=None,
**kwargs):
# time.sleep(5)
timeout = TIMEOUT if timeout is None else timeout

func_name = sys._getframe().f_code.co_name

@ -240,7 +239,6 @@ class ApiCollectionWrapper:
@trace()
def query_iterator(self, batch_size=1000, limit=-1, expr=None, output_fields=None, partition_names=None, timeout=None,
check_task=None, check_items=None, **kwargs):
# time.sleep(5)
timeout = TIMEOUT if timeout is None else timeout

func_name = sys._getframe().f_code.co_name
@ -106,9 +106,11 @@ class TestCollectionParams(TestcaseBase):
expected: raise exception
"""
self._connect()
error = {ct.err_code: 1, ct.err_msg: "Invalid collection name: {}".format(name)}
if name is not None and name.strip() == "":
error = {ct.err_code: 1, ct.err_msg: "collection name should not be empty"}
error = {ct.err_code: 999, ct.err_msg: f"Invalid collection name: {name}"}
if name in [None, ""]:
error = {ct.err_code: 999, ct.err_msg: f"`collection_name` value {name} is illegal"}
if name in [" "]:
error = {ct.err_code: 999, ct.err_msg: f"collection name should not be empty"}
self.collection_wrap.init_collection(name, schema=default_schema, check_task=CheckTasks.err_res,
check_items=error)

@ -161,8 +163,8 @@ class TestCollectionParams(TestcaseBase):
check_items={exp_name: c_name, exp_schema: default_schema})
fields = [cf.gen_int64_field(is_primary=True)]
schema = cf.gen_collection_schema(fields=fields)
error = {ct.err_code: 0, ct.err_msg: "The collection already exist, but the schema is not the same as the "
"schema passed in."}
error = {ct.err_code: 999, ct.err_msg: "The collection already exist, but the schema is not the same as the "
"schema passed in."}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L2)
@ -382,7 +384,7 @@ class TestCollectionParams(TestcaseBase):
self._connect()
c_name = cf.gen_unique_str(prefix)
schema = cf.gen_collection_schema([cf.gen_int64_field(is_primary=True)])
error = {ct.err_code: 0, ct.err_msg: "No vector field is found."}
error = {ct.err_code: 999, ct.err_msg: "No vector field is found."}
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L1)

@ -428,7 +430,7 @@ class TestCollectionParams(TestcaseBase):
check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", ["12-s", "12 s", "(mn)", "中文", "%$#", "a".join("a" for i in range(256))])
@pytest.mark.parametrize("primary_field", ["12-s", "non_existing", "(mn)", "中文", None])
def test_collection_invalid_primary_field(self, primary_field):
"""
target: test collection with invalid primary_field
@ -437,12 +439,12 @@ class TestCollectionParams(TestcaseBase):
"""
self._connect()
fields = [cf.gen_int64_field(), cf.gen_float_vec_field()]
error = {ct.err_code: 1, ct.err_msg: "Schema must have a primary key field."}
error = {ct.err_code: 999, ct.err_msg: "Schema must have a primary key field"}
self.collection_schema_wrap.init_collection_schema(fields=fields, primary_field=primary_field,
check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [[], 1, [1, "2", 3], (1,), {1: 1}, None])
@pytest.mark.parametrize("primary_field", [[], 1, [1, "2", 3], (1,), {1: 1}])
def test_collection_non_string_primary_field(self, primary_field):
"""
target: test collection with non-string primary_field
@ -451,25 +453,10 @@ class TestCollectionParams(TestcaseBase):
"""
self._connect()
fields = [cf.gen_int64_field(), cf.gen_float_vec_field()]
error = {ct.err_code: 1, ct.err_msg: "Param primary_field must be str type."}
error = {ct.err_code: 999, ct.err_msg: "Param primary_field must be int or str type"}
self.collection_schema_wrap.init_collection_schema(fields, primary_field=primary_field,
check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L2)
def test_collection_not_existed_primary_field(self):
"""
target: test collection with not exist primary field
method: specify not existed field as primary_field
expected: raise exception
"""
self._connect()
fake_field = cf.gen_unique_str()
fields = [cf.gen_int64_field(), cf.gen_float_vec_field()]
error = {ct.err_code: 1, ct.err_msg: "Schema must have a primary key field."}

self.collection_schema_wrap.init_collection_schema(fields, primary_field=fake_field,
check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L0)
def test_collection_primary_in_schema(self):
"""
@ -506,7 +493,7 @@ class TestCollectionParams(TestcaseBase):
self._connect()
field = get_unsupported_primary_field
vec_field = cf.gen_float_vec_field(name="vec")
error = {ct.err_code: 1, ct.err_msg: "Primary key type must be DataType.INT64 or DataType.VARCHAR."}
error = {ct.err_code: 999, ct.err_msg: "Primary key type must be DataType.INT64 or DataType.VARCHAR."}
self.collection_schema_wrap.init_collection_schema(fields=[field, vec_field], primary_field=field.name,
check_task=CheckTasks.err_res, check_items=error)

@ -520,7 +507,7 @@ class TestCollectionParams(TestcaseBase):
self._connect()
int_field_one = cf.gen_int64_field(is_primary=True)
int_field_two = cf.gen_int64_field(name="int2", is_primary=True)
error = {ct.err_code: 0, ct.err_msg: "Expected only one primary key field"}
error = {ct.err_code: 999, ct.err_msg: "Expected only one primary key field"}
self.collection_schema_wrap.init_collection_schema(
fields=[int_field_one, int_field_two, cf.gen_float_vec_field()],
check_task=CheckTasks.err_res, check_items=error)

@ -536,7 +523,7 @@ class TestCollectionParams(TestcaseBase):
int_field_one = cf.gen_int64_field(is_primary=True)
int_field_two = cf.gen_int64_field(name="int2")
fields = [int_field_one, int_field_two, cf.gen_float_vec_field()]
error = {ct.err_code: 1, ct.err_msg: "Expected only one primary key field"}
error = {ct.err_code: 999, ct.err_msg: "Expected only one primary key field"}
self.collection_schema_wrap.init_collection_schema(fields, primary_field=int_field_two.name,
check_task=CheckTasks.err_res, check_items=error)
@ -597,7 +584,7 @@ class TestCollectionParams(TestcaseBase):
expected: raise exception
"""
self._connect()
error = {ct.err_code: 0, ct.err_msg: "auto_id can only be specified on the primary key field"}
error = {ct.err_code: 999, ct.err_msg: "auto_id can only be specified on the primary key field"}
self.field_schema_wrap.init_field_schema(name=ct.default_int64_field_name, dtype=DataType.INT64, auto_id=True,
check_task=CheckTasks.err_res, check_items=error)

@ -616,19 +603,21 @@ class TestCollectionParams(TestcaseBase):
assert not schema.auto_id

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.xfail(reason="issue 24578")
def test_collection_auto_id_inconsistent(self):
@pytest.mark.xfail(reason="pymilvus issue, should use fieldschema as top priority")
@pytest.mark.parametrize("auto_id", [True, False])
def test_collection_auto_id_inconsistent(self, auto_id):
"""
target: test collection auto_id with both collection schema and field schema
method: 1.set primary field auto_id=True in field schema 2.set auto_id=False in collection schema
expected: raise exception
"""
self._connect()
int_field = cf.gen_int64_field(is_primary=True, auto_id=True)
int_field = cf.gen_int64_field(is_primary=True, auto_id=auto_id)
vec_field = cf.gen_float_vec_field(name='vec')
schema, _ = self.collection_schema_wrap.init_collection_schema([int_field, vec_field], auto_id=not auto_id)
collection_w = self.collection_wrap.init_collection(cf.gen_unique_str(prefix), schema=schema)[0]

schema, _ = self.collection_schema_wrap.init_collection_schema([int_field, vec_field], auto_id=False)
assert schema.auto_id
assert collection_w.schema.auto_id is auto_id

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("auto_id", [True, False])
@ -718,7 +707,7 @@ class TestCollectionParams(TestcaseBase):
self.collection_wrap.init_collection(c_name, schema=schema, check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("dim", [-1, 0, 32769])
@pytest.mark.parametrize("dim", [ct.min_dim-1, ct.max_dim+1])
def test_collection_vector_out_bounds_dim(self, dim):
"""
target: test collection with out of bounds dim
@ -210,12 +210,15 @@ class TestIndexOperation(TestcaseBase):
"""
target: Test create indexes for different fields
method: create two different indexes with default index name
expected: create successfully
expected: create successfully, and the default index name equals to field name
"""
collection_w = self.init_collection_general(prefix, True, is_index=False)[0]
default_index = {"index_type": "IVF_FLAT", "params": {"nlist": 128}, "metric_type": "L2"}
collection_w = self.init_collection_general(prefix, True, nb=200, is_index=False)[0]
default_index = ct.default_index
collection_w.create_index(default_field_name, default_index)
collection_w.create_index(ct.default_int64_field_name, {})
assert len(collection_w.indexes) == 2
for index in collection_w.indexes:
assert index.field_name == index.index_name

@pytest.mark.tags(CaseLabel.L1)
def test_index_create_on_scalar_field(self):
@ -224,7 +227,7 @@ class TestIndexOperation(TestcaseBase):
method: create index on scalar field and load
expected: raise exception
"""
collection_w = self.init_collection_general(prefix, True, is_index=False)[0]
collection_w = self.init_collection_general(prefix, True, nb=200, is_index=False)[0]
collection_w.create_index(ct.default_int64_field_name, {})
collection_w.load(check_task=CheckTasks.err_res,
check_items={ct.err_code: 65535,

@ -256,7 +259,6 @@ class TestIndexOperation(TestcaseBase):
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
index, _ = self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params)
# TODO: assert index
cf.assert_equal_index(index, collection_w.collection.indexes[0])

@pytest.mark.tags(CaseLabel.L1)

@ -273,7 +275,6 @@ class TestIndexOperation(TestcaseBase):
collection_w.insert(data=data)
index_params = index_param
index, _ = self.index_wrap.init_index(collection_w.collection, default_field_name, index_params)
# TODO: assert index
cf.assert_equal_index(index, collection_w.collection.indexes[0])

@pytest.mark.tags(CaseLabel.L1)
@ -294,78 +295,76 @@ class TestIndexOperation(TestcaseBase):
cf.assert_equal_index(index, collection_w.collection.indexes[0])
assert collection_w.num_entities == ct.default_nb

# TODO: not support
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason='not supported')
def test_index_name_dup(self):
"""
target: test index with duplicate index name
method: create index with existed index name create by `collection.create_index`
method: create index with existed index name and different index params
expected: raise exception
create index with the same index name and same index params
expected: no exception raised
"""
c_name = cf.gen_unique_str(prefix)
index_name = ct.default_index_name
collection_w = self.init_collection_wrap(name=c_name)
collection_w.collection.create_index(default_field_name, default_index_params, index_name=index_name)
self.index_wrap.init_index(collection_w.collection, default_field_name, default_index_params,
params = cf.get_index_params_params("HNSW")
index_params = {"index_type": "HNSW", "metric_type": "L2", "params": params}
params2 = cf.get_index_params_params("HNSW")
params2.update({"M": 16, "efConstruction": 200})
index_params2 = {"index_type": "HNSW", "metric_type": "L2", "params": params2}
collection_w.collection.create_index(default_field_name, index_params, index_name=index_name)

# create index with the same index name and different index params
error = {ct.err_code: 999, ct.err_msg: "at most one distinct index is allowed per field"}
self.index_wrap.init_index(collection_w.collection, default_field_name, index_params2, index_name=index_name,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1, ct.err_msg: ""})
check_items=error)
# create index with the same index name and same index params
self.index_wrap.init_index(collection_w.collection, default_field_name, index_params)

# TODO: server not supported
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason='not supported')
def test_index_field_names(self):
def test_index_same_name_on_diff_fields(self):
"""
target: test index on one field, with two indexes
method: create index with two different indexes
expected: no exception raised
target: verify index with the same name on different fields is not supported
method: create index with index name A on fieldA, create index with index name A on fieldB
expected: raise exception
"""
pass
# collection_w, _ = self.init_collection_general(prefix, dim=64, insert_data=False, is_index=False,
# multiple_dim_array=[32])
id_field = cf.gen_int64_field(name="id", is_primary=True)
vec_field = cf.gen_float_vec_field(name="vec_field", dim=64)
vec_field2 = cf.gen_float_vec_field(name="vec_field2", dim=32)
str_field = cf.gen_string_field(name="str_field")
str_field2 = cf.gen_string_field(name="str_field2")
schema, _ = self.collection_schema_wrap.init_collection_schema([id_field, vec_field, vec_field2, str_field, str_field2])
collection_w = self.init_collection_wrap(schema=schema)
vec_index = ct.default_index
vec_index_name = "my_index"
# TODO: server not supported
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason='not supported')
def test_index_fields(self):
"""
target: test index on two fields, with the same name
method: create the same index name with two different fields
expected: exception raised
"""
pass
# create same index name on different vector fields
error = {ct.err_code: 999, ct.err_msg: "at most one distinct index is allowed per field"}
collection_w.create_index(vec_field.name, vec_index, index_name=vec_index_name)
collection_w.create_index(vec_field2.name, vec_index, index_name=vec_index_name,
check_task=CheckTasks.err_res,
check_items=error)

# TODO: server not supported
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason='not supported')
def test_index_fields_B(self):
"""
target: test index on two fields, with the different name
method: create the different index with two different fields
expected: no exception raised
"""
pass
# create same index name on different scalar fields
collection_w.create_index(str_field.name, index_name=vec_index_name,
check_task=CheckTasks.err_res,
check_items=error)

# TODO: server not supported
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason='not supported')
def test_index_field_names_eq_maximum(self):
"""
target: test index on one field, with the different names, num of the names equal to the maximum num supported
method: create the different indexes
expected: no exception raised
"""
pass
# create same scalar index name on different scalar fields
index_name = "scalar_index"
collection_w.create_index(str_field.name, index_name=index_name)
collection_w.create_index(str_field2.name, index_name=index_name,
check_task=CheckTasks.err_res,
check_items=error)
all_indexes = collection_w.indexes
assert len(all_indexes) == 2
assert all_indexes[0].index_name != all_indexes[1].index_name
for index in all_indexes:
assert index.index_name in [vec_index_name, index_name]

# TODO: server not supported
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.skip(reason='not supported')
def test_index_field_names_more_maximum(self):
"""
target: test index on one field, with the different names, num of the names more than the maximum num supported
method: create the different indexes
expected: exception raised
"""
pass

@pytest.mark.tags(CaseLabel.L1)
def test_index_drop_index(self):
"""
@ -381,7 +380,6 @@ class TestIndexOperation(TestcaseBase):
assert len(collection_w.indexes) == 0

@pytest.mark.tags(CaseLabel.L1)
# TODO #7372
def test_index_drop_repeatedly(self):
"""
target: test index.drop
@ -417,52 +415,6 @@ class TestIndexAdvanced(TestcaseBase):
assert cf.assert_equal_index(index_2, cw2.collection.indexes[0])
assert len(cw.collection.indexes) == 0

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason='TODO')
def test_index_drop_during_inserting(self):
"""
target: test index.drop during inserting
method: create indexes by `index`, and then drop it during inserting entities, make sure async insert
expected: no exception raised, insert success
"""
pass

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason='TODO')
def test_index_drop_during_searching(self):
"""
target: test index.drop during searching
method: create indexes by `index`, and then drop it during searching, make sure async search
expected: no exception raised, search success
"""
pass

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason='TODO')
def test_index_recovery_after_restart(self):
"""
target: test index still existed after server restart
method: create index by `index`, and then restart server, assert index existed
expected: index in collection.indexes
"""
pass

@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.skip(reason='TODO')
def test_index_building_after_restart(self):
"""
target: index can still build if not finished before server restart
method: create index by `index`, and then restart server, assert server is indexing
expected: index build finished after server restart
"""
pass

"""
******************************************************************
The following classes are copied from pymilvus test
******************************************************************
"""


@pytest.mark.tags(CaseLabel.GPU)
class TestNewIndexBase(TestcaseBase):
@ -532,22 +484,10 @@ class TestNewIndexBase(TestcaseBase):
collection_w.create_index(ct.default_int8_field_name, default_index_params,
index_name=ct.default_index_name,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
check_items={ct.err_code: 999,
ct.err_msg: "cannot create index on non-existed field: int8"}
)

@pytest.mark.tags(CaseLabel.L1)
def test_create_index_no_vectors(self):
"""
target: test create index interface
method: create collection and add entities in it, create index
expected: return success
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
collection_w.create_index(ct.default_float_vec_field_name, default_index_params,
index_name=ct.default_index_name)

@pytest.mark.tags(CaseLabel.L1)
def test_create_index_partition(self):
"""

@ -597,7 +537,7 @@ class TestNewIndexBase(TestcaseBase):
assert ct.default_alias not in res_list
collection_w.create_index(ct.default_float_vec_field_name, ct.default_all_indexes_params,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1, ct.err_msg: "should create connect first"})
check_items={ct.err_code: 999, ct.err_msg: "should create connection first"})

@pytest.mark.tags(CaseLabel.L1)
def test_create_index_search_with_query_vectors(self):
@ -686,7 +626,7 @@ class TestNewIndexBase(TestcaseBase):
collection_w.create_index(ct.default_float_vec_field_name, default_index_params, index_name="a")
collection_w.create_index(ct.default_float_vec_field_name, default_index_params, index_name="b",
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
check_items={ct.err_code: 999,
ct.err_msg: "CreateIndex failed: creating multiple indexes on same field is not supported"})

@pytest.mark.tags(CaseLabel.L1)

@ -722,18 +662,6 @@ class TestNewIndexBase(TestcaseBase):
collection_w.insert(data=data)
collection_w.create_index(ct.default_float_vec_field_name, default_ip_index_params)

@pytest.mark.tags(CaseLabel.L1)
def test_create_index_no_vectors_ip(self):
"""
target: test create index interface
method: create collection and add entities in it, create index
expected: return success
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
collection_w.create_index(ct.default_float_vec_field_name, default_ip_index_params,
index_name=ct.default_index_name)

@pytest.mark.tags(CaseLabel.L1)
def test_create_index_partition_ip(self):
"""

@ -750,7 +678,7 @@ class TestNewIndexBase(TestcaseBase):
assert len(ins_res.primary_keys) == len(data[0])
collection_w.create_index(ct.default_float_vec_field_name, default_ip_index_params)

@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.tags(CaseLabel.L2)
def test_create_index_partition_flush_ip(self):
"""
target: test create index

@ -810,7 +738,7 @@ class TestNewIndexBase(TestcaseBase):
for t in threads:
t.join()

@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.tags(CaseLabel.L2)
def test_create_index_no_vectors_insert_ip(self):
"""
target: test create index interface when there is no vectors in collection,
@ -841,23 +769,6 @@ class TestNewIndexBase(TestcaseBase):
collection_w.create_index(ct.default_float_vec_field_name, default_ip_index_params)
assert len(collection_w.indexes) == 1

@pytest.mark.tags(CaseLabel.L2)
def test_create_index_different_name_ip(self):
"""
target: check if index can be created repeatedly, with the same create_index params
method: create index after index have been built
expected: raise error
"""
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
data = cf.gen_default_list_data(default_nb)
collection_w.insert(data=data)
collection_w.create_index(ct.default_float_vec_field_name, default_ip_index_params, index_name="a")
collection_w.create_index(ct.default_float_vec_field_name, default_ip_index_params, index_name="b",
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
ct.err_msg: "CreateIndex failed: creating multiple indexes on same field is not supported"})

@pytest.mark.tags(CaseLabel.L0)
def test_create_different_index_repeatedly_ip(self):
"""

@ -903,7 +814,6 @@ class TestNewIndexBase(TestcaseBase):
assert len(collection_w.indexes) == 0

@pytest.mark.tags(CaseLabel.L2)
# TODO #7372
def test_drop_index_repeatedly(self, get_simple_index):
"""
target: test drop index repeatedly

@ -935,7 +845,7 @@ class TestNewIndexBase(TestcaseBase):
index_name=ct.default_index_name)
self.connection_wrap.remove_connection(ct.default_alias)
collection_w.drop_index(index_name=ct.default_index_name, check_task=CheckTasks.err_res,
check_items={ct.err_code: 1, ct.err_msg: "should create connect first."})
check_items={ct.err_code: 999, ct.err_msg: "should create connection first."})

@pytest.mark.tags(CaseLabel.L2)
def test_create_drop_index_repeatedly(self, get_simple_index):
@ -954,76 +864,6 @@ class TestNewIndexBase(TestcaseBase):
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0

@pytest.mark.tags(CaseLabel.L2)
def test_drop_index_ip(self, get_simple_index):
"""
target: test drop index interface
method: create collection and add entities in it, create index, call drop index
expected: return code 0, and default index param
"""
get_simple_index["metric_type"] = "IP"
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
if get_simple_index["index_type"] != "FLAT":
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0

@pytest.mark.tags(CaseLabel.L2)
def test_drop_index_repeatedly_ip(self, get_simple_index):
"""
target: test drop index repeatedly
method: create index, call drop index, and drop again
expected: return code 0
"""
get_simple_index["metric_type"] = "IP"
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=c_name)
if get_simple_index["index_type"] != "FLAT":
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0
@pytest.mark.tags(CaseLabel.L2)
def test_drop_index_without_connect_ip(self):
"""
target: test drop index without connection
method: drop index, and check if drop successfully
expected: raise exception
"""

c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(c_name)
collection_w.create_index(ct.default_float_vec_field_name, default_ip_index_params,
index_name=ct.default_index_name)
self.connection_wrap.remove_connection(ct.default_alias)
collection_w.drop_index(index_name=ct.default_index_name, check_task=CheckTasks.err_res,
check_items={ct.err_code: 1, ct.err_msg: "should create connect first."})

@pytest.mark.tags(CaseLabel.L2)
def test_create_drop_index_repeatedly_ip(self, get_simple_index):
"""
target: test create / drop index repeatedly, use the same index params
method: create index, drop index, four times
expected: return code 0
"""
get_simple_index["metric_type"] = "IP"
c_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(c_name)
if get_simple_index["index_type"] != "FLAT":
for i in range(4):
collection_w.create_index(ct.default_float_vec_field_name, get_simple_index,
index_name=ct.default_index_name)
assert len(collection_w.indexes) == 1
collection_w.drop_index(index_name=ct.default_index_name)
assert len(collection_w.indexes) == 0

@pytest.mark.tags(CaseLabel.L0)
def test_create_PQ_without_nbits(self):
"""
@ -1059,8 +899,8 @@ class TestNewIndexBase(TestcaseBase):
expected: load and search successfully
"""
collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix))
nums = 20
tmp_nb = 5000
nums = 5
tmp_nb = 1000
for i in range(nums):
df = cf.gen_default_dataframe_data(nb=tmp_nb, start=i * tmp_nb)
insert_res, _ = collection_w.insert(df)

@ -1179,10 +1019,6 @@ class TestNewIndexBase(TestcaseBase):
@pytest.mark.tags(CaseLabel.GPU)
class TestNewIndexBinary(TestcaseBase):

def get_simple_index(self, request):
log.info(request.param)
return copy.deepcopy(request.param)

"""
******************************************************************
The following cases are used to test `create_index` function
@ -1190,7 +1026,6 @@ class TestNewIndexBinary(TestcaseBase):
"""

@pytest.mark.tags(CaseLabel.L2)
# @pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_binary_index_on_scalar_field(self):
"""
target: test create index interface

@ -1202,7 +1037,6 @@ class TestNewIndexBinary(TestcaseBase):
assert collection_w.has_index(index_name=binary_field_name)[0] is True

@pytest.mark.tags(CaseLabel.L0)
# @pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_partition(self):
"""
target: test create index interface

@ -1219,11 +1053,10 @@ class TestNewIndexBinary(TestcaseBase):
assert len(ins_res.primary_keys) == len(df)
collection_w.create_index(default_binary_vec_field_name, default_binary_index_params,
index_name=binary_field_name)
assert collection_w.has_index(index_name=binary_field_name)[0] == True
assert collection_w.has_index(index_name=binary_field_name)[0] is True
assert len(collection_w.indexes) == 1

@pytest.mark.tags(CaseLabel.L0)
# @pytest.mark.timeout(BUILD_TIMEOUT)
def test_create_index_search_with_query_vectors(self):
"""
target: test create index interface, search with more query vectors
@ -1242,7 +1075,6 @@ class TestNewIndexBinary(TestcaseBase):
default_search_binary_params, default_limit,
default_search_exp)

# @pytest.mark.timeout(BUILD_TIMEOUT)
@pytest.mark.tags(CaseLabel.L2)
def test_create_index_invalid_metric_type_binary(self):
"""
@ -1352,52 +1184,29 @@ class TestIndexInvalid(TestcaseBase):
def vector_data_type(self, request):
yield request.param

@pytest.fixture(
scope="function",
params=gen_invalid_strs()
)
def get_collection_name(self, request):
@pytest.fixture(scope="function", params=ct.invalid_resource_names)
def invalid_index_name(self, request):
if request.param in [None, "", " "]:
pytest.skip("None and empty is valid for there is a default index name")
yield request.param

@pytest.mark.tags(CaseLabel.L0)
def test_create_index_with_invalid_collection_name(self, connect, get_collection_name):
def test_index_with_invalid_index_name(self, connect, invalid_index_name):
"""
target: test create index interface for invalid scenario
method: create index with invalid collection name
method:
1. create index with invalid collection name
expected: raise exception
2. drop index with an invalid index name
expected: succeed
"""
collection_name = get_collection_name
with pytest.raises(Exception) as e:
connect.create_index(collection_name, field_name, default_ivf_flat_index)
collection_w = self.init_collection_wrap()
error = {ct.err_code: 999, ct.err_msg: f"Invalid index name: {invalid_index_name}"}
collection_w.create_index(ct.default_float_vec_field_name, default_index_params, index_name=invalid_index_name,
check_task=CheckTasks.err_res, check_items=error)

@pytest.mark.tags(CaseLabel.L2)
def test_drop_index_with_invalid_collection_name(self, connect, get_collection_name):
"""
target: test drop index interface for invalid scenario
method: drop index with invalid collection name
expected: raise exception
"""
collection_name = get_collection_name
with pytest.raises(Exception) as e:
connect.drop_index(collection_name)

@pytest.fixture(
scope="function",
params=gen_invalid_index()
)
def get_index(self, request):
yield request.param

@pytest.mark.tags(CaseLabel.L2)
def test_create_index_with_invalid_index_params(self, connect, collection, get_index):
"""
target: test create index interface for invalid scenario
method: create index with invalid index params
expected: raise exception
"""
log.info(get_index)
with pytest.raises(Exception) as e:
connect.create_index(collection, field_name, get_index)
# drop index with an invalid index name
collection_w.drop_index(index_name=invalid_index_name)

@pytest.mark.tags(CaseLabel.L1)
def test_drop_index_without_release(self):
@ -1407,12 +1216,11 @@ class TestIndexInvalid(TestcaseBase):
2. drop the index
expected: raise exception
"""
collection_w = self.init_collection_general(prefix, True, is_index=False)[0]
default_index = {"index_type": "IVF_FLAT", "params": {"nlist": 128}, "metric_type": "L2"}
collection_w.create_index("float_vector", default_index)
collection_w = self.init_collection_general(prefix, True, nb=100, is_index=False)[0]
collection_w.create_index(ct.default_float_vec_field_name, ct.default_index)
collection_w.load()
collection_w.drop_index(check_task=CheckTasks.err_res,
check_items={"err_code": 1,
check_items={"err_code": 999,
"err_msg": "index cannot be dropped, collection is "
"loaded, please release it first"})
@ -1425,7 +1233,7 @@ class TestIndexInvalid(TestcaseBase):
2. set annoy index param n_trees type invalid(not int)
expected: raise exception
"""
collection_w = self.init_collection_general(prefix, True, is_index=False)[0]
collection_w = self.init_collection_general(prefix, True, nb=100, is_index=False)[0]
index_annoy = {"index_type": "ANNOY", "params": {"n_trees": n_trees}, "metric_type": "L2"}
collection_w.create_index("float_vector", index_annoy,
check_task=CheckTasks.err_res,
@ -1439,10 +1247,9 @@ class TestIndexInvalid(TestcaseBase):
method: 1.create collection, and create index
expected: create index raise an error
"""
collection_w, _, _, insert_ids = self.init_collection_general(prefix, True,
dim=ct.default_dim, is_index=False)[0:4]
collection_w = self.init_collection_general(prefix, True, nb=100, is_index=False)[0]
# create index on JSON/Array field is not supported
collection_w.create_index(ct.default_json_field_name, index_params=ct.default_flat_index,
collection_w.create_index(ct.default_json_field_name,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1100,
ct.err_msg: "create index on JSON field is not supported"})
@ -1454,9 +1261,8 @@ class TestIndexInvalid(TestcaseBase):
method: 1.create collection, and create index
expected: Raise exception
"""
collection_w, _, _, insert_ids = self.init_collection_general(prefix, True,
dim=ct.default_dim, is_index=False,
vector_data_type=vector_data_type)[0:4]
collection_w = self.init_collection_general(prefix, True, nb=100,
is_index=False, vector_data_type=vector_data_type)[0]
scalar_index_params = {"index_type": scalar_index}
collection_w.create_index(ct.default_float_vec_field_name, index_params=scalar_index_params,
check_task=CheckTasks.err_res,
@ -2191,22 +1997,6 @@ class TestIndexDiskann(TestcaseBase):
for t in threads:
t.join()

@pytest.mark.skip(reason="diskann dim range is set to be [1, 32768)")
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("dim", [2, 4, 8])
def test_create_index_with_small_dim(self, dim):
"""
target: test create index with diskann
method: 1.create collection, when the dim of the vector Less than 8
2.create diskann index
expected: create index raise an error
"""
collection_w = self.init_collection_general(prefix, False, dim=dim, is_index=False)[0]
collection_w.create_index(default_float_vec_field_name, ct.default_diskann_index,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
ct.err_msg: "dim out of range: [8, 32768]"})

@pytest.mark.tags(CaseLabel.L2)
def test_diskann_enable_mmap(self):
"""
@ -2227,7 +2017,6 @@ class TestIndexDiskann(TestcaseBase):
check_items={ct.err_code: 104,
ct.err_msg: f"index type DISKANN does not support mmap"})


@pytest.mark.tags(CaseLabel.GPU)
class TestAutoIndex(TestcaseBase):
""" Test case of Auto index """
@ -11,9 +11,8 @@ import pytest
class TestIssues(TestcaseBase):
@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("par_key_field", [ct.default_int64_field_name])
@pytest.mark.parametrize("index_on_par_key_field", [True])
@pytest.mark.parametrize("use_upsert", [True, False])
def test_issue_30607(self, par_key_field, index_on_par_key_field, use_upsert):
def test_issue_30607(self, par_key_field, use_upsert):
"""
Method:
1. create a collection with partition key on collection schema with customized num_partitions
@ -50,27 +49,30 @@ class TestIssues(TestcaseBase):
num_entities = collection_w.num_entities
# build index
collection_w.create_index(field_name=vector_field.name, index_params=ct.default_index)
if index_on_par_key_field:
collection_w.create_index(field_name=par_key_field, index_params={})
# load
collection_w.load()

# verify the partition key values are hashed correctly
seeds = 200
rand_ids = random.sample(range(0, num_entities), seeds)
rand_ids = [str(rand_ids[i]) for i in range(len(rand_ids))]
res = collection_w.query(expr=f"pk in {rand_ids}", output_fields=["pk", par_key_field])
# verify every random id exists
assert len(res) == len(rand_ids)
for index_on_par_key_field in [False, True]:
collection_w.release()
if index_on_par_key_field:
collection_w.create_index(field_name=par_key_field, index_params={})
# load
collection_w.load()

dirty_count = 0
for i in range(len(res)):
pk = res[i].get("pk")
parkey_value = res[i].get(par_key_field)
res_parkey = collection_w.query(expr=f"{par_key_field}=={parkey_value} and pk=='{pk}'",
output_fields=["pk", par_key_field])
if len(res_parkey) != 1:
log.info(f"dirty data found: pk {pk} with parkey {parkey_value}")
dirty_count += 1
assert dirty_count == 0
log.info(f"check randomly {seeds}/{num_entities}, dirty count={dirty_count}")
# verify the partition key values are hashed correctly
seeds = 200
rand_ids = random.sample(range(0, num_entities), seeds)
rand_ids = [str(rand_ids[i]) for i in range(len(rand_ids))]
res = collection_w.query(expr=f"pk in {rand_ids}", output_fields=["pk", par_key_field])
# verify every random id exists
assert len(res) == len(rand_ids)

dirty_count = 0
for i in range(len(res)):
pk = res[i].get("pk")
parkey_value = res[i].get(par_key_field)
res_parkey = collection_w.query(expr=f"{par_key_field}=={parkey_value} and pk=='{pk}'",
output_fields=["pk", par_key_field])
if len(res_parkey) != 1:
log.info(f"dirty data found: pk {pk} with parkey {parkey_value}")
dirty_count += 1
assert dirty_count == 0
log.info(f"check randomly {seeds}/{num_entities}, dirty count={dirty_count}")
@ -10097,7 +10097,6 @@ class TestSearchGroupBy(TestcaseBase):

collection_w.flush()
collection_w.create_index(ct.default_float_vec_field_name, index_params=_index_params)
# time.sleep(10)
collection_w.load()

search_params = {"metric_type": metric, "params": {"ef": 128}}

@ -10214,7 +10213,6 @@ class TestSearchGroupBy(TestcaseBase):
collection_w.flush()
collection_w.create_index(ct.default_float_vec_field_name, index_params=_index)
collection_w.create_index(grpby_field)
time.sleep(30)
collection_w.load()

search_params = {"metric_type": metric, "params": {"ef": 128}}

@ -10507,7 +10505,6 @@ class TestSearchGroupBy(TestcaseBase):

collection_w.flush()
collection_w.create_index(ct.default_float_vec_field_name, index_params=_index)
time.sleep(10)
collection_w.load()

nq = 1