[test]Skip new feature in upgrade deploy test cases (#24748)

Signed-off-by: zhuwenxing <wenxing.zhu@zilliz.com>
pull/24765/head
zhuwenxing 2023-06-08 18:52:36 +08:00 committed by GitHub
parent 44e5daae3a
commit bede8f6171
6 changed files with 53 additions and 31 deletions


@@ -115,9 +115,11 @@ class TestcaseBase(Base):
port=cf.param_info.param_port)
return res
def init_collection_wrap(self, name=None, schema=None, check_task=None, check_items=None, **kwargs):
def init_collection_wrap(self, name=None, schema=None, check_task=None, check_items=None,
enable_dynamic_field=False, with_json=True, **kwargs):
name = cf.gen_unique_str('coll_') if name is None else name
schema = cf.gen_default_collection_schema() if schema is None else schema
schema = cf.gen_default_collection_schema(enable_dynamic_field=enable_dynamic_field, with_json=with_json) \
if schema is None else schema
if not self.connection_wrap.has_connection(alias=DefaultConfig.DEFAULT_USING)[0]:
self._connect()
collection_w = ApiCollectionWrapper()
@@ -149,7 +151,8 @@ class TestcaseBase(Base):
def insert_data_general(self, prefix="test", insert_data=False, nb=ct.default_nb,
partition_num=0, is_binary=False, is_all_data_type=False,
auto_id=False, dim=ct.default_dim,
primary_field=ct.default_int64_field_name, is_flush=True, name=None, **kwargs):
primary_field=ct.default_int64_field_name, is_flush=True, name=None,
enable_dynamic_field=False, with_json=True, **kwargs):
"""
"""
@@ -162,19 +165,25 @@ class TestcaseBase(Base):
insert_ids = []
time_stamp = 0
# 1 create collection
default_schema = cf.gen_default_collection_schema(auto_id=auto_id, dim=dim, primary_field=primary_field)
default_schema = cf.gen_default_collection_schema(auto_id=auto_id, dim=dim, primary_field=primary_field,
enable_dynamic_field=enable_dynamic_field,
with_json=with_json)
if is_binary:
default_schema = cf.gen_default_binary_collection_schema(auto_id=auto_id, dim=dim,
primary_field=primary_field)
if is_all_data_type:
default_schema = cf.gen_collection_schema_all_datatype(auto_id=auto_id, dim=dim,
primary_field=primary_field)
primary_field=primary_field,
enable_dynamic_field=enable_dynamic_field,
with_json=with_json)
log.info("insert_data_general: collection creation")
collection_w = self.init_collection_wrap(name=collection_name, schema=default_schema, **kwargs)
pre_entities = collection_w.num_entities
if insert_data:
collection_w, vectors, binary_raw_vectors, insert_ids, time_stamp = \
cf.insert_data(collection_w, nb, is_binary, is_all_data_type, auto_id=auto_id, dim=dim)
cf.insert_data(collection_w, nb, is_binary, is_all_data_type, auto_id=auto_id, dim=dim,
enable_dynamic_field=enable_dynamic_field,
with_json=with_json)
if is_flush:
collection_w.flush()
assert collection_w.num_entities == nb + pre_entities
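
For context, a minimal sketch (not part of this commit) of how a test case can opt out of the new JSON and dynamic-field features through the extended helper signatures above. The import path and class name are assumptions based on the Milvus python_client test layout, and a reachable Milvus server plus the repo's connection fixtures are required:

from base.client_base import TestcaseBase  # assumed location of the wrapper above

class TestUpgradeCompat(TestcaseBase):  # hypothetical test class
    def test_collection_without_new_features(self):
        # Build a collection whose schema has no JSON field and no dynamic
        # fields, so it stays compatible with the pre-upgrade server version.
        collection_w = self.init_collection_wrap(enable_dynamic_field=False,
                                                 with_json=False)
        # init_collection_wrap generates a 'coll_'-prefixed name when none is given
        assert collection_w.name.startswith("coll_")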


@@ -29,7 +29,8 @@ class TestAllCollection(TestcaseBase):
# create
name = collection_name if collection_name else cf.gen_unique_str("Checker_")
t0 = time.time()
collection_w = self.init_collection_wrap(name=name, active_trace=True)
collection_w = self.init_collection_wrap(name=name, active_trace=True, enable_dynamic_field=False,
with_json=False)
tt = time.time() - t0
assert collection_w.name == name
# compact collection before getting num_entities
@@ -42,7 +43,7 @@ class TestAllCollection(TestcaseBase):
# insert
insert_batch = 3000
data = cf.gen_default_list_data(start=-insert_batch)
data = cf.gen_default_list_data(start=-insert_batch, with_json=False)
t0 = time.time()
_, res = collection_w.insert(data)
tt = time.time() - t0
@@ -97,7 +98,7 @@ class TestAllCollection(TestcaseBase):
collection_w.release()
# insert data
d = cf.gen_default_list_data()
d = cf.gen_default_list_data(with_json=False)
collection_w.insert(d)
# load


@@ -442,14 +442,17 @@ def gen_default_binary_dataframe_data(nb=ct.default_nb, dim=ct.default_dim, star
return df, binary_raw_values
def gen_default_list_data(nb=ct.default_nb, dim=ct.default_dim, start=0):
def gen_default_list_data(nb=ct.default_nb, dim=ct.default_dim, start=0, with_json=True):
int_values = [i for i in range(start, start + nb)]
float_values = [np.float32(i) for i in range(start, start + nb)]
string_values = [str(i) for i in range(start, start + nb)]
json_values = [{"number": i, "string": str(i), "bool": bool(i), "list": [j for j in range(0, i)]}
for i in range(start, start + nb)]
float_vec_values = gen_vectors(nb, dim)
data = [int_values, float_values, string_values, json_values, float_vec_values]
if with_json is False:
data = [int_values, float_values, string_values, float_vec_values]
else:
data = [int_values, float_values, string_values, json_values, float_vec_values]
return data
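
Illustratively (not from this commit), the with_json flag only changes which columns the helper emits; since column order must match the collection schema, callers that pass with_json=False here must also build the schema with with_json=False:

# hypothetical sanity check of the returned column layout
data = gen_default_list_data(nb=2, dim=4, with_json=False)
assert len(data) == 4  # [int_values, float_values, string_values, float_vec_values]

data = gen_default_list_data(nb=2, dim=4)  # with_json defaults to True
assert len(data) == 5  # JSON column included before the vector column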
@@ -742,7 +745,7 @@ def gen_normal_string_expressions(field):
return expressions
def gen_invaild_string_expressions():
def gen_invalid_string_expressions():
expressions = [
"varchar in [0, \"1\"]",
"varchar not in [\"0\", 1, 2]"


@@ -29,6 +29,7 @@ prefix = "deploy_test"
TIMEOUT = 120
class TestActionFirstDeployment(TestDeployBase):
""" Test case of action before reinstall """
@@ -54,7 +55,9 @@ class TestActionFirstDeployment(TestDeployBase):
is_binary = False
if "BIN" in name:
is_binary = True
collection_w = self.init_collection_general(insert_data=False, is_binary=is_binary, name=name)[0]
collection_w = \
self.init_collection_general(insert_data=False, is_binary=is_binary, name=name, enable_dynamic_field=False,
with_json=False)[0]
if collection_w.has_index():
index_names = [index.index_name for index in collection_w.indexes]
for index_name in index_names:
@@ -66,14 +69,14 @@ class TestActionFirstDeployment(TestDeployBase):
@pytest.mark.parametrize("is_deleted", ["is_deleted"])
@pytest.mark.parametrize("is_string_indexed", ["is_string_indexed", "not_string_indexed"])
@pytest.mark.parametrize("segment_status", ["only_growing", "all"])
@pytest.mark.parametrize("index_type", ["HNSW", "BIN_IVF_FLAT"]) #"IVF_FLAT", "HNSW", "BIN_IVF_FLAT"
@pytest.mark.parametrize("index_type", ["HNSW", "BIN_IVF_FLAT"])
def test_task_all(self, index_type, is_compacted,
segment_status, is_string_indexed, replica_number, is_deleted, data_size):
"""
before reinstall: create collection and insert data, load and search
"""
name = ""
for k,v in locals().items():
for k, v in locals().items():
if k in ["self", "name"]:
continue
name += f"_{k}_{v}"
@@ -104,7 +107,9 @@ class TestActionFirstDeployment(TestDeployBase):
# init collection and insert with small size data without flush to get growing segment
collection_w = self.init_collection_general(insert_data=True, is_binary=is_binary, nb=3000,
is_flush=False, is_index=False, name=name)[0]
is_flush=False, is_index=False, name=name,
enable_dynamic_field=False,
with_json=False)[0]
# params for creating index
if is_binary:
default_index_field = ct.default_binary_vec_field_name
@@ -132,7 +137,7 @@ class TestActionFirstDeployment(TestDeployBase):
self.utility_wrap.wait_for_loading_complete(name)
# delete data for growing segment
delete_expr = f"{ct.default_int64_field_name} in {[i for i in range(0,10)]}"
delete_expr = f"{ct.default_int64_field_name} in {[i for i in range(0, 10)]}"
if is_deleted == "is_deleted":
collection_w.delete(expr=delete_expr)
@@ -143,10 +148,10 @@ class TestActionFirstDeployment(TestDeployBase):
default_search_exp,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": default_limit})
"limit": default_limit})
output_fields = [ct.default_int64_field_name]
collection_w.query(default_term_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_not_empty)
check_task=CheckTasks.check_query_not_empty)
# skip subsequent operations when segment_status is set to only_growing
if segment_status == "only_growing":
@@ -155,19 +160,20 @@ class TestActionFirstDeployment(TestDeployBase):
# insert with flush multiple times to generate multiple sealed segment
for i in range(5):
self.init_collection_general(insert_data=True, is_binary=is_binary, nb=data_size,
is_flush=False, is_index=False, name=name)
is_flush=False, is_index=False, name=name, enable_dynamic_field=False,
with_json=False)
# at this step, all segments are sealed
if pymilvus_version >= "2.2.0":
collection_w.flush()
else:
collection_w.collection.num_entities
# delete data for sealed segment and before index
delete_expr = f"{ct.default_int64_field_name} in {[i for i in range(10,20)]}"
delete_expr = f"{ct.default_int64_field_name} in {[i for i in range(10, 20)]}"
if is_deleted == "is_deleted":
collection_w.delete(expr=delete_expr)
# delete data for sealed segment and after index
delete_expr = f"{ct.default_int64_field_name} in {[i for i in range(20,30)]}"
delete_expr = f"{ct.default_int64_field_name} in {[i for i in range(20, 30)]}"
if is_deleted == "is_deleted":
collection_w.delete(expr=delete_expr)
if is_compacted == "is_compacted":
@@ -175,7 +181,8 @@ class TestActionFirstDeployment(TestDeployBase):
# get growing segment before reload
if segment_status == "all":
self.init_collection_general(insert_data=True, is_binary=is_binary, nb=3000,
is_flush=False, is_index=False, name=name)
is_flush=False, is_index=False, name=name, enable_dynamic_field=False,
with_json=False)
# reload after flush and creating index
if replica_number > 0:
collection_w.release()
@@ -185,8 +192,9 @@ class TestActionFirstDeployment(TestDeployBase):
# insert data to get growing segment after reload
if segment_status == "all":
self.init_collection_general(insert_data=True, is_binary=is_binary, nb=3000,
is_flush=False, is_index=False, name=name)
is_flush=False, is_index=False, name=name, enable_dynamic_field=False,
with_json=False)
# search and query for sealed and growing segment
if replica_number > 0:
collection_w.search(vectors_to_search[:default_nq], default_search_field,
@@ -194,7 +202,7 @@ class TestActionFirstDeployment(TestDeployBase):
default_search_exp,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"limit": default_limit})
"limit": default_limit})
output_fields = [ct.default_int64_field_name]
collection_w.query(default_term_expr, output_fields=output_fields,
check_task=CheckTasks.check_query_not_empty)
check_task=CheckTasks.check_query_not_empty)


@@ -190,7 +190,8 @@ class TestActionSecondDeployment(TestDeployBase):
# insert data and flush
for i in range(2):
self.insert_data_general(insert_data=True, is_binary=is_binary, nb=data_size,
is_flush=False, is_index=True, name=name)
is_flush=False, is_index=True, name=name,
enable_dynamic_field=False, with_json=False)
if pymilvus_version >= "2.2.0":
collection_w.flush()
else:


@@ -1631,7 +1631,7 @@ class TestQueryString(TestcaseBase):
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
@pytest.mark.tags(CaseLabel.L1)
@pytest.mark.parametrize("expression", cf.gen_invaild_string_expressions())
@pytest.mark.parametrize("expression", cf.gen_invalid_string_expressions())
def test_query_with_invalid_string_expr(self, expression):
"""
target: test query data
@@ -1674,10 +1674,10 @@ class TestQueryString(TestcaseBase):
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
@pytest.mark.tags(CaseLabel.L1)
def test_query_string_with_invaild_prefix_expr(self):
def test_query_string_with_invalid_prefix_expr(self):
"""
target: test query with invalid prefix string expression
method: specify string primary field, use invaild prefix string expr
method: specify string primary field, use invalid prefix string expr
expected: raise error
"""
collection_w = self.init_collection_general(prefix, insert_data=True)[0]