Update test cases nightly (#24711)

Signed-off-by: nico <cheng.yuan@zilliz.com>
pull/24729/head
nico 2023-06-07 19:48:35 +08:00 committed by GitHub
parent e8a1086fef
commit a64a029c72
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 16 additions and 10 deletions

View File

@ -3800,6 +3800,7 @@ class TestCollectionJSON(TestcaseBase):
check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("primary_field", [ct.default_int64_field_name, ct.default_string_field_name])
def test_collection_json_field_partition_key(self, primary_field):
"""
target: test create collection with JSON field as partition key

View File

@ -98,7 +98,7 @@ class TestInsertParams(TestcaseBase):
collection_w = self.init_collection_wrap(name=c_name)
error = {ct.err_code: 1, ct.err_msg: "The fields don't match with schema fields, "
"expected: ['int64', 'float', 'varchar', 'float_vector'], got %s" % data}
collection_w.insert(data=data)
collection_w.insert(data=data, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
def test_insert_dataframe_only_columns(self):
@ -847,7 +847,8 @@ class TestInsertOperation(TestcaseBase):
method: calculate the critical value and insert an equivalent amount of data
expected: raise exception
"""
nb = 127583
# nb = 127583 without json field
nb = 108993
collection_name = cf.gen_unique_str(prefix)
collection_w = self.init_collection_wrap(name=collection_name)
data = cf.gen_default_dataframe_data(nb)
@ -1730,7 +1731,7 @@ class TestUpsertInvalid(TestcaseBase):
collection_w.upsert(data=data, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L2)
@pytest.mark.parametrize("partition_name", ct.get_invalid_strs)
@pytest.mark.parametrize("partition_name", ct.get_invalid_strs[:13])
def test_upsert_partition_name_invalid(self, partition_name):
"""
target: test upsert partition name invalid

View File

@ -280,7 +280,7 @@ class TestQueryParams(TestcaseBase):
ct.default_float_field_name, ct.default_double_field_name, ct.default_string_field_name]
# exp res: first two rows and all fields except the last vec field
res = df.iloc[:2, :-1].to_dict('records')
res = df.iloc[:2, :].to_dict('records')
for field in non_primary_field:
filter_values = df[field].tolist()[:2]
if field is not ct.default_string_field_name:
@ -288,8 +288,10 @@ class TestQueryParams(TestcaseBase):
else:
term_expr = f'{field} in {filter_values}'
term_expr = term_expr.replace("'", "\"")
log.info(res)
self.collection_wrap.query(term_expr, output_fields=["*"],
check_task=CheckTasks.check_query_results, check_items={exp_res: res})
check_task=CheckTasks.check_query_results,
check_items={exp_res: res, "with_vec": True})
@pytest.mark.tags(CaseLabel.L2)
def test_query_expr_by_bool_field(self):

View File

@ -5519,10 +5519,11 @@ class TestSearchDiskann(TestcaseBase):
# 2. create index
default_index = {"index_type": "DISKANN", "metric_type": "L2", "params": {}}
collection_w.create_index(ct.default_float_vec_field_name, default_index, index_name=index_name1)
index_params_one = {}
collection_w.create_index("float", index_params_one, index_name="a")
index_param_two = {}
collection_w.create_index("varchar", index_param_two, index_name="b")
if not enable_dynamic_field:
index_params_one = {}
collection_w.create_index("float", index_params_one, index_name="a")
index_param_two = {}
collection_w.create_index("varchar", index_param_two, index_name="b")
collection_w.load()
tmp_expr = f'{ct.default_int64_field_name} in {[0]}'
@ -5534,7 +5535,7 @@ class TestSearchDiskann(TestcaseBase):
assert del_res.delete_count == half_nb
collection_w.delete(tmp_expr)
default_search_params ={"metric_type": "L2", "params": {"search_list": 30}}
default_search_params = {"metric_type": "L2", "params": {"search_list": 30}}
vectors = [[random.random() for _ in range(dim)] for _ in range(default_nq)]
output_fields = [default_int64_field_name, default_float_field_name, default_string_field_name]
collection_w.search(vectors[:default_nq], default_search_field,
@ -6085,6 +6086,7 @@ class TestCollectionRangeSearch(TestcaseBase):
# 3. insert new data
nb_new = 300
_, _, _, insert_ids_new, time_stamp = cf.insert_data(collection_w, nb_new, dim=dim,
enable_dynamic_field=enable_dynamic_field,
insert_offset=nb_old)
insert_ids.extend(insert_ids_new)
# 4. search for new data without load