Modify test cases after code change (#19755)

Signed-off-by: Binbin Lv <binbin.lv@zilliz.com>

pull/19760/head
binbin 2022-10-13 17:31:23 +08:00 committed by GitHub
parent fb14f4cc7e
commit 857bd1fd68
7 changed files with 38 additions and 32 deletions

View File

@ -8,7 +8,7 @@ String cron_string = BRANCH_NAME == "master" ? "50 22,5 * * * " : ""
// Make timeout 4 hours so that we can run two nightly during the ci
int total_timeout_minutes = 7 * 60
def imageTag=''
-def chart_version='3.1.11'
+def chart_version='3.2.9'
pipeline {
triggers {
cron """${cron_timezone}

View File

@ -4,7 +4,7 @@ int total_timeout_minutes = 60 * 5
int e2e_timeout_seconds = 70 * 60
def imageTag=''
int case_timeout_seconds = 10 * 60
-def chart_version='3.1.11'
+def chart_version='3.2.9'
pipeline {
options {
timestamps()

View File

@ -9,7 +9,7 @@ allure-pytest==2.7.0
pytest-print==0.2.1
pytest-level==0.1.1
pytest-xdist==2.5.0
-pymilvus==2.2.0.dev40
+pymilvus==2.2.0.dev44
pytest-rerunfailures==9.1.1
git+https://github.com/Projectplace/pytest-tags
ndg-httpsclient

View File

@ -968,7 +968,7 @@ class TestCompactionOperation(TestcaseBase):
search_res, _ = collection_w.search(df2[ct.default_float_vec_field_name][:1].to_list(),
ct.default_float_vec_field_name,
ct.default_search_params, ct.default_limit,
-travel_timestamp=tt)
+travel_timestamp=0)
assert tmp_nb in search_res[0].ids
assert len(search_res[0]) == ct.default_limit
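
For reference, the wrapper call above ultimately goes through the plain pymilvus search API, where travel_timestamp is now only accepted as 0. A minimal sketch, assuming a local Milvus deployment, pymilvus 2.2.x, and an illustrative collection named "compact_demo" with a 128-dim float vector field (endpoint, names, and dimension are assumptions, not from this diff):

# Sketch only: endpoint, collection name, field name, and dim are assumptions.
from pymilvus import Collection, connections

connections.connect(host="localhost", port="19530")   # assumed local deployment
collection = Collection("compact_demo")                # hypothetical, must already exist
collection.load()
res = collection.search(
    data=[[0.1] * 128],                                # one query vector of assumed dim 128
    anns_field="float_vector",                         # illustrative vector field name
    param={"metric_type": "L2", "params": {"nprobe": 10}},
    limit=10,
    travel_timestamp=0,                                # the only value accepted now
)
print(res[0].ids)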

View File

@ -997,6 +997,7 @@ class TestDeleteOperation(TestcaseBase):
pass
@pytest.mark.tags(CaseLabel.L1)
+@pytest.mark.skip(reason="Time travel disabled")
def test_delete_time_travel(self):
"""
target: test search with time travel after delete
@ -1618,6 +1619,7 @@ class TestDeleteString(TestcaseBase):
check_task=CheckTasks.check_query_empty)
@pytest.mark.tags(CaseLabel.L1)
+@pytest.mark.skip(reason="Time travel disabled")
def test_delete_time_travel_string(self):
"""
target: test search with time travel after delete
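
The marker added in both delete test classes is plain pytest; in isolation the pattern looks like this (class and method names echo the diff, the body is a placeholder):

import pytest

class TestDeleteOperation:
    @pytest.mark.skip(reason="Time travel disabled")
    def test_delete_time_travel(self):
        # Placeholder body: the real case deletes entities and then searches
        # with an earlier travel timestamp, which is currently unsupported.
        ...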

View File

@ -944,15 +944,14 @@ class TestCollectionSearch(TestcaseBase):
default_search_params, default_limit,
default_search_exp,
travel_timestamp=time_stamp - 1,
-check_task=CheckTasks.check_search_results,
-check_items={"nq": nq,
-"ids": [],
-"limit": 0})
+check_task=CheckTasks.err_res,
+check_items={"err_code": 1,
+"err_msg": f"only support to travel back to 0s so far"})
# 3. search after insert time_stamp
collection_w.search(vectors[:nq], default_search_field,
default_search_params, default_limit,
default_search_exp,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
guarantee_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
@ -1161,7 +1160,7 @@ class TestCollectionSearch(TestcaseBase):
vectors = [[random.random() for _ in range(dim)] for _ in range(nq)]
collection_w.search(vectors[:nq], default_search_field, default_search_params,
default_limit, default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
@ -1220,7 +1219,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:nq], default_search_field,
default_search_params, limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp + 1,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
@ -1426,7 +1425,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:default_nq], default_search_field,
search_param, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
@ -1459,7 +1458,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:default_nq], default_search_field,
search_param, limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
@ -1499,7 +1498,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:default_nq], default_search_field,
search_param, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
@ -1537,7 +1536,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:default_nq], default_search_field,
search_param, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
@ -1579,7 +1578,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:default_nq], default_search_field,
search_param, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,
@ -1631,7 +1630,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:nq], default_search_field,
default_search_params, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
@ -1696,7 +1695,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:default_nq], default_search_field,
search_params, limit, default_search_exp,
[par[1].name], _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids[par[0].num_entities:],
@ -1834,7 +1833,7 @@ class TestCollectionSearch(TestcaseBase):
res = collection_w.search(binary_vectors[:nq], "binary_vector",
search_params, default_limit, "int64 >= 0",
_async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
@ -1952,7 +1951,7 @@ class TestCollectionSearch(TestcaseBase):
res = collection_w.search(binary_vectors[:nq], "binary_vector",
search_params, default_limit, "int64 >= 0",
_async=_async,
-travel_timestamp=time_stamp)[0]
+travel_timestamp=0)[0]
if _async:
res.done()
res = res.result()
@ -1988,7 +1987,7 @@ class TestCollectionSearch(TestcaseBase):
res = collection_w.search(binary_vectors[:nq], "binary_vector",
search_params, default_limit, "int64 >= 0",
_async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
@ -2025,6 +2024,7 @@ class TestCollectionSearch(TestcaseBase):
"limit": default_limit})
@pytest.mark.tags(CaseLabel.L2)
+@pytest.mark.skip(reason="Time travel disabled")
def test_search_travel_time_without_expression(self, auto_id):
"""
target: test search using travel time without expression
@ -2471,7 +2471,7 @@ class TestCollectionSearch(TestcaseBase):
collection_w.search(vectors[:nq], default_search_field,
default_search_params, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
@ -3228,7 +3228,7 @@ class TestSearchBase(TestcaseBase):
collection_w.search(vectors[:nq], default_search_field,
default_search_params, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": nq,
"ids": insert_ids,
@ -4115,13 +4115,13 @@ class TestsearchPagination(TestcaseBase):
for search_param in search_params:
res = collection_w.search(vectors[:default_nq], default_search_field, search_param,
default_limit + offset, default_search_exp, _async=_async,
-travel_timestamp=time_stamp)[0]
+travel_timestamp=0)[0]
search_param["offset"] = offset
log.info("Searching with search params: {}".format(search_param))
search_res = collection_w.search(vectors[:default_nq], default_search_field,
search_param, default_limit,
default_search_exp, _async=_async,
-travel_timestamp=time_stamp,
+travel_timestamp=0,
check_task=CheckTasks.check_search_results,
check_items={"nq": default_nq,
"ids": insert_ids,

View File

@ -257,7 +257,7 @@ class TestUtilityParams(TestcaseBase):
collection_w = self.init_collection_general(prefix)[0]
log.debug(collection_w.num_entities)
collection_w.load()
-err_msg = {ct.err_code: -1, ct.err_msg: f"Partitions not exist: [{ct.default_tag}]"}
+err_msg = {ct.err_code: 1, ct.err_msg: f"partitionID of partitionName:{ct.default_tag} can not be find"}
self.utility_wrap.loading_progress(collection_w.name, partition_names,
check_task=CheckTasks.err_res, check_items=err_msg)
@ -677,10 +677,11 @@ class TestUtilityBase(TestcaseBase):
cw = self.init_collection_wrap(name=c_name)
data = cf.gen_default_list_data(nb)
cw.insert(data=data)
-error = {ct.err_code: 1, ct.err_msg: "no index is created"}
+error = {ct.err_code: 25, ct.err_msg: "index not exist"}
self.utility_wrap.index_building_progress(c_name, check_task=CheckTasks.err_res, check_items=error)
@pytest.mark.tags(CaseLabel.L1)
+@pytest.mark.xfail(reason="issue 19752")
def test_index_process_collection_index(self):
"""
target: test building_process
@ -699,6 +700,7 @@ class TestUtilityBase(TestcaseBase):
assert res['total_rows'] == nb
@pytest.mark.tags(CaseLabel.L1)
+@pytest.mark.xfail(reason="issue 19753")
def test_index_process_collection_indexing(self):
"""
target: test building_process
@ -752,6 +754,7 @@ class TestUtilityBase(TestcaseBase):
assert res == exp_res
@pytest.mark.tags(CaseLabel.L1)
+@pytest.mark.xfail(reason="issue 19752")
def test_wait_index_collection_index(self):
"""
target: test wait_index
@ -781,7 +784,7 @@ class TestUtilityBase(TestcaseBase):
collection_w.insert(df)
assert collection_w.num_entities == ct.default_nb
res = self.utility_wrap.loading_progress(collection_w.name)[0]
-exp_res = {loading_progress: '0%', num_loaded_partitions: 0, not_loaded_partitions: ['_default']}
+exp_res = {loading_progress: '0%'}
assert exp_res == res
@ -827,11 +830,12 @@ class TestUtilityBase(TestcaseBase):
collection_w = self.init_collection_wrap()
collection_w.load()
res, _ = self.utility_wrap.loading_progress(collection_w.name)
-exp_res = {loading_progress: '100%', num_loaded_partitions: 1, not_loaded_partitions: []}
+exp_res = {loading_progress: '100%'}
assert exp_res == res
@pytest.mark.tags(CaseLabel.L1)
+@pytest.mark.xfail(reason="issue 19754")
def test_loading_progress_after_release(self):
"""
target: test loading progress after release
@ -858,7 +862,7 @@ class TestUtilityBase(TestcaseBase):
collection_w, partition_w, _, _ = self.insert_entities_into_two_partitions_in_half(half)
partition_w.release()
res = self.utility_wrap.loading_progress(collection_w.name)[0]
-assert res[loading_progress] == '50%'
+assert res[loading_progress] == '100%'
@pytest.mark.tags(CaseLabel.L2)
def test_loading_progress_with_load_partition(self):
@ -873,7 +877,7 @@ class TestUtilityBase(TestcaseBase):
collection_w.release()
partition_w.load()
res = self.utility_wrap.loading_progress(collection_w.name)[0]
-assert res[loading_progress] == '50%'
+assert res[loading_progress] == '100%'
@pytest.mark.tags(CaseLabel.L1)
def test_loading_progress_with_partition(self):
@ -937,7 +941,7 @@ class TestUtilityBase(TestcaseBase):
cw.load()
self.utility_wrap.wait_for_loading_complete(cw.name)
res, _ = self.utility_wrap.loading_progress(cw.name)
exp_res = {loading_progress: "100%", not_loaded_partitions: [], num_loaded_partitions: 1}
exp_res = {loading_progress: "100%"}
assert res == exp_res
@pytest.mark.tags(CaseLabel.L1)
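
The updated expectations above show loading_progress reporting only an overall percentage. A minimal sketch of reading it through pymilvus utility, with an illustrative collection name and assuming an open connection:

# Sketch only: collection name is illustrative and a connection is assumed.
from pymilvus import Collection, utility

coll = Collection("progress_demo")                     # hypothetical existing collection
coll.load()
utility.wait_for_loading_complete("progress_demo")
progress = utility.loading_progress("progress_demo")
# Per the assertions above, only the overall percentage is returned,
# e.g. {'loading_progress': '100%'}.
print(progress["loading_progress"])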