[skip e2e] Update compact case with max time interval (#13768)

Signed-off-by: ThreadDao <yufen.zong@zilliz.com>
ThreadDao 2021-12-21 09:20:54 +08:00 committed by GitHub
parent c1cb69ed7e
commit efd86e8c1e
2 changed files with 22 additions and 6 deletions


@@ -48,6 +48,7 @@ default_nlist = 128
 compact_segment_num_threshold = 10
 compact_delta_ratio_reciprocal = 5  # compact_delta_binlog_ratio is 0.2
 compact_retention_duration = 20  # compaction travel time retention range 20s
+max_compaction_interval = 60  # the max time interval (s) from the last compaction
 max_field_num = 256  # Maximum number of fields in a collection
 Not_Exist = "Not_Exist"

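The new constant is consumed by the test file below through the `ct.` constants module. As a minimal, self-contained sketch (not repo code; the names and values are copied from the hunk above), deriving the wait time and segment count from the constants instead of hard-coding 61 and 2, which is exactly the change the second file makes:

    # Constants copied from the hunk above; the derived values mirror what the
    # updated test uses in place of the hard-coded sleep(61) and range(2).
    compact_segment_num_threshold = 10  # segments needed before auto-compaction merges
    max_compaction_interval = 60        # max seconds since the last compaction

    wait_seconds = max_compaction_interval + 1          # sleep just past the interval
    segments_to_create = compact_segment_num_threshold  # enough segments to qualify
    print(wait_seconds, segments_to_create)             # 61 10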

@@ -260,7 +260,6 @@ class TestCompactionParams(TestcaseBase):
         """
         pass
 
-    @pytest.mark.xfail(reason="Issue 12344")
     @pytest.mark.tags(CaseLabel.L2)
     def test_compact_max_time_interval(self):
         """
@@ -274,16 +273,29 @@ class TestCompactionParams(TestcaseBase):
         collection_w = self.init_collection_wrap(name=cf.gen_unique_str(prefix), shards_num=1)
         collection_w.compact()
 
-        for i in range(2):
+        # Notice: the merge-segments compaction triggered by max_compaction_interval also
+        # needs to meet compact_segment_num_threshold
+        for i in range(ct.compact_segment_num_threshold):
             df = cf.gen_default_dataframe_data(tmp_nb)
             collection_w.insert(df)
             assert collection_w.num_entities == tmp_nb * (i + 1)
 
-        sleep(61)
+        sleep(ct.max_compaction_interval + 1)
+
         # verify queryNode load the compacted segments
         collection_w.load()
         segment_info = self.utility_wrap.get_query_segment_info(collection_w.name)[0]
         assert len(segment_info) == 1
 
+    @pytest.mark.skip(reason="TODO")
+    @pytest.mark.tags(CaseLabel.L2)
+    def test_compact_delta_max_time_interval(self):
+        """
+        target: test merge insert and delta log triggered by max_compaction_interval
+        method: todo
+        expected: auto merge
+        """
+        pass
+
 
 class TestCompactionOperation(TestcaseBase):
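The Notice comment added above encodes the key behavioral assumption of this test: the time-triggered merge still requires the segment-count threshold to be met. A simplified model of that trigger rule (an assumption inferred from the test, not Milvus server source):

    # Hypothetical predicate modeling the trigger the test exercises: both the
    # segment-count threshold and the elapsed-time condition must hold.
    def should_auto_compact(num_small_segments: int, seconds_since_last: float,
                            num_threshold: int = 10, max_interval: float = 60.0) -> bool:
        return num_small_segments >= num_threshold and seconds_since_last >= max_interval

    assert should_auto_compact(10, 61)     # threshold met and interval elapsed -> merge
    assert not should_auto_compact(2, 61)  # interval alone is not enough, which is why
                                           # the old range(2) loop could not trigger it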
@@ -339,7 +351,8 @@ class TestCompactionOperation(TestcaseBase):
         4.search
         expected: Verify segment info and index info
         """
-        collection_w = self.collection_insert_multi_segments_one_shard(prefix, nb_of_segment=ct.default_nb, is_dup=False)
+        collection_w = self.collection_insert_multi_segments_one_shard(prefix, nb_of_segment=ct.default_nb,
+                                                                       is_dup=False)
 
         # create index
         collection_w.create_index(ct.default_float_vec_field_name, ct.default_index)
@@ -425,7 +438,8 @@ class TestCompactionOperation(TestcaseBase):
         4.load and search
         expected: Verify search result and index info
         """
-        collection_w = self.collection_insert_multi_segments_one_shard(prefix, nb_of_segment=ct.default_nb, is_dup=False)
+        collection_w = self.collection_insert_multi_segments_one_shard(prefix, nb_of_segment=ct.default_nb,
+                                                                       is_dup=False)
 
         # compact
         collection_w.compact()
@@ -485,7 +499,8 @@ class TestCompactionOperation(TestcaseBase):
         3.load and search
         expected: Verify search result
         """
-        collection_w = self.collection_insert_multi_segments_one_shard(prefix, nb_of_segment=ct.default_nb, is_dup=False)
+        collection_w = self.collection_insert_multi_segments_one_shard(prefix, nb_of_segment=ct.default_nb,
+                                                                       is_dup=False)
 
         # compact
         collection_w.compact()
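These operation tests all wrap the same manual flow: build multiple segments in one shard, compact, then verify what QueryNode serves. A hedged standalone sketch of that flow in plain pymilvus (the endpoint and the collection name "demo" are placeholders; assumes an existing collection that already contains data):

    from pymilvus import Collection, connections, utility

    connections.connect(host="localhost", port="19530")  # placeholder endpoint
    collection = Collection("demo")                      # placeholder existing collection

    collection.compact()                        # trigger manual compaction
    collection.wait_for_compaction_completed()  # block until the merge finishes
    collection.load()
    print(utility.get_query_segment_info("demo"))  # expect the merged segment(s)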