mirror of https://github.com/milvus-io/milvus.git
[test]Add the testcase with query and search (#17320)
Signed-off-by: jingkl <jingjing.jia@zilliz.com>
parent 3ccc91532e
commit 7e7941af78
@@ -1665,3 +1665,49 @@ class TestDeleteString(TestcaseBase):
        collection_w.load()
        error = {ct.err_code: 0, ct.err_msg: f"failed to create expr plan, expr = {default_invaild_string_exp}"}
        collection_w.delete(expr=default_invaild_string_exp, check_task=CheckTasks.err_res, check_items=error)

    @pytest.mark.tags(CaseLabel.L1)
    @pytest.mark.parametrize("to_query", [True, False])
    def test_delete_insert_same_id_sealed_string(self, to_query):
        """
        target: test insert same id entity after delete from sealed data
        method: 1.create and insert with flush, string is pk field
                2.load and query with the id
                3.delete the id entity
                4.insert new entity with the same id and flush
                5.query the id
        expected: verify that the query gets the newly inserted entity
        """
        # init collection and insert data without flush
        schema = cf.gen_string_pk_default_collection_schema()
        collection_w = self.init_collection_wrap(cf.gen_unique_str(prefix), schema=schema)

        # insert
        df = cf.gen_default_dataframe_data(1000)
        collection_w.insert(df)
        log.debug(collection_w.num_entities)

        # load and query
        collection_w.load()
        res = df.iloc[:1, 2:3].to_dict('records')
        default_search_params = {"metric_type": "L2", "params": {"nprobe": 16}}
        collection_w.search(data=[df[ct.default_float_vec_field_name][0]], anns_field=ct.default_float_vec_field_name,
                            param=default_search_params, limit=1)
        collection_w.query(default_string_expr, check_task=CheckTasks.check_query_results, check_items={'exp_res': res})

        # delete
        collection_w.delete(default_string_expr)
        if to_query:
            collection_w.query(default_string_expr, check_task=CheckTasks.check_query_empty)

        # re-insert
        df_new = cf.gen_default_dataframe_data(nb=1)
        collection_w.insert(df_new)
        log.debug(collection_w.num_entities)

        # re-query
        res = df_new.iloc[[0], [2, 3]].to_dict('records')
        collection_w.query(default_string_expr, output_fields=[ct.default_float_vec_field_name],
                           check_task=CheckTasks.check_query_results,
                           check_items={'exp_res': res, 'primary_field': ct.default_string_field_name, 'with_vec': True})
        collection_w.search(data=[df_new[ct.default_float_vec_field_name][0]], anns_field=ct.default_float_vec_field_name,
                            param=default_search_params, limit=1)
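For readers outside the test framework: cf, ct, log, collection_w, and default_string_expr come from the repo's python_client helpers (common.common_func, common.common_type, and the collection wrapper). A minimal sketch of the same delete-then-reinsert-same-string-pk flow with raw pymilvus follows; the connection address, collection name, dimension, and the 'varchar in ["0"]' filter are illustrative assumptions rather than values taken from the test suite.

    # Minimal sketch only -- raw pymilvus, not the repo's wrapper classes.
    import random
    from pymilvus import (connections, Collection, CollectionSchema,
                          FieldSchema, DataType)

    connections.connect(host="localhost", port="19530")       # assumed local Milvus

    dim = 8                                                    # assumed dimension
    schema = CollectionSchema([
        FieldSchema("varchar", DataType.VARCHAR, is_primary=True, max_length=64),
        FieldSchema("float_vector", DataType.FLOAT_VECTOR, dim=dim),
    ])
    coll = Collection("delete_reinsert_demo", schema)          # assumed collection name

    # insert, flush to seal the segment, then index and load
    coll.insert([[str(i) for i in range(10)],
                 [[random.random() for _ in range(dim)] for _ in range(10)]])
    coll.flush()
    coll.create_index("float_vector", {"index_type": "IVF_FLAT",
                                       "metric_type": "L2",
                                       "params": {"nlist": 64}})
    coll.load()

    pk_filter = 'varchar in ["0"]'        # assumed shape of default_string_expr
    print(coll.query(pk_filter))          # the original entity is visible

    # delete the id, then insert a new entity under the same primary key
    coll.delete(pk_filter)
    coll.insert([["0"], [[random.random() for _ in range(dim)]]])
    coll.flush()

    # the query should now return only the newly inserted entity
    print(coll.query(pk_filter, output_fields=["float_vector"]))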
@@ -1304,3 +1304,29 @@ class TestqueryString(TestcaseBase):
        collection_w.query(expression, check_task=CheckTasks.err_res,
                           check_items={ct.err_code: 1, ct.err_msg: "like operation on non-string field is unsupported"}
                           )

    @pytest.mark.tags(CaseLabel.L1)
    def test_query_compare_two_fields(self):
        """
        target: test query with a compare expression between two scalar fields
        method: specify string primary field, compare two fields
        expected: verify query successfully
        """
        collection_w = self.init_collection_general(prefix, insert_data=True, primary_field=ct.default_string_field_name)[0]
        res = []
        expression = 'float > int64'
        output_fields = [default_int_field_name, default_float_field_name, default_string_field_name]
        collection_w.query(expression, output_fields=output_fields,
                           check_task=CheckTasks.check_query_results, check_items={exp_res: res})

    @pytest.mark.tags(CaseLabel.L1)
    def test_query_compare_invalid_fields(self):
        """
        target: test query with a compare expression on mismatched field types
        method: specify string primary field, compare string and int field
        expected: raise error
        """
        collection_w = self.init_collection_general(prefix, insert_data=True, primary_field=ct.default_string_field_name)[0]
        expression = 'varchar == int64'
        collection_w.query(expression, check_task=CheckTasks.err_res,
                           check_items={ct.err_code: 1, ct.err_msg: f' cannot parse expression:{expression}'})
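Both new query cases exercise compare expressions, which compare one scalar field of an entity against another field instead of against a constant; mismatched types such as VARCHAR and INT64 are rejected when the expression plan is built. A rough standalone equivalent with raw pymilvus, assuming an existing loaded collection with 'int64', 'float', and 'varchar' scalar fields:

    # Sketch: query with a two-field compare expression. Assumes an existing,
    # loaded collection whose schema has INT64 "int64", FLOAT "float",
    # and VARCHAR "varchar" scalar fields.
    from pymilvus import connections, Collection

    connections.connect(host="localhost", port="19530")       # assumed local Milvus
    coll = Collection("string_pk_demo")                        # assumed collection name
    coll.load()

    # keep only entities whose float value exceeds their int64 value
    print(coll.query(expr="float > int64",
                     output_fields=["int64", "float", "varchar"]))

    # comparing incompatible field types fails at plan time, e.g.:
    #   coll.query(expr="varchar == int64")   # -> "cannot parse expression" error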
@@ -46,6 +46,8 @@ entity = gen_entities(1, is_normal=True)
entities = gen_entities(default_nb, is_normal=True)
raw_vectors, binary_entities = gen_binary_entities(default_nb)
default_query, _ = gen_search_vectors_params(field_name, entities, default_top_k, nq)
index_name1 = cf.gen_unique_str("float")
index_name2 = cf.gen_unique_str("varhar")


class TestCollectionSearchInvalid(TestcaseBase):
@@ -3022,3 +3024,42 @@ class TestsearchString(TestcaseBase):
                                         "limit": 1,
                                         "_async": _async}
                            )

    @pytest.mark.tags(CaseLabel.L1)
    def test_search_all_index_with_compare_expr(self, _async):
        """
        target: test search after creating indexes, with a compare expression filter
        method: 1.create collection, insert data, primary_field is string field
                2.create string and float index, delete entities, query
                3.search
        expected: assert index and deleted id not in search result
        """
        # create collection, insert tmp_nb, flush and load
        collection_w, vectors, _, insert_ids = self.init_collection_general(prefix, insert_data=True, primary_field=ct.default_string_field_name)[0:4]

        # create index
        index_params_one = {"index_type": "IVF_SQ8", "metric_type": "L2", "params": {"nlist": 64}}
        collection_w.create_index(ct.default_float_vec_field_name, index_params_one, index_name=index_name1)
        index_params_two = {}
        collection_w.create_index(ct.default_string_field_name, index_params=index_params_two, index_name=index_name2)
        assert collection_w.has_index(index_name=index_name2)

        collection_w.release()
        collection_w.load()
        # filter with a compare expression
        expr = 'float >= int64'
        # search with random vectors
        vectors = [[random.random() for _ in range(default_dim)] for _ in range(default_nq)]
        output_fields = [default_int64_field_name, default_float_field_name, default_string_field_name]
        collection_w.search(vectors[:default_nq], default_search_field,
                            default_search_params, default_limit,
                            expr,
                            output_fields=output_fields,
                            _async=_async,
                            travel_timestamp=0,
                            check_task=CheckTasks.check_search_results,
                            check_items={"nq": default_nq,
                                         "ids": insert_ids,
                                         "limit": default_limit,
                                         "_async": _async}
                            )
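Stripped of the test wrappers, the index-then-filtered-search flow in this case looks roughly like the sketch below; the IVF_SQ8 parameters and the empty index_params for the VARCHAR field mirror the test, while the connection address, collection name, field names, and dimension are assumptions.

    # Sketch: build a vector index and a default scalar index, then search with a
    # compare-expression filter. Assumes a not-yet-loaded collection that has
    # "float_vector", "int64", "float", and "varchar" fields.
    import random
    from pymilvus import connections, Collection

    connections.connect(host="localhost", port="19530")       # assumed local Milvus
    coll = Collection("string_pk_demo")                        # assumed collection name
    dim = 128                                                  # assumed vector dimension

    coll.create_index("float_vector",
                      {"index_type": "IVF_SQ8", "metric_type": "L2",
                       "params": {"nlist": 64}},
                      index_name="vec_idx")
    coll.create_index("varchar", index_params={}, index_name="str_idx")
    coll.load()

    res = coll.search(data=[[random.random() for _ in range(dim)]],
                      anns_field="float_vector",
                      param={"metric_type": "L2", "params": {"nprobe": 16}},
                      limit=10,
                      expr="float >= int64",                   # compare-expression filter
                      output_fields=["int64", "float", "varchar"])
    print(res[0].ids)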