[test]Add vector to output field for query (#23708) (#23790)

Signed-off-by: zhuwenxing <wenxing.zhu@zilliz.com>
zhuwenxing 2023-04-28 14:18:35 +08:00 committed by GitHub
parent 537e068fb9
commit 4c773d25a2
5 changed files with 18 additions and 12 deletions

@@ -13,7 +13,7 @@ def task_1(data_size, host):
get_collections(prefix)
load_and_search(prefix)
release_collection(prefix)
-create_collections_and_insert_data(prefix,data_size)
+create_collections_and_insert_data(prefix,count=data_size)
load_and_search(prefix)
@@ -27,7 +27,7 @@ def task_2(data_zise, host):
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
load_and_search(prefix)
-create_collections_and_insert_data(prefix, data_zise)
+create_collections_and_insert_data(prefix, count=data_zise)
release_collection(prefix)
create_index(prefix)
load_and_search(prefix)
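
The recurring change across these deploy scripts is the same one-line fix: given the create_collections_and_insert_data signature shown in the shared-utils hunk at the bottom of this diff (prefix, flush=True, count=3000, ...), passing data_size as the second positional argument bound it to flush rather than count, so every run inserted the default 3000 rows and treated data_size as a truthy flush flag. A minimal sketch of the difference, with a stub standing in for the real helper:

    def create_collections_and_insert_data(prefix, flush=True, count=3000):
        # stub with the same parameter order as the real helper
        print(f"prefix={prefix!r}, flush={flush}, count={count}")

    data_size = 6000
    create_collections_and_insert_data("task_1_", data_size)        # wrong: data_size lands in flush
    create_collections_and_insert_data("task_1_", count=data_size)  # fixed: data_size lands in count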

@@ -18,7 +18,7 @@ def task_1(data_size, host):
assert len(col_list) == len(all_index_types)
create_index(prefix)
load_and_search(prefix)
-create_collections_and_insert_data(prefix, data_size)
+create_collections_and_insert_data(prefix, count=data_size)
release_collection(prefix)
create_index(prefix)
load_and_search(prefix)
@@ -35,7 +35,7 @@ def task_2(data_size, host):
col_list = get_collections(prefix, check=True)
assert len(col_list) == len(all_index_types)
load_and_search(prefix)
-create_collections_and_insert_data(prefix, data_size)
+create_collections_and_insert_data(prefix, count=data_size)
release_collection(prefix)
create_index(prefix)
load_and_search(prefix)

@@ -11,10 +11,10 @@ def task_1(data_size, host):
prefix = "task_1_"
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
-create_collections_and_insert_data(prefix,data_size)
+create_collections_and_insert_data(prefix,count=data_size)
create_index(prefix)
load_and_search(prefix)
-create_collections_and_insert_data(prefix,data_size)
+create_collections_and_insert_data(prefix,count=data_size)
load_and_search(prefix)
@@ -27,9 +27,9 @@ def task_2(data_size, host):
prefix = "task_2_"
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
-create_collections_and_insert_data(prefix, data_size)
+create_collections_and_insert_data(prefix, count=data_size)
create_index(prefix)
-create_collections_and_insert_data(prefix, data_size)
+create_collections_and_insert_data(prefix, count=data_size)
create_index(prefix)
load_and_search(prefix)

@@ -15,7 +15,7 @@ def task_1(data_size, host):
prefix = "task_1_"
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
-create_collections_and_insert_data(prefix, data_size)
+create_collections_and_insert_data(prefix, count=data_size)
create_index(prefix)
load_and_search(prefix)
@@ -29,7 +29,7 @@ def task_2(data_size, host):
prefix = "task_2_"
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
-create_collections_and_insert_data(prefix, data_size)
+create_collections_and_insert_data(prefix, count=data_size)
create_index(prefix)
load_and_search(prefix)
create_collections_and_insert_data(prefix, flush=False, count=data_size)
@@ -43,7 +43,7 @@ def task_3(data_size, host):
prefix = "task_3_"
connections.connect(host=host, port=19530, timeout=60)
get_collections(prefix)
-create_collections_and_insert_data(prefix, data_size)
+create_collections_and_insert_data(prefix, count=data_size)
create_index(prefix)
load_and_search(prefix)

@@ -108,6 +108,12 @@ def create_collections_and_insert_data(prefix, flush=True, count=3000, collectio
end_time = time.time()
logger.info(f"[{j+1}/{times}] insert {nb} data, time: {end_time - start_time:.4f}")
total_time += end_time - start_time
+if j <= times - 3:
+    collection.flush()
+    collection.num_entities  # reading the count waits on the flush to complete
+if j == times - 3:
+    collection.compact()  # compact the accumulated sealed segments once, near the end
logger.info(f"end insert, time: {total_time:.4f}")
if flush:
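
The new block flushes every insert batch except the last two and fires a single compaction once the loop is two batches from the end, so the later load-and-search runs start from mostly sealed, compacted segments while the final batches stay in a growing segment. A self-contained sketch of the same pattern, assuming a reachable local Milvus and a pre-created collection named "deploy_test" with the count / random_value / float_vector schema these scripts use (the name and dim 128 are assumptions):

    import random
    from pymilvus import Collection, connections

    connections.connect(host="127.0.0.1", port="19530")  # assumed local Milvus
    collection = Collection("deploy_test")               # assumed pre-created collection

    nb, times, dim = 1000, 10, 128                       # illustrative sizes; dim is an assumption
    for j in range(times):
        entities = [
            [j * nb + i for i in range(nb)],                             # count (int64 primary key)
            [random.random() for _ in range(nb)],                        # random_value
            [[random.random() for _ in range(dim)] for _ in range(nb)],  # float_vector
        ]
        collection.insert(entities)
        if j <= times - 3:
            collection.flush()       # seal this batch's segment
            collection.num_entities  # reading the count waits on the flush
        if j == times - 3:
            collection.compact()     # merge the small sealed segments once
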
@@ -233,7 +239,7 @@ def load_and_search(prefix, replicas=1):
logger.info("search latency: %.4fs" % (end_time - start_time))
t0 = time.time()
expr = "count in [2,4,6,8]"
-output_fields = ["count", "random_value"]
+output_fields = ["count", "random_value", "float_vector"]
res = c.query(expr, output_fields, timeout=120)
sorted_res = sorted(res, key=lambda k: k['count'])
for r in sorted_res:
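
The headline change: load_and_search now asks query() to return the vector field itself, not just the scalar fields, which exercises Milvus's support for retrieving raw vectors by expression. A hedged usage sketch against the same assumed collection, using the field names from the hunk above:

    from pymilvus import Collection, connections

    connections.connect(host="127.0.0.1", port="19530")
    c = Collection("deploy_test")  # assumed existing collection; name is illustrative
    c.load()

    expr = "count in [2,4,6,8]"
    res = c.query(expr, output_fields=["count", "random_value", "float_vector"], timeout=120)
    for r in sorted(res, key=lambda k: k["count"]):
        # each result now carries its raw embedding alongside the scalars
        assert len(r["float_vector"]) == 128  # dim 128 is an assumption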