2019-10-16 10:40:31 +00:00
|
|
|
import pdb
|
2020-01-14 11:22:27 +00:00
|
|
|
import struct
|
2020-03-07 07:23:34 +00:00
|
|
|
from random import sample
|
2020-01-14 11:22:27 +00:00
|
|
|
|
2019-10-16 10:40:31 +00:00
|
|
|
import pytest
|
|
|
|
import threading
|
|
|
|
import datetime
|
|
|
|
import logging
|
|
|
|
from time import sleep
|
|
|
|
from multiprocessing import Process
|
|
|
|
import numpy
|
2020-02-29 08:11:31 +00:00
|
|
|
import sklearn.preprocessing
|
2020-01-17 01:59:49 +00:00
|
|
|
from milvus import IndexType, MetricType
|
2019-10-16 10:40:31 +00:00
|
|
|
from utils import *
|
|
|
|
|
|
|
|
# Vector dimensionality shared by every test in this module.
dim = 128
# Base name used when generating test collection names.
collection_id = "test_search"
# Seconds to wait after inserting vectors so they become searchable.
add_interval_time = 2
# Pre-generated float vectors, L2-normalized so L2/IP distance checks below hold.
vectors = gen_vectors(6000, dim)
vectors = sklearn.preprocessing.normalize(vectors, axis=1, norm='l2')
vectors = vectors.tolist()
nprobe = 1
# Tolerance when comparing returned float distances against expected values.
epsilon = 0.001
# Default partition tag used by the partition tests.
tag = "1970-01-01"
# Pre-generated binary vectors (plus their raw float counterparts) for binary metrics.
raw_vectors, binary_vectors = gen_binary_vectors(6000, dim)
|
2019-10-16 10:40:31 +00:00
|
|
|
|
|
|
|
|
|
|
|
class TestSearchBase:
|
2020-03-09 04:00:37 +00:00
|
|
|
def init_data(self, connect, collection, nb=6000, partition_tags=None):
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
2020-03-09 04:00:37 +00:00
|
|
|
Generate vectors and add it in collection, before search vectors
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
|
|
|
global vectors
|
2020-01-14 11:22:27 +00:00
|
|
|
if nb == 6000:
|
2019-10-16 10:40:31 +00:00
|
|
|
add_vectors = vectors
|
|
|
|
else:
|
|
|
|
add_vectors = gen_vectors(nb, dim)
|
2020-02-29 08:11:31 +00:00
|
|
|
vectors = sklearn.preprocessing.normalize(vectors, axis=1, norm='l2')
|
|
|
|
vectors = vectors.tolist()
|
|
|
|
if partition_tags is None:
|
2020-03-09 04:00:37 +00:00
|
|
|
status, ids = connect.add_vectors(collection, add_vectors)
|
2020-02-29 08:11:31 +00:00
|
|
|
assert status.OK()
|
|
|
|
else:
|
2020-03-09 04:00:37 +00:00
|
|
|
status, ids = connect.add_vectors(collection, add_vectors, partition_tag=partition_tags)
|
2020-02-29 08:11:31 +00:00
|
|
|
assert status.OK()
|
2019-10-16 10:40:31 +00:00
|
|
|
sleep(add_interval_time)
|
|
|
|
return add_vectors, ids
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def init_binary_data(self, connect, collection, nb=6000, insert=True, partition_tags=None):
|
2020-01-14 11:22:27 +00:00
|
|
|
'''
|
2020-03-09 04:00:37 +00:00
|
|
|
Generate vectors and add it in collection, before search vectors
|
2020-01-14 11:22:27 +00:00
|
|
|
'''
|
|
|
|
ids = []
|
|
|
|
global binary_vectors
|
|
|
|
global raw_vectors
|
|
|
|
if nb == 6000:
|
|
|
|
add_vectors = binary_vectors
|
|
|
|
add_raw_vectors = raw_vectors
|
|
|
|
else:
|
|
|
|
add_raw_vectors, add_vectors = gen_binary_vectors(nb, dim)
|
|
|
|
if insert is True:
|
2020-02-29 08:11:31 +00:00
|
|
|
if partition_tags is None:
|
2020-03-09 04:00:37 +00:00
|
|
|
status, ids = connect.add_vectors(collection, add_vectors)
|
2020-02-29 08:11:31 +00:00
|
|
|
assert status.OK()
|
|
|
|
else:
|
2020-03-09 04:00:37 +00:00
|
|
|
status, ids = connect.add_vectors(collection, add_vectors, partition_tag=partition_tags)
|
2020-02-29 08:11:31 +00:00
|
|
|
assert status.OK()
|
2020-01-14 11:22:27 +00:00
|
|
|
sleep(add_interval_time)
|
|
|
|
return add_raw_vectors, add_vectors, ids
|
|
|
|
|
2019-10-16 10:40:31 +00:00
|
|
|
"""
|
|
|
|
generate valid create_index params
|
|
|
|
"""
|
|
|
|
@pytest.fixture(
|
|
|
|
scope="function",
|
2020-03-07 07:23:34 +00:00
|
|
|
params=gen_index()
|
2019-10-16 10:40:31 +00:00
|
|
|
)
|
2020-03-07 07:23:34 +00:00
|
|
|
def get_index(self, request, connect):
|
2020-01-14 11:22:27 +00:00
|
|
|
if str(connect._cmd("mode")[1]) == "CPU":
|
2019-10-19 09:12:59 +00:00
|
|
|
if request.param["index_type"] == IndexType.IVF_SQ8H:
|
2020-02-29 08:11:31 +00:00
|
|
|
pytest.skip("sq8h not support in CPU mode")
|
2020-03-07 07:23:34 +00:00
|
|
|
if str(connect._cmd("mode")[1]) == "GPU":
|
|
|
|
if request.param["index_type"] == IndexType.IVF_PQ:
|
|
|
|
pytest.skip("ivfpq not support in GPU mode")
|
2019-10-19 09:12:59 +00:00
|
|
|
return request.param
|
2019-10-16 10:40:31 +00:00
|
|
|
|
2019-11-21 09:23:06 +00:00
|
|
|
@pytest.fixture(
|
|
|
|
scope="function",
|
2020-03-07 07:23:34 +00:00
|
|
|
params=gen_simple_index()
|
2019-11-21 09:23:06 +00:00
|
|
|
)
|
2020-03-07 07:23:34 +00:00
|
|
|
def get_simple_index(self, request, connect):
|
2020-01-14 11:22:27 +00:00
|
|
|
if str(connect._cmd("mode")[1]) == "CPU":
|
2019-11-21 09:23:06 +00:00
|
|
|
if request.param["index_type"] == IndexType.IVF_SQ8H:
|
2020-02-29 08:11:31 +00:00
|
|
|
pytest.skip("sq8h not support in CPU mode")
|
2020-03-07 07:23:34 +00:00
|
|
|
if str(connect._cmd("mode")[1]) == "GPU":
|
|
|
|
if request.param["index_type"] == IndexType.IVF_PQ:
|
|
|
|
pytest.skip("ivfpq not support in GPU mode")
|
2019-11-21 09:23:06 +00:00
|
|
|
return request.param
|
2020-01-14 11:22:27 +00:00
|
|
|
|
|
|
|
@pytest.fixture(
|
|
|
|
scope="function",
|
2020-03-07 07:23:34 +00:00
|
|
|
params=gen_simple_index()
|
2020-01-14 11:22:27 +00:00
|
|
|
)
|
2020-03-07 07:23:34 +00:00
|
|
|
def get_jaccard_index(self, request, connect):
|
2020-01-14 11:22:27 +00:00
|
|
|
logging.getLogger().info(request.param)
|
|
|
|
if request.param["index_type"] == IndexType.IVFLAT or request.param["index_type"] == IndexType.FLAT:
|
|
|
|
return request.param
|
|
|
|
else:
|
|
|
|
pytest.skip("Skip index Temporary")
|
|
|
|
|
|
|
|
@pytest.fixture(
|
|
|
|
scope="function",
|
2020-03-07 07:23:34 +00:00
|
|
|
params=gen_simple_index()
|
2020-01-14 11:22:27 +00:00
|
|
|
)
|
2020-03-07 07:23:34 +00:00
|
|
|
def get_hamming_index(self, request, connect):
|
2020-01-14 11:22:27 +00:00
|
|
|
logging.getLogger().info(request.param)
|
|
|
|
if request.param["index_type"] == IndexType.IVFLAT or request.param["index_type"] == IndexType.FLAT:
|
|
|
|
return request.param
|
|
|
|
else:
|
|
|
|
pytest.skip("Skip index Temporary")
|
|
|
|
|
2020-03-16 13:33:25 +00:00
|
|
|
@pytest.fixture(
|
|
|
|
scope="function",
|
|
|
|
params=gen_simple_index()
|
|
|
|
)
|
|
|
|
def get_structure_index(self, request, connect):
|
|
|
|
logging.getLogger().info(request.param)
|
|
|
|
if request.param["index_type"] == IndexType.FLAT:
|
|
|
|
return request.param
|
|
|
|
else:
|
|
|
|
pytest.skip("Skip index Temporary")
|
|
|
|
|
2019-10-16 10:40:31 +00:00
|
|
|
"""
|
|
|
|
generate top-k params
|
|
|
|
"""
|
|
|
|
    @pytest.fixture(
        scope="function",
        # Boundary values around the server-side 2048 top-k limit.
        params=[1, 99, 1024, 2048, 2049]
    )
    def get_top_k(self, request):
        # Parametrized top-k values for the top-k search tests.
        yield request.param
|
|
|
|
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_top_k_flat_index(self, connect, collection, get_top_k):
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, change top-k value
|
|
|
|
method: search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-09 04:00:37 +00:00
|
|
|
vectors, ids = self.init_data(connect, collection)
|
2019-10-16 10:40:31 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = get_top_k
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec)
|
2019-10-16 10:40:31 +00:00
|
|
|
if top_k <= 2048:
|
|
|
|
assert status.OK()
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert result[0][0].distance <= epsilon
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
else:
|
|
|
|
assert not status.OK()
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params(self, connect, collection, get_simple_index):
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
vectors, ids = self.init_data(connect, collection)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-10-16 10:40:31 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, params=search_param)
|
2019-10-16 10:40:31 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
if top_k <= 1024:
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if index_type == IndexType.IVF_PQ:
|
|
|
|
return
|
2019-10-16 10:40:31 +00:00
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance <= epsilon
|
|
|
|
else:
|
|
|
|
assert not status.OK()
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_large_nq_index_params(self, connect, collection, get_simple_index):
|
2020-03-07 07:23:34 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
vectors, ids = self.init_data(connect, collection)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2020-03-07 07:23:34 +00:00
|
|
|
query_vec = []
|
|
|
|
for i in range (1200):
|
|
|
|
query_vec.append(vectors[i])
|
|
|
|
top_k = 10
|
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, params=search_param)
|
2020-03-07 07:23:34 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
|
|
|
if index_type == IndexType.IVF_PQ:
|
|
|
|
return
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance <= epsilon
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params_partition(self, connect, collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
2020-03-09 04:00:37 +00:00
|
|
|
method: add vectors into collection, search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k, search collection with partition tag return empty
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(collection, tag)
|
|
|
|
vectors, ids = self.init_data(connect, collection)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance <= epsilon
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=[tag], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
|
|
|
assert len(result) == 0
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params_partition_A(self, connect, collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search partition with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is 0
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(collection, tag)
|
|
|
|
vectors, ids = self.init_data(connect, collection)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=[tag], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
|
|
|
assert len(result) == 0
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params_partition_B(self, connect, collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(collection, tag)
|
|
|
|
vectors, ids = self.init_data(connect, collection, partition_tags=tag)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance <= epsilon
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=[tag], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance <= epsilon
|
2019-11-21 09:23:06 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params_partition_C(self, connect, collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
2020-03-09 04:00:37 +00:00
|
|
|
method: search with the given vectors and tags (one of the tags not existed in collection), check the result
|
2019-11-21 09:23:06 +00:00
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(collection, tag)
|
|
|
|
vectors, ids = self.init_data(connect, collection, partition_tags=tag)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=[tag, "new_tag"], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance <= epsilon
|
2019-11-21 09:23:06 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params_partition_D(self, connect, collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
2020-03-09 04:00:37 +00:00
|
|
|
method: search with the given vectors and tag (tag name not existed in collection), check the result
|
2019-11-21 09:23:06 +00:00
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(collection, tag)
|
|
|
|
vectors, ids = self.init_data(connect, collection, partition_tags=tag)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=["new_tag"], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
|
|
|
assert len(result) == 0
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params_partition_E(self, connect, collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
2020-03-09 04:00:37 +00:00
|
|
|
method: search collection with the given vectors and tags, check the result
|
2019-11-21 09:23:06 +00:00
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
|
|
|
new_tag = "new_tag"
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(collection, tag)
|
|
|
|
status = connect.create_partition(collection, new_tag)
|
|
|
|
vectors, ids = self.init_data(connect, collection, partition_tags=tag)
|
|
|
|
new_vectors, new_ids = self.init_data(connect, collection, nb=6001, partition_tags=new_tag)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0], new_vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=[tag, new_tag], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert check_result(result[1], new_ids[0])
|
|
|
|
assert result[0][0].distance <= epsilon
|
|
|
|
assert result[1][0].distance <= epsilon
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=[new_tag], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[1], new_ids[0])
|
|
|
|
assert result[1][0].distance <= epsilon
|
2019-11-21 09:23:06 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_l2_index_params_partition_F(self, connect, collection, get_simple_index):
|
2019-11-27 04:39:35 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
2020-03-09 04:00:37 +00:00
|
|
|
method: search collection with the given vectors and tags with "re" expr, check the result
|
2019-11-27 04:39:35 +00:00
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2019-11-27 05:41:45 +00:00
|
|
|
tag = "atag"
|
2019-11-27 04:39:35 +00:00
|
|
|
new_tag = "new_tag"
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(collection, tag)
|
|
|
|
status = connect.create_partition(collection, new_tag)
|
|
|
|
vectors, ids = self.init_data(connect, collection, partition_tags=tag)
|
|
|
|
new_vectors, new_ids = self.init_data(connect, collection, nb=6001, partition_tags=new_tag)
|
|
|
|
status = connect.create_index(collection, index_type, index_param)
|
2019-11-27 04:39:35 +00:00
|
|
|
query_vec = [vectors[0], new_vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=["new(.*)"], params=search_param)
|
2019-11-27 04:39:35 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert result[0][0].distance > epsilon
|
|
|
|
assert result[1][0].distance <= epsilon
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection, top_k, query_vec, partition_tags=["(.*)tag"], params=search_param)
|
2019-11-27 04:39:35 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert result[0][0].distance <= epsilon
|
|
|
|
assert result[1][0].distance <= epsilon
|
2019-11-27 04:39:35 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_ip_index_params(self, connect, ip_collection, get_simple_index):
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-16 11:32:14 +00:00
|
|
|
if index_type == IndexType.RNSG:
|
|
|
|
pytest.skip("rnsg not support in ip")
|
2020-03-09 04:00:37 +00:00
|
|
|
vectors, ids = self.init_data(connect, ip_collection)
|
|
|
|
status = connect.create_index(ip_collection, index_type, index_param)
|
2019-10-16 10:40:31 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(ip_collection, top_k, query_vec, params=search_param)
|
2019-10-16 10:40:31 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
|
|
|
|
if top_k <= 1024:
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance >= 1 - gen_inaccuracy(result[0][0].distance)
|
|
|
|
else:
|
|
|
|
assert not status.OK()
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_ip_large_nq_index_params(self, connect, ip_collection, get_simple_index):
|
2020-03-07 07:23:34 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(get_simple_index)
|
2020-03-16 11:32:14 +00:00
|
|
|
if index_type == IndexType.RNSG:
|
|
|
|
pytest.skip("rnsg not support in ip")
|
2020-03-09 04:00:37 +00:00
|
|
|
vectors, ids = self.init_data(connect, ip_collection)
|
|
|
|
status = connect.create_index(ip_collection, index_type, index_param)
|
2020-03-07 07:23:34 +00:00
|
|
|
query_vec = []
|
|
|
|
for i in range (1200):
|
|
|
|
query_vec.append(vectors[i])
|
|
|
|
top_k = 10
|
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(ip_collection, top_k, query_vec, params=search_param)
|
2020-03-07 07:23:34 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
|
|
|
if(index_type != IndexType.IVF_PQ):
|
2019-10-16 10:40:31 +00:00
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
2020-02-29 08:11:31 +00:00
|
|
|
assert result[0][0].distance >= 1 - gen_inaccuracy(result[0][0].distance)
|
2019-10-16 10:40:31 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_ip_index_params_partition(self, connect, ip_collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search with the given vectors, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(index_param)
|
2020-03-16 11:32:14 +00:00
|
|
|
if index_type == IndexType.RNSG:
|
|
|
|
pytest.skip("rnsg not support in ip")
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(ip_collection, tag)
|
|
|
|
vectors, ids = self.init_data(connect, ip_collection)
|
|
|
|
status = connect.create_index(ip_collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(ip_collection, top_k, query_vec, params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance >= 1 - gen_inaccuracy(result[0][0].distance)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(ip_collection, top_k, query_vec, partition_tags=[tag], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
|
|
|
assert len(result) == 0
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_ip_index_params_partition_A(self, connect, ip_collection, get_simple_index):
|
2019-11-21 09:23:06 +00:00
|
|
|
'''
|
|
|
|
target: test basic search fuction, all the search params is corrent, test all index params, and build
|
|
|
|
method: search with the given vectors and tag, check the result
|
|
|
|
expected: search status ok, and the length of the result is top_k
|
|
|
|
'''
|
2020-03-07 07:23:34 +00:00
|
|
|
index_param = get_simple_index["index_param"]
|
|
|
|
index_type = get_simple_index["index_type"]
|
|
|
|
logging.getLogger().info(index_param)
|
2020-03-16 11:32:14 +00:00
|
|
|
if index_type == IndexType.RNSG:
|
|
|
|
pytest.skip("rnsg not support in ip")
|
2020-03-09 04:00:37 +00:00
|
|
|
status = connect.create_partition(ip_collection, tag)
|
|
|
|
vectors, ids = self.init_data(connect, ip_collection, partition_tags=tag)
|
|
|
|
status = connect.create_index(ip_collection, index_type, index_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
query_vec = [vectors[0]]
|
|
|
|
top_k = 10
|
2020-03-07 07:23:34 +00:00
|
|
|
search_param = get_search_param(index_type)
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(ip_collection, top_k, query_vec, partition_tags=[tag], params=search_param)
|
2019-11-21 09:23:06 +00:00
|
|
|
logging.getLogger().info(result)
|
|
|
|
assert status.OK()
|
2020-03-07 07:23:34 +00:00
|
|
|
if(index_type != IndexType.IVF_PQ):
|
|
|
|
assert len(result[0]) == min(len(vectors), top_k)
|
|
|
|
assert check_result(result[0], ids[0])
|
|
|
|
assert result[0][0].distance >= 1 - gen_inaccuracy(result[0][0].distance)
|
2019-11-21 09:23:06 +00:00
|
|
|
|
2019-10-16 10:40:31 +00:00
|
|
|
@pytest.mark.level(2)
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_vectors_without_connect(self, dis_connect, collection):
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
|
|
|
target: test search vectors without connection
|
|
|
|
method: use dis connected instance, call search method and check if search successfully
|
|
|
|
expected: raise exception
|
|
|
|
'''
|
|
|
|
query_vectors = [vectors[0]]
|
|
|
|
top_k = 1
|
|
|
|
nprobe = 1
|
|
|
|
with pytest.raises(Exception) as e:
|
2020-03-09 04:00:37 +00:00
|
|
|
status, ids = dis_connect.search_vectors(collection, top_k, query_vectors)
|
2019-10-16 10:40:31 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_collection_name_not_existed(self, connect, collection):
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
2020-03-09 04:00:37 +00:00
|
|
|
target: search collection not existed
|
|
|
|
method: search with the random collection_name, which is not in db
|
2019-10-16 10:40:31 +00:00
|
|
|
expected: status not ok
|
|
|
|
'''
|
2020-03-09 04:00:37 +00:00
|
|
|
collection_name = gen_unique_str("not_existed_collection")
|
2019-10-16 10:40:31 +00:00
|
|
|
top_k = 1
|
|
|
|
nprobe = 1
|
|
|
|
query_vecs = [vectors[0]]
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection_name, top_k, query_vecs)
|
2019-10-16 10:40:31 +00:00
|
|
|
assert not status.OK()
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_collection_name_None(self, connect, collection):
|
2019-10-16 10:40:31 +00:00
|
|
|
'''
|
2020-03-09 04:00:37 +00:00
|
|
|
target: search collection that collection name is None
|
|
|
|
method: search with the collection_name: None
|
2019-10-16 10:40:31 +00:00
|
|
|
expected: status not ok
|
|
|
|
'''
|
2020-03-09 04:00:37 +00:00
|
|
|
collection_name = None
|
2019-10-16 10:40:31 +00:00
|
|
|
top_k = 1
|
|
|
|
nprobe = 1
|
|
|
|
query_vecs = [vectors[0]]
|
|
|
|
with pytest.raises(Exception) as e:
|
2020-03-09 04:00:37 +00:00
|
|
|
status, result = connect.search_vectors(collection_name, top_k, query_vecs)
|
2019-10-16 10:40:31 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_top_k_query_records(self, connect, collection):
    '''
    target: test search fuction, with search params: query_records
    method: search with the given query_records, which are subarrays of the inserted vectors
    expected: status ok and the returned vectors should be query_records
    '''
    top_k = 10
    inserted, ids = self.init_data(connect, collection)
    queries = [inserted[0], inserted[55], inserted[99]]
    status, result = connect.search_vectors(collection, top_k, queries)
    assert status.OK()
    assert len(result) == len(queries)
    # Each query is one of the inserted vectors, so its nearest hit is itself.
    for hits in result:
        assert len(hits) == top_k
        assert hits[0].distance <= epsilon
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_distance_l2_flat_index(self, connect, collection):
    '''
    target: search collection, and check the result: distance
    method: compare the return distance value with value computed with Euclidean
    expected: the return distance equals to the computed value
    '''
    nb, top_k = 2, 1
    inserted, ids = self.init_data(connect, collection, nb=nb)
    queries = [[0.50] * dim]
    q = numpy.array(queries[0])
    expected = min(numpy.linalg.norm(q - numpy.array(v)) for v in inserted[:2])
    status, result = connect.search_vectors(collection, top_k, queries)
    # The server reports squared L2 distance, hence the sqrt before comparing.
    assert abs(numpy.sqrt(result[0][0].distance) - expected) <= gen_inaccuracy(result[0][0].distance)
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_distance_ip_flat_index(self, connect, ip_collection):
    '''
    target: search ip_collection, and check the result: distance
    method: compare the return distance value with value computed with Inner product
    expected: the return distance equals to the computed value
    '''
    nb = 2
    top_k = 1
    nprobe = 1
    inserted, ids = self.init_data(connect, ip_collection, nb=nb)
    index_type = IndexType.FLAT
    connect.create_index(ip_collection, index_type, {"nlist": 16384})
    logging.getLogger().info(connect.describe_index(ip_collection))
    queries = [[0.50] * dim]
    q = numpy.array(queries[0])
    # For the IP metric the best hit carries the *largest* inner product.
    expected = max(numpy.inner(q, numpy.array(v)) for v in inserted[:2])
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(ip_collection, top_k, queries, params=search_param)
    assert abs(result[0][0].distance - expected) <= gen_inaccuracy(result[0][0].distance)
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_distance_jaccard_flat_index(self, connect, jac_collection):
    '''
    target: search jac_collection, and check the result: distance
    method: compare the returned distance with the Jaccard distance computed locally
    expected: the return distance equals to the computed value
    '''
    # from scipy.spatial import distance
    top_k = 1
    nprobe = 512
    # Insert two binary vectors; keep both the raw int form (for local math)
    # and the packed form used by the server.
    int_vectors, vectors, ids = self.init_binary_data(connect, jac_collection, nb=2)
    index_type = IndexType.FLAT
    index_param = {
        "nlist": 16384
    }
    connect.create_index(jac_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_collection(jac_collection))
    logging.getLogger().info(connect.describe_index(jac_collection))
    # A fresh query vector that is NOT inserted (insert=False).
    query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, jac_collection, nb=1, insert=False)
    distance_0 = jaccard(query_int_vectors[0], int_vectors[0])
    distance_1 = jaccard(query_int_vectors[0], int_vectors[1])
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(jac_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().info(status)
    logging.getLogger().info(result)
    # Jaccard is a distance: the nearest neighbour has the smaller value.
    assert abs(result[0][0].distance - min(distance_0, distance_1)) <= epsilon
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_distance_hamming_flat_index(self, connect, ham_collection):
    '''
    target: search ham_collection, and check the result: distance
    method: compare the returned distance with the Hamming distance computed locally
    expected: the return distance equals to the computed value
    '''
    # from scipy.spatial import distance
    top_k = 1
    nprobe = 512
    # Insert two binary vectors; int form for local math, packed form for the server.
    int_vectors, vectors, ids = self.init_binary_data(connect, ham_collection, nb=2)
    index_type = IndexType.FLAT
    index_param = {
        "nlist": 16384
    }
    connect.create_index(ham_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_collection(ham_collection))
    logging.getLogger().info(connect.describe_index(ham_collection))
    # A fresh query vector that is NOT inserted (insert=False).
    query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, ham_collection, nb=1, insert=False)
    distance_0 = hamming(query_int_vectors[0], int_vectors[0])
    distance_1 = hamming(query_int_vectors[0], int_vectors[1])
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(ham_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().info(status)
    logging.getLogger().info(result)
    # NOTE(review): hamming() apparently yields a numpy scalar, hence .astype(float) — confirm.
    assert abs(result[0][0].distance - min(distance_0, distance_1).astype(float)) <= epsilon
|
|
|
|
|
2020-03-16 13:33:25 +00:00
|
|
|
def test_search_distance_substructure_flat_index(self, connect, substructure_collection):
    '''
    target: search substructure_collection, and check the result
    method: search with a random (non-inserted) binary query on a SUBSTRUCTURE collection
    expected: no hit is returned (id == -1), since a random query is not a
              substructure of the inserted vectors
    '''
    # from scipy.spatial import distance
    top_k = 1
    nprobe = 512
    int_vectors, vectors, ids = self.init_binary_data(connect, substructure_collection, nb=2)
    index_type = IndexType.FLAT
    index_param = {
        "nlist": 16384
    }
    connect.create_index(substructure_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_collection(substructure_collection))
    logging.getLogger().info(connect.describe_index(substructure_collection))
    # A fresh query vector that is NOT inserted (insert=False).
    query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, substructure_collection, nb=1, insert=False)
    # Reference values computed locally (not asserted against below).
    distance_0 = substructure(query_int_vectors[0], int_vectors[0])
    distance_1 = substructure(query_int_vectors[0], int_vectors[1])
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(substructure_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().info(status)
    logging.getLogger().info(result)
    # -1 is the sentinel id for "no matching vector".
    assert result[0][0].id == -1
|
|
|
|
|
|
|
|
def test_search_distance_substructure_flat_index_B(self, connect, substructure_collection):
    '''
    target: search substructure_collection, and check the result: distance
    method: compare the return distance value with value computed with SUB
    expected: each generated sub-vector matches exactly its source vector
    '''
    # from scipy.spatial import distance
    top_k = 3
    nprobe = 512
    int_vectors, vectors, ids = self.init_binary_data(connect, substructure_collection, nb=2)
    index_type = IndexType.FLAT
    index_param = {
        "nlist": 16384
    }
    connect.create_index(substructure_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_collection(substructure_collection))
    logging.getLogger().info(connect.describe_index(substructure_collection))
    # Build queries that ARE substructures of the two inserted vectors.
    query_int_vectors, query_vecs = gen_binary_sub_vectors(int_vectors, 2)
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(substructure_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().info(status)
    logging.getLogger().info(result)
    # Query i matches only its source vector i; remaining slots are empty (-1).
    assert result[0][0].distance <= epsilon
    assert result[0][0].id == ids[0]
    assert result[1][0].distance <= epsilon
    assert result[1][0].id == ids[1]
    assert result[0][1].id == -1
    assert result[1][1].id == -1
|
2020-03-16 13:33:25 +00:00
|
|
|
|
|
|
|
def test_search_distance_superstructure_flat_index(self, connect, superstructure_collection):
    '''
    target: search superstructure_collection, and check the result
    method: search with a random (non-inserted) binary query on a SUPERSTRUCTURE collection
    expected: no hit is returned (id == -1), since a random query is not a
              superstructure of the inserted vectors
    '''
    # from scipy.spatial import distance
    top_k = 1
    nprobe = 512
    int_vectors, vectors, ids = self.init_binary_data(connect, superstructure_collection, nb=2)
    index_type = IndexType.FLAT
    index_param = {
        "nlist": 16384
    }
    connect.create_index(superstructure_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_collection(superstructure_collection))
    logging.getLogger().info(connect.describe_index(superstructure_collection))
    # A fresh query vector that is NOT inserted (insert=False).
    query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, superstructure_collection, nb=1, insert=False)
    # Reference values computed locally (not asserted against below).
    distance_0 = superstructure(query_int_vectors[0], int_vectors[0])
    distance_1 = superstructure(query_int_vectors[0], int_vectors[1])
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(superstructure_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().info(status)
    logging.getLogger().info(result)
    # -1 is the sentinel id for "no matching vector".
    assert result[0][0].id == -1
|
|
|
|
|
|
|
|
def test_search_distance_superstructure_flat_index_B(self, connect, superstructure_collection):
    '''
    target: search superstructure_collection, and check the result: distance
    method: compare the return distance value with value computed with SUPER
    expected: generated super-vectors match the inserted vectors exactly
    '''
    # from scipy.spatial import distance
    top_k = 3
    nprobe = 512
    int_vectors, vectors, ids = self.init_binary_data(connect, superstructure_collection, nb=2)
    index_type = IndexType.FLAT
    index_param = {
        "nlist": 16384
    }
    connect.create_index(superstructure_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_collection(superstructure_collection))
    logging.getLogger().info(connect.describe_index(superstructure_collection))
    # Build queries that ARE superstructures of the two inserted vectors.
    query_int_vectors, query_vecs = gen_binary_super_vectors(int_vectors, 2)
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(superstructure_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().info(status)
    logging.getLogger().info(result)
    # Each query can match both inserted vectors; the third slot stays empty (-1).
    assert result[0][0].id in ids
    assert result[0][0].distance <= epsilon
    assert result[1][0].id in ids
    assert result[1][0].distance <= epsilon
    assert result[0][2].id == -1
    assert result[1][2].id == -1
|
2020-03-16 13:33:25 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_distance_tanimoto_flat_index(self, connect, tanimoto_collection):
    '''
    target: search tanimoto_collection, and check the result: distance
    method: compare the returned distance with the Tanimoto distance computed locally
    expected: the return distance equals to the computed value
    '''
    # from scipy.spatial import distance
    top_k = 1
    nprobe = 512
    # Insert two binary vectors; int form for local math, packed form for the server.
    int_vectors, vectors, ids = self.init_binary_data(connect, tanimoto_collection, nb=2)
    index_type = IndexType.FLAT
    index_param = {
        "nlist": 16384
    }
    connect.create_index(tanimoto_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_collection(tanimoto_collection))
    logging.getLogger().info(connect.describe_index(tanimoto_collection))
    # A fresh query vector that is NOT inserted (insert=False).
    query_int_vectors, query_vecs, tmp_ids = self.init_binary_data(connect, tanimoto_collection, nb=1, insert=False)
    distance_0 = tanimoto(query_int_vectors[0], int_vectors[0])
    distance_1 = tanimoto(query_int_vectors[0], int_vectors[1])
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(tanimoto_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().info(status)
    logging.getLogger().info(result)
    # Tanimoto is a distance: the nearest neighbour has the smaller value.
    assert abs(result[0][0].distance - min(distance_0, distance_1)) <= epsilon
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_distance_ip_index_params(self, connect, ip_collection, get_index):
    '''
    target: search collection, and check the result: distance
    method: compare the return distance value with value computed with Inner product
    expected: the return distance equals to the computed value
    '''
    top_k = 2
    nprobe = 1
    index_param = get_index["index_param"]
    index_type = get_index["index_type"]
    # RNSG cannot be built on inner-product collections.
    if index_type == IndexType.RNSG:
        pytest.skip("rnsg not support in ip")
    vectors, ids = self.init_data(connect, ip_collection, nb=2)
    connect.create_index(ip_collection, index_type, index_param)
    logging.getLogger().info(connect.describe_index(ip_collection))
    query_vecs = [[0.50 for i in range(dim)]]
    search_param = get_search_param(index_type)
    status, result = connect.search_vectors(ip_collection, top_k, query_vecs, params=search_param)
    logging.getLogger().debug(status)
    logging.getLogger().debug(result)
    distance_0 = numpy.inner(numpy.array(query_vecs[0]), numpy.array(vectors[0]))
    distance_1 = numpy.inner(numpy.array(query_vecs[0]), numpy.array(vectors[1]))
    # IP similarity: the top hit carries the larger inner product.
    assert abs(result[0][0].distance - max(distance_0, distance_1)) <= gen_inaccuracy(result[0][0].distance)
|
|
|
|
|
|
|
|
# TODO: enable
# @pytest.mark.repeat(5)
@pytest.mark.timeout(30)
def _test_search_concurrent(self, connect, collection):
    # Disabled test (leading underscore): many threads search through ONE
    # shared connection; every query vector is an inserted vector, so each
    # must come back as its own exact nearest neighbour.
    vectors, ids = self.init_data(connect, collection)
    thread_num = 10
    nb = 100
    top_k = 10
    threads = []
    query_vecs = vectors[nb//2:nb]
    def search():
        status, result = connect.search_vectors(collection, top_k, query_vecs)
        assert len(result) == len(query_vecs)
        for i in range(len(query_vecs)):
            assert result[i][0].id in ids
            assert result[i][0].distance == 0.0
    for i in range(thread_num):
        x = threading.Thread(target=search, args=())
        threads.append(x)
        x.start()
    for th in threads:
        th.join()
|
|
|
|
|
2020-03-07 07:23:34 +00:00
|
|
|
@pytest.mark.timeout(30)
def test_search_concurrent_multithreads(self, args):
    '''
    target: test concurrent search with multiprocessess
    method: search with 10 processes, each process uses dependent connection
    expected: status ok and the returned vectors should be query_records
    '''
    nb = 100
    top_k = 10
    threads_num = 4
    threads = []
    collection = gen_unique_str("test_search_concurrent_multiprocessing")
    uri = "tcp://%s:%s" % (args["ip"], args["port"])
    param = {'collection_name': collection,
             'dimension': dim,
             'index_type': IndexType.FLAT,
             'store_raw_vector': False}
    # create collection
    milvus = get_milvus(args["handler"])
    milvus.connect(uri=uri)
    milvus.create_collection(param)
    vectors, ids = self.init_data(milvus, collection, nb=nb)
    query_vecs = vectors[nb//2:nb]
    def search(milvus):
        # Each worker gets its own client (passed in), never the shared one.
        status, result = milvus.search_vectors(collection, top_k, query_vecs)
        assert len(result) == len(query_vecs)
        for i in range(len(query_vecs)):
            assert result[i][0].id in ids
            assert result[i][0].distance == 0.0

    for i in range(threads_num):
        milvus = get_milvus(args["handler"])
        milvus.connect(uri=uri)
        t = threading.Thread(target=search, args=(milvus, ))
        threads.append(t)
        t.start()
        # NOTE(review): `time` is not imported at file top (only `from time import
        # sleep`); this relies on `from utils import *` exporting it — confirm.
        time.sleep(0.2)
    for t in threads:
        t.join()
|
|
|
|
|
2019-10-16 10:40:31 +00:00
|
|
|
# TODO: enable
@pytest.mark.timeout(30)
def _test_search_concurrent_multiprocessing(self, args):
    '''
    target: test concurrent search with multiprocessess
    method: search with 10 processes, each process uses dependent connection
    expected: status ok and the returned vectors should be query_records
    '''
    # Disabled test (leading underscore): same scenario as the multithreaded
    # variant but fanning out over OS processes.
    nb = 100
    top_k = 10
    process_num = 4
    processes = []
    collection = gen_unique_str("test_search_concurrent_multiprocessing")
    uri = "tcp://%s:%s" % (args["ip"], args["port"])
    param = {'collection_name': collection,
             'dimension': dim,
             'index_type': IndexType.FLAT,
             'store_raw_vector': False}
    # create collection
    milvus = get_milvus(args["handler"])
    milvus.connect(uri=uri)
    milvus.create_collection(param)
    vectors, ids = self.init_data(milvus, collection, nb=nb)
    query_vecs = vectors[nb//2:nb]
    def search(milvus):
        # Runs in a child process with its own dedicated client.
        status, result = milvus.search_vectors(collection, top_k, query_vecs)
        assert len(result) == len(query_vecs)
        for i in range(len(query_vecs)):
            assert result[i][0].id in ids
            assert result[i][0].distance == 0.0

    for i in range(process_num):
        milvus = get_milvus(args["handler"])
        milvus.connect(uri=uri)
        p = Process(target=search, args=(milvus, ))
        processes.append(p)
        p.start()
        # NOTE(review): relies on `time` being exported via `from utils import *` — confirm.
        time.sleep(0.2)
    for p in processes:
        p.join()
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_multi_collection_L2(search, args):
    # NOTE(review): the first parameter is named `search` but, as a method of a
    # test class, it is bound as `self` by pytest; it is unused in the body — confirm intent.
    '''
    target: test search multi collections of L2
    method: add vectors into 10 collections, and search
    expected: search status ok, the length of result
    '''
    num = 10
    top_k = 10
    collections = []
    idx = []
    for i in range(num):
        collection = gen_unique_str("test_add_multicollection_%d" % i)
        uri = "tcp://%s:%s" % (args["ip"], args["port"])
        param = {'collection_name': collection,
                 'dimension': dim,
                 'index_file_size': 10,
                 'metric_type': MetricType.L2}
        # create collection
        milvus = get_milvus(args["handler"])
        milvus.connect(uri=uri)
        milvus.create_collection(param)
        status, ids = milvus.add_vectors(collection, vectors)
        assert status.OK()
        assert len(ids) == len(vectors)
        collections.append(collection)
        # Remember three ids per collection; looked up as idx[3*i + j] below.
        idx.append(ids[0])
        idx.append(ids[10])
        idx.append(ids[20])
        # NOTE(review): relies on `time` being exported via `from utils import *` — confirm.
        time.sleep(6)
    query_vecs = [vectors[0], vectors[10], vectors[20]]
    # start query from random collection
    for i in range(num):
        collection = collections[i]
        status, result = milvus.search_vectors(collection, top_k, query_vecs)
        assert status.OK()
        assert len(result) == len(query_vecs)
        for j in range(len(query_vecs)):
            assert len(result[j]) == top_k
        for j in range(len(query_vecs)):
            assert check_result(result[j], idx[3 * i + j])
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_multi_collection_IP(search, args):
    '''
    target: test search multi collections of IP
    method: add vectors into 10 collections, and search
    expected: search status ok, the length of result
    '''
    num = 10
    top_k = 10
    collections = []
    idx = []
    for i in range(num):
        collection = gen_unique_str("test_add_multicollection_%d" % i)
        uri = "tcp://%s:%s" % (args["ip"], args["port"])
        param = {'collection_name': collection,
                 'dimension': dim,
                 'index_file_size': 10,
                 # BUG FIX: this IP test previously created L2 collections
                 # (copy-paste from the L2 variant); use the inner-product
                 # metric the test is meant to cover.
                 'metric_type': MetricType.IP}
        # create collection
        milvus = get_milvus(args["handler"])
        milvus.connect(uri=uri)
        milvus.create_collection(param)
        status, ids = milvus.add_vectors(collection, vectors)
        assert status.OK()
        assert len(ids) == len(vectors)
        collections.append(collection)
        # Remember three ids per collection; looked up as idx[3*i + j] below.
        idx.append(ids[0])
        idx.append(ids[10])
        idx.append(ids[20])
        # NOTE(review): relies on `time` being exported via `from utils import *` — confirm.
        time.sleep(6)
    query_vecs = [vectors[0], vectors[10], vectors[20]]
    # start query from random collection
    for i in range(num):
        collection = collections[i]
        status, result = milvus.search_vectors(collection, top_k, query_vecs)
        assert status.OK()
        assert len(result) == len(query_vecs)
        for j in range(len(query_vecs)):
            assert len(result[j]) == top_k
        for j in range(len(query_vecs)):
            assert check_result(result[j], idx[3 * i + j])
|
|
|
|
"""
|
|
|
|
******************************************************************
|
|
|
|
# The following cases are used to test `search_vectors` function
|
2020-03-09 04:00:37 +00:00
|
|
|
# with invalid collection_name top-k / nprobe / query_range
|
2019-10-16 10:40:31 +00:00
|
|
|
******************************************************************
|
|
|
|
"""
|
|
|
|
|
|
|
|
class TestSearchParamsInvalid(object):
    """Negative tests: `search_vectors` with invalid collection name / top-k / nprobe."""
    # Shared index configuration used by the nprobe tests below.
    nlist = 16384
    index_type = IndexType.IVF_SQ8
    index_param = {"nlist": nlist}
    # Executed once, at class-definition (collection) time.
    logging.getLogger().info(index_param)
|
2019-10-16 10:40:31 +00:00
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def init_data(self, connect, collection, nb=6000):
    '''
    Generate vectors and add it in collection, before search vectors
    '''
    global vectors
    # Reuse the module-level sample for the default size; otherwise build fresh data.
    add_vectors = vectors if nb == 6000 else gen_vectors(nb, dim)
    status, ids = connect.add_vectors(collection, add_vectors)
    # Give the server time to make the new vectors searchable.
    sleep(add_interval_time)
    return add_vectors, ids
|
|
|
|
|
|
|
|
"""
|
2020-03-09 04:00:37 +00:00
|
|
|
Test search collection with invalid collection names
|
2019-10-16 10:40:31 +00:00
|
|
|
"""
|
|
|
|
@pytest.fixture(
    scope="function",
    params=gen_invalid_collection_names()
)
def get_collection_name(self, request):
    # Parametrized fixture: yields each invalid collection name in turn.
    yield request.param
|
|
|
|
|
|
|
|
@pytest.mark.level(2)
def test_search_with_invalid_collectionname(self, connect, get_collection_name):
    # Searching with a malformed collection name must yield a failed status.
    bad_name = get_collection_name
    logging.getLogger().info(bad_name)
    top_k, nprobe = 1, 1
    status, result = connect.search_vectors(bad_name, top_k, gen_vectors(1, dim))
    assert not status.OK()
|
|
|
|
|
2019-11-21 09:23:06 +00:00
|
|
|
@pytest.mark.level(1)
def test_search_with_invalid_tag_format(self, connect, collection):
    # Passing a bare string (instead of a list) as partition_tags must raise client-side.
    top_k, nprobe = 1, 1
    vecs = gen_vectors(1, dim)
    with pytest.raises(Exception) as e:
        status, result = connect.search_vectors(collection, top_k, vecs, partition_tags="tag")
|
2019-11-21 09:23:06 +00:00
|
|
|
|
2019-10-16 10:40:31 +00:00
|
|
|
"""
|
2020-03-09 04:00:37 +00:00
|
|
|
Test search collection with invalid top-k
|
2019-10-16 10:40:31 +00:00
|
|
|
"""
|
|
|
|
@pytest.fixture(
    scope="function",
    params=gen_invalid_top_ks()
)
def get_top_k(self, request):
    # Parametrized fixture: yields each invalid top-k value in turn.
    yield request.param
|
|
|
|
|
2019-10-25 08:14:51 +00:00
|
|
|
@pytest.mark.level(1)
def test_search_with_invalid_top_k(self, connect, collection, get_top_k):
    '''
    target: test search fuction, with the wrong top_k
    method: search with top_k
    expected: raise an error, and the connection is normal
    '''
    top_k = get_top_k
    logging.getLogger().info(top_k)
    nprobe = 1
    vecs = gen_vectors(1, dim)
    if not isinstance(top_k, int):
        # Non-integer top_k is rejected client-side with an exception.
        with pytest.raises(Exception) as e:
            status, result = connect.search_vectors(collection, top_k, vecs)
    else:
        # An out-of-range integer reaches the server and fails there.
        status, result = connect.search_vectors(collection, top_k, vecs)
        assert not status.OK()
|
2019-10-16 10:40:31 +00:00
|
|
|
|
|
|
|
@pytest.mark.level(2)
def test_search_with_invalid_top_k_ip(self, connect, ip_collection, get_top_k):
    '''
    target: test search fuction, with the wrong top_k
    method: search with top_k
    expected: raise an error, and the connection is normal
    '''
    top_k = get_top_k
    logging.getLogger().info(top_k)
    nprobe = 1
    vecs = gen_vectors(1, dim)
    if not isinstance(top_k, int):
        # Non-integer top_k is rejected client-side with an exception.
        with pytest.raises(Exception) as e:
            status, result = connect.search_vectors(ip_collection, top_k, vecs)
    else:
        # An out-of-range integer reaches the server and fails there.
        status, result = connect.search_vectors(ip_collection, top_k, vecs)
        assert not status.OK()
|
2019-10-16 10:40:31 +00:00
|
|
|
"""
|
2020-03-09 04:00:37 +00:00
|
|
|
Test search collection with invalid nprobe
|
2019-10-16 10:40:31 +00:00
|
|
|
"""
|
|
|
|
@pytest.fixture(
    scope="function",
    params=gen_invalid_nprobes()
)
def get_nprobes(self, request):
    # Parametrized fixture: yields each invalid nprobe value in turn.
    yield request.param
|
|
|
|
|
2019-10-25 08:14:51 +00:00
|
|
|
@pytest.mark.level(1)
def test_search_with_invalid_nprobe(self, connect, collection, get_nprobes):
    '''
    target: test search fuction, with the wrong nprobe
    method: search with nprobe
    expected: raise an error, and the connection is normal
    '''
    # An IVF index must exist for nprobe to be meaningful.
    index_type = IndexType.IVF_SQ8
    index_param = {"nlist": 16384}
    connect.create_index(collection, index_type, index_param)

    top_k = 1
    nprobe = get_nprobes
    search_param = {"nprobe": nprobe}
    logging.getLogger().info(nprobe)
    query_vecs = gen_vectors(1, dim)
    # Every invalid nprobe is currently expected to fail server-side
    # (the int/non-int split below is deliberately disabled).
    # if isinstance(nprobe, int):
    status, result = connect.search_vectors(collection, top_k, query_vecs, params=search_param)
    assert not status.OK()
    # else:
    #     with pytest.raises(Exception) as e:
    #         status, result = connect.search_vectors(collection, top_k, query_vecs, params=search_param)
|
2019-10-16 10:40:31 +00:00
|
|
|
|
|
|
|
@pytest.mark.level(2)
def test_search_with_invalid_nprobe_ip(self, connect, ip_collection, get_nprobes):
    '''
    target: test search fuction, with the wrong top_k
    method: search with top_k
    expected: raise an error, and the connection is normal
    '''
    # An IVF index must exist for nprobe to be meaningful.
    index_type = IndexType.IVF_SQ8
    index_param = {"nlist": 16384}
    connect.create_index(ip_collection, index_type, index_param)

    top_k = 1
    nprobe = get_nprobes
    search_param = {"nprobe": nprobe}
    logging.getLogger().info(nprobe)
    query_vecs = gen_vectors(1, dim)

    # Every invalid nprobe is currently expected to fail server-side
    # (the int/non-int split below is deliberately disabled).
    # if isinstance(nprobe, int):
    status, result = connect.search_vectors(ip_collection, top_k, query_vecs, params=search_param)
    assert not status.OK()
    # else:
    #     with pytest.raises(Exception) as e:
    #         status, result = connect.search_vectors(ip_collection, top_k, query_vecs, params=search_param)
|
2020-03-07 07:23:34 +00:00
|
|
|
|
|
|
|
@pytest.fixture(
    scope="function",
    params=gen_simple_index()
)
def get_simple_index(self, request, connect):
    '''
    Parametrized index configurations, skipping combinations the current
    server mode cannot build.
    '''
    # Query the server mode once instead of once per branch.
    mode = str(connect._cmd("mode")[1])
    if mode == "CPU" and request.param["index_type"] == IndexType.IVF_SQ8H:
        pytest.skip("sq8h not support in CPU mode")
    if mode == "GPU" and request.param["index_type"] == IndexType.IVF_PQ:
        pytest.skip("ivfpq not support in GPU mode")
    return request.param
|
|
|
|
|
2020-03-13 12:21:23 +00:00
|
|
|
def test_search_with_empty_params(self, connect, collection, args, get_simple_index):
    '''
    target: test search function with empty search params
    method: create an index, then search with params={}
    expected: FLAT succeeds (it needs no search params); other index types fail
    '''
    if args["handler"] == "HTTP":
        pytest.skip("skip in http mode")
    index_type = get_simple_index["index_type"]
    index_param = get_simple_index["index_param"]
    connect.create_index(collection, index_type, index_param)
    top_k = 1
    query_vecs = gen_vectors(1, dim)
    status, _ = connect.search_vectors(collection, top_k, query_vecs, params={})
    # Only FLAT can search without explicit params; everything else requires them.
    if index_type == IndexType.FLAT:
        assert status.OK()
    else:
        assert not status.OK()
|
|
|
|
|
|
|
|
@pytest.fixture(
    scope="function",
    params=gen_invaild_search_params()
)
def get_invalid_searh_param(self, request, connect):
    '''
    Parametrized invalid search-param sets, skipping combinations the current
    server mode cannot build an index for.
    '''
    # Query the server mode once instead of once per branch.
    mode = str(connect._cmd("mode")[1])
    if mode == "CPU" and request.param["index_type"] == IndexType.IVF_SQ8H:
        pytest.skip("sq8h not support in CPU mode")
    if mode == "GPU" and request.param["index_type"] == IndexType.IVF_PQ:
        pytest.skip("ivfpq not support in GPU mode")
    return request.param
|
|
|
|
|
2020-03-09 04:00:37 +00:00
|
|
|
def test_search_with_invalid_params(self, connect, collection, get_invalid_searh_param):
    '''
    target: test search function with invalid search params
    method: build an index matching the parametrized index type, then search
            with the invalid params for that type
    expected: search status is not OK, and the connection stays usable
    '''
    index_type = get_invalid_searh_param["index_type"]
    search_param = get_invalid_searh_param["search_param"]

    # Each index type needs its own build params; the branches are mutually
    # exclusive, so use an elif chain. FLAT needs no index at all.
    if index_type in [IndexType.IVFLAT, IndexType.IVF_SQ8, IndexType.IVF_SQ8H]:
        connect.create_index(collection, index_type, {"nlist": 16384})
    elif index_type == IndexType.IVF_PQ:
        connect.create_index(collection, index_type, {"nlist": 16384, "m": 16})
    elif index_type == IndexType.HNSW:
        connect.create_index(collection, index_type, {"M": 16, "efConstruction": 500})
    elif index_type == IndexType.RNSG:
        connect.create_index(collection, index_type, {"search_length": 60, "out_degree": 50, "candidate_pool_size": 300, "knng": 100})

    top_k = 1
    query_vecs = gen_vectors(1, dim)
    status, _ = connect.search_vectors(collection, top_k, query_vecs, params=search_param)
    assert not status.OK()
|
2019-10-16 10:40:31 +00:00
|
|
|
|
|
|
|
def check_result(result, id):
    """Return True if `id` appears among the first five hits of `result`.

    When fewer than five hits are present, all of them are checked.
    """
    top_hits = result[:5]
    return any(hit.id == id for hit in top_hits)
|