mirror of https://github.com/milvus-io/milvus.git
Merge pull request #609 from cydrain/caiyd_handle_search_exception
handle search exception (pull/632/head)

commit e705fa3007

@@ -9,6 +9,7 @@ Please mark all change in change log and use the ticket from JIRA.
 - \#248 - Reside src/external in thirdparty
 - \#316 - Some files not merged after vectors added
 - \#327 - Search does not use GPU when index type is FLAT
+- \#331 - Add exception handle when search fail
 - \#340 - Test cases run failed on 0.6.0
 - \#353 - Rename config.h.in to version.h.in
 - \#374 - sdk_simple return empty result

@@ -131,12 +131,9 @@ GPUIVF::search_impl(int64_t n, const float* data, int64_t k, float* distances, i
     if (auto device_index = std::dynamic_pointer_cast<faiss::gpu::GpuIndexIVF>(index_)) {
         auto search_cfg = std::dynamic_pointer_cast<IVFCfg>(cfg);
         device_index->nprobe = search_cfg->nprobe;
-        // assert(device_index->getNumProbes() == search_cfg->nprobe);
-
-        {
-            ResScope rs(res_, gpu_id_);
-            device_index->search(n, (float*)data, k, distances, labels);
-        }
+        // assert(device_index->getNumProbes() == search_cfg->nprobe);
+        ResScope rs(res_, gpu_id_);
+        device_index->search(n, (float*)data, k, distances, labels);
     } else {
         KNOWHERE_THROW_MSG("Not a GpuIndexIVF type.");
     }

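The hunk above removes the extra braces around the ResScope / search pair. Assuming ResScope is an RAII-style guard that acquires the GPU resource in its constructor and releases it in its destructor (which is what its use here suggests), the inner block only mattered if the resource had to be released before the end of the if-branch; since the search call was already the last statement, the braces were redundant. Below is a minimal, self-contained sketch of that scope-guard idea; ScopeGuard, gpu_mutex and search() are hypothetical stand-ins, not milvus code.

// Sketch of the RAII scope-guard pattern behind ResScope (hypothetical names).
#include <iostream>
#include <mutex>

std::mutex gpu_mutex;  // stands in for a shared GPU resource

class ScopeGuard {     // plays the role of ResScope in this sketch
 public:
    ScopeGuard() { gpu_mutex.lock(); std::cout << "resource acquired\n"; }
    ~ScopeGuard() { gpu_mutex.unlock(); std::cout << "resource released\n"; }
};

void search() { std::cout << "searching on GPU\n"; }

int main() {
    // The guard lives until the end of main(); because search() is the last
    // statement, wrapping these two lines in their own { } block would change
    // nothing observable, which is why the diff can drop the braces.
    ScopeGuard rs;
    search();
    return 0;
}
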
@@ -119,42 +119,48 @@ IVF::Search(const DatasetPtr& dataset, const Config& config) {

     GETTENSOR(dataset)

-    auto elems = rows * search_cfg->k;
-    auto res_ids = (int64_t*)malloc(sizeof(int64_t) * elems);
-    auto res_dis = (float*)malloc(sizeof(float) * elems);
+    try {
+        auto elems = rows * search_cfg->k;
+        auto res_ids = (int64_t*)malloc(sizeof(int64_t) * elems);
+        auto res_dis = (float*)malloc(sizeof(float) * elems);

-    search_impl(rows, (float*)p_data, search_cfg->k, res_dis, res_ids, config);
+        search_impl(rows, (float*)p_data, search_cfg->k, res_dis, res_ids, config);

         // std::stringstream ss_res_id, ss_res_dist;
         // for (int i = 0; i < 10; ++i) {
         // printf("%llu", res_ids[i]);
         // printf("\n");
         // printf("%.6f", res_dis[i]);
         // printf("\n");
         // ss_res_id << res_ids[i] << " ";
         // ss_res_dist << res_dis[i] << " ";
         // }
         // std::cout << std::endl << "after search: " << std::endl;
         // std::cout << ss_res_id.str() << std::endl;
         // std::cout << ss_res_dist.str() << std::endl << std::endl;

         // auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems);
         // auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems);
         //
         // std::vector<BufferPtr> id_bufs{nullptr, id_buf};
         // std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
         //
         // auto int64_type = std::make_shared<arrow::Int64Type>();
         // auto float_type = std::make_shared<arrow::FloatType>();
         //
         // auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
         // auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);
         //
         // auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
         // auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
         // std::vector<ArrayPtr> array{ids, dists};

-    return std::make_shared<Dataset>((void*)res_ids, (void*)res_dis);
+        return std::make_shared<Dataset>((void*)res_ids, (void*)res_dis);
+    } catch (faiss::FaissException& e) {
+        KNOWHERE_THROW_MSG(e.what());
+    } catch (std::exception& e) {
+        KNOWHERE_THROW_MSG(e.what());
+    }
 }

 void

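The IVF::Search change above is the heart of this PR: the search body is wrapped in try/catch so that a faiss::FaissException (or any other std::exception) thrown during the search is rethrown through KNOWHERE_THROW_MSG, meaning callers only ever see knowhere's own error type instead of a raw faiss exception. The sketch below shows that exception-translation pattern in isolation; KnowhereException, FaissLikeException and low_level_search are hypothetical stand-ins, not the actual knowhere or faiss types.

// Exception-translation sketch (hypothetical types, not the knowhere implementation).
#include <iostream>
#include <stdexcept>
#include <string>

struct KnowhereException : std::runtime_error {   // stands in for knowhere's exception
    explicit KnowhereException(const std::string& msg) : std::runtime_error(msg) {}
};

struct FaissLikeException : std::runtime_error {  // stands in for faiss::FaissException
    explicit FaissLikeException(const std::string& msg) : std::runtime_error(msg) {}
};

void low_level_search() {
    // Simulate the underlying library failing mid-search.
    throw FaissLikeException("add() was not called on this index");
}

void Search() {
    try {
        low_level_search();
    } catch (const FaissLikeException& e) {
        throw KnowhereException(e.what());        // translate the library error
    } catch (const std::exception& e) {
        throw KnowhereException(e.what());        // translate anything else
    }
}

int main() {
    try {
        Search();
    } catch (const KnowhereException& e) {
        std::cerr << "search failed: " << e.what() << '\n';
    }
    return 0;
}
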
@@ -212,7 +212,13 @@ XSearchTask::Execute() {
             ResMgrInst::GetInstance()->GetResource(path().Last())->type() == ResourceType::CPU) {
             hybrid = true;
         }
-        index_engine_->Search(nq, vectors, topk, nprobe, output_distance.data(), output_ids.data(), hybrid);
+        Status s =
+            index_engine_->Search(nq, vectors, topk, nprobe, output_distance.data(), output_ids.data(), hybrid);
+        if (!s.ok()) {
+            search_job->GetStatus() = s;
+            search_job->SearchDone(index_id_);
+            return;
+        }

         double span = rc.RecordSection(hdr + ", do search");
         // search_job->AccumSearchCost(span);

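On the scheduler side, the hunk above stops discarding the engine's return value: XSearchTask::Execute() now captures the Status returned by index_engine_->Search, and on failure records it on the search job, signals SearchDone for this index, and returns early instead of continuing with the rest of the task. A minimal sketch of that early-return status pattern, with Status, SearchJob, EngineSearch and Execute as simplified hypothetical stand-ins for the milvus types:

// Status-propagation sketch (simplified, hypothetical stand-ins for milvus types).
#include <iostream>
#include <string>
#include <utility>

class Status {                                    // minimal stand-in for milvus::Status
 public:
    static Status OK() { return Status(true, ""); }
    static Status Error(std::string msg) { return Status(false, std::move(msg)); }
    bool ok() const { return ok_; }
    const std::string& message() const { return msg_; }
 private:
    Status(bool ok, std::string msg) : ok_(ok), msg_(std::move(msg)) {}
    bool ok_;
    std::string msg_;
};

struct SearchJob {                                // stand-in for the scheduler's job object
    Status status = Status::OK();
    void SearchDone(int index_id) { std::cout << "index " << index_id << " done\n"; }
};

Status EngineSearch(bool fail) {                  // stands in for index_engine_->Search(...)
    return fail ? Status::Error("knowhere search failed") : Status::OK();
}

void Execute(SearchJob& job, int index_id, bool fail) {
    Status s = EngineSearch(fail);
    if (!s.ok()) {
        job.status = s;                           // propagate the failure to the job
        job.SearchDone(index_id);                 // still mark this sub-task as finished
        return;                                   // skip the rest of the task
    }
    std::cout << "merging results for index " << index_id << '\n';
}

int main() {
    SearchJob job;
    Execute(job, 1, /*fail=*/false);
    Execute(job, 2, /*fail=*/true);
    std::cout << "job ok: " << std::boolalpha << job.status.ok() << '\n';
    return 0;
}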