mirror of https://github.com/milvus-io/milvus.git
#552 Server down during building index_type: IVF_PQ using GPU-edition
parent 44201ade76
commit dd5ec085b6
@@ -42,7 +42,8 @@ class NSGInterfaceTest : public DataGen, public ::testing::Test {
     SetUp() override {
         // Init_with_default();
 #ifdef MILVUS_GPU_VERSION
-        knowhere::FaissGpuResourceMgr::GetInstance().InitDevice(DEVICEID, 1024 * 1024 * 200, 1024 * 1024 * 600, 1);
+        int64_t MB = 1024 * 1024;
+        knowhere::FaissGpuResourceMgr::GetInstance().InitDevice(DEVICEID, MB * 200, MB * 600, 1);
 #endif
         Generate(256, 1000000 / 100, 1);
         index_ = std::make_shared<knowhere::NSG>();
@@ -134,7 +134,7 @@ IVFPQConfAdapter::Match(const TempMetaConf& metaconf) {
 
     /*
      * Faiss 1.6
-     * Only 1, 2, 3, 4, 6, 8, 10, 12, 16, 20, 24, 28, 32 dims per sub-quantizer are currently supporte with
+     * Only 1, 2, 3, 4, 6, 8, 10, 12, 16, 20, 24, 28, 32 dims per sub-quantizer are currently supported with
      * no precomputed codes. Precomputed codes supports any number of dimensions, but will involve memory overheads.
      */
     static std::vector<int64_t> support_dim_per_subquantizer{32, 28, 24, 20, 16, 12, 10, 8, 6, 4, 3, 2, 1};
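The support_dim_per_subquantizer list above is what the adapter checks candidate dimensions against: a sub-quantizer width is only usable when it divides the vector dimension evenly, and each successful division yields a candidate sub-quantizer count m collected into resset. The following is a minimal standalone sketch of that kind of check, not the exact IVFPQConfAdapter::Match code; CompatibleSubQuantizerCounts is an illustrative helper name.

#include <cstdint>
#include <iostream>
#include <vector>

// Sketch only: enumerate sub-quantizer counts (m) compatible with a given
// vector dimension, assuming each sub-quantizer must have one of the widths
// Faiss 1.6 supports on GPU without precomputed codes.
std::vector<int64_t>
CompatibleSubQuantizerCounts(int64_t dim) {
    static const std::vector<int64_t> support_dim_per_subquantizer{32, 28, 24, 20, 16, 12, 10, 8, 6, 4, 3, 2, 1};
    std::vector<int64_t> resset;
    for (int64_t sub_dim : support_dim_per_subquantizer) {
        if (dim % sub_dim == 0) {
            resset.push_back(dim / sub_dim);  // m = dim / (dims per sub-quantizer)
        }
    }
    return resset;
}

int main() {
    for (int64_t m : CompatibleSubQuantizerCounts(256)) {
        std::cout << m << " ";  // prints: 8 16 32 64 128 256
    }
    std::cout << std::endl;
    return 0;
}

For dim = 256, as used by Generate(256, ...) in the test hunk above, the compatible sub-quantizer counts under this check would be 8, 16, 32, 64, 128, and 256; a dimension with no divisor in the supported list would leave resset empty and hit the error path shown in the next hunk.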
@@ -152,8 +152,10 @@ IVFPQConfAdapter::Match(const TempMetaConf& metaconf) {
 
     if (resset.empty()) {
         // todo(linxj): throw exception here.
-        WRAPPER_LOG_ERROR << "The dims of PQ is wrong!";
-        throw WrapperException("The dims of PQ is wrong!");
+        WRAPPER_LOG_ERROR << "The dims of PQ is wrong : only 1, 2, 3, 4, 6, 8, 10, 12, 16, 20, 24, 28, 32 dims per sub-"
+                             "quantizer are currently supported with no precomputed codes.";
+        throw WrapperException("The dims of PQ is wrong : only 1, 2, 3, 4, 6, 8, 10, 12, 16, 20, 24, 28, 32 dims "
+                               "per sub-quantizer are currently supported with no precomputed codes.");
         // return nullptr;
     }
     static int64_t compression_level = 1;  // 1:low, 2:high
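One detail in the added lines: each long message is written as two adjacent string literals, which the C++ compiler concatenates at compile time, so WRAPPER_LOG_ERROR and WrapperException each still receive a single string. A trivial standalone illustration of that mechanism (msg is an illustrative name, not code from the commit):

#include <cstdio>

int main() {
    // Adjacent string literals are merged by the compiler into one string,
    // which is how the wrapped error message above remains a single argument.
    const char* msg =
        "The dims of PQ is wrong : only 1, 2, 3, 4, 6, 8, 10, 12, 16, 20, 24, 28, 32 dims "
        "per sub-quantizer are currently supported with no precomputed codes.";
    std::puts(msg);
    return 0;
}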