mirror of https://github.com/milvus-io/milvus.git
Remove other Exceptions, keeps SegcoreError only (#27017)
Signed-off-by: Enwei Jiao <enwei.jiao@zilliz.com>
pull/27107/head
parent f7b2ad6650
commit 0afdfdb9af
@@ -29,7 +29,7 @@ if ( MILVUS_GPU_VERSION )
add_definitions(-DMILVUS_GPU_VERSION)
endif ()

if ( USE_DYNAMIC_SIMD )
if ( USE_DYNAMIC_SIMD )
add_definitions(-DUSE_DYNAMIC_SIMD)
endif()

@@ -152,7 +152,7 @@ if ( APPLE )
endif ()

# Set SIMD to CMAKE_CXX_FLAGS
if (OPEN_SIMD)
if (OPEN_SIMD)
message(STATUS "open simd function, CPU_ARCH:${CPU_ARCH}")
if (${CPU_ARCH} STREQUAL "avx")
#set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -ftree-vectorize -mavx2 -mfma -mavx -mf16c ")
@@ -16,6 +16,7 @@

#include <iostream>
#include "EasyAssert.h"
#include "fmt/core.h"
#include <boost/stacktrace.hpp>
#include <sstream>

@@ -39,13 +40,16 @@ EasyAssertInfo(bool value,
std::string_view filename,
int lineno,
std::string_view extra_info,
ErrorCodeEnum error_code) {
ErrorCode error_code) {
// enable error code
if (!value) {
std::string info;
info += "Assert \"" + std::string(expr_str) + "\"";
info += " at " + std::string(filename) + ":" + std::to_string(lineno) +
"\n";
if (!expr_str.empty()) {
info += fmt::format("Assert \"{}\" at {}:{}\n",
expr_str,
std::string(filename),
std::to_string(lineno));
}
if (!extra_info.empty()) {
info += " => " + std::string(extra_info);
}
@@ -18,19 +18,78 @@
#include <string_view>
#include <stdexcept>
#include <exception>
#include <stdio.h>
#include <stdlib.h>
#include <cstdio>
#include <cstdlib>
#include <string>
#include "pb/common.pb.h"
#include "common/type_c.h"

/* Paste this on the file if you want to debug. */
namespace milvus {
enum ErrorCodeEnum {
enum ErrorCode {
Success = 0,
UnexpectedError = 1,
BuildIndexError = 2,
IllegalArgument = 5,
UnexpectedError = 2001,
NotImplemented = 2002,
Unsupported = 2003,
IndexBuildError = 2004,
IndexAlreadyBuild = 2005,
ConfigInvalid = 2006,
DataTypeInvalid = 2007,
PathInvalid = 2009,
PathAlreadyExist = 2010,
PathNotExist = 2011,
FileOpenFailed = 2012,
FileCreateFailed = 2013,
FileReadFailed = 2014,
FileWriteFailed = 2015,
BucketInvalid = 2016,
ObjectNotExist = 2017,
S3Error = 2018,
RetrieveError = 2019,
FieldIDInvalid = 2020,
FieldAlreadyExist = 2021,
OpTypeInvalid = 2022,
DataIsEmpty = 2023,
DataFormatBroken = 2024,
JsonKeyInvalid = 2025,
MetricTypeInvalid = 2026,
UnistdError = 2030,
KnowhereError = 2100,
};
namespace impl {
void
EasyAssertInfo(bool value,
std::string_view expr_str,
std::string_view filename,
int lineno,
std::string_view extra_info,
ErrorCode error_code = ErrorCode::UnexpectedError);

} // namespace impl

class SegcoreError : public std::runtime_error {
public:
static SegcoreError
success() {
return SegcoreError(ErrorCode::Success, "");
}

SegcoreError(ErrorCode error_code, const std::string& error_msg)
: std::runtime_error(error_msg), error_code_(error_code) {
}

ErrorCode
get_error_code() {
return error_code_;
}

bool
ok() {
return error_code_ == ErrorCode::Success;
}

private:
ErrorCode error_code_;
};

inline CStatus
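The hunk above is the core of the change: a single ErrorCode enum (with the new 2000-range codes) plus the SegcoreError exception replace the per-module exception types. A minimal usage sketch, not taken from the commit; CheckDim and the message text are purely illustrative:

    #include <cstdint>
    #include <string>
    #include "common/EasyAssert.h"

    // Illustrative helper: raise a typed error instead of std::invalid_argument.
    void
    CheckDim(int64_t dim) {
        if (dim <= 0) {
            throw milvus::SegcoreError(
                milvus::ErrorCode::ConfigInvalid,
                "dim must be positive, got " + std::to_string(dim));
        }
    }

    void
    Caller() {
        try {
            CheckDim(-1);
        } catch (milvus::SegcoreError& e) {
            // The numeric code survives the throw and can be forwarded across the C API.
            auto code = e.get_error_code();  // ErrorCode::ConfigInvalid == 2006
            (void)code;
        }
    }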
@@ -42,41 +101,16 @@ inline CStatus
FailureCStatus(int code, const std::string& msg) {
return CStatus{code, strdup(msg.data())};
}
namespace impl {
void
EasyAssertInfo(bool value,
std::string_view expr_str,
std::string_view filename,
int lineno,
std::string_view extra_info,
ErrorCodeEnum error_code = ErrorCodeEnum::UnexpectedError);

} // namespace impl

class SegcoreError : public std::runtime_error {
public:
static SegcoreError
success() {
return SegcoreError(ErrorCodeEnum::Success, "");
inline CStatus
FailureCStatus(std::exception* ex) {
if (dynamic_cast<SegcoreError*>(ex) != nullptr) {
auto segcore_error = dynamic_cast<SegcoreError*>(ex);
return CStatus{static_cast<int>(segcore_error->get_error_code()),
strdup(ex->what())};
}

SegcoreError(ErrorCodeEnum error_code, const std::string& error_msg)
: std::runtime_error(error_msg), error_code_(error_code) {
}

ErrorCodeEnum
get_error_code() {
return error_code_;
}

bool
ok() {
return error_code_ == ErrorCodeEnum::Success;
}

private:
ErrorCodeEnum error_code_;
};
return CStatus{static_cast<int>(UnexpectedError), strdup(ex->what())};
}

} // namespace milvus
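The new FailureCStatus(std::exception*) overload ties the exception type back to the C API: a SegcoreError keeps its own ErrorCode, anything else is reported as UnexpectedError. A hedged sketch of the usual wrapper pattern; CDoSomething, CCollection, and DoSomething are placeholders, not symbols from this commit:

    CStatus
    CDoSomething(CCollection collection) {      // placeholder C wrapper
        try {
            DoSomething(collection);            // placeholder C++ call that may throw SegcoreError
            auto status = CStatus();
            status.error_code = milvus::ErrorCode::Success;
            status.error_msg = "";
            return status;
        } catch (std::exception& e) {
            // The dynamic_cast inside FailureCStatus picks up the SegcoreError code when present.
            return milvus::FailureCStatus(&e);
        }
    }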
@@ -93,13 +127,13 @@ class SegcoreError : public std::runtime_error {
#define Assert(expr) AssertInfo((expr), "")
#define PanicInfo(info) \
do { \
milvus::impl::EasyAssertInfo(false, (info), __FILE__, __LINE__, ""); \
milvus::impl::EasyAssertInfo(false, "", __FILE__, __LINE__, (info)); \
__builtin_unreachable(); \
} while (0)

#define PanicCodeInfo(errcode, info) \
do { \
milvus::impl::EasyAssertInfo( \
false, (info), __FILE__, __LINE__, "", errcode); \
false, "", __FILE__, __LINE__, (info), errcode); \
__builtin_unreachable(); \
} while (0)
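Note the argument shuffle in the macros above: the message now travels in the extra_info slot while expr_str is left empty, matching the new EasyAssertInfo formatting. A small, illustrative call (error code and text are made up for this example):

    // Expands roughly to:
    //   milvus::impl::EasyAssertInfo(false, "", __FILE__, __LINE__,
    //                                ("metric type not supported"),
    //                                milvus::ErrorCode::MetricTypeInvalid);
    //   __builtin_unreachable();
    PanicCodeInfo(milvus::ErrorCode::MetricTypeInvalid, "metric type not supported");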
@@ -52,7 +52,8 @@ datatype_sizeof(DataType data_type, int dim = 1) {
return sizeof(float16) * dim;
}
default: {
throw std::invalid_argument("unsupported data type");
throw SegcoreError(DataTypeInvalid,
fmt::format("invalid type is {}", data_type));
}
}
}

@@ -90,9 +91,9 @@ datatype_name(DataType data_type) {
return "vector_float16";
}
default: {
auto err_msg =
"Unsupported DataType(" + std::to_string((int)data_type) + ")";
PanicInfo(err_msg);
PanicCodeInfo(DataTypeInvalid,
fmt::format("Unsupported DataType({})",
fmt::underlying(data_type)));
}
}
}
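datatype_name above formats the enum through fmt::underlying, which yields the enum's underlying integer so "{}" can print it; it replaces the old std::to_string((int)data_type) cast. A standalone sketch, assuming a {fmt} release recent enough to ship fmt::underlying; the local DataType here is only a stand-in for the real milvus enum:

    #include <string>
    #include <fmt/format.h>

    enum class DataType : int { VECTOR_FLOAT = 101 };  // stand-in, not the milvus definition

    std::string
    describe(DataType t) {
        // fmt::underlying(t) converts the scoped enum to its underlying int,
        // so describe(DataType::VECTOR_FLOAT) returns "Unsupported DataType(101)".
        return fmt::format("Unsupported DataType({})", fmt::underlying(t));
    }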
@@ -24,12 +24,12 @@ NewBinarySet(CBinarySet* c_binary_set) {
auto binary_set = std::make_unique<knowhere::BinarySet>();
*c_binary_set = binary_set.release();
auto status = CStatus();
status.error_code = milvus::ErrorCodeEnum::Success;
status.error_code = milvus::ErrorCode::Success;
status.error_msg = "";
return status;
} catch (std::exception& e) {
auto status = CStatus();
status.error_code = milvus::ErrorCodeEnum::UnexpectedError;
status.error_code = milvus::ErrorCode::UnexpectedError;
status.error_msg = strdup(e.what());
return status;
}

@@ -56,10 +56,10 @@ AppendIndexBinary(CBinarySet c_binary_set,
std::shared_ptr<uint8_t[]> data(dup);
binary_set->Append(index_key, data, index_size);

status.error_code = milvus::ErrorCodeEnum::Success;
status.error_code = milvus::ErrorCode::Success;
status.error_msg = "";
} catch (std::exception& e) {
status.error_code = milvus::ErrorCodeEnum::UnexpectedError;
status.error_code = milvus::ErrorCode::UnexpectedError;
status.error_msg = strdup(e.what());
}
return status;

@@ -101,11 +101,11 @@ CopyBinarySetValue(void* data, const char* key, CBinarySet c_binary_set) {
auto binary_set = (knowhere::BinarySet*)c_binary_set;
try {
auto binary = binary_set->GetByName(key);
status.error_code = milvus::ErrorCodeEnum::Success;
status.error_code = milvus::ErrorCode::Success;
status.error_msg = "";
memcpy((uint8_t*)data, binary->data.get(), binary->size);
} catch (std::exception& e) {
status.error_code = milvus::ErrorCodeEnum::UnexpectedError;
status.error_code = milvus::ErrorCode::UnexpectedError;
status.error_msg = strdup(e.what());
}
return status;
@@ -1,34 +0,0 @@
// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#pragma once

#include <iostream>
#include <stdexcept>
#include <string>

namespace milvus::index {

class UnistdException : public std::runtime_error {
public:
explicit UnistdException(const std::string& msg) : std::runtime_error(msg) {
}

virtual ~UnistdException() {
}
};

} // namespace milvus::index
@@ -63,21 +63,21 @@ IndexFactory::CreateScalarIndex(const CreateIndexInfo& create_index_info,
case DataType::VARCHAR:
return CreateScalarIndex<std::string>(index_type, file_manager);
default:
throw std::invalid_argument(
std::string("invalid data type to build index: ") +
std::to_string(int(data_type)));
throw SegcoreError(
DataTypeInvalid,
fmt::format("invalid data type to build index: {}", data_type));
}
}

IndexBasePtr
IndexFactory::CreateVectorIndex(const CreateIndexInfo& create_index_info,
storage::FileManagerImplPtr file_manager) {
auto data_type = create_index_info.field_type;
auto index_type = create_index_info.index_type;
auto metric_type = create_index_info.metric_type;

#ifdef BUILD_DISK_ANN
// create disk index
auto data_type = create_index_info.field_type;
if (is_in_disk_list(index_type)) {
switch (data_type) {
case DataType::VECTOR_FLOAT: {

@@ -85,9 +85,10 @@ IndexFactory::CreateVectorIndex(const CreateIndexInfo& create_index_info,
index_type, metric_type, file_manager);
}
default:
throw std::invalid_argument(
std::string("invalid data type to build disk index: ") +
std::to_string(int(data_type)));
throw SegcoreError(
DataTypeInvalid,
fmt::format("invalid data type to build disk index: {}",
data_type));
}
}
#endif

@@ -64,8 +64,9 @@ ScalarIndex<T>::Query(const DatasetPtr& dataset) {
case OpType::PrefixMatch:
case OpType::PostfixMatch:
default:
throw std::invalid_argument(std::string(
"unsupported operator type: " + std::to_string(op)));
throw SegcoreError(OpTypeInvalid,
fmt::format("unsupported operator type: {}",
fmt::underlying(op)));
}
}
@@ -24,6 +24,7 @@
#include "common/Types.h"
#include "common/EasyAssert.h"
#include "index/Index.h"
#include "fmt/format.h"

namespace milvus::index {

@@ -38,7 +39,8 @@ class ScalarIndex : public IndexBase {
void
BuildWithDataset(const DatasetPtr& dataset,
const Config& config = {}) override {
PanicInfo("scalar index don't support build index with dataset");
PanicCodeInfo(Unsupported,
"scalar index don't support build index with dataset");
};

public:
@@ -55,9 +55,8 @@ ScalarIndexSort<T>::Build(const Config& config) {
total_num_rows += data->get_num_rows();
}
if (total_num_rows == 0) {
// todo: throw an exception
throw std::invalid_argument(
"ScalarIndexSort cannot build null values!");
throw SegcoreError(DataIsEmpty,
"ScalarIndexSort cannot build null values!");
}

data_.reserve(total_num_rows);

@@ -85,9 +84,8 @@ ScalarIndexSort<T>::Build(size_t n, const T* values) {
if (is_built_)
return;
if (n == 0) {
// todo: throw an exception
throw std::invalid_argument(
"ScalarIndexSort cannot build null values!");
throw SegcoreError(DataIsEmpty,
"ScalarIndexSort cannot build null values!");
}
data_.reserve(n);
idx_to_offsets_.resize(n);

@@ -254,8 +252,9 @@ ScalarIndexSort<T>::Range(const T value, const OpType op) {
data_.begin(), data_.end(), IndexStructure<T>(value));
break;
default:
throw std::invalid_argument(std::string("Invalid OperatorType: ") +
std::to_string((int)op) + "!");
throw SegcoreError(
OpTypeInvalid,
fmt::format("Invalid OperatorType: {}", fmt::underlying(op)));
}
for (; lb < ub; ++lb) {
bitset[lb->idx_] = true;
@@ -21,10 +21,10 @@
#include <stdio.h>
#include <fcntl.h>

#include "common/EasyAssert.h"
#include "index/StringIndexMarisa.h"
#include "index/Utils.h"
#include "index/Index.h"
#include "index/Exception.h"
#include "common/Utils.h"
#include "common/Slice.h"

@@ -50,7 +50,7 @@ valid_str_id(size_t str_id) {
void
StringIndexMarisa::Build(const Config& config) {
if (built_) {
throw std::runtime_error("index has been built");
throw SegcoreError(IndexAlreadyBuild, "index has been built");
}

auto insert_files =

@@ -63,7 +63,7 @@ StringIndexMarisa::Build(const Config& config) {

// fill key set.
marisa::Keyset keyset;
for (auto data : field_datas) {
for (const auto& data : field_datas) {
auto slice_num = data->get_num_rows();
for (int64_t i = 0; i < slice_num; ++i) {
keyset.push_back(

@@ -76,7 +76,7 @@ StringIndexMarisa::Build(const Config& config) {
// fill str_ids_
str_ids_.resize(total_num_rows);
int64_t offset = 0;
for (auto data : field_datas) {
for (const auto& data : field_datas) {
auto slice_num = data->get_num_rows();
for (int64_t i = 0; i < slice_num; ++i) {
auto str_id =

@@ -95,7 +95,7 @@ StringIndexMarisa::Build(const Config& config) {
void
StringIndexMarisa::Build(size_t n, const std::string* values) {
if (built_) {
throw std::runtime_error("index has been built");
throw SegcoreError(IndexAlreadyBuild, "index has been built");
}

marisa::Keyset keyset;

@@ -176,8 +176,9 @@ StringIndexMarisa::LoadWithoutAssemble(const BinarySet& set,
if (status != len) {
close(fd);
remove(file.c_str());
throw UnistdException("write index to fd error, errorCode is " +
std::to_string(status));
throw SegcoreError(
ErrorCode::UnistdError,
"write index to fd error, errorCode is " + std::to_string(status));
}

lseek(fd, 0, SEEK_SET);

@@ -275,9 +276,9 @@ StringIndexMarisa::Range(std::string value, OpType op) {
set = raw_data.compare(value) >= 0;
break;
default:
throw std::invalid_argument(
std::string("Invalid OperatorType: ") +
std::to_string((int)op) + "!");
throw SegcoreError(OpTypeInvalid,
fmt::format("Invalid OperatorType: {}",
static_cast<int>(op)));
}
if (set) {
bitset[offset] = true;
@@ -26,7 +26,6 @@
#include <iostream>

#include "index/Utils.h"
#include "index/Exception.h"
#include "index/Meta.h"
#include <google/protobuf/text_format.h>
#include <unistd.h>

@@ -286,9 +285,10 @@ ReadDataFromFD(int fd, void* buf, size_t size, size_t chunk_size) {
const size_t count = (size < chunk_size) ? size : chunk_size;
const ssize_t size_read = read(fd, buf, count);
if (size_read != count) {
throw UnistdException(
throw SegcoreError(
ErrorCode::UnistdError,
"read data from fd error, returned read size is " +
std::to_string(size_read));
std::to_string(size_read));
}

buf = static_cast<char*>(buf) + size_read;
@@ -81,7 +81,7 @@ VectorDiskAnnIndex<T>::Load(const Config& config) {
auto stat = index_.Deserialize(knowhere::BinarySet(), load_config);
if (stat != knowhere::Status::success)
PanicCodeInfo(
ErrorCodeEnum::UnexpectedError,
ErrorCode::UnexpectedError,
"failed to Deserialize index, " + KnowhereStatusString(stat));

SetDim(index_.Dim());

@@ -177,7 +177,7 @@ VectorDiskAnnIndex<T>::BuildWithDataset(const DatasetPtr& dataset,
knowhere::DataSet* ds_ptr = nullptr;
auto stat = index_.Build(*ds_ptr, build_config);
if (stat != knowhere::Status::success)
PanicCodeInfo(ErrorCodeEnum::BuildIndexError,
PanicCodeInfo(ErrorCode::IndexBuildError,
"failed to build index, " + KnowhereStatusString(stat));
local_chunk_manager->RemoveDir(
storage::GetSegmentRawDataPathPrefix(local_chunk_manager, segment_id));

@@ -234,7 +234,7 @@ VectorDiskAnnIndex<T>::Query(const DatasetPtr dataset,
auto res = index_.RangeSearch(*dataset, search_config, bitset);

if (!res.has_value()) {
PanicCodeInfo(ErrorCodeEnum::UnexpectedError,
PanicCodeInfo(ErrorCode::UnexpectedError,
fmt::format("failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));

@@ -244,7 +244,7 @@ VectorDiskAnnIndex<T>::Query(const DatasetPtr dataset,
} else {
auto res = index_.Search(*dataset, search_config, bitset);
if (!res.has_value()) {
PanicCodeInfo(ErrorCodeEnum::UnexpectedError,
PanicCodeInfo(ErrorCode::UnexpectedError,
fmt::format("failed to search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));

@@ -289,7 +289,7 @@ std::vector<uint8_t>
VectorDiskAnnIndex<T>::GetVector(const DatasetPtr dataset) const {
auto res = index_.GetVectorByIds(*dataset);
if (!res.has_value()) {
PanicCodeInfo(ErrorCodeEnum::UnexpectedError,
PanicCodeInfo(ErrorCode::UnexpectedError,
fmt::format("failed to get vector: {}: {}",
KnowhereStatusString(res.error()),
res.what()));
@@ -43,12 +43,14 @@ class VectorIndex : public IndexBase {
BuildWithRawData(size_t n,
const void* values,
const Config& config = {}) override {
PanicInfo("vector index don't support build index with raw data");
PanicCodeInfo(Unsupported,
"vector index don't support build index with raw data");
};

virtual void
AddWithDataset(const DatasetPtr& dataset, const Config& config) {
PanicInfo("vector index don't support add with dataset");
PanicCodeInfo(Unsupported,
"vector index don't support add with dataset");
}

virtual std::unique_ptr<SearchResult>
@@ -81,7 +81,7 @@ VectorMemIndex::Serialize(const Config& config) {
auto stat = index_.Serialize(ret);
if (stat != knowhere::Status::success)
PanicCodeInfo(
ErrorCodeEnum::UnexpectedError,
ErrorCode::UnexpectedError,
"failed to serialize index, " + KnowhereStatusString(stat));
Disassemble(ret);

@@ -94,7 +94,7 @@ VectorMemIndex::LoadWithoutAssemble(const BinarySet& binary_set,
auto stat = index_.Deserialize(binary_set);
if (stat != knowhere::Status::success)
PanicCodeInfo(
ErrorCodeEnum::UnexpectedError,
ErrorCode::UnexpectedError,
"failed to Deserialize index, " + KnowhereStatusString(stat));
SetDim(index_.Dim());
}

@@ -227,7 +227,7 @@ VectorMemIndex::BuildWithDataset(const DatasetPtr& dataset,
knowhere::TimeRecorder rc("BuildWithoutIds", 1);
auto stat = index_.Build(*dataset, index_config);
if (stat != knowhere::Status::success)
PanicCodeInfo(ErrorCodeEnum::BuildIndexError,
PanicCodeInfo(ErrorCode::IndexBuildError,
"failed to build index, " + KnowhereStatusString(stat));
rc.ElapseFromBegin("Done");
SetDim(index_.Dim());

@@ -279,7 +279,7 @@ VectorMemIndex::AddWithDataset(const DatasetPtr& dataset,
knowhere::TimeRecorder rc("AddWithDataset", 1);
auto stat = index_.Add(*dataset, index_config);
if (stat != knowhere::Status::success)
PanicCodeInfo(ErrorCodeEnum::BuildIndexError,
PanicCodeInfo(ErrorCode::IndexBuildError,
"failed to append index, " + KnowhereStatusString(stat));
rc.ElapseFromBegin("Done");
}

@@ -309,7 +309,7 @@ VectorMemIndex::Query(const DatasetPtr dataset,
auto res = index_.RangeSearch(*dataset, search_conf, bitset);
milvus::tracer::AddEvent("finish_knowhere_index_range_search");
if (!res.has_value()) {
PanicCodeInfo(ErrorCodeEnum::UnexpectedError,
PanicCodeInfo(ErrorCode::UnexpectedError,
fmt::format("failed to range search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));

@@ -323,7 +323,7 @@ VectorMemIndex::Query(const DatasetPtr dataset,
auto res = index_.Search(*dataset, search_conf, bitset);
milvus::tracer::AddEvent("finish_knowhere_index_search");
if (!res.has_value()) {
PanicCodeInfo(ErrorCodeEnum::UnexpectedError,
PanicCodeInfo(ErrorCode::UnexpectedError,
fmt::format("failed to search: {}: {}",
KnowhereStatusString(res.error()),
res.what()));

@@ -366,7 +366,7 @@ VectorMemIndex::GetVector(const DatasetPtr dataset) const {
auto res = index_.GetVectorByIds(*dataset);
if (!res.has_value()) {
PanicCodeInfo(
ErrorCodeEnum::UnexpectedError,
ErrorCode::UnexpectedError,
"failed to get vector, " + KnowhereStatusString(res.error()));
}
auto index_type = GetIndexType();

@@ -485,7 +485,7 @@ VectorMemIndex::LoadFromFile(const Config& config) {
conf[kEnableMmap] = true;
auto stat = index_.DeserializeFromFile(filepath.value(), conf);
if (stat != knowhere::Status::success) {
PanicCodeInfo(ErrorCodeEnum::UnexpectedError,
PanicCodeInfo(ErrorCode::UnexpectedError,
fmt::format("failed to Deserialize index: {}",
KnowhereStatusString(stat)));
}
@@ -16,6 +16,7 @@
#include <memory>
#include <string>

#include "common/EasyAssert.h"
#include "indexbuilder/IndexCreatorBase.h"
#include "indexbuilder/ScalarIndexCreator.h"
#include "indexbuilder/VecIndexCreator.h"

@@ -65,7 +66,9 @@ class IndexFactory {
return std::make_unique<VecIndexCreator>(
type, config, file_manager);
default:
throw std::invalid_argument(invalid_dtype_msg);
throw SegcoreError(
DataTypeInvalid,
fmt::format("invalid type is {}", invalid_dtype_msg));
}
}
};
@@ -47,7 +47,7 @@ LogOut(const char* pattern, ...) {
vsnprintf(str_p.get(), len, pattern, vl); // NOLINT
va_end(vl);

return std::string(str_p.get());
return {str_p.get()};
}

void

@@ -83,18 +83,6 @@ get_now_timestamp() {

#ifndef WIN32

int64_t
get_system_boottime() {
FILE* uptime = fopen("/proc/uptime", "r");
float since_sys_boot, _;
auto ret = fscanf(uptime, "%f %f", &since_sys_boot, &_);
fclose(uptime);
if (ret != 2) {
throw std::runtime_error("read /proc/uptime failed.");
}
return static_cast<int64_t>(since_sys_boot);
}

int64_t
get_thread_starttime() {
#ifdef __APPLE__

@@ -133,34 +121,11 @@ get_thread_starttime() {
return val / sysconf(_SC_CLK_TCK);
}

int64_t
get_thread_start_timestamp() {
try {
return get_now_timestamp() - get_system_boottime() +
get_thread_starttime();
} catch (...) {
return 0;
}
}

#else

#define WINDOWS_TICK 10000000
#define SEC_TO_UNIX_EPOCH 11644473600LL

int64_t
get_thread_start_timestamp() {
FILETIME dummy;
FILETIME ret;

if (GetThreadTimes(GetCurrentThread(), &ret, &dummy, &dummy, &dummy)) {
auto ticks = Int64ShllMod32(ret.dwHighDateTime, 32) | ret.dwLowDateTime;
auto thread_started = ticks / WINDOWS_TICK - SEC_TO_UNIX_EPOCH;
return get_now_timestamp() - thread_started;
}
return 0;
}

#endif

// } // namespace milvus
@@ -37,7 +37,6 @@
#define VAR_CLIENT_TAG (context->client_tag())
#define VAR_CLIENT_IPPORT (context->client_ipport())
#define VAR_THREAD_ID (gettid())
#define VAR_THREAD_START_TIMESTAMP (get_thread_start_timestamp())
#define VAR_COMMAND_TAG (context->command_tag())

/////////////////////////////////////////////////////////////////////////////////////////////////

@@ -93,13 +92,4 @@ SetThreadName(const std::string_view name);
std::string
GetThreadName();

int64_t
get_thread_start_timestamp();

// } // namespace milvus
@ -520,116 +520,116 @@ const char descriptor_table_protodef_common_2eproto[] PROTOBUF_SECTION_VARIABLE(
|
|||
"\020\004\022\t\n\005Retry\020\005*\202\001\n\014SegmentState\022\024\n\020Segmen"
|
||||
"tStateNone\020\000\022\014\n\010NotExist\020\001\022\013\n\007Growing\020\002\022"
|
||||
"\n\n\006Sealed\020\003\022\013\n\007Flushed\020\004\022\014\n\010Flushing\020\005\022\013"
|
||||
"\n\007Dropped\020\006\022\r\n\tImporting\020\007*V\n\017Placeholde"
|
||||
"\n\007Dropped\020\006\022\r\n\tImporting\020\007*i\n\017Placeholde"
|
||||
"rType\022\010\n\004None\020\000\022\020\n\014BinaryVector\020d\022\017\n\013Flo"
|
||||
"atVector\020e\022\t\n\005Int64\020\005\022\013\n\007VarChar\020\025*\264\020\n\007M"
|
||||
"sgType\022\r\n\tUndefined\020\000\022\024\n\020CreateCollectio"
|
||||
"n\020d\022\022\n\016DropCollection\020e\022\021\n\rHasCollection"
|
||||
"\020f\022\026\n\022DescribeCollection\020g\022\023\n\017ShowCollec"
|
||||
"tions\020h\022\024\n\020GetSystemConfigs\020i\022\022\n\016LoadCol"
|
||||
"lection\020j\022\025\n\021ReleaseCollection\020k\022\017\n\013Crea"
|
||||
"teAlias\020l\022\r\n\tDropAlias\020m\022\016\n\nAlterAlias\020n"
|
||||
"\022\023\n\017AlterCollection\020o\022\024\n\020RenameCollectio"
|
||||
"n\020p\022\021\n\rDescribeAlias\020q\022\017\n\013ListAliases\020r\022"
|
||||
"\024\n\017CreatePartition\020\310\001\022\022\n\rDropPartition\020\311"
|
||||
"\001\022\021\n\014HasPartition\020\312\001\022\026\n\021DescribePartitio"
|
||||
"n\020\313\001\022\023\n\016ShowPartitions\020\314\001\022\023\n\016LoadPartiti"
|
||||
"ons\020\315\001\022\026\n\021ReleasePartitions\020\316\001\022\021\n\014ShowSe"
|
||||
"gments\020\372\001\022\024\n\017DescribeSegment\020\373\001\022\021\n\014LoadS"
|
||||
"egments\020\374\001\022\024\n\017ReleaseSegments\020\375\001\022\024\n\017Hand"
|
||||
"offSegments\020\376\001\022\030\n\023LoadBalanceSegments\020\377\001"
|
||||
"\022\025\n\020DescribeSegments\020\200\002\022\034\n\027FederListInde"
|
||||
"xedSegment\020\201\002\022\"\n\035FederDescribeSegmentInd"
|
||||
"exData\020\202\002\022\020\n\013CreateIndex\020\254\002\022\022\n\rDescribeI"
|
||||
"ndex\020\255\002\022\016\n\tDropIndex\020\256\002\022\027\n\022GetIndexStati"
|
||||
"stics\020\257\002\022\013\n\006Insert\020\220\003\022\013\n\006Delete\020\221\003\022\n\n\005Fl"
|
||||
"ush\020\222\003\022\027\n\022ResendSegmentStats\020\223\003\022\013\n\006Upser"
|
||||
"t\020\224\003\022\013\n\006Search\020\364\003\022\021\n\014SearchResult\020\365\003\022\022\n\r"
|
||||
"GetIndexState\020\366\003\022\032\n\025GetIndexBuildProgres"
|
||||
"s\020\367\003\022\034\n\027GetCollectionStatistics\020\370\003\022\033\n\026Ge"
|
||||
"tPartitionStatistics\020\371\003\022\r\n\010Retrieve\020\372\003\022\023"
|
||||
"\n\016RetrieveResult\020\373\003\022\024\n\017WatchDmChannels\020\374"
|
||||
"\003\022\025\n\020RemoveDmChannels\020\375\003\022\027\n\022WatchQueryCh"
|
||||
"annels\020\376\003\022\030\n\023RemoveQueryChannels\020\377\003\022\035\n\030S"
|
||||
"ealedSegmentsChangeInfo\020\200\004\022\027\n\022WatchDelta"
|
||||
"Channels\020\201\004\022\024\n\017GetShardLeaders\020\202\004\022\020\n\013Get"
|
||||
"Replicas\020\203\004\022\023\n\016UnsubDmChannel\020\204\004\022\024\n\017GetD"
|
||||
"istribution\020\205\004\022\025\n\020SyncDistribution\020\206\004\022\020\n"
|
||||
"\013SegmentInfo\020\330\004\022\017\n\nSystemInfo\020\331\004\022\024\n\017GetR"
|
||||
"ecoveryInfo\020\332\004\022\024\n\017GetSegmentState\020\333\004\022\r\n\010"
|
||||
"TimeTick\020\260\t\022\023\n\016QueryNodeStats\020\261\t\022\016\n\tLoad"
|
||||
"Index\020\262\t\022\016\n\tRequestID\020\263\t\022\017\n\nRequestTSO\020\264"
|
||||
"\t\022\024\n\017AllocateSegment\020\265\t\022\026\n\021SegmentStatis"
|
||||
"tics\020\266\t\022\025\n\020SegmentFlushDone\020\267\t\022\017\n\nDataNo"
|
||||
"deTt\020\270\t\022\014\n\007Connect\020\271\t\022\024\n\017ListClientInfos"
|
||||
"\020\272\t\022\023\n\016AllocTimestamp\020\273\t\022\025\n\020CreateCreden"
|
||||
"tial\020\334\013\022\022\n\rGetCredential\020\335\013\022\025\n\020DeleteCre"
|
||||
"dential\020\336\013\022\025\n\020UpdateCredential\020\337\013\022\026\n\021Lis"
|
||||
"tCredUsernames\020\340\013\022\017\n\nCreateRole\020\300\014\022\r\n\010Dr"
|
||||
"opRole\020\301\014\022\024\n\017OperateUserRole\020\302\014\022\017\n\nSelec"
|
||||
"tRole\020\303\014\022\017\n\nSelectUser\020\304\014\022\023\n\016SelectResou"
|
||||
"rce\020\305\014\022\025\n\020OperatePrivilege\020\306\014\022\020\n\013SelectG"
|
||||
"rant\020\307\014\022\033\n\026RefreshPolicyInfoCache\020\310\014\022\017\n\n"
|
||||
"ListPolicy\020\311\014\022\030\n\023CreateResourceGroup\020\244\r\022"
|
||||
"\026\n\021DropResourceGroup\020\245\r\022\027\n\022ListResourceG"
|
||||
"roups\020\246\r\022\032\n\025DescribeResourceGroup\020\247\r\022\021\n\014"
|
||||
"TransferNode\020\250\r\022\024\n\017TransferReplica\020\251\r\022\023\n"
|
||||
"\016CreateDatabase\020\211\016\022\021\n\014DropDatabase\020\212\016\022\022\n"
|
||||
"\rListDatabases\020\213\016*\"\n\007DslType\022\007\n\003Dsl\020\000\022\016\n"
|
||||
"\nBoolExprV1\020\001*B\n\017CompactionState\022\021\n\rUnde"
|
||||
"fiedState\020\000\022\r\n\tExecuting\020\001\022\r\n\tCompleted\020"
|
||||
"\002*X\n\020ConsistencyLevel\022\n\n\006Strong\020\000\022\013\n\007Ses"
|
||||
"sion\020\001\022\013\n\007Bounded\020\002\022\016\n\nEventually\020\003\022\016\n\nC"
|
||||
"ustomized\020\004*\236\001\n\013ImportState\022\021\n\rImportPen"
|
||||
"ding\020\000\022\020\n\014ImportFailed\020\001\022\021\n\rImportStarte"
|
||||
"d\020\002\022\023\n\017ImportPersisted\020\005\022\021\n\rImportFlushe"
|
||||
"d\020\010\022\023\n\017ImportCompleted\020\006\022\032\n\026ImportFailed"
|
||||
"AndCleaned\020\007*2\n\nObjectType\022\016\n\nCollection"
|
||||
"\020\000\022\n\n\006Global\020\001\022\010\n\004User\020\002*\241\010\n\017ObjectPrivi"
|
||||
"lege\022\020\n\014PrivilegeAll\020\000\022\035\n\031PrivilegeCreat"
|
||||
"eCollection\020\001\022\033\n\027PrivilegeDropCollection"
|
||||
"\020\002\022\037\n\033PrivilegeDescribeCollection\020\003\022\034\n\030P"
|
||||
"rivilegeShowCollections\020\004\022\021\n\rPrivilegeLo"
|
||||
"ad\020\005\022\024\n\020PrivilegeRelease\020\006\022\027\n\023PrivilegeC"
|
||||
"ompaction\020\007\022\023\n\017PrivilegeInsert\020\010\022\023\n\017Priv"
|
||||
"ilegeDelete\020\t\022\032\n\026PrivilegeGetStatistics\020"
|
||||
"\n\022\030\n\024PrivilegeCreateIndex\020\013\022\030\n\024Privilege"
|
||||
"IndexDetail\020\014\022\026\n\022PrivilegeDropIndex\020\r\022\023\n"
|
||||
"\017PrivilegeSearch\020\016\022\022\n\016PrivilegeFlush\020\017\022\022"
|
||||
"\n\016PrivilegeQuery\020\020\022\030\n\024PrivilegeLoadBalan"
|
||||
"ce\020\021\022\023\n\017PrivilegeImport\020\022\022\034\n\030PrivilegeCr"
|
||||
"eateOwnership\020\023\022\027\n\023PrivilegeUpdateUser\020\024"
|
||||
"\022\032\n\026PrivilegeDropOwnership\020\025\022\034\n\030Privileg"
|
||||
"eSelectOwnership\020\026\022\034\n\030PrivilegeManageOwn"
|
||||
"ership\020\027\022\027\n\023PrivilegeSelectUser\020\030\022\023\n\017Pri"
|
||||
"vilegeUpsert\020\031\022 \n\034PrivilegeCreateResourc"
|
||||
"eGroup\020\032\022\036\n\032PrivilegeDropResourceGroup\020\033"
|
||||
"\022\"\n\036PrivilegeDescribeResourceGroup\020\034\022\037\n\033"
|
||||
"PrivilegeListResourceGroups\020\035\022\031\n\025Privile"
|
||||
"geTransferNode\020\036\022\034\n\030PrivilegeTransferRep"
|
||||
"lica\020\037\022\037\n\033PrivilegeGetLoadingProgress\020 \022"
|
||||
"\031\n\025PrivilegeGetLoadState\020!\022\035\n\031PrivilegeR"
|
||||
"enameCollection\020\"\022\033\n\027PrivilegeCreateData"
|
||||
"base\020#\022\031\n\025PrivilegeDropDatabase\020$\022\032\n\026Pri"
|
||||
"vilegeListDatabases\020%\022\025\n\021PrivilegeFlushA"
|
||||
"ll\020&*S\n\tStateCode\022\020\n\014Initializing\020\000\022\013\n\007H"
|
||||
"ealthy\020\001\022\014\n\010Abnormal\020\002\022\013\n\007StandBy\020\003\022\014\n\010S"
|
||||
"topping\020\004*c\n\tLoadState\022\025\n\021LoadStateNotEx"
|
||||
"ist\020\000\022\024\n\020LoadStateNotLoad\020\001\022\024\n\020LoadState"
|
||||
"Loading\020\002\022\023\n\017LoadStateLoaded\020\003:^\n\021privil"
|
||||
"ege_ext_obj\022\037.google.protobuf.MessageOpt"
|
||||
"ions\030\351\007 \001(\0132!.milvus.proto.common.Privil"
|
||||
"egeExtBi\n\016io.milvus.grpcB\013CommonProtoP\001Z"
|
||||
"4github.com/milvus-io/milvus-proto/go-ap"
|
||||
"i/v2/commonpb\240\001\001\252\002\016IO.Milvus.Grpcb\006proto"
|
||||
"3"
|
||||
"atVector\020e\022\021\n\rFloat16Vector\020f\022\t\n\005Int64\020\005"
|
||||
"\022\013\n\007VarChar\020\025*\264\020\n\007MsgType\022\r\n\tUndefined\020\000"
|
||||
"\022\024\n\020CreateCollection\020d\022\022\n\016DropCollection"
|
||||
"\020e\022\021\n\rHasCollection\020f\022\026\n\022DescribeCollect"
|
||||
"ion\020g\022\023\n\017ShowCollections\020h\022\024\n\020GetSystemC"
|
||||
"onfigs\020i\022\022\n\016LoadCollection\020j\022\025\n\021ReleaseC"
|
||||
"ollection\020k\022\017\n\013CreateAlias\020l\022\r\n\tDropAlia"
|
||||
"s\020m\022\016\n\nAlterAlias\020n\022\023\n\017AlterCollection\020o"
|
||||
"\022\024\n\020RenameCollection\020p\022\021\n\rDescribeAlias\020"
|
||||
"q\022\017\n\013ListAliases\020r\022\024\n\017CreatePartition\020\310\001"
|
||||
"\022\022\n\rDropPartition\020\311\001\022\021\n\014HasPartition\020\312\001\022"
|
||||
"\026\n\021DescribePartition\020\313\001\022\023\n\016ShowPartition"
|
||||
"s\020\314\001\022\023\n\016LoadPartitions\020\315\001\022\026\n\021ReleasePart"
|
||||
"itions\020\316\001\022\021\n\014ShowSegments\020\372\001\022\024\n\017Describe"
|
||||
"Segment\020\373\001\022\021\n\014LoadSegments\020\374\001\022\024\n\017Release"
|
||||
"Segments\020\375\001\022\024\n\017HandoffSegments\020\376\001\022\030\n\023Loa"
|
||||
"dBalanceSegments\020\377\001\022\025\n\020DescribeSegments\020"
|
||||
"\200\002\022\034\n\027FederListIndexedSegment\020\201\002\022\"\n\035Fede"
|
||||
"rDescribeSegmentIndexData\020\202\002\022\020\n\013CreateIn"
|
||||
"dex\020\254\002\022\022\n\rDescribeIndex\020\255\002\022\016\n\tDropIndex\020"
|
||||
"\256\002\022\027\n\022GetIndexStatistics\020\257\002\022\013\n\006Insert\020\220\003"
|
||||
"\022\013\n\006Delete\020\221\003\022\n\n\005Flush\020\222\003\022\027\n\022ResendSegme"
|
||||
"ntStats\020\223\003\022\013\n\006Upsert\020\224\003\022\013\n\006Search\020\364\003\022\021\n\014"
|
||||
"SearchResult\020\365\003\022\022\n\rGetIndexState\020\366\003\022\032\n\025G"
|
||||
"etIndexBuildProgress\020\367\003\022\034\n\027GetCollection"
|
||||
"Statistics\020\370\003\022\033\n\026GetPartitionStatistics\020"
|
||||
"\371\003\022\r\n\010Retrieve\020\372\003\022\023\n\016RetrieveResult\020\373\003\022\024"
|
||||
"\n\017WatchDmChannels\020\374\003\022\025\n\020RemoveDmChannels"
|
||||
"\020\375\003\022\027\n\022WatchQueryChannels\020\376\003\022\030\n\023RemoveQu"
|
||||
"eryChannels\020\377\003\022\035\n\030SealedSegmentsChangeIn"
|
||||
"fo\020\200\004\022\027\n\022WatchDeltaChannels\020\201\004\022\024\n\017GetSha"
|
||||
"rdLeaders\020\202\004\022\020\n\013GetReplicas\020\203\004\022\023\n\016UnsubD"
|
||||
"mChannel\020\204\004\022\024\n\017GetDistribution\020\205\004\022\025\n\020Syn"
|
||||
"cDistribution\020\206\004\022\020\n\013SegmentInfo\020\330\004\022\017\n\nSy"
|
||||
"stemInfo\020\331\004\022\024\n\017GetRecoveryInfo\020\332\004\022\024\n\017Get"
|
||||
"SegmentState\020\333\004\022\r\n\010TimeTick\020\260\t\022\023\n\016QueryN"
|
||||
"odeStats\020\261\t\022\016\n\tLoadIndex\020\262\t\022\016\n\tRequestID"
|
||||
"\020\263\t\022\017\n\nRequestTSO\020\264\t\022\024\n\017AllocateSegment\020"
|
||||
"\265\t\022\026\n\021SegmentStatistics\020\266\t\022\025\n\020SegmentFlu"
|
||||
"shDone\020\267\t\022\017\n\nDataNodeTt\020\270\t\022\014\n\007Connect\020\271\t"
|
||||
"\022\024\n\017ListClientInfos\020\272\t\022\023\n\016AllocTimestamp"
|
||||
"\020\273\t\022\025\n\020CreateCredential\020\334\013\022\022\n\rGetCredent"
|
||||
"ial\020\335\013\022\025\n\020DeleteCredential\020\336\013\022\025\n\020UpdateC"
|
||||
"redential\020\337\013\022\026\n\021ListCredUsernames\020\340\013\022\017\n\n"
|
||||
"CreateRole\020\300\014\022\r\n\010DropRole\020\301\014\022\024\n\017OperateU"
|
||||
"serRole\020\302\014\022\017\n\nSelectRole\020\303\014\022\017\n\nSelectUse"
|
||||
"r\020\304\014\022\023\n\016SelectResource\020\305\014\022\025\n\020OperatePriv"
|
||||
"ilege\020\306\014\022\020\n\013SelectGrant\020\307\014\022\033\n\026RefreshPol"
|
||||
"icyInfoCache\020\310\014\022\017\n\nListPolicy\020\311\014\022\030\n\023Crea"
|
||||
"teResourceGroup\020\244\r\022\026\n\021DropResourceGroup\020"
|
||||
"\245\r\022\027\n\022ListResourceGroups\020\246\r\022\032\n\025DescribeR"
|
||||
"esourceGroup\020\247\r\022\021\n\014TransferNode\020\250\r\022\024\n\017Tr"
|
||||
"ansferReplica\020\251\r\022\023\n\016CreateDatabase\020\211\016\022\021\n"
|
||||
"\014DropDatabase\020\212\016\022\022\n\rListDatabases\020\213\016*\"\n\007"
|
||||
"DslType\022\007\n\003Dsl\020\000\022\016\n\nBoolExprV1\020\001*B\n\017Comp"
|
||||
"actionState\022\021\n\rUndefiedState\020\000\022\r\n\tExecut"
|
||||
"ing\020\001\022\r\n\tCompleted\020\002*X\n\020ConsistencyLevel"
|
||||
"\022\n\n\006Strong\020\000\022\013\n\007Session\020\001\022\013\n\007Bounded\020\002\022\016"
|
||||
"\n\nEventually\020\003\022\016\n\nCustomized\020\004*\236\001\n\013Impor"
|
||||
"tState\022\021\n\rImportPending\020\000\022\020\n\014ImportFaile"
|
||||
"d\020\001\022\021\n\rImportStarted\020\002\022\023\n\017ImportPersiste"
|
||||
"d\020\005\022\021\n\rImportFlushed\020\010\022\023\n\017ImportComplete"
|
||||
"d\020\006\022\032\n\026ImportFailedAndCleaned\020\007*2\n\nObjec"
|
||||
"tType\022\016\n\nCollection\020\000\022\n\n\006Global\020\001\022\010\n\004Use"
|
||||
"r\020\002*\241\010\n\017ObjectPrivilege\022\020\n\014PrivilegeAll\020"
|
||||
"\000\022\035\n\031PrivilegeCreateCollection\020\001\022\033\n\027Priv"
|
||||
"ilegeDropCollection\020\002\022\037\n\033PrivilegeDescri"
|
||||
"beCollection\020\003\022\034\n\030PrivilegeShowCollectio"
|
||||
"ns\020\004\022\021\n\rPrivilegeLoad\020\005\022\024\n\020PrivilegeRele"
|
||||
"ase\020\006\022\027\n\023PrivilegeCompaction\020\007\022\023\n\017Privil"
|
||||
"egeInsert\020\010\022\023\n\017PrivilegeDelete\020\t\022\032\n\026Priv"
|
||||
"ilegeGetStatistics\020\n\022\030\n\024PrivilegeCreateI"
|
||||
"ndex\020\013\022\030\n\024PrivilegeIndexDetail\020\014\022\026\n\022Priv"
|
||||
"ilegeDropIndex\020\r\022\023\n\017PrivilegeSearch\020\016\022\022\n"
|
||||
"\016PrivilegeFlush\020\017\022\022\n\016PrivilegeQuery\020\020\022\030\n"
|
||||
"\024PrivilegeLoadBalance\020\021\022\023\n\017PrivilegeImpo"
|
||||
"rt\020\022\022\034\n\030PrivilegeCreateOwnership\020\023\022\027\n\023Pr"
|
||||
"ivilegeUpdateUser\020\024\022\032\n\026PrivilegeDropOwne"
|
||||
"rship\020\025\022\034\n\030PrivilegeSelectOwnership\020\026\022\034\n"
|
||||
"\030PrivilegeManageOwnership\020\027\022\027\n\023Privilege"
|
||||
"SelectUser\020\030\022\023\n\017PrivilegeUpsert\020\031\022 \n\034Pri"
|
||||
"vilegeCreateResourceGroup\020\032\022\036\n\032Privilege"
|
||||
"DropResourceGroup\020\033\022\"\n\036PrivilegeDescribe"
|
||||
"ResourceGroup\020\034\022\037\n\033PrivilegeListResource"
|
||||
"Groups\020\035\022\031\n\025PrivilegeTransferNode\020\036\022\034\n\030P"
|
||||
"rivilegeTransferReplica\020\037\022\037\n\033PrivilegeGe"
|
||||
"tLoadingProgress\020 \022\031\n\025PrivilegeGetLoadSt"
|
||||
"ate\020!\022\035\n\031PrivilegeRenameCollection\020\"\022\033\n\027"
|
||||
"PrivilegeCreateDatabase\020#\022\031\n\025PrivilegeDr"
|
||||
"opDatabase\020$\022\032\n\026PrivilegeListDatabases\020%"
|
||||
"\022\025\n\021PrivilegeFlushAll\020&*S\n\tStateCode\022\020\n\014"
|
||||
"Initializing\020\000\022\013\n\007Healthy\020\001\022\014\n\010Abnormal\020"
|
||||
"\002\022\013\n\007StandBy\020\003\022\014\n\010Stopping\020\004*c\n\tLoadStat"
|
||||
"e\022\025\n\021LoadStateNotExist\020\000\022\024\n\020LoadStateNot"
|
||||
"Load\020\001\022\024\n\020LoadStateLoading\020\002\022\023\n\017LoadStat"
|
||||
"eLoaded\020\003:^\n\021privilege_ext_obj\022\037.google."
|
||||
"protobuf.MessageOptions\030\351\007 \001(\0132!.milvus."
|
||||
"proto.common.PrivilegeExtBm\n\016io.milvus.g"
|
||||
"rpcB\013CommonProtoP\001Z4github.com/milvus-io"
|
||||
"/milvus-proto/go-api/v2/commonpb\240\001\001\252\002\022Mi"
|
||||
"lvus.Client.Grpcb\006proto3"
|
||||
;
|
||||
static const ::_pbi::DescriptorTable* const descriptor_table_common_2eproto_deps[1] = {
|
||||
&::descriptor_table_google_2fprotobuf_2fdescriptor_2eproto,
|
||||
};
|
||||
static ::_pbi::once_flag descriptor_table_common_2eproto_once;
|
||||
const ::_pbi::DescriptorTable descriptor_table_common_2eproto = {
|
||||
false, false, 7081, descriptor_table_protodef_common_2eproto,
|
||||
false, false, 7104, descriptor_table_protodef_common_2eproto,
|
||||
"common.proto",
|
||||
&descriptor_table_common_2eproto_once, descriptor_table_common_2eproto_deps, 1, 16,
|
||||
schemas, file_default_instances, TableStruct_common_2eproto::offsets,
|
||||
|
@ -765,6 +765,7 @@ bool PlaceholderType_IsValid(int value) {
|
|||
case 21:
|
||||
case 100:
|
||||
case 101:
|
||||
case 102:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
|
|
|
@ -271,6 +271,7 @@ enum PlaceholderType : int {
|
|||
None = 0,
|
||||
BinaryVector = 100,
|
||||
FloatVector = 101,
|
||||
Float16Vector = 102,
|
||||
Int64 = 5,
|
||||
VarChar = 21,
|
||||
PlaceholderType_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<int32_t>::min(),
|
||||
|
@ -278,7 +279,7 @@ enum PlaceholderType : int {
|
|||
};
|
||||
bool PlaceholderType_IsValid(int value);
|
||||
constexpr PlaceholderType PlaceholderType_MIN = None;
|
||||
constexpr PlaceholderType PlaceholderType_MAX = FloatVector;
|
||||
constexpr PlaceholderType PlaceholderType_MAX = Float16Vector;
|
||||
constexpr int PlaceholderType_ARRAYSIZE = PlaceholderType_MAX + 1;
|
||||
|
||||
const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* PlaceholderType_descriptor();
|
||||
|
|
|
@ -421,6 +421,7 @@ const uint32_t TableStruct_schema_2eproto::offsets[] PROTOBUF_SECTION_VARIABLE(p
|
|||
PROTOBUF_FIELD_OFFSET(::milvus::proto::schema::VectorField, _impl_.dim_),
|
||||
::_pbi::kInvalidFieldOffsetTag,
|
||||
::_pbi::kInvalidFieldOffsetTag,
|
||||
::_pbi::kInvalidFieldOffsetTag,
|
||||
PROTOBUF_FIELD_OFFSET(::milvus::proto::schema::VectorField, _impl_.data_),
|
||||
~0u, // no _has_bits_
|
||||
PROTOBUF_FIELD_OFFSET(::milvus::proto::schema::FieldData, _internal_metadata_),
|
||||
|
@ -473,9 +474,9 @@ static const ::_pbi::MigrationSchema schemas[] PROTOBUF_SECTION_VARIABLE(protode
|
|||
{ 94, -1, -1, sizeof(::milvus::proto::schema::ValueField)},
|
||||
{ 108, -1, -1, sizeof(::milvus::proto::schema::ScalarField)},
|
||||
{ 124, -1, -1, sizeof(::milvus::proto::schema::VectorField)},
|
||||
{ 134, -1, -1, sizeof(::milvus::proto::schema::FieldData)},
|
||||
{ 147, -1, -1, sizeof(::milvus::proto::schema::IDs)},
|
||||
{ 156, -1, -1, sizeof(::milvus::proto::schema::SearchResultData)},
|
||||
{ 135, -1, -1, sizeof(::milvus::proto::schema::FieldData)},
|
||||
{ 148, -1, -1, sizeof(::milvus::proto::schema::IDs)},
|
||||
{ 157, -1, -1, sizeof(::milvus::proto::schema::SearchResultData)},
|
||||
};
|
||||
|
||||
static const ::_pb::Message* const file_default_instances[] = {
|
||||
|
@ -541,40 +542,42 @@ const char descriptor_table_protodef_schema_2eproto[] PROTOBUF_SECTION_VARIABLE(
|
|||
"oto.schema.BytesArrayH\000\0225\n\narray_data\030\010 "
|
||||
"\001(\0132\037.milvus.proto.schema.ArrayArrayH\000\0223"
|
||||
"\n\tjson_data\030\t \001(\0132\036.milvus.proto.schema."
|
||||
"JSONArrayH\000B\006\n\004data\"t\n\013VectorField\022\013\n\003di"
|
||||
"m\030\001 \001(\003\0227\n\014float_vector\030\002 \001(\0132\037.milvus.p"
|
||||
"roto.schema.FloatArrayH\000\022\027\n\rbinary_vecto"
|
||||
"r\030\003 \001(\014H\000B\006\n\004data\"\345\001\n\tFieldData\022+\n\004type\030"
|
||||
"\001 \001(\0162\035.milvus.proto.schema.DataType\022\022\n\n"
|
||||
"field_name\030\002 \001(\t\0223\n\007scalars\030\003 \001(\0132 .milv"
|
||||
"us.proto.schema.ScalarFieldH\000\0223\n\007vectors"
|
||||
"\030\004 \001(\0132 .milvus.proto.schema.VectorField"
|
||||
"H\000\022\020\n\010field_id\030\005 \001(\003\022\022\n\nis_dynamic\030\006 \001(\010"
|
||||
"B\007\n\005field\"w\n\003IDs\0220\n\006int_id\030\001 \001(\0132\036.milvu"
|
||||
"s.proto.schema.LongArrayH\000\0222\n\006str_id\030\002 \001"
|
||||
"(\0132 .milvus.proto.schema.StringArrayH\000B\n"
|
||||
"\n\010id_field\"\310\001\n\020SearchResultData\022\023\n\013num_q"
|
||||
"ueries\030\001 \001(\003\022\r\n\005top_k\030\002 \001(\003\0223\n\013fields_da"
|
||||
"ta\030\003 \003(\0132\036.milvus.proto.schema.FieldData"
|
||||
"\022\016\n\006scores\030\004 \003(\002\022%\n\003ids\030\005 \001(\0132\030.milvus.p"
|
||||
"roto.schema.IDs\022\r\n\005topks\030\006 \003(\003\022\025\n\routput"
|
||||
"_fields\030\007 \003(\t*\261\001\n\010DataType\022\010\n\004None\020\000\022\010\n\004"
|
||||
"Bool\020\001\022\010\n\004Int8\020\002\022\t\n\005Int16\020\003\022\t\n\005Int32\020\004\022\t"
|
||||
"\n\005Int64\020\005\022\t\n\005Float\020\n\022\n\n\006Double\020\013\022\n\n\006Stri"
|
||||
"ng\020\024\022\013\n\007VarChar\020\025\022\t\n\005Array\020\026\022\010\n\004JSON\020\027\022\020"
|
||||
"\n\014BinaryVector\020d\022\017\n\013FloatVector\020e*V\n\nFie"
|
||||
"ldState\022\020\n\014FieldCreated\020\000\022\021\n\rFieldCreati"
|
||||
"ng\020\001\022\021\n\rFieldDropping\020\002\022\020\n\014FieldDropped\020"
|
||||
"\003Bi\n\016io.milvus.grpcB\013SchemaProtoP\001Z4gith"
|
||||
"ub.com/milvus-io/milvus-proto/go-api/v2/"
|
||||
"schemapb\240\001\001\252\002\016IO.Milvus.Grpcb\006proto3"
|
||||
"JSONArrayH\000B\006\n\004data\"\216\001\n\013VectorField\022\013\n\003d"
|
||||
"im\030\001 \001(\003\0227\n\014float_vector\030\002 \001(\0132\037.milvus."
|
||||
"proto.schema.FloatArrayH\000\022\027\n\rbinary_vect"
|
||||
"or\030\003 \001(\014H\000\022\030\n\016float16_vector\030\004 \001(\014H\000B\006\n\004"
|
||||
"data\"\345\001\n\tFieldData\022+\n\004type\030\001 \001(\0162\035.milvu"
|
||||
"s.proto.schema.DataType\022\022\n\nfield_name\030\002 "
|
||||
"\001(\t\0223\n\007scalars\030\003 \001(\0132 .milvus.proto.sche"
|
||||
"ma.ScalarFieldH\000\0223\n\007vectors\030\004 \001(\0132 .milv"
|
||||
"us.proto.schema.VectorFieldH\000\022\020\n\010field_i"
|
||||
"d\030\005 \001(\003\022\022\n\nis_dynamic\030\006 \001(\010B\007\n\005field\"w\n\003"
|
||||
"IDs\0220\n\006int_id\030\001 \001(\0132\036.milvus.proto.schem"
|
||||
"a.LongArrayH\000\0222\n\006str_id\030\002 \001(\0132 .milvus.p"
|
||||
"roto.schema.StringArrayH\000B\n\n\010id_field\"\310\001"
|
||||
"\n\020SearchResultData\022\023\n\013num_queries\030\001 \001(\003\022"
|
||||
"\r\n\005top_k\030\002 \001(\003\0223\n\013fields_data\030\003 \003(\0132\036.mi"
|
||||
"lvus.proto.schema.FieldData\022\016\n\006scores\030\004 "
|
||||
"\003(\002\022%\n\003ids\030\005 \001(\0132\030.milvus.proto.schema.I"
|
||||
"Ds\022\r\n\005topks\030\006 \003(\003\022\025\n\routput_fields\030\007 \003(\t"
|
||||
"*\304\001\n\010DataType\022\010\n\004None\020\000\022\010\n\004Bool\020\001\022\010\n\004Int"
|
||||
"8\020\002\022\t\n\005Int16\020\003\022\t\n\005Int32\020\004\022\t\n\005Int64\020\005\022\t\n\005"
|
||||
"Float\020\n\022\n\n\006Double\020\013\022\n\n\006String\020\024\022\013\n\007VarCh"
|
||||
"ar\020\025\022\t\n\005Array\020\026\022\010\n\004JSON\020\027\022\020\n\014BinaryVecto"
|
||||
"r\020d\022\017\n\013FloatVector\020e\022\021\n\rFloat16Vector\020f*"
|
||||
"V\n\nFieldState\022\020\n\014FieldCreated\020\000\022\021\n\rField"
|
||||
"Creating\020\001\022\021\n\rFieldDropping\020\002\022\020\n\014FieldDr"
|
||||
"opped\020\003Bm\n\016io.milvus.grpcB\013SchemaProtoP\001"
|
||||
"Z4github.com/milvus-io/milvus-proto/go-a"
|
||||
"pi/v2/schemapb\240\001\001\252\002\022Milvus.Client.Grpcb\006"
|
||||
"proto3"
|
||||
;
|
||||
static const ::_pbi::DescriptorTable* const descriptor_table_schema_2eproto_deps[1] = {
|
||||
&::descriptor_table_common_2eproto,
|
||||
};
|
||||
static ::_pbi::once_flag descriptor_table_schema_2eproto_once;
|
||||
const ::_pbi::DescriptorTable descriptor_table_schema_2eproto = {
|
||||
false, false, 2756, descriptor_table_protodef_schema_2eproto,
|
||||
false, false, 2806, descriptor_table_protodef_schema_2eproto,
|
||||
"schema.proto",
|
||||
&descriptor_table_schema_2eproto_once, descriptor_table_schema_2eproto_deps, 1, 17,
|
||||
schemas, file_default_instances, TableStruct_schema_2eproto::offsets,
|
||||
|
@ -610,6 +613,7 @@ bool DataType_IsValid(int value) {
|
|||
case 23:
|
||||
case 100:
|
||||
case 101:
|
||||
case 102:
|
||||
return true;
|
||||
default:
|
||||
return false;
|
||||
|
@ -4456,6 +4460,10 @@ VectorField::VectorField(const VectorField& from)
|
|||
_this->_internal_set_binary_vector(from._internal_binary_vector());
|
||||
break;
|
||||
}
|
||||
case kFloat16Vector: {
|
||||
_this->_internal_set_float16_vector(from._internal_float16_vector());
|
||||
break;
|
||||
}
|
||||
case DATA_NOT_SET: {
|
||||
break;
|
||||
}
|
||||
|
@ -4509,6 +4517,10 @@ void VectorField::clear_data() {
|
|||
_impl_.data_.binary_vector_.Destroy();
|
||||
break;
|
||||
}
|
||||
case kFloat16Vector: {
|
||||
_impl_.data_.float16_vector_.Destroy();
|
||||
break;
|
||||
}
|
||||
case DATA_NOT_SET: {
|
||||
break;
|
||||
}
|
||||
|
@ -4559,6 +4571,15 @@ const char* VectorField::_InternalParse(const char* ptr, ::_pbi::ParseContext* c
|
|||
} else
|
||||
goto handle_unusual;
|
||||
continue;
|
||||
// bytes float16_vector = 4;
|
||||
case 4:
|
||||
if (PROTOBUF_PREDICT_TRUE(static_cast<uint8_t>(tag) == 34)) {
|
||||
auto str = _internal_mutable_float16_vector();
|
||||
ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
|
||||
CHK_(ptr);
|
||||
} else
|
||||
goto handle_unusual;
|
||||
continue;
|
||||
default:
|
||||
goto handle_unusual;
|
||||
} // switch
|
||||
|
@ -4607,6 +4628,12 @@ uint8_t* VectorField::_InternalSerialize(
|
|||
3, this->_internal_binary_vector(), target);
|
||||
}
|
||||
|
||||
// bytes float16_vector = 4;
|
||||
if (_internal_has_float16_vector()) {
|
||||
target = stream->WriteBytesMaybeAliased(
|
||||
4, this->_internal_float16_vector(), target);
|
||||
}
|
||||
|
||||
if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
|
||||
target = ::_pbi::WireFormat::InternalSerializeUnknownFieldsToArray(
|
||||
_internal_metadata_.unknown_fields<::PROTOBUF_NAMESPACE_ID::UnknownFieldSet>(::PROTOBUF_NAMESPACE_ID::UnknownFieldSet::default_instance), target, stream);
|
||||
|
@ -4643,6 +4670,13 @@ size_t VectorField::ByteSizeLong() const {
|
|||
this->_internal_binary_vector());
|
||||
break;
|
||||
}
|
||||
// bytes float16_vector = 4;
|
||||
case kFloat16Vector: {
|
||||
total_size += 1 +
|
||||
::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::BytesSize(
|
||||
this->_internal_float16_vector());
|
||||
break;
|
||||
}
|
||||
case DATA_NOT_SET: {
|
||||
break;
|
||||
}
|
||||
|
@ -4678,6 +4712,10 @@ void VectorField::MergeImpl(::PROTOBUF_NAMESPACE_ID::Message& to_msg, const ::PR
|
|||
_this->_internal_set_binary_vector(from._internal_binary_vector());
|
||||
break;
|
||||
}
|
||||
case kFloat16Vector: {
|
||||
_this->_internal_set_float16_vector(from._internal_float16_vector());
|
||||
break;
|
||||
}
|
||||
case DATA_NOT_SET: {
|
||||
break;
|
||||
}
|
||||
|
|
|
@ -141,12 +141,13 @@ enum DataType : int {
|
|||
JSON = 23,
|
||||
BinaryVector = 100,
|
||||
FloatVector = 101,
|
||||
Float16Vector = 102,
|
||||
DataType_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<int32_t>::min(),
|
||||
DataType_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<int32_t>::max()
|
||||
};
|
||||
bool DataType_IsValid(int value);
|
||||
constexpr DataType DataType_MIN = None;
|
||||
constexpr DataType DataType_MAX = FloatVector;
|
||||
constexpr DataType DataType_MAX = Float16Vector;
|
||||
constexpr int DataType_ARRAYSIZE = DataType_MAX + 1;
|
||||
|
||||
const ::PROTOBUF_NAMESPACE_ID::EnumDescriptor* DataType_descriptor();
|
||||
|
@ -2861,6 +2862,7 @@ class VectorField final :
|
|||
enum DataCase {
|
||||
kFloatVector = 2,
|
||||
kBinaryVector = 3,
|
||||
kFloat16Vector = 4,
|
||||
DATA_NOT_SET = 0,
|
||||
};
|
||||
|
||||
|
@ -2945,6 +2947,7 @@ class VectorField final :
|
|||
kDimFieldNumber = 1,
|
||||
kFloatVectorFieldNumber = 2,
|
||||
kBinaryVectorFieldNumber = 3,
|
||||
kFloat16VectorFieldNumber = 4,
|
||||
};
|
||||
// int64 dim = 1;
|
||||
void clear_dim();
|
||||
|
@ -2991,6 +2994,24 @@ class VectorField final :
|
|||
std::string* _internal_mutable_binary_vector();
|
||||
public:
|
||||
|
||||
// bytes float16_vector = 4;
|
||||
bool has_float16_vector() const;
|
||||
private:
|
||||
bool _internal_has_float16_vector() const;
|
||||
public:
|
||||
void clear_float16_vector();
|
||||
const std::string& float16_vector() const;
|
||||
template <typename ArgT0 = const std::string&, typename... ArgT>
|
||||
void set_float16_vector(ArgT0&& arg0, ArgT... args);
|
||||
std::string* mutable_float16_vector();
|
||||
PROTOBUF_NODISCARD std::string* release_float16_vector();
|
||||
void set_allocated_float16_vector(std::string* float16_vector);
|
||||
private:
|
||||
const std::string& _internal_float16_vector() const;
|
||||
inline PROTOBUF_ALWAYS_INLINE void _internal_set_float16_vector(const std::string& value);
|
||||
std::string* _internal_mutable_float16_vector();
|
||||
public:
|
||||
|
||||
void clear_data();
|
||||
DataCase data_case() const;
|
||||
// @@protoc_insertion_point(class_scope:milvus.proto.schema.VectorField)
|
||||
|
@ -2998,6 +3019,7 @@ class VectorField final :
|
|||
class _Internal;
|
||||
void set_has_float_vector();
|
||||
void set_has_binary_vector();
|
||||
void set_has_float16_vector();
|
||||
|
||||
inline bool has_data() const;
|
||||
inline void clear_has_data();
|
||||
|
@ -3012,6 +3034,7 @@ class VectorField final :
|
|||
::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
|
||||
::milvus::proto::schema::FloatArray* float_vector_;
|
||||
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr binary_vector_;
|
||||
::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr float16_vector_;
|
||||
} data_;
|
||||
mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
|
||||
uint32_t _oneof_case_[1];
|
||||
|
@ -6121,6 +6144,83 @@ inline void VectorField::set_allocated_binary_vector(std::string* binary_vector)
|
|||
// @@protoc_insertion_point(field_set_allocated:milvus.proto.schema.VectorField.binary_vector)
|
||||
}
|
||||
|
||||
// bytes float16_vector = 4;
|
||||
inline bool VectorField::_internal_has_float16_vector() const {
|
||||
return data_case() == kFloat16Vector;
|
||||
}
|
||||
inline bool VectorField::has_float16_vector() const {
|
||||
return _internal_has_float16_vector();
|
||||
}
|
||||
inline void VectorField::set_has_float16_vector() {
|
||||
_impl_._oneof_case_[0] = kFloat16Vector;
|
||||
}
|
||||
inline void VectorField::clear_float16_vector() {
|
||||
if (_internal_has_float16_vector()) {
|
||||
_impl_.data_.float16_vector_.Destroy();
|
||||
clear_has_data();
|
||||
}
|
||||
}
|
||||
inline const std::string& VectorField::float16_vector() const {
|
||||
// @@protoc_insertion_point(field_get:milvus.proto.schema.VectorField.float16_vector)
|
||||
return _internal_float16_vector();
|
||||
}
|
||||
template <typename ArgT0, typename... ArgT>
|
||||
inline void VectorField::set_float16_vector(ArgT0&& arg0, ArgT... args) {
|
||||
if (!_internal_has_float16_vector()) {
|
||||
clear_data();
|
||||
set_has_float16_vector();
|
||||
_impl_.data_.float16_vector_.InitDefault();
|
||||
}
|
||||
_impl_.data_.float16_vector_.SetBytes( static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
|
||||
// @@protoc_insertion_point(field_set:milvus.proto.schema.VectorField.float16_vector)
|
||||
}
|
||||
inline std::string* VectorField::mutable_float16_vector() {
|
||||
std::string* _s = _internal_mutable_float16_vector();
|
||||
// @@protoc_insertion_point(field_mutable:milvus.proto.schema.VectorField.float16_vector)
|
||||
return _s;
|
||||
}
|
||||
inline const std::string& VectorField::_internal_float16_vector() const {
|
||||
if (_internal_has_float16_vector()) {
|
||||
return _impl_.data_.float16_vector_.Get();
|
||||
}
|
||||
return ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited();
|
||||
}
|
||||
inline void VectorField::_internal_set_float16_vector(const std::string& value) {
|
||||
if (!_internal_has_float16_vector()) {
|
||||
clear_data();
|
||||
set_has_float16_vector();
|
||||
_impl_.data_.float16_vector_.InitDefault();
|
||||
}
|
||||
_impl_.data_.float16_vector_.Set(value, GetArenaForAllocation());
|
||||
}
|
||||
inline std::string* VectorField::_internal_mutable_float16_vector() {
|
||||
if (!_internal_has_float16_vector()) {
|
||||
clear_data();
|
||||
set_has_float16_vector();
|
||||
_impl_.data_.float16_vector_.InitDefault();
|
||||
}
|
||||
return _impl_.data_.float16_vector_.Mutable( GetArenaForAllocation());
|
||||
}
|
||||
inline std::string* VectorField::release_float16_vector() {
|
||||
// @@protoc_insertion_point(field_release:milvus.proto.schema.VectorField.float16_vector)
|
||||
if (_internal_has_float16_vector()) {
|
||||
clear_has_data();
|
||||
return _impl_.data_.float16_vector_.Release();
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
inline void VectorField::set_allocated_float16_vector(std::string* float16_vector) {
|
||||
if (has_data()) {
|
||||
clear_data();
|
||||
}
|
||||
if (float16_vector != nullptr) {
|
||||
set_has_float16_vector();
|
||||
_impl_.data_.float16_vector_.InitAllocated(float16_vector, GetArenaForAllocation());
|
||||
}
|
||||
// @@protoc_insertion_point(field_set_allocated:milvus.proto.schema.VectorField.float16_vector)
|
||||
}
|
||||
|
||||
inline bool VectorField::has_data() const {
|
||||
return data_case() != DATA_NOT_SET;
|
||||
}
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
#include <vector>
|
||||
|
||||
#include "common/Consts.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/RangeSearchHelper.h"
|
||||
#include "common/Utils.h"
|
||||
#include "common/Tracer.h"
|
||||
|
@ -48,96 +49,88 @@ BruteForceSearch(const dataset::SearchDataset& dataset,
|
|||
dataset.topk,
|
||||
dataset.metric_type,
|
||||
dataset.round_decimal);
|
||||
try {
|
||||
auto nq = dataset.num_queries;
|
||||
auto dim = dataset.dim;
|
||||
auto topk = dataset.topk;
|
||||
auto nq = dataset.num_queries;
|
||||
auto dim = dataset.dim;
|
||||
auto topk = dataset.topk;
|
||||
|
||||
auto base_dataset =
|
||||
knowhere::GenDataSet(chunk_rows, dim, chunk_data_raw);
|
||||
auto query_dataset = knowhere::GenDataSet(nq, dim, dataset.query_data);
|
||||
auto base_dataset = knowhere::GenDataSet(chunk_rows, dim, chunk_data_raw);
|
||||
auto query_dataset = knowhere::GenDataSet(nq, dim, dataset.query_data);
|
||||
|
||||
if (data_type == DataType::VECTOR_FLOAT16) {
|
||||
// Todo: Temporarily use cast to float32 to achieve, need to optimize
|
||||
// first, First, transfer the cast to knowhere part
|
||||
// second, knowhere partially supports float16 and removes the forced conversion to float32
|
||||
auto xb = base_dataset->GetTensor();
|
||||
std::vector<float> float_xb(base_dataset->GetRows() *
|
||||
base_dataset->GetDim());
|
||||
if (data_type == DataType::VECTOR_FLOAT16) {
|
||||
// Todo: Temporarily use cast to float32 to achieve, need to optimize
|
||||
// first, First, transfer the cast to knowhere part
|
||||
// second, knowhere partially supports float16 and removes the forced conversion to float32
|
||||
auto xb = base_dataset->GetTensor();
|
||||
std::vector<float> float_xb(base_dataset->GetRows() *
|
||||
base_dataset->GetDim());
|
||||
|
||||
auto xq = query_dataset->GetTensor();
|
||||
std::vector<float> float_xq(query_dataset->GetRows() *
|
||||
query_dataset->GetDim());
|
||||
auto xq = query_dataset->GetTensor();
|
||||
std::vector<float> float_xq(query_dataset->GetRows() *
|
||||
query_dataset->GetDim());
|
||||
|
||||
auto fp16_xb = (const float16*)xb;
|
||||
for (int i = 0;
|
||||
i < base_dataset->GetRows() * base_dataset->GetDim();
|
||||
i++) {
|
||||
float_xb[i] = (float)fp16_xb[i];
|
||||
}
|
||||
|
||||
auto fp16_xq = (const float16*)xq;
|
||||
for (int i = 0;
|
||||
i < query_dataset->GetRows() * query_dataset->GetDim();
|
||||
i++) {
|
||||
float_xq[i] = (float)fp16_xq[i];
|
||||
}
|
||||
void* void_ptr_xb = static_cast<void*>(float_xb.data());
|
||||
void* void_ptr_xq = static_cast<void*>(float_xq.data());
|
||||
base_dataset = knowhere::GenDataSet(chunk_rows, dim, void_ptr_xb);
|
||||
query_dataset = knowhere::GenDataSet(nq, dim, void_ptr_xq);
|
||||
auto fp16_xb = static_cast<const float16*>(xb);
|
||||
for (int i = 0; i < base_dataset->GetRows() * base_dataset->GetDim();
|
||||
i++) {
|
||||
float_xb[i] = (float)fp16_xb[i];
|
||||
}
|
||||
|
||||
auto config = knowhere::Json{
|
||||
{knowhere::meta::METRIC_TYPE, dataset.metric_type},
|
||||
{knowhere::meta::DIM, dim},
|
||||
{knowhere::meta::TOPK, topk},
|
||||
};
|
||||
|
||||
sub_result.mutable_seg_offsets().resize(nq * topk);
|
||||
sub_result.mutable_distances().resize(nq * topk);
|
||||
|
||||
if (conf.contains(RADIUS)) {
|
||||
config[RADIUS] = conf[RADIUS].get<float>();
|
||||
if (conf.contains(RANGE_FILTER)) {
|
||||
config[RANGE_FILTER] = conf[RANGE_FILTER].get<float>();
|
||||
CheckRangeSearchParam(
|
||||
config[RADIUS], config[RANGE_FILTER], dataset.metric_type);
|
||||
}
|
||||
auto res = knowhere::BruteForce::RangeSearch(
|
||||
base_dataset, query_dataset, config, bitset);
|
||||
milvus::tracer::AddEvent("knowhere_finish_BruteForce_RangeSearch");
|
||||
if (!res.has_value()) {
|
||||
PanicCodeInfo(ErrorCodeEnum::UnexpectedError,
|
||||
fmt::format("failed to range search: {}: {}",
|
||||
KnowhereStatusString(res.error()),
|
||||
res.what()));
|
||||
}
|
||||
auto result = ReGenRangeSearchResult(
|
||||
res.value(), topk, nq, dataset.metric_type);
|
||||
milvus::tracer::AddEvent("ReGenRangeSearchResult");
|
||||
std::copy_n(
|
||||
GetDatasetIDs(result), nq * topk, sub_result.get_seg_offsets());
|
||||
std::copy_n(GetDatasetDistance(result),
|
||||
nq * topk,
|
||||
sub_result.get_distances());
|
||||
} else {
|
||||
auto stat = knowhere::BruteForce::SearchWithBuf(
|
||||
base_dataset,
|
||||
query_dataset,
|
||||
sub_result.mutable_seg_offsets().data(),
|
||||
sub_result.mutable_distances().data(),
|
||||
config,
|
||||
bitset);
|
||||
milvus::tracer::AddEvent(
|
||||
"knowhere_finish_BruteForce_SearchWithBuf");
|
||||
if (stat != knowhere::Status::success) {
|
||||
throw std::invalid_argument("invalid metric type, " +
|
||||
KnowhereStatusString(stat));
|
||||
}
|
||||
auto fp16_xq = static_cast<const float16*>(xq);
|
||||
for (int i = 0; i < query_dataset->GetRows() * query_dataset->GetDim();
|
||||
i++) {
|
||||
float_xq[i] = (float)fp16_xq[i];
|
||||
}
|
||||
void* void_ptr_xb = static_cast<void*>(float_xb.data());
|
||||
void* void_ptr_xq = static_cast<void*>(float_xq.data());
|
||||
base_dataset = knowhere::GenDataSet(chunk_rows, dim, void_ptr_xb);
|
||||
query_dataset = knowhere::GenDataSet(nq, dim, void_ptr_xq);
|
||||
}
|
||||
|
||||
auto config = knowhere::Json{
|
||||
{knowhere::meta::METRIC_TYPE, dataset.metric_type},
|
||||
{knowhere::meta::DIM, dim},
|
||||
{knowhere::meta::TOPK, topk},
|
||||
};
|
||||
|
||||
sub_result.mutable_seg_offsets().resize(nq * topk);
|
||||
sub_result.mutable_distances().resize(nq * topk);
|
||||
|
||||
if (conf.contains(RADIUS)) {
|
||||
config[RADIUS] = conf[RADIUS].get<float>();
|
||||
if (conf.contains(RANGE_FILTER)) {
|
||||
config[RANGE_FILTER] = conf[RANGE_FILTER].get<float>();
|
||||
CheckRangeSearchParam(
|
||||
config[RADIUS], config[RANGE_FILTER], dataset.metric_type);
|
||||
}
|
||||
auto res = knowhere::BruteForce::RangeSearch(
|
||||
base_dataset, query_dataset, config, bitset);
|
||||
milvus::tracer::AddEvent("knowhere_finish_BruteForce_RangeSearch");
|
||||
if (!res.has_value()) {
|
||||
PanicCodeInfo(KnowhereError,
|
||||
fmt::format("failed to range search: {}: {}",
|
||||
KnowhereStatusString(res.error()),
|
||||
res.what()));
|
||||
}
|
||||
auto result =
|
||||
ReGenRangeSearchResult(res.value(), topk, nq, dataset.metric_type);
|
||||
milvus::tracer::AddEvent("ReGenRangeSearchResult");
|
||||
std::copy_n(
|
||||
GetDatasetIDs(result), nq * topk, sub_result.get_seg_offsets());
|
||||
std::copy_n(
|
||||
GetDatasetDistance(result), nq * topk, sub_result.get_distances());
|
||||
} else {
|
||||
auto stat = knowhere::BruteForce::SearchWithBuf(
|
||||
base_dataset,
|
||||
query_dataset,
|
||||
sub_result.mutable_seg_offsets().data(),
|
||||
sub_result.mutable_distances().data(),
|
||||
config,
|
||||
bitset);
|
||||
milvus::tracer::AddEvent("knowhere_finish_BruteForce_SearchWithBuf");
|
||||
if (stat != knowhere::Status::success) {
|
||||
throw SegcoreError(
|
||||
KnowhereError,
|
||||
"invalid metric type, " + KnowhereStatusString(stat));
|
||||
}
|
||||
} catch (std::exception& e) {
|
||||
PanicInfo(e.what());
|
||||
}
|
||||
sub_result.round_values();
|
||||
return sub_result;
|
||||
|
|
|
@ -1649,7 +1649,9 @@ ExecExprVisitor::ExecCompareExprDispatcher(CompareExpr& expr, Op op)
|
|||
}
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type: {}", type));
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}", type));
|
||||
}
|
||||
};
|
||||
auto left = getChunkData(
|
||||
|
@ -1717,7 +1719,9 @@ ExecExprVisitor::visit(CompareExpr& expr) {
|
|||
// case OpType::PostfixMatch: {
|
||||
// }
|
||||
default: {
|
||||
PanicInfo("unsupported optype");
|
||||
PanicCodeInfo(OpTypeInvalid,
|
||||
fmt::format("unsupported optype {}",
|
||||
fmt::underlying(expr.op_type_)));
|
||||
}
|
||||
}
|
||||
AssertInfo(res.size() == row_count_,
|
||||
|
@ -1761,7 +1765,9 @@ ExecExprVisitor::ExecTermVisitorImpl(TermExpr& expr_raw) -> BitsetType {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported type");
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}", expr.val_case_));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2018,14 +2024,16 @@ ExecExprVisitor::visit(TermExpr& expr) {
|
|||
res = ExecTermVisitorImplTemplateJson<bool>(expr);
|
||||
break;
|
||||
default:
|
||||
PanicInfo(
|
||||
fmt::format("unknown data type: {}", expr.val_case_));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
expr.val_case_));
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type: {}",
|
||||
expr.column_.data_type));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
expr.column_.data_type));
|
||||
}
|
||||
AssertInfo(res.size() == row_count_,
|
||||
"[ExecExprVisitor]Size of results not equal row count");
|
||||
|
@ -2052,8 +2060,9 @@ ExecExprVisitor::visit(ExistsExpr& expr) {
|
|||
break;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type {}",
|
||||
expr.column_.data_type));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
expr.column_.data_type));
|
||||
}
|
||||
AssertInfo(res.size() == row_count_,
|
||||
"[ExecExprVisitor]Size of results not equal row count");
|
||||
|
@ -2116,8 +2125,9 @@ compareTwoJsonArray(T arr1, const proto::plan::Array& arr2) {
|
|||
break;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type {}",
|
||||
arr2.array(i).val_case()));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
arr2.array(i).val_case()));
|
||||
}
|
||||
i++;
|
||||
}
|
||||
|
@ -2270,8 +2280,9 @@ ExecExprVisitor::ExecJsonContainsWithDiffType(JsonContainsExpr& expr_raw)
|
|||
break;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type {}",
|
||||
element.val_case()));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
element.val_case()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2384,83 +2395,84 @@ ExecExprVisitor::ExecJsonContainsAllWithDiffType(JsonContainsExpr& expr_raw)
|
|||
elements_index.insert(i);
|
||||
i++;
|
||||
}
|
||||
auto elem_func =
|
||||
[&elements, &elements_index, &pointer](const milvus::Json& json) {
|
||||
auto doc = json.doc();
|
||||
auto array = doc.at_pointer(pointer).get_array();
|
||||
if (array.error()) {
|
||||
return false;
|
||||
}
|
||||
std::unordered_set<int> tmp_elements_index(elements_index);
|
||||
for (auto&& it : array) {
|
||||
int i = -1;
|
||||
for (auto& element : elements) {
|
||||
i++;
|
||||
switch (element.val_case()) {
|
||||
case proto::plan::GenericValue::kBoolVal: {
|
||||
auto val = it.template get<bool>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
if (val.value() == element.bool_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
auto elem_func = [&elements, &elements_index, &pointer](
|
||||
const milvus::Json& json) {
|
||||
auto doc = json.doc();
|
||||
auto array = doc.at_pointer(pointer).get_array();
|
||||
if (array.error()) {
|
||||
return false;
|
||||
}
|
||||
std::unordered_set<int> tmp_elements_index(elements_index);
|
||||
for (auto&& it : array) {
|
||||
int i = -1;
|
||||
for (auto& element : elements) {
|
||||
i++;
|
||||
switch (element.val_case()) {
|
||||
case proto::plan::GenericValue::kBoolVal: {
|
||||
auto val = it.template get<bool>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
case proto::plan::GenericValue::kInt64Val: {
|
||||
auto val = it.template get<int64_t>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
if (val.value() == element.int64_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
if (val.value() == element.bool_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
case proto::plan::GenericValue::kFloatVal: {
|
||||
auto val = it.template get<double>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
if (val.value() == element.float_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
break;
|
||||
}
|
||||
case proto::plan::GenericValue::kInt64Val: {
|
||||
auto val = it.template get<int64_t>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
case proto::plan::GenericValue::kStringVal: {
|
||||
auto val = it.template get<std::string_view>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
if (val.value() == element.string_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
if (val.value() == element.int64_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
case proto::plan::GenericValue::kArrayVal: {
|
||||
auto val = it.get_array();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
if (compareTwoJsonArray(val, element.array_val())) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
break;
|
||||
}
|
||||
case proto::plan::GenericValue::kFloatVal: {
|
||||
auto val = it.template get<double>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type {}",
|
||||
if (val.value() == element.float_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case proto::plan::GenericValue::kStringVal: {
|
||||
auto val = it.template get<std::string_view>();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
if (val.value() == element.string_val()) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
}
|
||||
case proto::plan::GenericValue::kArrayVal: {
|
||||
auto val = it.get_array();
|
||||
if (val.error()) {
|
||||
continue;
|
||||
}
|
||||
if (compareTwoJsonArray(val, element.array_val())) {
|
||||
tmp_elements_index.erase(i);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
element.val_case()));
|
||||
}
|
||||
if (tmp_elements_index.size() == 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
if (tmp_elements_index.size() == 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return tmp_elements_index.size() == 0;
|
||||
};
|
||||
if (tmp_elements_index.size() == 0) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return tmp_elements_index.size() == 0;
|
||||
};
|
||||
|
||||
return ExecRangeVisitorImpl<milvus::Json>(
|
||||
expr.column_.field_id, index_func, elem_func);
|
||||
|
@ -2499,7 +2511,9 @@ ExecExprVisitor::visit(JsonContainsExpr& expr) {
|
|||
break;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type"));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
expr.val_case_));
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -2530,7 +2544,9 @@ ExecExprVisitor::visit(JsonContainsExpr& expr) {
|
|||
break;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported data type"));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported data type {}",
|
||||
expr.val_case_));
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
@ -2538,7 +2554,9 @@ ExecExprVisitor::visit(JsonContainsExpr& expr) {
|
|||
break;
|
||||
}
|
||||
default:
|
||||
PanicInfo(fmt::format("unsupported json contains type"));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported json contains type {}",
|
||||
expr.val_case_));
|
||||
}
|
||||
AssertInfo(res.size() == row_count_,
|
||||
"[ExecExprVisitor]Size of results not equal row count");
|
||||
|
|
|
@ -26,39 +26,6 @@ class VerifyPlanNodeVisitor : PlanNodeVisitor {
|
|||
};
|
||||
} // namespace impl
|
||||
|
||||
static IndexType
|
||||
InferIndexType(const Json& search_params) {
|
||||
// ivf -> nprobe
|
||||
// hnsw -> ef
|
||||
static const std::map<std::string, IndexType> key_list = [] {
|
||||
std::map<std::string, IndexType> list;
|
||||
namespace ip = knowhere::indexparam;
|
||||
namespace ie = knowhere::IndexEnum;
|
||||
list.emplace(ip::NPROBE, ie::INDEX_FAISS_IVFFLAT);
|
||||
list.emplace(ip::EF, ie::INDEX_HNSW);
|
||||
return list;
|
||||
}();
|
||||
auto dbg_str = search_params.dump();
|
||||
for (auto& kv : search_params.items()) {
|
||||
std::string key = kv.key();
|
||||
if (key_list.count(key)) {
|
||||
return key_list.at(key);
|
||||
}
|
||||
}
|
||||
PanicCodeInfo(ErrorCodeEnum::IllegalArgument, "failed to infer index type");
|
||||
}
|
||||
|
||||
static IndexType
|
||||
InferBinaryIndexType(const Json& search_params) {
|
||||
namespace ip = knowhere::indexparam;
|
||||
namespace ie = knowhere::IndexEnum;
|
||||
if (search_params.contains(ip::NPROBE)) {
|
||||
return ie::INDEX_FAISS_BIN_IVFFLAT;
|
||||
} else {
|
||||
return ie::INDEX_FAISS_BIN_IDMAP;
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
VerifyPlanNodeVisitor::visit(FloatVectorANNS&) {
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ VectorBase::set_data_raw(ssize_t element_offset,
|
|||
return set_data_raw(
|
||||
element_offset, VEC_FIELD_DATA(data, float16), element_count);
|
||||
} else {
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(DataTypeInvalid, "unsupported");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -88,8 +88,9 @@ VectorBase::set_data_raw(ssize_t element_offset,
|
|||
return set_data_raw(element_offset, data_raw.data(), element_count);
|
||||
}
|
||||
default: {
|
||||
PanicInfo(fmt::format("unsupported datatype {}",
|
||||
field_meta.get_data_type()));
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported datatype {}",
|
||||
field_meta.get_data_type()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -182,9 +182,8 @@ class ConcurrentVectorImpl : public VectorBase {
|
|||
return Span<TraitType>(chunk.data(), chunk.size());
|
||||
} else if constexpr (std::is_same_v<Type, int64_t> || // NOLINT
|
||||
std::is_same_v<Type, int>) {
|
||||
// TODO: where should the braces be placed?
|
||||
// only for testing
|
||||
PanicInfo("unimplemented");
|
||||
PanicCodeInfo(NotImplemented, "unimplemented");
|
||||
} else {
|
||||
static_assert(
|
||||
std::is_same_v<typename TraitType::embedded_type, Type>);
|
||||
|
|
|
@ -11,6 +11,8 @@
|
|||
|
||||
#include <string>
|
||||
#include <thread>
|
||||
#include "common/EasyAssert.h"
|
||||
#include "fmt/format.h"
|
||||
#include "index/ScalarIndexSort.h"
|
||||
#include "index/StringIndexSort.h"
|
||||
|
||||
|
@ -20,16 +22,17 @@
|
|||
#include "IndexConfigGenerator.h"
|
||||
|
||||
namespace milvus::segcore {
|
||||
using std::unique_ptr;
|
||||
|
||||
VectorFieldIndexing::VectorFieldIndexing(const FieldMeta& field_meta,
|
||||
const FieldIndexMeta& field_index_meta,
|
||||
int64_t segment_max_row_count,
|
||||
const SegcoreConfig& segcore_config)
|
||||
: FieldIndexing(field_meta, segcore_config),
|
||||
config_(std::make_unique<VecIndexConfig>(
|
||||
segment_max_row_count, field_index_meta, segcore_config)),
|
||||
build(false),
|
||||
sync_with_index(false) {
|
||||
sync_with_index(false),
|
||||
config_(std::make_unique<VecIndexConfig>(
|
||||
segment_max_row_count, field_index_meta, segcore_config)) {
|
||||
index_ = std::make_unique<index::VectorMemIndex>(config_->GetIndexType(),
|
||||
config_->GetMetricType());
|
||||
}
|
||||
|
@ -99,7 +102,7 @@ VectorFieldIndexing::AppendSegmentIndex(int64_t reserved_offset,
|
|||
int64_t vec_num = vector_id_end - vector_id_beg + 1;
|
||||
// for train index
|
||||
const void* data_addr;
|
||||
std::unique_ptr<float[]> vec_data;
|
||||
unique_ptr<float[]> vec_data;
|
||||
//all train data in one chunk
|
||||
if (chunk_id_beg == chunk_id_end) {
|
||||
data_addr = vec_base->get_chunk_data(chunk_id_beg);
|
||||
|
@ -243,8 +246,10 @@ CreateIndex(const FieldMeta& field_meta,
|
|||
segment_max_row_count,
|
||||
segcore_config);
|
||||
} else {
|
||||
// TODO
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format("unsupported vector type in index: {}",
|
||||
fmt::underlying(field_meta.get_data_type())));
|
||||
}
|
||||
}
|
||||
switch (field_meta.get_data_type()) {
|
||||
|
@ -273,7 +278,10 @@ CreateIndex(const FieldMeta& field_meta,
|
|||
return std::make_unique<ScalarFieldIndexing<std::string>>(
|
||||
field_meta, segcore_config);
|
||||
default:
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format("unsupported scalar type in index: {}",
|
||||
fmt::underlying(field_meta.get_data_type())));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -111,7 +111,8 @@ class ScalarFieldIndexing : public FieldIndexing {
|
|||
int64_t size,
|
||||
const VectorBase* vec_base,
|
||||
const void* data_source) override {
|
||||
PanicInfo("scalar index don't support append segment index");
|
||||
PanicCodeInfo(Unsupported,
|
||||
"scalar index don't support append segment index");
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -119,7 +120,8 @@ class ScalarFieldIndexing : public FieldIndexing {
|
|||
int64_t count,
|
||||
int64_t element_size,
|
||||
void* output) override {
|
||||
PanicInfo("scalar index don't support get data from index");
|
||||
PanicCodeInfo(Unsupported,
|
||||
"scalar index don't support get data from index");
|
||||
}
|
||||
idx_t
|
||||
get_index_cursor() override {
|
||||
|
|
|
@ -21,8 +21,10 @@
|
|||
#include <queue>
|
||||
|
||||
#include "TimestampIndex.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/Schema.h"
|
||||
#include "common/Types.h"
|
||||
#include "fmt/format.h"
|
||||
#include "mmap/Column.h"
|
||||
#include "segcore/AckResponder.h"
|
||||
#include "segcore/ConcurrentVector.h"
|
||||
|
@ -77,7 +79,8 @@ class OffsetOrderedMap : public OffsetMap {
|
|||
|
||||
void
|
||||
seal() override {
|
||||
PanicInfo(
|
||||
PanicCodeInfo(
|
||||
NotImplemented,
|
||||
"OffsetOrderedMap used for growing segment could not be sealed.");
|
||||
}
|
||||
|
||||
|
@ -157,8 +160,10 @@ class OffsetOrderedArray : public OffsetMap {
|
|||
|
||||
void
|
||||
insert(const PkType& pk, int64_t offset) override {
|
||||
if (is_sealed)
|
||||
PanicInfo("OffsetOrderedArray could not insert after seal");
|
||||
if (is_sealed) {
|
||||
PanicCodeInfo(Unsupported,
|
||||
"OffsetOrderedArray could not insert after seal");
|
||||
}
|
||||
array_.push_back(std::make_pair(std::get<T>(pk), offset));
|
||||
}
|
||||
|
||||
|
@ -248,25 +253,31 @@ struct InsertRecord {
|
|||
pk_field_id.value() == field_id) {
|
||||
switch (field_meta.get_data_type()) {
|
||||
case DataType::INT64: {
|
||||
if (is_sealed)
|
||||
if (is_sealed) {
|
||||
pk2offset_ =
|
||||
std::make_unique<OffsetOrderedArray<int64_t>>();
|
||||
else
|
||||
} else {
|
||||
pk2offset_ =
|
||||
std::make_unique<OffsetOrderedMap<int64_t>>();
|
||||
}
|
||||
break;
|
||||
}
|
||||
case DataType::VARCHAR: {
|
||||
if (is_sealed)
|
||||
if (is_sealed) {
|
||||
pk2offset_ = std::make_unique<
|
||||
OffsetOrderedArray<std::string>>();
|
||||
else
|
||||
} else {
|
||||
pk2offset_ = std::make_unique<
|
||||
OffsetOrderedMap<std::string>>();
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported pk type");
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format(
|
||||
"unsupported pk type",
|
||||
fmt::underlying(field_meta.get_data_type())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -286,7 +297,10 @@ struct InsertRecord {
|
|||
field_id, field_meta.get_dim(), size_per_chunk);
|
||||
continue;
|
||||
} else {
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported vector type",
|
||||
fmt::underlying(
|
||||
field_meta.get_data_type())));
|
||||
}
|
||||
}
|
||||
switch (field_meta.get_data_type()) {
|
||||
|
@ -333,7 +347,10 @@ struct InsertRecord {
|
|||
// break;
|
||||
// }
|
||||
default: {
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported scalar type",
|
||||
fmt::underlying(
|
||||
field_meta.get_data_type())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -378,7 +395,9 @@ struct InsertRecord {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported primary key data type");
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported primary key data type",
|
||||
fmt::underlying(data_type)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -408,7 +427,10 @@ struct InsertRecord {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported primary key data type");
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format("unsupported primary key data type",
|
||||
fmt::underlying(data_type)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -318,7 +318,9 @@ ReduceHelper::GetSearchResultDataSlice(int slice_index) {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported primary key type");
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported primary key type {}",
|
||||
fmt::underlying(pk_type)));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -365,7 +367,10 @@ ReduceHelper::GetSearchResultDataSlice(int slice_index) {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported primary key type");
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format("unsupported primary key type {}",
|
||||
fmt::underlying(pk_type)));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -37,7 +37,7 @@ apply_parser(const YAML::Node& node, Func func) {
|
|||
results.emplace_back(func(element));
|
||||
}
|
||||
} else {
|
||||
PanicInfo("node should be scalar or sequence");
|
||||
PanicCodeInfo(ConfigInvalid, "node should be scalar or sequence");
|
||||
}
|
||||
return results;
|
||||
}
|
||||
|
@ -102,7 +102,7 @@ SegcoreConfig::parse_from(const std::string& config_path) {
|
|||
} catch (const std::exception& e) {
|
||||
std::string str =
|
||||
std::string("Invalid Yaml: ") + config_path + ", err: " + e.what();
|
||||
PanicInfo(str);
|
||||
PanicCodeInfo(ConfigInvalid, str);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -21,6 +21,7 @@
|
|||
#include "common/Consts.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/Types.h"
|
||||
#include "fmt/format.h"
|
||||
#include "nlohmann/json.hpp"
|
||||
#include "query/PlanNode.h"
|
||||
#include "query/SearchOnSealed.h"
|
||||
|
@ -328,7 +329,6 @@ std::unique_ptr<DataArray>
|
|||
SegmentGrowingImpl::bulk_subscript(FieldId field_id,
|
||||
const int64_t* seg_offsets,
|
||||
int64_t count) const {
|
||||
// TODO: support more types
|
||||
auto vec_ptr = insert_record_.get_field_data_base(field_id);
|
||||
auto& field_meta = schema_->operator[](field_id);
|
||||
if (field_meta.is_vector()) {
|
||||
|
@ -355,7 +355,7 @@ SegmentGrowingImpl::bulk_subscript(FieldId field_id,
|
|||
count,
|
||||
output.data());
|
||||
} else {
|
||||
PanicInfo("logical error");
|
||||
PanicCodeInfo(DataTypeInvalid, "logical error");
|
||||
}
|
||||
return CreateVectorDataArrayFrom(output.data(), count, field_meta);
|
||||
}
|
||||
|
@ -418,7 +418,10 @@ SegmentGrowingImpl::bulk_subscript(FieldId field_id,
|
|||
return CreateScalarDataArrayFrom(output.data(), count, field_meta);
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported type");
|
||||
PanicCodeInfo(
|
||||
DataTypeInvalid,
|
||||
fmt::format("unsupported type {}",
|
||||
fmt::underlying(field_meta.get_data_type())));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -496,7 +499,7 @@ SegmentGrowingImpl::bulk_subscript(SystemFieldType system_type,
|
|||
&this->insert_record_.row_ids_, seg_offsets, count, output);
|
||||
break;
|
||||
default:
|
||||
PanicInfo("unknown subscript fields");
|
||||
PanicCodeInfo(DataTypeInvalid, "unknown subscript fields");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -529,7 +532,9 @@ SegmentGrowingImpl::search_ids(const IdArray& id_array,
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported type");
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported type {}",
|
||||
fmt::underlying(data_type)));
|
||||
}
|
||||
}
|
||||
res_offsets.push_back(offset);
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
#include <cstdint>
|
||||
|
||||
#include "Utils.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/SystemProperty.h"
|
||||
#include "common/Tracer.h"
|
||||
#include "common/Types.h"
|
||||
|
@ -93,8 +94,9 @@ SegmentInternalInterface::Retrieve(const query::RetrievePlan* plan,
|
|||
output_data_size += get_field_avg_size(field_id) * result_rows;
|
||||
}
|
||||
if (output_data_size > limit_size) {
|
||||
throw std::runtime_error("query results exceed the limit size " +
|
||||
std::to_string(limit_size));
|
||||
throw SegcoreError(
|
||||
RetrieveError,
|
||||
fmt::format("query results exceed the limit size ", limit_size));
|
||||
}
|
||||
|
||||
if (plan->plan_node_->is_count_) {
|
||||
|
@ -159,7 +161,9 @@ SegmentInternalInterface::Retrieve(const query::RetrievePlan* plan,
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported data type");
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported datatype {}",
|
||||
field_meta.get_data_type()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -200,7 +204,7 @@ SegmentInternalInterface::get_field_avg_size(FieldId field_id) const {
|
|||
return sizeof(int64_t);
|
||||
}
|
||||
|
||||
throw std::runtime_error("unsupported system field id");
|
||||
throw SegcoreError(FieldIDInvalid, "unsupported system field id");
|
||||
}
|
||||
|
||||
auto schema = get_schema();
|
||||
|
|
|
@ -157,7 +157,9 @@ SegmentSealedImpl::LoadScalarIndex(const LoadIndexInfo& info) {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported primary key type");
|
||||
PanicCodeInfo(DataTypeInvalid,
|
||||
fmt::format("unsupported primary key type",
|
||||
field_meta.get_data_type()));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -14,6 +14,7 @@
|
|||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/LoadInfo.h"
|
||||
#include "segcore/load_field_data_c.h"
|
||||
|
||||
|
@ -24,13 +25,13 @@ NewLoadFieldDataInfo(CLoadFieldDataInfo* c_load_field_data_info) {
|
|||
*c_load_field_data_info = load_field_data_info.release();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
DeleteLoadFieldDataInfo(CLoadFieldDataInfo c_load_field_data_info) {
|
||||
auto info = (LoadFieldDataInfo*)c_load_field_data_info;
|
||||
auto info = static_cast<LoadFieldDataInfo*>(c_load_field_data_info);
|
||||
delete info;
|
||||
}
|
||||
|
||||
|
@ -39,10 +40,12 @@ AppendLoadFieldInfo(CLoadFieldDataInfo c_load_field_data_info,
|
|||
int64_t field_id,
|
||||
int64_t row_count) {
|
||||
try {
|
||||
auto load_field_data_info = (LoadFieldDataInfo*)c_load_field_data_info;
|
||||
auto load_field_data_info =
|
||||
static_cast<LoadFieldDataInfo*>(c_load_field_data_info);
|
||||
auto iter = load_field_data_info->field_infos.find(field_id);
|
||||
if (iter != load_field_data_info->field_infos.end()) {
|
||||
throw std::runtime_error("append same field info multi times");
|
||||
throw milvus::SegcoreError(milvus::FieldAlreadyExist,
|
||||
"append same field info multi times");
|
||||
}
|
||||
FieldBinlogInfo binlog_info;
|
||||
binlog_info.field_id = field_id;
|
||||
|
@ -50,7 +53,7 @@ AppendLoadFieldInfo(CLoadFieldDataInfo c_load_field_data_info,
|
|||
load_field_data_info->field_infos[field_id] = binlog_info;
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -59,24 +62,26 @@ AppendLoadFieldDataPath(CLoadFieldDataInfo c_load_field_data_info,
|
|||
int64_t field_id,
|
||||
const char* c_file_path) {
|
||||
try {
|
||||
auto load_field_data_info = (LoadFieldDataInfo*)c_load_field_data_info;
|
||||
auto load_field_data_info =
|
||||
static_cast<LoadFieldDataInfo*>(c_load_field_data_info);
|
||||
auto iter = load_field_data_info->field_infos.find(field_id);
|
||||
std::string file_path(c_file_path);
|
||||
if (iter == load_field_data_info->field_infos.end()) {
|
||||
throw std::runtime_error("please append field info first");
|
||||
throw milvus::SegcoreError(milvus::FieldIDInvalid,
|
||||
"please append field info first");
|
||||
}
|
||||
|
||||
std::string file_path(c_file_path);
|
||||
load_field_data_info->field_infos[field_id].insert_files.emplace_back(
|
||||
file_path);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
AppendMMapDirPath(CLoadFieldDataInfo c_load_field_data_info,
|
||||
const char* c_dir_path) {
|
||||
auto load_field_data_info = (LoadFieldDataInfo*)c_load_field_data_info;
|
||||
auto load_field_data_info =
|
||||
static_cast<LoadFieldDataInfo*>(c_load_field_data_info);
|
||||
load_field_data_info->mmap_dir_path = std::string(c_dir_path);
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
|
||||
#pragma once
|
||||
#include <string>
|
||||
#include "common/EasyAssert.h"
|
||||
|
||||
namespace milvus::segcore {
|
||||
|
||||
|
@ -18,7 +19,7 @@ struct Int64PKVisitor {
|
|||
template <typename T>
|
||||
int64_t
|
||||
operator()(T t) const {
|
||||
PanicInfo("invalid int64 pk value");
|
||||
PanicCodeInfo(Unsupported, "invalid int64 pk value");
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -32,7 +33,7 @@ struct StrPKVisitor {
|
|||
template <typename T>
|
||||
std::string
|
||||
operator()(T t) const {
|
||||
PanicInfo("invalid string pk value");
|
||||
PanicCodeInfo(Unsupported, "invalid string pk value");
|
||||
}
|
||||
};
|
||||
|
||||
|
|
|
@ -76,13 +76,13 @@ ParsePlaceholderGroup(CSearchPlan c_plan,
|
|||
int64_t
|
||||
GetNumOfQueries(CPlaceholderGroup placeholder_group) {
|
||||
auto res = milvus::query::GetNumOfQueries(
|
||||
(milvus::query::PlaceholderGroup*)placeholder_group);
|
||||
static_cast<milvus::query::PlaceholderGroup*>(placeholder_group));
|
||||
return res;
|
||||
}
|
||||
|
||||
int64_t
|
||||
GetTopK(CSearchPlan plan) {
|
||||
auto res = milvus::query::GetTopK((milvus::query::Plan*)plan);
|
||||
auto res = milvus::query::GetTopK(static_cast<milvus::query::Plan*>(plan));
|
||||
return res;
|
||||
}
|
||||
|
||||
|
@ -93,7 +93,7 @@ GetFieldID(CSearchPlan plan, int64_t* field_id) {
|
|||
*field_id = milvus::query::GetFieldID(p);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -115,14 +115,14 @@ SetMetricType(CSearchPlan plan, const char* metric_type) {
|
|||
|
||||
void
|
||||
DeleteSearchPlan(CSearchPlan cPlan) {
|
||||
auto plan = (milvus::query::Plan*)cPlan;
|
||||
auto plan = static_cast<milvus::query::Plan*>(cPlan);
|
||||
delete plan;
|
||||
}
|
||||
|
||||
void
|
||||
DeletePlaceholderGroup(CPlaceholderGroup cPlaceholder_group) {
|
||||
auto placeHolder_group =
|
||||
(milvus::query::PlaceholderGroup*)cPlaceholder_group;
|
||||
static_cast<milvus::query::PlaceholderGroup*>(cPlaceholder_group);
|
||||
delete placeHolder_group;
|
||||
}
|
||||
|
||||
|
@ -131,7 +131,7 @@ CreateRetrievePlanByExpr(CCollection c_col,
|
|||
const void* serialized_expr_plan,
|
||||
const int64_t size,
|
||||
CRetrievePlan* res_plan) {
|
||||
auto col = (milvus::segcore::Collection*)c_col;
|
||||
auto col = static_cast<milvus::segcore::Collection*>(c_col);
|
||||
|
||||
try {
|
||||
auto res = milvus::query::CreateRetrievePlanByExpr(
|
||||
|
@ -160,6 +160,6 @@ CreateRetrievePlanByExpr(CCollection c_col,
|
|||
|
||||
void
|
||||
DeleteRetrievePlan(CRetrievePlan c_plan) {
|
||||
auto plan = (milvus::query::RetrievePlan*)c_plan;
|
||||
auto plan = static_cast<milvus::query::RetrievePlan*>(c_plan);
|
||||
delete plan;
|
||||
}
|
||||
|
|
|
@ -45,7 +45,7 @@ ReduceSearchResultsAndFillData(CSearchResultDataBlobs* cSearchResultDataBlobs,
|
|||
*cSearchResultDataBlobs = reduce_helper.GetSearchResultDataBlobs();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -67,7 +67,7 @@ GetSearchResultDataBlob(CProto* searchResultDataBlob,
|
|||
} catch (std::exception& e) {
|
||||
searchResultDataBlob->proto_blob = nullptr;
|
||||
searchResultDataBlob->proto_size = 0;
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -91,7 +91,7 @@ Search(CSegmentInterface c_segment,
|
|||
milvus::tracer::CloseRootSpan();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -128,7 +128,7 @@ Retrieve(CSegmentInterface c_segment,
|
|||
span->End();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -190,7 +190,7 @@ Insert(CSegmentInterface c_segment,
|
|||
reserved_offset, size, row_ids, timestamps, insert_data.get());
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -201,7 +201,7 @@ PreInsert(CSegmentInterface c_segment, int64_t size, int64_t* offset) {
|
|||
*offset = segment->PreInsert(size);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -221,7 +221,7 @@ Delete(CSegmentInterface c_segment,
|
|||
segment->Delete(reserved_offset, size, pks.get(), timestamps);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -237,7 +237,7 @@ LoadFieldData(CSegmentInterface c_segment,
|
|||
segment->LoadFieldData(*load_info);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -277,7 +277,7 @@ LoadFieldRawData(CSegmentInterface c_segment,
|
|||
segment->LoadFieldData(milvus::FieldId(field_id), field_data_info);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -298,7 +298,7 @@ LoadDeletedRecord(CSegmentInterface c_segment,
|
|||
segment_interface->LoadDeletedRecord(load_info);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -316,7 +316,7 @@ UpdateSealedSegmentIndex(CSegmentInterface c_segment,
|
|||
segment->LoadIndex(*load_index_info);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -333,7 +333,7 @@ UpdateFieldRawDataSize(CSegmentInterface c_segment,
|
|||
milvus::FieldId(field_id), num_rows, field_data_size);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -348,7 +348,7 @@ DropFieldData(CSegmentInterface c_segment, int64_t field_id) {
|
|||
segment->DropFieldData(milvus::FieldId(field_id));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -363,6 +363,6 @@ DropSealedSegmentIndex(CSegmentInterface c_segment, int64_t field_id) {
|
|||
segment->DropIndex(milvus::FieldId(field_id));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::UnexpectedError, e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -86,14 +86,16 @@ DeserializeRemoteFileData(BinlogReaderPtr reader) {
|
|||
return index_data;
|
||||
}
|
||||
default:
|
||||
PanicInfo("unsupported event type");
|
||||
PanicCodeInfo(DataFormatBroken,
|
||||
fmt::format("unsupported event type {}",
|
||||
fmt::underlying(header.event_type_)));
|
||||
}
|
||||
}
|
||||
|
||||
// For now, no file header in file data
|
||||
std::unique_ptr<DataCodec>
|
||||
DeserializeLocalFileData(BinlogReaderPtr reader) {
|
||||
PanicInfo("not supported");
|
||||
PanicCodeInfo(NotImplemented, "not supported");
|
||||
}
|
||||
|
||||
std::unique_ptr<DataCodec>
|
||||
|
@ -109,7 +111,9 @@ DeserializeFileData(const std::shared_ptr<uint8_t[]> input_data,
|
|||
return DeserializeLocalFileData(binlog_reader);
|
||||
}
|
||||
default:
|
||||
PanicInfo("unsupported medium type");
|
||||
PanicCodeInfo(DataFormatBroken,
|
||||
fmt::format("unsupported medium type {}",
|
||||
fmt::underlying(medium_type)));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -25,7 +25,6 @@
|
|||
|
||||
#include "storage/DiskFileManagerImpl.h"
|
||||
#include "storage/LocalChunkManagerSingleton.h"
|
||||
#include "storage/Exception.h"
|
||||
#include "storage/IndexData.h"
|
||||
#include "storage/Util.h"
|
||||
#include "storage/ThreadPools.h"
|
||||
|
@ -350,10 +349,6 @@ DiskFileManagerImpl::IsExisted(const std::string& file) noexcept {
|
|||
LocalChunkManagerSingleton::GetInstance().GetChunkManager();
|
||||
try {
|
||||
isExist = local_chunk_manager->Exist(file);
|
||||
} catch (LocalChunkManagerException& e) {
|
||||
// LOG_SEGCORE_DEBUG_ << "LocalChunkManagerException:"
|
||||
// << e.what();
|
||||
return std::nullopt;
|
||||
} catch (std::exception& e) {
|
||||
// LOG_SEGCORE_DEBUG_ << "Exception:" << e.what();
|
||||
return std::nullopt;
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
// limitations under the License.
|
||||
|
||||
#include "storage/Event.h"
|
||||
#include "fmt/format.h"
|
||||
#include "nlohmann/json.hpp"
|
||||
#include "storage/PayloadReader.h"
|
||||
#include "storage/PayloadWriter.h"
|
||||
|
@ -46,8 +47,8 @@ GetEventHeaderSize(EventHeader& header) {
|
|||
}
|
||||
|
||||
int
|
||||
GetEventFixPartSize(EventType EventTypeCode) {
|
||||
switch (EventTypeCode) {
|
||||
GetEventFixPartSize(EventType event_type) {
|
||||
switch (event_type) {
|
||||
case EventType::DescriptorEvent: {
|
||||
DescriptorEventData data;
|
||||
return GetFixPartSize(data);
|
||||
|
@ -63,7 +64,9 @@ GetEventFixPartSize(EventType EventTypeCode) {
|
|||
return GetFixPartSize(data);
|
||||
}
|
||||
default:
|
||||
PanicInfo("unsupported event type");
|
||||
PanicCodeInfo(DataFormatBroken,
|
||||
fmt::format("unsupported event type {}",
|
||||
fmt::underlying(event_type)));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -1,179 +0,0 @@
|
|||
// Licensed to the LF AI & Data foundation under one
|
||||
// or more contributor license agreements. See the NOTICE file
|
||||
// distributed with this work for additional information
|
||||
// regarding copyright ownership. The ASF licenses this file
|
||||
// to you under the Apache License, Version 2.0 (the
|
||||
// "License"); you may not use this file except in compliance
|
||||
// with the License. You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <iostream>
|
||||
#include <stdexcept>
|
||||
#include <string>
|
||||
|
||||
namespace milvus::storage {
|
||||
|
||||
class NotImplementedException : public std::exception {
|
||||
public:
|
||||
explicit NotImplementedException(const std::string& msg)
|
||||
: std::exception(), exception_message_(msg) {
|
||||
}
|
||||
const char*
|
||||
what() const noexcept {
|
||||
return exception_message_.c_str();
|
||||
}
|
||||
virtual ~NotImplementedException() {
|
||||
}
|
||||
|
||||
private:
|
||||
std::string exception_message_;
|
||||
};
|
||||
|
||||
class NotSupportedDataTypeException : public std::exception {
|
||||
public:
|
||||
explicit NotSupportedDataTypeException(const std::string& msg)
|
||||
: std::exception(), exception_message_(msg) {
|
||||
}
|
||||
const char*
|
||||
what() const noexcept {
|
||||
return exception_message_.c_str();
|
||||
}
|
||||
virtual ~NotSupportedDataTypeException() {
|
||||
}
|
||||
|
||||
private:
|
||||
std::string exception_message_;
|
||||
};
|
||||
|
||||
class LocalChunkManagerException : public std::runtime_error {
|
||||
public:
|
||||
explicit LocalChunkManagerException(const std::string& msg)
|
||||
: std::runtime_error(msg) {
|
||||
}
|
||||
virtual ~LocalChunkManagerException() {
|
||||
}
|
||||
};
|
||||
|
||||
class InvalidPathException : public LocalChunkManagerException {
|
||||
public:
|
||||
explicit InvalidPathException(const std::string& msg)
|
||||
: LocalChunkManagerException(msg) {
|
||||
}
|
||||
virtual ~InvalidPathException() {
|
||||
}
|
||||
};
|
||||
|
||||
class OpenFileException : public LocalChunkManagerException {
|
||||
public:
|
||||
explicit OpenFileException(const std::string& msg)
|
||||
: LocalChunkManagerException(msg) {
|
||||
}
|
||||
virtual ~OpenFileException() {
|
||||
}
|
||||
};
|
||||
|
||||
class CreateFileException : public LocalChunkManagerException {
|
||||
public:
|
||||
explicit CreateFileException(const std::string& msg)
|
||||
: LocalChunkManagerException(msg) {
|
||||
}
|
||||
virtual ~CreateFileException() {
|
||||
}
|
||||
};
|
||||
|
||||
class ReadFileException : public LocalChunkManagerException {
|
||||
public:
|
||||
explicit ReadFileException(const std::string& msg)
|
||||
: LocalChunkManagerException(msg) {
|
||||
}
|
||||
virtual ~ReadFileException() {
|
||||
}
|
||||
};
|
||||
|
||||
class WriteFileException : public LocalChunkManagerException {
|
||||
public:
|
||||
explicit WriteFileException(const std::string& msg)
|
||||
: LocalChunkManagerException(msg) {
|
||||
}
|
||||
virtual ~WriteFileException() {
|
||||
}
|
||||
};
|
||||
|
||||
class PathAlreadyExistException : public LocalChunkManagerException {
|
||||
public:
|
||||
explicit PathAlreadyExistException(const std::string& msg)
|
||||
: LocalChunkManagerException(msg) {
|
||||
}
|
||||
virtual ~PathAlreadyExistException() {
|
||||
}
|
||||
};
|
||||
|
||||
class DirNotExistException : public LocalChunkManagerException {
|
||||
public:
|
||||
explicit DirNotExistException(const std::string& msg)
|
||||
: LocalChunkManagerException(msg) {
|
||||
}
|
||||
virtual ~DirNotExistException() {
|
||||
}
|
||||
};
|
||||
|
||||
class MinioException : public std::runtime_error {
|
||||
public:
|
||||
explicit MinioException(const std::string& msg) : std::runtime_error(msg) {
|
||||
}
|
||||
virtual ~MinioException() {
|
||||
}
|
||||
};
|
||||
|
||||
class InvalidBucketNameException : public MinioException {
|
||||
public:
|
||||
explicit InvalidBucketNameException(const std::string& msg)
|
||||
: MinioException(msg) {
|
||||
}
|
||||
virtual ~InvalidBucketNameException() {
|
||||
}
|
||||
};
|
||||
|
||||
class ObjectNotExistException : public MinioException {
|
||||
public:
|
||||
explicit ObjectNotExistException(const std::string& msg)
|
||||
: MinioException(msg) {
|
||||
}
|
||||
virtual ~ObjectNotExistException() {
|
||||
}
|
||||
};
|
||||
class S3ErrorException : public MinioException {
|
||||
public:
|
||||
explicit S3ErrorException(const std::string& msg) : MinioException(msg) {
|
||||
}
|
||||
virtual ~S3ErrorException() {
|
||||
}
|
||||
};
|
||||
|
||||
class DiskANNFileManagerException : public std::runtime_error {
|
||||
public:
|
||||
explicit DiskANNFileManagerException(const std::string& msg)
|
||||
: std::runtime_error(msg) {
|
||||
}
|
||||
virtual ~DiskANNFileManagerException() {
|
||||
}
|
||||
};
|
||||
|
||||
class ArrowException : public std::runtime_error {
|
||||
public:
|
||||
explicit ArrowException(const std::string& msg) : std::runtime_error(msg) {
|
||||
}
|
||||
virtual ~ArrowException() {
|
||||
}
|
||||
};
|
||||
|
||||
} // namespace milvus::storage
|
|
@ -16,6 +16,7 @@
|
|||
|
||||
#include "storage/FieldData.h"
|
||||
#include "arrow/array/array_binary.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/Json.h"
|
||||
#include "simdjson/padded_string.h"
|
||||
|
||||
|
@ -140,9 +141,10 @@ FieldDataImpl<Type, is_scalar>::FillFieldData(
|
|||
return FillFieldData(array_info.first, array_info.second);
|
||||
}
|
||||
default: {
|
||||
throw NotSupportedDataTypeException(GetName() + "::FillFieldData" +
|
||||
" not support data type " +
|
||||
datatype_name(data_type_));
|
||||
throw SegcoreError(DataTypeInvalid,
|
||||
GetName() + "::FillFieldData" +
|
||||
" not support data type " +
|
||||
datatype_name(data_type_));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -164,4 +166,4 @@ template class FieldDataImpl<int8_t, false>;
|
|||
template class FieldDataImpl<float, false>;
|
||||
template class FieldDataImpl<float16, false>;
|
||||
|
||||
} // namespace milvus::storage
|
||||
} // namespace milvus::storage
|
||||
|
|
|
@ -31,7 +31,6 @@
|
|||
#include "common/Utils.h"
|
||||
#include "common/VectorTrait.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "storage/Exception.h"
|
||||
|
||||
namespace milvus::storage {
|
||||
|
||||
|
@ -104,8 +103,8 @@ class FieldDataImpl : public FieldDataBase {
|
|||
DataType data_type,
|
||||
int64_t buffered_num_rows = 0)
|
||||
: FieldDataBase(data_type),
|
||||
dim_(is_scalar ? 1 : dim),
|
||||
num_rows_(buffered_num_rows) {
|
||||
num_rows_(buffered_num_rows),
|
||||
dim_(is_scalar ? 1 : dim) {
|
||||
field_data_.resize(num_rows_ * dim_);
|
||||
}
|
||||
|
||||
|
|
|
@ -29,27 +29,15 @@
|
|||
namespace milvus::storage {
|
||||
|
||||
#define FILEMANAGER_TRY try {
|
||||
#define FILEMANAGER_CATCH \
|
||||
} \
|
||||
catch (LocalChunkManagerException & e) { \
|
||||
LOG_SEGCORE_ERROR_ << "LocalChunkManagerException:" << e.what(); \
|
||||
return false; \
|
||||
} \
|
||||
catch (MinioException & e) { \
|
||||
LOG_SEGCORE_ERROR_ << "milvus::storage::MinioException:" << e.what(); \
|
||||
return false; \
|
||||
} \
|
||||
catch (DiskANNFileManagerException & e) { \
|
||||
LOG_SEGCORE_ERROR_ << "milvus::storage::DiskANNFileManagerException:" \
|
||||
<< e.what(); \
|
||||
return false; \
|
||||
} \
|
||||
catch (ArrowException & e) { \
|
||||
LOG_SEGCORE_ERROR_ << "milvus::storage::ArrowException:" << e.what(); \
|
||||
return false; \
|
||||
} \
|
||||
catch (std::exception & e) { \
|
||||
LOG_SEGCORE_ERROR_ << "Exception:" << e.what(); \
|
||||
#define FILEMANAGER_CATCH \
|
||||
} \
|
||||
catch (SegcoreError & e) { \
|
||||
LOG_SEGCORE_ERROR_ << "SegcoreError: code " << e.get_error_code() \
|
||||
<< ", " << e.what(); \
|
||||
return false; \
|
||||
} \
|
||||
catch (std::exception & e) { \
|
||||
LOG_SEGCORE_ERROR_ << "Exception:" << e.what(); \
|
||||
return false;
|
||||
#define FILEMANAGER_END }
|
||||
|
||||
|
|
|
@ -41,7 +41,9 @@ IndexData::Serialize(StorageType medium) {
|
|||
case StorageType::LocalDisk:
|
||||
return serialize_to_local_file();
|
||||
default:
|
||||
PanicInfo("unsupported medium type");
|
||||
PanicCodeInfo(DataFormatBroken,
|
||||
fmt::format("unsupported medium type {}",
|
||||
fmt::underlying(medium)));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -37,7 +37,9 @@ InsertData::Serialize(StorageType medium) {
|
|||
case StorageType::LocalDisk:
|
||||
return serialize_to_local_file();
|
||||
default:
|
||||
PanicInfo("unsupported medium type");
|
||||
PanicCodeInfo(DataFormatBroken,
|
||||
fmt::format("unsupported medium type {}",
|
||||
fmt::underlying(medium)));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -21,13 +21,13 @@
#include <fstream>
#include <sstream>

#include "Exception.h"
#include "common/EasyAssert.h"

#define THROWLOCALERROR(FUNCTION) \
#define THROWLOCALERROR(code, FUNCTION) \
    do { \
        std::stringstream err_msg; \
        err_msg << "Error:" << #FUNCTION << ":" << err.message(); \
        throw LocalChunkManagerException(err_msg.str()); \
        throw SegcoreError(code, err_msg.str()); \
    } while (0)

namespace milvus::storage {
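After this change every call site passes the ErrorCode explicitly. For example, THROWLOCALERROR(FileReadFailed, Exist) in the hunk below expands roughly to the block sketched here; note that it relies on a boost::system::error_code named err already being in scope:

do {
    std::stringstream err_msg;
    // #FUNCTION stringizes the second argument, so the message starts
    // with "Error:Exist:" followed by the boost error description.
    err_msg << "Error:" << "Exist" << ":" << err.message();
    throw SegcoreError(FileReadFailed, err_msg.str());
} while (0);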
@@ -38,7 +38,7 @@ LocalChunkManager::Exist(const std::string& filepath) {
    boost::system::error_code err;
    bool isExist = boost::filesystem::exists(absPath, err);
    if (err && err.value() != boost::system::errc::no_such_file_or_directory) {
        THROWLOCALERROR(Exist);
        THROWLOCALERROR(FileReadFailed, Exist);
    }
    return isExist;
}

@@ -48,12 +48,13 @@ LocalChunkManager::Size(const std::string& filepath) {
    boost::filesystem::path absPath(filepath);

    if (!Exist(filepath)) {
        throw InvalidPathException("invalid local path:" + absPath.string());
        throw SegcoreError(PathNotExist,
                           "invalid local path:" + absPath.string());
    }
    boost::system::error_code err;
    int64_t size = boost::filesystem::file_size(absPath, err);
    if (err) {
        THROWLOCALERROR(FileSize);
        THROWLOCALERROR(FileReadFailed, FileSize);
    }
    return size;
}

@@ -64,7 +65,7 @@ LocalChunkManager::Remove(const std::string& filepath) {
    boost::system::error_code err;
    boost::filesystem::remove(absPath, err);
    if (err) {
        THROWLOCALERROR(Remove);
        THROWLOCALERROR(FileWriteFailed, Remove);
    }
}

@@ -84,7 +85,7 @@ LocalChunkManager::Read(const std::string& filepath,
        std::stringstream err_msg;
        err_msg << "Error: open local file '" << filepath << " failed, "
                << strerror(errno);
        throw OpenFileException(err_msg.str());
        throw SegcoreError(FileOpenFailed, err_msg.str());
    }

    infile.seekg(offset, std::ios::beg);

@@ -93,7 +94,7 @@ LocalChunkManager::Read(const std::string& filepath,
            std::stringstream err_msg;
            err_msg << "Error: read local file '" << filepath << " failed, "
                    << strerror(errno);
            throw ReadFileException(err_msg.str());
            throw SegcoreError(FileReadFailed, err_msg.str());
        }
    }
    return infile.gcount();

@@ -114,13 +115,13 @@ LocalChunkManager::Write(const std::string& absPathStr,
        std::stringstream err_msg;
        err_msg << "Error: open local file '" << absPathStr << " failed, "
                << strerror(errno);
        throw OpenFileException(err_msg.str());
        throw SegcoreError(FileOpenFailed, err_msg.str());
    }
    if (!outfile.write(reinterpret_cast<char*>(buf), size)) {
        std::stringstream err_msg;
        err_msg << "Error: write local file '" << absPathStr << " failed, "
                << strerror(errno);
        throw WriteFileException(err_msg.str());
        throw SegcoreError(FileWriteFailed, err_msg.str());
    }
}

@@ -142,7 +143,7 @@ LocalChunkManager::Write(const std::string& absPathStr,
        std::stringstream err_msg;
        err_msg << "Error: open local file '" << absPathStr << " failed, "
                << strerror(errno);
        throw OpenFileException(err_msg.str());
        throw SegcoreError(FileOpenFailed, err_msg.str());
    }

    outfile.seekp(offset, std::ios::beg);

@@ -150,14 +151,14 @@ LocalChunkManager::Write(const std::string& absPathStr,
        std::stringstream err_msg;
        err_msg << "Error: write local file '" << absPathStr << " failed, "
                << strerror(errno);
        throw WriteFileException(err_msg.str());
        throw SegcoreError(FileWriteFailed, err_msg.str());
    }
}

std::vector<std::string>
LocalChunkManager::ListWithPrefix(const std::string& filepath) {
    throw NotImplementedException(GetName() + "::ListWithPrefix" +
                                  " not implement now");
    throw SegcoreError(NotImplemented,
                       GetName() + "::ListWithPrefix" + " not implement now");
}

bool

@@ -173,7 +174,7 @@ LocalChunkManager::CreateFile(const std::string& filepath) {
        std::stringstream err_msg;
        err_msg << "Error: create new local file '" << absPathStr << " failed, "
                << strerror(errno);
        throw CreateFileException(err_msg.str());
        throw SegcoreError(FileCreateFailed, err_msg.str());
    }
    file.close();
    return true;

@@ -185,7 +186,7 @@ LocalChunkManager::DirExist(const std::string& dir) {
    boost::system::error_code err;
    bool isExist = boost::filesystem::exists(dirPath, err);
    if (err && err.value() != boost::system::errc::no_such_file_or_directory) {
        THROWLOCALERROR(DirExist);
        THROWLOCALERROR(FileReadFailed, DirExist);
    }
    return isExist;
}

@@ -194,12 +195,12 @@ void
LocalChunkManager::CreateDir(const std::string& dir) {
    bool isExist = DirExist(dir);
    if (isExist) {
        throw PathAlreadyExistException("dir:" + dir + " already exists");
        throw SegcoreError(PathAlreadyExist, "dir:" + dir + " already exists");
    }
    boost::filesystem::path dirPath(dir);
    auto create_success = boost::filesystem::create_directories(dirPath);
    if (!create_success) {
        throw CreateFileException("create dir failed" + dir);
        throw SegcoreError(FileCreateFailed, "create dir failed" + dir);
    }
}

@@ -209,7 +210,7 @@ LocalChunkManager::RemoveDir(const std::string& dir) {
    boost::system::error_code err;
    boost::filesystem::remove_all(dirPath, err);
    if (err) {
        THROWLOCALERROR(RemoveDir);
        THROWLOCALERROR(FileCreateFailed, RemoveDir);
    }
}

@@ -218,7 +219,7 @@ LocalChunkManager::GetSizeOfDir(const std::string& dir) {
    boost::filesystem::path dirPath(dir);
    bool is_dir = boost::filesystem::is_directory(dirPath);
    if (!is_dir) {
        throw DirNotExistException("dir:" + dir + " not exists");
        throw SegcoreError(PathNotExist, "dir:" + dir + " not exists");
    }

    using boost::filesystem::directory_entry;
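With the dedicated exception types gone, callers differentiate failures through the ErrorCode carried by SegcoreError. A hedged sketch of the new calling pattern; the manager instance and path are placeholders:

// Sketch only: branch on the error code instead of on the exception type.
try {
    auto size = lcm.Size("/tmp/local_data/segment_0");  // placeholder path
} catch (milvus::SegcoreError& e) {
    if (e.get_error_code() == milvus::ErrorCode::PathNotExist) {
        // the file was never written; treat it as empty
    } else {
        throw;  // anything else is unexpected, let it propagate
    }
}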
@@ -43,7 +43,7 @@
            << "[errcode:" << int(err.GetResponseCode()) \
            << ", exception:" << err.GetExceptionName() \
            << ", errmessage:" << err.GetMessage() << "]"; \
        throw S3ErrorException(err_msg.str()); \
        throw SegcoreError(S3Error, err_msg.str()); \
    } while (0)

#define S3NoSuchBucket "NoSuchBucket"

@@ -319,7 +319,7 @@ MinioChunkManager::Read(const std::string& filepath, void* buf, uint64_t size) {
        std::stringstream err_msg;
        err_msg << "object('" << default_bucket_name_ << "', " << filepath
                << "') not exists";
        throw ObjectNotExistException(err_msg.str());
        throw SegcoreError(ObjectNotExist, err_msg.str());
    }
    return GetObjectBuffer(default_bucket_name_, filepath, buf, size);
}

@@ -354,7 +354,7 @@ MinioChunkManager::BucketExists(const std::string& bucket_name) {
        std::stringstream err_msg;
        err_msg << "Error: BucketExists: "
                << error.GetExceptionName() + " - " + error.GetMessage();
        throw S3ErrorException(err_msg.str());
        throw SegcoreError(S3Error, err_msg.str());
    }
    return false;
}

@@ -421,7 +421,7 @@ MinioChunkManager::ObjectExists(const std::string& bucket_name,
    if (!err.GetExceptionName().empty()) {
        std::stringstream err_msg;
        err_msg << "Error: ObjectExists: " << err.GetMessage();
        throw S3ErrorException(err_msg.str());
        throw SegcoreError(S3Error, err_msg.str());
    }
    return false;
}
@@ -37,8 +37,8 @@
#include <string>
#include <vector>

#include "common/EasyAssert.h"
#include "storage/ChunkManager.h"
#include "storage/Exception.h"
#include "storage/Types.h"

namespace milvus::storage {

@@ -89,8 +89,8 @@ class MinioChunkManager : public ChunkManager {
                      uint64_t offset,
                      void* buf,
                      uint64_t len) {
        throw NotImplementedException(GetName() +
                                      "Read with offset not implement");
        throw SegcoreError(NotImplemented,
                           GetName() + "Read with offset not implement");
    }

    virtual void

@@ -98,8 +98,8 @@ class MinioChunkManager : public ChunkManager {
                       uint64_t offset,
                       void* buf,
                       uint64_t len) {
        throw NotImplementedException(GetName() +
                                      "Write with offset not implement");
        throw SegcoreError(NotImplemented,
                           GetName() + "Write with offset not implement");
    }

    virtual uint64_t

@@ -242,9 +242,10 @@ class GoogleHttpClientFactory : public Aws::Http::HttpClientFactory {
        request->SetResponseStreamFactory(streamFactory);
        auto auth_header = credentials_->AuthorizationHeader();
        if (!auth_header.ok()) {
            throw std::runtime_error(
                "get authorization failed, errcode:" +
                StatusCodeToString(auth_header.status().code()));
            throw SegcoreError(
                S3Error,
                fmt::format("get authorization failed, errcode: {}",
                            StatusCodeToString(auth_header.status().code())));
        }
        request->SetHeaderValue(auth_header->first.c_str(),
                                auth_header->second.c_str());
@@ -20,6 +20,7 @@
#include "arrow/type_fwd.h"
#include "common/EasyAssert.h"
#include "common/Consts.h"
#include "fmt/format.h"
#include "storage/FieldData.h"
#include "storage/FieldDataInterface.h"
#include "storage/ThreadPools.h"

@@ -128,7 +129,9 @@ AddPayloadToArrowBuilder(std::shared_ptr<arrow::ArrayBuilder> builder,
            break;
        }
        default: {
            PanicInfo("unsupported data type");
            PanicCodeInfo(DataTypeInvalid,
                          fmt::format("unsupported data type {}",
                                      fmt::underlying(data_type)));
        }
    }
}

@@ -198,7 +201,9 @@ CreateArrowBuilder(DataType data_type) {
            return std::make_shared<arrow::BinaryBuilder>();
        }
        default: {
            PanicInfo("unsupported numeric data type");
            PanicCodeInfo(DataTypeInvalid,
                          fmt::format("unsupported numeric data type {}",
                                      fmt::underlying(data_type)));
        }
    }
}

@@ -222,7 +227,9 @@ CreateArrowBuilder(DataType data_type, int dim) {
                arrow::fixed_size_binary(dim * sizeof(float16)));
        }
        default: {
            PanicInfo("unsupported vector data type");
            PanicCodeInfo(DataTypeInvalid,
                          fmt::format("unsupported vector data type {}",
                                      fmt::underlying(data_type)));
        }
    }
}

@@ -260,7 +267,9 @@ CreateArrowSchema(DataType data_type) {
            return arrow::schema({arrow::field("val", arrow::binary())});
        }
        default: {
            PanicInfo("unsupported numeric data type");
            PanicCodeInfo(DataTypeInvalid,
                          fmt::format("unsupported numeric data type {}",
                                      fmt::underlying(data_type)));
        }
    }
}

@@ -284,7 +293,9 @@ CreateArrowSchema(DataType data_type, int dim) {
                "val", arrow::fixed_size_binary(dim * sizeof(float16)))});
        }
        default: {
            PanicInfo("unsupported vector data type");
            PanicCodeInfo(DataTypeInvalid,
                          fmt::format("unsupported vector data type {}",
                                      fmt::underlying(data_type)));
        }
    }
}

@@ -303,7 +314,9 @@ GetDimensionFromFileMetaData(const parquet::ColumnDescriptor* schema,
            return schema->type_length() / sizeof(float16);
        }
        default:
            PanicInfo("unsupported data type");
            PanicCodeInfo(DataTypeInvalid,
                          fmt::format("unsupported data type {}",
                                      fmt::underlying(data_type)));
    }
}

@@ -328,7 +341,9 @@ GetDimensionFromArrowArray(std::shared_ptr<arrow::Array> data,
            return array->byte_width() * 8;
        }
        default:
            PanicInfo("unsupported data type");
            PanicCodeInfo(DataTypeInvalid,
                          fmt::format("unsupported data type {}",
                                      fmt::underlying(data_type)));
    }
}

@@ -527,7 +542,10 @@ CreateChunkManager(const StorageConfig& storage_config) {
            return std::make_shared<MinioChunkManager>(storage_config);
        }
        default: {
            PanicInfo("unsupported");
            PanicCodeInfo(
                ConfigInvalid,
                fmt::format("unsupported storage_config.storage_type {}",
                            fmt::underlying(storage_type)));
        }
    }
}

@@ -578,7 +596,8 @@ CreateFieldData(const DataType& type, int64_t dim, int64_t total_num_rows) {
            return std::make_shared<FieldData<Float16Vector>>(
                dim, type, total_num_rows);
        default:
            throw NotSupportedDataTypeException(
            throw SegcoreError(
                DataTypeInvalid,
                "CreateFieldData not support data type " + datatype_name(type));
    }
}
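PanicCodeInfo pairs an ErrorCode with the formatted message and, presumably, ends up throwing a SegcoreError just like the explicit throws above; the exact macro body lives in EasyAssert.h and may differ from this hedged sketch:

// Hedged sketch of what a PanicCodeInfo-style helper boils down to;
// the actual Milvus macro also records file and line via EasyAssertInfo.
[[noreturn]] inline void
PanicWithCode(milvus::ErrorCode code, const std::string& msg) {
    throw milvus::SegcoreError(code, msg);
}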
@@ -16,6 +16,7 @@

#include <mutex>

#include "common/EasyAssert.h"
#include "storage/parquet_c.h"
#include "storage/PayloadReader.h"
#include "storage/PayloadWriter.h"

@@ -49,8 +50,7 @@ AddValuesToPayload(CPayloadWriter payloadWriter, const Payload& info) {
        p->add_payload(info);
        return milvus::SuccessCStatus();
    } catch (std::exception& e) {
        return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
                                      e.what());
        return milvus::FailureCStatus(&e);
    }
}
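This pattern repeats through the rest of parquet_c.cpp and storage_c.cpp: every handler now forwards the exception pointer instead of hard-coding UnexpectedError, which lets the helper recover the real code when the caught exception is a SegcoreError. A plausible sketch of such an overload, not the actual Milvus implementation:

#include <cstring>   // strdup

// Hedged sketch: dispatch on the dynamic type of the caught exception.
// CStatus is assumed to carry {error_code, error_msg} as in common/type_c.h.
CStatus
FailureCStatus(std::exception* ex) {
    if (auto* err = dynamic_cast<milvus::SegcoreError*>(ex)) {
        return CStatus{static_cast<int>(err->get_error_code()),
                       strdup(err->what())};
    }
    return CStatus{static_cast<int>(milvus::ErrorCode::UnexpectedError),
                   strdup(ex->what())};
}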
@ -117,8 +117,7 @@ AddOneStringToPayload(CPayloadWriter payloadWriter, char* cstr, int str_size) {
|
|||
p->add_one_string_payload(cstr, str_size);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -129,8 +128,7 @@ AddOneArrayToPayload(CPayloadWriter payloadWriter, uint8_t* data, int length) {
|
|||
p->add_one_binary_payload(data, length);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -141,8 +139,7 @@ AddOneJSONToPayload(CPayloadWriter payloadWriter, uint8_t* data, int length) {
|
|||
p->add_one_binary_payload(data, length);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -158,8 +155,7 @@ AddBinaryVectorToPayload(CPayloadWriter payloadWriter,
|
|||
p->add_payload(raw_data_info);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -177,8 +173,7 @@ AddFloatVectorToPayload(CPayloadWriter payloadWriter,
|
|||
p->add_payload(raw_data_info);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -189,8 +184,7 @@ FinishPayloadWriter(CPayloadWriter payloadWriter) {
|
|||
p->finish();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -246,9 +240,8 @@ NewPayloadReader(int columnType,
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
return milvus::FailureCStatus(
|
||||
milvus::ErrorCodeEnum::UnexpectedError,
|
||||
"unsupported data type");
|
||||
return milvus::FailureCStatus(milvus::DataTypeInvalid,
|
||||
"unsupported data type");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -257,8 +250,7 @@ NewPayloadReader(int columnType,
|
|||
*c_reader = (CPayloadReader)(p.release());
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -270,8 +262,7 @@ GetBoolFromPayload(CPayloadReader payloadReader, int idx, bool* value) {
|
|||
*value = *reinterpret_cast<const bool*>(field_data->RawValue(idx));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -285,8 +276,7 @@ GetInt8FromPayload(CPayloadReader payloadReader, int8_t** values, int* length) {
|
|||
reinterpret_cast<int8_t*>(const_cast<void*>(field_data->Data()));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -302,8 +292,7 @@ GetInt16FromPayload(CPayloadReader payloadReader,
|
|||
reinterpret_cast<int16_t*>(const_cast<void*>(field_data->Data()));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -319,8 +308,7 @@ GetInt32FromPayload(CPayloadReader payloadReader,
|
|||
reinterpret_cast<int32_t*>(const_cast<void*>(field_data->Data()));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -336,8 +324,7 @@ GetInt64FromPayload(CPayloadReader payloadReader,
|
|||
reinterpret_cast<int64_t*>(const_cast<void*>(field_data->Data()));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -351,8 +338,7 @@ GetFloatFromPayload(CPayloadReader payloadReader, float** values, int* length) {
|
|||
reinterpret_cast<float*>(const_cast<void*>(field_data->Data()));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -368,8 +354,7 @@ GetDoubleFromPayload(CPayloadReader payloadReader,
|
|||
reinterpret_cast<double*>(const_cast<void*>(field_data->Data()));
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -386,8 +371,7 @@ GetOneStringFromPayload(CPayloadReader payloadReader,
|
|||
*str_size = field_data->Size(idx);
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -404,8 +388,7 @@ GetBinaryVectorFromPayload(CPayloadReader payloadReader,
|
|||
*length = field_data->get_num_rows();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -422,8 +405,7 @@ GetFloatVectorFromPayload(CPayloadReader payloadReader,
|
|||
*length = field_data->get_num_rows();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -445,7 +427,6 @@ ReleasePayloadReader(CPayloadReader payloadReader) {
|
|||
milvus::storage::ReleaseArrowUnused();
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -32,8 +32,7 @@ GetLocalUsedSize(const char* c_dir, int64_t* size) {
|
|||
}
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -45,8 +44,7 @@ InitLocalChunkManagerSingleton(const char* c_path) {
|
|||
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -75,8 +73,7 @@ InitRemoteChunkManagerSingleton(CStorageConfig c_storage_config) {
|
|||
|
||||
return milvus::SuccessCStatus();
|
||||
} catch (std::exception& e) {
|
||||
return milvus::FailureCStatus(milvus::ErrorCodeEnum::UnexpectedError,
|
||||
e.what());
|
||||
return milvus::FailureCStatus(&e);
|
||||
}
|
||||
}
|
||||
|
||||
|
File diff suppressed because it is too large
@ -1,105 +0,0 @@
|
|||
#-------------------------------------------------------------------------------
|
||||
# Copyright (C) 2019-2020 Zilliz. All rights reserved.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
|
||||
# with the License. You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software distributed under the License
|
||||
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
|
||||
# or implied. See the License for the specific language governing permissions and limitations under the License.
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
if ( DEFINED ENV{KNOWHERE_LIBUNWIND_URL} )
|
||||
set( LIBUNWIND_SOURCE_URL "$ENV{KNOWHERE_LIBUNWIND_URL}" )
|
||||
else ()
|
||||
set( LIBUNWIND_SOURCE_URL
|
||||
"https://github.com/libunwind/libunwind/releases/download/v${LIBUNWIND_VERSION}/libunwind-${LIBUNWIND_VERSION}.tar.gz" )
|
||||
endif ()
|
||||
|
||||
if ( DEFINED ENV{KNOWHERE_GPERFTOOLS_URL} )
|
||||
set( GPERFTOOLS_SOURCE_URL "$ENV{KNOWHERE_GPERFTOOLS_URL}" )
|
||||
else ()
|
||||
set( GPERFTOOLS_SOURCE_URL
|
||||
"https://github.com/gperftools/gperftools/releases/download/gperftools-${GPERFTOOLS_VERSION}/gperftools-${GPERFTOOLS_VERSION}.tar.gz" )
|
||||
endif ()
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# libunwind
|
||||
|
||||
macro( build_libunwind )
|
||||
message( STATUS "Building libunwind-${LIBUNWIND_VERSION} from source" )
|
||||
|
||||
set( LIBUNWIND_PREFIX ${CMAKE_CURRENT_BINARY_DIR}/libunwind)
|
||||
ExternalProject_Add(
|
||||
libunwind_ep
|
||||
DOWNLOAD_DIR ${THIRDPARTY_DOWNLOAD_PATH}
|
||||
INSTALL_DIR ${CMAKE_CURRENT_BINARY_DIR}/libunwind
|
||||
URL ${LIBUNWIND_SOURCE_URL}
|
||||
URL_MD5 f625b6a98ac1976116c71708a73dc44a
|
||||
CONFIGURE_COMMAND <SOURCE_DIR>/configure
|
||||
"--prefix=<INSTALL_DIR>"
|
||||
"--quiet"
|
||||
"--disable-tests"
|
||||
"cc=${EP_C_COMPILER}"
|
||||
"cxx=${EP_CXX_COMPILER}"
|
||||
BUILD_COMMAND ${MAKE} ${MAKE_BUILD_ARGS}
|
||||
INSTALL_COMMAND ${MAKE} install
|
||||
${EP_LOG_OPTIONS} )
|
||||
|
||||
ExternalProject_Get_Property( libunwind_ep INSTALL_DIR )
|
||||
file(MAKE_DIRECTORY ${INSTALL_DIR}/include)
|
||||
add_library( libunwind SHARED IMPORTED )
|
||||
set_target_properties(
|
||||
libunwind PROPERTIES
|
||||
IMPORTED_GLOBAL TRUE
|
||||
IMPORTED_LOCATION "${INSTALL_DIR}/lib/libunwind.so"
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${INSTALL_DIR}/include" )
|
||||
|
||||
add_dependencies( libunwind libunwind_ep )
|
||||
endmacro()
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------
|
||||
# gperftools
|
||||
|
||||
macro( build_gperftools )
|
||||
message( STATUS "Building gperftools-${GPERFTOOLS_VERSION} from source" )
|
||||
|
||||
ExternalProject_Add(
|
||||
gperftools_ep
|
||||
DEPENDS libunwind_ep
|
||||
DOWNLOAD_DIR ${CMAKE_BINARY_DIR}/3rdparty_download/download
|
||||
INSTALL_DIR ${CMAKE_CURRENT_BINARY_DIR}/gperftools
|
||||
URL ${GPERFTOOLS_SOURCE_URL}
|
||||
URL_MD5 cb21f2ebe71bbc8d5ad101b310be980a
|
||||
CONFIGURE_COMMAND <SOURCE_DIR>/configure
|
||||
"--prefix=<INSTALL_DIR>"
|
||||
"--quiet"
|
||||
"cc=${EP_C_COMPILER}"
|
||||
"cxx=${EP_CXX_COMPILER}"
|
||||
BUILD_COMMAND ${MAKE} ${MAKE_BUILD_ARGS}
|
||||
INSTALL_COMMAND ${MAKE} install
|
||||
${EP_LOG_OPTIONS} )
|
||||
|
||||
ExternalProject_Get_Property( gperftools_ep INSTALL_DIR )
|
||||
file(MAKE_DIRECTORY ${INSTALL_DIR}/include)
|
||||
# libprofiler.so
|
||||
add_library( gperftools SHARED IMPORTED )
|
||||
set_target_properties( gperftools
|
||||
PROPERTIES
|
||||
IMPORTED_GLOBAL TRUE
|
||||
IMPORTED_LOCATION "${INSTALL_DIR}/lib/libtcmalloc_and_profiler.so"
|
||||
INTERFACE_INCLUDE_DIRECTORIES "${INSTALL_DIR}/include"
|
||||
INTERFACE_LINK_LIBRARIES libunwind
|
||||
)
|
||||
add_dependencies( gperftools gperftools_ep )
|
||||
endmacro()
|
||||
|
||||
|
||||
build_libunwind()
|
||||
build_gperftools()
|
||||
get_target_property( GPERFTOOLS_LIB gperftools LOCATION )
|
||||
install(FILES ${GPERFTOOLS_LIB} DESTINATION ${CMAKE_INSTALL_PREFIX})
|
||||
|
|
@@ -56,7 +56,7 @@ Distances(const float* base,
        }
        return res;
    } else {
        PanicInfo("invalid metric type");
        PanicCodeInfo(MetricTypeInvalid, "invalid metric type");
    }
}

@@ -85,7 +85,7 @@ Ref(const float* base,
    } else if (milvus::IsMetricType(metric, knowhere::metric::IP)) {
        std::reverse(res.begin(), res.end());
    } else {
        PanicInfo("invalid metric type");
        PanicCodeInfo(MetricTypeInvalid, "invalid metric type");
    }
    return GetOffsets(res, topk);
}
@@ -3933,7 +3933,7 @@ TEST(CApiTest, RetriveScalarFieldFromSealedSegmentWithIndex) {
                break;
            }
            default: {
                PanicInfo("not supported type");
                PanicCodeInfo(DataTypeInvalid, "not supported type");
            }
        }
    }
@@ -163,7 +163,7 @@ TEST_F(DiskAnnFileManagerTest, TestThreadPool) {
int
test_exception(string s) {
    if (s == "test_id60") {
        throw std::runtime_error("run time error");
        throw SegcoreError(ErrorCode::UnexpectedError, "run time error");
    }
    return 1;
}
@ -762,7 +762,7 @@ TEST(Expr, TestUnaryRangeJson) {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported range node");
|
||||
PanicCodeInfo(Unsupported, "unsupported range node");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4017,7 +4017,8 @@ TEST(Expr, TestJsonContainsArray) {
|
|||
proto::plan::GenericValue int_val22;
|
||||
int_val22.set_int64_val(int64_t(4));
|
||||
sub_arr2.add_array()->CopyFrom(int_val22);
|
||||
std::vector<Testcase<proto::plan::Array>> diff_testcases2{{{sub_arr1, sub_arr2}, {"array2"}}};
|
||||
std::vector<Testcase<proto::plan::Array>> diff_testcases2{
|
||||
{{sub_arr1, sub_arr2}, {"array2"}}};
|
||||
|
||||
for (auto& testcase : diff_testcases2) {
|
||||
auto check = [&](const std::vector<bool>& values, int i) {
|
||||
|
@ -4096,7 +4097,8 @@ TEST(Expr, TestJsonContainsArray) {
|
|||
proto::plan::GenericValue int_val42;
|
||||
int_val42.set_int64_val(int64_t(8));
|
||||
sub_arr4.add_array()->CopyFrom(int_val42);
|
||||
std::vector<Testcase<proto::plan::Array>> diff_testcases3{{{sub_arr3, sub_arr4}, {"array2"}}};
|
||||
std::vector<Testcase<proto::plan::Array>> diff_testcases3{
|
||||
{{sub_arr3, sub_arr4}, {"array2"}}};
|
||||
|
||||
for (auto& testcase : diff_testcases2) {
|
||||
auto check = [&](const std::vector<bool>& values, int i) {
|
||||
|
|
|
@@ -23,7 +23,7 @@
#include "storage/PayloadWriter.h"

namespace wrapper = milvus::storage;
using ErrorCode = milvus::ErrorCodeEnum;
using ErrorCode = milvus::ErrorCode;

static void
WriteToFile(CBuffer cb) {
@@ -15,11 +15,11 @@
#include <boost/uuid/uuid_io.hpp>
#include <boost/uuid/uuid_generators.hpp>

#include "common/EasyAssert.h"
#include "common/Utils.h"
#include "query/Utils.h"
#include "test_utils/DataGen.h"
#include "common/Types.h"
#include "index/Exception.h"

TEST(Util, StringMatch) {
    using namespace milvus;

@@ -162,5 +162,5 @@ TEST(Util, read_from_fd) {
    // On Linux, read() (and similar system calls) will transfer at most 0x7ffff000 (2,147,479,552) bytes once
    EXPECT_THROW(milvus::index::ReadDataFromFD(
                     fd, read_buf.get(), data_size * max_loop, INT_MAX),
                 milvus::index::UnistdException);
                 milvus::SegcoreError);
}
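EXPECT_THROW only checks the exception type; when a test also needs to pin down which ErrorCode the SegcoreError carries (presumably UnistdError in this case), an explicit try/catch works, as sketched below:

// Illustrative GoogleTest fragment, not part of the diff.
try {
    milvus::index::ReadDataFromFD(
        fd, read_buf.get(), data_size * max_loop, INT_MAX);
    FAIL() << "expected a SegcoreError";
} catch (milvus::SegcoreError& e) {
    EXPECT_EQ(e.get_error_code(), milvus::ErrorCode::UnistdError);
}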
|
@ -19,6 +19,7 @@
|
|||
#include <google/protobuf/text_format.h>
|
||||
|
||||
#include "Constants.h"
|
||||
#include "common/EasyAssert.h"
|
||||
#include "common/Schema.h"
|
||||
#include "index/ScalarIndexSort.h"
|
||||
#include "index/StringIndexSort.h"
|
||||
|
@ -114,7 +115,7 @@ struct GeneratedData {
|
|||
target_field_data.vectors().float16_vector().data());
|
||||
std::copy_n(src_data, len, ret.data());
|
||||
} else {
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(Unsupported, "unsupported");
|
||||
}
|
||||
|
||||
return std::move(ret);
|
||||
|
@ -171,7 +172,7 @@ struct GeneratedData {
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(Unsupported, "unsupported");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -180,13 +181,13 @@ struct GeneratedData {
|
|||
|
||||
std::unique_ptr<DataArray>
|
||||
get_col(FieldId field_id) const {
|
||||
for (auto target_field_data : raw_->fields_data()) {
|
||||
for (const auto& target_field_data : raw_->fields_data()) {
|
||||
if (field_id.get() == target_field_data.field_id()) {
|
||||
return std::make_unique<DataArray>(target_field_data);
|
||||
}
|
||||
}
|
||||
|
||||
PanicInfo("field id not find");
|
||||
PanicCodeInfo(FieldIDInvalid, "field id not find");
|
||||
}
|
||||
|
||||
private:
|
||||
|
@ -357,7 +358,7 @@ DataGen(SchemaPtr schema,
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
throw std::runtime_error("unimplemented");
|
||||
throw SegcoreError(ErrorCode::NotImplemented, "unimplemented");
|
||||
}
|
||||
}
|
||||
++offset;
|
||||
|
@ -447,7 +448,7 @@ DataGenForJsonArray(SchemaPtr schema,
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
throw std::runtime_error("unimplemented");
|
||||
throw SegcoreError(ErrorCode::NotImplemented, "unimplemented");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -650,7 +651,7 @@ CreateFieldDataFromDataArray(ssize_t raw_count,
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(Unsupported, "unsupported");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -712,7 +713,7 @@ CreateFieldDataFromDataArray(ssize_t raw_count,
|
|||
break;
|
||||
}
|
||||
default: {
|
||||
PanicInfo("unsupported");
|
||||
PanicCodeInfo(Unsupported, "unsupported");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -234,6 +234,7 @@ func (BinaryExpr_BinaryOp) EnumDescriptor() ([]byte, []int) {
|
|||
|
||||
type GenericValue struct {
|
||||
// Types that are valid to be assigned to Val:
|
||||
//
|
||||
// *GenericValue_BoolVal
|
||||
// *GenericValue_Int64Val
|
||||
// *GenericValue_FloatVal
|
||||
|
@ -1278,6 +1279,7 @@ var xxx_messageInfo_AlwaysTrueExpr proto.InternalMessageInfo
|
|||
|
||||
type Expr struct {
|
||||
// Types that are valid to be assigned to Expr:
|
||||
//
|
||||
// *Expr_TermExpr
|
||||
// *Expr_UnaryExpr
|
||||
// *Expr_BinaryExpr
|
||||
|
@ -1649,6 +1651,7 @@ func (m *QueryPlanNode) GetLimit() int64 {
|
|||
|
||||
type PlanNode struct {
|
||||
// Types that are valid to be assigned to Node:
|
||||
//
|
||||
// *PlanNode_VectorAnns
|
||||
// *PlanNode_Predicates
|
||||
// *PlanNode_Query
|
||||
|
|
|
@ -29,8 +29,9 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
|||
|
||||
type InvalidateCollMetaCacheRequest struct {
|
||||
// MsgType:
|
||||
// DropCollection -> {meta cache, dml channels}
|
||||
// Other -> {meta cache}
|
||||
//
|
||||
// DropCollection -> {meta cache, dml channels}
|
||||
// Other -> {meta cache}
|
||||
Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
|
||||
DbName string `protobuf:"bytes,2,opt,name=db_name,json=dbName,proto3" json:"db_name,omitempty"`
|
||||
CollectionName string `protobuf:"bytes,3,opt,name=collection_name,json=collectionName,proto3" json:"collection_name,omitempty"`
|
||||
|
|
|
@ -251,7 +251,7 @@ func (SyncType) EnumDescriptor() ([]byte, []int) {
|
|||
return fileDescriptor_aab7cc9a69ed26e8, []int{6}
|
||||
}
|
||||
|
||||
//--------------------QueryCoord grpc request and response proto------------------
|
||||
// --------------------QueryCoord grpc request and response proto------------------
|
||||
type ShowCollectionsRequest struct {
|
||||
Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
|
||||
// Not useful for now
|
||||
|
@ -1336,7 +1336,7 @@ func (m *SyncNewCreatedPartitionRequest) GetPartitionID() int64 {
|
|||
return 0
|
||||
}
|
||||
|
||||
//-----------------query node grpc request and response proto----------------
|
||||
// -----------------query node grpc request and response proto----------------
|
||||
type LoadMetaInfo struct {
|
||||
LoadType LoadType `protobuf:"varint,1,opt,name=load_type,json=loadType,proto3,enum=milvus.proto.query.LoadType" json:"load_type,omitempty"`
|
||||
CollectionID int64 `protobuf:"varint,2,opt,name=collectionID,proto3" json:"collectionID,omitempty"`
|
||||
|
@ -2488,7 +2488,7 @@ func (m *GetLoadInfoResponse) GetPartitions() []int64 {
|
|||
return nil
|
||||
}
|
||||
|
||||
//----------------request auto triggered by QueryCoord-----------------
|
||||
// ----------------request auto triggered by QueryCoord-----------------
|
||||
type HandoffSegmentsRequest struct {
|
||||
Base *commonpb.MsgBase `protobuf:"bytes,1,opt,name=base,proto3" json:"base,omitempty"`
|
||||
SegmentInfos []*SegmentInfo `protobuf:"bytes,2,rep,name=segmentInfos,proto3" json:"segmentInfos,omitempty"`
|
||||
|
@ -3185,7 +3185,7 @@ func (m *UnsubscribeChannelInfo) GetCollectionChannels() []*UnsubscribeChannels
|
|||
return nil
|
||||
}
|
||||
|
||||
//---- synchronize messages proto between QueryCoord and QueryNode -----
|
||||
// ---- synchronize messages proto between QueryCoord and QueryNode -----
|
||||
type SegmentChangeInfo struct {
|
||||
OnlineNodeID int64 `protobuf:"varint,1,opt,name=online_nodeID,json=onlineNodeID,proto3" json:"online_nodeID,omitempty"`
|
||||
OnlineSegments []*SegmentInfo `protobuf:"bytes,2,rep,name=online_segments,json=onlineSegments,proto3" json:"online_segments,omitempty"`
|
||||
|
|
|
@ -793,28 +793,28 @@ type RootCoordClient interface {
|
|||
GetComponentStates(ctx context.Context, in *milvuspb.GetComponentStatesRequest, opts ...grpc.CallOption) (*milvuspb.ComponentStates, error)
|
||||
GetTimeTickChannel(ctx context.Context, in *internalpb.GetTimeTickChannelRequest, opts ...grpc.CallOption) (*milvuspb.StringResponse, error)
|
||||
GetStatisticsChannel(ctx context.Context, in *internalpb.GetStatisticsChannelRequest, opts ...grpc.CallOption) (*milvuspb.StringResponse, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to create collection
|
||||
//
|
||||
// @param CreateCollectionRequest, use to provide collection information to be created.
|
||||
//
|
||||
// @return Status
|
||||
CreateCollection(ctx context.Context, in *milvuspb.CreateCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to delete collection.
|
||||
//
|
||||
// @param DropCollectionRequest, collection name is going to be deleted.
|
||||
//
|
||||
// @return Status
|
||||
DropCollection(ctx context.Context, in *milvuspb.DropCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to test collection existence.
|
||||
//
|
||||
// @param HasCollectionRequest, collection name is going to be tested.
|
||||
//
|
||||
// @return BoolResponse
|
||||
HasCollection(ctx context.Context, in *milvuspb.HasCollectionRequest, opts ...grpc.CallOption) (*milvuspb.BoolResponse, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to get collection schema.
|
||||
//
|
||||
// @param DescribeCollectionRequest, target collection name.
|
||||
|
@ -825,28 +825,28 @@ type RootCoordClient interface {
|
|||
CreateAlias(ctx context.Context, in *milvuspb.CreateAliasRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
DropAlias(ctx context.Context, in *milvuspb.DropAliasRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
AlterAlias(ctx context.Context, in *milvuspb.AlterAliasRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to list all collections.
|
||||
//
|
||||
// @return StringListResponse, collection name list
|
||||
ShowCollections(ctx context.Context, in *milvuspb.ShowCollectionsRequest, opts ...grpc.CallOption) (*milvuspb.ShowCollectionsResponse, error)
|
||||
AlterCollection(ctx context.Context, in *milvuspb.AlterCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to create partition
|
||||
//
|
||||
// @return Status
|
||||
CreatePartition(ctx context.Context, in *milvuspb.CreatePartitionRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to drop partition
|
||||
//
|
||||
// @return Status
|
||||
DropPartition(ctx context.Context, in *milvuspb.DropPartitionRequest, opts ...grpc.CallOption) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to test partition existence.
|
||||
//
|
||||
// @return BoolResponse
|
||||
HasPartition(ctx context.Context, in *milvuspb.HasPartitionRequest, opts ...grpc.CallOption) (*milvuspb.BoolResponse, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to show partition information
|
||||
//
|
||||
// @param ShowPartitionRequest, target collection name.
|
||||
|
@ -854,7 +854,7 @@ type RootCoordClient interface {
|
|||
// @return StringListResponse
|
||||
ShowPartitions(ctx context.Context, in *milvuspb.ShowPartitionsRequest, opts ...grpc.CallOption) (*milvuspb.ShowPartitionsResponse, error)
|
||||
ShowPartitionsInternal(ctx context.Context, in *milvuspb.ShowPartitionsRequest, opts ...grpc.CallOption) (*milvuspb.ShowPartitionsResponse, error)
|
||||
// rpc DescribeSegment(milvus.DescribeSegmentRequest) returns (milvus.DescribeSegmentResponse) {}
|
||||
// rpc DescribeSegment(milvus.DescribeSegmentRequest) returns (milvus.DescribeSegmentResponse) {}
|
||||
ShowSegments(ctx context.Context, in *milvuspb.ShowSegmentsRequest, opts ...grpc.CallOption) (*milvuspb.ShowSegmentsResponse, error)
|
||||
AllocTimestamp(ctx context.Context, in *AllocTimestampRequest, opts ...grpc.CallOption) (*AllocTimestampResponse, error)
|
||||
AllocID(ctx context.Context, in *AllocIDRequest, opts ...grpc.CallOption) (*AllocIDResponse, error)
|
||||
|
@ -1327,28 +1327,28 @@ type RootCoordServer interface {
|
|||
GetComponentStates(context.Context, *milvuspb.GetComponentStatesRequest) (*milvuspb.ComponentStates, error)
|
||||
GetTimeTickChannel(context.Context, *internalpb.GetTimeTickChannelRequest) (*milvuspb.StringResponse, error)
|
||||
GetStatisticsChannel(context.Context, *internalpb.GetStatisticsChannelRequest) (*milvuspb.StringResponse, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to create collection
|
||||
//
|
||||
// @param CreateCollectionRequest, use to provide collection information to be created.
|
||||
//
|
||||
// @return Status
|
||||
CreateCollection(context.Context, *milvuspb.CreateCollectionRequest) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to delete collection.
|
||||
//
|
||||
// @param DropCollectionRequest, collection name is going to be deleted.
|
||||
//
|
||||
// @return Status
|
||||
DropCollection(context.Context, *milvuspb.DropCollectionRequest) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to test collection existence.
|
||||
//
|
||||
// @param HasCollectionRequest, collection name is going to be tested.
|
||||
//
|
||||
// @return BoolResponse
|
||||
HasCollection(context.Context, *milvuspb.HasCollectionRequest) (*milvuspb.BoolResponse, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to get collection schema.
|
||||
//
|
||||
// @param DescribeCollectionRequest, target collection name.
|
||||
|
@ -1359,28 +1359,28 @@ type RootCoordServer interface {
|
|||
CreateAlias(context.Context, *milvuspb.CreateAliasRequest) (*commonpb.Status, error)
|
||||
DropAlias(context.Context, *milvuspb.DropAliasRequest) (*commonpb.Status, error)
|
||||
AlterAlias(context.Context, *milvuspb.AlterAliasRequest) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to list all collections.
|
||||
//
|
||||
// @return StringListResponse, collection name list
|
||||
ShowCollections(context.Context, *milvuspb.ShowCollectionsRequest) (*milvuspb.ShowCollectionsResponse, error)
|
||||
AlterCollection(context.Context, *milvuspb.AlterCollectionRequest) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to create partition
|
||||
//
|
||||
// @return Status
|
||||
CreatePartition(context.Context, *milvuspb.CreatePartitionRequest) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to drop partition
|
||||
//
|
||||
// @return Status
|
||||
DropPartition(context.Context, *milvuspb.DropPartitionRequest) (*commonpb.Status, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to test partition existence.
|
||||
//
|
||||
// @return BoolResponse
|
||||
HasPartition(context.Context, *milvuspb.HasPartitionRequest) (*milvuspb.BoolResponse, error)
|
||||
//*
|
||||
// *
|
||||
// @brief This method is used to show partition information
|
||||
//
|
||||
// @param ShowPartitionRequest, target collection name.
|
||||
|
@ -1388,7 +1388,7 @@ type RootCoordServer interface {
|
|||
// @return StringListResponse
|
||||
ShowPartitions(context.Context, *milvuspb.ShowPartitionsRequest) (*milvuspb.ShowPartitionsResponse, error)
|
||||
ShowPartitionsInternal(context.Context, *milvuspb.ShowPartitionsRequest) (*milvuspb.ShowPartitionsResponse, error)
|
||||
// rpc DescribeSegment(milvus.DescribeSegmentRequest) returns (milvus.DescribeSegmentResponse) {}
|
||||
// rpc DescribeSegment(milvus.DescribeSegmentRequest) returns (milvus.DescribeSegmentResponse) {}
|
||||
ShowSegments(context.Context, *milvuspb.ShowSegmentsRequest) (*milvuspb.ShowSegmentsResponse, error)
|
||||
AllocTimestamp(context.Context, *AllocTimestampRequest) (*AllocTimestampResponse, error)
|
||||
AllocID(context.Context, *AllocIDRequest) (*AllocIDResponse, error)
|
||||
|
|