Refactor master and proxy and add etcdutil

Signed-off-by: zhenshan.cao <zhenshan.cao@zilliz.com>
pull/4973/head^2
zhenshan.cao 2020-10-15 21:31:50 +08:00 committed by yefu.chen
parent 3d7181617b
commit 64295db471
1424 changed files with 3817 additions and 191399 deletions

.github/workflows-bk/main.yaml vendored Normal file

@@ -0,0 +1,58 @@
name: Build and test
# TODO: do not trigger action for some document file update
on: [push, pull_request]
jobs:
ubuntu:
name: AMD64 ubuntu-18.04
runs-on: ubuntu-18.04
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Dependency
run: |
./ci/scripts/install_deps.sh
go get github.com/golang/protobuf/protoc-gen-go@v1.3.2
- name: Cache Proxy Thirdparty
id: cache-proxy
uses: actions/cache@v2
with:
path: |
./proxy/cmake_build
key: ${{ runner.os }}-proxy-thirdparty
- name: Cache Core Thirdparty
id: cache-core
uses: actions/cache@v2
with:
path: |
./core/cmake_build
key: ${{ runner.os }}-core-thirdparty
- name: Cache SDK Thirdparty
id: cache-sdk
uses: actions/cache@v2
with:
path: |
./sdk/cmake_build
key: ${{ runner.os }}-sdk-thirdparty
- name: Build Cpp
run: |
./ci/scripts/proxy_build.sh -u
./ci/scripts/core_build.sh -u
./ci/scripts/sdk_build.sh -u
- name: Generate Proto GO File
run: |
echo `pwd`
export PATH=$PATH:$(go env GOPATH)/bin
export protoc=./proxy/cmake_build/thirdparty/grpc/grpc-build/third_party/protobuf/protoc-3.9.0.0
./ci/scripts/proto_gen_go.sh
- name: Build GO
run: |
go build -o ./cmd/writer ./writer/main.go
go build -o ./cmd/reader ./reader/main.go
go build -o ./cmd/master ./cmd/master.go
- name: Docker Pull And Run
run: |
docker-compose up -d
- name: Run Unittest
run: |
./ci/scripts/run_unittest.sh

.gitignore vendored

@@ -26,6 +26,8 @@ cmake_build
 proxy/milvus
 proxy/cmake_build
 proxy/cmake-build-debug
+proxy/cmake-build-release
+proxy/cmake_build_release
 proxy/thirdparty/grpc-src
 proxy/thirdparty/grpc-build
 proxy/milvus/*
@@ -38,7 +40,7 @@ proxy-go/proxy-go
 sdk/cmake_build
 sdk/cmake-build-debug
 sdk/cmake-build-release
+sdk/cmake_build_release
 # Compiled source
 *.a


@@ -1,3 +1,3 @@
 master-proto-gen:
-	protoc --go_out=plugins=grpc,paths=source_relative:. pkg/master/grpc/master/master.proto
-	protoc --go_out=plugins=grpc,paths=source_relative:. pkg/master/grpc/message/message.proto
+	${protoc} --go_out=plugins=grpc,paths=source_relative:. internal/proto/master/master.proto
+	${protoc} --go_out=plugins=grpc,paths=source_relative:. internal/proto/message/message.proto

ci/jenkins/Jenkinsfile vendored Normal file

@@ -0,0 +1,196 @@
#!/usr/bin/env groovy
@Library('mpl') _
String cron_timezone = "TZ=Asia/Shanghai"
String cron_string = BRANCH_NAME == "master" ? "50 3 * * * " : ""
pipeline {
agent none
triggers {
cron """${cron_timezone}
${cron_string}"""
}
options {
timestamps()
}
parameters{
choice choices: ['Release', 'Debug'], description: 'Build Type', name: 'BUILD_TYPE'
choice choices: ['False', 'True'], description: 'Is Manual Trigger Or Not', name: 'IS_MANUAL_TRIGGER_TYPE'
string defaultValue: 'registry.zilliz.com', description: 'DOCKER REGISTRY URL', name: 'DOKCER_REGISTRY_URL', trim: true
string defaultValue: 'ba070c98-c8cc-4f7c-b657-897715f359fc', description: 'DOCKER CREDENTIALS ID', name: 'DOCKER_CREDENTIALS_ID', trim: true
string defaultValue: 'http://192.168.1.201/artifactory/milvus', description: 'JFROG ARTIFACTORY URL', name: 'JFROG_ARTFACTORY_URL', trim: true
string defaultValue: '1a527823-d2b7-44fd-834b-9844350baf14', description: 'JFROG CREDENTIALS ID', name: 'JFROG_CREDENTIALS_ID', trim: true
}
environment {
PROJECT_NAME = "milvus"
MILVUS_ROOT_PATH="/var/lib"
MILVUS_INSTALL_PREFIX="${env.MILVUS_ROOT_PATH}/${env.PROJECT_NAME}"
LOWER_BUILD_TYPE = params.BUILD_TYPE.toLowerCase()
SEMVER = "${BRANCH_NAME.contains('/') ? BRANCH_NAME.substring(BRANCH_NAME.lastIndexOf('/') + 1) : BRANCH_NAME}"
PIPELINE_NAME = "milvus-ci"
HELM_BRANCH = "0.11.0"
}
stages {
stage ('Milvus Build and Unittest') {
matrix {
axes {
axis {
name 'OS_NAME'
values 'centos7'
}
axis {
name 'CPU_ARCH'
values 'amd64'
}
axis {
name 'BINARY_VERSION'
values 'cpu'
}
}
environment {
PACKAGE_VERSION = VersionNumber([
versionNumberString : '${SEMVER}-${BINARY_VERSION}-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}'
]);
}
agent {
kubernetes {
label "${OS_NAME}-${BINARY_VERSION}-build-${SEMVER}-${env.PIPELINE_NAME}-${env.BUILD_NUMBER}"
defaultContainer 'jnlp'
customWorkspace '/home/jenkins/agent/workspace'
yamlFile "ci/jenkins/pod/milvus-${BINARY_VERSION}-version-${OS_NAME}-build-env-pod.yaml"
}
}
stages {
stage('Build and Unittest') {
steps {
container("milvus-${BINARY_VERSION}-build-env") {
MPLModule('Milvus Build')
MPLModule('Unittest')
MPLModule('Package Build')
}
}
}
}
}
}
stage ('Publish Docker Images') {
matrix {
axes {
axis {
name 'OS_NAME'
values 'centos7'
}
axis {
name 'CPU_ARCH'
values 'amd64'
}
axis {
name 'BINARY_VERSION'
values 'cpu'
}
}
environment {
PACKAGE_VERSION = VersionNumber([
versionNumberString : '${SEMVER}-${BINARY_VERSION}-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}'
]);
SOURCE_REPO = "${params.DOKCER_REGISTRY_URL}/milvus/engine"
TARGET_REPO = "${params.DOKCER_REGISTRY_URL}/milvus/engine"
SOURCE_TAG = "${CHANGE_TARGET ? CHANGE_TARGET : SEMVER}-${BINARY_VERSION}-${OS_NAME}-${LOWER_BUILD_TYPE}"
TARGET_TAG = "${SEMVER}-${BINARY_VERSION}-${OS_NAME}-${LOWER_BUILD_TYPE}"
DOCKER_BUILDKIT = 1
}
agent {
kubernetes {
label "${OS_NAME}-${BINARY_VERSION}-publish-${SEMVER}-${env.PIPELINE_NAME}-${env.BUILD_NUMBER}"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/docker-pod.yaml'
}
}
stages {
stage('Publish') {
steps {
container('publish-images') {
MPLModule('Publish')
}
}
}
}
}
}
stage ('Dev Test') {
matrix {
axes {
axis {
name 'OS_NAME'
values 'centos7'
}
axis {
name 'CPU_ARCH'
values 'amd64'
}
axis {
name 'BINARY_VERSION'
values 'cpu'
}
}
environment {
DOCKER_VERSION = "${SEMVER}-${BINARY_VERSION}-${OS_NAME}-${LOWER_BUILD_TYPE}"
FROMAT_SEMVER = "${env.SEMVER}".replaceAll("\\.", "-").replaceAll("_", "-")
FORMAT_OS_NAME = "${OS_NAME}".replaceAll("\\.", "-").replaceAll("_", "-")
HELM_RELEASE_NAME = "${env.PIPELINE_NAME}-${env.FROMAT_SEMVER}-${env.BUILD_NUMBER}-single-${FORMAT_OS_NAME}-${BINARY_VERSION}".toLowerCase()
SHARDS_HELM_RELEASE_NAME = "${env.PIPELINE_NAME}-${env.FROMAT_SEMVER}-${env.BUILD_NUMBER}-shards-${FORMAT_OS_NAME}-${BINARY_VERSION}".toLowerCase()
DEV_TEST_ARTIFACTS = "_artifacts/${FORMAT_OS_NAME}/${BINARY_VERSION}"
}
agent {
kubernetes {
label "${OS_NAME}-${BINARY_VERSION}-dev-test-${SEMVER}-${env.PIPELINE_NAME}-${env.BUILD_NUMBER}"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/testEnvironment.yaml'
}
}
stages {
stage('Test') {
steps {
container('milvus-test-env') {
MPLModule('Single Node DevTest')
MPLModule('Mishards DevTest')
}
}
}
}
post {
cleanup {
container('milvus-test-env') {
archiveArtifacts artifacts: "${env.DEV_TEST_ARTIFACTS}/**", allowEmptyArchive: true
MPLModule('Cleanup Single Node DevTest')
MPLModule('Cleanup Mishards DevTest')
}
}
}
}
}
}
post {
unsuccessful {
script {
if (isTimeTriggeredBuild()) {
// Send an email only if the build status has changed from green/unstable to red
emailext subject: '$DEFAULT_SUBJECT',
body: '$DEFAULT_CONTENT',
recipientProviders: [
[$class: 'DevelopersRecipientProvider'],
[$class: 'RequesterRecipientProvider']
],
replyTo: '$DEFAULT_REPLYTO',
to: 'dev.milvus@zilliz.com'
}
}
}
}
}
boolean isTimeTriggeredBuild() {
return (currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0) ? true : false;
}


@@ -0,0 +1,35 @@
apiVersion: v1
kind: Pod
metadata:
labels:
app: publish
component: docker
spec:
containers:
- name: publish-images
image: registry.zilliz.com/library/docker:v1.1.0
imagePullPolicy: Always
securityContext:
privileged: true
command:
- cat
tty: true
resources:
limits:
memory: "8Gi"
cpu: "2"
requests:
memory: "2Gi"
cpu: "1"
volumeMounts:
- name: docker-sock
mountPath: /var/run/docker.sock
volumes:
- name: docker-sock
hostPath:
path: /var/run/docker.sock
tolerations:
- key: dedicated
operator: Equal
value: milvus
effect: NoSchedule


@@ -0,0 +1,35 @@
apiVersion: v1
kind: Pod
metadata:
name: milvus-cpu-build-env
labels:
app: milvus
component: cpu-build-env
spec:
containers:
- name: milvus-cpu-build-env
image: registry.zilliz.com/milvus/milvus-cpu-build-env:v0.10.2-ubuntu18.04
env:
- name: POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
- name: OS_NAME
value: "ubuntu18.04"
- name: BUILD_ENV_IMAGE_ID
value: "4e30ebe398d1a10150c625e52f44d5426c71a557afbb3f16ee9cea20e52e1b9d"
command:
- cat
tty: true
resources:
limits:
memory: "14Gi"
cpu: "6.0"
requests:
memory: "8Gi"
cpu: "4.0"
tolerations:
- key: dedicated
operator: Equal
value: milvus
effect: NoSchedule


@@ -0,0 +1,33 @@
apiVersion: v1
kind: Pod
metadata:
labels:
app: milvus
component: test-env
spec:
containers:
- name: milvus-test-env
image: registry.zilliz.com/milvus/milvus-test-env:v0.2
command:
- cat
tty: true
resources:
limits:
memory: "8Gi"
cpu: "4.0"
requests:
memory: "4Gi"
cpu: "2.0"
volumeMounts:
- name: kubeconf
mountPath: /root/.kube/
readOnly: true
volumes:
- name: kubeconf
secret:
secretName: test-cluster-config
tolerations:
- key: dedicated
operator: Equal
value: milvus
effect: NoSchedule

ci/scripts/before-install.sh Executable file

@@ -0,0 +1,15 @@
#!/bin/bash
set -ex
export CCACHE_COMPRESS=1
export CCACHE_COMPRESSLEVEL=5
export CCACHE_COMPILERCHECK=content
export PATH=/usr/lib/ccache/:$PATH
export CCACHE_BASEDIR=${WORKSPACE:=""}
export CCACHE_DIR=${CCACHE_DIR:="${HOME}/.ccache"}
export CCACHE_COMPRESS_PACKAGE_FILE=${CCACHE_COMPRESS_PACKAGE_FILE:="ccache-${OS_NAME}-${BUILD_ENV_IMAGE_ID}.tar.gz"}
export CUSTOM_THIRDPARTY_DOWNLOAD_PATH=${CUSTOM_THIRDPARTY_DOWNLOAD_PATH:="${HOME}/3rdparty_download"}
export THIRDPARTY_COMPRESS_PACKAGE_FILE=${THIRDPARTY_COMPRESS_PACKAGE_FILE:="thirdparty-download.tar.gz"}
set +ex

ci/scripts/check_cache.sh Executable file

@@ -0,0 +1,114 @@
#!/bin/bash
HELP="
Usage:
$0 [flags] [Arguments]
-l [ARTIFACTORY_URL] Artifactory URL
--cache_dir=[CACHE_DIR] Cache directory
-f [FILE] or --file=[FILE] Cache compress package file
-h or --help Print help information
Use \"$0 --help\" for more information about a given command.
"
ARGS=$(getopt -o "l:f:h" -l "cache_dir::,file::,help" -n "$0" -- "$@")
eval set -- "${ARGS}"
while true ; do
case "$1" in
-l)
# o has an optional argument. As we are in quoted mode,
# an empty parameter will be generated if its optional
# argument is not found.
case "$2" in
"") echo "Option Artifactory URL, no argument"; exit 1 ;;
*) ARTIFACTORY_URL=$2 ; shift 2 ;;
esac ;;
--cache_dir)
case "$2" in
"") echo "Option cache_dir, no argument"; exit 1 ;;
*) CACHE_DIR=$2 ; shift 2 ;;
esac ;;
-f|--file)
case "$2" in
"") echo "Option file, no argument"; exit 1 ;;
*) PACKAGE_FILE=$2 ; shift 2 ;;
esac ;;
-h|--help) echo -e "${HELP}" ; exit 0 ;;
--) shift ; break ;;
*) echo "Internal error!" ; exit 1 ;;
esac
done
# Set defaults for vars modified by flags to this script
BRANCH_NAMES=$(git log --decorate | head -n 1 | sed 's/.*(\(.*\))/\1/' | sed 's=[a-zA-Z]*\/==g' | awk -F", " '{$1=""; print $0}')
if [[ -z "${ARTIFACTORY_URL}" || "${ARTIFACTORY_URL}" == "" ]];then
echo "You have not input ARTIFACTORY_URL !"
exit 1
fi
if [[ -z "${CACHE_DIR}" ]]; then
echo "You have not input CACHE_DIR !"
exit 1
fi
if [[ -z "${PACKAGE_FILE}" ]]; then
echo "You have not input PACKAGE_FILE !"
exit 1
fi
function check_cache() {
BRANCH=$1
echo "fetching ${BRANCH}/${PACKAGE_FILE}"
wget -q --spider "${ARTIFACTORY_URL}/${BRANCH}/${PACKAGE_FILE}"
return $?
}
function download_file() {
BRANCH=$1
wget -q "${ARTIFACTORY_URL}/${BRANCH}/${PACKAGE_FILE}" && \
mkdir -p "${CACHE_DIR}" && \
tar zxf "${PACKAGE_FILE}" -C "${CACHE_DIR}" && \
rm ${PACKAGE_FILE}
return $?
}
if [[ -n "${CHANGE_TARGET}" && "${BRANCH_NAME}" =~ "PR-" ]];then
check_cache ${CHANGE_TARGET}
if [[ $? == 0 ]];then
download_file ${CHANGE_TARGET}
if [[ $? == 0 ]];then
echo "found cache"
exit 0
fi
fi
check_cache ${BRANCH_NAME}
if [[ $? == 0 ]];then
download_file ${BRANCH_NAME}
if [[ $? == 0 ]];then
echo "found cache"
exit 0
fi
fi
fi
for CURRENT_BRANCH in ${BRANCH_NAMES}
do
if [[ "${CURRENT_BRANCH}" != "HEAD" ]];then
check_cache ${CURRENT_BRANCH}
if [[ $? == 0 ]];then
download_file ${CURRENT_BRANCH}
if [[ $? == 0 ]];then
echo "found cache"
exit 0
fi
fi
fi
done
echo "could not download cache" && exit 1

ci/scripts/core_build.sh Executable file

@@ -0,0 +1,169 @@
#!/usr/bin/env bash
# Compile jobs variable; Usage: $ jobs=12 ./core_build.sh ...
if [[ ! ${jobs+1} ]]; then
jobs=$(nproc)
fi
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
CPP_SRC_DIR="${SCRIPTS_DIR}/../../core"
CPP_BUILD_DIR="${CPP_SRC_DIR}/cmake_build"
BUILD_OUTPUT_DIR=${CPP_BUILD_DIR}
BUILD_TYPE="Release"
BUILD_UNITTEST="OFF"
INSTALL_PREFIX="${CPP_SRC_DIR}/milvus"
MAKE_CLEAN="OFF"
BUILD_COVERAGE="OFF"
DB_PATH="/tmp/milvus"
PROFILING="OFF"
RUN_CPPLINT="OFF"
CUDA_COMPILER=/usr/local/cuda/bin/nvcc
GPU_VERSION="OFF" #defaults to CPU version
WITH_PROMETHEUS="ON"
CUDA_ARCH="DEFAULT"
CUSTOM_THIRDPARTY_PATH=""
while getopts "p:d:t:s:f:ulrcghzme" arg; do
case $arg in
f)
CUSTOM_THIRDPARTY_PATH=$OPTARG
;;
p)
INSTALL_PREFIX=$OPTARG
;;
d)
DB_PATH=$OPTARG
;;
t)
BUILD_TYPE=$OPTARG # BUILD_TYPE
;;
u)
echo "Build and run unittest cases"
BUILD_UNITTEST="ON"
;;
l)
RUN_CPPLINT="ON"
;;
r)
if [[ -d ${BUILD_OUTPUT_DIR} ]]; then
MAKE_CLEAN="ON"
fi
;;
c)
BUILD_COVERAGE="ON"
;;
z)
PROFILING="ON"
;;
g)
GPU_VERSION="ON"
;;
e)
WITH_PROMETHEUS="OFF"
;;
s)
CUDA_ARCH=$OPTARG
;;
h) # help
echo "
parameter:
-f: custom paths of thirdparty downloaded files(default: NULL)
-p: install prefix(default: $(pwd)/milvus)
-d: db data path(default: /tmp/milvus)
-t: build type(default: Release)
-u: building unit test options(default: OFF)
-l: run cpplint, clang-format and clang-tidy(default: OFF)
-r: remove previous build directory(default: OFF)
-c: code coverage(default: OFF)
-z: profiling(default: OFF)
-g: build GPU version(default: OFF)
-e: build without prometheus(default: OFF)
-s: build with CUDA arch(default:DEFAULT), for example '-gencode=compute_61,code=sm_61;-gencode=compute_75,code=sm_75'
-h: help
usage:
./core_build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} -s \${CUDA_ARCH} -f\${CUSTOM_THIRDPARTY_PATH} [-u] [-l] [-r] [-c] [-z] [-g] [-m] [-e] [-h]
"
exit 0
;;
?)
echo "ERROR! unknown argument"
exit 1
;;
esac
done
if [[ ! -d ${BUILD_OUTPUT_DIR} ]]; then
mkdir ${BUILD_OUTPUT_DIR}
fi
pushd ${BUILD_OUTPUT_DIR}
# remove make cache since build.sh -l use default variables
# force update the variables each time
make rebuild_cache >/dev/null 2>&1
if [[ ${MAKE_CLEAN} == "ON" ]]; then
echo "Runing make clean in ${BUILD_OUTPUT_DIR} ..."
make clean
exit 0
fi
CMAKE_CMD="cmake \
-DBUILD_UNIT_TEST=${BUILD_UNITTEST} \
-DCMAKE_INSTALL_PREFIX=${INSTALL_PREFIX}
-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
-DOpenBLAS_SOURCE=AUTO \
-DCMAKE_CUDA_COMPILER=${CUDA_COMPILER} \
-DBUILD_COVERAGE=${BUILD_COVERAGE} \
-DMILVUS_DB_PATH=${DB_PATH} \
-DENABLE_CPU_PROFILING=${PROFILING} \
-DMILVUS_GPU_VERSION=${GPU_VERSION} \
-DMILVUS_WITH_PROMETHEUS=${WITH_PROMETHEUS} \
-DMILVUS_CUDA_ARCH=${CUDA_ARCH} \
-DCUSTOM_THIRDPARTY_DOWNLOAD_PATH=${CUSTOM_THIRDPARTY_PATH} \
${CPP_SRC_DIR}"
echo ${CMAKE_CMD}
${CMAKE_CMD}
if [[ ${RUN_CPPLINT} == "ON" ]]; then
# cpplint check
make lint
if [ $? -ne 0 ]; then
echo "ERROR! cpplint check failed"
exit 1
fi
echo "cpplint check passed!"
# clang-format check
make check-clang-format
if [ $? -ne 0 ]; then
echo "ERROR! clang-format check failed"
exit 1
fi
echo "clang-format check passed!"
# clang-tidy check
make check-clang-tidy
if [ $? -ne 0 ]; then
echo "ERROR! clang-tidy check failed"
exit 1
fi
echo "clang-tidy check passed!"
else
# compile and build
make -j ${jobs} install || exit 1
fi
popd

ci/scripts/install_deps.sh Executable file

@@ -0,0 +1,6 @@
#!/usr/bin/env bash
sudo apt install -y g++ gcc make libssl-dev zlib1g-dev libboost-regex-dev \
libboost-program-options-dev libboost-system-dev libboost-filesystem-dev \
libboost-serialization-dev python3-dev libboost-python-dev libcurl4-openssl-dev gfortran libtbb-dev

ci/scripts/proto_gen_go.sh Executable file

@@ -0,0 +1,4 @@
#!/usr/bin/env bash
${protoc} --go_out=plugins=grpc,paths=source_relative:. pkg/master/grpc/master/master.proto
${protoc} --go_out=plugins=grpc,paths=source_relative:. pkg/master/grpc/message/message.proto


@@ -1,14 +1,26 @@
-#!/bin/bash
-# Compile jobs variable; Usage: $ jobs=12 ./build.sh ...
+#!/usr/bin/env bash
+# Compile jobs variable; Usage: $ jobs=12 ./proxy_build.sh ...
 if [[ ! ${jobs+1} ]]; then
     jobs=$(nproc)
 fi
-BUILD_OUTPUT_DIR="cmake_build"
+SOURCE="${BASH_SOURCE[0]}"
+while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
+    DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+    SOURCE="$(readlink "$SOURCE")"
+    [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+done
+SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+CPP_SRC_DIR="${SCRIPTS_DIR}/../../proxy"
+CPP_BUILD_DIR="${CPP_SRC_DIR}/cmake_build"
+BUILD_OUTPUT_DIR="${CPP_BUILD_DIR}"
 BUILD_TYPE="Release"
 BUILD_UNITTEST="OFF"
-INSTALL_PREFIX=$(pwd)/milvus
+INSTALL_PREFIX="${CPP_SRC_DIR}/milvus"
 MAKE_CLEAN="OFF"
 DB_PATH="/tmp/milvus"
 RUN_CPPLINT="OFF"
@@ -40,7 +52,7 @@ while getopts "p:d:t:s:ulrcghzme" arg; do
 echo "
 parameter:
--p: install prefix(default: $(pwd)/milvus)
+-p: install prefix(default: ${CPP_SRC_DIR}/milvus)
 -d: db data path(default: /tmp/milvus)
 -t: build type(default: Debug)
 -u: building unit test options(default: OFF)
@@ -49,7 +61,7 @@ parameter:
 -h: help
 usage:
-./build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} [-u] [-l] [-r] [-h]
+./proxy_build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} [-u] [-l] [-r] [-h]
 "
 exit 0
 ;;
@@ -64,7 +76,7 @@ if [[ ! -d ${BUILD_OUTPUT_DIR} ]]; then
     mkdir ${BUILD_OUTPUT_DIR}
 fi
-cd ${BUILD_OUTPUT_DIR}
+pushd ${CPP_BUILD_DIR}
 # remove make cache since build.sh -l use default variables
 # force update the variables each time
@@ -77,13 +89,14 @@ if [[ ${MAKE_CLEAN} == "ON" ]]; then
 exit 0
 fi
 CMAKE_CMD="cmake \
 -DBUILD_UNIT_TEST=${BUILD_UNITTEST} \
--DCMAKE_INSTALL_PREFIX=${INSTALL_PREFIX}
+-DCMAKE_INSTALL_PREFIX=${INSTALL_PREFIX} \
 -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
 -DOpenBLAS_SOURCE=AUTO \
 -DMILVUS_DB_PATH=${DB_PATH} \
-../"
+${CPP_SRC_DIR}"
 echo ${CMAKE_CMD}
 ${CMAKE_CMD}
@@ -116,3 +129,5 @@ else
 # compile and build
 make -j ${jobs} install || exit 1
 fi
+popd

ci/scripts/run_unittest.sh Executable file

@@ -0,0 +1,44 @@
#!/usr/bin/env bash
set -e
SOURCE="${BASH_SOURCE[0]}"
while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
SOURCE="$(readlink "$SOURCE")"
[[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
done
SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
MILVUS_CORE_DIR="${SCRIPTS_DIR}/../../core"
MILVUS_PROXY_DIR="${SCRIPTS_DIR}/../../proxy"
CORE_INSTALL_PREFIX="${MILVUS_CORE_DIR}/milvus"
PROXY_INSTALL_PREFIX="${MILVUS_PROXY_DIR}/milvus"
UNITTEST_DIRS=("${CORE_INSTALL_PREFIX}/unittest" "${PROXY_INSTALL_PREFIX}/unittest")
# Currently core will install target lib to "core/lib"
if [ -d "${MILVUS_CORE_DIR}/lib" ]; then
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${MILVUS_CORE_DIR}/lib
fi
# run unittest
for UNITTEST_DIR in "${UNITTEST_DIRS[@]}"; do
if [ ! -d "${UNITTEST_DIR}" ]; then
echo "The unittest folder does not exist!"
exit 1
fi
for test in `ls ${UNITTEST_DIR}`; do
echo $test " running..."
# run unittest
# ${UNITTEST_DIR}/${test}
if [ $? -ne 0 ]; then
echo ${UNITTEST_DIR}/${test} "run failed"
exit 1
fi
done
done
# ignore MinIO, S3 unittests
MILVUS_DIR="${SCRIPTS_DIR}/../../"
echo $MILVUS_DIR
go test "${MILVUS_DIR}/storage/internal/tikv/..." "${MILVUS_DIR}/reader/..." "${MILVUS_DIR}/writer/..." "${MILVUS_DIR}/pkg/master/..." -failfast


@@ -1,9 +1,12 @@
 #!/bin/bash
-BUILD_OUTPUT_DIR="cmake_build"
-BUILD_TYPE="Debug"
+SOURCE="${BASH_SOURCE[0]}"
+SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+BUILD_OUTPUT_DIR=${SCRIPTS_DIR}"/../../sdk/cmake_build"
+BUILD_TYPE="Release"
 MAKE_CLEAN="OFF"
 RUN_CPPLINT="OFF"
+BUILD_UNITTEST="OFF"
 while getopts "p:d:t:f:ulrcgjhxzme" arg; do
 case $arg in

ci/scripts/update_cache.sh Executable file

@@ -0,0 +1,104 @@
#!/bin/bash
HELP="
Usage:
$0 [flags] [Arguments]
-l [ARTIFACTORY_URL] Artifactory URL
--cache_dir=[CACHE_DIR] Cache directory
-f [FILE] or --file=[FILE] Cache compress package file
-u [USERNAME] Artifactory Username
-p [PASSWORD] Artifactory Password
-h or --help Print help information
Use \"$0 --help\" for more information about a given command.
"
ARGS=$(getopt -o "l:f:u:p:h" -l "cache_dir::,file::,help" -n "$0" -- "$@")
eval set -- "${ARGS}"
while true ; do
case "$1" in
-l)
# o has an optional argument. As we are in quoted mode,
# an empty parameter will be generated if its optional
# argument is not found.
case "$2" in
"") echo "Option Artifactory URL, no argument"; exit 1 ;;
*) ARTIFACTORY_URL=$2 ; shift 2 ;;
esac ;;
--cache_dir)
case "$2" in
"") echo "Option cache_dir, no argument"; exit 1 ;;
*) CACHE_DIR=$2 ; shift 2 ;;
esac ;;
-u)
case "$2" in
"") echo "Option Username, no argument"; exit 1 ;;
*) USERNAME=$2 ; shift 2 ;;
esac ;;
-p)
case "$2" in
"") echo "Option Password, no argument"; exit 1 ;;
*) PASSWORD=$2 ; shift 2 ;;
esac ;;
-f|--file)
case "$2" in
"") echo "Option file, no argument"; exit 1 ;;
*) PACKAGE_FILE=$2 ; shift 2 ;;
esac ;;
-h|--help) echo -e "${HELP}" ; exit 0 ;;
--) shift ; break ;;
*) echo "Internal error!" ; exit 1 ;;
esac
done
# Set defaults for vars modified by flags to this script
BRANCH_NAME=$(git log --decorate | head -n 1 | sed 's/.*(\(.*\))/\1/' | sed 's/.*, //' | sed 's=[a-zA-Z]*\/==g')
if [[ -z "${ARTIFACTORY_URL}" || "${ARTIFACTORY_URL}" == "" ]];then
echo "You have not input ARTIFACTORY_URL !"
exit 1
fi
if [[ ! -d "${CACHE_DIR}" ]]; then
echo "\"${CACHE_DIR}\" directory does not exist !"
exit 1
fi
if [[ -z "${PACKAGE_FILE}" ]]; then
echo "You have not input PACKAGE_FILE !"
exit 1
fi
function check_cache() {
BRANCH=$1
wget -q --spider "${ARTIFACTORY_URL}/${BRANCH}/${PACKAGE_FILE}"
return $?
}
if [[ -n "${CHANGE_TARGET}" && "${BRANCH_NAME}" =~ "PR-" ]]; then
check_cache ${CHANGE_TARGET}
if [[ $? == 0 ]];then
echo "Skip Update cache package ..." && exit 0
fi
fi
if [[ "${BRANCH_NAME}" != "HEAD" ]];then
REMOTE_PACKAGE_PATH="${ARTIFACTORY_URL}/${BRANCH_NAME}"
echo "Updating cache package file: ${PACKAGE_FILE}"
tar zcf ./"${PACKAGE_FILE}" -C "${CACHE_DIR}" .
echo "Uploading cache package file ${PACKAGE_FILE} to ${REMOTE_PACKAGE_PATH}"
curl -u"${USERNAME}":"${PASSWORD}" -T "${PACKAGE_FILE}" "${REMOTE_PACKAGE_PATH}"/"${PACKAGE_FILE}"
if [[ $? == 0 ]];then
echo "Uploading cache package file success !"
exit 0
else
echo "Uploading cache package file fault !"
exit 1
fi
fi
echo "Skip Update cache package ..."


@@ -1,6 +1,11 @@
 package main
-import "github.com/czs007/suvlim/pkg/master"
+import (
+	"flag"
+	"fmt"
+	"github.com/czs007/suvlim/internal/conf"
+	"github.com/czs007/suvlim/internal/master"
+)
 // func main() {
 //	ctx, cancel := context.WithCancel(context.Background())
@@ -20,6 +25,13 @@ func init() {
 	// go mock.FakePulsarProducer()
 }
 func main() {
+	var yamlFile string
+	flag.StringVar(&yamlFile, "yaml", "", "yaml file")
+	flag.Parse()
+	// flag.Usage()
+	fmt.Println("yaml file: ", yamlFile)
+	conf.LoadConfig(yamlFile)
 	master.Run()
 	//master.SegmentStatsController()
 	//master.CollectionController()


@@ -1,7 +1,7 @@
 package main
 import (
-	proxy "github.com/czs007/suvlim/proxy-go/proxy_node"
+	"github.com/czs007/suvlim/internal/proxy"
 	"log"
 )


cmd/reader/reader.go Normal file

@@ -0,0 +1,28 @@
package main
import (
"context"
"flag"
"fmt"
"github.com/czs007/suvlim/internal/conf"
"github.com/czs007/suvlim/internal/reader"
"strconv"
)
func main() {
ctx, _ := context.WithCancel(context.Background())
var yamlFile string
flag.StringVar(&yamlFile, "yaml", "", "yaml file")
flag.Parse()
// flag.Usage()
fmt.Println("yaml file: ", yamlFile)
conf.LoadConfig(yamlFile)
pulsarAddr := "pulsar://"
pulsarAddr += conf.Config.Pulsar.Address
pulsarAddr += ":"
pulsarAddr += strconv.FormatInt(int64(conf.Config.Pulsar.Port), 10)
reader.StartQueryNode(ctx, pulsarAddr)
}


@@ -5,10 +5,8 @@ import (
 	"crypto/md5"
 	"flag"
 	"fmt"
-	s3 "github.com/czs007/suvlim/storage/internal/S3"
-	minio "github.com/czs007/suvlim/storage/internal/minio"
-	tikv "github.com/czs007/suvlim/storage/internal/tikv"
-	"github.com/czs007/suvlim/storage/pkg/types"
+	"github.com/czs007/suvlim/internal/storage"
+	"github.com/czs007/suvlim/internal/storage/type"
 	"github.com/pivotal-golang/bytefmt"
 	"log"
 	"math/rand"
@@ -30,7 +28,7 @@ var totalKeys [][]byte
 var logFileName = "benchmark.log"
 var logFile *os.File
-var store types.Store
+var store storagetype.Store
 var wg sync.WaitGroup
@@ -191,25 +189,9 @@ func main() {
 	if valueSize, err = bytefmt.ToBytes(sizeArg); err != nil {
 		log.Fatalf("Invalid -z argument for object size: %v", err)
 	}
-	switch storeType {
-	case "minio":
-		store, err = minio.NewMinioDriver(context.Background())
-		if err != nil {
-			log.Fatalf("Error when creating storage " + err.Error())
-		}
-	case "tikv":
-		store, err = tikv.NewTikvStore(context.Background())
-		if err != nil {
-			log.Fatalf("Error when creating storage " + err.Error())
-		}
-	case "s3":
-		store, err = s3.NewS3Driver(context.Background())
-		if err != nil {
-			log.Fatalf("Error when creating storage " + err.Error())
-		}
-	default:
-		log.Fatalf("Not supported storage type")
-	}
+	store, err = storage.NewStore(context.Background(), storeType)
+	if err != nil {
+		log.Fatalf("Error when creating storage " + err.Error())
+	}
 	logFile, err = os.OpenFile(logFileName, os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0777)
 	if err != nil {
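Both call sites in this commit now funnel through a single storage.NewStore factory that the diff references but does not show. A minimal sketch of what it presumably looks like, assuming DriverType is a string-backed type and that the per-driver packages moved under internal/storage (the import paths and error message are assumptions; the constructor names come from the removed switch):

package storage

import (
	"context"
	"errors"

	minio "github.com/czs007/suvlim/internal/storage/minio" // path assumed from the new layout
	s3 "github.com/czs007/suvlim/internal/storage/s3"       // path assumed
	tikv "github.com/czs007/suvlim/internal/storage/tikv"   // path assumed
	storagetype "github.com/czs007/suvlim/internal/storage/type"
)

// NewStore hides the per-driver constructors that callers used to
// switch over, so adding a backend touches only this function.
func NewStore(ctx context.Context, driver storagetype.DriverType) (storagetype.Store, error) {
	switch driver {
	case "minio":
		return minio.NewMinioDriver(ctx)
	case "tikv":
		return tikv.NewTikvStore(ctx)
	case "s3":
		return s3.NewS3Driver(ctx)
	default:
		return nil, errors.New("unsupported storage type: " + string(driver))
	}
}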


@@ -1,208 +0,0 @@
// s3-benchmark.go
// Copyright (c) 2017 Wasabi Technology, Inc.
package main
import (
"github.com/czs007/suvlim/storage/pkg"
. "github.com/czs007/suvlim/storage/pkg/types"
"crypto/md5"
"flag"
"fmt"
"code.cloudfoundry.org/bytefmt"
"github.com/tikv/client-go/config"
"github.com/tikv/client-go/rawkv"
"log"
"math/rand"
"net/http"
"os"
"sync"
"sync/atomic"
"time"
"context"
)
// Global variables
var duration_secs, threads, batchOpSize int
var object_size uint64
var object_data []byte
var running_threads, upload_count, upload_slowdown_count int32
var endtime, upload_finish time.Time
var store Store
var err error
var keys [][]byte
var objects_data [][]byte
var segments []string
var timestamps []uint64
var wg sync.WaitGroup
var client *rawkv.Client
func logit(msg string) {
fmt.Println(msg)
logfile, _ := os.OpenFile("benchmark.log", os.O_WRONLY|os.O_CREATE|os.O_APPEND, 0666)
if logfile != nil {
logfile.WriteString(time.Now().Format(http.TimeFormat) + ": " + msg + "\n")
logfile.Close()
}
}
func _putFile(ctx context.Context, store Store){
atomic.AddInt32(&upload_count, 1)
key := "collection_abc"
err := store.PutRow(ctx, []byte(key), object_data, "abc", uint64(time.Now().Unix()))
if err != nil {
atomic.AddInt32(&upload_slowdown_count, 1)
}
}
func _putFiles(ctx context.Context, store Store){
atomic.AddInt32(&upload_count, 1)
err = client.BatchPut(ctx, keys, objects_data)
//err := store.PutRows(ctx, keys, objects_data, segments, timestamps)
if err != nil {
atomic.AddInt32(&upload_slowdown_count, 1)
}
//wg.Done()
}
func runPutFiles(thread_num int) {
//var store Store
//var err error
ctx := context.Background()
//store, err = storage.NewStore(ctx, TIKVDriver)
//if err != nil {
// panic(err.Error())
//}
for time.Now().Before(endtime) {
_putFiles(ctx, store)
}
// Remember last done time
upload_finish = time.Now()
// One less thread
atomic.AddInt32(&running_threads, -1)
wg.Done()
}
func runPutFile(thread_num int) {
var store Store
var err error
ctx := context.Background()
store, err = storage.NewStore(ctx, TIKVDriver)
if err != nil {
panic(err.Error())
}
for time.Now().Before(endtime) {
_putFile(ctx, store)
}
// Remember last done time
upload_finish = time.Now()
// One less thread
atomic.AddInt32(&running_threads, -1)
}
func main() {
// Hello
// Parse command line
myflag := flag.NewFlagSet("myflag", flag.ExitOnError)
myflag.IntVar(&duration_secs, "d", 10, "Duration of each test in seconds")
myflag.IntVar(&threads, "t", 50, "Number of threads to run")
myflag.IntVar(&batchOpSize, "b", 1000, "Batch operation kv pair number")
var sizeArg string
myflag.StringVar(&sizeArg, "z", "1M", "Size of objects in bytes with postfix K, M, and G")
if err := myflag.Parse(os.Args[1:]); err != nil {
os.Exit(1)
}
// Check the arguments
var err error
if object_size, err = bytefmt.ToBytes(sizeArg); err != nil {
log.Fatalf("Invalid -z argument for object size: %v", err)
}
logit(fmt.Sprintf("Parameters: duration=%d, threads=%d, size=%s",
duration_secs, threads, sizeArg))
pdAddrs := []string{"127.0.0.1:2379"}
conf := config.Default()
ctx := context.Background()
client, err = rawkv.NewClient(ctx, pdAddrs, conf)
// Initialize data for the bucket
object_data = make([]byte, object_size)
rand.Read(object_data)
hasher := md5.New()
hasher.Write(object_data)
// reset counters
upload_count = 0
upload_slowdown_count = 0
running_threads = int32(threads)
// Run the upload case
//starttime := time.Now()
//endtime = starttime.Add(time.Second * time.Duration(duration_secs))
//
//for n := 1; n <= threads; n++ {
// go runPutFile(n)
//}
//
//// Wait for it to finish
//for atomic.LoadInt32(&running_threads) > 0 {
// time.Sleep(time.Millisecond)
//}
//upload_time := upload_finish.Sub(starttime).Seconds()
//
//bps := float64(uint64(upload_count)*object_size) / upload_time
//logit(fmt.Sprintf("PUT time %.1f secs, objects = %d, speed = %sB/sec, %.1f operations/sec. Slowdowns = %d",
// upload_time, upload_count, bytefmt.ByteSize(uint64(bps)), float64(upload_count)/upload_time, upload_slowdown_count))
//
//fmt.Println(" upload_count :", upload_count)
// Run the batchput case
keys = make([][]byte, batchOpSize)
objects_data = make([][]byte, batchOpSize)
segments = make([]string, batchOpSize)
timestamps = make([]uint64, batchOpSize)
for n := batchOpSize; n > 0; n-- {
keys[n-1] = []byte("collection_abc")
objects_data[n-1] = object_data
segments[n-1] = "abc"
timestamps[n-1] = uint64(time.Now().Unix())
}
starttime := time.Now()
endtime = starttime.Add(time.Second * time.Duration(duration_secs))
for n := 1; n <= threads; n++ {
wg.Add(1)
go runPutFiles(n)
}
wg.Wait()
// Wait for it to finish
for atomic.LoadInt32(&running_threads) > 0 {
time.Sleep(time.Millisecond)
}
upload_time := upload_finish.Sub(starttime).Seconds()
bps := float64(uint64(upload_count)*object_size*uint64(batchOpSize)) / upload_time
logit(fmt.Sprintf("PUT time %.1f secs, objects = %d, speed = %sB/sec, %.1f operations/sec. Slowdowns = %d",
upload_time, upload_count*int32(batchOpSize), bytefmt.ByteSize(uint64(bps)), float64(upload_count)/upload_time, upload_slowdown_count))
fmt.Println(" upload_count :", upload_count)
}


@@ -1,69 +0,0 @@
package main
import (
"context"
"fmt"
"math"
"storage/pkg"
. "storage/pkg/types"
)
func main() {
// Create a tikv based storage
var store Store
var err error
ctx := context.Background()
store, err = storage.NewStore(ctx, TIKVDriver)
if err != nil {
panic(err.Error())
}
// Set some key-value pair with different timestamp
key := Key("key")
store.Set(ctx, key, Value("value_1"), 1)
store.Set(ctx, key, Value("value_2"), 2)
store.Set(ctx, key, Value("value_3"), 3)
store.Set(ctx, key, Value("value_4"), 4)
search := func(key Key, timestamp uint64) {
v, err := store.Get(ctx, key, timestamp)
if err != nil {
panic(err.Error())
}
fmt.Printf("Get result for key: %s, version:%d, value:%s \n", key, timestamp, v)
}
search(key, 0)
search(key, 3)
search(key, 10)
// Batch set key-value pairs with same timestamp
keys := []Key{Key("key"), Key("key1")}
values := []Value{Value("value_5"), Value("value1_5")}
store.BatchSet(ctx, keys, values, 5)
batchSearch := func(keys []Key, timestamp uint64) {
vs, err := store.BatchGet(ctx, keys, timestamp)
if err != nil {
panic(err.Error())
}
for i, v := range vs {
fmt.Printf("Get result for key: %s, version:%d, value:%s \n", keys[i], timestamp, v)
}
}
// Batch get keys
keys = []Key{Key("key"), Key("key1")}
batchSearch(keys, 5)
//Delete outdated key-value pairs for a key
store.Set(ctx, key, Value("value_6"), 6)
store.Set(ctx, key, Value("value_7"), 7)
err = store.Delete(ctx, key, 5)
search(key, 5)
// use BatchDelete all keys
keys = []Key{Key("key"), Key("key1")}
store.BatchDelete(ctx, keys , math.MaxUint64)
batchSearch(keys, math.MaxUint64)
}


@@ -2,29 +2,34 @@ package main
 import (
 	"context"
+	"flag"
 	"fmt"
-	"github.com/czs007/suvlim/conf"
-	storage "github.com/czs007/suvlim/storage/pkg"
-	"github.com/czs007/suvlim/writer/message_client"
-	"github.com/czs007/suvlim/writer/write_node"
+	"github.com/czs007/suvlim/internal/conf"
+	"github.com/czs007/suvlim/internal/storage"
+	"github.com/czs007/suvlim/internal/msgclient"
+	"github.com/czs007/suvlim/internal/writer"
 	"log"
 	"strconv"
-	"sync"
-	"time"
 )
 func main() {
+	var yamlFile string
+	flag.StringVar(&yamlFile, "yaml", "", "yaml file")
+	flag.Parse()
+	// flag.Usage()
+	fmt.Println("yaml file: ", yamlFile)
+	conf.LoadConfig(yamlFile)
 	pulsarAddr := "pulsar://"
 	pulsarAddr += conf.Config.Pulsar.Address
 	pulsarAddr += ":"
 	pulsarAddr += strconv.FormatInt(int64(conf.Config.Pulsar.Port), 10)
-	mc := message_client.MessageClient{}
+	mc := msgclient.WriterMessageClient{}
 	mc.InitClient(pulsarAddr)
 	//TODO::close client / consumer/ producer
 	mc.ReceiveMessage()
-	wg := sync.WaitGroup{}
 	ctx := context.Background()
 	kv, err := storage.NewStore(ctx, conf.Config.Storage.Driver)
 	// TODO:: if err != nil, should retry link
@@ -32,12 +37,12 @@ func main() {
 		log.Fatal(err)
 	}
-	msgCounter := write_node.MsgCounter{
+	msgCounter := writer.MsgCounter{
 		InsertCounter: 0,
 		DeleteCounter: 0,
 	}
-	wn := write_node.WriteNode{
+	wn := writer.WriteNode{
 		KvStore:       &kv,
 		MessageClient: &mc,
 		TimeSync:      100,
@@ -45,34 +50,25 @@ func main() {
 	}
 	const Debug = true
-	const CountMsgNum = 1000 * 1000
 	if Debug {
-		var printFlag = true
-		var startTime = true
-		var start time.Time
+		const CountInsertMsgBaseline = 1000 * 1000
+		var BaselineCounter int64 = 0
 		for {
 			if ctx.Err() != nil {
 				break
 			}
 			msgLength := wn.MessageClient.PrepareBatchMsg()
-			if msgLength > 0 {
-				if startTime {
-					fmt.Println("============> Start Test <============")
-					startTime = false
-					start = time.Now()
-				}
-				wn.DoWriteNode(ctx, &wg)
-				fmt.Println("write node do a batch message, storage len: ", msgLength)
+			if wn.MsgCounter.InsertCounter/CountInsertMsgBaseline != BaselineCounter {
+				wn.WriteWriterLog()
+				BaselineCounter = wn.MsgCounter.InsertCounter / CountInsertMsgBaseline
 			}
-			// Test insert time
-			if printFlag && wn.MsgCounter.InsertCounter >= CountMsgNum {
-				printFlag = false
-				timeSince := time.Since(start)
-				fmt.Println("============> Do", wn.MsgCounter.InsertCounter, "Insert in", timeSince, "<============")
+			if msgLength > 0 {
+				wn.DoWriteNode(ctx)
+				fmt.Println("write node do a batch message, storage len: ", msgLength)
 			}
 		}
 	}
@@ -84,7 +80,7 @@ func main() {
 		}
 		msgLength := wn.MessageClient.PrepareBatchMsg()
 		if msgLength > 0 {
-			wn.DoWriteNode(ctx, &wg)
+			wn.DoWriteNode(ctx)
 			fmt.Println("write node do a batch message, storage len: ", msgLength)
 		}
 	}


@@ -14,7 +14,7 @@ master:
   port: 53100
   pulsarmoniterinterval: 1
   pulsartopic: "monitor-topic"
-  segmentthreshole: 104857600
+  segmentthreshole: 1073741824
   proxyidlist: [0]
   querynodenum: 1
   writenodenum: 1
@@ -36,6 +36,9 @@ storage:
   secretkey:
 pulsar:
+  authentication: false
+  user: user-default
+  token: eyJhbGciOiJIUzI1NiJ9.eyJzdWIiOiJKb2UifQ.ipevRNuRP6HflG8cFKnmUPtypruRC4fb1DWtoLL62SY
   address: localhost
   port: 6650
   topicnum: 128
@@ -54,8 +57,10 @@ writer:
   stopflag: -2
   readerqueuesize: 10000
   searchbyidchansize: 10000
+  parallelism: 100
   topicstart: 0
   topicend: 128
+  bucket: "zilliz-hz"
 proxy:
   timezone: UTC+8
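The new authentication/user/token keys suggest the Pulsar client is gaining token auth. The wiring into msgclient is not shown in this diff, but as a hedged sketch with the Apache Pulsar Go client it would look like:

package main

import (
	"log"

	"github.com/apache/pulsar-client-go/pulsar"
)

// newPulsarClient mirrors the authentication/token keys added above;
// the function name and placement are assumptions for illustration.
func newPulsarClient(url string, authEnabled bool, token string) (pulsar.Client, error) {
	opts := pulsar.ClientOptions{URL: url} // e.g. "pulsar://localhost:6650"
	if authEnabled {
		opts.Authentication = pulsar.NewAuthenticationToken(token)
	}
	return pulsar.NewClient(opts)
}

func main() {
	client, err := newPulsarClient("pulsar://localhost:6650", false, "")
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()
}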


@@ -1,19 +0,0 @@
#pragma once
#include <string_view>
namespace milvus::impl {
inline
void EasyAssertInfo(bool value, std::string_view expr_str, std::string_view filename, int lineno,
std::string_view extra_info) {
if (!value) {
std::string info;
info += "Assert \"" + std::string(expr_str) + "\"";
info += " at " + std::string(filename) + ":" + std::to_string(lineno);
info += " => " + std::string(extra_info);
throw std::runtime_error(info);
}
}
}
#define AssertInfo(expr, info) impl::EasyAssertInfo(bool(expr), #expr, __FILE__, __LINE__, (info))
#define Assert(expr) AssertInfo((expr), "")

docker-compose.yml Normal file

@@ -0,0 +1,60 @@
version: '3.5'
services:
etcd:
image: quay.io/coreos/etcd:latest
command: etcd -listen-peer-urls=http://127.0.0.1:12380 -advertise-client-urls=http://127.0.0.1:12379 -listen-client-urls http://0.0.0.0:12379,http://0.0.0.0:14001 -initial-advertise-peer-urls=http://127.0.0.1:12380 --initial-cluster default=http://127.0.0.1:12380
ports:
- "12379:12379"
- "12380:12380"
- "14001:14001"
pulsar:
image: apachepulsar/pulsar:latest
command: bin/pulsar standalone
ports:
- "6650:6650"
- "18080:8080"
pd0:
image: pingcap/pd:latest
network_mode: "host"
ports:
- "2379:2379"
- "2380:2380"
volumes:
- /tmp/config/pd.toml:/pd.toml:ro
- /tmp/data:/data
- /tmp/logs:/logs
- /etc/localtime:/etc/localtime:ro
command:
- --name=pd0
- --client-urls=http://0.0.0.0:2379
- --peer-urls=http://0.0.0.0:2380
- --advertise-client-urls=http://127.0.0.1:2379
- --advertise-peer-urls=http://127.0.0.1:2380
- --initial-cluster=pd0=http://127.0.0.1:2380
- --data-dir=/data/pd0
- --log-file=/logs/pd0.log
restart: on-failure
tikv0:
network_mode: "host"
image: pingcap/tikv:latest
ports:
- "20160:20160"
volumes:
- /tmp/config/tikv.toml:/tikv.toml:ro
- /tmp/data:/data
- /tmp/logs:/logs
- /etc/localtime:/etc/localtime:ro
command:
- --addr=0.0.0.0:20160
- --advertise-addr=127.0.0.1:20160
- --data-dir=/data/tikv0
- --pd=127.0.0.1:2379
- --log-file=/logs/tikv0.log
depends_on:
- "pd0"
restart: on-failure
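The commit title adds an etcdutil package, whose files are among those not shown in this truncated diff. A minimal sketch of such a helper against the etcd instance started above (package and function names are assumptions taken from the title; only the client port 12379 comes from the compose file):

package etcdutil // hypothetical package name from the commit title

import (
	"context"
	"time"

	"go.etcd.io/etcd/clientv3"
)

// NewClient dials the etcd started by docker-compose (client port 12379).
func NewClient() (*clientv3.Client, error) {
	return clientv3.New(clientv3.Config{
		Endpoints:   []string{"127.0.0.1:12379"},
		DialTimeout: 5 * time.Second,
	})
}

// PutWithTimeout writes a key under a bounded context so a hung etcd
// cannot block the caller indefinitely.
func PutWithTimeout(cli *clientv3.Client, key, value string) error {
	ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
	defer cancel()
	_, err := cli.Put(ctx, key, value)
	return err
}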


@@ -0,0 +1,57 @@
# Copyright (C) 2019-2020 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under the License.
FROM ubuntu:18.04
# pipefail is enabled for proper error detection in the `wget | apt-key add`
# step
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends wget ca-certificates gnupg2 && \
wget -P /tmp https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
apt-key add /tmp/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \
wget -qO- "https://cmake.org/files/v3.14/cmake-3.14.3-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local && \
apt-get update && apt-get install -y --no-install-recommends \
g++ git gfortran lsb-core \
libboost-serialization-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev \
curl libtool automake libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
clang-format-7 clang-tidy-7 \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*
RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so \
/usr/lib/x86_64-linux-gnu/libmysqlclient_r.so
RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
RUN wget https://github.com/xianyi/OpenBLAS/archive/v0.3.9.tar.gz && \
tar zxvf v0.3.9.tar.gz && cd OpenBLAS-0.3.9 && \
make TARGET=CORE2 DYNAMIC_ARCH=1 DYNAMIC_OLDER=1 USE_THREAD=0 USE_OPENMP=0 FC=gfortran CC=gcc COMMON_OPT="-O3 -g -fPIC" FCOMMON_OPT="-O3 -g -fPIC -frecursive" NMAX="NUM_THREADS=128" LIBPREFIX="libopenblas" LAPACKE="NO_LAPACKE=1" INTERFACE64=0 NO_STATIC=1 && \
make PREFIX=/usr install && \
cd .. && rm -rf OpenBLAS-0.3.9 && rm v0.3.9.tar.gz
RUN apt-get update && apt-get install -y --no-install-recommends ccache && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib"
# Set permissions on /etc/passwd and /home to allow arbitrary users to write
COPY --chown=0:0 docker/build_env/entrypoint.sh /
RUN mkdir -p /home/user && chgrp -R 0 /home && chmod -R g=u /etc/passwd /etc/group /home && chmod +x /entrypoint.sh
ENV HOME=/home/user
ENTRYPOINT [ "/entrypoint.sh" ]
CMD ["tail", "-f", "/dev/null"]

docker/build_env/entrypoint.sh Executable file

@@ -0,0 +1,24 @@
#!/bin/bash
set -e
# Ensure $HOME exists when starting
if [ ! -d "${HOME}" ]; then
mkdir -p "${HOME}"
fi
# Setup $PS1 for a consistent and reasonable prompt
if [ -w "${HOME}" ] && [ ! -f "${HOME}"/.bashrc ]; then
echo "PS1='\s-\v \w \$ '" > "${HOME}"/.bashrc
echo -e 'if [ -f /etc/bashrc ]; then\n . /etc/bashrc\nfi' >> "${HOME}"/.bashrc
fi
# Add current (arbitrary) user to /etc/passwd and /etc/group
if ! whoami &> /dev/null; then
if [ -w /etc/passwd ]; then
echo "${USER_NAME:-user}:x:$(id -u):0:${USER_NAME:-user} user:${HOME}:/bin/bash" >> /etc/passwd
echo "${USER_NAME:-user}:x:$(id -u):" >> /etc/group
fi
fi
exec "$@"


@@ -0,0 +1,33 @@
# Copyright (C) 2019-2020 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under the License.
FROM milvusdb/milvus-dev:amd64-centos-7-core AS openblas
FROM centos:centos7
RUN yum install -y wget && \
wget -P /etc/yum.repos.d/ http://mirrors.aliyun.com/repo/epel-7.repo && \
yum clean all && yum makecache && \
yum install -y libgomp libgfortran4 mysql-devel && \
rm -rf /var/cache/yum/*
COPY ./milvus /var/lib/milvus
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/var/lib/milvus/lib"
COPY --from=openblas /usr/lib/libopenblas-r0.3.9.so /var/lib/milvus/lib/
RUN ln -s /var/lib/milvus/lib/libopenblas-r0.3.9.so /var/lib/milvus/lib/libopenblas.so.0 && \
ln -s /var/lib/milvus/lib/libopenblas.so.0 /var/lib/milvus/lib/libopenblas.so
WORKDIR /var/lib/milvus
CMD [ "/var/lib/milvus/bin/milvus_server", "-c", "/var/lib/milvus/conf/milvus.yaml" ]
EXPOSE 19530


@@ -0,0 +1,35 @@
# Copyright (C) 2019-2020 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under the License.
FROM milvusdb/milvus-dev:amd64-ubuntu-18.04-core AS openblas
FROM ubuntu:18.04
RUN apt-get update && apt-get install -y --no-install-recommends \
gfortran libsqlite3-dev libmysqlclient-dev libcurl4-openssl-dev python3 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*
RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so
COPY ./milvus /var/lib/milvus
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/var/lib/milvus/lib"
COPY --from=openblas /usr/lib/libopenblas-r0.3.9.so /var/lib/milvus/lib/
RUN ln -s /var/lib/milvus/lib/libopenblas-r0.3.9.so /var/lib/milvus/lib/libopenblas.so.0 && \
ln -s /var/lib/milvus/lib/libopenblas.so.0 /var/lib/milvus/lib/libopenblas.so
WORKDIR /var/lib/milvus
CMD [ "/var/lib/milvus/bin/milvus_server", "-c", "/var/lib/milvus/conf/milvus.yaml" ]
EXPOSE 19530


@@ -0,0 +1,17 @@
version: '2.3'
services:
cpu_centos7:
image: ${TARGET_REPO}:${TARGET_TAG}
build:
context: ./
dockerfile: cpu/centos7/Dockerfile
cache_from:
- ${SOURCE_REPO}:${SOURCE_TAG}
cpu_ubuntu18.04:
image: ${TARGET_REPO}:${TARGET_TAG}
build:
context: ./
dockerfile: cpu/ubuntu18.04/Dockerfile
cache_from:
- ${SOURCE_REPO}:${SOURCE_TAG}


@@ -0,0 +1,21 @@
version: '2.3'
networks:
monitor:
driver: bridge
services:
milvus_server:
runtime: nvidia
image: milvusdb/milvus:latest
restart: always
environment:
WEB_APP: host.docker.internal
volumes:
- ../core/conf/milvus.yaml:/var/lib/milvus/conf/milvus.yaml
- ../core/conf/log_config.conf:/var/lib/milvus/conf/log_config.conf
ports:
- "8080:8080"
- "19530:19530"
networks:
- monitor


@@ -0,0 +1,30 @@
# Copyright (C) 2019-2020 Zilliz. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under the License.
FROM python:3.6.8-jessie
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN apt-get update && apt-get install -y --no-install-recommends wget apt-transport-https && \
wget -qO- "https://get.helm.sh/helm-v3.0.2-linux-amd64.tar.gz" | tar --strip-components=1 -xz -C /usr/local/bin linux-amd64/helm && \
wget -P /tmp https://mirrors.aliyun.com/kubernetes/apt/doc/apt-key.gpg && \
apt-key add /tmp/apt-key.gpg && \
sh -c 'echo deb https://mirrors.aliyun.com/kubernetes/apt/ kubernetes-xenial main > /etc/apt/sources.list.d/kubernetes.list' && \
apt-get update && apt-get install -y --no-install-recommends \
build-essential kubectl && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*
COPY docker-entrypoint.sh /app/docker-entrypoint.sh
WORKDIR /root
ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
CMD [ "start" ]


@@ -0,0 +1,9 @@
#!/bin/bash
set -e
if [ "$1" = 'start' ]; then
tail -f /dev/null
fi
exec "$@"

go.sum

@@ -87,6 +87,8 @@ github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQ
 github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
 github.com/aws/aws-sdk-go v1.30.8 h1:4BHbh8K3qKmcnAgToZ2LShldRF9inoqIBccpCLNCy3I=
 github.com/aws/aws-sdk-go v1.30.8/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0=
+github.com/aws/aws-sdk-go v1.35.7 h1:FHMhVhyc/9jljgFAcGkQDYjpC9btM0B8VfkLBfctdNE=
+github.com/aws/aws-sdk-go v1.35.7/go.mod h1:tlPOdRjfxPBpNIwqDj61rmsnA85v9jc0Ps9+muhnW+k=
 github.com/aws/aws-sdk-go-v2 v0.18.0/go.mod h1:JWVYvqSMppoMJC0x5wdwiImzgXTI9FuZwxzkQq9wy+g=
 github.com/beefsack/go-rate v0.0.0-20180408011153-efa7637bb9b6/go.mod h1:6YNgTHLutezwnBvyneBbwvB8C82y3dcoOj5EQJIdGXA=
 github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLMYoU8P317H5OQ+Via4RmuPwCS0=
@@ -318,6 +320,9 @@ github.com/jawher/mow.cli v1.2.0/go.mod h1:y+pcA3jBAdo/GIZx/0rFjw/K2bVEODP9rfZOf
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
 github.com/jmespath/go-jmespath v0.3.0 h1:OS12ieG61fsCg5+qLJ+SsW9NicxNkg3b25OyT2yCeUc=
 github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik=
+github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg=
+github.com/jmespath/go-jmespath v0.4.0/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo=
+github.com/jmespath/go-jmespath/internal/testify v1.5.1/go.mod h1:L3OGu8Wl2/fWfCI6z80xFu9LTZmf1ZRjMHUOPmWr69U=
 github.com/jonboulle/clockwork v0.1.0 h1:VKV+ZcuP6l3yW9doeqz6ziZGgcynBVQO+obU0+0hcPo=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
 github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=


@@ -5,7 +5,7 @@ import (
 	"path"
 	"runtime"
-	"github.com/czs007/suvlim/storage/pkg/types"
+	storagetype "github.com/czs007/suvlim/internal/storage/type"
 	yaml "gopkg.in/yaml.v2"
 )
@@ -34,7 +34,7 @@ type TimeSyncConfig struct {
 }
 type StorageConfig struct {
-	Driver    types.DriverType
+	Driver    storagetype.DriverType
 	Address   string
 	Port      int32
 	Accesskey string
@@ -42,9 +42,12 @@ type StorageConfig struct {
 }
 type PulsarConfig struct {
-	Address  string
-	Port     int32
-	TopicNum int
+	Authentication bool
+	User           string
+	Token          string
+	Address        string
+	Port           int32
+	TopicNum       int
 }
 type ProxyConfig struct {
@@ -94,8 +97,10 @@ type Writer struct {
 	StopFlag           int64
 	ReaderQueueSize    int
 	SearchByIdChanSize int
+	Parallelism        int
 	TopicStart         int
 	TopicEnd           int
+	Bucket             string
 }
 type ServerConfig struct {
@@ -111,9 +116,9 @@ type ServerConfig struct {
 var Config ServerConfig
-func init() {
-	load_config()
-}
+// func init() {
+// 	load_config()
+// }
 func getCurrentFileDir() string {
 	_, fpath, _, _ := runtime.Caller(0)
@@ -132,3 +137,16 @@ func load_config() {
 	}
 	//fmt.Printf("Result: %v\n", Config)
 }
+func LoadConfig(yamlFile string) {
+	filePath := path.Join(getCurrentFileDir(), yamlFile)
+	source, err := ioutil.ReadFile(filePath)
+	if err != nil {
+		panic(err)
+	}
+	err = yaml.Unmarshal(source, &Config)
+	if err != nil {
+		panic(err)
+	}
+	//fmt.Printf("Result: %v\n", Config)
+}
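Note that LoadConfig resolves the yaml argument against the conf package's own source directory (getCurrentFileDir uses runtime.Caller), not the process working directory, so the binaries above pass a path relative to internal/conf. A hedged usage sketch (the file name is an example):

// e.g. go run cmd/master.go -yaml=config.yaml
conf.LoadConfig("config.yaml") // resolved to internal/conf/config.yaml
fmt.Println(conf.Config.Pulsar.Address, conf.Config.Pulsar.Port)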


@@ -5,8 +5,8 @@ if [[ ! ${jobs+1} ]]; then
     jobs=$(nproc)
 fi
-BUILD_OUTPUT_DIR="cmake-build-debug"
-BUILD_TYPE="Debug"
+BUILD_OUTPUT_DIR="cmake-build-release"
+BUILD_TYPE="Release"
 BUILD_UNITTEST="OFF"
 INSTALL_PREFIX=$(pwd)/milvus
 MAKE_CLEAN="OFF"


@@ -1,5 +1,6 @@
 aux_source_directory(${CMAKE_CURRENT_SOURCE_DIR}/../pb PB_SRC_FILES)
+# add_definitions(-DBOOST_STACKTRACE_USE_ADDR2LINE)
 set(DOG_SEGMENT_FILES
     SegmentNaive.cpp
     IndexMeta.cpp
@@ -9,6 +10,7 @@ set(DOG_SEGMENT_FILES
     collection_c.cpp
     partition_c.cpp
     segment_c.cpp
+    EasyAssert.cpp
     ${PB_SRC_FILES}
 )
 add_library(milvus_dog_segment SHARED
@@ -18,4 +20,5 @@ add_library(milvus_dog_segment SHARED
 #add_dependencies( segment sqlite mysqlpp )
-target_link_libraries(milvus_dog_segment tbb utils pthread knowhere log libprotobuf)
+target_link_libraries(milvus_dog_segment tbb utils pthread knowhere log libprotobuf dl backtrace
+)


@@ -0,0 +1,26 @@
#include <iostream>
#include "EasyAssert.h"
// #define BOOST_STACKTRACE_USE_ADDR2LINE
#define BOOST_STACKTRACE_USE_BACKTRACE
#include <boost/stacktrace.hpp>
namespace milvus::impl {
void EasyAssertInfo(bool value, std::string_view expr_str, std::string_view filename, int lineno,
std::string_view extra_info) {
if (!value) {
std::string info;
info += "Assert \"" + std::string(expr_str) + "\"";
info += " at " + std::string(filename) + ":" + std::to_string(lineno) + "\n";
if(!extra_info.empty()) {
info += " => " + std::string(extra_info);
}
auto trace = boost::stacktrace::stacktrace();
std::cout << trace;
throw std::runtime_error(info);
}
}
}


@@ -0,0 +1,14 @@
#pragma once
#include <string_view>
#include <stdio.h>
#include <stdlib.h>
/* Paste this on the file you want to debug. */
namespace milvus::impl {
void EasyAssertInfo(bool value, std::string_view expr_str, std::string_view filename, int lineno,
std::string_view extra_info);
}
#define AssertInfo(expr, info) impl::EasyAssertInfo(bool(expr), #expr, __FILE__, __LINE__, (info))
#define Assert(expr) AssertInfo((expr), "")


@@ -171,9 +171,7 @@ class Schema {
     const FieldMeta&
     operator[](const std::string& field_name) const {
         auto offset_iter = offsets_.find(field_name);
-        if (offset_iter == offsets_.end()) {
-            throw std::runtime_error("Cannot found field_name: " + field_name);
-        }
+        AssertInfo(offset_iter != offsets_.end(), "Cannot find field_name: " + field_name);
         auto offset = offset_iter->second;
         return (*this)[offset];
     }


@@ -96,9 +96,6 @@ auto SegmentNaive::get_deleted_bitmap(int64_t del_barrier, Timestamp query_times
         if (offset >= insert_barrier) {
             continue;
         }
-        if (offset >= insert_barrier) {
-            continue;
-        }
         if (record_.timestamps_[offset] < query_timestamp) {
             Assert(offset < insert_barrier);
             the_offset = std::max(the_offset, offset);

Some files were not shown because too many files have changed in this diff.