diff --git a/ci/jenkins/Jenkinsfile b/ci/jenkins/Jenkinsfile
index 853eed6135..fbdf3a3096 100644
--- a/ci/jenkins/Jenkinsfile
+++ b/ci/jenkins/Jenkinsfile
@@ -8,9 +8,10 @@ pipeline {
     parameters{
         choice choices: ['Release', 'Debug'], description: '', name: 'BUILD_TYPE'
         string defaultValue: 'cf1434e7-5a4b-4d25-82e8-88d667aef9e5', description: 'GIT CREDENTIALS ID', name: 'GIT_CREDENTIALS_ID', trim: true
-        string defaultValue: '1a527823-d2b7-44fd-834b-9844350baf14', description: 'JFROG CREDENTIALS ID', name: 'JFROG_CREDENTIALS_ID', trim: true
+        string defaultValue: 'registry.zilliz.com', description: 'DOCKER REGISTRY URL', name: 'DOCKER_REGISTRY_URL', trim: true
         string defaultValue: 'ba070c98-c8cc-4f7c-b657-897715f359fc', description: 'DOCKER CREDENTIALS ID', name: 'DOCKER_CREDENTIALS_ID', trim: true
         string defaultValue: 'http://192.168.1.202/artifactory/milvus', description: 'JFROG ARTFACTORY URL', name: 'JFROG_ARTFACTORY_URL', trim: true
+        string defaultValue: '1a527823-d2b7-44fd-834b-9844350baf14', description: 'JFROG CREDENTIALS ID', name: 'JFROG_CREDENTIALS_ID', trim: true
     }
 
     environment {
@@ -18,7 +19,7 @@ pipeline {
         LOWER_BUILD_TYPE = params.BUILD_TYPE.toLowerCase()
         SEMVER = "${BRANCH_NAME}"
         JOBNAMES = env.JOB_NAME.split('/')
-        PIPELIEN_NAME = "${JOBNAMES[0]}"
+        PIPELINE_NAME = "${JOBNAMES[0]}"
     }
 
     stages {
@@ -51,6 +52,15 @@ pipeline {
                    }
                }
            }
+            stage('Code Coverage') {
+                steps {
+                    container('milvus-build-env') {
+                        script {
+                            load "${env.WORKSPACE}/ci/jenkins/jenkinsfile/coverage.groovy"
+                        }
+                    }
+                }
+            }
             stage('Upload Package') {
                 steps {
                     container('milvus-build-env') {
diff --git a/ci/jenkins/jenkinsfile/build.groovy b/ci/jenkins/jenkinsfile/build.groovy
index 445dfbc2f5..14d0414f4f 100644
--- a/ci/jenkins/jenkinsfile/build.groovy
+++ b/ci/jenkins/jenkinsfile/build.groovy
@@ -1,11 +1,8 @@
 timeout(time: 60, unit: 'MINUTES') {
-    dir ("core") {
-        sh "git config --global user.email \"test@zilliz.com\""
-        sh "git config --global user.name \"test\""
+    dir ("ci/jenkins/scripts") {
         sh "./build.sh -l"
         withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) {
-            sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -d /opt/milvus -j -u -c"
-            sh "./coverage.sh -u root -p 123456 -t \$POD_IP"
+            sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -d /opt/milvus -j -u -c"
         }
     }
 }
diff --git a/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy b/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy
index 82124c45e7..6e85a678be 100644
--- a/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy
+++ b/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy
@@ -1,9 +1,9 @@
 try {
-    sh "helm del --purge ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu"
+    sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu"
 } catch (exc) {
-    def helmResult = sh script: "helm status ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu", returnStatus: true
+    def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu", returnStatus: true
     if (!helmResult) {
-        sh "helm del --purge ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu"
+        sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu"
     }
     throw exc
 }
diff --git a/ci/jenkins/jenkinsfile/coverage.groovy b/ci/jenkins/jenkinsfile/coverage.groovy
new file mode 100644
index 0000000000..7c3b16c029
--- /dev/null
+++ b/ci/jenkins/jenkinsfile/coverage.groovy
@@ -0,0 +1,10 @@
+timeout(time: 60, unit: 'MINUTES') {
+    dir ("ci/jenkins/scripts") {
+        sh "./coverage.sh -o /opt/milvus -u root -p 123456 -t \$POD_IP"
+        // Set some env variables so codecov detection script works correctly
+        withCredentials([[$class: 'StringBinding', credentialsId: "${env.PIPELINE_NAME}-codecov-token", variable: 'CODECOV_TOKEN']]) {
+            sh 'curl -s https://codecov.io/bash | bash -s - -f output_new.info || echo "Codecov did not collect coverage reports"'
+        }
+    }
+}
+
diff --git a/ci/jenkins/jenkinsfile/deploySingle2Dev.groovy b/ci/jenkins/jenkinsfile/deploySingle2Dev.groovy
index f63da03b06..2ab13486a6 100644
--- a/ci/jenkins/jenkinsfile/deploySingle2Dev.groovy
+++ b/ci/jenkins/jenkinsfile/deploySingle2Dev.groovy
@@ -4,11 +4,11 @@ try {
     dir ('milvus-helm') {
         checkout([$class: 'GitSCM', branches: [[name: "0.5.0"]], doGenerateSubmoduleConfigurations: false, extensions: [], submoduleCfg: [], userRemoteConfigs: [[credentialsId: "${params.GIT_CREDENTIALS_ID}", url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.5.0:refs/remotes/origin/0.5.0"]]])
         dir ("milvus-gpu") {
-            sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/values.yaml --namespace milvus ."
+            sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/values.yaml --namespace milvus ."
         }
     }
 } catch (exc) {
     echo 'Helm running failed!'
- sh "helm del --purge ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu" + sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu" throw exc } diff --git a/ci/jenkins/jenkinsfile/package.groovy b/ci/jenkins/jenkinsfile/package.groovy index aa969e9ad5..edd6ce88da 100644 --- a/ci/jenkins/jenkinsfile/package.groovy +++ b/ci/jenkins/jenkinsfile/package.groovy @@ -1,15 +1,9 @@ timeout(time: 5, unit: 'MINUTES') { - dir ("core") { - if (fileExists('milvus')) { - sh "tar -zcvf ./${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz ./milvus" - withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'JFROG_USERNAME', passwordVariable: 'JFROG_PASSWORD')]) { - def uploadStatus = sh(returnStatus: true, script: "curl -u${JFROG_USERNAME}:${JFROG_PASSWORD} -T ./${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz ${params.JFROG_ARTFACTORY_URL}/milvus/package/${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz") - if (uploadStatus != 0) { - error("\" ${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz \" upload to \" ${params.JFROG_ARTFACTORY_URL}/milvus/package/${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz \" failed!") - } - } - } else { - error("Milvus binary directory don't exists!") + sh "tar -zcvf ./${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz -C /opt/ milvus" + withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'JFROG_USERNAME', passwordVariable: 'JFROG_PASSWORD')]) { + def uploadStatus = sh(returnStatus: true, script: "curl -u${JFROG_USERNAME}:${JFROG_PASSWORD} -T ./${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz ${params.JFROG_ARTFACTORY_URL}/milvus/package/${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz") + if (uploadStatus != 0) { + error("\" ${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz \" upload to \" ${params.JFROG_ARTFACTORY_URL}/milvus/package/${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz \" failed!") } } } diff --git a/ci/jenkins/jenkinsfile/publishImages.groovy b/ci/jenkins/jenkinsfile/publishImages.groovy index 9a5327b9c7..62df0c73bf 100644 --- a/ci/jenkins/jenkinsfile/publishImages.groovy +++ b/ci/jenkins/jenkinsfile/publishImages.groovy @@ -2,7 +2,6 @@ container('publish-images') { timeout(time: 15, unit: 'MINUTES') { dir ("docker/deploy/${OS_NAME}") { def binaryPackage = "${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz" - def dockerRegistryURL = "registry.zilliz.com" withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'JFROG_USERNAME', passwordVariable: 'JFROG_PASSWORD')]) { def downloadStatus = sh(returnStatus: true, script: "curl -u${JFROG_USERNAME}:${JFROG_PASSWORD} -O ${params.JFROG_ARTFACTORY_URL}/milvus/package/${binaryPackage}") @@ -15,21 +14,32 @@ container('publish-images') { def imageName = "${PROJECT_NAME}/engine:${DOCKER_VERSION}" try { - def isExistImage = sh(returnStatus: true, script: "docker inspect --type=image ${imageName} 2>&1 > /dev/null") - if (isExistImage == 0) { - sh "docker rmi ${imageName}" + def isExistSourceImage = sh(returnStatus: true, script: "docker inspect --type=image ${imageName} 2>&1 > /dev/null") + if (isExistSourceImage == 0) { + def removeSourceImageStatus = sh(returnStatus: true, script: "docker rmi ${imageName}") } def customImage = docker.build("${imageName}") - docker.withRegistry("https://${dockerRegistryURL}", "${params.DOCKER_CREDENTIALS_ID}") { + + def isExistTargeImage = sh(returnStatus: true, script: "docker inspect --type=image ${params.DOKCER_REGISTRY_URL}/${imageName} 2>&1 > /dev/null") + if (isExistTargeImage == 0) { + def removeTargeImageStatus 
+                    def removeTargetImageStatus = sh(returnStatus: true, script: "docker rmi ${params.DOCKER_REGISTRY_URL}/${imageName}")
+                }
+
+                docker.withRegistry("https://${params.DOCKER_REGISTRY_URL}", "${params.DOCKER_CREDENTIALS_ID}") {
                     customImage.push()
                 }
             } catch (exc) {
                 throw exc
             } finally {
-                def isExistImage = sh(returnStatus: true, script: "docker inspect --type=image ${imageName} 2>&1 > /dev/null")
-                if (isExistImage == 0) {
-                    sh "docker rmi ${imageName}"
+                def isExistSourceImage = sh(returnStatus: true, script: "docker inspect --type=image ${imageName} 2>&1 > /dev/null")
+                if (isExistSourceImage == 0) {
+                    def removeSourceImageStatus = sh(returnStatus: true, script: "docker rmi ${imageName}")
+                }
+
+                def isExistTargetImage = sh(returnStatus: true, script: "docker inspect --type=image ${params.DOCKER_REGISTRY_URL}/${imageName} 2>&1 > /dev/null")
+                if (isExistTargetImage == 0) {
+                    def removeTargetImageStatus = sh(returnStatus: true, script: "docker rmi ${params.DOCKER_REGISTRY_URL}/${imageName}")
                 }
             }
         }
     }
diff --git a/ci/jenkins/jenkinsfile/singleDevTest.groovy b/ci/jenkins/jenkinsfile/singleDevTest.groovy
index d228b34839..ae57ffd42b 100644
--- a/ci/jenkins/jenkinsfile/singleDevTest.groovy
+++ b/ci/jenkins/jenkinsfile/singleDevTest.groovy
@@ -1,7 +1,7 @@
 timeout(time: 30, unit: 'MINUTES') {
     dir ("tests/milvus_python_test") {
         sh 'python3 -m pip install -r requirements.txt'
-        sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --level=1 --ip ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
+        sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
     }
     // mysql database backend test
     load "${env.WORKSPACE}/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy"
@@ -13,10 +13,10 @@ timeout(time: 30, unit: 'MINUTES') {
     }
     dir ("milvus-helm") {
         dir ("milvus-gpu") {
-            sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/mysql_values.yaml --namespace milvus ."
+            sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/mysql_values.yaml --namespace milvus ."
         }
     }
     dir ("tests/milvus_python_test") {
-        sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --level=1 --ip ${env.PIPELIEN_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
+        sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
     }
 }
diff --git a/ci/jenkins/scripts/build.sh b/ci/jenkins/scripts/build.sh
new file mode 100755
index 0000000000..2ccdf4a618
--- /dev/null
+++ b/ci/jenkins/scripts/build.sh
@@ -0,0 +1,142 @@
+#!/bin/bash
+
+SOURCE="${BASH_SOURCE[0]}"
+while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
+  DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+  SOURCE="$(readlink "$SOURCE")"
+  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+done
+SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+
+CMAKE_BUILD_DIR="${SCRIPTS_DIR}/../../../core/cmake_build"
+BUILD_TYPE="Debug"
+BUILD_UNITTEST="OFF"
+INSTALL_PREFIX="/opt/milvus"
+BUILD_COVERAGE="OFF"
+DB_PATH="/opt/milvus"
+PROFILING="OFF"
+USE_JFROG_CACHE="OFF"
+RUN_CPPLINT="OFF"
+CUSTOMIZATION="OFF" # by default, use the original faiss
+CUDA_COMPILER=/usr/local/cuda/bin/nvcc
+
+CUSTOMIZED_FAISS_URL="${FAISS_URL:-NONE}"
+wget -q --method HEAD ${CUSTOMIZED_FAISS_URL}
+if [ $? -eq 0 ]; then
+    CUSTOMIZATION="ON"
+else
+    CUSTOMIZATION="OFF"
+fi
+
+while getopts "o:d:t:ulcgjhx" arg
+do
+    case $arg in
+        o)
+            INSTALL_PREFIX=$OPTARG
+            ;;
+        d)
+            DB_PATH=$OPTARG
+            ;;
+        t)
+            BUILD_TYPE=$OPTARG # BUILD_TYPE
+            ;;
+        u)
+            echo "Build and run unittest cases"
+            BUILD_UNITTEST="ON"
+            ;;
+        l)
+            RUN_CPPLINT="ON"
+            ;;
+        c)
+            BUILD_COVERAGE="ON"
+            ;;
+        g)
+            PROFILING="ON"
+            ;;
+        j)
+            USE_JFROG_CACHE="ON"
+            ;;
+        x)
+            CUSTOMIZATION="OFF" # force to use the original faiss
+            ;;
+        h) # help
+            echo "
+
+parameter:
+-o: install prefix(default: /opt/milvus)
+-d: db data path(default: /opt/milvus)
+-t: build type(default: Debug)
+-u: building unit test options(default: OFF)
+-l: run cpplint, clang-format and clang-tidy(default: OFF)
+-c: code coverage(default: OFF)
+-g: profiling(default: OFF)
+-j: use jfrog cache build directory(default: OFF)
+-h: help
+
+usage:
+./build.sh -o \${INSTALL_PREFIX} -d \${DB_PATH} -t \${BUILD_TYPE} [-u] [-l] [-c] [-g] [-j] [-x] [-h]
+            "
+            exit 0
+            ;;
+        ?)
+            echo "ERROR! unknown argument"
+            exit 1
+            ;;
+    esac
+done
+
+if [[ ! -d ${CMAKE_BUILD_DIR} ]]; then
+    mkdir ${CMAKE_BUILD_DIR}
+fi
+
+cd ${CMAKE_BUILD_DIR}
+
+# remove make cache since build.sh -l uses default variables
+# force update the variables each time
+make rebuild_cache
+
+CMAKE_CMD="cmake \
+-DBUILD_UNIT_TEST=${BUILD_UNITTEST} \
+-DCMAKE_INSTALL_PREFIX=${INSTALL_PREFIX} \
+-DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+-DCMAKE_CUDA_COMPILER=${CUDA_COMPILER} \
+-DBUILD_COVERAGE=${BUILD_COVERAGE} \
+-DMILVUS_DB_PATH=${DB_PATH} \
+-DMILVUS_ENABLE_PROFILING=${PROFILING} \
+-DUSE_JFROG_CACHE=${USE_JFROG_CACHE} \
+-DCUSTOMIZATION=${CUSTOMIZATION} \
+-DFAISS_URL=${CUSTOMIZED_FAISS_URL} \
+.."
+echo ${CMAKE_CMD}
+${CMAKE_CMD}
+
+if [[ ${RUN_CPPLINT} == "ON" ]]; then
+    # cpplint check
+    make lint
+    if [ $? -ne 0 ]; then
+        echo "ERROR! cpplint check failed"
+        exit 1
+    fi
+    echo "cpplint check passed!"
+
+    # clang-format check
+    make check-clang-format
+    if [ $? -ne 0 ]; then
+        echo "ERROR! clang-format check failed"
+        exit 1
+    fi
+    echo "clang-format check passed!"
+
+#    # clang-tidy check
+#    make check-clang-tidy
+#    if [ $? -ne 0 ]; then
+#        echo "ERROR! clang-tidy check failed"
+#        rm -f CMakeCache.txt
+#        exit 1
+#    fi
+#    echo "clang-tidy check passed!"
+else
+    # compile and build
+    make -j8 || exit 1
+    make install || exit 1
+fi
diff --git a/ci/jenkins/scripts/coverage.sh b/ci/jenkins/scripts/coverage.sh
new file mode 100755
index 0000000000..ecbb2dfbe9
--- /dev/null
+++ b/ci/jenkins/scripts/coverage.sh
@@ -0,0 +1,138 @@
+#!/bin/bash
+
+SOURCE="${BASH_SOURCE[0]}"
+while [ -h "$SOURCE" ]; do # resolve $SOURCE until the file is no longer a symlink
+  DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+  SOURCE="$(readlink "$SOURCE")"
+  [[ $SOURCE != /* ]] && SOURCE="$DIR/$SOURCE" # if $SOURCE was a relative symlink, we need to resolve it relative to the path where the symlink file was located
+done
+SCRIPTS_DIR="$( cd -P "$( dirname "$SOURCE" )" && pwd )"
+
+INSTALL_PREFIX="/opt/milvus"
+CMAKE_BUILD_DIR="${SCRIPTS_DIR}/../../../core/cmake_build"
+MYSQL_USER_NAME=root
+MYSQL_PASSWORD=123456
+MYSQL_HOST='127.0.0.1'
+MYSQL_PORT='3306'
+
+while getopts "o:u:p:t:h" arg
+do
+    case $arg in
+        o)
+            INSTALL_PREFIX=$OPTARG
+            ;;
+        u)
+            MYSQL_USER_NAME=$OPTARG
+            ;;
+        p)
+            MYSQL_PASSWORD=$OPTARG
+            ;;
+        t)
+            MYSQL_HOST=$OPTARG
+            ;;
+        h) # help
+            echo "
+
+parameter:
+-o: milvus install prefix(default: /opt/milvus)
+-u: mysql account
+-p: mysql password
+-t: mysql host
+-h: help
+
+usage:
+./coverage.sh -o \${INSTALL_PREFIX} -u \${MYSQL_USER} -p \${MYSQL_PASSWORD} -t \${MYSQL_HOST} [-h]
+            "
+            exit 0
+            ;;
+        ?)
+            echo "ERROR! unknown argument"
+            exit 1
+            ;;
+    esac
+done
+
+export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${INSTALL_PREFIX}/lib
+
+LCOV_CMD="lcov"
+# LCOV_GEN_CMD="genhtml"
+
+FILE_INFO_BASE="base.info"
+FILE_INFO_MILVUS="server.info"
+FILE_INFO_OUTPUT="output.info"
+FILE_INFO_OUTPUT_NEW="output_new.info"
+DIR_LCOV_OUTPUT="lcov_out"
+
+DIR_GCNO="${CMAKE_BUILD_DIR}"
+DIR_UNITTEST="${INSTALL_PREFIX}/unittest"
+
+# delete old code coverage info files
+rm -rf lcov_out
+rm -f ${FILE_INFO_BASE} ${FILE_INFO_MILVUS} ${FILE_INFO_OUTPUT} ${FILE_INFO_OUTPUT_NEW}
+
+MYSQL_DB_NAME=milvus_`date +%s%N`
+
+function mysql_exc()
+{
+    cmd=$1
+    mysql -h${MYSQL_HOST} -u${MYSQL_USER_NAME} -p${MYSQL_PASSWORD} -e "${cmd}"
+    if [ $? -ne 0 ]; then
+        echo "mysql $cmd run failed"
+    fi
+}
+
+mysql_exc "CREATE DATABASE IF NOT EXISTS ${MYSQL_DB_NAME};"
+mysql_exc "GRANT ALL PRIVILEGES ON ${MYSQL_DB_NAME}.* TO '${MYSQL_USER_NAME}'@'%';"
+mysql_exc "FLUSH PRIVILEGES;"
+mysql_exc "USE ${MYSQL_DB_NAME};"
+
+# get baseline
+${LCOV_CMD} -c -i -d ${DIR_GCNO} -o "${FILE_INFO_BASE}"
+if [ $? -ne 0 ]; then
+    echo "gen baseline coverage run failed"
+    exit -1
+fi
+
+for test in `ls ${DIR_UNITTEST}`; do
+    echo $test
+    case ${test} in
+        test_db)
+            # set run args for test_db
+            args="mysql://${MYSQL_USER_NAME}:${MYSQL_PASSWORD}@${MYSQL_HOST}:${MYSQL_PORT}/${MYSQL_DB_NAME}"
+            ;;
+        *test_*)
+            args=""
+            ;;
+    esac
+    # run unittest
+    ${DIR_UNITTEST}/${test} "${args}"
+    if [ $? -ne 0 ]; then
+        echo ${args}
+        echo ${DIR_UNITTEST}/${test} "run failed"
+    fi
+done
+
+mysql_exc "DROP DATABASE IF EXISTS ${MYSQL_DB_NAME};"
+
+# gen code coverage
+${LCOV_CMD} -d ${DIR_GCNO} -o "${FILE_INFO_MILVUS}" -c
+# merge coverage
+${LCOV_CMD} -a ${FILE_INFO_BASE} -a ${FILE_INFO_MILVUS} -o "${FILE_INFO_OUTPUT}"
+
+# remove third party from tracefiles
+${LCOV_CMD} -r "${FILE_INFO_OUTPUT}" -o "${FILE_INFO_OUTPUT_NEW}" \
+    "/usr/*" \
+    "*/boost/*" \
+    "*/cmake_build/*_ep-prefix/*" \
+    "*/src/index/cmake_build*" \
+    "*/src/index/thirdparty*" \
+    "*/src/grpc*" \
+    "*/src/metrics/MetricBase.h" \
+    "*/src/server/Server.cpp" \
+    "*/src/server/DBWrapper.cpp" \
+    "*/src/server/grpc_impl/GrpcServer.cpp" \
+    "*/src/utils/easylogging++.h" \
+    "*/src/utils/easylogging++.cc"
+
+# gen html report
+# ${LCOV_GEN_CMD} "${FILE_INFO_OUTPUT_NEW}" --output-directory ${DIR_LCOV_OUTPUT}/