mirror of https://github.com/milvus-io/milvus.git
solve conflict
commit
6d1de33292

@@ -4,9 +4,12 @@ Please mark all change in change log and use the ticket from JIRA.

# Milvus 0.6.0 (TODO)

## Bug
- \#228 - memory usage increased slowly during searching vectors
- \#246 - Exclude src/external folder from code coverage for jenkin ci
- \#248 - Reside src/external in thirdparty
- \#316 - Some files not merged after vectors added
- \#327 - Search does not use GPU when index type is FLAT
- \#340 - Test cases run failed on 0.6.0

## Feature
- \#12 - Pure CPU version for Milvus

@@ -19,6 +22,8 @@ Please mark all change in change log and use the ticket from JIRA.
- \#260 - C++ SDK README
- \#314 - add Find FAISS in CMake
- \#310 - Add Q&A for 'protocol https not supported or disable in libcurl' issue
- \#322 - Add option to enable / disable prometheus
- \#358 - Add more information in build.sh and install.md

## Task

@@ -33,128 +33,267 @@ pipeline {
}

stages {
stage("Ubuntu 18.04") {
stage("Ubuntu 18.04 x86_64") {
environment {
OS_NAME = "ubuntu18.04"
PACKAGE_VERSION = VersionNumber([
versionNumberString : '${SEMVER}-${LOWER_BUILD_TYPE}-ubuntu18.04-x86_64-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}'
]);
DOCKER_VERSION = "${SEMVER}-${OS_NAME}-${LOWER_BUILD_TYPE}"
CPU_ARCH = "amd64"
}

stages {
stage("Run Build") {
agent {
kubernetes {
label 'build'
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/milvus-build-env-pod.yaml'
}
parallel {
stage ("GPU Version") {
environment {
BINRARY_VERSION = "gpu"
PACKAGE_VERSION = VersionNumber([
versionNumberString : '${SEMVER}-gpu-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}'
]);
DOCKER_VERSION = "${SEMVER}-gpu-${OS_NAME}-${LOWER_BUILD_TYPE}"
}

stages {
stage('Build') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/build.groovy"
}
stage("Run Build") {
agent {
kubernetes {
label "${BINRARY_VERSION}-build"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/milvus-gpu-version-build-env-pod.yaml'
}
}
}
stage('Code Coverage') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy"

stages {
stage('Build') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/build.groovy"
}
}
}
}
}
}
stage('Upload Package') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/package.groovy"
stage('Code Coverage') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy"
}
}
}
}
}
}
}
}

stage("Publish docker images") {
agent {
kubernetes {
label 'publish'
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/docker-pod.yaml'
}
}

stages {
stage('Publish') {
steps {
container('publish-images'){
script {
load "${env.WORKSPACE}/ci/jenkins/step/publishImages.groovy"
}
}
}
}
}
}

stage("Deploy to Development") {
agent {
kubernetes {
label 'dev-test'
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/testEnvironment.yaml'
}
}

stages {
stage("Deploy to Dev") {
steps {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/deploySingle2Dev.groovy"
}
}
}
}

stage("Dev Test") {
steps {
container('milvus-test-env') {
script {
boolean isNightlyTest = isTimeTriggeredBuild()
if (isNightlyTest) {
load "${env.WORKSPACE}/ci/jenkins/step/singleDevNightlyTest.groovy"
} else {
load "${env.WORKSPACE}/ci/jenkins/step/singleDevTest.groovy"
stage('Upload Package') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/package.groovy"
}
}
}
}
}
}

stage ("Cleanup Dev") {
steps {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy"
stage("Publish docker images") {
agent {
kubernetes {
label "${BINRARY_VERSION}-publish"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/docker-pod.yaml'
}
}

stages {
stage('Publish') {
steps {
container('publish-images'){
script {
load "${env.WORKSPACE}/ci/jenkins/step/publishImages.groovy"
}
}
}
}
}
}

stage("Deploy to Development") {
agent {
kubernetes {
label "${BINRARY_VERSION}-dev-test"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/testEnvironment.yaml'
}
}

stages {
stage("Deploy to Dev") {
steps {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/deploySingle2Dev.groovy"
}
}
}
}

stage("Dev Test") {
steps {
container('milvus-test-env') {
script {
boolean isNightlyTest = isTimeTriggeredBuild()
if (isNightlyTest) {
load "${env.WORKSPACE}/ci/jenkins/step/singleDevNightlyTest.groovy"
} else {
load "${env.WORKSPACE}/ci/jenkins/step/singleDevTest.groovy"
}
}
}
}
}

stage ("Cleanup Dev") {
steps {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy"
}
}
}
}
}
post {
unsuccessful {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy"
}
}
}
}
}
}
post {
unsuccessful {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy"
}

stage ("CPU Version") {
environment {
BINRARY_VERSION = "cpu"
PACKAGE_VERSION = VersionNumber([
versionNumberString : '${SEMVER}-cpu-${OS_NAME}-${CPU_ARCH}-${LOWER_BUILD_TYPE}-${BUILD_DATE_FORMATTED, "yyyyMMdd"}-${BUILDS_TODAY}'
]);
DOCKER_VERSION = "${SEMVER}-cpu-${OS_NAME}-${LOWER_BUILD_TYPE}"
}

stages {
stage("Run Build") {
agent {
kubernetes {
label "${BINRARY_VERSION}-build"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/milvus-cpu-version-build-env-pod.yaml'
}
}

stages {
stage('Build') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/build.groovy"
}
}
}
}
stage('Code Coverage') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy"
}
}
}
}
stage('Upload Package') {
steps {
container('milvus-build-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/package.groovy"
}
}
}
}
}
}

stage("Publish docker images") {
agent {
kubernetes {
label "${BINRARY_VERSION}-publish"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/docker-pod.yaml'
}
}

stages {
stage('Publish') {
steps {
container('publish-images'){
script {
load "${env.WORKSPACE}/ci/jenkins/step/publishImages.groovy"
}
}
}
}
}
}

stage("Deploy to Development") {
agent {
kubernetes {
label "${BINRARY_VERSION}-dev-test"
defaultContainer 'jnlp'
yamlFile 'ci/jenkins/pod/testEnvironment.yaml'
}
}

stages {
stage("Deploy to Dev") {
steps {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/deploySingle2Dev.groovy"
}
}
}
}

stage("Dev Test") {
steps {
container('milvus-test-env') {
script {
boolean isNightlyTest = isTimeTriggeredBuild()
if (isNightlyTest) {
load "${env.WORKSPACE}/ci/jenkins/step/singleDevNightlyTest.groovy"
} else {
load "${env.WORKSPACE}/ci/jenkins/step/singleDevTest.groovy"
}
}
}
}
}

stage ("Cleanup Dev") {
steps {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy"
}
}
}
}
}
post {
unsuccessful {
container('milvus-test-env') {
script {
load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDev.groovy"
}
}
}
}
}

@@ -0,0 +1,34 @@
apiVersion: v1
kind: Pod
metadata:
  name: milvus-cpu-build-env
  labels:
    app: milvus
    componet: cpu-build-env
spec:
  containers:
  - name: milvus-build-env
    image: registry.zilliz.com/milvus/milvus-cpu-build-env:v0.6.0-ubuntu18.04
    env:
    - name: POD_IP
      valueFrom:
        fieldRef:
          fieldPath: status.podIP
    command:
    - cat
    tty: true
    resources:
      limits:
        memory: "32Gi"
        cpu: "8.0"
      requests:
        memory: "16Gi"
        cpu: "4.0"
  - name: milvus-mysql
    image: mysql:5.6
    env:
    - name: MYSQL_ROOT_PASSWORD
      value: 123456
    ports:
    - containerPort: 3306
      name: mysql

@@ -1,14 +1,14 @@
apiVersion: v1
kind: Pod
metadata:
  name: milvus-build-env
  name: milvus-gpu-build-env
  labels:
    app: milvus
    componet: build-env
    componet: gpu-build-env
spec:
  containers:
  - name: milvus-build-env
    image: registry.zilliz.com/milvus/milvus-build-env:v0.5.1-ubuntu18.04
    image: registry.zilliz.com/milvus/milvus-gpu-build-env:v0.6.0-ubuntu18.04
    env:
    - name: POD_IP
      valueFrom:

@@ -1,8 +1,11 @@
timeout(time: 60, unit: 'MINUTES') {
dir ("ci/scripts") {
withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) {
sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -l -g -j -u -c"
if ("${env.BINRARY_VERSION}" == "gpu") {
sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -l -g -j -u -c"
} else {
sh "export JFROG_ARTFACTORY_URL='${params.JFROG_ARTFACTORY_URL}' && export JFROG_USER_NAME='${USERNAME}' && export JFROG_PASSWORD='${PASSWORD}' && ./build.sh -t ${params.BUILD_TYPE} -o /opt/milvus -l -m -j -u -c"
}
}
}
}

@@ -1,12 +1,12 @@
try {
def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu", returnStatus: true
def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}", returnStatus: true
if (!helmResult) {
sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu"
sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}"
}
} catch (exc) {
def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu", returnStatus: true
def helmResult = sh script: "helm status ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}", returnStatus: true
if (!helmResult) {
sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu"
sh "helm del --purge ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}"
}
throw exc
}

@@ -1,9 +1,13 @@
sh 'helm init --client-only --skip-refresh --stable-repo-url https://kubernetes.oss-cn-hangzhou.aliyuncs.com/charts'
sh 'helm repo update'
dir ('milvus-helm') {
checkout([$class: 'GitSCM', branches: [[name: "0.5.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.5.0:refs/remotes/origin/0.5.0"]]])
dir ("milvus-gpu") {
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/sqlite_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
checkout([$class: 'GitSCM', branches: [[name: "0.6.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.6.0:refs/remotes/origin/0.6.0"]]])
dir ("milvus") {
if ("${env.BINRARY_VERSION}" == "gpu") {
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f gpu_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
} else {
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f ci/filebeat/values.yaml --namespace milvus ."
}
}
}

@@ -1,6 +1,6 @@
container('publish-images') {
timeout(time: 15, unit: 'MINUTES') {
dir ("docker/deploy/${OS_NAME}") {
dir ("docker/deploy/${env.BINRARY_VERSION}/${env.OS_NAME}") {
def binaryPackage = "${PROJECT_NAME}-${PACKAGE_VERSION}.tar.gz"

withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'JFROG_USERNAME', passwordVariable: 'JFROG_PASSWORD')]) {

@@ -1,22 +1,26 @@
timeout(time: 90, unit: 'MINUTES') {
dir ("tests/milvus_python_test") {
sh 'python3 -m pip install -r requirements.txt'
sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local"
}
// mysql database backend test
load "${env.WORKSPACE}/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy"

if (!fileExists('milvus-helm')) {
dir ("milvus-helm") {
checkout([$class: 'GitSCM', branches: [[name: "0.5.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.5.0:refs/remotes/origin/0.5.0"]]])
checkout([$class: 'GitSCM', branches: [[name: "0.6.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.6.0:refs/remotes/origin/0.6.0"]]])
}
}
dir ("milvus-helm") {
dir ("milvus-gpu") {
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
dir ("milvus") {
if ("${env.BINRARY_VERSION}" == "gpu") {
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f gpu_values.yaml -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
} else {
sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
}
}
}
dir ("tests/milvus_python_test") {
sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local"
}
}

@@ -1,24 +1,27 @@
timeout(time: 60, unit: 'MINUTES') {
dir ("tests/milvus_python_test") {
sh 'python3 -m pip install -r requirements.txt'
sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
sh "pytest . --alluredir=\"test_out/dev/single/sqlite\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local"
}

// mysql database backend test
// load "${env.WORKSPACE}/ci/jenkins/jenkinsfile/cleanupSingleDev.groovy"

// Remove mysql-version tests: 10-28

// if (!fileExists('milvus-helm')) {
// dir ("milvus-helm") {
// checkout([$class: 'GitSCM', branches: [[name: "0.5.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.5.0:refs/remotes/origin/0.5.0"]]])
// checkout([$class: 'GitSCM', branches: [[name: "0.6.0"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/0.6.0:refs/remotes/origin/0.6.0"]]])
// }
// }
// dir ("milvus-helm") {
// dir ("milvus-gpu") {
// sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
// dir ("milvus") {
// if ("${env.BINRARY_VERSION}" == "gpu") {
// sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f gpu_values.yaml -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
// } else {
// sh "helm install --wait --timeout 300 --set engine.image.tag=${DOCKER_VERSION} --set expose.type=clusterIP --name ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION} -f ci/db_backend/mysql_values.yaml -f ci/filebeat/values.yaml --namespace milvus ."
// }
// }
// }
// dir ("tests/milvus_python_test") {
// sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-gpu-milvus-gpu-engine.milvus.svc.cluster.local"
// sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --level=1 --ip ${env.PIPELINE_NAME}-${env.BUILD_NUMBER}-single-${env.BINRARY_VERSION}-milvus-engine.milvus.svc.cluster.local"
// }
}

@@ -33,7 +33,7 @@ message(STATUS "Build time = ${BUILD_TIME}")

MACRO(GET_GIT_BRANCH_NAME GIT_BRANCH_NAME)
execute_process(COMMAND sh "-c" "git log --decorate | head -n 1 | sed 's/.*(\\(.*\\))/\\1/' | sed 's/.* \\(.*\\),.*/\\1/' | sed 's=[a-zA-Z]*\/==g'"
OUTPUT_VARIABLE ${GIT_BRANCH_NAME})
OUTPUT_VARIABLE ${GIT_BRANCH_NAME})
ENDMACRO(GET_GIT_BRANCH_NAME)

GET_GIT_BRANCH_NAME(GIT_BRANCH_NAME)

@@ -117,17 +117,17 @@ include(DefineOptions)
include(BuildUtils)
include(ThirdPartyPackages)

if(MILVUS_USE_CCACHE)
find_program(CCACHE_FOUND ccache)
if(CCACHE_FOUND)
message(STATUS "Using ccache: ${CCACHE_FOUND}")
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_FOUND})
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND})
# let ccache preserve C++ comments, because some of them may be
# meaningful to the compiler
set(ENV{CCACHE_COMMENTS} "1")
endif(CCACHE_FOUND)
endif()
if (MILVUS_USE_CCACHE)
find_program(CCACHE_FOUND ccache)
if (CCACHE_FOUND)
message(STATUS "Using ccache: ${CCACHE_FOUND}")
set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ${CCACHE_FOUND})
set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ${CCACHE_FOUND})
# let ccache preserve C++ comments, because some of them may be
# meaningful to the compiler
set(ENV{CCACHE_COMMENTS} "1")
endif (CCACHE_FOUND)
endif ()

set(MILVUS_CPU_VERSION false)
if (MILVUS_GPU_VERSION)

@@ -142,6 +142,10 @@ else ()
add_compile_definitions("MILVUS_CPU_VERSION")
endif ()

if (MILVUS_WITH_PROMETHEUS)
add_compile_definitions("MILVUS_WITH_PROMETHEUS")
endif ()

if (CMAKE_BUILD_TYPE STREQUAL "Release")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -O3 -fPIC -DELPP_THREAD_SAFE -fopenmp")
if (MILVUS_GPU_VERSION)

@@ -176,9 +180,9 @@ endif ()

if (MILVUS_GPU_VERSION)
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf/server_gpu_config.template ${CMAKE_CURRENT_SOURCE_DIR}/conf/server_config.yaml)
else()
else ()
configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf/server_cpu_config.template ${CMAKE_CURRENT_SOURCE_DIR}/conf/server_config.yaml)
endif()
endif ()

configure_file(${CMAKE_CURRENT_SOURCE_DIR}/conf/log_config.template ${CMAKE_CURRENT_SOURCE_DIR}/conf/log_config.conf)

core/build.sh
@@ -14,64 +14,69 @@ CUSTOMIZATION="OFF" # default use ori faiss
CUDA_COMPILER=/usr/local/cuda/bin/nvcc
GPU_VERSION="OFF" #defaults to CPU version
WITH_MKL="OFF"
FAISS_ROOT=""
FAISS_ROOT="" #FAISS root path
FAISS_SOURCE="BUNDLED"
WITH_PROMETHEUS="ON"

while getopts "p:d:t:f:ulrcgjhxzm" arg
do
case $arg in
p)
INSTALL_PREFIX=$OPTARG
;;
d)
DB_PATH=$OPTARG
;;
t)
BUILD_TYPE=$OPTARG # BUILD_TYPE
;;
f)
FAISS_ROOT=$OPTARG
FAISS_SOURCE="AUTO"
;;
u)
echo "Build and run unittest cases" ;
BUILD_UNITTEST="ON";
;;
l)
RUN_CPPLINT="ON"
;;
r)
if [[ -d ${BUILD_OUTPUT_DIR} ]]; then
rm ./${BUILD_OUTPUT_DIR} -r
MAKE_CLEAN="ON"
fi
;;
c)
BUILD_COVERAGE="ON"
;;
z)
PROFILING="ON"
;;
j)
USE_JFROG_CACHE="ON"
;;
x)
CUSTOMIZATION="OFF" # force use ori faiss
;;
g)
GPU_VERSION="ON"
;;
m)
WITH_MKL="ON"
;;
h) # help
echo "
while getopts "p:d:t:f:ulrcgjhxzme" arg; do
case $arg in
p)
INSTALL_PREFIX=$OPTARG
;;
d)
DB_PATH=$OPTARG
;;
t)
BUILD_TYPE=$OPTARG # BUILD_TYPE
;;
f)
FAISS_ROOT=$OPTARG
FAISS_SOURCE="AUTO"
;;
u)
echo "Build and run unittest cases"
BUILD_UNITTEST="ON"
;;
l)
RUN_CPPLINT="ON"
;;
r)
if [[ -d ${BUILD_OUTPUT_DIR} ]]; then
rm ./${BUILD_OUTPUT_DIR} -r
MAKE_CLEAN="ON"
fi
;;
c)
BUILD_COVERAGE="ON"
;;
z)
PROFILING="ON"
;;
j)
USE_JFROG_CACHE="ON"
;;
x)
CUSTOMIZATION="OFF" # force use ori faiss
;;
g)
GPU_VERSION="ON"
;;
m)
WITH_MKL="ON"
;;
e)
WITH_PROMETHEUS="OFF"
;;
h) # help
echo "

parameter:
-p: install prefix(default: $(pwd)/milvus)
-d: db data path(default: /tmp/milvus)
-t: build type(default: Debug)
-f: faiss root path(default: empty)
-f: FAISS root path(default: empty). The path should be an absolute path
containing the pre-installed lib/ and include/ directory of FAISS. If they can't be found,
we will build the original FAISS from source instead.
-u: building unit test options(default: OFF)
-l: run cpplint, clang-format and clang-tidy(default: OFF)
-r: remove previous build directory(default: OFF)

@@ -80,29 +85,30 @@ parameter:
-j: use jfrog cache build directory(default: OFF)
-g: build GPU version(default: OFF)
-m: build with MKL(default: OFF)
-e: build without prometheus(default: OFF)
-h: help

usage:
./build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} -f \${FAISS_ROOT} [-u] [-l] [-r] [-c] [-z] [-j] [-g] [-m] [-h]
./build.sh -p \${INSTALL_PREFIX} -t \${BUILD_TYPE} -f \${FAISS_ROOT} [-u] [-l] [-r] [-c] [-z] [-j] [-g] [-m] [-e] [-h]
"
exit 0
;;
?)
echo "ERROR! unknown argument"
exit 1
;;
esac
exit 0
;;
?)
echo "ERROR! unknown argument"
exit 1
;;
esac
done

if [[ ! -d ${BUILD_OUTPUT_DIR} ]]; then
mkdir ${BUILD_OUTPUT_DIR}
mkdir ${BUILD_OUTPUT_DIR}
fi

cd ${BUILD_OUTPUT_DIR}

# remove make cache since build.sh -l use default variables
# force update the variables each time
make rebuild_cache > /dev/null 2>&1
make rebuild_cache >/dev/null 2>&1

CMAKE_CMD="cmake \
-DBUILD_UNIT_TEST=${BUILD_UNITTEST} \

@@ -118,30 +124,31 @@ CMAKE_CMD="cmake \
-DCUSTOMIZATION=${CUSTOMIZATION} \
-DMILVUS_GPU_VERSION=${GPU_VERSION} \
-DFAISS_WITH_MKL=${WITH_MKL} \
-DMILVUS_WITH_PROMETHEUS=${WITH_PROMETHEUS} \
../"
echo ${CMAKE_CMD}
${CMAKE_CMD}

if [[ ${MAKE_CLEAN} == "ON" ]]; then
make clean
make clean
fi

if [[ ${RUN_CPPLINT} == "ON" ]]; then
# cpplint check
make lint
if [ $? -ne 0 ]; then
echo "ERROR! cpplint check failed"
exit 1
fi
echo "cpplint check passed!"
# cpplint check
make lint
if [ $? -ne 0 ]; then
echo "ERROR! cpplint check failed"
exit 1
fi
echo "cpplint check passed!"

# clang-format check
make check-clang-format
if [ $? -ne 0 ]; then
echo "ERROR! clang-format check failed"
exit 1
fi
echo "clang-format check passed!"
# clang-format check
make check-clang-format
if [ $? -ne 0 ]; then
echo "ERROR! clang-format check failed"
exit 1
fi
echo "clang-format check passed!"

# # clang-tidy check
# make check-clang-tidy

@@ -152,11 +159,11 @@ if [[ ${RUN_CPPLINT} == "ON" ]]; then
# echo "clang-tidy check passed!"
else

# strip binary symbol
if [[ ${BUILD_TYPE} != "Debug" ]]; then
strip src/milvus_server
fi
# strip binary symbol
if [[ ${BUILD_TYPE} != "Debug" ]]; then
strip src/milvus_server
fi

# compile and build
make -j 8 install || exit 1
# compile and build
make -j 8 install || exit 1
fi

@@ -37,6 +37,7 @@ endforeach ()
aux_source_directory(${MILVUS_ENGINE_SRC}/cache cache_files)
aux_source_directory(${MILVUS_ENGINE_SRC}/config config_files)
aux_source_directory(${MILVUS_ENGINE_SRC}/metrics metrics_files)
aux_source_directory(${MILVUS_ENGINE_SRC}/metrics/prometheus metrics_prometheus_files)
aux_source_directory(${MILVUS_ENGINE_SRC}/db db_main_files)
aux_source_directory(${MILVUS_ENGINE_SRC}/db/engine db_engine_files)
aux_source_directory(${MILVUS_ENGINE_SRC}/db/insert db_insert_files)

@@ -91,6 +92,11 @@ set(engine_files
${wrapper_files}
)

if (MILVUS_WITH_PROMETHEUS)
set(engine_files ${engine_files}
${metrics_prometheus_files})
endif ()

set(client_grpc_lib
grpcpp_channelz
grpc++

@@ -115,7 +121,6 @@ set(third_party_libs
sqlite
${client_grpc_lib}
yaml-cpp
${prometheus_lib}
mysqlpp
zlib
${boost_lib}

@@ -138,13 +143,19 @@ if (MILVUS_GPU_VERSION)
)
endif ()

if (MILVUS_ENABLE_PROFILING STREQUAL "ON")
if (MILVUS_ENABLE_PROFILING)
set(third_party_libs ${third_party_libs}
gperftools
libunwind
)
endif ()

if (MILVUS_WITH_PROMETHEUS)
set(third_party_libs ${third_party_libs}
${prometheus_lib}
)
endif ()

set(engine_libs
pthread
libgomp.a

@@ -166,13 +177,22 @@ target_link_libraries(milvus_engine
${engine_libs}
)

add_library(metrics STATIC ${metrics_files})
if (MILVUS_WITH_PROMETHEUS)
add_library(metrics STATIC ${metrics_files} ${metrics_prometheus_files})
else ()
add_library(metrics STATIC ${metrics_files})
endif ()

set(metrics_lib
yaml-cpp
${prometheus_lib}
)

if (MILVUS_WITH_PROMETHEUS)
set(metrics_lib ${metrics_lib}
${prometheus_lib}
)
endif ()

target_link_libraries(metrics ${metrics_lib})

set(server_libs

@@ -179,9 +179,10 @@ DBImpl::PreloadTable(const std::string& table_id) {
}

// get all table files from parent table
meta::DatesT dates;
std::vector<size_t> ids;
meta::TableFilesSchema files_array;
auto status = GetFilesToSearch(table_id, ids, files_array);
auto status = GetFilesToSearch(table_id, ids, dates, files_array);
if (!status.ok()) {
return status;
}

@@ -190,7 +191,7 @@ DBImpl::PreloadTable(const std::string& table_id) {
std::vector<meta::TableSchema> partiton_array;
status = meta_ptr_->ShowPartitions(table_id, partiton_array);
for (auto& schema : partiton_array) {
status = GetFilesToSearch(schema.table_id_, ids, files_array);
status = GetFilesToSearch(schema.table_id_, ids, dates, files_array);
}

int64_t size = 0;

@@ -304,6 +305,10 @@ DBImpl::InsertVectors(const std::string& table_id, const std::string& partition_
if (!partition_tag.empty()) {
std::string partition_name;
status = meta_ptr_->GetPartitionName(table_id, partition_tag, target_table_name);
if (!status.ok()) {
ENGINE_LOG_ERROR << status.message();
return status;
}
}

// insert vectors into target table

@@ -400,7 +405,7 @@ DBImpl::Query(const std::string& table_id, const std::vector<std::string>& parti
if (partition_tags.empty()) {
// no partition tag specified, means search in whole table
// get all table files from parent table
status = GetFilesToSearch(table_id, ids, files_array);
status = GetFilesToSearch(table_id, ids, dates, files_array);
if (!status.ok()) {
return status;
}

@@ -408,7 +413,7 @@ DBImpl::Query(const std::string& table_id, const std::vector<std::string>& parti
std::vector<meta::TableSchema> partiton_array;
status = meta_ptr_->ShowPartitions(table_id, partiton_array);
for (auto& schema : partiton_array) {
status = GetFilesToSearch(schema.table_id_, ids, files_array);
status = GetFilesToSearch(schema.table_id_, ids, dates, files_array);
}
} else {
// get files from specified partitions

@@ -416,7 +421,7 @@ DBImpl::Query(const std::string& table_id, const std::vector<std::string>& parti
GetPartitionsByTags(table_id, partition_tags, partition_name_array);

for (auto& partition_name : partition_name_array) {
status = GetFilesToSearch(partition_name, ids, files_array);
status = GetFilesToSearch(partition_name, ids, dates, files_array);
}
}

@@ -446,7 +451,7 @@ DBImpl::QueryByFileID(const std::string& table_id, const std::vector<std::string
}

meta::TableFilesSchema files_array;
auto status = GetFilesToSearch(table_id, ids, files_array);
auto status = GetFilesToSearch(table_id, ids, dates, files_array);
if (!status.ok()) {
return status;
}

@@ -619,6 +624,18 @@ DBImpl::StartCompactionTask() {
{
std::lock_guard<std::mutex> lck(compact_result_mutex_);
if (compact_thread_results_.empty()) {
// collect merge files for all tables(if compact_table_ids_ is empty) for two reasons:
// 1. other tables may still has un-merged files
// 2. server may be closed unexpected, these un-merge files need to be merged when server restart
if (compact_table_ids_.empty()) {
std::vector<meta::TableSchema> table_schema_array;
meta_ptr_->AllTables(table_schema_array);
for (auto& schema : table_schema_array) {
compact_table_ids_.insert(schema.table_id_);
}
}

// start merge file thread
compact_thread_results_.push_back(
compact_thread_pool_.enqueue(&DBImpl::BackgroundCompaction, this, compact_table_ids_));
compact_table_ids_.clear();

@@ -717,7 +734,7 @@ DBImpl::BackgroundMergeFiles(const std::string& table_id) {
for (auto& kv : raw_files) {
auto files = kv.second;
if (files.size() < options_.merge_trigger_number_) {
ENGINE_LOG_DEBUG << "Files number not greater equal than merge trigger number, skip merge action";
ENGINE_LOG_TRACE << "Files number not greater equal than merge trigger number, skip merge action";
continue;
}

@@ -734,7 +751,7 @@ DBImpl::BackgroundMergeFiles(const std::string& table_id) {

void
DBImpl::BackgroundCompaction(std::set<std::string> table_ids) {
ENGINE_LOG_TRACE << " Background compaction thread start";
ENGINE_LOG_TRACE << "Background compaction thread start";

Status status;
for (auto& table_id : table_ids) {

@@ -757,7 +774,7 @@ DBImpl::BackgroundCompaction(std::set<std::string> table_ids) {
}
meta_ptr_->CleanUpFilesWithTTL(ttl);

ENGINE_LOG_TRACE << " Background compaction thread exit";
ENGINE_LOG_TRACE << "Background compaction thread exit";
}

void

@@ -817,9 +834,8 @@ DBImpl::BackgroundBuildIndex() {
}

Status
DBImpl::GetFilesToSearch(const std::string& table_id, const std::vector<size_t>& file_ids,
DBImpl::GetFilesToSearch(const std::string& table_id, const std::vector<size_t>& file_ids, const meta::DatesT& dates,
meta::TableFilesSchema& files) {
meta::DatesT dates;
meta::DatePartionedTableFilesSchema date_files;
auto status = meta_ptr_->FilesToSearch(table_id, file_ids, dates, date_files);
if (!status.ok()) {

@@ -153,7 +153,8 @@ class DBImpl : public DB {
MemSerialize();

Status
GetFilesToSearch(const std::string& table_id, const std::vector<size_t>& file_ids, meta::TableFilesSchema& files);
GetFilesToSearch(const std::string& table_id, const std::vector<size_t>& file_ids, const meta::DatesT& dates,
meta::TableFilesSchema& files);

Status
GetPartitionsByTags(const std::string& table_id, const std::vector<std::string>& partition_tags,
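
A minimal call-site sketch for the widened signature above, for orientation only: the helper name is hypothetical, the namespace qualifiers are assumptions, and the reading that an empty DatesT means "no date filter" is inferred from the removed local `meta::DatesT dates;` in the old body (real callers sit inside DBImpl itself, since the method is not public).

#include <string>
#include <vector>

#include "db/DBImpl.h"  // assumed include path for the declarations above

// Hypothetical helper: collect every searchable file of a table, with the
// caller now supplying the (empty) date filter that GetFilesToSearch used
// to construct internally.
milvus::engine::Status
CollectAllSearchFiles(milvus::engine::DBImpl& db, const std::string& table_id) {
    milvus::engine::meta::DatesT dates;            // empty: no date restriction (assumption)
    std::vector<size_t> file_ids;                  // empty: consider every file
    milvus::engine::meta::TableFilesSchema files;
    return db.GetFilesToSearch(table_id, file_ids, dates, files);
}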

@@ -1392,6 +1392,7 @@ MySQLMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFiles
} // Scoped Connection

Status ret;
int64_t to_merge_files = 0;
for (auto& resRow : res) {
TableFileSchema table_file;
table_file.file_size_ = resRow["file_size"];

@@ -1420,13 +1421,14 @@ MySQLMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFiles
auto dateItr = files.find(table_file.date_);
if (dateItr == files.end()) {
files[table_file.date_] = TableFilesSchema();
to_merge_files++;
}

files[table_file.date_].push_back(table_file);
}

if (res.size() > 0) {
ENGINE_LOG_DEBUG << "Collect " << res.size() << " to-merge files";
if (to_merge_files > 0) {
ENGINE_LOG_TRACE << "Collect " << to_merge_files << " to-merge files";
}
return ret;
} catch (std::exception& e) {

@@ -1809,6 +1811,7 @@ MySQLMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) {

mysqlpp::StoreQueryResult res = cleanUpFilesWithTTLQuery.store();

int64_t remove_tables = 0;
if (!res.empty()) {
std::stringstream idsToDeleteSS;
for (auto& resRow : res) {

@@ -1817,7 +1820,7 @@ MySQLMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) {
resRow["table_id"].to_string(table_id);

utils::DeleteTablePath(options_, table_id, false); // only delete empty folder

remove_tables++;
idsToDeleteSS << "id = " << std::to_string(id) << " OR ";
}
std::string idsToDeleteStr = idsToDeleteSS.str();

@@ -1832,8 +1835,8 @@ MySQLMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) {
}
}

if (res.size() > 0) {
ENGINE_LOG_DEBUG << "Remove " << res.size() << " tables from meta";
if (remove_tables > 0) {
ENGINE_LOG_DEBUG << "Remove " << remove_tables << " tables from meta";
}
} // Scoped Connection
} catch (std::exception& e) {

@@ -971,6 +971,7 @@ SqliteMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFile
order_by(&TableFileSchema::file_size_).desc());

Status result;
int64_t to_merge_files = 0;
for (auto& file : selected) {
TableFileSchema table_file;
table_file.file_size_ = std::get<4>(file);

@@ -999,11 +1000,13 @@ SqliteMetaImpl::FilesToMerge(const std::string& table_id, DatePartionedTableFile
if (dateItr == files.end()) {
files[table_file.date_] = TableFilesSchema();
}

files[table_file.date_].push_back(table_file);
to_merge_files++;
}

if (selected.size() > 0) {
ENGINE_LOG_DEBUG << "Collect " << selected.size() << " to-merge files";
if (to_merge_files > 0) {
ENGINE_LOG_TRACE << "Collect " << to_merge_files << " to-merge files";
}
return result;
} catch (std::exception& e) {

@@ -1313,16 +1316,18 @@ SqliteMetaImpl::CleanUpFilesWithTTL(uint16_t seconds) {
try {
server::MetricCollector metric;

int64_t remove_tables = 0;
for (auto& table_id : table_ids) {
auto selected = ConnectorPtr->select(columns(&TableFileSchema::file_id_),
where(c(&TableFileSchema::table_id_) == table_id));
if (selected.size() == 0) {
utils::DeleteTablePath(options_, table_id);
remove_tables++;
}
}

if (table_ids.size() > 0) {
ENGINE_LOG_DEBUG << "Remove " << table_ids.size() << " tables folder";
if (remove_tables) {
ENGINE_LOG_DEBUG << "Remove " << remove_tables << " tables folder";
}
} catch (std::exception& e) {
return HandleException("Encounter exception when delete table folder", e.what());

@@ -89,34 +89,35 @@ ConvertToDataset(std::vector<SPTAG::QueryResult> query_results) {
}
}

auto id_buf = MakeMutableBufferSmart((uint8_t*)p_id, sizeof(int64_t) * elems);
auto dist_buf = MakeMutableBufferSmart((uint8_t*)p_dist, sizeof(float) * elems);

// TODO: magic
std::vector<BufferPtr> id_bufs{nullptr, id_buf};
std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};

auto int64_type = std::make_shared<arrow::Int64Type>();
auto float_type = std::make_shared<arrow::FloatType>();

auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);
// auto id_array_data = std::make_shared<ArrayData>(int64_type, sizeof(int64_t) * elems, id_bufs);
// auto dist_array_data = std::make_shared<ArrayData>(float_type, sizeof(float) * elems, dist_bufs);

// auto ids = ConstructInt64Array((uint8_t*)p_id, sizeof(int64_t) * elems);
// auto dists = ConstructFloatArray((uint8_t*)p_dist, sizeof(float) * elems);

auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
std::vector<ArrayPtr> array{ids, dists};

auto field_id = std::make_shared<Field>("id", std::make_shared<arrow::Int64Type>());
auto field_dist = std::make_shared<Field>("dist", std::make_shared<arrow::FloatType>());
std::vector<FieldPtr> fields{field_id, field_dist};
auto schema = std::make_shared<Schema>(fields);

return std::make_shared<Dataset>(array, schema);
// auto id_buf = MakeMutableBufferSmart((uint8_t*)p_id, sizeof(int64_t) * elems);
// auto dist_buf = MakeMutableBufferSmart((uint8_t*)p_dist, sizeof(float) * elems);
//
// // TODO: magic
// std::vector<BufferPtr> id_bufs{nullptr, id_buf};
// std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
//
// auto int64_type = std::make_shared<arrow::Int64Type>();
// auto float_type = std::make_shared<arrow::FloatType>();
//
// auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
// auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);
// // auto id_array_data = std::make_shared<ArrayData>(int64_type, sizeof(int64_t) * elems, id_bufs);
// // auto dist_array_data = std::make_shared<ArrayData>(float_type, sizeof(float) * elems, dist_bufs);
//
// // auto ids = ConstructInt64Array((uint8_t*)p_id, sizeof(int64_t) * elems);
// // auto dists = ConstructFloatArray((uint8_t*)p_dist, sizeof(float) * elems);
//
// auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
// auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
// std::vector<ArrayPtr> array{ids, dists};
//
// auto field_id = std::make_shared<Field>("id", std::make_shared<arrow::Int64Type>());
// auto field_dist = std::make_shared<Field>("dist", std::make_shared<arrow::FloatType>());
// std::vector<FieldPtr> fields{field_id, field_dist};
// auto schema = std::make_shared<Schema>(fields);
//
// return std::make_shared<Dataset>(array, schema);
return std::make_shared<Dataset>((void*)p_id, (void*)p_dist);
}

} // namespace knowhere

@@ -54,6 +54,9 @@ class Dataset {
: tensor_(std::move(tensor)), tensor_schema_(std::move(tensor_schema)) {
}

Dataset(void* ids, void* dists) : ids_(ids), dists_(dists) {
}

Dataset(const Dataset&) = delete;
Dataset&
operator=(const Dataset&) = delete;

@@ -128,6 +131,16 @@ class Dataset {
tensor_schema_ = std::move(tensor_schema);
}

void*
ids() {
return ids_;
}

void*
dist() {
return dists_;
}

// const Config &
// meta() const { return meta_; }

@@ -141,6 +154,9 @@ class Dataset {
SchemaPtr array_schema_;
std::vector<TensorPtr> tensor_;
SchemaPtr tensor_schema_;
// TODO(yukun): using smart pointer
void* ids_;
void* dists_;
// Config meta_;
};
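
To make the new raw-pointer result layout concrete, a small consumer sketch. The include path and the row-major nq x k layout are assumptions drawn from the Search implementations below, which pack elems = nq * k ids and distances into the two buffers.

#include <cstdint>
#include <cstdio>

#include "knowhere/adapter/Dataset.h"  // assumed include path for the class above

// Illustrative reader: ids()/dist() are taken to point at nq * k contiguous
// values, with row i holding the neighbors of query i, nearest first.
void
PrintTopK(const knowhere::DatasetPtr& result, int64_t nq, int64_t k) {
    auto ids = static_cast<const int64_t*>(result->ids());
    auto dists = static_cast<const float*>(result->dist());
    for (int64_t i = 0; i < nq; ++i) {
        for (int64_t j = 0; j < k; ++j) {
            std::printf("q%lld: id=%lld dist=%f\n", (long long)i, (long long)ids[i * k + j], (double)dists[i * k + j]);
        }
    }
}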

@@ -80,23 +80,24 @@ IDMAP::Search(const DatasetPtr& dataset, const Config& config) {

search_impl(rows, (float*)p_data, config->k, res_dis, res_ids, Config());

auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems);
auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems);

std::vector<BufferPtr> id_bufs{nullptr, id_buf};
std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};

auto int64_type = std::make_shared<arrow::Int64Type>();
auto float_type = std::make_shared<arrow::FloatType>();

auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);

auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
std::vector<ArrayPtr> array{ids, dists};

return std::make_shared<Dataset>(array, nullptr);
// auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems);
// auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems);
//
// std::vector<BufferPtr> id_bufs{nullptr, id_buf};
// std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
//
// auto int64_type = std::make_shared<arrow::Int64Type>();
// auto float_type = std::make_shared<arrow::FloatType>();
//
// auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
// auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);
//
// auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
// auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
// std::vector<ArrayPtr> array{ids, dists};
//
// return std::make_shared<Dataset>(array, nullptr);
return std::make_shared<Dataset>((void*)res_ids, (void*)res_dis);
}

void

@@ -139,23 +139,23 @@ IVF::Search(const DatasetPtr& dataset, const Config& config) {
// std::cout << ss_res_id.str() << std::endl;
// std::cout << ss_res_dist.str() << std::endl << std::endl;

auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems);
auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems);
// auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems);
// auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems);
//
// std::vector<BufferPtr> id_bufs{nullptr, id_buf};
// std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
//
// auto int64_type = std::make_shared<arrow::Int64Type>();
// auto float_type = std::make_shared<arrow::FloatType>();
//
// auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
// auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);
//
// auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
// auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
// std::vector<ArrayPtr> array{ids, dists};

std::vector<BufferPtr> id_bufs{nullptr, id_buf};
std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};

auto int64_type = std::make_shared<arrow::Int64Type>();
auto float_type = std::make_shared<arrow::FloatType>();

auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);

auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
std::vector<ArrayPtr> array{ids, dists};

return std::make_shared<Dataset>(array, nullptr);
return std::make_shared<Dataset>((void*)res_ids, (void*)res_dis);
}

void

@@ -88,23 +88,24 @@ NSG::Search(const DatasetPtr& dataset, const Config& config) {
s_params.search_length = build_cfg->search_length;
index_->Search((float*)p_data, rows, dim, build_cfg->k, res_dis, res_ids, s_params);

auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems);
auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems);
// auto id_buf = MakeMutableBufferSmart((uint8_t*)res_ids, sizeof(int64_t) * elems);
// auto dist_buf = MakeMutableBufferSmart((uint8_t*)res_dis, sizeof(float) * elems);

std::vector<BufferPtr> id_bufs{nullptr, id_buf};
std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};

auto int64_type = std::make_shared<arrow::Int64Type>();
auto float_type = std::make_shared<arrow::FloatType>();

auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);

auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
std::vector<ArrayPtr> array{ids, dists};

return std::make_shared<Dataset>(array, nullptr);
// std::vector<BufferPtr> id_bufs{nullptr, id_buf};
// std::vector<BufferPtr> dist_bufs{nullptr, dist_buf};
//
// auto int64_type = std::make_shared<arrow::Int64Type>();
// auto float_type = std::make_shared<arrow::FloatType>();
//
// auto id_array_data = arrow::ArrayData::Make(int64_type, elems, id_bufs);
// auto dist_array_data = arrow::ArrayData::Make(float_type, elems, dist_bufs);
//
// auto ids = std::make_shared<NumericArray<arrow::Int64Type>>(id_array_data);
// auto dists = std::make_shared<NumericArray<arrow::FloatType>>(dist_array_data);
// std::vector<ArrayPtr> array{ids, dists};
//
// return std::make_shared<Dataset>(array, nullptr);
return std::make_shared<Dataset>((void*)res_ids, (void*)res_dis);
}

IndexModelPtr

@@ -181,11 +181,13 @@ TEST_P(IVFTest, clone_test) {
// PrintResult(result, nq, k);

auto AssertEqual = [&](knowhere::DatasetPtr p1, knowhere::DatasetPtr p2) {
auto ids_p1 = p1->array()[0];
auto ids_p2 = p2->array()[0];
auto ids_p1 = p1->ids();
auto ids_p2 = p2->ids();

for (int i = 0; i < nq * k; ++i) {
EXPECT_EQ(*(ids_p2->data()->GetValues<int64_t>(1, i)), *(ids_p1->data()->GetValues<int64_t>(1, i)));
EXPECT_EQ(*((int64_t*)(ids_p2) + i), *((int64_t*)(ids_p1) + i));
// EXPECT_EQ(*(ids_p2->data()->GetValues<int64_t>(1, i)), *(ids_p1->data()->GetValues<int64_t>(1,
// i)));
}
};

@@ -66,15 +66,19 @@ TEST_F(KDTTest, kdt_basic) {
AssertAnns(result, nq, k);

{
auto ids = result->array()[0];
auto dists = result->array()[1];
// auto ids = result->array()[0];
// auto dists = result->array()[1];
auto ids = result->ids();
auto dists = result->dist();

std::stringstream ss_id;
std::stringstream ss_dist;
for (auto i = 0; i < nq; i++) {
for (auto j = 0; j < k; ++j) {
ss_id << *ids->data()->GetValues<int64_t>(1, i * k + j) << " ";
ss_dist << *dists->data()->GetValues<float>(1, i * k + j) << " ";
ss_id << *((int64_t*)(ids) + i * k + j) << " ";
ss_dist << *((float*)(dists) + i * k + j) << " ";
// ss_id << *ids->data()->GetValues<int64_t>(1, i * k + j) << " ";
// ss_dist << *dists->data()->GetValues<float>(1, i * k + j) << " ";
}
ss_id << std::endl;
ss_dist << std::endl;

@@ -151,9 +151,10 @@ generate_query_dataset(int64_t nb, int64_t dim, float* xb) {

void
AssertAnns(const knowhere::DatasetPtr& result, const int& nq, const int& k) {
auto ids = result->array()[0];
auto ids = result->ids();
for (auto i = 0; i < nq; i++) {
EXPECT_EQ(i, *(ids->data()->GetValues<int64_t>(1, i * k)));
EXPECT_EQ(i, *((int64_t*)(ids) + i * k));
// EXPECT_EQ(i, *(ids->data()->GetValues<int64_t>(1, i * k)));
}
}

@@ -16,8 +16,10 @@
// under the License.

#include "metrics/Metrics.h"
#include "PrometheusMetrics.h"
#include "server/Config.h"
#ifdef MILVUS_WITH_PROMETHEUS
#include "metrics/prometheus/PrometheusMetrics.h"
#endif

#include <string>

@@ -37,11 +39,15 @@ Metrics::CreateMetricsCollector() {

config.GetMetricConfigCollector(collector_type_str);

#ifdef MILVUS_WITH_PROMETHEUS
if (collector_type_str == "prometheus") {
return PrometheusMetrics::GetInstance();
} else {
return MetricsBase::GetInstance();
}
#else
return MetricsBase::GetInstance();
#endif
}

} // namespace server
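
The same guard pattern in a self-contained form. Everything here except the MILVUS_WITH_PROMETHEUS define (added by the CMake hunks earlier, and switched off via build.sh's new -e option) is hypothetical.

#include <iostream>
#include <string>

// Demo of the compile-time plus runtime selection used in
// Metrics::CreateMetricsCollector(): the prometheus branch only exists when
// the build defines MILVUS_WITH_PROMETHEUS; otherwise the base collector is
// the only possible choice.
static const char*
SelectCollector(const std::string& configured) {
#ifdef MILVUS_WITH_PROMETHEUS
    if (configured == "prometheus") {
        return "prometheus";
    }
#endif
    return "base";  // safe fallback: needs no prometheus-cpp libraries
}

int
main() {
    // Prints "base" unless the translation unit was compiled with the define.
    std::cout << SelectCollector("prometheus") << std::endl;
    return 0;
}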

@@ -15,9 +15,9 @@
// specific language governing permissions and limitations
// under the License.

#include "metrics/PrometheusMetrics.h"
#include "SystemInfo.h"
#include "metrics/prometheus/PrometheusMetrics.h"
#include "cache/GpuCacheMgr.h"
#include "metrics/SystemInfo.h"
#include "server/Config.h"
#include "utils/Log.h"

@@ -24,7 +24,7 @@
#include <string>
#include <vector>

#include "MetricBase.h"
#include "metrics/MetricBase.h"
#include "utils/Error.h"

#define METRICS_NOW_TIME std::chrono::system_clock::now()

@@ -84,8 +84,8 @@ VecIndexImpl::Search(const int64_t& nq, const float* xq, float* dist, int64_t* i
Config search_cfg = cfg;

auto res = index_->Search(dataset, search_cfg);
auto ids_array = res->array()[0];
auto dis_array = res->array()[1];
// auto ids_array = res->array()[0];
// auto dis_array = res->array()[1];

//{
// auto& ids = ids_array;

@@ -104,12 +104,14 @@ VecIndexImpl::Search(const int64_t& nq, const float* xq, float* dist, int64_t* i
// std::cout << "dist\n" << ss_dist.str() << std::endl;
//}

auto p_ids = ids_array->data()->GetValues<int64_t>(1, 0);
auto p_dist = dis_array->data()->GetValues<float>(1, 0);
// auto p_ids = ids_array->data()->GetValues<int64_t>(1, 0);
// auto p_dist = dis_array->data()->GetValues<float>(1, 0);

// TODO(linxj): avoid copy here.
memcpy(ids, p_ids, sizeof(int64_t) * nq * k);
memcpy(dist, p_dist, sizeof(float) * nq * k);
memcpy(ids, res->ids(), sizeof(int64_t) * nq * k);
memcpy(dist, res->dist(), sizeof(float) * nq * k);
free(res->ids());
free(res->dist());
} catch (knowhere::KnowhereException& e) {
WRAPPER_LOG_ERROR << e.what();
return Status(KNOWHERE_UNEXPECTED_ERROR, e.what());
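
The memcpy/free pair above implies an ownership convention: the buffers behind ids()/dist() are heap allocations that the Dataset does not manage. A sketch of the producing side, under the assumption (consistent with the (void*)res_ids constructions in the IDMAP/IVF/NSG hunks) that Search() fills malloc'd buffers which the consumer copies and then frees.

#include <cstdint>
#include <cstdlib>
#include <memory>

#include "knowhere/adapter/Dataset.h"  // assumed include path, as above

// Hypothetical producer mirroring the pattern in this diff: fill malloc'd
// buffers, wrap the raw pointers in a Dataset, and leave deallocation to the
// consumer (VecIndexImpl::Search above copies the data out, then free()s both).
knowhere::DatasetPtr
MakeResult(int64_t nq, int64_t k) {
    int64_t elems = nq * k;
    auto res_ids = (int64_t*)malloc(sizeof(int64_t) * elems);  // freed by the consumer
    auto res_dis = (float*)malloc(sizeof(float) * elems);      // freed by the consumer
    // ... run the index search here to fill res_ids / res_dis ...
    return std::make_shared<knowhere::Dataset>((void*)res_ids, (void*)res_dis);
}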
|
||||
|
|
|
@ -110,12 +110,18 @@ set(unittest_libs
pthread
metrics
gfortran
prometheus-cpp-pull
prometheus-cpp-push
prometheus-cpp-core
dl
z
)
if (MILVUS_WITH_PROMETHEUS)
set(unittest_libs ${unittest_libs}
prometheus-cpp-push
prometheus-cpp-pull
prometheus-cpp-core
)
endif ()
set(unittest_libs ${unittest_libs}
dl
z
)

if (MILVUS_GPU_VERSION)
include_directories("${CUDA_INCLUDE_DIRS}")

@ -135,4 +141,4 @@ add_subdirectory(db)
add_subdirectory(wrapper)
add_subdirectory(metrics)
add_subdirectory(scheduler)
add_subdirectory(server)
add_subdirectory(server)

@ -18,10 +18,15 @@
#-------------------------------------------------------------------------------

set(test_files
${CMAKE_CURRENT_SOURCE_DIR}/test_metricbase.cpp
${CMAKE_CURRENT_SOURCE_DIR}/test_metrics.cpp
${CMAKE_CURRENT_SOURCE_DIR}/test_prometheus.cpp
${CMAKE_CURRENT_SOURCE_DIR}/utils.cpp)
test_metricbase.cpp
test_metrics.cpp
utils.cpp
)

if (MILVUS_WITH_PROMETHEUS)
set(test_files ${test_files}
test_prometheus.cpp)
endif ()

add_executable(test_metrics
${common_files}

@ -15,8 +15,8 @@
// specific language governing permissions and limitations
// under the License.

#include "metrics/PrometheusMetrics.h"
#include "server/Config.h"
#include "metrics/prometheus/PrometheusMetrics.h"

#include <gtest/gtest.h>
#include <iostream>

@ -0,0 +1,29 @@
FROM ubuntu:16.04

ENV DEBIAN_FRONTEND noninteractive

RUN apt-get update && apt-get install -y --no-install-recommends wget ca-certificates gnupg2 apt-transport-https && \
wget -P /tmp https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
apt-key add /tmp/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \
wget -qO- "https://cmake.org/files/v3.14/cmake-3.14.3-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local && \
apt-get update && apt-get install -y --no-install-recommends \
g++ git gfortran lsb-core \
libboost-serialization-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev \
curl libtool automake libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
clang-format-6.0 clang-tidy-6.0 \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*

RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so \
/usr/lib/x86_64-linux-gnu/libmysqlclient_r.so

RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'

COPY docker-entrypoint.sh /app/docker-entrypoint.sh

WORKDIR /opt/milvus

ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
CMD [ "start" ]

@ -0,0 +1,29 @@
FROM ubuntu:18.04

ENV DEBIAN_FRONTEND noninteractive

RUN apt-get update && apt-get install -y --no-install-recommends wget ca-certificates gnupg2 && \
wget -P /tmp https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
apt-key add /tmp/GPG-PUB-KEY-INTEL-SW-PRODUCTS-2019.PUB && \
sh -c 'echo deb https://apt.repos.intel.com/mkl all main > /etc/apt/sources.list.d/intel-mkl.list' && \
wget -qO- "https://cmake.org/files/v3.14/cmake-3.14.3-Linux-x86_64.tar.gz" | tar --strip-components=1 -xz -C /usr/local && \
apt-get update && apt-get install -y --no-install-recommends \
g++ git gfortran lsb-core \
libboost-serialization-dev libboost-filesystem-dev libboost-system-dev libboost-regex-dev \
curl libtool automake libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
clang-format-6.0 clang-tidy-6.0 \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*

RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so \
/usr/lib/x86_64-linux-gnu/libmysqlclient_r.so

RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'

COPY docker-entrypoint.sh /app/docker-entrypoint.sh

WORKDIR /opt/milvus

ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
CMD [ "start" ]

@ -11,15 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends wget && \
git flex bison gfortran lsb-core \
curl libtool automake libboost1.58-all-dev libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
clang-format-6.0 clang-tidy-6.0 \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.4-243 intel-mkl-core-2019.4-243 && \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*

RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so

RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.4.243/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'

COPY docker-entrypoint.sh /app/docker-entrypoint.sh

WORKDIR /opt/milvus

ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
CMD [ "start" ]

@ -0,0 +1,10 @@
#!/bin/bash

set -e

if [ "$1" = 'start' ]; then
tail -f /dev/null
fi

exec "$@"

@ -11,15 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends wget && \
git flex bison gfortran lsb-core \
curl libtool automake libboost-all-dev libssl-dev pkg-config libcurl4-openssl-dev python3-pip \
clang-format-6.0 clang-tidy-6.0 \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.4-243 intel-mkl-core-2019.4-243 && \
lcov mysql-client libmysqlclient-dev intel-mkl-gnu-2019.5-281 intel-mkl-core-2019.5-281 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*

RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so

RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.4.243/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'
RUN sh -c 'echo export LD_LIBRARY_PATH=/opt/intel/compilers_and_libraries_2019.5.281/linux/mkl/lib/intel64:\$LD_LIBRARY_PATH > /etc/profile.d/mkl.sh'

COPY docker-entrypoint.sh /app/docker-entrypoint.sh

WORKDIR /opt/milvus

ENTRYPOINT [ "/app/docker-entrypoint.sh" ]
CMD [ "start" ]

@ -0,0 +1,10 @@
#!/bin/bash

set -e

if [ "$1" = 'start' ]; then
tail -f /dev/null
fi

exec "$@"

@ -0,0 +1,20 @@
FROM ubuntu:16.04

RUN apt-get update && apt-get install -y --no-install-recommends \
gfortran libsqlite3-dev libmysqlclient-dev libcurl4-openssl-dev python3 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*

RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so

COPY ./docker-entrypoint.sh /opt
COPY ./milvus /opt/milvus
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"

WORKDIR /opt/milvus

ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]

CMD [ "start" ]

EXPOSE 19530

@ -7,4 +7,3 @@ if [ "$1" == 'start' ]; then
fi

exec "$@"

@ -0,0 +1,21 @@
FROM ubuntu:18.04

RUN apt-get update && apt-get install -y --no-install-recommends \
gfortran libsqlite3-dev libmysqlclient-dev libcurl4-openssl-dev python3 && \
apt-get remove --purge -y && \
rm -rf /var/lib/apt/lists/*

RUN ln -s /usr/lib/x86_64-linux-gnu/libmysqlclient.so /usr/lib/x86_64-linux-gnu/libmysqlclient_r.so

COPY ./docker-entrypoint.sh /opt
COPY ./milvus /opt/milvus
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"

WORKDIR /opt/milvus

ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]

CMD [ "start" ]

EXPOSE 19530

@ -7,4 +7,3 @@ if [ "$1" == 'start' ]; then
fi

exec "$@"

@ -15,6 +15,8 @@ COPY ./docker-entrypoint.sh /opt
COPY ./milvus /opt/milvus
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"

WORKDIR /opt/milvus

ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]

CMD [ "start" ]

@ -0,0 +1,9 @@
#!/bin/bash

set -e

if [ "$1" == 'start' ]; then
cd /opt/milvus/scripts && ./start_server.sh
fi

exec "$@"

@ -15,6 +15,8 @@ COPY ./docker-entrypoint.sh /opt
COPY ./milvus /opt/milvus
ENV LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/opt/milvus/lib"

WORKDIR /opt/milvus

ENTRYPOINT [ "/opt/docker-entrypoint.sh" ]

CMD [ "start" ]

@ -0,0 +1,9 @@
#!/bin/bash

set -e

if [ "$1" == 'start' ]; then
cd /opt/milvus/scripts && ./start_server.sh
fi

exec "$@"

install.md
@ -29,10 +29,15 @@ $ ./build.sh -t Release
```

By default, it will build the CPU version. To build the GPU version, add the `-g` option:
```
```shell
$ ./build.sh -g
```

To see the complete list of build options, run:
```shell
$ ./build.sh -h
```

When the build completes, everything you need to run Milvus is installed under `[Milvus root path]/core/milvus`.

## Launch Milvus server

@ -43,13 +48,13 @@ $ cd [Milvus root path]/core/milvus

Add the `lib/` directory to `LD_LIBRARY_PATH`:

```
```shell
$ export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:[Milvus root path]/core/milvus/lib
```

Then start the Milvus server:

```
```shell
$ cd scripts
$ ./start_server.sh
```

@ -65,7 +70,7 @@ $ ./stop_server.sh
`protocol https not supported or disabled in libcurl`.
First, make sure you have `libcurl4-openssl-dev` installed on your system.
Then try reinstalling CMake from source with the `--system-curl` option:
```
```shell
$ ./bootstrap --system-curl
$ make
$ sudo make install

@ -99,7 +99,7 @@
<dependency>
<groupId>io.milvus</groupId>
<artifactId>milvus-sdk-java</artifactId>
<version>0.2.0-SNAPSHOT</version>
<version>0.3.0</version>
</dependency>

<!-- <dependency>-->

@ -134,4 +134,4 @@

</dependencies>

</project>
</project>

@ -1,7 +1,7 @@
import logging
import pytest

__version__ = '0.5.1'
__version__ = '0.6.0'


class TestPing:
