From 07a127f3f2222f1d20ac59025c642be93db4e718 Mon Sep 17 00:00:00 2001 From: quicksilver Date: Tue, 18 Aug 2020 16:03:36 +0800 Subject: [PATCH] Developing a modular pipeline library to improve DevOps collaboration (#3282) * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Developing a modular pipeline library to improve DevOps collaboration Signed-off-by: quicksilver * Update code checker rule Signed-off-by: quicksilver * Cleanup Helm Charts Signed-off-by: quicksilver * Cleanup Helm Charts Signed-off-by: quicksilver * always enable fiu to keeping behavior Signed-off-by: quicksilver --- .codacy.yaml | 2 + .gitignore | 1 - .jenkins/modules/Build/MilvusBuild.groovy | 18 + .../modules/Build/PackageBuild.groovy | 0 .../modules/Coverage/Coverage.groovy | 9 +- .../DevTest/CleanupMishardsDevTest.groovy | 9 + .../DevTest/CleanupSingleNodeDevTest.groovy | 0 .../modules/DevTest/MishardsDevTest.groovy | 40 ++ .../modules/DevTest/SingleNodeDevTest.groovy | 16 +- .../modules/Publish/Publish.groovy | 23 +- .../modules/Unittest/Unittest.groovy | 0 .jenkinsignore | 9 - ci/jenkins/Jenkinsfile | 33 +- ci/jenkins/scripts/mail.py | 36 -- ci/jenkins/scripts/requirements.txt | 2 - ci/jenkins/scripts/yaml_processor.py | 536 ------------------ ci/jenkins/step/build.groovy | 24 - ci/jenkins/step/cleanupShardsDevTest.groovy | 6 - ci/jenkins/step/shardsDevTest.groovy | 37 -- 19 files changed, 93 insertions(+), 708 deletions(-) create mode 100644 .jenkins/modules/Build/MilvusBuild.groovy rename ci/jenkins/step/package.groovy => .jenkins/modules/Build/PackageBuild.groovy (100%) rename ci/jenkins/step/coverage.groovy => .jenkins/modules/Coverage/Coverage.groovy (82%) create mode 100644 .jenkins/modules/DevTest/CleanupMishardsDevTest.groovy rename ci/jenkins/step/cleanupSingleDevTest.groovy => .jenkins/modules/DevTest/CleanupSingleNodeDevTest.groovy (100%) create mode 100644 .jenkins/modules/DevTest/MishardsDevTest.groovy rename ci/jenkins/step/singleDevTest.groovy => .jenkins/modules/DevTest/SingleNodeDevTest.groovy (90%) rename ci/jenkins/step/publishImages.groovy => .jenkins/modules/Publish/Publish.groovy (69%) rename ci/jenkins/step/unittest.groovy => .jenkins/modules/Unittest/Unittest.groovy (100%) delete mode 100644 .jenkinsignore delete mode 100644 ci/jenkins/scripts/mail.py delete mode 100644 ci/jenkins/scripts/requirements.txt delete mode 100755 ci/jenkins/scripts/yaml_processor.py delete mode 100644 ci/jenkins/step/build.groovy delete mode 100644 ci/jenkins/step/cleanupShardsDevTest.groovy delete mode 100644 ci/jenkins/step/shardsDevTest.groovy diff --git a/.codacy.yaml b/.codacy.yaml index 4ff91d1096..f6ccef0791 100644 --- a/.codacy.yaml +++ b/.codacy.yaml @@ -11,3 +11,5 @@ exclude_paths: - 'docker/**' - 'docs/**' - 'tests/**' + - '.github/**' + - '.jenkins/**' diff --git 
a/.gitignore b/.gitignore index efc7bc816f..2462c45fa0 100644 --- a/.gitignore +++ b/.gitignore @@ -14,7 +14,6 @@ __pycache__ # vscode generated files .vscode -build cmake-build-debug cmake-build-release cmake_build diff --git a/.jenkins/modules/Build/MilvusBuild.groovy b/.jenkins/modules/Build/MilvusBuild.groovy new file mode 100644 index 0000000000..ef524adbe5 --- /dev/null +++ b/.jenkins/modules/Build/MilvusBuild.groovy @@ -0,0 +1,18 @@ +timeout(time: 60, unit: 'MINUTES') { + dir ("ci/scripts") { + def isTimeTriggeredBuild = currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0 + if (!isTimeTriggeredBuild) { + sh "./check_ccache.sh -l ${params.JFROG_ARTFACTORY_URL}/ccache || echo \"ccache files not found!\"" + } + + if ("${BINARY_VERSION}" == "gpu") { + sh "/bin/bash --login -c \". ./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -j4 -i ${env.MILVUS_INSTALL_PREFIX} --coverage -l -g -u\"" + } else { + sh "/bin/bash --login -c \". ./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -j4 -i ${env.MILVUS_INSTALL_PREFIX} --coverage -l -u\"" + } + + withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) { + sh "./update_ccache.sh -l ${params.JFROG_ARTFACTORY_URL}/ccache -u ${USERNAME} -p ${PASSWORD}" + } + } +} diff --git a/ci/jenkins/step/package.groovy b/.jenkins/modules/Build/PackageBuild.groovy similarity index 100% rename from ci/jenkins/step/package.groovy rename to .jenkins/modules/Build/PackageBuild.groovy diff --git a/ci/jenkins/step/coverage.groovy b/.jenkins/modules/Coverage/Coverage.groovy similarity index 82% rename from ci/jenkins/step/coverage.groovy rename to .jenkins/modules/Coverage/Coverage.groovy index 1bf6475783..218f412c86 100644 --- a/ci/jenkins/step/coverage.groovy +++ b/.jenkins/modules/Coverage/Coverage.groovy @@ -1,8 +1,9 @@ -timeout(time: 30, unit: 'MINUTES') { +timeout(time: 10, unit: 'MINUTES') { dir ("ci/scripts") { sh ". ./before-install.sh && ./coverage.sh" String formatFlag = "${BINARY_VERSION}-version-${OS_NAME}-unittest".replaceAll("\\.", "_").replaceAll("-", "_") - if (isTimeTriggeredBuild()) { + def isTimeTriggeredBuild = currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0 + if (isTimeTriggeredBuild) { withCredentials([[$class: 'StringBinding', credentialsId: "milvus-ci-codecov-token", variable: 'CODECOV_TOKEN']]) { sh "curl -s https://codecov.io/bash | bash -s - -f output_new.info -U \"--proxy http://proxy.zilliz.tech:1088\" -A \"--proxy http://proxy.zilliz.tech:1088\" -n ${BINARY_VERSION}-version-${OS_NAME}-unittest -F nightly -F ${formatFlag} || echo \"Codecov did not collect coverage reports\"" } @@ -13,7 +14,3 @@ timeout(time: 30, unit: 'MINUTES') { } } } - -boolean isTimeTriggeredBuild() { - return (currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0) ? 
true : false; -} diff --git a/.jenkins/modules/DevTest/CleanupMishardsDevTest.groovy b/.jenkins/modules/DevTest/CleanupMishardsDevTest.groovy new file mode 100644 index 0000000000..1403c29581 --- /dev/null +++ b/.jenkins/modules/DevTest/CleanupMishardsDevTest.groovy @@ -0,0 +1,9 @@ +def isTimeTriggeredBuild = currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0 +if (isTimeTriggeredBuild || "${params.IS_MANUAL_TRIGGER_TYPE}" == "True") { + retry(3) { + def helmResult = sh script: "helm status -n milvus ${env.SHARDS_HELM_RELEASE_NAME}", returnStatus: true + if (!helmResult) { + sh "helm uninstall -n milvus ${env.SHARDS_HELM_RELEASE_NAME}" + } + } +} diff --git a/ci/jenkins/step/cleanupSingleDevTest.groovy b/.jenkins/modules/DevTest/CleanupSingleNodeDevTest.groovy similarity index 100% rename from ci/jenkins/step/cleanupSingleDevTest.groovy rename to .jenkins/modules/DevTest/CleanupSingleNodeDevTest.groovy diff --git a/.jenkins/modules/DevTest/MishardsDevTest.groovy b/.jenkins/modules/DevTest/MishardsDevTest.groovy new file mode 100644 index 0000000000..3b2d66747a --- /dev/null +++ b/.jenkins/modules/DevTest/MishardsDevTest.groovy @@ -0,0 +1,40 @@ +def isTimeTriggeredBuild = currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0 +if (isTimeTriggeredBuild || "${params.IS_MANUAL_TRIGGER_TYPE}" == "True") { + timeout(time: 180, unit: 'MINUTES') { + sh "mkdir -p ${env.DEV_TEST_ARTIFACTS}" + + dir ('milvus-helm') { + sh 'helm version' + sh 'helm repo add stable https://kubernetes.oss-cn-hangzhou.aliyuncs.com/charts' + sh 'helm repo update' + checkout([$class: 'GitSCM', branches: [[name: "${env.HELM_BRANCH}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/${env.HELM_BRANCH}:refs/remotes/origin/${env.HELM_BRANCH}"]]]) + // sh 'helm dep update' + + retry(3) { + try { + dir ('charts/milvus') { + if ("${BINARY_VERSION}" == "CPU") { + sh "helm install --wait --timeout 300s --set cluster.enabled=true --set persistence.enabled=true --set image.repository=registry.zilliz.com/milvus/engine --set mishards.image.tag=test --set mishards.image.pullPolicy=Always --set image.tag=${DOCKER_VERSION} --set image.pullPolicy=Always --set service.type=ClusterIP --set image.resources.requests.memory=8Gi --set image.resources.requests.cpu=2.0 --set image.resources.limits.memory=12Gi --set image.resources.limits.cpu=4.0 -f ci/db_backend/mysql_${BINARY_VERSION}_values.yaml --namespace milvus ${env.SHARDS_HELM_RELEASE_NAME} ." + } else { + sh "helm install --wait --timeout 300s --set cluster.enabled=true --set persistence.enabled=true --set image.repository=registry.zilliz.com/milvus/engine --set mishards.image.tag=test --set mishards.image.pullPolicy=Always --set gpu.enabled=true --set image.tag=${DOCKER_VERSION} --set image.pullPolicy=Always --set service.type=ClusterIP -f ci/db_backend/mysql_${BINARY_VERSION}_values.yaml --namespace milvus ${env.SHARDS_HELM_RELEASE_NAME} ." 
+ } + } + } catch (exc) { + def helmStatusCMD = "helm get manifest --namespace milvus ${env.SHARDS_HELM_RELEASE_NAME} | kubectl describe -n milvus -f - && \ + kubectl logs --namespace milvus -l \"app.kubernetes.io/name=milvus,app.kubernetes.io/instance=${env.SHARDS_HELM_RELEASE_NAME},component=writable\" -c milvus && \ + kubectl logs --namespace milvus -l \"app.kubernetes.io/name=milvus,app.kubernetes.io/instance=${env.SHARDS_HELM_RELEASE_NAME},component=readonly\" -c milvus && \ + kubectl logs --namespace milvus -l \"app.kubernetes.io/name=milvus,app.kubernetes.io/instance=${env.SHARDS_HELM_RELEASE_NAME},component=mishards\" && \ + helm status -n milvus ${env.SHARDS_HELM_RELEASE_NAME}" + sh script: helmStatusCMD, returnStatus: true + sh script: "helm uninstall -n milvus ${env.SHARDS_HELM_RELEASE_NAME} && sleep 1m", returnStatus: true + throw exc + } + } + } + + dir ("tests/milvus_python_test") { + sh 'python3 -m pip install -r requirements.txt' + sh "pytest . --level=2 --alluredir=\"test_out/dev/shards/\" --ip ${env.SHARDS_HELM_RELEASE_NAME}.milvus.svc.cluster.local >> ${WORKSPACE}/${env.DEV_TEST_ARTIFACTS}/milvus_${BINARY_VERSION}_shards_dev_test.log" + } + } +} diff --git a/ci/jenkins/step/singleDevTest.groovy b/.jenkins/modules/DevTest/SingleNodeDevTest.groovy similarity index 90% rename from ci/jenkins/step/singleDevTest.groovy rename to .jenkins/modules/DevTest/SingleNodeDevTest.groovy index 7fc12a5b03..06a52bf376 100644 --- a/ci/jenkins/step/singleDevTest.groovy +++ b/.jenkins/modules/DevTest/SingleNodeDevTest.groovy @@ -24,19 +24,21 @@ timeout(time: 120, unit: 'MINUTES') { } } + def isTimeTriggeredBuild = currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0 + dir ("tests/milvus_python_test") { // sh 'python3 -m pip install -r requirements.txt -i http://pypi.douban.com/simple --trusted-host pypi.douban.com' sh 'python3 -m pip install -r requirements.txt' - if (isTimeTriggeredBuild()) { + if (isTimeTriggeredBuild) { sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --level=2 --ip ${env.HELM_RELEASE_NAME}.milvus.svc.cluster.local --service ${env.HELM_RELEASE_NAME} >> ${WORKSPACE}/${env.DEV_TEST_ARTIFACTS}/milvus_${BINARY_VERSION}_mysql_dev_test.log" } else { sh "pytest . --alluredir=\"test_out/dev/single/mysql\" --level=1 --ip ${env.HELM_RELEASE_NAME}.milvus.svc.cluster.local --service ${env.HELM_RELEASE_NAME} >> ${WORKSPACE}/${env.DEV_TEST_ARTIFACTS}/milvus_${BINARY_VERSION}_mysql_dev_test.log" } } - if (isTimeTriggeredBuild()) { + if (isTimeTriggeredBuild) { // sqlite database backend test - load "ci/jenkins/step/cleanupSingleDev.groovy" + MPLModule('Cleanup Single Node DevTest') retry(3) { try { @@ -48,9 +50,7 @@ timeout(time: 120, unit: 'MINUTES') { kubectl logs --namespace milvus -l \"app=milvus,release=${env.HELM_RELEASE_NAME}\" -c milvus && \ helm status -n milvus ${env.HELM_RELEASE_NAME}" def helmResult = sh script: helmStatusCMD, returnStatus: true - if (!helmResult) { - sh "helm uninstall -n milvus ${env.HELM_RELEASE_NAME} && sleep 1m" - } + sh script: "helm uninstall -n milvus ${env.HELM_RELEASE_NAME} && sleep 1m", returnStatus: true throw exc } } @@ -60,7 +60,3 @@ timeout(time: 120, unit: 'MINUTES') { } } } - -boolean isTimeTriggeredBuild() { - return (currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0) ? 
true : false; -} diff --git a/ci/jenkins/step/publishImages.groovy b/.jenkins/modules/Publish/Publish.groovy similarity index 69% rename from ci/jenkins/step/publishImages.groovy rename to .jenkins/modules/Publish/Publish.groovy index 6b3aac7cd8..f8f055b465 100644 --- a/ci/jenkins/step/publishImages.groovy +++ b/.jenkins/modules/Publish/Publish.groovy @@ -27,26 +27,11 @@ dir ("docker/deploy") { } catch (exc) { throw exc } finally { - deleteImages("${sourceImage}", true) + def isExistImage = sh(returnStatus: true, script: "docker inspect --type=image ${sourceImage} 2>&1 > /dev/null") + if (isExistImage == 0) { + sh(returnStatus: true, script: "docker rmi -f \$(docker inspect --type=image --format \"{{.ID}}\" ${sourceImage})") + } sh "docker-compose down --rmi all" sh(returnStatus: true, script: "docker rmi -f \$(docker images | grep '' | awk '{print \$3}')") } } - -boolean deleteImages(String imageName, boolean force) { - def imageNameStr = imageName.trim() - def isExistImage = sh(returnStatus: true, script: "docker inspect --type=image ${imageNameStr} 2>&1 > /dev/null") - if (isExistImage == 0) { - def deleteImageStatus = 0 - if (force) { - deleteImageStatus = sh(returnStatus: true, script: "docker rmi -f \$(docker inspect --type=image --format \"{{.ID}}\" ${imageNameStr})") - } else { - deleteImageStatus = sh(returnStatus: true, script: "docker rmi ${imageNameStr}") - } - - if (deleteImageStatus != 0) { - return false - } - } - return true -} diff --git a/ci/jenkins/step/unittest.groovy b/.jenkins/modules/Unittest/Unittest.groovy similarity index 100% rename from ci/jenkins/step/unittest.groovy rename to .jenkins/modules/Unittest/Unittest.groovy diff --git a/.jenkinsignore b/.jenkinsignore deleted file mode 100644 index 2a3991e960..0000000000 --- a/.jenkinsignore +++ /dev/null @@ -1,9 +0,0 @@ -\.* -*.yml -*.md -**/*.md -.github/ -.mergify -ci/docker/** -docs/** -shards/** diff --git a/ci/jenkins/Jenkinsfile b/ci/jenkins/Jenkinsfile index d0f43b12e9..7938fabf55 100644 --- a/ci/jenkins/Jenkinsfile +++ b/ci/jenkins/Jenkinsfile @@ -1,4 +1,5 @@ #!/usr/bin/env groovy +@Library('mpl') _ String cron_timezone = "TZ=Asia/Shanghai" String cron_string = BRANCH_NAME == "master" ? 
"50 22 * * * " : "" @@ -63,12 +64,10 @@ pipeline { stage('Build and Unittest') { steps { container("milvus-${BINARY_VERSION}-build-env") { - script { - load "${env.WORKSPACE}/ci/jenkins/step/build.groovy" - load "${env.WORKSPACE}/ci/jenkins/step/unittest.groovy" - load "${env.WORKSPACE}/ci/jenkins/step/coverage.groovy" - load "${env.WORKSPACE}/ci/jenkins/step/package.groovy" - } + MPLModule('Milvus Build') + MPLModule('Unittest') + MPLModule('Coverage') + MPLModule('Package Build') } } } @@ -111,9 +110,7 @@ pipeline { stage('Publish') { steps { container('publish-images') { - script { - load "${env.WORKSPACE}/ci/jenkins/step/publishImages.groovy" - } + MPLModule('Publish') } } } @@ -157,12 +154,8 @@ pipeline { stage('Test') { steps { container('milvus-test-env') { - script { - load "${env.WORKSPACE}/ci/jenkins/step/singleDevTest.groovy" - if (isTimeTriggeredBuild() || "${params.IS_MANUAL_TRIGGER_TYPE}" == "True") { - load "${env.WORKSPACE}/ci/jenkins/step/shardsDevTest.groovy" - } - } + MPLModule('Single Node DevTest') + MPLModule('Mishards DevTest') } } } @@ -170,13 +163,9 @@ pipeline { post { cleanup { container('milvus-test-env') { - script { - archiveArtifacts artifacts: "${env.DEV_TEST_ARTIFACTS}/**", allowEmptyArchive: true - load "${env.WORKSPACE}/ci/jenkins/step/cleanupSingleDevTest.groovy" - if (isTimeTriggeredBuild() || "${params.IS_MANUAL_TRIGGER_TYPE}" == "True") { - load "${env.WORKSPACE}/ci/jenkins/step/cleanupShardsDevTest.groovy" - } - } + archiveArtifacts artifacts: "${env.DEV_TEST_ARTIFACTS}/**", allowEmptyArchive: true + MPLModule('Cleanup Single Node DevTest') + MPLModule('Cleanup Mishards DevTest') } } } diff --git a/ci/jenkins/scripts/mail.py b/ci/jenkins/scripts/mail.py deleted file mode 100644 index 960a189833..0000000000 --- a/ci/jenkins/scripts/mail.py +++ /dev/null @@ -1,36 +0,0 @@ -# -*- coding: utf-8 -*- -import sys -import logging -from email.mime.text import MIMEText -from email.header import Header -import smtplib - -SMS_DEFAULT_TO_LIST = [ - "dev.milvus@zilliz.com", -] - -def send_email(subject, content, token, receivers=None): - sender = 'test@zilliz.com' - message = MIMEText(content, 'html', 'utf-8') - message['From'] = Header("Daily Test") - message['To'] = Header("dev.milvus") - message['Subject'] = Header(subject, 'utf-8') - try: - smtp_obj = smtplib.SMTP('smtp.exmail.qq.com') - if receivers is None: - receivers = SMS_DEFAULT_TO_LIST - smtp_obj.login(sender, token) - result = smtp_obj.sendmail(sender, receivers, message.as_string()) - except smtplib.SMTPException as e: - logging.error(str(e)) - finally: - smtp_obj.quit() - - -if __name__ == "__main__": - if len(sys.argv) != 4: - sys.exit() - subject = sys.argv[1] - content = sys.argv[2] - token = sys.argv[3] - send_email(subject, content, token) diff --git a/ci/jenkins/scripts/requirements.txt b/ci/jenkins/scripts/requirements.txt deleted file mode 100644 index 2f81afcfad..0000000000 --- a/ci/jenkins/scripts/requirements.txt +++ /dev/null @@ -1,2 +0,0 @@ -ruamel.yaml==0.16.5 -ruamel.yaml.clib==0.2.0 diff --git a/ci/jenkins/scripts/yaml_processor.py b/ci/jenkins/scripts/yaml_processor.py deleted file mode 100755 index 0e6d7dbbf4..0000000000 --- a/ci/jenkins/scripts/yaml_processor.py +++ /dev/null @@ -1,536 +0,0 @@ -#!/usr/bin/env python3 - -import sys -import argparse -from argparse import Namespace -import os, shutil -import getopt -from ruamel.yaml import YAML, yaml_object -from ruamel.yaml.comments import CommentedSeq, CommentedMap -from ruamel.yaml.tokens import CommentToken - -## -yaml = 
YAML(typ="rt") -## format yaml file -yaml.indent(mapping=2, sequence=4, offset=2) - - -############################################ -# Comment operation -# -############################################ -def _extract_comment(_comment): - """ - remove '#' at start of comment - """ - # if _comment is empty, do nothing - if not _comment: - return _comment - - # str_ = _comment.lstrip(" ") - str_ = _comment.strip() - str_ = str_.lstrip("#") - - return str_ - - -def _add_eol_comment(element, *args, **kwargs): - """ - add_eol_comment - args --> (comment, key) - """ - if element is None or \ - (not isinstance(element, CommentedMap) and - not isinstance(element, CommentedSeq)) or \ - args[0] is None or \ - len(args[0]) == 0: - return - - comment = args[0] - # comment is empty, do nothing - if not comment: - return - - key = args[1] - try: - element.yaml_add_eol_comment(*args, **kwargs) - except Exception: - element.ca.items.pop(key, None) - element.yaml_add_eol_comment(*args, **kwargs) - - -def _map_comment(_element, _key): - origin_comment = "" - token = _element.ca.items.get(_key, None) - if token is not None: - try: - origin_comment = token[2].value - except Exception: - try: - # comment is below element, add profix "#\n" - col = _element.lc.col + 2 - space_list = [" " for i in range(col)] - space_str = "".join(space_list) - - origin_comment = "\n" + "".join([space_str + t.value for t in token[3]]) - except Exception: - pass - - return origin_comment - - -def _seq_comment(_element, _index): - # get target comment - _comment = "" - token = _element.ca.items.get(_index, None) - if token is not None: - _comment = token[0].value - - return _comment - - -def _start_comment(_element): - _comment = "" - cmt = _element.ca.comment - try: - _comment = cmt[1][0].value - except Exception: - pass - - return _comment - - -def _comment_counter(_comment): - """ - - counter comment tips and split into list - """ - - x = lambda l: l.strip().strip("#").strip() - - _counter = [] - if _comment.startswith("\n"): - _counter.append("") - _counter.append(x(_comment[1:])) - - return _counter - elif _comment.startswith("#\n"): - _counter.append("") - _counter.append(x(_comment[2:])) - else: - index = _comment.find("\n") - _counter.append(x(_comment[:index])) - _counter.append(x(_comment[index + 1:])) - - return _counter - - -def _obtain_comment(_m_comment, _t_comment): - if not _m_comment or not _t_comment: - return _m_comment or _t_comment - - _m_counter = _comment_counter(_m_comment) - _t_counter = _comment_counter(_t_comment) - - if not _m_counter[0] and not _t_counter[1]: - comment = _t_comment + _m_comment - elif not _m_counter[1] and not _t_counter[0]: - comment = _m_comment + _t_comment - elif _t_counter[0] and _t_counter[1]: - comment = _t_comment - elif not _t_counter[0] and not _t_counter[1]: - comment = _m_comment - elif not _m_counter[0] and not _m_counter[1]: - comment = _t_comment - else: - if _t_counter[0]: - comment = _m_comment.replace(_m_counter[0], _t_counter[0], 1) - else: - comment = _m_comment.replace(_m_counter[1], _t_counter[1], 1) - - i = comment.find("\n\n") - while i >= 0: - comment = comment.replace("\n\n\n", "\n\n", 1) - i = comment.find("\n\n\n") - - return comment - - -############################################ -# Utils -# -############################################ -def _get_update_par(_args): - _dict = _args.__dict__ - - # file path - _in_file = _dict.get("f", None) or _dict.get("file", None) - # tips - _tips = _dict.get('tips', None) or "Input \"-h\" for more information" - # update - 
_u = _dict.get("u", None) or _dict.get("update", None) - # apppend - _a = _dict.get('a', None) or _dict.get('append', None) - # out stream group - _i = _dict.get("i", None) or _dict.get("inplace", None) - _o = _dict.get("o", None) or _dict.get("out_file", None) - - return _in_file, _u, _a, _i, _o, _tips - - -############################################ -# Element operation -# -############################################ -def update_map_element(element, key, value, comment, _type): - """ - element: - key: - value: - comment: - _type: value type. - """ - if element is None or not isinstance(element, CommentedMap): - print("Only key-value update support") - sys.exit(1) - - origin_comment = _map_comment(element, key) - - sub_element = element.get(key, None) - if isinstance(sub_element, CommentedMap) or isinstance(sub_element, CommentedSeq): - print("Only support update a single value") - - element.update({key: value}) - - comment = _obtain_comment(origin_comment, comment) - _add_eol_comment(element, _extract_comment(comment), key) - - -def update_seq_element(element, value, comment, _type): - if element is None or not isinstance(element, CommentedSeq): - print("Param `-a` only use to append yaml list") - sys.exit(1) - element.append(str(value)) - - comment = _obtain_comment("", comment) - _add_eol_comment(element, _extract_comment(comment), len(element) - 1) - - -def run_update(code, keys, value, comment, _app): - key_list = keys.split(".") - - space_str = ":\n " - key_str = "{}".format(key_list[0]) - for key in key_list[1:]: - key_str = key_str + space_str + key - space_str = space_str + " " - if not _app: - yaml_str = """{}: {}""".format(key_str, value) - else: - yaml_str = "{}{}- {}".format(key_str, space_str, value) - - if comment: - yaml_str = "{} # {}".format(yaml_str, comment) - - mcode = yaml.load(yaml_str) - - _merge(code, mcode) - - -def _update(code, _update, _app, _tips): - if not _update: - return code - - _update_list = [l.strip() for l in _update.split(",")] - for l in _update_list: - try: - variant, comment = l.split("#") - except ValueError: - variant = l - comment = None - - try: - keys, value = variant.split("=") - run_update(code, keys, value, comment, _app) - except ValueError: - print("Invalid format. 
print command \"--help\" get more info.") - sys.exit(1) - - return code - - -def _backup(in_file_p): - backup_p = in_file_p + ".bak" - - if os.path.exists(backup_p): - os.remove(backup_p) - - if not os.path.exists(in_file_p): - print("File {} not exists.".format(in_file_p)) - sys.exit(1) - - shutil.copyfile(in_file_p, backup_p) # 复制文件 - - -def _recovery(in_file_p): - backup_p = in_file_p + ".bak" - - if not os.path.exists(in_file_p): - print("File {} not exists.".format(in_file_p)) - sys.exit(1) - elif not os.path.exists(backup_p): - print("Backup file not exists") - sys.exit(0) - - os.remove(in_file_p) - - os.rename(backup_p, in_file_p) - - -# master merge target -def _merge(master, target): - if type(master) != type(target): - print("yaml format not match:\n") - yaml.dump(master, sys.stdout) - print("\n&&\n") - yaml.dump(target, sys.stdout) - - sys.exit(1) - - ## item is a sequence - if isinstance(target, CommentedSeq): - for index in range(len(target)): - # get target comment - target_comment = _seq_comment(target, index) - - master_index = len(master) - - target_item = target[index] - - if isinstance(target_item, CommentedMap): - merge_flag = False - for idx in range(len(master)): - if isinstance(master[idx], CommentedMap): - if master[idx].keys() == target_item.keys(): - _merge(master[idx], target_item) - # nonlocal merge_flag - master_index = idx - merge_flag = True - break - - if merge_flag is False: - master.append(target_item) - elif target_item not in master: - master.append(target[index]) - else: - # merge(master[index], target[index]) - pass - - # # remove enter signal in previous item - previous_comment = _seq_comment(master, master_index - 1) - _add_eol_comment(master, _extract_comment(previous_comment), master_index - 1) - - origin_comment = _seq_comment(master, master_index) - comment = _obtain_comment(origin_comment, target_comment) - if len(comment) > 0: - _add_eol_comment(master, _extract_comment(comment) + "\n\n", len(master) - 1) - - ## item is a map - elif isinstance(target, CommentedMap): - for item in target: - if item == "flag": - print("") - origin_comment = _map_comment(master, item) - target_comment = _map_comment(target, item) - - # get origin start comment - origin_start_comment = _start_comment(master) - - # get target start comment - target_start_comment = _start_comment(target) - - m = master.get(item, default=None) - if m is None or \ - (not (isinstance(m, CommentedMap) or - isinstance(m, CommentedSeq))): - master.update({item: target[item]}) - - else: - _merge(master[item], target[item]) - - comment = _obtain_comment(origin_comment, target_comment) - if len(comment) > 0: - _add_eol_comment(master, _extract_comment(comment), item) - - start_comment = _obtain_comment(origin_start_comment, target_start_comment) - if len(start_comment) > 0: - master.yaml_set_start_comment(_extract_comment(start_comment)) - - -def _save(_code, _file): - with open(_file, 'w') as wf: - yaml.dump(_code, wf) - - -def _load(_file): - with open(_file, 'r') as rf: - code = yaml.load(rf) - return code - - -############################################ -# sub parser process operation -# -############################################ -def merge_yaml(_args): - _dict = _args.__dict__ - - _m_file = _dict.get("merge_file", None) - _in_file, _u, _a, _i, _o, _tips = _get_update_par(_args) - - if not (_in_file and _m_file): - print(_tips) - sys.exit(1) - - code = _load(_in_file) - mcode = _load(_m_file) - - _merge(code, mcode) - - _update(code, _u, _a, _tips) - - if _i: - _backup(_in_file) - 
_save(code, _in_file) - elif _o: - _save(code, _o) - else: - print(_tips) - sys.exit(1) - - -def update_yaml(_args): - _in_file, _u, _a, _i, _o, _tips = _get_update_par(_args) - - if not _in_file or not _u: - print(_tips) - sys.exit(1) - - code = _load(_in_file) - - if _i and _o: - print(_tips) - sys.exit(1) - - _update(code, _u, _a, _tips) - - if _i: - _backup(_in_file) - _save(code, _in_file) - elif _o: - _save(code, _o) - - -def reset(_args): - _dict = _args.__dict__ - _f = _dict.get('f', None) or _dict.get('file', None) - - if _f: - _recovery(_f) - else: - _t = _dict.get('tips', None) or "Input \"-h\" for more information" - print(_t) - - -############################################ -# Cli operation -# -############################################ -def _set_merge_parser(_parsers): - """ - config merge parser - """ - - merge_parser = _parsers.add_parser("merge", help="merge with another yaml file") - - _set_merge_parser_arg(merge_parser) - _set_update_parser_arg(merge_parser) - - merge_parser.set_defaults( - function=merge_yaml, - tips=merge_parser.format_help() - ) - - -def _set_merge_parser_arg(_parser): - """ - config parser argument for merging - """ - - _parser.add_argument("-m", "--merge-file", help="indicate merge yaml file") - - -def _set_update_parser(_parsers): - """ - config merge parser - """ - - update_parser = _parsers.add_parser("update", help="update with another yaml file") - _set_update_parser_arg(update_parser) - - update_parser.set_defaults( - function=update_yaml, - tips=update_parser.format_help() - ) - - -def _set_update_parser_arg(_parser): - """ - config parser argument for updating - """ - - _parser.add_argument("-f", "--file", help="source yaml file") - _parser.add_argument('-u', '--update', help="update with args, instance as \"a.b.c=d# d comment\"") - _parser.add_argument('-a', '--append', action="store_true", help="append to a seq") - - group = _parser.add_mutually_exclusive_group() - group.add_argument("-o", "--out-file", help="indicate output yaml file") - group.add_argument("-i", "--inplace", action="store_true", help="indicate whether result store in origin file") - - -def _set_reset_parser(_parsers): - """ - config merge parser - """ - - reset_parser = _parsers.add_parser("reset", help="reset yaml file") - - # indicate yaml file - reset_parser.add_argument('-f', '--file', help="indicate input yaml file") - - reset_parser.set_defaults( - function=reset, - tips=reset_parser.format_help() - ) - - -def main(): - parser = argparse.ArgumentParser() - sub_parsers = parser.add_subparsers() - - # set merge command - _set_merge_parser(sub_parsers) - - # set update command - _set_update_parser(sub_parsers) - - # set reset command - _set_reset_parser(sub_parsers) - - # parse argument and run func - args = parser.parse_args() - args.function(args) - - -if __name__ == '__main__': - main() diff --git a/ci/jenkins/step/build.groovy b/ci/jenkins/step/build.groovy deleted file mode 100644 index e051a0e297..0000000000 --- a/ci/jenkins/step/build.groovy +++ /dev/null @@ -1,24 +0,0 @@ -timeout(time: 120, unit: 'MINUTES') { - dir ("ci/scripts") { - withCredentials([usernamePassword(credentialsId: "${params.JFROG_CREDENTIALS_ID}", usernameVariable: 'USERNAME', passwordVariable: 'PASSWORD')]) { - if (!isTimeTriggeredBuild()) { - sh "./check_ccache.sh -l ${params.JFROG_ARTFACTORY_URL}/ccache || echo \"ccache files not found!\"" - } - - if ("${BINARY_VERSION}" == "gpu") { - if (isTimeTriggeredBuild()) { - sh "/bin/bash --login -c \". 
./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -j4 ${env.MILVUS_INSTALL_PREFIX} --with_fiu --coverage -l -g -u -s '-gencode=arch=compute_61,code=sm_61;-gencode=arch=compute_75,code=sm_75' \"" - } else { - sh "/bin/bash --login -c \". ./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -j4 ${env.MILVUS_INSTALL_PREFIX} --with_fiu --coverage -l -g -u\"" - } - } else { - sh "/bin/bash --login -c \". ./before-install.sh && ./build.sh -t ${params.BUILD_TYPE} -j4 ${env.MILVUS_INSTALL_PREFIX} --with_fiu --coverage -l -u\"" - } - sh "./update_ccache.sh -l ${params.JFROG_ARTFACTORY_URL}/ccache -u ${USERNAME} -p ${PASSWORD}" - } - } -} - -boolean isTimeTriggeredBuild() { - return (currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0) ? true : false; -} diff --git a/ci/jenkins/step/cleanupShardsDevTest.groovy b/ci/jenkins/step/cleanupShardsDevTest.groovy deleted file mode 100644 index eec6f8f9d7..0000000000 --- a/ci/jenkins/step/cleanupShardsDevTest.groovy +++ /dev/null @@ -1,6 +0,0 @@ -retry(3) { - def helmResult = sh script: "helm status -n milvus ${env.SHARDS_HELM_RELEASE_NAME}", returnStatus: true - if (!helmResult) { - sh "helm uninstall -n milvus ${env.SHARDS_HELM_RELEASE_NAME}" - } -} diff --git a/ci/jenkins/step/shardsDevTest.groovy b/ci/jenkins/step/shardsDevTest.groovy deleted file mode 100644 index 86712380a3..0000000000 --- a/ci/jenkins/step/shardsDevTest.groovy +++ /dev/null @@ -1,37 +0,0 @@ -timeout(time: 180, unit: 'MINUTES') { - sh "mkdir -p ${env.DEV_TEST_ARTIFACTS}" - - dir ('milvus-helm') { - sh 'helm version' - sh 'helm repo add stable https://kubernetes.oss-cn-hangzhou.aliyuncs.com/charts' - sh 'helm repo update' - checkout([$class: 'GitSCM', branches: [[name: "${env.HELM_BRANCH}"]], userRemoteConfigs: [[url: "https://github.com/milvus-io/milvus-helm.git", name: 'origin', refspec: "+refs/heads/${env.HELM_BRANCH}:refs/remotes/origin/${env.HELM_BRANCH}"]]]) - // sh 'helm dep update' - - retry(3) { - try { - dir ('charts/milvus') { - if ("${BINARY_VERSION}" == "CPU") { - sh "helm install --wait --timeout 300s --set cluster.enabled=true --set persistence.enabled=true --set image.repository=registry.zilliz.com/milvus/engine --set mishards.image.tag=test --set mishards.image.pullPolicy=Always --set image.tag=${DOCKER_VERSION} --set image.pullPolicy=Always --set service.type=ClusterIP --set image.resources.requests.memory=8Gi --set image.resources.requests.cpu=2.0 --set image.resources.limits.memory=12Gi --set image.resources.limits.cpu=4.0 -f ci/db_backend/mysql_${BINARY_VERSION}_values.yaml --namespace milvus ${env.SHARDS_HELM_RELEASE_NAME} ." - } else { - sh "helm install --wait --timeout 300s --set cluster.enabled=true --set persistence.enabled=true --set image.repository=registry.zilliz.com/milvus/engine --set mishards.image.tag=test --set mishards.image.pullPolicy=Always --set gpu.enabled=true --set image.tag=${DOCKER_VERSION} --set image.pullPolicy=Always --set service.type=ClusterIP -f ci/db_backend/mysql_${BINARY_VERSION}_values.yaml --namespace milvus ${env.SHARDS_HELM_RELEASE_NAME} ." 
- } - } - } catch (exc) { - def helmStatusCMD = "helm get manifest --namespace milvus ${env.SHARDS_HELM_RELEASE_NAME} | kubectl describe -n milvus -f - && \ - kubectl logs --namespace milvus -l \"app.kubernetes.io/name=milvus,app.kubernetes.io/instance=${env.SHARDS_HELM_RELEASE_NAME},component=writable\" -c milvus && \ - kubectl logs --namespace milvus -l \"app.kubernetes.io/name=milvus,app.kubernetes.io/instance=${env.SHARDS_HELM_RELEASE_NAME},component=readonly\" -c milvus && \ - kubectl logs --namespace milvus -l \"app.kubernetes.io/name=milvus,app.kubernetes.io/instance=${env.SHARDS_HELM_RELEASE_NAME},component=mishards\" && \ - helm status -n milvus ${env.SHARDS_HELM_RELEASE_NAME}" - sh script: helmStatusCMD, returnStatus: true - sh script: "helm uninstall -n milvus ${env.SHARDS_HELM_RELEASE_NAME} && sleep 1m", returnStatus: true - throw exc - } - } - } - - dir ("tests/milvus_python_test") { - sh 'python3 -m pip install -r requirements.txt' - sh "pytest . --level=2 --alluredir=\"test_out/dev/shards/\" --ip ${env.SHARDS_HELM_RELEASE_NAME}.milvus.svc.cluster.local >> ${WORKSPACE}/${env.DEV_TEST_ARTIFACTS}/milvus_${BINARY_VERSION}_shards_dev_test.log" - } -}
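
The patch above replaces the per-step `load "${env.WORKSPACE}/ci/jenkins/step/*.groovy"` calls with `MPLModule(...)` steps provided by the shared library imported via `@Library('mpl') _`, and moves the step scripts under `.jenkins/modules/`. Judging from the renames, a module name such as 'Milvus Build' resolves to `.jenkins/modules/Build/MilvusBuild.groovy` (a category folder plus the module name with spaces removed), and the standalone isTimeTriggeredBuild() helpers are dropped in favour of an inlined timer-trigger check in each module. Below is a minimal sketch of the resulting layout, assuming the Grid Dynamics Modular Pipeline Library conventions; the stage and module names mirror the patch, while the agent, flags, and command details are illustrative only, not the actual pipeline configuration:

    // ci/jenkins/Jenkinsfile (simplified sketch, not the full pipeline)
    @Library('mpl') _

    pipeline {
        agent any                          // illustrative; the real pipeline runs on Kubernetes pod agents
        stages {
            stage('Build and Unittest') {
                steps {
                    // Each call loads a project-level module, e.g.
                    // 'Milvus Build' -> .jenkins/modules/Build/MilvusBuild.groovy
                    MPLModule('Milvus Build')
                    MPLModule('Unittest')
                    MPLModule('Coverage')
                    MPLModule('Package Build')
                }
            }
        }
    }

    // .jenkins/modules/Build/MilvusBuild.groovy (simplified sketch)
    timeout(time: 60, unit: 'MINUTES') {
        dir('ci/scripts') {
            // Inlined replacement for the old isTimeTriggeredBuild() helper function
            def isTimeTriggeredBuild =
                currentBuild.getBuildCauses('hudson.triggers.TimerTrigger$TimerTriggerCause').size() != 0
            if (!isTimeTriggeredBuild) {
                sh './check_ccache.sh || echo "ccache files not found!"'      // real module passes the artifactory URL
            }
            sh '/bin/bash --login -c ". ./before-install.sh && ./build.sh"'   // real module passes -t/-j/-i/--coverage flags
        }
    }

Because MPLModule is an ordinary pipeline step, the same mechanism also drives the post/cleanup path ('Cleanup Single Node DevTest' and 'Cleanup Mishards DevTest'), so the Jenkinsfile no longer needs to know the on-disk script paths at all.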