Add deploy test (#7539)

Signed-off-by: zhuwenxing <wenxing.zhu@zilliz.com>
pull/7628/head
zhuwenxing 2021-09-09 14:32:27 +08:00 committed by GitHub
parent 89358ad91e
commit 1cf2e14344
6 changed files with 191 additions and 37 deletions

.github/workflows/deploy_test.yaml (new file)

@@ -0,0 +1,65 @@
name: Deploy_Test
on:
  workflow_dispatch:
  schedule:
    - cron: "30 20 * * *"
  push:
    branches:
      - master
      - test_deploy
jobs:
  test-docker-compose:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        mode: [standalone,cluster]
        task: [reinstall,upgrade]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: 3.8
      - name: Run deploy test
        shell: bash
        working-directory: tests/python_client/deploy
        run: |
          pip install -r requirements.txt
          pip install --upgrade protobuf
          mkdir -p ${{ matrix.mode }}/logs
          echo "test -m ${{ matrix.mode }} -t ${{ matrix.task }}" > ./${{ matrix.mode }}/logs/test_info.log
          python scripts/get_tag.py >> ./${{ matrix.mode }}/logs/test_info.log
          bash test.sh -m ${{ matrix.mode }} -t ${{ matrix.task }} -p ""
      - name: 'Send mail'
        if: ${{ failure() }}
        uses: dawidd6/action-send-mail@v3
        with:
          server_address: ${{ secrets.EMAIL_SERVICE_NAME }}
          server_port: 465
          username: ${{ secrets.TEST_EMAIL_USERNAME }}
          password: ${{ secrets.TEST_EMAIL_PASSWORD }}
          subject: Deploy Test
          body: "test ${{ matrix.mode }} ${{ matrix.task }} failed"
          to: ${{ secrets.QA_EMAIL_ADDRESS }}
          from: GitHub Actions
      - name: Upload logs
        if: ${{ always() }}
        uses: actions/upload-artifact@v2
        with:
          name: logs-${{ matrix.mode }}-${{ matrix.task }}
          path: tests/python_client/deploy/${{ matrix.mode }}/logs
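The matrix above expands to four independent jobs (standalone and cluster, each with reinstall and upgrade), all driving the same test.sh entry point in tests/python_client/deploy. Purely for illustration, a Python sketch of that fan-out (the real expansion is done by GitHub Actions, not by any script in this commit):

# Illustration only: the mode/task combinations the workflow matrix launches.
from itertools import product

modes = ["standalone", "cluster"]
tasks = ["reinstall", "upgrade"]

for mode, task in product(modes, tasks):
    # each job effectively runs: bash test.sh -m <mode> -t <task> -p ""
    print(f"test -m {mode} -t {task}")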


@@ -0,0 +1,4 @@
docker==5.0.0
grpcio==1.37.1
grpcio-tools==1.37.1
pymilvus==2.0.0rc5


@@ -9,7 +9,8 @@ func() {
exit -1
}
while getopts "hp:" OPT;do
while getopts "hp:" OPT;
do
case $OPT in
p) Password="$OPTARG";;
h) func;;


@@ -0,0 +1,29 @@
import requests
import json

milvus_dev = "https://registry.hub.docker.com/v2/repositories/milvusdb/milvus-dev/tags?ordering=last_updated"
milvus = "https://registry.hub.docker.com/v2/repositories/milvusdb/milvus/tags?ordering=last_updated"

def get_tag(url):
    payload={}
    headers = {}
    response = requests.request("GET", url, headers=headers, data=payload)
    res = response.json()["results"]
    tags = [r["name"] for r in res]
    return tags

latest_tag = get_tag(milvus_dev)[1]
latest_rc_tag = [tag for tag in sorted(get_tag(milvus)) if "rc" and "v" in tag][-1]
release_version = "-".join(latest_rc_tag.split("-")[:-2])
print(release_version)
print(latest_tag,latest_rc_tag)

data = {
    "latest_tag":latest_tag,
    "latest_rc_tag":latest_rc_tag[1:],
    "release_version":release_version
}
print(data)

with open("tag_info.json","w") as f:
    f.write(json.dumps(data))
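test.sh later reads the tags this script writes to tag_info.json back out with jq; for illustration, the same read in Python (key names taken from the script above, variable names are just placeholders):

# Sketch: consume the tag_info.json written by get_tag.py.
import json

with open("tag_info.json") as f:
    tag_info = json.load(f)

latest_tag = tag_info["latest_tag"]            # tag picked from milvusdb/milvus-dev
latest_rc_tag = tag_info["latest_rc_tag"]      # previous rc tag from milvusdb/milvus, leading "v" stripped
release_version = tag_info["release_version"]
print(latest_tag, latest_rc_tag, release_version)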


@@ -1,10 +1,47 @@
import docker
import copy
from pymilvus import (
    connections, FieldSchema, CollectionSchema, DataType,
    Collection, list_collections,
)

all_index_types = ["FLAT", "IVF_FLAT", "IVF_SQ8", "IVF_PQ", "HNSW", "ANNOY", "RHNSW_FLAT", "RHNSW_PQ", "RHNSW_SQ",
                   "BIN_FLAT", "BIN_IVF_FLAT"]
default_index_params = [{"nlist": 128}, {"nlist": 128}, {"nlist": 128}, {"nlist": 128, "m": 16, "nbits": 8},
                        {"M": 48, "efConstruction": 500}, {"n_trees": 50}, {"M": 48, "efConstruction": 500},
                        {"M": 48, "efConstruction": 500, "PQM": 64}, {"M": 48, "efConstruction": 500}, {"nlist": 128},
                        {"nlist": 128}]
index_params_map = dict(zip(all_index_types, default_index_params))

def gen_search_param(index_type, metric_type="L2"):
    search_params = []
    if index_type in ["FLAT", "IVF_FLAT", "IVF_SQ8", "IVF_SQ8H", "IVF_PQ"]:
        for nprobe in [10]:
            ivf_search_params = {"metric_type": metric_type, "params": {"nprobe": nprobe}}
            search_params.append(ivf_search_params)
    elif index_type in ["BIN_FLAT", "BIN_IVF_FLAT"]:
        for nprobe in [10]:
            bin_search_params = {"metric_type": "HAMMING", "params": {"nprobe": nprobe}}
            search_params.append(bin_search_params)
    elif index_type in ["HNSW", "RHNSW_FLAT", "RHNSW_PQ", "RHNSW_SQ"]:
        for ef in [64]:
            hnsw_search_param = {"metric_type": metric_type, "params": {"ef": ef}}
            search_params.append(hnsw_search_param)
    elif index_type in ["NSG", "RNSG"]:
        for search_length in [100]:
            nsg_search_param = {"metric_type": metric_type, "params": {"search_length": search_length}}
            search_params.append(nsg_search_param)
    elif index_type == "ANNOY":
        for search_k in [1000]:
            annoy_search_param = {"metric_type": metric_type, "params": {"search_k": search_k}}
            search_params.append(annoy_search_param)
    else:
        print("Invalid index_type.")
        raise Exception("Invalid index_type.")
    return search_params

def list_containers():
    client = docker.from_env()
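load_and_search below takes the first entry returned by gen_search_param for each collection's index type. For reference, a small usage sketch (variable names are illustrative):

# gen_search_param returns a list of search-parameter dicts; the first one is used.
hnsw_search = gen_search_param("HNSW")[0]
# -> {"metric_type": "L2", "params": {"ef": 64}}
bin_search = gen_search_param("BIN_IVF_FLAT")[0]
# -> {"metric_type": "HAMMING", "params": {"nprobe": 10}}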
@@ -24,7 +61,7 @@ def get_collections():
print(f"{name}: {c.num_entities}")
def create_collections_and_insert_data(col_name="hello_milvus"):
def create_collections_and_insert_data():
import random
dim = 128
default_fields = [
@@ -33,26 +70,26 @@ def create_collections_and_insert_data(col_name="hello_milvus"):
FieldSchema(name="float_vector", dtype=DataType.FLOAT_VECTOR, dim=dim)
]
default_schema = CollectionSchema(fields=default_fields, description="test collection")
print(f"\nCreate collection...")
collection = Collection(name=col_name, schema=default_schema)
print(f"\nList collections...")
print(list_collections())
for col_name in all_index_types:
print(f"\nCreate collection...")
collection = Collection(name=col_name, schema=default_schema)
# insert data
nb = 3000
vectors = [[i/nb for _ in range(dim)] for i in range(nb)]
collection.insert(
[
[i for i in range(nb)],
[float(random.randrange(-20, -10)) for _ in range(nb)],
vectors
]
)
print(f"collection name: {col_name}")
print(f"collection entities: {collection.num_entities}")
print(f"\nList collections...")
print(list_collections())
# insert data
nb = 3000
vectors = [[random.random() for _ in range(dim)] for _ in range(nb)]
collection.insert(
[
[i for i in range(nb)],
[float(random.randrange(-20, -10)) for _ in range(nb)],
vectors
]
)
print(f"\nGet collection entities...")
print(collection.num_entities)
def create_index():
# create index
default_index = {"index_type": "IVF_FLAT", "params": {"nlist": 128}, "metric_type": "L2"}
@@ -63,7 +100,12 @@ def create_index():
print(name)
print(c)
c.create_index(field_name="float_vector", index_params=default_index)
index = copy.deepcopy(default_index)
index["index_type"] = name
index["params"] = index_params_map[name]
if name in ["BIN_FLAT", "BIN_IVF_FLAT"]:
index["metric_type"] = "HAMMING"
c.create_index(field_name="float_vector", index_params=index)
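With this change, each collection (named after its index type) gets an index built from index_params_map instead of the hard-coded IVF_FLAT default, and the metric is switched to HAMMING for the binary index types. For example, the resulting index dicts would look like (variable names are illustrative):

# Examples of the index dicts create_index builds after this change.
hnsw_index = {"index_type": "HNSW", "metric_type": "L2",
              "params": {"M": 48, "efConstruction": 500}}
bin_ivf_index = {"index_type": "BIN_IVF_FLAT", "metric_type": "HAMMING",
                 "params": {"nlist": 128}}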
@@ -75,8 +117,11 @@ def load_and_search():
print(f"collection name: {name}")
c.load()
topK = 5
vectors = [[1.0 for _ in range(128)] for _ in range(3000)]
search_params = {"metric_type": "L2", "params": {"nprobe": 10}}
vectors = [[0.0 for _ in range(128)] for _ in range(3000)]
index_type = name
search_params = gen_search_param(index_type)[0]
print(search_params)
# search_params = {"metric_type": "L2", "params": {"nprobe": 10}}
import time
@@ -84,8 +129,8 @@ def load_and_search():
print(f"\nSearch...")
# define output_fields of search result
res = c.search(
vectors[-2:], "float_vector", search_params, topK,
"count > 500", output_fields=["count", "random_value"]
vectors[:1], "float_vector", search_params, topK,
"count > 500", output_fields=["count", "random_value"],timeout=20
)
end_time = time.time()
# show result
@@ -93,8 +138,10 @@ def load_and_search():
for hit in hits:
# Get value of the random value field for search result
print(hit, hit.entity.get("random_value"))
ids= hits.ids
print(ids)
print("###########")
print("search latency = %.4fs" % (end_time - start_time))
c.release()
print("search data ends")


@@ -29,7 +29,8 @@ echo "platform: $platform"
Task="reinstall"
Mode="standalone"
Release="2.0.0-rc5"
while getopts "hm:t:p:" OPT;do
while getopts "hm:t:p:" OPT;
do
case $OPT in
m) Mode="$OPTARG";;
t) Task="$OPTARG";;
@@ -72,11 +73,11 @@ function replace_image_tag {
#to check containers all running and minio is healthy
function check_healthy {
cnt=`docker-compose ps | grep -E "Running|Up" | wc -l`
cnt=`docker-compose ps | grep -E "running|Running|Up|up" | wc -l`
healthy=`docker-compose ps | grep "Healthy" | wc -l`
time_cnt=0
echo "running num $cnt expect num $Expect"
echo "healthy num $healthy expect num 1"
echo "healthy num $healthy expect num $Expect_health"
while [[ $cnt -ne $Expect || $healthy -ne 1 ]];
do
printf "waiting all containers get running\n"
@@ -88,10 +89,10 @@ function check_healthy {
printf "timeout,there are some issue with deployment!"
error_exit
fi
cnt=`docker-compose ps | grep -E "Running|Up" | wc -l`
cnt=`docker-compose ps | grep -E "running|Running|Up|up" | wc -l`
healthy=`docker-compose ps | grep "healthy" | wc -l`
echo "running num $cnt expect num $Expect"
echo "healthy num $healthy expect num 1"
echo "healthy num $healthy expect num $Expect_health"
done
}
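check_healthy polls docker-compose ps until the running-container count reaches $Expect and the health checks pass, giving up after a timeout. As a rough equivalent only, the docker SDK pinned in requirements.txt could express the same check; this is a sketch under assumed names, not part of this commit:

# Sketch: wait for the expected number of running containers with passing healthchecks.
import time
import docker

def wait_until_healthy(expected_running, timeout_s=360, interval_s=5):
    client = docker.from_env()
    deadline = time.time() + timeout_s
    while time.time() < deadline:
        running = client.containers.list(filters={"status": "running"})
        healthy = [c for c in running
                   if c.attrs.get("State", {}).get("Health", {}).get("Status") == "healthy"]
        print(f"running num {len(running)} expect num {expected_running}; healthy num {len(healthy)}")
        if len(running) >= expected_running and healthy:
            return True
        time.sleep(interval_s)
    raise TimeoutError("timeout, there are some issues with the deployment")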
@@ -111,15 +112,22 @@ if [ ! -d ${Deploy_Dir} ];
then
mkdir ${Deploy_Dir}
fi
latest_tag=2.0.0-rc5-latest # the version you are testing now
latest_rc_tag=2.0.0-rc4-latest # a previous version based on current version
echo "get tag info"
python scripts/get_tag.py
latest_tag=`jq -r ".latest_tag" tag_info.json`
latest_rc_tag=`jq -r ".latest_rc_tag" tag_info.json`
release_version=`jq -r ".release_version" tag_info.json`
echo $release_version
pushd ${Deploy_Dir}
# download docker-compose.yml
wget https://github.com/milvus-io/milvus/releases/download/v${Release}/milvus-${Deploy_Dir}-docker-compose.yml -O docker-compose.yml
wget https://github.com/milvus-io/milvus/releases/download/${release_version}/milvus-${Deploy_Dir}-docker-compose.yml -O docker-compose.yml
ls
# clean env to deploy a fresh milvus
docker-compose down
sleep 10s
docker-compose ps
echo "$pw"| sudo -S rm -rf ./volumes
@@ -134,9 +142,11 @@ if [ "$Task" == "upgrade" ];
then
printf "start to deploy previous rc tag milvus\n"
replace_image_tag $latest_rc_tag
fi
cat docker-compose.yml|grep milvusdb
Expect=`grep "container_name" docker-compose.yml | wc -l`
Expect_health=`grep "healthcheck" docker-compose.yml | wc -l`
docker-compose up -d
check_healthy
docker-compose ps
@@ -199,5 +209,3 @@ sleep 10s
docker-compose ps
echo "$pw"|sudo -S rm -rf ./volumes
popd