mirror of https://github.com/milvus-io/milvus.git
refactor test_info to log_config (#5685)
* [skip ci] refactor test_info to log_config
* [skip ci] update chaos checker class
* add pytest-parallel in requirements
* [skip ci] update the error msg
* [skip ci] limit the length of one log row

Signed-off-by: yanliang567 <yanliang.qiao@zilliz.com>

pull/5692/head
parent cad67a63c8
commit a0ed3dc8c6
@@ -11,6 +11,7 @@ allure-pytest==2.7.0
 pytest-print==0.2.1
 pytest-level==0.1.1
 pytest-xdist==2.2.1
+pytest-parallel
 pymilvus==2.0a1.dev22
 pytest-rerunfailures==9.1.1
 git+https://github.com/Projectplace/pytest-tags
@@ -1,19 +1,31 @@
# Testcases All-in-one
# pod kill
#   standalone
#     3 pods(standalone-ha-blabla, etcd, minio)
#   cluster-1-node
#     11 pods(proxy, master, query node, query service, data node, data service,
#             index node, index service, pulsar, etcd, minio)
#   cluster-n-nodes
#     11 pods * n: kill one and kill all

Collections:
  -
    testcase: test_querynode_podkill
    chaos: chaos_querynode_podkill.yaml
    expectation:
      single:
      standalone:
        create: succ      # succ by default if not specified
        drop: succ
        query: fail
        insert: succ
        flush: succ
        index: succ
        search: fail
        query: fail
      cluster_1_node:
        search: fail
        query: fail
      cluster_n_nodes:
        query: degrade    # keep functional, but performance degraded
        search: degrade   # keep functional, but performance degraded
        query: degrade
  -
    testcase: test_queryservice_podkill
    chaos: chaos_queryservice_podkill.yaml
@@ -21,7 +33,7 @@ Collections:
     testcase: test_datanode_podkill
     chaos: chaos_datanode_podkill.yaml
     expectation:
-      single:
+      standalone:
         insert: fail
       cluster_1_node:
         insert: fail
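For reference, the expectation blocks above parse into plain nested mappings, so a chaos test can look up the per-deployment outcome (succ / fail / degrade) for each operation. A minimal, self-contained sketch of reading such a spec (PyYAML assumed; the inline document and the loop below are illustrative, not the project's actual driver):

import yaml

spec_text = """
Collections:
  -
    testcase: test_datanode_podkill
    chaos: chaos_datanode_podkill.yaml
    expectation:
      standalone:
        insert: fail
"""

spec = yaml.safe_load(spec_text)
for case in spec["Collections"]:
    expected = case["expectation"]["standalone"]
    for op, outcome in expected.items():
        # outcome is one of: succ, fail, degrade (see the comments at the top of the file)
        print(case["testcase"], op, "->", outcome)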
@@ -7,6 +7,7 @@ from common import common_type as ct
 
+nums = 0
 
 
 class Checker:
     def __init__(self):
         self._succ = 0
@@ -32,35 +33,58 @@ class SearchChecker(Checker):
         super().__init__()
         self.c_wrapper = collection_wrapper
 
-    def keep_searching(self):
+    def keep_running(self):
         while self._running is True:
             search_vec = cf.gen_vectors(5, ct.default_dim)
             _, result = self.c_wrapper.search(
-                                        data=search_vec,
-                                        params={"nprobe": 32},
-                                        limit=1,
-                                        check_task="nothing"
-                                    )
+                data=search_vec,
+                params={"nprobe": 32},
+                limit=1,
+                check_task="nothing"
+            )
             if result is True:
                 self._succ += 1
             else:
                 self._fail += 1
 
 
-class InsertChecker(Checker):
+class InsertAndFlushChecker(Checker):
     def __init__(self, collection_wrapper):
         super().__init__()
+        self._flush_succ = 0
+        self._flush_fail = 0
         self.c_wrapper = collection_wrapper
 
-    def keep_inserting(self):
+    def keep_running(self):
         while self._running is True:
             sleep(1)
-            _, result = self.c_wrapper.insert(data=cf.gen_default_list_data(),
-                                              check_task="nothing")
-            if result is True:
+            _, insert_result = self.c_wrapper.insert(
+                data=cf.gen_default_list_data(nb=ct.default_nb),
+                check_task="nothing")
+
+            if insert_result is True:
                 self._succ += 1
+                num_entities = self.c_wrapper.num_entities
+                self.connection.flush([self.c_wrapper.collection.name])
+                if self.c_wrapper.num_entities == (num_entities + ct.default_nb):
+                    self._flush_succ += 1
+                else:
+                    self._flush_fail += 1
             else:
                 self._fail += 1
+                self._flush_fail += 1
+
+    def insert_statics(self):
+        return self.statics()
+
+    def flush_statics(self):
+        return self._flush_succ / self.total() if self.total() != 0 else 0
+
+    def reset(self):
+        self._succ = 0
+        self._fail = 0
+        self._flush_succ = 0
+        self._flush_fail = 0
 
 
 class CreateChecker(Checker):
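The InsertAndFlushChecker introduced above derives flush success indirectly: it records num_entities before flushing and only counts the flush as successful when the count grows by exactly the inserted batch. Restated as a compact helper (the wrapper and connection objects are the ones in the diff; the helper itself is illustrative, not framework code):

def flush_succeeded(c_wrapper, connection, batch_size):
    before = c_wrapper.num_entities                  # row count before the flush
    connection.flush([c_wrapper.collection.name])    # seal the growing segment
    return c_wrapper.num_entities == before + batch_size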
@@ -69,11 +93,13 @@ class CreateChecker(Checker):
         self.c_wrapper = collection_wrapper
         self.num = 0
 
-    def keep_creating(self):
+    def keep_running(self):
         while self._running is True:
-            collection, result = self.c_wrapper.init_collection(name=cf.gen_unique_str(),
-                                                                schema=cf.gen_default_collection_schema(),
-                                                                check_task="check_nothing")
+            collection, result = self.c_wrapper.init_collection(
+                name=cf.gen_unique_str(),
+                schema=cf.gen_default_collection_schema(),
+                check_task="check_nothing"
+            )
             if result is True:
                 self._succ += 1
                 self.c_wrapper.drop(check_task="check_nothing")
@@ -85,15 +111,15 @@ class IndexChecker(Checker):
     def __init__(self):
         super().__init__()
 
-    def keep_indexing(self):
+    def keep_running(self):
         pass
 
 
-class DropChecker(Checker):
+class QueryChecker(Checker):
     def __init__(self):
         super().__init__()
 
-    def keep_dropping(self):
+    def keep_running(self):
         pass
@@ -101,6 +127,6 @@ class FlushChecker(Checker):
     def __init__(self):
         super().__init__()
 
-    def keep_flushing(self):
+    def keep_running(self):
         pass
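With every checker now exposing the same keep_running() entry point, a chaos test can start all of them the same way, inject a fault, and then compare per-operation success ratios against the expectations in testcases.yaml. A self-contained sketch of that pattern (the DummyChecker, thread handling, and timings are assumptions, not the project's runner):

import threading
import time


class DummyChecker:
    """Stand-in for SearchChecker / InsertAndFlushChecker / CreateChecker ..."""

    def __init__(self):
        self._succ, self._fail, self._running = 0, 0, True

    def keep_running(self):
        while self._running:
            self._succ += 1          # a real checker would call Milvus here
            time.sleep(0.1)

    def statics(self):
        total = self._succ + self._fail
        return self._succ / total if total else 0


checkers = {"search": DummyChecker(), "insert_flush": DummyChecker()}
for c in checkers.values():
    threading.Thread(target=c.keep_running, daemon=True).start()

time.sleep(1)                        # the pod-kill chaos would be applied in this window
for name, c in checkers.items():
    c._running = False
    print(name, "success ratio:", c.statics())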
@@ -9,7 +9,7 @@ from base.index_wrapper import ApiIndexWrapper
 from base.utility_wrapper import ApiUtilityWrapper
 from base.schema_wrapper import ApiCollectionSchemaWrapper, ApiFieldSchemaWrapper
 
-from config.test_info import test_info
+from config.log_config import log_config
 from utils.util_log import test_log as log
 from common import common_func as cf
 from common import common_type as ct
@@ -87,7 +87,7 @@ class Base:
         assert ip_check(host) and number_check(port)
 
         """ modify log files """
-        cf.modify_file(file_path_list=[test_info.log_debug, test_info.log_info, test_info.log_err], is_modify=clean_log)
+        cf.modify_file(file_path_list=[log_config.log_debug, log_config.log_info, log_config.log_err], is_modify=clean_log)
 
         log.info("#" * 80)
         log.info("[initialize_milvus] Log cleaned up, start testing...")
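The body of cf.modify_file() is outside this diff; judging by the is_modify=clean_log flag and the "Log cleaned up" message that follows, it truncates the listed log files before a run. An illustrative stand-in for that behaviour (not the real common_func helper):

def truncate_files(file_path_list, is_modify=True):
    if not is_modify:
        return
    for path in file_path_list:
        open(path, "w").close()      # create the file if missing, drop old content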
@@ -116,17 +116,6 @@ class TestcaseBase(Base):
             raise res
         return res
 
-    '''
-    def _collection(self, **kwargs):
-        """ Init a collection and return the object of collection """
-        name = cf.gen_unique_str()
-        schema = cf.gen_default_collection_schema()
-        if self.connection_wrap.get_connection(alias='default') is None:
-            self._connect()
-        res, cr = self.collection_wrap.init_collection(name=name, schema=schema, **kwargs)
-        return res
-    '''
-
     def init_collection_wrap(self, name=None, data=None, schema=None, check_task=None, **kwargs):
         name = cf.gen_unique_str('coll_') if name is None else name
         schema = cf.gen_default_collection_schema() if schema is None else schema
@@ -1,8 +1,11 @@
 import os
 
 
-class TestInfo:
+class LogConfig:
     def __init__(self):
+        self.log_debug = ""
+        self.log_err = ""
+        self.log_info = ""
         self.get_default_config()
 
     @staticmethod
@@ -25,12 +28,12 @@ class TestInfo:
 
     def get_default_config(self):
         """ Make sure the path exists """
-        self.log_dir = self.get_env_variable()
-        self.log_debug = "%s/ci_test_log.debug" % self.log_dir
-        self.log_info = "%s/ci_test_log.log" % self.log_dir
-        self.log_err = "%s/ci_test_log.err" % self.log_dir
+        log_dir = self.get_env_variable()
+        self.log_debug = "%s/ci_test_log.debug" % log_dir
+        self.log_info = "%s/ci_test_log.log" % log_dir
+        self.log_err = "%s/ci_test_log.err" % log_dir
 
-        self.create_path(self.log_dir)
+        self.create_path(log_dir)
 
 
-test_info = TestInfo()
+log_config = LogConfig()
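Put together, the renamed class resolves the log directory once, builds the three file paths, and makes sure the directory exists. The bodies of get_env_variable() and create_path() are not shown in this diff, so the environment-variable name and fallback directory below are assumptions for illustration only:

import os


class LogConfig:
    def __init__(self):
        self.log_debug = ""
        self.log_err = ""
        self.log_info = ""
        self.get_default_config()

    @staticmethod
    def get_env_variable(var="CI_LOG_PATH"):          # variable name assumed
        """Return the log directory from the environment, with a local fallback."""
        return os.environ.get(var, "/tmp/ci_logs")

    @staticmethod
    def create_path(log_path):
        os.makedirs(log_path, exist_ok=True)

    def get_default_config(self):
        """ Make sure the path exists """
        log_dir = self.get_env_variable()
        self.log_debug = "%s/ci_test_log.debug" % log_dir
        self.log_info = "%s/ci_test_log.log" % log_dir
        self.log_err = "%s/ci_test_log.err" % log_dir
        self.create_path(log_dir)


log_config = LogConfig()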
@@ -1,5 +1,5 @@
 [pytest]
 
 addopts = --host 192.168.1.239 --html=/tmp/ci_logs/report.html --self-contained-html
--;addopts = --host 172.28.255.155 --html=/tmp/report.html
+# -;addopts = --host 172.28.255.155 --html=/tmp/report.html
 # python3 -W ignore -m pytest
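Note that --host in addopts is not a built-in pytest option; it only resolves because the suite's conftest.py registers it. That conftest is not part of this diff, so the hook below is a minimal assumed version rather than the project's actual code:

def pytest_addoption(parser):
    parser.addoption("--host", action="store", default="localhost",
                     help="IP of the Milvus service under test")


def pytest_configure(config):
    # the fixtures/base classes would read the value the same way
    print("testing against host:", config.getoption("--host"))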
@@ -11,6 +11,7 @@ allure-pytest==2.7.0
 pytest-print==0.2.1
 pytest-level==0.1.1
 pytest-xdist==2.2.1
+pytest-parallel
 pymilvus==2.0a1.dev22
 pytest-rerunfailures==9.1.1
 git+https://github.com/Projectplace/pytest-tags
@@ -95,7 +95,6 @@ class TestPartitionParams(TestcaseBase):
                                  "is_empty": True}
                     )
 
-
     @pytest.mark.tags(CaseLabel.L1)
     @pytest.mark.parametrize("collection_name, partition_name, description",
                              [(cf.gen_unique_str(), cf.gen_unique_str(prefix), cf.gen_unique_str())])
@@ -208,7 +207,7 @@ class TestPartitionParams(TestcaseBase):
                            check_task=CheckTasks.err_res,
                            check_items={"err_code": 1, "err_msg": "'NoneType' object has no attribute"})
 
-    @pytest.mark.tags(CaseLabel.L0)
+    @pytest.mark.tags(CaseLabel.L1)
     @pytest.mark.parametrize("partition_name", [cf.gen_unique_str(prefix)])
     def test_partition_drop(self, partition_name):
         """
@@ -749,7 +748,7 @@ class TestPartitionOperations(TestcaseBase):
         data = cf.gen_default_list_data(nb=10, dim=dim)
         # insert data to partition
         partition_w.insert(data, check_task=CheckTasks.err_res,
-                           check_items={"err_code": 1, "err_msg": "Field type doesn't match"})
+                           check_items={"err_code": 1, "err_msg": "but entities field dim"})
 
     @pytest.mark.tags(CaseLabel.L1)
     @pytest.mark.parametrize("sync", [True, False])
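The expected err_msg here changes from a full sentence to the mid-sentence fragment "but entities field dim", which suggests the expectation is matched as a substring of the server's error text (the actual assertion lives in the CheckTasks / response-checking utilities, not shown in this diff). Illustrative check only:

def error_matches(expected_fragment, actual_message):
    return expected_fragment in actual_message


# sample text, not a real server log line
print(error_matches("but entities field dim",
                    "collection dim is 128, but entities field dim is 8"))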
@@ -8,18 +8,23 @@ class Error:
         self.message = getattr(error, 'message', str(error))
 
 
+log_row_length = 150
+
+
 def api_request_catch():
     def wrapper(func):
         def inner_wrapper(*args, **kwargs):
             try:
                 res = func(*args, **kwargs)
-                log.debug("(api_res) Response : %s " % str(res))
+                log.debug("(api_res) Response : %s " % str(res)[0:log_row_length])
                 return res, True
             except Exception as e:
                 log.error(traceback.format_exc())
-                log.error("[Milvus API Exception]%s: %s" % (str(func), str(e)))
+                log.error("[Milvus API Exception]%s: %s" % (str(func), str(e)[0:log_row_length]))
                 return Error(e), False
 
         return inner_wrapper
 
     return wrapper
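The new log_row_length constant caps how much of a response or exception ends up in one log row, which keeps large search and insert results from flooding the CI logs. A self-contained version of the same idea (plain logging instead of the suite's util_log, and a simplified return value):

import functools
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("ci_test")
log_row_length = 150


def api_request_catch():
    def wrapper(func):
        @functools.wraps(func)
        def inner_wrapper(*args, **kwargs):
            try:
                res = func(*args, **kwargs)
                log.debug("(api_res) Response : %s " % str(res)[0:log_row_length])
                return res, True
            except Exception as e:
                log.error("[API Exception]%s: %s" % (str(func), str(e)[0:log_row_length]))
                return e, False
        return inner_wrapper
    return wrapper


@api_request_catch()
def big_response():
    return list(range(10000))        # would otherwise produce a huge log row


res, ok = big_response()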
@@ -32,6 +37,7 @@ def api_request(_list, **kwargs):
             if len(_list) > 1:
                 for a in _list[1:]:
                     arg.append(a)
-            log.debug("(api_req)[%s] Parameters ars arg: %s, kwargs: %s" % (str(func), str(arg), str(kwargs)))
+            log.debug("(api_req)[%s] Parameters are arg: %s, kwargs: %s"
+                      % (str(func), str(arg)[0:log_row_length], str(kwargs)))
             return func(*arg, **kwargs)
     return False, False
@@ -1,7 +1,7 @@
 import logging
 import sys
 
-from config.test_info import test_info
+from config.log_config import log_config
 
 
 class TestLog:
@@ -41,7 +41,7 @@ class TestLog:
 
 
 """All modules share this unified log"""
-log_debug = test_info.log_debug
-log_info = test_info.log_info
-log_err = test_info.log_err
+log_debug = log_config.log_debug
+log_info = log_config.log_info
+log_err = log_config.log_err
 test_log = TestLog('ci_test', log_debug, log_info, log_err).log
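TestLog's implementation is not part of this diff; the sketch below assumes the conventional layout implied by its (log_debug, log_info, log_err) arguments, i.e. one shared logger with a file handler per level. The paths mirror the defaults built by LogConfig above:

import logging
import os

os.makedirs("/tmp/ci_logs", exist_ok=True)


class TestLog:
    def __init__(self, logger_name, log_debug, log_info, log_err):
        self.log = logging.getLogger(logger_name)
        self.log.setLevel(logging.DEBUG)
        for path, level in ((log_debug, logging.DEBUG),
                            (log_info, logging.INFO),
                            (log_err, logging.ERROR)):
            handler = logging.FileHandler(path)
            handler.setLevel(level)
            self.log.addHandler(handler)


test_log = TestLog("ci_test",
                   "/tmp/ci_logs/ci_test_log.debug",
                   "/tmp/ci_logs/ci_test_log.log",
                   "/tmp/ci_logs/ci_test_log.err").log
test_log.info("shared logger ready")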