mirror of https://github.com/milvus-io/milvus.git
test: add log level for go sdk test and update part cases (#38385)

- add log level for go sdk cases
- update cases for issue #33460 & #37853

---------

Signed-off-by: ThreadDao <yufen.zong@zilliz.com>
parent 75e64b993f
commit 59234a3350
@@ -15,43 +15,56 @@ import (
 )
 
 func LoggingUnaryInterceptor() grpc.UnaryClientInterceptor {
+    // Limit debug logging for these methods
+    rateLogMethods := map[string]struct{}{
+        "GetFlushState":      {},
+        "GetLoadingProgress": {},
+        "DescribeIndex":      {},
+    }
+
+    logWithRateLimit := func(_methodShortName string, logFunc func(msg string, fields ...zap.Field),
+        logRateFunc func(cost float64, msg string, fields ...zap.Field) bool,
+        msg string, fields ...zap.Field,
+    ) {
+        if _, exists := rateLogMethods[_methodShortName]; exists {
+            logRateFunc(10, msg, fields...)
+        } else {
+            logFunc(msg, fields...)
+        }
+    }
+
     return func(ctx context.Context, method string, req, reply interface{}, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
-        maxLogLength := 300
+        const maxLogLength = 300
         _method := strings.Split(method, "/")
-        _methodShotName := _method[len(_method)-1]
-        // Marshal req to json str
-        reqJSON, err := json.Marshal(req)
-        if err != nil {
-            log.Error("Failed to marshal request", zap.Error(err))
-            reqJSON = []byte("could not marshal request")
-        }
-        reqStr := string(reqJSON)
-        if len(reqStr) > maxLogLength {
-            reqStr = reqStr[:maxLogLength] + "..."
-        }
+        _methodShortName := _method[len(_method)-1]
+
+        // Marshal request
+        marshalWithFallback := func(v interface{}, fallbackMsg string) string {
+            dataJSON, err := json.Marshal(v)
+            if err != nil {
+                log.Error("Failed to marshal", zap.Error(err))
+                return fallbackMsg
+            }
+            dataStr := string(dataJSON)
+            if len(dataStr) > maxLogLength {
+                return dataStr[:maxLogLength] + "......"
+            }
+            return dataStr
+        }
 
-        // log before
-        log.Info("Request", zap.String("method", _methodShotName), zap.Any("reqs", reqStr))
+        reqStr := marshalWithFallback(req, "could not marshal request")
+        logWithRateLimit(_methodShortName, log.Info, log.RatedInfo, "Request", zap.String("method", _methodShortName), zap.String("reqs", reqStr))
 
-        // invoker
+        // Invoke the actual method
         start := time.Now()
         errResp := invoker(ctx, method, req, reply, cc, opts...)
         cost := time.Since(start)
 
-        // Marshal reply to json str
-        respJSON, err := json.Marshal(reply)
-        if err != nil {
-            log.Error("Failed to marshal response", zap.Error(err))
-            respJSON = []byte("could not marshal response")
-        }
-        respStr := string(respJSON)
-        if len(respStr) > maxLogLength {
-            respStr = respStr[:maxLogLength] + "..."
-        }
+        // Marshal response
+        respStr := marshalWithFallback(reply, "could not marshal response")
+        logWithRateLimit(_methodShortName, log.Info, log.RatedInfo, "Response", zap.String("method", _methodShortName), zap.String("resp", respStr))
+        logWithRateLimit(_methodShortName, log.Debug, log.RatedDebug, "Cost", zap.String("method", _methodShortName), zap.Duration("cost", cost))
 
-        // log after
-        log.Info("Response", zap.String("method", _methodShotName), zap.Any("resp", respStr))
-        log.Debug("Cost", zap.String("method", _methodShotName), zap.Duration("cost", cost))
         return errResp
     }
 }
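
A note on the new helper: for the three chatty polling RPCs listed in rateLogMethods, logWithRateLimit routes through log.RatedInfo / log.RatedDebug, whose first argument (10 here) is the rate window handed to Milvus's rated logger, so repeated GetFlushState / GetLoadingProgress / DescribeIndex calls no longer flood the log. The interceptor itself is installed when dialing the connection; a minimal sketch using the standard grpc-go options (the package placement and addr value are assumptions, not part of the commit):

    package base // hypothetical placement next to the interceptor

    import (
        "google.golang.org/grpc"
        "google.golang.org/grpc/credentials/insecure"
    )

    // dialWithLogging wires the interceptor into every unary call made
    // over the returned connection.
    func dialWithLogging(addr string) (*grpc.ClientConn, error) {
        return grpc.Dial(
            addr, // e.g. "localhost:19530"
            grpc.WithTransportCredentials(insecure.NewCredentials()),
            grpc.WithUnaryInterceptor(LoggingUnaryInterceptor()),
        )
    }
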
@@ -356,6 +356,24 @@ func TestInsertColumnsDifferentLen(t *testing.T) {
     common.CheckErr(t, errInsert, false, "column size not match")
 }
 
+func TestInsertAutoIdPkData(t *testing.T) {
+    ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
+    mc := createDefaultMilvusClient(ctx, t)
+
+    // create collection
+    cp := hp.NewCreateCollectionParams(hp.Int64Vec)
+    _, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(true), hp.TNewSchemaOption())
+
+    // insert
+    columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
+    pkColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeInt64, *columnOpt)
+    vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, *columnOpt)
+    insertOpt := client.NewColumnBasedInsertOption(schema.CollectionName).WithColumns(vecColumn, pkColumn)
+
+    _, err := mc.Insert(ctx, insertOpt)
+    common.CheckErr(t, err, false, "more fieldData has pass in")
+}
+
 // test insert invalid column: empty column or dim not match
 func TestInsertInvalidColumn(t *testing.T) {
     ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
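
TestInsertAutoIdPkData pins down the failure path: with autoID enabled, supplying an explicit pk column is rejected with "more fieldData has pass in". For contrast, a hypothetical companion sketch (not part of the commit, built only from helpers already used above) of the accepted insert for the same schema, where the pk column is omitted and the server assigns keys:

    // Hypothetical companion test: the success path for an autoID collection
    // is to pass only the vector column.
    func TestInsertAutoIdVectorOnly(t *testing.T) {
        ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
        mc := createDefaultMilvusClient(ctx, t)

        cp := hp.NewCreateCollectionParams(hp.Int64Vec)
        _, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(true), hp.TNewSchemaOption())

        // insert only the vector column; the server generates the int64 pks
        columnOpt := hp.TNewDataOption().TWithDim(common.DefaultDim)
        vecColumn := hp.GenColumnData(common.DefaultNb, entity.FieldTypeFloatVector, *columnOpt)
        res, err := mc.Insert(ctx, client.NewColumnBasedInsertOption(schema.CollectionName).WithColumns(vecColumn))
        common.CheckErr(t, err, true)
        require.Equal(t, common.DefaultNb, int(res.InsertCount))
    }
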
@@ -745,22 +763,78 @@ func TestInsertRowMismatchFields(t *testing.T) {
     common.CheckErr(t, errInsert, true)
 }
 
-func TestInsertAutoIDInvalidRow(t *testing.T) {
-    t.Skip("https://github.com/milvus-io/milvus/issues/33460")
+func TestInsertDisableAutoIDRow(t *testing.T) {
+    /*
+        autoID: false
+        - pass pk value -> insert success
+        - no pk value -> error
+    */
+    t.Parallel()
     ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
     mc := createDefaultMilvusClient(ctx, t)
+    cp := hp.NewCreateCollectionParams(hp.Int64Vec)
+    _, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(false), hp.TNewSchemaOption().TWithAutoID(false))
 
-    for _, autoId := range []bool{false, true} {
-        cp := hp.NewCreateCollectionParams(hp.Int64Vec)
-        _, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(autoId), hp.TNewSchemaOption())
+    // pass pk value
+    rowsWithPk := hp.GenInt64VecRows(10, false, false, *hp.TNewDataOption())
+    idsWithPk, err := mc.Insert(ctx, client.NewRowBasedInsertOption(schema.CollectionName, rowsWithPk...))
+    common.CheckErr(t, err, true)
+    require.Contains(t, idsWithPk.IDs.(*column.ColumnInt64).Data(), rowsWithPk[0].(*hp.BaseRow).Int64)
 
-        // insert rows: autoId true -> no pk data; autoID false -> has pk data
-        rows := hp.GenInt64VecRows(10, false, !autoId, *hp.TNewDataOption())
-        log.Info("rows data", zap.Any("rows[0]", rows[0]))
-        _, err := mc.Insert(ctx, client.NewRowBasedInsertOption(schema.CollectionName, rows...))
-        common.CheckErr(t, err, false, "missing pk data")
+    // no pk value -> error
+    type tmpRow struct {
+        FloatVec []float32 `json:"floatVec,omitempty" milvus:"name:floatVec"`
     }
+    rowsWithoutPk := make([]interface{}, 0, 10)
+
+    // generate insert rows without the pk field
+    for i := 0; i < 10; i++ {
+        baseRow := tmpRow{
+            FloatVec: common.GenFloatVector(common.DefaultDim),
+        }
+        rowsWithoutPk = append(rowsWithoutPk, &baseRow)
+    }
+    _, err1 := mc.Insert(ctx, client.NewRowBasedInsertOption(schema.CollectionName, rowsWithoutPk...))
+    common.CheckErr(t, err1, false, "row 0 does not has field int64")
+}
+
+func TestInsertEnableAutoIDRow(t *testing.T) {
+    /*
+        autoID: true
+        - pass pk value -> ignore passed value and write back auto-gen pk
+        - no pk value -> insert success
+    */
+    t.Parallel()
+    ctx := hp.CreateContext(t, time.Second*common.DefaultTimeout)
+    mc := createDefaultMilvusClient(ctx, t)
+    cp := hp.NewCreateCollectionParams(hp.Int64Vec)
+    _, schema := hp.CollPrepare.CreateCollection(ctx, t, mc, cp, hp.TNewFieldsOption().TWithAutoID(true), hp.TNewSchemaOption().TWithAutoID(true))
+
+    // pass pk value -> ignore passed pks
+    rowsWithPk := hp.GenInt64VecRows(10, false, false, *hp.TNewDataOption())
+    log.Debug("origin first rowsWithPk", zap.Any("rowsWithPk", rowsWithPk[0].(*hp.BaseRow)))
+    idsWithPk, err := mc.Insert(ctx, client.NewRowBasedInsertOption(schema.CollectionName, rowsWithPk...))
+    log.Info("write back rowsWithPk", zap.Any("rowsWithPk", rowsWithPk[0].(*hp.BaseRow)))
+    common.CheckErr(t, err, true)
+    require.Contains(t, idsWithPk.IDs.(*column.ColumnInt64).Data(), rowsWithPk[0].(*hp.BaseRow).Int64)
+
+    // no pk value -> insert success
+    rowsWithoutPk := make([]interface{}, 0, 10)
+    type tmpRow struct {
+        FloatVec []float32 `json:"floatVec,omitempty" milvus:"name:floatVec"`
+    }
+
+    // generate insert rows without the pk field
+    for i := 0; i < 10; i++ {
+        baseRow := tmpRow{
+            FloatVec: common.GenFloatVector(common.DefaultDim),
+        }
+        rowsWithoutPk = append(rowsWithoutPk, &baseRow)
+    }
+
+    idsWithoutPk, err1 := mc.Insert(ctx, client.NewRowBasedInsertOption(schema.CollectionName, rowsWithoutPk...))
+    common.CheckErr(t, err1, true)
+    require.Equal(t, 10, int(idsWithoutPk.InsertCount))
+}
 
 func TestFlushRate(t *testing.T) {
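
The Debug/Info pair around Insert in TestInsertEnableAutoIDRow exists to show the write-back: with autoID, the client overwrites whatever pk values the rows carried, and the returned IDs column holds the generated keys. A small fragment consuming that column (idsWithPk as in the test above; the loop itself is illustrative, not from the commit):

    // The pk field is int64, so the IDs column is concretely *column.ColumnInt64.
    genPks := idsWithPk.IDs.(*column.ColumnInt64).Data()
    for i, pk := range genPks {
        log.Debug("auto-generated pk", zap.Int("row", i), zap.Int64("pk", pk))
    }
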
@@ -255,7 +255,6 @@ func TestLoadCollectionSparse(t *testing.T) {
 }
 
 func TestLoadPartialFields(t *testing.T) {
-    t.Skip("https://github.com/milvus-io/milvus/issues/37853")
     /*
         1. verify the collection loaded successfully
         2. verify the loaded fields can be searched in expr and output_fields
@@ -294,7 +293,7 @@ func TestLoadPartialFields(t *testing.T) {
     // search with expr not loaded field -> Error
     invalidExpr := fmt.Sprintf("%s > 2.0 ", common.DefaultFloatFieldName)
     _, err = mc.Search(ctx, clientv2.NewSearchOption(schema.CollectionName, common.DefaultLimit, vectors).WithANNSField(common.DefaultFloatVecFieldName).WithFilter(invalidExpr))
-    common.CheckErr(t, err, false, "field fieldID:105 name:\"float\" data_type:Float is not loaded")
+    common.CheckErr(t, err, false, "data_type:Float is not loaded")
 
     // search with output_fields not loaded field -> Error
     _, err = mc.Search(ctx, clientv2.NewSearchOption(schema.CollectionName, common.DefaultLimit, vectors).WithANNSField(common.DefaultFloatVecFieldName).WithOutputFields(common.DefaultBoolFieldName))
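
The assertion is loosened because the old expected string pinned server-assigned details (fieldID:105 and the quoted field name) that can vary across runs and versions; only the "data_type:Float is not loaded" suffix is stable. Assuming common.CheckErr matches on error substrings, the equivalent check in plain testify, shown for illustration only, would be:

    require.ErrorContains(t, err, "data_type:Float is not loaded")
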
@@ -17,6 +17,7 @@ import (
 
 var (
     addr       = flag.String("addr", "localhost:19530", "server host and port")
+    logLevel   = flag.String("log.level", "info", "log level for test")
     defaultCfg clientv2.ClientConfig
 )
@@ -81,8 +82,25 @@ func createMilvusClient(ctx context.Context, t *testing.T, cfg *clientv2.ClientC
     return mc
 }
 
+func parseLogConfig() {
+    log.Info("Parser Log Level", zap.String("logLevel", *logLevel))
+    switch *logLevel {
+    case "debug", "DEBUG", "Debug":
+        log.SetLevel(zap.DebugLevel)
+    case "info", "INFO", "Info":
+        log.SetLevel(zap.InfoLevel)
+    case "warn", "WARN", "Warn":
+        log.SetLevel(zap.WarnLevel)
+    case "error", "ERROR", "Error":
+        log.SetLevel(zap.ErrorLevel)
+    default:
+        log.SetLevel(zap.InfoLevel)
+    }
+}
+
 func TestMain(m *testing.M) {
     flag.Parse()
+    parseLogConfig()
     log.Info("Parser Milvus address", zap.String("address", *addr))
     defaultCfg = clientv2.ClientConfig{Address: *addr}
     code := m.Run()
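
With parseLogConfig in place, the log level is an ordinary flag on the test binary, so it is passed after -args when invoking go test, for example: go test -v -args -addr=localhost:19530 -log.level=debug (package path omitted here). Note that the switch matches only the exact spellings listed for each level; any other value, including mixed case, falls back to info.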