Remove parser dependency (#27514)

Signed-off-by: xiaofan-luan <xiaofan.luan@zilliz.com>
pull/27507/head
Xiaofan 2023-10-08 15:05:31 +08:00 committed by GitHub
parent 241f2d77fb
commit 41124f281a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 137 additions and 1675 deletions

1
go.mod
View File

@ -9,7 +9,6 @@ require (
github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.3.0
github.com/aliyun/credentials-go v1.2.7
github.com/antlr/antlr4/runtime/Go/antlr v0.0.0-20210826220005-b48c857c3a0e
github.com/antonmedv/expr v1.8.9
github.com/apache/arrow/go/v8 v8.0.0-20220322092137-778b1772fd20
github.com/apache/pulsar-client-go v0.6.1-0.20210728062540-29414db801a7
github.com/bits-and-blooms/bloom/v3 v3.0.1

View File

@ -1,927 +0,0 @@
// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package proxy
import (
"fmt"
"math"
"strings"
ant_ast "github.com/antonmedv/expr/ast"
ant_parser "github.com/antonmedv/expr/parser"
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
"github.com/milvus-io/milvus/internal/proto/planpb"
"github.com/milvus-io/milvus/pkg/util/typeutil"
)
// parserContext carries the schema helper used to resolve field names while
// translating an antonmedv/expr AST into a plan proto expression.
type parserContext struct {
	schema *typeutil.SchemaHelper
}

// optimizer is an ant_ast visitor that constant-folds numeric literals during
// ant_ast.Walk; any failure encountered is recorded in err for the caller.
type optimizer struct {
	err error
}

// Enter is a no-op: all rewriting happens post-order in Exit.
func (*optimizer) Enter(*ant_ast.Node) {}
// Exit rewrites a node after its children have been visited: unary +/- on a
// numeric literal is folded into the literal, binary arithmetic between two
// literals is folded to a single literal, and arithmetic between a field
// identifier and a literal is rewritten into a FunctionNode (add/sub/mul/
// div/mod) for later plan translation. Division/modulo by a literal zero and
// unsupported operand combinations set optimizer.err.
func (optimizer *optimizer) Exit(node *ant_ast.Node) {
	// patch replaces the current node in place within the walked tree.
	patch := func(newNode ant_ast.Node) {
		ant_ast.Patch(node, newNode)
	}

	switch node := (*node).(type) {
	case *ant_ast.UnaryNode:
		switch node.Operator {
		case "-":
			// Fold unary minus into the numeric literal itself.
			if i, ok := node.Node.(*ant_ast.IntegerNode); ok {
				patch(&ant_ast.IntegerNode{Value: -i.Value})
			} else if i, ok := node.Node.(*ant_ast.FloatNode); ok {
				patch(&ant_ast.FloatNode{Value: -i.Value})
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		case "+":
			// Unary plus is the identity on numeric literals.
			if i, ok := node.Node.(*ant_ast.IntegerNode); ok {
				patch(&ant_ast.IntegerNode{Value: i.Value})
			} else if i, ok := node.Node.(*ant_ast.FloatNode); ok {
				patch(&ant_ast.FloatNode{Value: i.Value})
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		}

	case *ant_ast.BinaryNode:
		// Probe both operands once; the flags below drive every operator case.
		floatNodeLeft, leftFloat := node.Left.(*ant_ast.FloatNode)
		integerNodeLeft, leftInteger := node.Left.(*ant_ast.IntegerNode)
		floatNodeRight, rightFloat := node.Right.(*ant_ast.FloatNode)
		integerNodeRight, rightInteger := node.Right.(*ant_ast.IntegerNode)

		// Check IdentifierNodes
		identifierNodeLeft, leftIdentifier := node.Left.(*ant_ast.IdentifierNode)
		identifierNodeRight, rightIdentifier := node.Right.(*ant_ast.IdentifierNode)

		switch node.Operator {
		case "+":
			funcName, err := getFuncNameByNodeOp(node.Operator)
			if err != nil {
				optimizer.err = err
				return
			}
			if leftFloat && rightFloat {
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value + floatNodeRight.Value})
			} else if leftFloat && rightInteger {
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value + float64(integerNodeRight.Value)})
			} else if leftInteger && rightFloat {
				patch(&ant_ast.FloatNode{Value: float64(integerNodeLeft.Value) + floatNodeRight.Value})
			} else if leftInteger && rightInteger {
				patch(&ant_ast.IntegerNode{Value: integerNodeLeft.Value + integerNodeRight.Value})
			} else if leftIdentifier && rightFloat {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, floatNodeRight}})
			} else if leftIdentifier && rightInteger {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, integerNodeRight}})
			} else if leftFloat && rightIdentifier {
				// Addition commutes, so the identifier can be moved left.
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeRight, floatNodeLeft}})
			} else if leftInteger && rightIdentifier {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeRight, integerNodeLeft}})
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		case "-":
			funcName, err := getFuncNameByNodeOp(node.Operator)
			if err != nil {
				optimizer.err = err
				return
			}
			if leftFloat && rightFloat {
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value - floatNodeRight.Value})
			} else if leftFloat && rightInteger {
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value - float64(integerNodeRight.Value)})
			} else if leftInteger && rightFloat {
				patch(&ant_ast.FloatNode{Value: float64(integerNodeLeft.Value) - floatNodeRight.Value})
			} else if leftInteger && rightInteger {
				patch(&ant_ast.IntegerNode{Value: integerNodeLeft.Value - integerNodeRight.Value})
			} else if leftIdentifier && rightFloat {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, floatNodeRight}})
			} else if leftIdentifier && rightInteger {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, integerNodeRight}})
			} else if leftFloat && rightIdentifier {
				// Subtraction does not commute; literal-minus-field is rejected.
				optimizer.err = fmt.Errorf("field as right operand is not yet supported for (%s) operator", node.Operator)
				return
			} else if leftInteger && rightIdentifier {
				optimizer.err = fmt.Errorf("field as right operand is not yet supported for (%s) operator", node.Operator)
				return
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		case "*":
			funcName, err := getFuncNameByNodeOp(node.Operator)
			if err != nil {
				optimizer.err = err
				return
			}
			if leftFloat && rightFloat {
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value * floatNodeRight.Value})
			} else if leftFloat && rightInteger {
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value * float64(integerNodeRight.Value)})
			} else if leftInteger && rightFloat {
				patch(&ant_ast.FloatNode{Value: float64(integerNodeLeft.Value) * floatNodeRight.Value})
			} else if leftInteger && rightInteger {
				patch(&ant_ast.IntegerNode{Value: integerNodeLeft.Value * integerNodeRight.Value})
			} else if leftIdentifier && rightFloat {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, floatNodeRight}})
			} else if leftIdentifier && rightInteger {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, integerNodeRight}})
			} else if leftFloat && rightIdentifier {
				// Multiplication commutes, so the identifier can be moved left.
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeRight, floatNodeLeft}})
			} else if leftInteger && rightIdentifier {
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeRight, integerNodeLeft}})
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		case "/":
			funcName, err := getFuncNameByNodeOp(node.Operator)
			if err != nil {
				optimizer.err = err
				return
			}
			// Every branch with a literal divisor rejects zero up front.
			if leftFloat && rightFloat {
				if floatNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("divide by zero")
					return
				}
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value / floatNodeRight.Value})
			} else if leftFloat && rightInteger {
				if integerNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("divide by zero")
					return
				}
				patch(&ant_ast.FloatNode{Value: floatNodeLeft.Value / float64(integerNodeRight.Value)})
			} else if leftInteger && rightFloat {
				if floatNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("divide by zero")
					return
				}
				patch(&ant_ast.FloatNode{Value: float64(integerNodeLeft.Value) / floatNodeRight.Value})
			} else if leftInteger && rightInteger {
				if integerNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("divide by zero")
					return
				}
				patch(&ant_ast.IntegerNode{Value: integerNodeLeft.Value / integerNodeRight.Value})
			} else if leftIdentifier && rightFloat {
				if floatNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("divide by zero")
					return
				}
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, floatNodeRight}})
			} else if leftIdentifier && rightInteger {
				if integerNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("divide by zero")
					return
				}
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, integerNodeRight}})
			} else if leftFloat && rightIdentifier {
				optimizer.err = fmt.Errorf("field as right operand is not yet supported for (%s) operator", node.Operator)
				return
			} else if leftInteger && rightIdentifier {
				optimizer.err = fmt.Errorf("field as right operand is not yet supported for (%s) operator", node.Operator)
				return
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		case "%":
			funcName, err := getFuncNameByNodeOp(node.Operator)
			if err != nil {
				optimizer.err = err
				return
			}
			// Modulo is integer-only; float operands fall through to the error.
			if leftInteger && rightInteger {
				if integerNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("modulo by zero")
					return
				}
				patch(&ant_ast.IntegerNode{Value: integerNodeLeft.Value % integerNodeRight.Value})
			} else if leftIdentifier && rightInteger {
				if integerNodeRight.Value == 0 {
					optimizer.err = fmt.Errorf("modulo by zero")
					return
				}
				patch(&ant_ast.FunctionNode{Name: funcName, Arguments: []ant_ast.Node{identifierNodeLeft, integerNodeRight}})
			} else if leftInteger && rightIdentifier {
				optimizer.err = fmt.Errorf("field as right operand is not yet supported for (%s) operator", node.Operator)
				return
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		case "**":
			// Power is only folded between literals; fields are not supported.
			if leftFloat && rightFloat {
				patch(&ant_ast.FloatNode{Value: math.Pow(floatNodeLeft.Value, floatNodeRight.Value)})
			} else if leftFloat && rightInteger {
				patch(&ant_ast.FloatNode{Value: math.Pow(floatNodeLeft.Value, float64(integerNodeRight.Value))})
			} else if leftInteger && rightFloat {
				patch(&ant_ast.FloatNode{Value: math.Pow(float64(integerNodeLeft.Value), floatNodeRight.Value)})
			} else if leftInteger && rightInteger {
				// NOTE(review): int(math.Pow(...)) may lose precision for very
				// large results — confirm intended range of integer literals.
				patch(&ant_ast.IntegerNode{Value: int(math.Pow(float64(integerNodeLeft.Value), float64(integerNodeRight.Value)))})
			} else {
				optimizer.err = fmt.Errorf("invalid data type")
				return
			}
		}
	}
}
// parseExpr parses a textual predicate into a plan proto expression.
// An empty predicate is legal and produces a nil expression with no error.
func parseExpr(schema *typeutil.SchemaHelper, exprStr string) (*planpb.Expr, error) {
	if exprStr == "" {
		return nil, nil
	}
	parsed, err := ant_parser.Parse(exprStr)
	if err != nil {
		return nil, err
	}
	// Constant-fold literals and rewrite field arithmetic into function nodes
	// before translating the tree.
	opt := &optimizer{}
	ant_ast.Walk(&parsed.Node, opt)
	if opt.err != nil {
		return nil, opt.err
	}
	pc := parserContext{schema}
	return pc.handleExpr(&parsed.Node)
}
// createColumnInfo builds the plan-proto column descriptor for a field schema.
func createColumnInfo(field *schemapb.FieldSchema) *planpb.ColumnInfo {
	return &planpb.ColumnInfo{
		FieldId:        field.FieldID,
		DataType:       field.DataType,
		IsPrimaryKey:   field.IsPrimaryKey,
		IsPartitionKey: field.IsPartitionKey,
	}
}
// isSameOrder reports whether two relational operators point in the same
// direction, i.e. both are less-than flavored or both are greater-than
// flavored.
func isSameOrder(opStr1, opStr2 string) bool {
	lessward := func(op string) bool {
		return op == "<" || op == "<="
	}
	return lessward(opStr1) == lessward(opStr2)
}
// opMap renders a compare OpType back to its source-level operator text; it
// is used to re-dispatch an already-mirrored comparison (see
// handleBinaryArithCmpExpr).
var opMap = map[planpb.OpType]string{
	planpb.OpType_Invalid:      "invalid",
	planpb.OpType_GreaterThan:  ">",
	planpb.OpType_GreaterEqual: ">=",
	planpb.OpType_LessThan:     "<",
	planpb.OpType_LessEqual:    "<=",
	planpb.OpType_Equal:        "==",
	planpb.OpType_NotEqual:     "!=",
}
// getCompareOpType maps a textual comparison operator to its plan OpType.
// When reverse is true the relational operators are mirrored (the operands
// were swapped by the caller); equality and string-match ops are unaffected.
func getCompareOpType(opStr string, reverse bool) (op planpb.OpType) {
	switch opStr {
	case ">":
		op = planpb.OpType_GreaterThan
	case "<":
		op = planpb.OpType_LessThan
	case ">=":
		op = planpb.OpType_GreaterEqual
	case "<=":
		op = planpb.OpType_LessEqual
	case "==":
		return planpb.OpType_Equal
	case "!=":
		return planpb.OpType_NotEqual
	case "startsWith":
		return planpb.OpType_PrefixMatch
	case "endsWith":
		return planpb.OpType_PostfixMatch
	default:
		return planpb.OpType_Invalid
	}
	if !reverse {
		return op
	}
	// Mirror the relational direction for swapped operands.
	switch op {
	case planpb.OpType_GreaterThan:
		return planpb.OpType_LessThan
	case planpb.OpType_LessThan:
		return planpb.OpType_GreaterThan
	case planpb.OpType_GreaterEqual:
		return planpb.OpType_LessEqual
	default: // planpb.OpType_LessEqual
		return planpb.OpType_GreaterEqual
	}
}
// getLogicalOpType maps a textual logical operator ("&&"/"and", "||"/"or")
// to its plan binary op, returning BinaryExpr_Invalid for anything else.
func getLogicalOpType(opStr string) planpb.BinaryExpr_BinaryOp {
	if opStr == "&&" || opStr == "and" {
		return planpb.BinaryExpr_LogicalAnd
	}
	if opStr == "||" || opStr == "or" {
		return planpb.BinaryExpr_LogicalOr
	}
	return planpb.BinaryExpr_Invalid
}
// getArithOpType maps an optimizer-generated function name (add/sub/mul/
// div/mod) to its plan arithmetic op type; unknown names yield an error.
func getArithOpType(funcName string) (planpb.ArithOpType, error) {
	ops := map[string]planpb.ArithOpType{
		"add": planpb.ArithOpType_Add,
		"sub": planpb.ArithOpType_Sub,
		"mul": planpb.ArithOpType_Mul,
		"div": planpb.ArithOpType_Div,
		"mod": planpb.ArithOpType_Mod,
	}
	op, found := ops[funcName]
	if !found {
		return op, fmt.Errorf("unsupported or invalid arith op type: %s", funcName)
	}
	return op, nil
}
// getFuncNameByNodeOp maps an arithmetic operator token to the FunctionNode
// name used by the optimizer; unknown operators yield an error and the empty
// string.
func getFuncNameByNodeOp(nodeOp string) (string, error) {
	names := map[string]string{
		"+": "add",
		"-": "sub",
		"*": "mul",
		"/": "div",
		"%": "mod",
	}
	if name, ok := names[nodeOp]; ok {
		return name, nil
	}
	return "", fmt.Errorf("no defined funcName assigned to nodeOp: %s", nodeOp)
}
// parseBoolNode turns an identifier spelled "true" or "false" (any letter
// case) into a BoolNode literal; any other node yields nil.
func parseBoolNode(nodeRaw *ant_ast.Node) *ant_ast.BoolNode {
	idNode, ok := (*nodeRaw).(*ant_ast.IdentifierNode)
	if !ok {
		return nil
	}
	switch strings.ToLower(idNode.Value) {
	case "true":
		return &ant_ast.BoolNode{Value: true}
	case "false":
		return &ant_ast.BoolNode{Value: false}
	default:
		return nil
	}
}
// createCmpExpr builds a comparison expression. When both sides resolve to
// field identifiers it emits a column-vs-column CompareExpr; when exactly one
// side is an identifier it emits a UnaryRangeExpr, mirroring the operator if
// the identifier sits on the right-hand side. Two literals are rejected.
func (pc *parserContext) createCmpExpr(left, right ant_ast.Node, operator string) (*planpb.Expr, error) {
	// Identifiers literally spelled "true"/"false" are boolean literals.
	if boolNode := parseBoolNode(&left); boolNode != nil {
		left = boolNode
	}
	if boolNode := parseBoolNode(&right); boolNode != nil {
		right = boolNode
	}

	idNodeLeft, okLeft := left.(*ant_ast.IdentifierNode)
	idNodeRight, okRight := right.(*ant_ast.IdentifierNode)

	if okLeft && okRight {
		// field-vs-field comparison.
		leftField, err := pc.handleIdentifier(idNodeLeft)
		if err != nil {
			return nil, err
		}
		rightField, err := pc.handleIdentifier(idNodeRight)
		if err != nil {
			return nil, err
		}
		op := getCompareOpType(operator, false)
		if op == planpb.OpType_Invalid {
			return nil, fmt.Errorf("invalid binary operator(%s)", operator)
		}
		expr := &planpb.Expr{
			Expr: &planpb.Expr_CompareExpr{
				CompareExpr: &planpb.CompareExpr{
					LeftColumnInfo:  createColumnInfo(leftField),
					RightColumnInfo: createColumnInfo(rightField),
					Op:              op,
				},
			},
		}
		return expr, nil
	}

	// field-vs-literal comparison: find which side holds the identifier.
	var idNode *ant_ast.IdentifierNode
	var reverse bool
	var valueNode *ant_ast.Node
	if okLeft {
		idNode = idNodeLeft
		reverse = false
		valueNode = &right
	} else if okRight {
		idNode = idNodeRight
		// Identifier on the right: the compare op must be mirrored.
		reverse = true
		valueNode = &left
	} else {
		return nil, fmt.Errorf("compare expr has no identifier")
	}

	field, err := pc.handleIdentifier(idNode)
	if err != nil {
		return nil, err
	}
	// The literal must be convertible to the field's data type.
	val, err := pc.handleLeafValue(valueNode, field.DataType)
	if err != nil {
		return nil, err
	}
	op := getCompareOpType(operator, reverse)
	if op == planpb.OpType_Invalid {
		return nil, fmt.Errorf("invalid binary operator(%s)", operator)
	}
	expr := &planpb.Expr{
		Expr: &planpb.Expr_UnaryRangeExpr{
			UnaryRangeExpr: &planpb.UnaryRangeExpr{
				ColumnInfo: createColumnInfo(field),
				Op:         op,
				Value:      val,
			},
		},
	}
	return expr, nil
}
// createBinaryArithOpEvalExpr builds a BinaryArithOpEvalRangeExpr for
// predicates of the form "field <arith> literal == literal" (or !=). Only
// equality operators are supported when one operand is an arithmetic
// function node; any other operator is rejected.
func (pc *parserContext) createBinaryArithOpEvalExpr(left *ant_ast.FunctionNode, right *ant_ast.Node, operator string) (*planpb.Expr, error) {
	switch operator {
	case "==", "!=":
		binArithOp, err := pc.handleFunction(left)
		if err != nil {
			// Wrap with %w (not %v) so callers can unwrap the root cause.
			return nil, fmt.Errorf("createBinaryArithOpEvalExpr: %w", err)
		}
		op := getCompareOpType(operator, false)
		// The comparison value must match the column's data type.
		val, err := pc.handleLeafValue(right, binArithOp.ColumnInfo.DataType)
		if err != nil {
			return nil, err
		}
		expr := &planpb.Expr{
			Expr: &planpb.Expr_BinaryArithOpEvalRangeExpr{
				BinaryArithOpEvalRangeExpr: &planpb.BinaryArithOpEvalRangeExpr{
					ColumnInfo:   binArithOp.ColumnInfo,
					ArithOp:      binArithOp.ArithOp,
					RightOperand: binArithOp.RightOperand,
					Op:           op,
					Value:        val,
				},
			},
		}
		return expr, nil
	}
	return nil, fmt.Errorf("operator(%s) not yet supported for function nodes", operator)
}
// handleCmpExpr translates a plain binary comparison node; it is a thin
// wrapper over createCmpExpr with the node's own operand order.
func (pc *parserContext) handleCmpExpr(node *ant_ast.BinaryNode) (*planpb.Expr, error) {
	return pc.createCmpExpr(node.Left, node.Right, node.Operator)
}
// handleBinaryArithCmpExpr handles comparisons where at least one side is an
// arithmetic FunctionNode produced by the optimizer, e.g. "field + 3 == 5".
// Both sides being arithmetic expressions is not representable and fails.
func (pc *parserContext) handleBinaryArithCmpExpr(node *ant_ast.BinaryNode) (*planpb.Expr, error) {
	leftNode, funcNodeLeft := node.Left.(*ant_ast.FunctionNode)
	rightNode, funcNodeRight := node.Right.(*ant_ast.FunctionNode)

	if funcNodeLeft && funcNodeRight {
		// Fixed the previously garbled error message.
		return nil, fmt.Errorf("arithmetic expressions on both sides of the operator are not supported")
	} else if funcNodeRight {
		// Only the right node is a function node: mirror the operator so the
		// arithmetic expression can be treated as the left operand.
		op := getCompareOpType(node.Operator, true)
		if op == planpb.OpType_Invalid {
			return nil, fmt.Errorf("invalid right expression")
		}
		return pc.createBinaryArithOpEvalExpr(rightNode, &node.Left, opMap[op])
	} else if funcNodeLeft {
		// Only the left node is a function node.
		return pc.createBinaryArithOpEvalExpr(leftNode, &node.Right, node.Operator)
	}
	// Neither side is a function node; fall back to the plain compare path.
	return pc.createCmpExpr(node.Left, node.Right, node.Operator)
}
// handleLogicalExpr translates an and/or node by recursively translating both
// operands and joining them with the corresponding plan binary op.
func (pc *parserContext) handleLogicalExpr(node *ant_ast.BinaryNode) (*planpb.Expr, error) {
	op := getLogicalOpType(node.Operator)
	if op == planpb.BinaryExpr_Invalid {
		return nil, fmt.Errorf("invalid logical operator(%s)", node.Operator)
	}
	left, err := pc.handleExpr(&node.Left)
	if err != nil {
		return nil, err
	}
	right, err := pc.handleExpr(&node.Right)
	if err != nil {
		return nil, err
	}
	return &planpb.Expr{
		Expr: &planpb.Expr_BinaryExpr{
			BinaryExpr: &planpb.BinaryExpr{
				Op:    op,
				Left:  left,
				Right: right,
			},
		},
	}, nil
}
// handleArrayExpr converts the array literal on the right of an "in" clause
// into a list of generic values coerced to the given element data type.
func (pc *parserContext) handleArrayExpr(node *ant_ast.Node, dataType schemapb.DataType) ([]*planpb.GenericValue, error) {
	arrayNode, isArray := (*node).(*ant_ast.ArrayNode)
	if !isArray {
		return nil, fmt.Errorf("right operand of the InExpr must be array")
	}
	var values []*planpb.GenericValue
	for i := range arrayNode.Nodes {
		// Address the slice element directly; handleLeafValue only reads it.
		value, err := pc.handleLeafValue(&arrayNode.Nodes[i], dataType)
		if err != nil {
			return nil, err
		}
		values = append(values, value)
	}
	return values, nil
}
// handleInExpr translates "field in [...]" into a TermExpr; "not in" wraps
// the TermExpr in a logical NOT. The left operand must be a field identifier
// and the right operand an array literal of the field's data type.
func (pc *parserContext) handleInExpr(node *ant_ast.BinaryNode) (*planpb.Expr, error) {
	if node.Operator != "in" && node.Operator != "not in" {
		return nil, fmt.Errorf("invalid operator(%s)", node.Operator)
	}
	idNode, ok := node.Left.(*ant_ast.IdentifierNode)
	if !ok {
		return nil, fmt.Errorf("left operand of the InExpr must be identifier")
	}
	field, err := pc.handleIdentifier(idNode)
	if err != nil {
		return nil, err
	}
	arrayData, err := pc.handleArrayExpr(&node.Right, field.DataType)
	if err != nil {
		return nil, err
	}
	expr := &planpb.Expr{
		Expr: &planpb.Expr_TermExpr{
			TermExpr: &planpb.TermExpr{
				ColumnInfo: createColumnInfo(field),
				Values:     arrayData,
			},
		},
	}
	// "not in" is modeled as NOT(term expr).
	if node.Operator == "not in" {
		return pc.createNotExpr(expr)
	}
	return expr, nil
}
// combineUnaryRangeExpr merges two unary range predicates on the same column
// into a single BinaryRangeExpr. The pair is first normalized so that `a`
// carries the lower bound (a greater-than flavored op) and `b` the upper
// bound; inclusivity is derived from whether each op allows equality.
func (pc *parserContext) combineUnaryRangeExpr(a, b *planpb.UnaryRangeExpr) *planpb.Expr {
	if a.Op == planpb.OpType_LessEqual || a.Op == planpb.OpType_LessThan {
		a, b = b, a
	}
	lowerInclusive := (a.Op == planpb.OpType_GreaterEqual)
	upperInclusive := (b.Op == planpb.OpType_LessEqual)
	expr := &planpb.Expr{
		Expr: &planpb.Expr_BinaryRangeExpr{
			BinaryRangeExpr: &planpb.BinaryRangeExpr{
				ColumnInfo:     a.ColumnInfo,
				LowerInclusive: lowerInclusive,
				UpperInclusive: upperInclusive,
				LowerValue:     a.Value,
				UpperValue:     b.Value,
			},
		},
	}
	return expr
}
// handleMultiCmpExpr translates chained comparisons such as "1 < a <= 10",
// which the parser nests as left-leaning BinaryNodes. All operators in the
// chain must point in the same direction. Adjacent unary ranges on the same
// column are then fused into BinaryRangeExprs, and the remaining pieces are
// joined with logical AND.
func (pc *parserContext) handleMultiCmpExpr(node *ant_ast.BinaryNode) (*planpb.Expr, error) {
	exprs := []*planpb.Expr{}
	curNode := node

	// handle multiple relational operators
	for {
		binNodeLeft, LeftOk := curNode.Left.(*ant_ast.BinaryNode)
		if !LeftOk {
			// End of the chain: the innermost comparison stands alone.
			expr, err := pc.handleCmpExpr(curNode)
			if err != nil {
				return nil, err
			}
			exprs = append(exprs, expr)
			break
		}
		if isSameOrder(node.Operator, binNodeLeft.Operator) {
			// Each link compares the shared middle operand with the next value.
			expr, err := pc.createCmpExpr(binNodeLeft.Right, curNode.Right, curNode.Operator)
			if err != nil {
				return nil, err
			}
			exprs = append(exprs, expr)
			curNode = binNodeLeft
		} else {
			return nil, fmt.Errorf("illegal multi-range expr")
		}
	}

	// combine UnaryRangeExpr to BinaryRangeExpr
	var lastExpr *planpb.UnaryRangeExpr
	for i := len(exprs) - 1; i >= 0; i-- {
		if expr, ok := exprs[i].Expr.(*planpb.Expr_UnaryRangeExpr); ok {
			if lastExpr != nil && expr.UnaryRangeExpr.ColumnInfo.FieldId == lastExpr.ColumnInfo.FieldId {
				// Fuse this range with the adjacent one and splice out both.
				binaryRangeExpr := pc.combineUnaryRangeExpr(expr.UnaryRangeExpr, lastExpr)
				exprs = append(exprs[0:i], append([]*planpb.Expr{binaryRangeExpr}, exprs[i+2:]...)...)
				lastExpr = nil
			} else {
				lastExpr = expr.UnaryRangeExpr
			}
		} else {
			lastExpr = nil
		}
	}

	// use `&&` to connect exprs
	combinedExpr := exprs[len(exprs)-1]
	for i := len(exprs) - 2; i >= 0; i-- {
		expr := exprs[i]
		combinedExpr = &planpb.Expr{
			Expr: &planpb.Expr_BinaryExpr{
				BinaryExpr: &planpb.BinaryExpr{
					Op:    planpb.BinaryExpr_LogicalAnd,
					Left:  combinedExpr,
					Right: expr,
				},
			},
		}
	}
	return combinedExpr, nil
}
// handleBinaryExpr dispatches a binary node to the proper translation path:
// arith-compare when either side is a function node, otherwise by operator
// class (relational chain, equality/string match, logical, membership).
func (pc *parserContext) handleBinaryExpr(node *ant_ast.BinaryNode) (*planpb.Expr, error) {
	_, leftIsArith := node.Left.(*ant_ast.FunctionNode)
	_, rightIsArith := node.Right.(*ant_ast.FunctionNode)
	if leftIsArith || rightIsArith {
		return pc.handleBinaryArithCmpExpr(node)
	}
	switch node.Operator {
	case "<", "<=", ">", ">=":
		return pc.handleMultiCmpExpr(node)
	case "==", "!=", "startsWith", "endsWith":
		return pc.handleCmpExpr(node)
	case "and", "or", "&&", "||":
		return pc.handleLogicalExpr(node)
	case "in", "not in":
		return pc.handleInExpr(node)
	default:
		return nil, fmt.Errorf("unsupported binary operator %s", node.Operator)
	}
}
// createNotExpr wraps a child expression in a logical NOT UnaryExpr.
func (pc *parserContext) createNotExpr(childExpr *planpb.Expr) (*planpb.Expr, error) {
	expr := &planpb.Expr{
		Expr: &planpb.Expr_UnaryExpr{
			UnaryExpr: &planpb.UnaryExpr{
				Op:    planpb.UnaryExpr_Not,
				Child: childExpr,
			},
		},
	}
	return expr, nil
}
// handleLeafValue converts a literal AST node into a GenericValue compatible
// with the target field's data type. Integer literals widen to float for
// floating-point fields; all other cross-type combinations are rejected.
func (pc *parserContext) handleLeafValue(nodeRaw *ant_ast.Node, dataType schemapb.DataType) (gv *planpb.GenericValue, err error) {
	switch node := (*nodeRaw).(type) {
	case *ant_ast.FloatNode:
		if typeutil.IsFloatingType(dataType) {
			gv = &planpb.GenericValue{
				Val: &planpb.GenericValue_FloatVal{
					FloatVal: node.Value,
				},
			}
		} else {
			return nil, fmt.Errorf("type mismatch")
		}
	case *ant_ast.IntegerNode:
		// Integer literals are accepted for both float and integer fields.
		if typeutil.IsFloatingType(dataType) {
			gv = &planpb.GenericValue{
				Val: &planpb.GenericValue_FloatVal{
					FloatVal: float64(node.Value),
				},
			}
		} else if typeutil.IsIntegerType(dataType) {
			gv = &planpb.GenericValue{
				Val: &planpb.GenericValue_Int64Val{
					Int64Val: int64(node.Value),
				},
			}
		} else {
			return nil, fmt.Errorf("type mismatch")
		}
	case *ant_ast.BoolNode:
		if typeutil.IsBoolType(dataType) {
			gv = &planpb.GenericValue{
				Val: &planpb.GenericValue_BoolVal{
					BoolVal: node.Value,
				},
			}
		} else {
			return nil, fmt.Errorf("type mismatch")
		}
	case *ant_ast.StringNode:
		if typeutil.IsStringType(dataType) {
			gv = &planpb.GenericValue{
				Val: &planpb.GenericValue_StringVal{
					StringVal: node.Value,
				},
			}
		} else {
			return nil, fmt.Errorf("type mismatch")
		}
	default:
		return nil, fmt.Errorf("unsupported leaf node")
	}
	return gv, nil
}
// handleFunction translates an optimizer-generated FunctionNode (add/sub/
// mul/div/mod) into a BinaryArithOp. By construction the optimizer always
// places the field identifier in Arguments[0] and the literal in
// Arguments[1].
func (pc *parserContext) handleFunction(node *ant_ast.FunctionNode) (*planpb.BinaryArithOp, error) {
	funcArithOp, err := getArithOpType(node.Name)
	if err != nil {
		return nil, err
	}
	idNode, ok := node.Arguments[0].(*ant_ast.IdentifierNode)
	if !ok {
		return nil, fmt.Errorf("left operand of the function must be an identifier")
	}
	field, err := pc.handleIdentifier(idNode)
	if err != nil {
		return nil, err
	}
	valueNode := node.Arguments[1]
	// The literal operand must be convertible to the field's data type.
	val, err := pc.handleLeafValue(&valueNode, field.DataType)
	if err != nil {
		return nil, err
	}
	arithOp := &planpb.BinaryArithOp{
		ColumnInfo:   createColumnInfo(field),
		ArithOp:      funcArithOp,
		RightOperand: val,
	}
	return arithOp, nil
}
// handleIdentifier resolves an identifier node to its field schema by name.
func (pc *parserContext) handleIdentifier(node *ant_ast.IdentifierNode) (*schemapb.FieldSchema, error) {
	return pc.schema.GetFieldFromName(node.Value)
}
// handleUnaryExpr translates a logical negation ("!" or "not"); every other
// unary operator is invalid here because numeric +/- were already folded by
// the optimizer.
func (pc *parserContext) handleUnaryExpr(node *ant_ast.UnaryNode) (*planpb.Expr, error) {
	if node.Operator != "!" && node.Operator != "not" {
		return nil, fmt.Errorf("invalid unary operator(%s)", node.Operator)
	}
	child, err := pc.handleExpr(&node.Node)
	if err != nil {
		return nil, err
	}
	return pc.createNotExpr(child)
}
// handleExpr is the recursive entry point for AST translation: bare literals
// and identifiers are rejected (a predicate must be boolean-valued), unary
// nodes go to handleUnaryExpr, binary nodes to handleBinaryExpr.
func (pc *parserContext) handleExpr(nodeRaw *ant_ast.Node) (*planpb.Expr, error) {
	switch node := (*nodeRaw).(type) {
	case *ant_ast.IdentifierNode,
		*ant_ast.FloatNode,
		*ant_ast.IntegerNode,
		*ant_ast.BoolNode,
		*ant_ast.StringNode:
		return nil, fmt.Errorf("scalar expr is not supported yet")
	case *ant_ast.UnaryNode:
		expr, err := pc.handleUnaryExpr(node)
		if err != nil {
			return nil, err
		}
		return expr, nil
	case *ant_ast.BinaryNode:
		return pc.handleBinaryExpr(node)
	default:
		return nil, fmt.Errorf("unsupported node")
	}
}
// createQueryPlan builds a vector-search plan node: the parsed predicate (if
// any) plus ANN search parameters over the named vector field.
func createQueryPlan(schemaPb *schemapb.CollectionSchema, exprStr string, vectorFieldName string, queryInfo *planpb.QueryInfo) (*planpb.PlanNode, error) {
	schema, err := typeutil.CreateSchemaHelper(schemaPb)
	if err != nil {
		return nil, err
	}

	// expr is nil when exprStr is empty (unfiltered search).
	expr, err := parseExpr(schema, exprStr)
	if err != nil {
		return nil, err
	}
	vectorField, err := schema.GetFieldFromName(vectorFieldName)
	if err != nil {
		return nil, err
	}
	fieldID := vectorField.FieldID
	dataType := vectorField.DataType

	var vectorType planpb.VectorType
	if !typeutil.IsVectorType(dataType) {
		return nil, fmt.Errorf("field (%s) to search is not of vector data type", vectorFieldName)
	}
	if dataType == schemapb.DataType_FloatVector {
		vectorType = planpb.VectorType_FloatVector
	} else if dataType == schemapb.DataType_BinaryVector {
		vectorType = planpb.VectorType_BinaryVector
	} else {
		// NOTE(review): any other vector type falls through to Float16Vector —
		// confirm this stays correct if new vector data types are added.
		vectorType = planpb.VectorType_Float16Vector
	}
	planNode := &planpb.PlanNode{
		Node: &planpb.PlanNode_VectorAnns{
			VectorAnns: &planpb.VectorANNS{
				VectorType:     vectorType,
				Predicates:     expr,
				QueryInfo:      queryInfo,
				PlaceholderTag: "$0",
				FieldId:        fieldID,
			},
		},
	}
	return planNode, nil
}
// createExprPlan builds a predicate-only plan node (used for queries/deletes
// that filter rows without a vector search).
func createExprPlan(schemaPb *schemapb.CollectionSchema, exprStr string) (*planpb.PlanNode, error) {
	helper, err := typeutil.CreateSchemaHelper(schemaPb)
	if err != nil {
		return nil, err
	}
	predicates, err := parseExpr(helper, exprStr)
	if err != nil {
		return nil, err
	}
	return &planpb.PlanNode{
		Node: &planpb.PlanNode_Predicates{
			Predicates: predicates,
		},
	}, nil
}

View File

@ -1,636 +0,0 @@
// Licensed to the LF AI & Data foundation under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package proxy
import (
"fmt"
"testing"
ant_ast "github.com/antonmedv/expr/ast"
ant_parser "github.com/antonmedv/expr/parser"
"github.com/golang/protobuf/proto"
"github.com/stretchr/testify/assert"
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb"
"github.com/milvus-io/milvus/internal/parser/planparserv2"
"github.com/milvus-io/milvus/internal/proto/planpb"
"github.com/milvus-io/milvus/pkg/util/typeutil"
)
// newTestSchema builds a collection schema containing one field per numeric,
// vector, and string data type (named "<Type>Field"), plus an Int64 field
// with ID 0, for exercising the parser against every supported field type.
func newTestSchema() *schemapb.CollectionSchema {
	fields := []*schemapb.FieldSchema{
		{FieldID: 0, Name: "FieldID", IsPrimaryKey: false, Description: "field no.1", DataType: schemapb.DataType_Int64},
	}

	for name, value := range schemapb.DataType_value {
		dataType := schemapb.DataType(value)
		// Skip types the parser does not handle (e.g. None, JSON, Array).
		if !typeutil.IsIntegerType(dataType) && !typeutil.IsFloatingType(dataType) && !typeutil.IsVectorType(dataType) && !typeutil.IsStringType(dataType) {
			continue
		}
		newField := &schemapb.FieldSchema{
			FieldID: int64(100 + value), Name: name + "Field", IsPrimaryKey: false, Description: "", DataType: dataType,
		}
		fields = append(fields, newField)
	}

	return &schemapb.CollectionSchema{
		Name:               "test",
		Description:        "schema for test used",
		AutoID:             true,
		Fields:             fields,
		EnableDynamicField: true,
	}
}
// assertValidExpr asserts that the legacy parser accepts exprStr.
func assertValidExpr(t *testing.T, schema *typeutil.SchemaHelper, exprStr string) {
	// t.Log("expr: ", exprStr)

	_, err := parseExpr(schema, exprStr)
	assert.Nil(t, err, exprStr)

	// t.Log("AST1:")
	// planparserv2.ShowExpr(expr1)
}
// assertValidExprV2 asserts that both the legacy parser and planparserv2
// accept exprStr and produce structurally identical plan expressions.
func assertValidExprV2(t *testing.T, schema *typeutil.SchemaHelper, exprStr string) {
	expr1, err := parseExpr(schema, exprStr)
	assert.NoError(t, err)

	expr2, err := planparserv2.ParseExpr(schema, exprStr)
	assert.NoError(t, err)

	if !planparserv2.CheckPredicatesIdentical(expr1, expr2) {
		// Dump both ASTs to make the divergence diagnosable.
		t.Log("expr: ", exprStr)
		t.Log("AST1:")
		planparserv2.ShowExpr(expr1)
		t.Log("AST2:")
		planparserv2.ShowExpr(expr2)
		t.Errorf("parsed asts are not identical")
	}
}
// assertInvalidExpr asserts that both parsers reject exprStr.
func assertInvalidExpr(t *testing.T, schema *typeutil.SchemaHelper, exprStr string) {
	_, err := parseExpr(schema, exprStr)
	assert.Error(t, err, exprStr)

	_, err = planparserv2.ParseExpr(schema, exprStr)
	assert.Error(t, err, exprStr)
}
// assertValidSearchPlan asserts that the legacy parser can build a search
// plan for exprStr over the given vector field.
func assertValidSearchPlan(t *testing.T, schema *schemapb.CollectionSchema, exprStr string, vectorFieldName string, queryInfo *planpb.QueryInfo) {
	_, err := createQueryPlan(schema, exprStr, vectorFieldName, queryInfo)
	assert.NoError(t, err)
}
// assertValidSearchPlanV2 asserts that both parsers build search plans for
// exprStr and that the resulting predicates are structurally identical.
func assertValidSearchPlanV2(t *testing.T, schema *schemapb.CollectionSchema, exprStr string, vectorFieldName string, queryInfo *planpb.QueryInfo) {
	planProto1, err := createQueryPlan(schema, exprStr, vectorFieldName, queryInfo)
	assert.NoError(t, err)

	planProto2, err := planparserv2.CreateSearchPlan(schema, exprStr, vectorFieldName, queryInfo)
	assert.NoError(t, err)

	expr1 := planProto1.GetVectorAnns().GetPredicates()
	assert.NotNil(t, expr1)

	expr2 := planProto2.GetVectorAnns().GetPredicates()
	assert.NotNil(t, expr2)

	if !planparserv2.CheckPredicatesIdentical(expr1, expr2) {
		// Dump both ASTs to make the divergence diagnosable.
		t.Log("expr: ", exprStr)
		t.Log("AST1:")
		planparserv2.ShowExpr(expr1)
		t.Log("AST2:")
		planparserv2.ShowExpr(expr2)
		t.Errorf("parsed asts are not identical")
	}
}
// assertInvalidSearchPlan asserts that both parsers fail to build a search
// plan for exprStr.
func assertInvalidSearchPlan(t *testing.T, schema *schemapb.CollectionSchema, exprStr string, vectorFieldName string, queryInfo *planpb.QueryInfo) {
	_, err := createQueryPlan(schema, exprStr, vectorFieldName, queryInfo)
	assert.Error(t, err, exprStr)

	_, err = planparserv2.CreateSearchPlan(schema, exprStr, vectorFieldName, queryInfo)
	assert.Error(t, err, exprStr)
}
// TestParseExpr_Naive exercises the expression parser over unary, binary,
// and binary-arithmetic nodes, checking both valid and invalid inputs and —
// where applicable — parity with planparserv2.
func TestParseExpr_Naive(t *testing.T) {
	schemaPb := newTestSchema()
	schema, err := typeutil.CreateSchemaHelper(schemaPb)
	assert.NoError(t, err)

	t.Run("test UnaryNode", func(t *testing.T) {
		exprStrs := []string{
			"Int64Field > +1",
			"Int64Field > -1",
			"FloatField > +1.0",
			"FloatField > -1.0",
			`VarCharField > "str"`,
		}
		for _, exprStr := range exprStrs {
			assertValidExprV2(t, schema, exprStr)
		}
	})

	// startsWith/endsWith are only supported by the legacy parser, so parity
	// with planparserv2 is not asserted here.
	t.Run("test string unary", func(t *testing.T) {
		exprStrs := []string{
			`VarCharField startsWith "str"`,
			`VarCharField endsWith "str"`,
		}
		for _, exprStr := range exprStrs {
			assertValidExpr(t, schema, exprStr)
		}
	})

	t.Run("test UnaryNode invalid", func(t *testing.T) {
		exprStrs := []string{
			"Int64Field > +aa",
			"FloatField > -aa",
			`VarCharField > -aa`,
		}
		for _, exprStr := range exprStrs {
			assertInvalidExpr(t, schema, exprStr)
		}
	})

	t.Run("test BinaryNode", func(t *testing.T) {
		exprStrs := []string{
			// "+"
			"FloatField > 1 + 2",
			"FloatField > 1 + 2.0",
			"FloatField > 1.0 + 2",
			"FloatField > 1.0 + 2.0",
			// "-"
			"FloatField > 1 - 2",
			"FloatField > 1 - 2.0",
			"FloatField > 1.0 - 2",
			"FloatField > 1.0 - 2.0",
			// "*"
			"FloatField > 1 * 2",
			"FloatField > 1 * 2.0",
			"FloatField > 1.0 * 2",
			"FloatField > 1.0 * 2.0",
			// "/"
			"FloatField > 1 / 2",
			"FloatField > 1 / 2.0",
			"FloatField > 1.0 / 2",
			"FloatField > 1.0 / 2.0",
			// "%"
			"FloatField > 1 % 2",
			// "**"
			"FloatField > 1 ** 2",
			"FloatField > 1 ** 2.0",
			"FloatField > 1.0 ** 2",
			"FloatField > 1.0 ** 2.0",
		}
		for _, exprStr := range exprStrs {
			assertValidExprV2(t, schema, exprStr)
		}
	})

	t.Run("test BinaryNode invalid", func(t *testing.T) {
		exprStrs := []string{
			// "+"
			"FloatField > 1 + aa",
			"FloatField > aa + 2.0",
			// "-"
			"FloatField > 1 - aa",
			"FloatField > aa - 2.0",
			// "*"
			"FloatField > 1 * aa",
			"FloatField > aa * 2.0",
			// "/"
			"FloatField > 1 / 0",
			"FloatField > 1 / 0.0",
			"FloatField > 1.0 / 0",
			"FloatField > 1.0 / 0.0",
			"FloatField > 1 / aa",
			"FloatField > aa / 2.0",
			// "%"
			"FloatField > 1 % aa",
			"FloatField > 1 % 0",
			"FloatField > 1 % 0.0",
			// "**"
			"FloatField > 1 ** aa",
			"FloatField > aa ** 2.0",
		}
		for _, exprStr := range exprStrs {
			assertInvalidExpr(t, schema, exprStr)
		}
	})

	t.Run("test BinaryArithOpNode", func(t *testing.T) {
		exprStrs := []string{
			// "+"
			"FloatField + 1.2 == 3",
			"Int64Field + 3 == 5",
			"1.2 + FloatField != 3",
			"3 + Int64Field != 5",
			// "-"
			"FloatField - 1.2 == 3",
			"Int64Field - 3 != 5",
			// "*"
			"FloatField * 1.2 == 3",
			"Int64Field * 3 == 5",
			"1.2 * FloatField != 3",
			"3 * Int64Field != 5",
			// "/"
			"FloatField / 1.2 == 3",
			"Int64Field / 3 != 5",
			// "%"
			"Int64Field % 7 == 5",
		}
		for _, exprStr := range exprStrs {
			assertValidExprV2(t, schema, exprStr)
		}
	})

	t.Run("test BinaryArithOpNode invalid", func(t *testing.T) {
		exprStrs := []string{
			// "+"
			"FloatField + FloatField == 20",
			"Int64Field + Int64Field != 10",
			// "-"
			"FloatField - FloatField == 20.0",
			"Int64Field - Int64Field != 10",
			"10 - FloatField == 20",
			"20 - Int64Field != 10",
			// "*"
			"FloatField * FloatField == 20",
			"Int64Field * Int64Field != 10",
			// "/"
			"FloatField / FloatField == 20",
			"Int64Field / Int64Field != 10",
			"FloatField / 0 == 20",
			"Int64Field / 0 != 10",
			// "%"
			"Int64Field % Int64Field != 10",
			"FloatField % 0 == 20",
			"Int64Field % 0 != 10",
			"FloatField % 2.3 == 20",
		}
		// These are only checked against the legacy parser; planparserv2 has
		// different (broader) support for some of these forms.
		for _, exprStr := range exprStrs {
			exprProto, err := parseExpr(schema, exprStr)
			assert.Error(t, err)
			assert.Nil(t, exprProto)
		}
	})
}
// TestParsePlanNode_Naive checks that compound boolean/term/range filter
// expressions produce valid search plans, and that the string-function
// variants go through the legacy plan builder.
func TestParsePlanNode_Naive(t *testing.T) {
	schema := newTestSchema()
	queryInfo := &planpb.QueryInfo{
		Topk:         10,
		MetricType:   "L2",
		SearchParams: "{\"nprobe\": 10}",
	}

	v2Exprs := []string{
		"not (Int64Field > 3)",
		"not (3 > Int64Field)",
		"Int64Field in [1, 2, 3]",
		"Int64Field < 3 and (Int64Field > 2 || Int64Field == 1)",
		"DoubleField in [1.0, 2, 3]",
		"DoubleField in [1.0, 2, 3] && Int64Field < 3 or Int64Field > 2",
		`not (VarCharField > "str")`,
		`not ("str" > VarCharField)`,
		`VarCharField in ["term0", "term1", "term2"]`,
		`VarCharField < "str3" and (VarCharField > "str2" || VarCharField == "str1")`,
		`DoubleField in [1.0, 2, 3] && VarCharField < "str3" or Int64Field > 2`,
	}
	for _, expr := range v2Exprs {
		assertValidSearchPlanV2(t, schema, expr, "FloatVectorField", queryInfo)
	}

	// startsWith/endsWith are only handled by the legacy plan builder.
	stringExprs := []string{
		`not (VarCharField startsWith "str")`,
		`not (VarCharField endsWith "str")`,
		`VarCharField < "str3" and (VarCharField startsWith "str2" || VarCharField endsWith "str1")`,
	}
	for _, expr := range stringExprs {
		assertValidSearchPlan(t, schema, expr, "FloatVectorField", queryInfo)
	}
}
// TestExternalParser exercises the external (antonmedv/expr) parser on a
// compound predicate; the resulting AST can be probed in an IDE while
// debugging.
func TestExternalParser(t *testing.T) {
	ast, err := ant_parser.Parse(`!(1 < a < 2 or b in [1, 2, 3]) or (c < 3 and b > 5) and (d > "str1" or d < "str2")`)
	// NOTE: probe ast here via IDE
	assert.NoError(t, err)
	// Log through the testing framework instead of the builtin println so
	// the output only appears on failure or with -v.
	t.Log(ast.Node.Location().Column)
}
// TestExprPlan_Str builds search plans over a two-field schema: first a plan
// without any filter, then a few representative range/term filter
// expressions.
func TestExprPlan_Str(t *testing.T) {
	fields := []*schemapb.FieldSchema{
		{FieldID: 100, Name: "fakevec", DataType: schemapb.DataType_FloatVector},
		{FieldID: 101, Name: "age", DataType: schemapb.DataType_Int64},
	}

	schema := &schemapb.CollectionSchema{
		Name:        "default-collection",
		Description: "",
		AutoID:      true,
		Fields:      fields,
	}

	queryInfo := &planpb.QueryInfo{
		Topk:         10,
		MetricType:   "L2",
		SearchParams: "{\"nprobe\": 10}",
	}

	// without filter
	planProto, err := createQueryPlan(schema, "", "fakevec", queryInfo)
	assert.NoError(t, err)
	// Dump the plan through the testing framework instead of the builtin
	// println so it only shows up on failure or with -v.
	t.Log(proto.MarshalTextString(planProto))

	exprStrs := []string{
		"age >= 420000 && age < 420010", // range
		"age == 420000 || age == 420001 || age == 420002 || age == 420003 || age == 420004", // term
		"age not in [1, 2, 3]",
	}
	for _, exprStr := range exprStrs {
		assertValidSearchPlanV2(t, schema, exprStr, "fakevec", queryInfo)
	}
}
// TestExprMultiRange_Str covers chained range expressions (a < x < b),
// arithmetic bounds, and boolean-field comparisons, including forms that
// must be rejected.
func TestExprMultiRange_Str(t *testing.T) {
	validExprs := []string{
		"3 < FloatN < 4.0",
		// "3 < age1 < 5 < age2 < 7 < FloatN < 9.0 < FloatN2", // no need to support this, ambiguous.
		"1 + 1 < age1 < 2 * 2",
		"1 - 1 < age1 < 3 / 2",
		"1.0 - 1 < FloatN < 3 / 2",
		"2 ** 10 > FloatN >= 7 % 4",
		"0.1 ** 2 < FloatN < 2 ** 0.1",
		"0.1 ** 1.1 < FloatN < 3.1 / 4",
		"4.1 / 3 < FloatN < 0.0 / 5.0",
		"BoolN1 == True",
		"True == BoolN1",
		"BoolN1 == False",
	}
	invalidExprs := []string{
		"BoolN1 == 1",
		"BoolN1 == 0",
		"BoolN1 > 0",
	}

	schema := &schemapb.CollectionSchema{
		Name:        "default-collection",
		Description: "",
		AutoID:      true,
		Fields: []*schemapb.FieldSchema{
			{FieldID: 100, Name: "fakevec", DataType: schemapb.DataType_FloatVector},
			{FieldID: 101, Name: "age1", DataType: schemapb.DataType_Int64},
			{FieldID: 102, Name: "age2", DataType: schemapb.DataType_Int64},
			{FieldID: 103, Name: "FloatN", DataType: schemapb.DataType_Float},
			{FieldID: 104, Name: "FloatN2", DataType: schemapb.DataType_Float},
			{FieldID: 105, Name: "BoolN1", DataType: schemapb.DataType_Bool},
		},
	}
	queryInfo := &planpb.QueryInfo{
		Topk:         10,
		MetricType:   "L2",
		SearchParams: "{\"nprobe\": 10}",
	}

	for _, expr := range validExprs {
		assertValidSearchPlanV2(t, schema, expr, "fakevec", queryInfo)
	}
	for _, expr := range invalidExprs {
		assertInvalidSearchPlan(t, schema, expr, "fakevec", queryInfo)
	}
}
// TestExprFieldCompare_Str checks field-to-field comparison expressions.
func TestExprFieldCompare_Str(t *testing.T) {
	schema := &schemapb.CollectionSchema{
		Name:        "default-collection",
		Description: "",
		AutoID:      true,
		Fields: []*schemapb.FieldSchema{
			{FieldID: 100, Name: "fakevec", DataType: schemapb.DataType_FloatVector},
			{FieldID: 101, Name: "age1", DataType: schemapb.DataType_Int64},
			{FieldID: 102, Name: "age2", DataType: schemapb.DataType_Int64},
			{FieldID: 103, Name: "FloatN", DataType: schemapb.DataType_Float},
		},
	}
	queryInfo := &planpb.QueryInfo{
		Topk:         10,
		MetricType:   "L2",
		SearchParams: "{\"nprobe\": 10}",
	}

	for _, expr := range []string{
		"age1 < age2",
		// "3 < age1 <= age2 < 4", // no need to support this, ambiguous.
	} {
		assertValidSearchPlanV2(t, schema, expr, "fakevec", queryInfo)
	}
}
// TestExprBinaryArithOp_Str covers `(field op const) ==/!= value` filters,
// plus the operator/operand shapes the parser deliberately rejects.
func TestExprBinaryArithOp_Str(t *testing.T) {
	supported := []string{
		// Basic arithmetic
		"(age1 + 5) == 2",
		// Float data type
		"(FloatN - 5.2) == 0",
		// Other operators
		"(age1 - 5) == 1",
		"(age1 * 5) == 6",
		"(age1 / 5) == 1",
		"(age1 % 5) == 0",
		// Allow for commutative property for + and *
		"(6 + age1) != 2",
		"(age1 * 4) != 9",
		"(5 * FloatN) != 0",
		"(9 * FloatN) != 0",
		// Functional nodes at the right can be reversed
		"0 == (age1 + 3)",
	}
	unsupported := []string{
		// Comparison operators except for "==" and "!=" are unsupported
		"(age1 + 2) > 4",
		"(age1 + 2) >= 4",
		"(age1 + 2) < 4",
		"(age1 + 2) <= 4",
		// Field as the right operand for -, /, and % operators are not supported
		"(10 - age1) == 0",
		"(20 / age1) == 0",
		"(30 % age1) == 0",
		// Modulo is not supported in the parser but the engine can handle it since fmod is used
		"(FloatN % 2.1) == 0",
		// Left operand of the function must be an identifier
		"(10.5 / floatN) == 5.75",
	}

	schema := &schemapb.CollectionSchema{
		Name:        "default-collection",
		Description: "",
		AutoID:      true,
		Fields: []*schemapb.FieldSchema{
			{FieldID: 100, Name: "fakevec", DataType: schemapb.DataType_FloatVector},
			{FieldID: 101, Name: "age1", DataType: schemapb.DataType_Int64},
			{FieldID: 102, Name: "FloatN", DataType: schemapb.DataType_Float},
		},
	}
	queryInfo := &planpb.QueryInfo{
		Topk:         10,
		MetricType:   "L2",
		SearchParams: "{\"nprobe\": 10}",
	}

	for _, expr := range supported {
		assertValidSearchPlanV2(t, schema, expr, "fakevec", queryInfo)
	}
	for _, expr := range unsupported {
		assertInvalidSearchPlan(t, schema, expr, "fakevec", queryInfo)
	}
}
// TestPlanParseAPIs unit-tests small parser helpers: the comparison-operator
// mapping (with and without operand reversal) and bool-literal node parsing.
func TestPlanParseAPIs(t *testing.T) {
	t.Run("get compare op type", func(t *testing.T) {
		// Table-driven form of the previous copy-pasted assertions: each op
		// string maps to a forward OpType and the OpType used when the
		// operands are reversed.
		cases := []struct {
			op       string
			forward  planpb.OpType
			reversed planpb.OpType
		}{
			{">", planpb.OpType_GreaterThan, planpb.OpType_LessThan},
			{">=", planpb.OpType_GreaterEqual, planpb.OpType_LessEqual},
			{"<", planpb.OpType_LessThan, planpb.OpType_GreaterThan},
			{"<=", planpb.OpType_LessEqual, planpb.OpType_GreaterEqual},
			{"==", planpb.OpType_Equal, planpb.OpType_Equal},
			{"!=", planpb.OpType_NotEqual, planpb.OpType_NotEqual},
			{"*", planpb.OpType_Invalid, planpb.OpType_Invalid},
			{"startsWith", planpb.OpType_PrefixMatch, planpb.OpType_PrefixMatch},
			{"endsWith", planpb.OpType_PostfixMatch, planpb.OpType_PostfixMatch},
		}
		for _, c := range cases {
			assert.Equal(t, c.forward, getCompareOpType(c.op, false), c.op)
			assert.Equal(t, c.reversed, getCompareOpType(c.op, true), c.op)
		}
	})

	t.Run("parse bool node", func(t *testing.T) {
		var node ant_ast.Node

		// "True"/"False" identifiers parse to bool nodes.
		node = &ant_ast.IdentifierNode{
			Value: "True",
		}
		assert.Equal(t, true, parseBoolNode(&node).Value)

		node = &ant_ast.IdentifierNode{
			Value: "False",
		}
		assert.Equal(t, false, parseBoolNode(&node).Value)

		// Any other identifier is not a bool literal.
		node = &ant_ast.IdentifierNode{
			Value: "abcd",
		}
		assert.Nil(t, parseBoolNode(&node))

		// Non-identifier nodes are rejected outright.
		node = &ant_ast.BoolNode{
			Value: true,
		}
		assert.Nil(t, parseBoolNode(&node))
	})
}
// Test_CheckIdentical parses a set of regression expressions (including two
// very large term lists) with both the legacy parser and planparserv2, and
// asserts the resulting predicates are identical.
func Test_CheckIdentical(t *testing.T) {
	schema := newTestSchema()
	helper, err := typeutil.CreateSchemaHelper(schema)
	assert.NoError(t, err)

	n := 5000
	int64s := generateInt64Array(n)
	// Build the large term expressions via byte-slice appends: the previous
	// string `+=` in a loop was O(n^2) over 5000 elements for each of the
	// two expressions.
	intBuf := []byte(`Int64Field in [`)
	floatBuf := []byte(`FloatField in [`)
	for _, i := range int64s[:n-1] {
		intBuf = append(intBuf, fmt.Sprintf("%d, ", i)...)
		floatBuf = append(floatBuf, fmt.Sprintf("%d, ", i)...)
	}
	intBuf = append(intBuf, fmt.Sprintf("%d]", int64s[n-1])...)
	floatBuf = append(floatBuf, fmt.Sprintf("%d]", int64s[n-1])...)
	largeIntTermExpr := string(intBuf)
	largeFloatTermExpr := string(floatBuf)

	// cases in regression.
	inputs := []string{
		"Int64Field > 0",
		"(Int64Field > 0 && Int64Field < 400) or (Int64Field > 500 && Int64Field < 1000)",
		"Int64Field not in [1, 2, 3]",
		"Int64Field in [1, 2, 3] and FloatField != 2",
		"Int64Field == 0 || Int64Field == 1 || Int64Field == 2",
		"0 < Int64Field < 400",
		"500 <= Int64Field < 1000",
		"200+300 < Int64Field <= 500+500",
		"Int32Field != Int64Field",
		"Int64Field not in []",
		`Int64Field >= 0 && VarCharField >= "0"`,
		largeIntTermExpr,
		largeFloatTermExpr,
	}
	for _, input := range inputs {
		expr1, err := parseExpr(helper, input)
		assert.NoError(t, err)
		expr2, err := planparserv2.ParseExpr(helper, input)
		assert.NoError(t, err)
		assert.True(t, planparserv2.CheckPredicatesIdentical(expr1, expr2))
	}
}

View File

@ -38,6 +38,7 @@ import (
"github.com/milvus-io/milvus/pkg/util/funcutil"
"github.com/milvus-io/milvus/pkg/util/merr"
"github.com/milvus-io/milvus/pkg/util/paramtable"
"github.com/milvus-io/milvus/pkg/util/typeutil"
)
func TestMain(m *testing.M) {
@ -742,3 +743,28 @@ func Test_parseIndexParams_AutoIndex(t *testing.T) {
assert.Error(t, err)
})
}
// newTestSchema builds a collection schema for tests: one leading Int64
// field plus one field per integer/floating/vector/string data type, each
// named "<DataType>Field" with FieldID 100 + the type's enum value.
func newTestSchema() *schemapb.CollectionSchema {
	fields := []*schemapb.FieldSchema{
		{FieldID: 0, Name: "FieldID", IsPrimaryKey: false, Description: "field no.1", DataType: schemapb.DataType_Int64},
	}
	for typeName, typeID := range schemapb.DataType_value {
		dt := schemapb.DataType(typeID)
		supported := typeutil.IsIntegerType(dt) || typeutil.IsFloatingType(dt) ||
			typeutil.IsVectorType(dt) || typeutil.IsStringType(dt)
		if !supported {
			continue
		}
		fields = append(fields, &schemapb.FieldSchema{
			FieldID:      int64(100 + typeID),
			Name:         typeName + "Field",
			IsPrimaryKey: false,
			Description:  "",
			DataType:     dt,
		})
	}
	return &schemapb.CollectionSchema{
		Name:               "test",
		Description:        "schema for test used",
		AutoID:             true,
		Fields:             fields,
		EnableDynamicField: true,
	}
}

View File

@ -42,8 +42,8 @@ type MockBalancer_AssignChannel_Call struct {
}
// AssignChannel is a helper method to define mock.On call
// - channels []*meta.DmChannel
// - nodes []int64
// - channels []*meta.DmChannel
// - nodes []int64
func (_e *MockBalancer_Expecter) AssignChannel(channels interface{}, nodes interface{}) *MockBalancer_AssignChannel_Call {
return &MockBalancer_AssignChannel_Call{Call: _e.mock.On("AssignChannel", channels, nodes)}
}
@ -87,9 +87,9 @@ type MockBalancer_AssignSegment_Call struct {
}
// AssignSegment is a helper method to define mock.On call
// - collectionID int64
// - segments []*meta.Segment
// - nodes []int64
// - collectionID int64
// - segments []*meta.Segment
// - nodes []int64
func (_e *MockBalancer_Expecter) AssignSegment(collectionID interface{}, segments interface{}, nodes interface{}) *MockBalancer_AssignSegment_Call {
return &MockBalancer_AssignSegment_Call{Call: _e.mock.On("AssignSegment", collectionID, segments, nodes)}
}
@ -145,7 +145,7 @@ type MockBalancer_BalanceReplica_Call struct {
}
// BalanceReplica is a helper method to define mock.On call
// - replica *meta.Replica
// - replica *meta.Replica
func (_e *MockBalancer_Expecter) BalanceReplica(replica interface{}) *MockBalancer_BalanceReplica_Call {
return &MockBalancer_BalanceReplica_Call{Call: _e.mock.On("BalanceReplica", replica)}
}

View File

@ -32,7 +32,7 @@ type MockController_Remove_Call struct {
}
// Remove is a helper method to define mock.On call
// - nodeID int64
// - nodeID int64
func (_e *MockController_Expecter) Remove(nodeID interface{}) *MockController_Remove_Call {
return &MockController_Remove_Call{Call: _e.mock.On("Remove", nodeID)}
}
@ -65,8 +65,8 @@ type MockController_StartDistInstance_Call struct {
}
// StartDistInstance is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - ctx context.Context
// - nodeID int64
func (_e *MockController_Expecter) StartDistInstance(ctx interface{}, nodeID interface{}) *MockController_StartDistInstance_Call {
return &MockController_StartDistInstance_Call{Call: _e.mock.On("StartDistInstance", ctx, nodeID)}
}
@ -131,7 +131,7 @@ type MockController_SyncAll_Call struct {
}
// SyncAll is a helper method to define mock.On call
// - ctx context.Context
// - ctx context.Context
func (_e *MockController_Expecter) SyncAll(ctx interface{}) *MockController_SyncAll_Call {
return &MockController_SyncAll_Call{Call: _e.mock.On("SyncAll", ctx)}
}

View File

@ -60,8 +60,8 @@ type MockBroker_DescribeIndex_Call struct {
}
// DescribeIndex is a helper method to define mock.On call
// - ctx context.Context
// - collectionID int64
// - ctx context.Context
// - collectionID int64
func (_e *MockBroker_Expecter) DescribeIndex(ctx interface{}, collectionID interface{}) *MockBroker_DescribeIndex_Call {
return &MockBroker_DescribeIndex_Call{Call: _e.mock.On("DescribeIndex", ctx, collectionID)}
}
@ -115,8 +115,8 @@ type MockBroker_GetCollectionSchema_Call struct {
}
// GetCollectionSchema is a helper method to define mock.On call
// - ctx context.Context
// - collectionID int64
// - ctx context.Context
// - collectionID int64
func (_e *MockBroker_Expecter) GetCollectionSchema(ctx interface{}, collectionID interface{}) *MockBroker_GetCollectionSchema_Call {
return &MockBroker_GetCollectionSchema_Call{Call: _e.mock.On("GetCollectionSchema", ctx, collectionID)}
}
@ -170,9 +170,9 @@ type MockBroker_GetIndexInfo_Call struct {
}
// GetIndexInfo is a helper method to define mock.On call
// - ctx context.Context
// - collectionID int64
// - segmentID int64
// - ctx context.Context
// - collectionID int64
// - segmentID int64
func (_e *MockBroker_Expecter) GetIndexInfo(ctx interface{}, collectionID interface{}, segmentID interface{}) *MockBroker_GetIndexInfo_Call {
return &MockBroker_GetIndexInfo_Call{Call: _e.mock.On("GetIndexInfo", ctx, collectionID, segmentID)}
}
@ -226,8 +226,8 @@ type MockBroker_GetPartitions_Call struct {
}
// GetPartitions is a helper method to define mock.On call
// - ctx context.Context
// - collectionID int64
// - ctx context.Context
// - collectionID int64
func (_e *MockBroker_Expecter) GetPartitions(ctx interface{}, collectionID interface{}) *MockBroker_GetPartitions_Call {
return &MockBroker_GetPartitions_Call{Call: _e.mock.On("GetPartitions", ctx, collectionID)}
}
@ -290,9 +290,9 @@ type MockBroker_GetRecoveryInfo_Call struct {
}
// GetRecoveryInfo is a helper method to define mock.On call
// - ctx context.Context
// - collectionID int64
// - partitionID int64
// - ctx context.Context
// - collectionID int64
// - partitionID int64
func (_e *MockBroker_Expecter) GetRecoveryInfo(ctx interface{}, collectionID interface{}, partitionID interface{}) *MockBroker_GetRecoveryInfo_Call {
return &MockBroker_GetRecoveryInfo_Call{Call: _e.mock.On("GetRecoveryInfo", ctx, collectionID, partitionID)}
}
@ -362,9 +362,9 @@ type MockBroker_GetRecoveryInfoV2_Call struct {
}
// GetRecoveryInfoV2 is a helper method to define mock.On call
// - ctx context.Context
// - collectionID int64
// - partitionIDs ...int64
// - ctx context.Context
// - collectionID int64
// - partitionIDs ...int64
func (_e *MockBroker_Expecter) GetRecoveryInfoV2(ctx interface{}, collectionID interface{}, partitionIDs ...interface{}) *MockBroker_GetRecoveryInfoV2_Call {
return &MockBroker_GetRecoveryInfoV2_Call{Call: _e.mock.On("GetRecoveryInfoV2",
append([]interface{}{ctx, collectionID}, partitionIDs...)...)}
@ -432,8 +432,8 @@ type MockBroker_GetSegmentInfo_Call struct {
}
// GetSegmentInfo is a helper method to define mock.On call
// - ctx context.Context
// - segmentID ...int64
// - ctx context.Context
// - segmentID ...int64
func (_e *MockBroker_Expecter) GetSegmentInfo(ctx interface{}, segmentID ...interface{}) *MockBroker_GetSegmentInfo_Call {
return &MockBroker_GetSegmentInfo_Call{Call: _e.mock.On("GetSegmentInfo",
append([]interface{}{ctx}, segmentID...)...)}

View File

@ -61,8 +61,8 @@ type MockQueryNodeServer_Delete_Call struct {
}
// Delete is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.DeleteRequest
// - _a0 context.Context
// - _a1 *querypb.DeleteRequest
func (_e *MockQueryNodeServer_Expecter) Delete(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_Delete_Call {
return &MockQueryNodeServer_Delete_Call{Call: _e.mock.On("Delete", _a0, _a1)}
}
@ -116,8 +116,8 @@ type MockQueryNodeServer_GetComponentStates_Call struct {
}
// GetComponentStates is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *milvuspb.GetComponentStatesRequest
// - _a0 context.Context
// - _a1 *milvuspb.GetComponentStatesRequest
func (_e *MockQueryNodeServer_Expecter) GetComponentStates(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_GetComponentStates_Call {
return &MockQueryNodeServer_GetComponentStates_Call{Call: _e.mock.On("GetComponentStates", _a0, _a1)}
}
@ -171,8 +171,8 @@ type MockQueryNodeServer_GetDataDistribution_Call struct {
}
// GetDataDistribution is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.GetDataDistributionRequest
// - _a0 context.Context
// - _a1 *querypb.GetDataDistributionRequest
func (_e *MockQueryNodeServer_Expecter) GetDataDistribution(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_GetDataDistribution_Call {
return &MockQueryNodeServer_GetDataDistribution_Call{Call: _e.mock.On("GetDataDistribution", _a0, _a1)}
}
@ -226,8 +226,8 @@ type MockQueryNodeServer_GetMetrics_Call struct {
}
// GetMetrics is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *milvuspb.GetMetricsRequest
// - _a0 context.Context
// - _a1 *milvuspb.GetMetricsRequest
func (_e *MockQueryNodeServer_Expecter) GetMetrics(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_GetMetrics_Call {
return &MockQueryNodeServer_GetMetrics_Call{Call: _e.mock.On("GetMetrics", _a0, _a1)}
}
@ -281,8 +281,8 @@ type MockQueryNodeServer_GetSegmentInfo_Call struct {
}
// GetSegmentInfo is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.GetSegmentInfoRequest
// - _a0 context.Context
// - _a1 *querypb.GetSegmentInfoRequest
func (_e *MockQueryNodeServer_Expecter) GetSegmentInfo(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_GetSegmentInfo_Call {
return &MockQueryNodeServer_GetSegmentInfo_Call{Call: _e.mock.On("GetSegmentInfo", _a0, _a1)}
}
@ -336,8 +336,8 @@ type MockQueryNodeServer_GetStatistics_Call struct {
}
// GetStatistics is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.GetStatisticsRequest
// - _a0 context.Context
// - _a1 *querypb.GetStatisticsRequest
func (_e *MockQueryNodeServer_Expecter) GetStatistics(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_GetStatistics_Call {
return &MockQueryNodeServer_GetStatistics_Call{Call: _e.mock.On("GetStatistics", _a0, _a1)}
}
@ -391,8 +391,8 @@ type MockQueryNodeServer_GetStatisticsChannel_Call struct {
}
// GetStatisticsChannel is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *internalpb.GetStatisticsChannelRequest
// - _a0 context.Context
// - _a1 *internalpb.GetStatisticsChannelRequest
func (_e *MockQueryNodeServer_Expecter) GetStatisticsChannel(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_GetStatisticsChannel_Call {
return &MockQueryNodeServer_GetStatisticsChannel_Call{Call: _e.mock.On("GetStatisticsChannel", _a0, _a1)}
}
@ -446,8 +446,8 @@ type MockQueryNodeServer_GetTimeTickChannel_Call struct {
}
// GetTimeTickChannel is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *internalpb.GetTimeTickChannelRequest
// - _a0 context.Context
// - _a1 *internalpb.GetTimeTickChannelRequest
func (_e *MockQueryNodeServer_Expecter) GetTimeTickChannel(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_GetTimeTickChannel_Call {
return &MockQueryNodeServer_GetTimeTickChannel_Call{Call: _e.mock.On("GetTimeTickChannel", _a0, _a1)}
}
@ -501,8 +501,8 @@ type MockQueryNodeServer_LoadPartitions_Call struct {
}
// LoadPartitions is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.LoadPartitionsRequest
// - _a0 context.Context
// - _a1 *querypb.LoadPartitionsRequest
func (_e *MockQueryNodeServer_Expecter) LoadPartitions(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_LoadPartitions_Call {
return &MockQueryNodeServer_LoadPartitions_Call{Call: _e.mock.On("LoadPartitions", _a0, _a1)}
}
@ -556,8 +556,8 @@ type MockQueryNodeServer_LoadSegments_Call struct {
}
// LoadSegments is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.LoadSegmentsRequest
// - _a0 context.Context
// - _a1 *querypb.LoadSegmentsRequest
func (_e *MockQueryNodeServer_Expecter) LoadSegments(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_LoadSegments_Call {
return &MockQueryNodeServer_LoadSegments_Call{Call: _e.mock.On("LoadSegments", _a0, _a1)}
}
@ -611,8 +611,8 @@ type MockQueryNodeServer_Query_Call struct {
}
// Query is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.QueryRequest
// - _a0 context.Context
// - _a1 *querypb.QueryRequest
func (_e *MockQueryNodeServer_Expecter) Query(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_Query_Call {
return &MockQueryNodeServer_Query_Call{Call: _e.mock.On("Query", _a0, _a1)}
}
@ -666,8 +666,8 @@ type MockQueryNodeServer_QuerySegments_Call struct {
}
// QuerySegments is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.QueryRequest
// - _a0 context.Context
// - _a1 *querypb.QueryRequest
func (_e *MockQueryNodeServer_Expecter) QuerySegments(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_QuerySegments_Call {
return &MockQueryNodeServer_QuerySegments_Call{Call: _e.mock.On("QuerySegments", _a0, _a1)}
}
@ -709,8 +709,8 @@ type MockQueryNodeServer_QueryStream_Call struct {
}
// QueryStream is a helper method to define mock.On call
// - _a0 *querypb.QueryRequest
// - _a1 querypb.QueryNode_QueryStreamServer
// - _a0 *querypb.QueryRequest
// - _a1 querypb.QueryNode_QueryStreamServer
func (_e *MockQueryNodeServer_Expecter) QueryStream(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_QueryStream_Call {
return &MockQueryNodeServer_QueryStream_Call{Call: _e.mock.On("QueryStream", _a0, _a1)}
}
@ -752,8 +752,8 @@ type MockQueryNodeServer_QueryStreamSegments_Call struct {
}
// QueryStreamSegments is a helper method to define mock.On call
// - _a0 *querypb.QueryRequest
// - _a1 querypb.QueryNode_QueryStreamSegmentsServer
// - _a0 *querypb.QueryRequest
// - _a1 querypb.QueryNode_QueryStreamSegmentsServer
func (_e *MockQueryNodeServer_Expecter) QueryStreamSegments(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_QueryStreamSegments_Call {
return &MockQueryNodeServer_QueryStreamSegments_Call{Call: _e.mock.On("QueryStreamSegments", _a0, _a1)}
}
@ -807,8 +807,8 @@ type MockQueryNodeServer_ReleaseCollection_Call struct {
}
// ReleaseCollection is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.ReleaseCollectionRequest
// - _a0 context.Context
// - _a1 *querypb.ReleaseCollectionRequest
func (_e *MockQueryNodeServer_Expecter) ReleaseCollection(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_ReleaseCollection_Call {
return &MockQueryNodeServer_ReleaseCollection_Call{Call: _e.mock.On("ReleaseCollection", _a0, _a1)}
}
@ -862,8 +862,8 @@ type MockQueryNodeServer_ReleasePartitions_Call struct {
}
// ReleasePartitions is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.ReleasePartitionsRequest
// - _a0 context.Context
// - _a1 *querypb.ReleasePartitionsRequest
func (_e *MockQueryNodeServer_Expecter) ReleasePartitions(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_ReleasePartitions_Call {
return &MockQueryNodeServer_ReleasePartitions_Call{Call: _e.mock.On("ReleasePartitions", _a0, _a1)}
}
@ -917,8 +917,8 @@ type MockQueryNodeServer_ReleaseSegments_Call struct {
}
// ReleaseSegments is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.ReleaseSegmentsRequest
// - _a0 context.Context
// - _a1 *querypb.ReleaseSegmentsRequest
func (_e *MockQueryNodeServer_Expecter) ReleaseSegments(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_ReleaseSegments_Call {
return &MockQueryNodeServer_ReleaseSegments_Call{Call: _e.mock.On("ReleaseSegments", _a0, _a1)}
}
@ -972,8 +972,8 @@ type MockQueryNodeServer_Search_Call struct {
}
// Search is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.SearchRequest
// - _a0 context.Context
// - _a1 *querypb.SearchRequest
func (_e *MockQueryNodeServer_Expecter) Search(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_Search_Call {
return &MockQueryNodeServer_Search_Call{Call: _e.mock.On("Search", _a0, _a1)}
}
@ -1027,8 +1027,8 @@ type MockQueryNodeServer_SearchSegments_Call struct {
}
// SearchSegments is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.SearchRequest
// - _a0 context.Context
// - _a1 *querypb.SearchRequest
func (_e *MockQueryNodeServer_Expecter) SearchSegments(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_SearchSegments_Call {
return &MockQueryNodeServer_SearchSegments_Call{Call: _e.mock.On("SearchSegments", _a0, _a1)}
}
@ -1082,8 +1082,8 @@ type MockQueryNodeServer_ShowConfigurations_Call struct {
}
// ShowConfigurations is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *internalpb.ShowConfigurationsRequest
// - _a0 context.Context
// - _a1 *internalpb.ShowConfigurationsRequest
func (_e *MockQueryNodeServer_Expecter) ShowConfigurations(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_ShowConfigurations_Call {
return &MockQueryNodeServer_ShowConfigurations_Call{Call: _e.mock.On("ShowConfigurations", _a0, _a1)}
}
@ -1137,8 +1137,8 @@ type MockQueryNodeServer_SyncDistribution_Call struct {
}
// SyncDistribution is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.SyncDistributionRequest
// - _a0 context.Context
// - _a1 *querypb.SyncDistributionRequest
func (_e *MockQueryNodeServer_Expecter) SyncDistribution(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_SyncDistribution_Call {
return &MockQueryNodeServer_SyncDistribution_Call{Call: _e.mock.On("SyncDistribution", _a0, _a1)}
}
@ -1192,8 +1192,8 @@ type MockQueryNodeServer_SyncReplicaSegments_Call struct {
}
// SyncReplicaSegments is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.SyncReplicaSegmentsRequest
// - _a0 context.Context
// - _a1 *querypb.SyncReplicaSegmentsRequest
func (_e *MockQueryNodeServer_Expecter) SyncReplicaSegments(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_SyncReplicaSegments_Call {
return &MockQueryNodeServer_SyncReplicaSegments_Call{Call: _e.mock.On("SyncReplicaSegments", _a0, _a1)}
}
@ -1247,8 +1247,8 @@ type MockQueryNodeServer_UnsubDmChannel_Call struct {
}
// UnsubDmChannel is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.UnsubDmChannelRequest
// - _a0 context.Context
// - _a1 *querypb.UnsubDmChannelRequest
func (_e *MockQueryNodeServer_Expecter) UnsubDmChannel(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_UnsubDmChannel_Call {
return &MockQueryNodeServer_UnsubDmChannel_Call{Call: _e.mock.On("UnsubDmChannel", _a0, _a1)}
}
@ -1302,8 +1302,8 @@ type MockQueryNodeServer_WatchDmChannels_Call struct {
}
// WatchDmChannels is a helper method to define mock.On call
// - _a0 context.Context
// - _a1 *querypb.WatchDmChannelsRequest
// - _a0 context.Context
// - _a1 *querypb.WatchDmChannelsRequest
func (_e *MockQueryNodeServer_Expecter) WatchDmChannels(_a0 interface{}, _a1 interface{}) *MockQueryNodeServer_WatchDmChannels_Call {
return &MockQueryNodeServer_WatchDmChannels_Call{Call: _e.mock.On("WatchDmChannels", _a0, _a1)}
}

View File

@ -59,8 +59,8 @@ type MockCluster_GetComponentStates_Call struct {
}
// GetComponentStates is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - ctx context.Context
// - nodeID int64
func (_e *MockCluster_Expecter) GetComponentStates(ctx interface{}, nodeID interface{}) *MockCluster_GetComponentStates_Call {
return &MockCluster_GetComponentStates_Call{Call: _e.mock.On("GetComponentStates", ctx, nodeID)}
}
@ -114,9 +114,9 @@ type MockCluster_GetDataDistribution_Call struct {
}
// GetDataDistribution is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.GetDataDistributionRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.GetDataDistributionRequest
func (_e *MockCluster_Expecter) GetDataDistribution(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_GetDataDistribution_Call {
return &MockCluster_GetDataDistribution_Call{Call: _e.mock.On("GetDataDistribution", ctx, nodeID, req)}
}
@ -170,9 +170,9 @@ type MockCluster_GetMetrics_Call struct {
}
// GetMetrics is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *milvuspb.GetMetricsRequest
// - ctx context.Context
// - nodeID int64
// - req *milvuspb.GetMetricsRequest
func (_e *MockCluster_Expecter) GetMetrics(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_GetMetrics_Call {
return &MockCluster_GetMetrics_Call{Call: _e.mock.On("GetMetrics", ctx, nodeID, req)}
}
@ -226,9 +226,9 @@ type MockCluster_LoadPartitions_Call struct {
}
// LoadPartitions is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.LoadPartitionsRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.LoadPartitionsRequest
func (_e *MockCluster_Expecter) LoadPartitions(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_LoadPartitions_Call {
return &MockCluster_LoadPartitions_Call{Call: _e.mock.On("LoadPartitions", ctx, nodeID, req)}
}
@ -282,9 +282,9 @@ type MockCluster_LoadSegments_Call struct {
}
// LoadSegments is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.LoadSegmentsRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.LoadSegmentsRequest
func (_e *MockCluster_Expecter) LoadSegments(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_LoadSegments_Call {
return &MockCluster_LoadSegments_Call{Call: _e.mock.On("LoadSegments", ctx, nodeID, req)}
}
@ -338,9 +338,9 @@ type MockCluster_ReleasePartitions_Call struct {
}
// ReleasePartitions is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.ReleasePartitionsRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.ReleasePartitionsRequest
func (_e *MockCluster_Expecter) ReleasePartitions(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_ReleasePartitions_Call {
return &MockCluster_ReleasePartitions_Call{Call: _e.mock.On("ReleasePartitions", ctx, nodeID, req)}
}
@ -394,9 +394,9 @@ type MockCluster_ReleaseSegments_Call struct {
}
// ReleaseSegments is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.ReleaseSegmentsRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.ReleaseSegmentsRequest
func (_e *MockCluster_Expecter) ReleaseSegments(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_ReleaseSegments_Call {
return &MockCluster_ReleaseSegments_Call{Call: _e.mock.On("ReleaseSegments", ctx, nodeID, req)}
}
@ -514,9 +514,9 @@ type MockCluster_SyncDistribution_Call struct {
}
// SyncDistribution is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.SyncDistributionRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.SyncDistributionRequest
func (_e *MockCluster_Expecter) SyncDistribution(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_SyncDistribution_Call {
return &MockCluster_SyncDistribution_Call{Call: _e.mock.On("SyncDistribution", ctx, nodeID, req)}
}
@ -570,9 +570,9 @@ type MockCluster_UnsubDmChannel_Call struct {
}
// UnsubDmChannel is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.UnsubDmChannelRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.UnsubDmChannelRequest
func (_e *MockCluster_Expecter) UnsubDmChannel(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_UnsubDmChannel_Call {
return &MockCluster_UnsubDmChannel_Call{Call: _e.mock.On("UnsubDmChannel", ctx, nodeID, req)}
}
@ -626,9 +626,9 @@ type MockCluster_WatchDmChannels_Call struct {
}
// WatchDmChannels is a helper method to define mock.On call
// - ctx context.Context
// - nodeID int64
// - req *querypb.WatchDmChannelsRequest
// - ctx context.Context
// - nodeID int64
// - req *querypb.WatchDmChannelsRequest
func (_e *MockCluster_Expecter) WatchDmChannels(ctx interface{}, nodeID interface{}, req interface{}) *MockCluster_WatchDmChannels_Call {
return &MockCluster_WatchDmChannels_Call{Call: _e.mock.On("WatchDmChannels", ctx, nodeID, req)}
}

View File

@ -37,7 +37,7 @@ type MockScheduler_Add_Call struct {
}
// Add is a helper method to define mock.On call
// - task Task
// - task Task
func (_e *MockScheduler_Expecter) Add(task interface{}) *MockScheduler_Add_Call {
return &MockScheduler_Add_Call{Call: _e.mock.On("Add", task)}
}
@ -70,7 +70,7 @@ type MockScheduler_AddExecutor_Call struct {
}
// AddExecutor is a helper method to define mock.On call
// - nodeID int64
// - nodeID int64
func (_e *MockScheduler_Expecter) AddExecutor(nodeID interface{}) *MockScheduler_AddExecutor_Call {
return &MockScheduler_AddExecutor_Call{Call: _e.mock.On("AddExecutor", nodeID)}
}
@ -103,7 +103,7 @@ type MockScheduler_Dispatch_Call struct {
}
// Dispatch is a helper method to define mock.On call
// - node int64
// - node int64
func (_e *MockScheduler_Expecter) Dispatch(node interface{}) *MockScheduler_Dispatch_Call {
return &MockScheduler_Dispatch_Call{Call: _e.mock.On("Dispatch", node)}
}
@ -186,7 +186,7 @@ type MockScheduler_GetNodeChannelDelta_Call struct {
}
// GetNodeChannelDelta is a helper method to define mock.On call
// - nodeID int64
// - nodeID int64
func (_e *MockScheduler_Expecter) GetNodeChannelDelta(nodeID interface{}) *MockScheduler_GetNodeChannelDelta_Call {
return &MockScheduler_GetNodeChannelDelta_Call{Call: _e.mock.On("GetNodeChannelDelta", nodeID)}
}
@ -228,7 +228,7 @@ type MockScheduler_GetNodeSegmentDelta_Call struct {
}
// GetNodeSegmentDelta is a helper method to define mock.On call
// - nodeID int64
// - nodeID int64
func (_e *MockScheduler_Expecter) GetNodeSegmentDelta(nodeID interface{}) *MockScheduler_GetNodeSegmentDelta_Call {
return &MockScheduler_GetNodeSegmentDelta_Call{Call: _e.mock.On("GetNodeSegmentDelta", nodeID)}
}
@ -302,7 +302,7 @@ type MockScheduler_RemoveByNode_Call struct {
}
// RemoveByNode is a helper method to define mock.On call
// - node int64
// - node int64
func (_e *MockScheduler_Expecter) RemoveByNode(node interface{}) *MockScheduler_RemoveByNode_Call {
return &MockScheduler_RemoveByNode_Call{Call: _e.mock.On("RemoveByNode", node)}
}
@ -335,7 +335,7 @@ type MockScheduler_RemoveExecutor_Call struct {
}
// RemoveExecutor is a helper method to define mock.On call
// - nodeID int64
// - nodeID int64
func (_e *MockScheduler_Expecter) RemoveExecutor(nodeID interface{}) *MockScheduler_RemoveExecutor_Call {
return &MockScheduler_RemoveExecutor_Call{Call: _e.mock.On("RemoveExecutor", nodeID)}
}