diff --git a/.golangci.yml b/.golangci.yml
index d11131df21..7f50456870 100644
--- a/.golangci.yml
+++ b/.golangci.yml
@@ -7,26 +7,43 @@ run:
     - docs
     - scripts
     - internal/core
+    - cmake_build
 
 linters:
   disable-all: true
   enable:
-    - staticcheck
-    - typecheck
-    - goimports
-    - misspell
+    - gosimple
     - govet
     - ineffassign
-    - gosimple
+    - staticcheck
+    - decorder
+    - depguard
+    - gofmt
+    - goimports
     - gosec
     - revive
-    - durationcheck
     - unconvert
+    - misspell
+    - typecheck
+    - durationcheck
     - forbidigo
-    - depguard
+    - gci
+    - whitespace
+    - gofumpt
     # - gocritic
 
 linters-settings:
+  gci:
+    sections:
+      - standard
+      - default
+      - prefix(github.com/milvus-io)
+    custom-order: true
+  gofumpt:
+    lang-version: "1.18"
+    module-path: github.com/milvus-io
+  goimports:
+    local-prefixes: github.com/milvus-io
   revive:
     rules:
       - name: unused-parameter
@@ -92,6 +109,3 @@ issues:
   max-issues-per-linter: 0
   # Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
   max-same-issues: 0
-
-service:
-  golangci-lint-version: 1.27.0 # use the fixed version to not introduce new linters unexpectedly
diff --git a/Makefile b/Makefile
index 66894bf23c..1a0993b2e3 100644
--- a/Makefile
+++ b/Makefile
@@ -36,6 +36,14 @@ INSTALL_GOLANGCI_LINT := $(findstring $(GOLANGCI_LINT_VERSION), $(GOLANGCI_LINT_
 MOCKERY_VERSION := 2.32.4
 MOCKERY_OUTPUT := $(shell $(INSTALL_PATH)/mockery --version 2>/dev/null)
 INSTALL_MOCKERY := $(findstring $(MOCKERY_VERSION),$(MOCKERY_OUTPUT))
+# gci
+GCI_VERSION := 0.11.2
+GCI_OUTPUT := $(shell $(INSTALL_PATH)/gci --version 2>/dev/null)
+INSTALL_GCI := $(findstring $(GCI_VERSION),$(GCI_OUTPUT))
+# gofumpt
+GOFUMPT_VERSION := 0.5.0
+GOFUMPT_OUTPUT := $(shell $(INSTALL_PATH)/gofumpt --version 2>/dev/null)
+INSTALL_GOFUMPT := $(findstring $(GOFUMPT_VERSION),$(GOFUMPT_OUTPUT))
 
 export GIT_BRANCH=master
 
@@ -97,18 +105,32 @@ else
 	@GO111MODULE=on env bash $(PWD)/scripts/gofmt.sh pkg/
 endif
 
-lint: tools/bin/revive
-	@echo "Running $@ check"
-	@tools/bin/revive -formatter friendly -config tools/check/revive.toml ./...
+lint-fix: getdeps
+	@mkdir -p $(INSTALL_PATH)
+	@if [ -z "$(INSTALL_GCI)" ]; then \
+		echo "Installing gci v$(GCI_VERSION) to ./bin/" && GOBIN=$(INSTALL_PATH) go install github.com/daixiang0/gci@v$(GCI_VERSION); \
+	else \
+		echo "gci v$(GCI_VERSION) already installed"; \
+	fi
+	@if [ -z "$(INSTALL_GOFUMPT)" ]; then \
+		echo "Installing gofumpt v$(GOFUMPT_VERSION) to ./bin/" && GOBIN=$(INSTALL_PATH) go install mvdan.cc/gofumpt@v$(GOFUMPT_VERSION); \
+	else \
+		echo "gofumpt v$(GOFUMPT_VERSION) already installed"; \
+	fi
+	@echo "Running gofumpt fix"
+	@$(INSTALL_PATH)/gofumpt -l -w .
+ @echo "Running gci fix" + @$(INSTALL_PATH)/gci write cmd/ --skip-generated -s standard -s default -s "prefix(github.com/milvus-io)" --custom-order + @$(INSTALL_PATH)/gci write internal/ --skip-generated -s standard -s default -s "prefix(github.com/milvus-io)" --custom-order + @$(INSTALL_PATH)/gci write pkg/ --skip-generated -s standard -s default -s "prefix(github.com/milvus-io)" --custom-order + @$(INSTALL_PATH)/gci write tests/ --skip-generated -s standard -s default -s "prefix(github.com/milvus-io)" --custom-order + @echo "Running golangci-lint auto-fix" + @source $(PWD)/scripts/setenv.sh && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --fix --timeout=30m --config $(PWD)/.golangci.yml; cd pkg && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --fix --timeout=30m --config $(PWD)/.golangci.yml #TODO: Check code specifications by golangci-lint static-check: getdeps @echo "Running $@ check" - @GO111MODULE=on $(INSTALL_PATH)/golangci-lint cache clean - @source $(PWD)/scripts/setenv.sh && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config ./.golangci.yml ./internal/... - @source $(PWD)/scripts/setenv.sh && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config ./.golangci.yml ./cmd/... - @source $(PWD)/scripts/setenv.sh && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config ./.golangci.yml ./tests/integration/... - @source $(PWD)/scripts/setenv.sh && cd pkg && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config ../.golangci.yml ./... + @source $(PWD)/scripts/setenv.sh && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config $(PWD)/.golangci.yml; cd pkg && GO111MODULE=on $(INSTALL_PATH)/golangci-lint run --timeout=30m --config $(PWD)/.golangci.yml verifiers: build-cpp getdeps cppcheck fmt static-check diff --git a/cmd/components/data_coord.go b/cmd/components/data_coord.go index 2e556242c3..f787831473 100644 --- a/cmd/components/data_coord.go +++ b/cmd/components/data_coord.go @@ -19,13 +19,14 @@ package components import ( "context" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" grpcdatacoordclient "github.com/milvus-io/milvus/internal/distributed/datacoord" "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) // DataCoord implements grpc server of DataCoord server diff --git a/cmd/components/data_node.go b/cmd/components/data_node.go index 734d1e1686..25a7b9a91c 100644 --- a/cmd/components/data_node.go +++ b/cmd/components/data_node.go @@ -19,13 +19,14 @@ package components import ( "context" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" grpcdatanode "github.com/milvus-io/milvus/internal/distributed/datanode" "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) // DataNode implements DataNode grpc server diff --git a/cmd/components/index_coord.go b/cmd/components/index_coord.go index 4025974639..ff03f83789 100644 --- a/cmd/components/index_coord.go +++ b/cmd/components/index_coord.go @@ -26,8 +26,7 @@ import ( ) // IndexCoord implements IndexCoord grpc server -type IndexCoord struct { -} +type IndexCoord struct{} // NewIndexCoord creates a new IndexCoord func NewIndexCoord(ctx context.Context, factory 
dependency.Factory) (*IndexCoord, error) { diff --git a/cmd/components/index_node.go b/cmd/components/index_node.go index 9c874a8801..4f947d35f4 100644 --- a/cmd/components/index_node.go +++ b/cmd/components/index_node.go @@ -19,13 +19,14 @@ package components import ( "context" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" grpcindexnode "github.com/milvus-io/milvus/internal/distributed/indexnode" "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) // IndexNode implements IndexNode grpc server @@ -43,7 +44,6 @@ func NewIndexNode(ctx context.Context, factory dependency.Factory) (*IndexNode, } n.svr = svr return n, nil - } // Run starts service diff --git a/cmd/components/proxy.go b/cmd/components/proxy.go index 37bb9ef138..61a62df495 100644 --- a/cmd/components/proxy.go +++ b/cmd/components/proxy.go @@ -19,13 +19,14 @@ package components import ( "context" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" grpcproxy "github.com/milvus-io/milvus/internal/distributed/proxy" "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) // Proxy implements Proxy grpc server diff --git a/cmd/components/query_node.go b/cmd/components/query_node.go index f44cffa8ce..50570ec152 100644 --- a/cmd/components/query_node.go +++ b/cmd/components/query_node.go @@ -46,7 +46,6 @@ func NewQueryNode(ctx context.Context, factory dependency.Factory) (*QueryNode, ctx: ctx, svr: svr, }, nil - } // Run starts service diff --git a/cmd/components/root_coord.go b/cmd/components/root_coord.go index e26a5c50fd..720511902a 100644 --- a/cmd/components/root_coord.go +++ b/cmd/components/root_coord.go @@ -19,13 +19,14 @@ package components import ( "context" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" rc "github.com/milvus-io/milvus/internal/distributed/rootcoord" "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) // RootCoord implements RoodCoord grpc server diff --git a/cmd/embedded/embedded.go b/cmd/embedded/embedded.go index 1f2148a093..34b979a612 100644 --- a/cmd/embedded/embedded.go +++ b/cmd/embedded/embedded.go @@ -16,8 +16,9 @@ package main +import "C" + import ( - "C" "os" "github.com/milvus-io/milvus/cmd/milvus" diff --git a/cmd/milvus/mck.go b/cmd/milvus/mck.go index c07758f7a7..3775764bf0 100644 --- a/cmd/milvus/mck.go +++ b/cmd/milvus/mck.go @@ -364,7 +364,6 @@ func getTrashKey(taskType, key string) string { } func (c *mck) extractTask(prefix string, keys []string, values []string) { - for i := range keys { taskID, err := strconv.ParseInt(filepath.Base(keys[i]), 10, 64) if err != nil { @@ -520,7 +519,6 @@ func (c *mck) extractVecFieldIndexInfo(taskID int64, infos []*querypb.FieldIndex func (c *mck) unmarshalTask(taskID int64, t string) (string, []int64, []int64, error) { header := commonpb.MsgHeader{} err := proto.Unmarshal([]byte(t), &header) - if err != nil { return errReturn(taskID, "MsgHeader", err) } diff --git a/cmd/milvus/stop.go b/cmd/milvus/stop.go index 66517bd21b..1392125e6f 100644 --- a/cmd/milvus/stop.go +++ b/cmd/milvus/stop.go 
@@ -61,7 +61,7 @@ func (c *stop) formatFlags(args []string, flags *flag.FlagSet) { func (c *stop) stopPid(filename string, runtimeDir string) error { var pid int - fd, err := os.OpenFile(path.Join(runtimeDir, filename), os.O_RDONLY, 0664) + fd, err := os.OpenFile(path.Join(runtimeDir, filename), os.O_RDONLY, 0o664) if err != nil { return err } diff --git a/cmd/milvus/util.go b/cmd/milvus/util.go index 1e40bb4048..1a012d5222 100644 --- a/cmd/milvus/util.go +++ b/cmd/milvus/util.go @@ -14,7 +14,7 @@ import ( ) func makeRuntimeDir(dir string) error { - perm := os.FileMode(0755) + perm := os.FileMode(0o755) // os.MkdirAll equal to `mkdir -p` err := os.MkdirAll(dir, perm) if err != nil { @@ -63,7 +63,7 @@ func createRuntimeDir(sType string) string { func createPidFile(w io.Writer, filename string, runtimeDir string) (*flock.Flock, error) { fileFullName := path.Join(runtimeDir, filename) - fd, err := os.OpenFile(fileFullName, os.O_CREATE|os.O_RDWR, 0664) + fd, err := os.OpenFile(fileFullName, os.O_CREATE|os.O_RDWR, 0o664) if err != nil { return nil, fmt.Errorf("file %s is locked, error = %w", filename, err) } diff --git a/cmd/roles/roles.go b/cmd/roles/roles.go index 441a02d48b..4b550a7169 100644 --- a/cmd/roles/roles.go +++ b/cmd/roles/roles.go @@ -27,6 +27,10 @@ import ( "syscall" "time" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promhttp" + "go.uber.org/zap" + "github.com/milvus-io/milvus/cmd/components" "github.com/milvus-io/milvus/internal/http" "github.com/milvus-io/milvus/internal/http/healthz" @@ -43,9 +47,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/paramtable" _ "github.com/milvus-io/milvus/pkg/util/symbolizer" // support symbolizer and crash dump "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promhttp" - "go.uber.org/zap" ) // all milvus related metrics is in a separate registry diff --git a/cmd/tools/config/generate.go b/cmd/tools/config/generate.go index a30f2a46c8..0e6a4d5571 100644 --- a/cmd/tools/config/generate.go +++ b/cmd/tools/config/generate.go @@ -7,12 +7,13 @@ import ( "reflect" "strings" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/samber/lo" "go.uber.org/zap" "golang.org/x/exp/slices" + + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) type DocContent struct { @@ -106,7 +107,7 @@ type YamlMarshaller struct { } func (m *YamlMarshaller) writeYamlRecursive(data []DocContent, level int) { - var topLevels = typeutil.NewOrderedMap[string, []DocContent]() + topLevels := typeutil.NewOrderedMap[string, []DocContent]() for _, d := range data { key := strings.Split(d.key, ".")[level] diff --git a/cmd/tools/config/main.go b/cmd/tools/config/main.go index 73aa670c3e..8d6d0abfe1 100644 --- a/cmd/tools/config/main.go +++ b/cmd/tools/config/main.go @@ -36,5 +36,4 @@ func main() { default: log.Error(fmt.Sprintf("unknown argument %s", args[1])) } - } diff --git a/cmd/tools/config/printer.go b/cmd/tools/config/printer.go index 54cd90b4bf..2030069486 100644 --- a/cmd/tools/config/printer.go +++ b/cmd/tools/config/printer.go @@ -5,9 +5,10 @@ import ( "os" "sort" - "github.com/milvus-io/milvus/pkg/log" "github.com/spf13/viper" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" ) func ShowYaml(filepath string) { diff 
--git a/cmd/tools/datameta/main.go b/cmd/tools/datameta/main.go index c5e20035f6..d6bdffc35c 100644 --- a/cmd/tools/datameta/main.go +++ b/cmd/tools/datameta/main.go @@ -7,12 +7,13 @@ import ( "strings" "github.com/golang/protobuf/proto" + "go.uber.org/zap" + etcdkv "github.com/milvus-io/milvus/internal/kv/etcd" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/tsoutil" - "go.uber.org/zap" ) var ( diff --git a/cmd/tools/migration/allocator/atomic_allocator.go b/cmd/tools/migration/allocator/atomic_allocator.go index 894747ec06..cce5a4c0b3 100644 --- a/cmd/tools/migration/allocator/atomic_allocator.go +++ b/cmd/tools/migration/allocator/atomic_allocator.go @@ -1,8 +1,9 @@ package allocator import ( - "github.com/milvus-io/milvus/pkg/util/typeutil" "go.uber.org/atomic" + + "github.com/milvus-io/milvus/pkg/util/typeutil" ) const ( diff --git a/cmd/tools/migration/backend/backend.go b/cmd/tools/migration/backend/backend.go index 915477f359..91c33e431d 100644 --- a/cmd/tools/migration/backend/backend.go +++ b/cmd/tools/migration/backend/backend.go @@ -6,10 +6,8 @@ import ( "github.com/blang/semver/v4" "github.com/milvus-io/milvus/cmd/tools/migration/configs" - - "github.com/milvus-io/milvus/cmd/tools/migration/versions" - "github.com/milvus-io/milvus/cmd/tools/migration/meta" + "github.com/milvus-io/milvus/cmd/tools/migration/versions" "github.com/milvus-io/milvus/pkg/util" ) diff --git a/cmd/tools/migration/backend/backup_header.go b/cmd/tools/migration/backend/backup_header.go index 40e30ed8e9..59436505ed 100644 --- a/cmd/tools/migration/backend/backup_header.go +++ b/cmd/tools/migration/backend/backup_header.go @@ -3,9 +3,9 @@ package backend import ( "encoding/json" - "github.com/milvus-io/milvus/cmd/tools/migration/console" - "github.com/golang/protobuf/proto" + + "github.com/milvus-io/milvus/cmd/tools/migration/console" ) type BackupHeaderVersion int32 @@ -78,7 +78,7 @@ func (v *BackupHeaderExtra) ToJSONBytes() []byte { } func GetExtra(extra []byte) *BackupHeaderExtra { - var v = newDefaultBackupHeaderExtra() + v := newDefaultBackupHeaderExtra() err := json.Unmarshal(extra, v) if err != nil { console.Error(err.Error()) diff --git a/cmd/tools/migration/backend/backup_restore.go b/cmd/tools/migration/backend/backup_restore.go index 6980c25bee..c21d096cdf 100644 --- a/cmd/tools/migration/backend/backup_restore.go +++ b/cmd/tools/migration/backend/backup_restore.go @@ -6,6 +6,7 @@ import ( "io" "github.com/golang/protobuf/proto" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) diff --git a/cmd/tools/migration/backend/etcd.go b/cmd/tools/migration/backend/etcd.go index b4ad68f56d..0f5e4d28a5 100644 --- a/cmd/tools/migration/backend/etcd.go +++ b/cmd/tools/migration/backend/etcd.go @@ -1,11 +1,12 @@ package backend import ( + clientv3 "go.etcd.io/etcd/client/v3" + "github.com/milvus-io/milvus/cmd/tools/migration/configs" "github.com/milvus-io/milvus/internal/kv" etcdkv "github.com/milvus-io/milvus/internal/kv/etcd" "github.com/milvus-io/milvus/pkg/util/etcd" - clientv3 "go.etcd.io/etcd/client/v3" ) type etcdBasedBackend struct { diff --git a/cmd/tools/migration/backend/etcd210.go b/cmd/tools/migration/backend/etcd210.go index 527ec44fe6..10a0f1fdd7 100644 --- a/cmd/tools/migration/backend/etcd210.go +++ b/cmd/tools/migration/backend/etcd210.go @@ -8,15 +8,13 @@ import ( "strconv" "strings" + "github.com/golang/protobuf/proto" clientv3 "go.etcd.io/etcd/client/v3" 
"github.com/milvus-io/milvus/cmd/tools/migration/configs" - "github.com/milvus-io/milvus/cmd/tools/migration/legacy" - - "github.com/milvus-io/milvus/cmd/tools/migration/legacy/legacypb" - - "github.com/golang/protobuf/proto" "github.com/milvus-io/milvus/cmd/tools/migration/console" + "github.com/milvus-io/milvus/cmd/tools/migration/legacy" + "github.com/milvus-io/milvus/cmd/tools/migration/legacy/legacypb" "github.com/milvus-io/milvus/cmd/tools/migration/meta" "github.com/milvus-io/milvus/cmd/tools/migration/utils" "github.com/milvus-io/milvus/cmd/tools/migration/versions" @@ -56,7 +54,7 @@ func (b etcd210) loadTtAliases() (meta.TtAliasesMeta210, error) { tsKey := keys[i] tsValue := values[i] valueIsTombstone := rootcoord.IsTombstone(tsValue) - var aliasInfo = &pb.CollectionInfo{} // alias stored in collection info. + aliasInfo := &pb.CollectionInfo{} // alias stored in collection info. if valueIsTombstone { aliasInfo = nil } else { @@ -88,7 +86,7 @@ func (b etcd210) loadAliases() (meta.AliasesMeta210, error) { key := keys[i] value := values[i] valueIsTombstone := rootcoord.IsTombstone(value) - var aliasInfo = &pb.CollectionInfo{} // alias stored in collection info. + aliasInfo := &pb.CollectionInfo{} // alias stored in collection info. if valueIsTombstone { aliasInfo = nil } else { @@ -122,7 +120,7 @@ func (b etcd210) loadTtCollections() (meta.TtCollectionsMeta210, error) { } valueIsTombstone := rootcoord.IsTombstone(tsValue) - var coll = &pb.CollectionInfo{} + coll := &pb.CollectionInfo{} if valueIsTombstone { coll = nil } else { @@ -164,7 +162,7 @@ func (b etcd210) loadCollections() (meta.CollectionsMeta210, error) { } valueIsTombstone := rootcoord.IsTombstone(value) - var coll = &pb.CollectionInfo{} + coll := &pb.CollectionInfo{} if valueIsTombstone { coll = nil } else { @@ -213,7 +211,7 @@ func (b etcd210) loadCollectionIndexes() (meta.CollectionIndexesMeta210, error) key := keys[i] value := values[i] - var index = &pb.IndexInfo{} + index := &pb.IndexInfo{} if err := proto.Unmarshal([]byte(value), index); err != nil { return nil, err } @@ -240,7 +238,7 @@ func (b etcd210) loadSegmentIndexes() (meta.SegmentIndexesMeta210, error) { for i := 0; i < l; i++ { value := values[i] - var index = &pb.SegmentIndexInfo{} + index := &pb.SegmentIndexInfo{} if err := proto.Unmarshal([]byte(value), index); err != nil { return nil, err } @@ -263,7 +261,7 @@ func (b etcd210) loadIndexBuildMeta() (meta.IndexBuildMeta210, error) { for i := 0; i < l; i++ { value := values[i] - var record = &legacypb.IndexMeta{} + record := &legacypb.IndexMeta{} if err := proto.Unmarshal([]byte(value), record); err != nil { return nil, err } @@ -434,7 +432,7 @@ func (b etcd210) Backup(meta *meta.Meta, backupFile string) error { return err } console.Warning(fmt.Sprintf("backup to: %s", backupFile)) - return ioutil.WriteFile(backupFile, backup, 0600) + return ioutil.WriteFile(backupFile, backup, 0o600) } func (b etcd210) BackupV2(file string) error { @@ -489,7 +487,7 @@ func (b etcd210) BackupV2(file string) error { } console.Warning(fmt.Sprintf("backup to: %s", file)) - return ioutil.WriteFile(file, backup, 0600) + return ioutil.WriteFile(file, backup, 0o600) } func (b etcd210) Restore(backupFile string) error { diff --git a/cmd/tools/migration/backend/etcd220.go b/cmd/tools/migration/backend/etcd220.go index 4eae60d43e..d008057865 100644 --- a/cmd/tools/migration/backend/etcd220.go +++ b/cmd/tools/migration/backend/etcd220.go @@ -3,14 +3,11 @@ package backend import ( "fmt" - 
"github.com/milvus-io/milvus/internal/metastore/kv/querycoord" - "github.com/milvus-io/milvus/cmd/tools/migration/configs" - - "github.com/milvus-io/milvus/pkg/util" - "github.com/milvus-io/milvus/cmd/tools/migration/meta" + "github.com/milvus-io/milvus/internal/metastore/kv/querycoord" "github.com/milvus-io/milvus/internal/metastore/kv/rootcoord" + "github.com/milvus-io/milvus/pkg/util" ) // etcd220 implements Backend. diff --git a/cmd/tools/migration/command/main.go b/cmd/tools/migration/command/main.go index 5c393f4c5a..40185ef469 100644 --- a/cmd/tools/migration/command/main.go +++ b/cmd/tools/migration/command/main.go @@ -5,9 +5,8 @@ import ( "fmt" "os" - "github.com/milvus-io/milvus/cmd/tools/migration/console" - "github.com/milvus-io/milvus/cmd/tools/migration/configs" + "github.com/milvus-io/milvus/cmd/tools/migration/console" ) func Execute(args []string) { diff --git a/cmd/tools/migration/command/run.go b/cmd/tools/migration/command/run.go index e90faacfcd..d30abdbd36 100644 --- a/cmd/tools/migration/command/run.go +++ b/cmd/tools/migration/command/run.go @@ -4,9 +4,7 @@ import ( "context" "github.com/milvus-io/milvus/cmd/tools/migration/configs" - "github.com/milvus-io/milvus/cmd/tools/migration/console" - "github.com/milvus-io/milvus/cmd/tools/migration/migration" ) diff --git a/cmd/tools/migration/meta/210_to_220.go b/cmd/tools/migration/meta/210_to_220.go index 3ae5ee7722..1fceb29d0e 100644 --- a/cmd/tools/migration/meta/210_to_220.go +++ b/cmd/tools/migration/meta/210_to_220.go @@ -6,11 +6,11 @@ import ( "strconv" "strings" - "github.com/milvus-io/milvus/cmd/tools/migration/legacy/legacypb" - - "github.com/milvus-io/milvus/cmd/tools/migration/allocator" + "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus/cmd/tools/migration/allocator" + "github.com/milvus-io/milvus/cmd/tools/migration/legacy/legacypb" "github.com/milvus-io/milvus/cmd/tools/migration/versions" "github.com/milvus-io/milvus/internal/metastore/model" pb "github.com/milvus-io/milvus/internal/proto/etcdpb" @@ -19,7 +19,6 @@ import ( "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) func alias210ToAlias220(record *pb.CollectionInfo, ts Timestamp) *model.Alias { diff --git a/cmd/tools/migration/meta/meta.go b/cmd/tools/migration/meta/meta.go index b2d36d4291..f76e6b174e 100644 --- a/cmd/tools/migration/meta/meta.go +++ b/cmd/tools/migration/meta/meta.go @@ -2,11 +2,14 @@ package meta import ( "github.com/blang/semver/v4" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) -type UniqueID = typeutil.UniqueID -type Timestamp = typeutil.Timestamp +type ( + UniqueID = typeutil.UniqueID + Timestamp = typeutil.Timestamp +) type Meta struct { SourceVersion semver.Version diff --git a/cmd/tools/migration/meta/meta210.go b/cmd/tools/migration/meta/meta210.go index 9f3c65f97a..2301621066 100644 --- a/cmd/tools/migration/meta/meta210.go +++ b/cmd/tools/migration/meta/meta210.go @@ -21,14 +21,20 @@ type FieldIndexesWithSchema struct { type FieldIndexes210 map[UniqueID]*FieldIndexesWithSchema // coll_id -> field indexes. 
-type TtCollectionsMeta210 map[UniqueID]map[Timestamp]*pb.CollectionInfo // coll_id -> ts -> coll -type CollectionsMeta210 map[UniqueID]*pb.CollectionInfo // coll_id -> coll +type ( + TtCollectionsMeta210 map[UniqueID]map[Timestamp]*pb.CollectionInfo // coll_id -> ts -> coll + CollectionsMeta210 map[UniqueID]*pb.CollectionInfo // coll_id -> coll +) -type TtAliasesMeta210 map[string]map[Timestamp]*pb.CollectionInfo // alias name -> ts -> coll -type AliasesMeta210 map[string]*pb.CollectionInfo // alias name -> coll +type ( + TtAliasesMeta210 map[string]map[Timestamp]*pb.CollectionInfo // alias name -> ts -> coll + AliasesMeta210 map[string]*pb.CollectionInfo // alias name -> coll +) -type CollectionIndexesMeta210 map[UniqueID]map[UniqueID]*pb.IndexInfo // coll_id -> index_id -> index -type SegmentIndexesMeta210 map[UniqueID]map[UniqueID]*pb.SegmentIndexInfo // seg_id -> index_id -> segment index +type ( + CollectionIndexesMeta210 map[UniqueID]map[UniqueID]*pb.IndexInfo // coll_id -> index_id -> index + SegmentIndexesMeta210 map[UniqueID]map[UniqueID]*pb.SegmentIndexInfo // seg_id -> index_id -> segment index +) type IndexBuildMeta210 map[UniqueID]*legacypb.IndexMeta // index_build_id -> index diff --git a/cmd/tools/migration/meta/meta220.go b/cmd/tools/migration/meta/meta220.go index 63044fdb8c..f190b4061c 100644 --- a/cmd/tools/migration/meta/meta220.go +++ b/cmd/tools/migration/meta/meta220.go @@ -13,23 +13,35 @@ import ( "github.com/milvus-io/milvus/pkg/util" ) -type TtCollectionsMeta220 map[UniqueID]map[Timestamp]*model.Collection // coll_id -> ts -> coll -type CollectionsMeta220 map[UniqueID]*model.Collection // coll_id -> coll +type ( + TtCollectionsMeta220 map[UniqueID]map[Timestamp]*model.Collection // coll_id -> ts -> coll + CollectionsMeta220 map[UniqueID]*model.Collection // coll_id -> coll +) -type TtAliasesMeta220 map[string]map[Timestamp]*model.Alias // alias name -> ts -> coll -type AliasesMeta220 map[string]*model.Alias // alias name -> coll +type ( + TtAliasesMeta220 map[string]map[Timestamp]*model.Alias // alias name -> ts -> coll + AliasesMeta220 map[string]*model.Alias // alias name -> coll +) -type TtPartitionsMeta220 map[UniqueID]map[Timestamp][]*model.Partition // coll_id -> ts -> partitions -type PartitionsMeta220 map[UniqueID][]*model.Partition // coll_id -> ts -> partitions +type ( + TtPartitionsMeta220 map[UniqueID]map[Timestamp][]*model.Partition // coll_id -> ts -> partitions + PartitionsMeta220 map[UniqueID][]*model.Partition // coll_id -> ts -> partitions +) -type TtFieldsMeta220 map[UniqueID]map[Timestamp][]*model.Field // coll_id -> ts -> fields -type FieldsMeta220 map[UniqueID][]*model.Field // coll_id -> ts -> fields +type ( + TtFieldsMeta220 map[UniqueID]map[Timestamp][]*model.Field // coll_id -> ts -> fields + FieldsMeta220 map[UniqueID][]*model.Field // coll_id -> ts -> fields +) -type CollectionIndexesMeta220 map[UniqueID]map[UniqueID]*model.Index // coll_id -> index_id -> index -type SegmentIndexesMeta220 map[UniqueID]map[UniqueID]*model.SegmentIndex // seg_id -> index_id -> segment index +type ( + CollectionIndexesMeta220 map[UniqueID]map[UniqueID]*model.Index // coll_id -> index_id -> index + SegmentIndexesMeta220 map[UniqueID]map[UniqueID]*model.SegmentIndex // seg_id -> index_id -> segment index +) -type CollectionLoadInfo220 map[UniqueID]*model.CollectionLoadInfo // collectionID -> CollectionLoadInfo -type PartitionLoadInfo220 map[UniqueID]map[UniqueID]*model.PartitionLoadInfo // collectionID, partitionID -> PartitionLoadInfo +type ( + 
CollectionLoadInfo220 map[UniqueID]*model.CollectionLoadInfo // collectionID -> CollectionLoadInfo + PartitionLoadInfo220 map[UniqueID]map[UniqueID]*model.PartitionLoadInfo // collectionID, partitionID -> PartitionLoadInfo +) func (meta *TtCollectionsMeta220) GenerateSaves(sourceVersion semver.Version) (map[string]string, error) { saves := make(map[string]string) diff --git a/cmd/tools/migration/migration/210_to_220.go b/cmd/tools/migration/migration/210_to_220.go index edb28f2673..79aaba7a5f 100644 --- a/cmd/tools/migration/migration/210_to_220.go +++ b/cmd/tools/migration/migration/210_to_220.go @@ -4,8 +4,7 @@ import ( "github.com/milvus-io/milvus/cmd/tools/migration/meta" ) -type migrator210To220 struct { -} +type migrator210To220 struct{} func (m migrator210To220) Migrate(metas *meta.Meta) (*meta.Meta, error) { return meta.From210To220(metas) diff --git a/cmd/tools/migration/migration/migrator.go b/cmd/tools/migration/migration/migrator.go index e220f71dca..c02f35c343 100644 --- a/cmd/tools/migration/migration/migrator.go +++ b/cmd/tools/migration/migration/migrator.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/blang/semver/v4" + "github.com/milvus-io/milvus/cmd/tools/migration/meta" "github.com/milvus-io/milvus/cmd/tools/migration/versions" ) diff --git a/cmd/tools/migration/migration/runner.go b/cmd/tools/migration/migration/runner.go index 204c0c2d76..87d8d664db 100644 --- a/cmd/tools/migration/migration/runner.go +++ b/cmd/tools/migration/migration/runner.go @@ -7,20 +7,15 @@ import ( "sync" "time" - "github.com/milvus-io/milvus/internal/util/sessionutil" + "github.com/blang/semver/v4" + clientv3 "go.etcd.io/etcd/client/v3" "go.uber.org/atomic" - "github.com/milvus-io/milvus/cmd/tools/migration/versions" - - "github.com/blang/semver/v4" - - "github.com/milvus-io/milvus/cmd/tools/migration/configs" - - "github.com/milvus-io/milvus/cmd/tools/migration/console" - "github.com/milvus-io/milvus/cmd/tools/migration/backend" - clientv3 "go.etcd.io/etcd/client/v3" - + "github.com/milvus-io/milvus/cmd/tools/migration/configs" + "github.com/milvus-io/milvus/cmd/tools/migration/console" + "github.com/milvus-io/milvus/cmd/tools/migration/versions" + "github.com/milvus-io/milvus/internal/util/sessionutil" "github.com/milvus-io/milvus/pkg/util/etcd" ) diff --git a/cmd/tools/migration/utils/util.go b/cmd/tools/migration/utils/util.go index 6fa47a4f89..e9dc2caa57 100644 --- a/cmd/tools/migration/utils/util.go +++ b/cmd/tools/migration/utils/util.go @@ -5,13 +5,14 @@ import ( "strconv" "strings" - "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/milvus-io/milvus/internal/metastore/kv/rootcoord" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) -type UniqueID = typeutil.UniqueID -type Timestamp = typeutil.Timestamp +type ( + UniqueID = typeutil.UniqueID + Timestamp = typeutil.Timestamp +) type errNotOfTsKey struct { key string diff --git a/internal/allocator/cached_allocator.go b/internal/allocator/cached_allocator.go index 6308070038..fe63b2f3c3 100644 --- a/internal/allocator/cached_allocator.go +++ b/internal/allocator/cached_allocator.go @@ -23,8 +23,9 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/log" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" ) const ( diff --git a/internal/allocator/id_allocator.go b/internal/allocator/id_allocator.go index a71e7214a1..6ea7f8fca3 100644 --- a/internal/allocator/id_allocator.go +++ b/internal/allocator/id_allocator.go @@ -22,6 +22,7 @@ import ( "time" "github.com/cockroachdb/errors" + 
"github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/pkg/util/commonpbutil" @@ -83,7 +84,6 @@ func (ia *IDAllocator) gatherReqIDCount() uint32 { } func (ia *IDAllocator) syncID() (bool, error) { - need := ia.gatherReqIDCount() if need < ia.countPerRPC { need = ia.countPerRPC diff --git a/internal/allocator/id_allocator_test.go b/internal/allocator/id_allocator_test.go index f92b805b44..c1ea1f815b 100644 --- a/internal/allocator/id_allocator_test.go +++ b/internal/allocator/id_allocator_test.go @@ -20,14 +20,14 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/internal/proto/rootcoordpb" - "github.com/milvus-io/milvus/pkg/util/merr" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + + "github.com/milvus-io/milvus/internal/proto/rootcoordpb" + "github.com/milvus-io/milvus/pkg/util/merr" ) -type mockIDAllocator struct { -} +type mockIDAllocator struct{} func (tso *mockIDAllocator) AllocID(ctx context.Context, req *rootcoordpb.AllocIDRequest) (*rootcoordpb.AllocIDResponse, error) { return &rootcoordpb.AllocIDResponse{ diff --git a/internal/datacoord/allocator_test.go b/internal/datacoord/allocator_test.go index 4b0a137e85..71667fa4bf 100644 --- a/internal/datacoord/allocator_test.go +++ b/internal/datacoord/allocator_test.go @@ -20,8 +20,9 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestAllocator_Basic(t *testing.T) { diff --git a/internal/datacoord/build_index_policy.go b/internal/datacoord/build_index_policy.go index 0446e50431..9397378f9c 100644 --- a/internal/datacoord/build_index_policy.go +++ b/internal/datacoord/build_index_policy.go @@ -24,5 +24,4 @@ func defaultBuildIndexPolicy(buildIDs []UniqueID) { sort.Slice(buildIDs, func(i, j int) bool { return buildIDs[i] < buildIDs[j] }) - } diff --git a/internal/datacoord/channel_checker.go b/internal/datacoord/channel_checker.go index 6ef7dd4176..9ab1555b72 100644 --- a/internal/datacoord/channel_checker.go +++ b/internal/datacoord/channel_checker.go @@ -41,8 +41,8 @@ type channelStateTimer struct { etcdWatcher clientv3.WatchChan timeoutWatcher chan *ackEvent - //Modifies afterwards must guarantee that runningTimerCount is updated synchronized with runningTimers - //in order to keep consistency + // Modifies afterwards must guarantee that runningTimerCount is updated synchronized with runningTimers + // in order to keep consistency runningTimerCount atomic.Int32 } @@ -185,7 +185,6 @@ func parseWatchInfo(key string, data []byte) (*datapb.ChannelWatchInfo, error) { watchInfo := datapb.ChannelWatchInfo{} if err := proto.Unmarshal(data, &watchInfo); err != nil { return nil, fmt.Errorf("invalid event data: fail to parse ChannelWatchInfo, key: %s, err: %v", key, err) - } if watchInfo.Vchan == nil { diff --git a/internal/datacoord/channel_checker_test.go b/internal/datacoord/channel_checker_test.go index a15cd4bd5f..5ed5e900f2 100644 --- a/internal/datacoord/channel_checker_test.go +++ b/internal/datacoord/channel_checker_test.go @@ -21,11 +21,11 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/golang/protobuf/proto" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - "github.com/golang/protobuf/proto" + "github.com/milvus-io/milvus/internal/proto/datapb" ) func TestChannelStateTimer(t *testing.T) { @@ -242,6 +242,5 
@@ func TestChannelStateTimer_parses(t *testing.T) { for _, test := range tests { assert.Equal(t, test.outAckType, getAckType(test.inState)) } - }) } diff --git a/internal/datacoord/channel_manager.go b/internal/datacoord/channel_manager.go index daa078e5bc..14c4ed3007 100644 --- a/internal/datacoord/channel_manager.go +++ b/internal/datacoord/channel_manager.go @@ -467,7 +467,7 @@ func (c *ChannelManager) fillChannelWatchInfo(op *ChannelOp) { // fillChannelWatchInfoWithState updates the channel op by filling in channel watch info. func (c *ChannelManager) fillChannelWatchInfoWithState(op *ChannelOp, state datapb.ChannelWatchState) []string { - var channelsWithTimer = []string{} + channelsWithTimer := []string{} startTs := time.Now().Unix() checkInterval := Params.DataCoordCfg.WatchTimeoutInterval.GetAsDuration(time.Second) for _, ch := range op.Channels { @@ -607,7 +607,7 @@ type ackEvent struct { } func (c *ChannelManager) updateWithTimer(updates ChannelOpSet, state datapb.ChannelWatchState) error { - var channelsWithTimer = []string{} + channelsWithTimer := []string{} for _, op := range updates { if op.Type == Add { channelsWithTimer = append(channelsWithTimer, c.fillChannelWatchInfoWithState(op, state)...) diff --git a/internal/datacoord/channel_manager_test.go b/internal/datacoord/channel_manager_test.go index 799308b3b2..78cc855c02 100644 --- a/internal/datacoord/channel_manager_test.go +++ b/internal/datacoord/channel_manager_test.go @@ -203,7 +203,7 @@ func TestChannelManager_StateTransfer(t *testing.T) { }) t.Run("ToRelease-ReleaseSuccess-Reassign-ToWatch-2-DN", func(t *testing.T) { - var oldNode = UniqueID(120) + oldNode := UniqueID(120) cName := channelNamePrefix + "ToRelease-ReleaseSuccess-Reassign-ToWatch-2-DN" watchkv.RemoveWithPrefix("") @@ -289,7 +289,7 @@ func TestChannelManager_StateTransfer(t *testing.T) { }) t.Run("ToRelease-ReleaseFail-CleanUpAndDelete-Reassign-ToWatch-2-DN", func(t *testing.T) { - var oldNode = UniqueID(121) + oldNode := UniqueID(121) cName := channelNamePrefix + "ToRelease-ReleaseFail-CleanUpAndDelete-Reassign-ToWatch-2-DN" watchkv.RemoveWithPrefix("") @@ -520,7 +520,7 @@ func TestChannelManager(t *testing.T) { t.Run("test Reassign", func(t *testing.T) { defer watchkv.RemoveWithPrefix("") - var collectionID = UniqueID(5) + collectionID := UniqueID(5) tests := []struct { nodeID UniqueID @@ -571,9 +571,7 @@ func TestChannelManager(t *testing.T) { t.Run("test DeleteNode", func(t *testing.T) { defer watchkv.RemoveWithPrefix("") - var ( - collectionID = UniqueID(999) - ) + collectionID := UniqueID(999) chManager, err := NewChannelManager(watchkv, newMockHandler(), withStateChecker()) require.NoError(t, err) chManager.store = &ChannelStore{ @@ -581,7 +579,8 @@ func TestChannelManager(t *testing.T) { channelsInfo: map[int64]*NodeChannelInfo{ 1: {1, []*channel{ {Name: "channel-1", CollectionID: collectionID}, - {Name: "channel-2", CollectionID: collectionID}}}, + {Name: "channel-2", CollectionID: collectionID}, + }}, bufferID: {bufferID, []*channel{}}, }, } @@ -596,7 +595,7 @@ func TestChannelManager(t *testing.T) { t.Run("test CleanupAndReassign", func(t *testing.T) { defer watchkv.RemoveWithPrefix("") - var collectionID = UniqueID(6) + collectionID := UniqueID(6) tests := []struct { nodeID UniqueID @@ -745,7 +744,7 @@ func TestChannelManager(t *testing.T) { ) cName := channelNamePrefix + "TestBgChecker" - //1. set up channel_manager + // 1. 
set up channel_manager ctx, cancel := context.WithCancel(context.TODO()) defer cancel() chManager, err := NewChannelManager(watchkv, newMockHandler(), withBgChecker()) @@ -753,12 +752,12 @@ func TestChannelManager(t *testing.T) { assert.NotNil(t, chManager.bgChecker) chManager.Startup(ctx, []int64{nodeID}) - //2. test isSilent function running correctly + // 2. test isSilent function running correctly Params.Save(Params.DataCoordCfg.ChannelBalanceSilentDuration.Key, "3") assert.False(t, chManager.isSilent()) assert.False(t, chManager.stateTimer.hasRunningTimers()) - //3. watch one channel + // 3. watch one channel chManager.Watch(&channel{Name: cName, CollectionID: collectionID}) assert.False(t, chManager.isSilent()) assert.True(t, chManager.stateTimer.hasRunningTimers()) @@ -766,7 +765,7 @@ func TestChannelManager(t *testing.T) { waitAndStore(t, watchkv, key, datapb.ChannelWatchState_ToWatch, datapb.ChannelWatchState_WatchSuccess) waitAndCheckState(t, watchkv, datapb.ChannelWatchState_WatchSuccess, nodeID, cName, collectionID) - //4. wait for duration and check silent again + // 4. wait for duration and check silent again time.Sleep(Params.DataCoordCfg.ChannelBalanceSilentDuration.GetAsDuration(time.Second)) chManager.stateTimer.removeTimers([]string{cName}) assert.True(t, chManager.isSilent()) @@ -839,7 +838,8 @@ func TestChannelManager_Reload(t *testing.T) { chManager.store = &ChannelStore{ store: watchkv, channelsInfo: map[int64]*NodeChannelInfo{ - nodeID: {nodeID, []*channel{{Name: channelName, CollectionID: collectionID}}}}, + nodeID: {nodeID, []*channel{{Name: channelName, CollectionID: collectionID}}}, + }, } data, err := proto.Marshal(getWatchInfoWithState(datapb.ChannelWatchState_WatchFailure, collectionID, channelName)) @@ -861,7 +861,8 @@ func TestChannelManager_Reload(t *testing.T) { chManager.store = &ChannelStore{ store: watchkv, channelsInfo: map[int64]*NodeChannelInfo{ - nodeID: {nodeID, []*channel{{Name: channelName, CollectionID: collectionID}}}}, + nodeID: {nodeID, []*channel{{Name: channelName, CollectionID: collectionID}}}, + }, } require.NoError(t, err) @@ -902,7 +903,6 @@ func TestChannelManager_Reload(t *testing.T) { v, err := watchkv.Load(path.Join(prefix, strconv.FormatInt(nodeID, 10), channelName)) assert.Error(t, err) assert.Empty(t, v) - }) }) @@ -958,9 +958,7 @@ func TestChannelManager_BalanceBehaviour(t *testing.T) { t.Run("one node with three channels add a new node", func(t *testing.T) { defer watchkv.RemoveWithPrefix("") - var ( - collectionID = UniqueID(999) - ) + collectionID := UniqueID(999) chManager, err := NewChannelManager(watchkv, newMockHandler(), withStateChecker()) require.NoError(t, err) @@ -976,12 +974,12 @@ func TestChannelManager_BalanceBehaviour(t *testing.T) { 1: {1, []*channel{ {Name: "channel-1", CollectionID: collectionID}, {Name: "channel-2", CollectionID: collectionID}, - {Name: "channel-3", CollectionID: collectionID}}}}, + {Name: "channel-3", CollectionID: collectionID}, + }}, + }, } - var ( - channelBalanced string - ) + var channelBalanced string chManager.AddNode(2) channelBalanced = "channel-1" @@ -1047,7 +1045,6 @@ func TestChannelManager_BalanceBehaviour(t *testing.T) { assert.True(t, chManager.Match(1, "channel-1")) assert.True(t, chManager.Match(1, "channel-4")) }) - } func TestChannelManager_RemoveChannel(t *testing.T) { @@ -1153,6 +1150,5 @@ func TestChannelManager_HelperFunc(t *testing.T) { assert.ElementsMatch(t, test.expectedOut, nodes) }) } - }) } diff --git a/internal/datacoord/cluster.go 
b/internal/datacoord/cluster.go index 335bb00b93..468e9f53a9 100644 --- a/internal/datacoord/cluster.go +++ b/internal/datacoord/cluster.go @@ -20,13 +20,14 @@ import ( "context" "fmt" + "github.com/samber/lo" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/commonpbutil" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/samber/lo" - "go.uber.org/zap" ) // Cluster provides interfaces to interact with datanode cluster @@ -77,7 +78,8 @@ func (c *Cluster) Watch(ch string, collectionID UniqueID) error { // Flush sends flush requests to dataNodes specified // which also according to channels where segments are assigned to. func (c *Cluster) Flush(ctx context.Context, nodeID int64, channel string, - segments []*datapb.SegmentInfo) error { + segments []*datapb.SegmentInfo, +) error { if !c.channelManager.Match(nodeID, channel) { log.Warn("node is not matched with channel", zap.String("channel", channel), diff --git a/internal/datacoord/cluster_test.go b/internal/datacoord/cluster_test.go index e9dd11d929..b7b7412b89 100644 --- a/internal/datacoord/cluster_test.go +++ b/internal/datacoord/cluster_test.go @@ -367,7 +367,7 @@ func (suite *ClusterSuite) TestUnregister() { ctx, cancel := context.WithCancel(context.TODO()) defer cancel() - var mockSessionCreator = func(ctx context.Context, addr string, nodeID int64) (types.DataNode, error) { + mockSessionCreator := func(ctx context.Context, addr string, nodeID int64) (types.DataNode, error) { return newMockDataNodeClient(1, nil) } sessionManager := NewSessionManager(withSessionCreator(mockSessionCreator)) @@ -414,7 +414,7 @@ func TestWatchIfNeeded(t *testing.T) { ctx, cancel := context.WithCancel(context.TODO()) defer cancel() - var mockSessionCreator = func(ctx context.Context, addr string, nodeID int64) (types.DataNode, error) { + mockSessionCreator := func(ctx context.Context, addr string, nodeID int64) (types.DataNode, error) { return newMockDataNodeClient(1, nil) } sessionManager := NewSessionManager(withSessionCreator(mockSessionCreator)) @@ -584,7 +584,7 @@ func TestCluster_Flush(t *testing.T) { assert.Error(t, err) }) - //TODO add a method to verify datanode has flush request after client injection is available + // TODO add a method to verify datanode has flush request after client injection is available } func TestCluster_Import(t *testing.T) { @@ -629,7 +629,7 @@ func TestCluster_ReCollectSegmentStats(t *testing.T) { t.Run("recollect succeed", func(t *testing.T) { ctx, cancel := context.WithCancel(context.TODO()) defer cancel() - var mockSessionCreator = func(ctx context.Context, addr string, nodeID int64) (types.DataNode, error) { + mockSessionCreator := func(ctx context.Context, addr string, nodeID int64) (types.DataNode, error) { return newMockDataNodeClient(1, nil) } sessionManager := NewSessionManager(withSessionCreator(mockSessionCreator)) diff --git a/internal/datacoord/compaction.go b/internal/datacoord/compaction.go index 221fa9bfe4..bece81e5bd 100644 --- a/internal/datacoord/compaction.go +++ b/internal/datacoord/compaction.go @@ -23,13 +23,13 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" + 
"github.com/milvus-io/milvus/pkg/util/tsoutil" ) // TODO this num should be determined by resources of datanode, for now, we set to a fixed value for simple @@ -102,12 +102,13 @@ type compactionPlanHandler struct { quit chan struct{} wg sync.WaitGroup flushCh chan UniqueID - //segRefer *SegmentReferenceManager + // segRefer *SegmentReferenceManager parallelCh map[int64]chan struct{} } func newCompactionPlanHandler(sessions *SessionManager, cm *ChannelManager, meta *meta, - allocator allocator, flush chan UniqueID) *compactionPlanHandler { + allocator allocator, flush chan UniqueID, +) *compactionPlanHandler { return &compactionPlanHandler{ plans: make(map[int64]*compactionTask), chManager: cm, @@ -115,7 +116,7 @@ func newCompactionPlanHandler(sessions *SessionManager, cm *ChannelManager, meta sessions: sessions, allocator: allocator, flushCh: flush, - //segRefer: segRefer, + // segRefer: segRefer, parallelCh: make(map[int64]chan struct{}), } } @@ -263,7 +264,7 @@ func (c *compactionPlanHandler) handleMergeCompactionResult(plan *datapb.Compact return err } - var nodeID = c.plans[plan.GetPlanID()].dataNodeID + nodeID := c.plans[plan.GetPlanID()].dataNodeID req := &datapb.SyncSegmentsRequest{ PlanID: plan.PlanID, CompactedTo: newSegment.GetID(), diff --git a/internal/datacoord/compaction_test.go b/internal/datacoord/compaction_test.go index 2ca1a98081..e70c0f2485 100644 --- a/internal/datacoord/compaction_test.go +++ b/internal/datacoord/compaction_test.go @@ -23,22 +23,21 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + mockkv "github.com/milvus-io/milvus/internal/kv/mocks" "github.com/milvus-io/milvus/internal/metastore/kv/datacoord" "github.com/milvus-io/milvus/internal/mocks" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/metautil" "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" - - mockkv "github.com/milvus-io/milvus/internal/kv/mocks" ) func Test_compactionPlanHandler_execCompactionPlan(t *testing.T) { @@ -182,7 +181,6 @@ func Test_compactionPlanHandler_execCompactionPlan(t *testing.T) { assert.Equal(t, tt.args.signal, task.triggerInfo) assert.Equal(t, 1, c.executingTaskNum) } else { - assert.Eventually(t, func() bool { c.mu.RLock() @@ -198,7 +196,6 @@ func Test_compactionPlanHandler_execCompactionPlan(t *testing.T) { } func Test_compactionPlanHandler_execWithParallels(t *testing.T) { - mockDataNode := &mocks.MockDataNode{} paramtable.Get().Save(Params.DataCoordCfg.CompactionCheckIntervalInSeconds.Key, "1") defer paramtable.Get().Reset(Params.DataCoordCfg.CompactionCheckIntervalInSeconds.Key) @@ -330,7 +327,8 @@ func TestCompactionPlanHandler_handleMergeCompactionResult(t *testing.T) { data map[int64]*Session }{ data: map[int64]*Session{ - dataNodeID: {client: mockDataNode}}, + dataNodeID: {client: mockDataNode}, + }, }, } @@ -485,7 +483,8 @@ func TestCompactionPlanHandler_completeCompaction(t *testing.T) { data map[int64]*Session }{ data: map[int64]*Session{ - dataNodeID: {client: mockDataNode}}, + dataNodeID: {client: mockDataNode}, + }, }, } @@ -577,7 +576,8 @@ func TestCompactionPlanHandler_completeCompaction(t *testing.T) { data map[int64]*Session }{ data: 
map[int64]*Session{ - dataNodeID: {client: mockDataNode}}, + dataNodeID: {client: mockDataNode}, + }, }, } diff --git a/internal/datacoord/compaction_trigger.go b/internal/datacoord/compaction_trigger.go index 7cdf2d08c9..781cbdba9d 100644 --- a/internal/datacoord/compaction_trigger.go +++ b/internal/datacoord/compaction_trigger.go @@ -23,7 +23,6 @@ import ( "sync" "time" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/samber/lo" "go.uber.org/zap" @@ -32,6 +31,7 @@ import ( "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/indexparamcheck" "github.com/milvus-io/milvus/pkg/util/logutil" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) type compactTime struct { @@ -72,8 +72,8 @@ type compactionTrigger struct { forceMu sync.Mutex quit chan struct{} wg sync.WaitGroup - //segRefer *SegmentReferenceManager - //indexCoord types.IndexCoord + // segRefer *SegmentReferenceManager + // indexCoord types.IndexCoord estimateNonDiskSegmentPolicy calUpperLimitPolicy estimateDiskSegmentPolicy calUpperLimitPolicy // A sloopy hack, so we can test with different segment row count without worrying that @@ -85,8 +85,8 @@ func newCompactionTrigger( meta *meta, compactionHandler compactionPlanContext, allocator allocator, - //segRefer *SegmentReferenceManager, - //indexCoord types.IndexCoord, + // segRefer *SegmentReferenceManager, + // indexCoord types.IndexCoord, handler Handler, ) *compactionTrigger { return &compactionTrigger{ @@ -94,8 +94,8 @@ func newCompactionTrigger( allocator: allocator, signals: make(chan *compactionSignal, 100), compactionHandler: compactionHandler, - //segRefer: segRefer, - //indexCoord: indexCoord, + // segRefer: segRefer, + // indexCoord: indexCoord, estimateDiskSegmentPolicy: calBySchemaPolicyWithDiskIndex, estimateNonDiskSegmentPolicy: calBySchemaPolicy, handler: handler, @@ -211,7 +211,6 @@ func (t *compactionTrigger) getCompactTime(ts Timestamp, coll *collectionInfo) ( // triggerCompaction trigger a compaction if any compaction condition satisfy. 
func (t *compactionTrigger) triggerCompaction() error { - id, err := t.allocSignalID() if err != nil { return err @@ -585,7 +584,7 @@ func (t *compactionTrigger) generatePlans(segments []*SegmentInfo, force bool, i } // greedy pick from large segment to small, the goal is to fill each segment to reach 512M // we must ensure all prioritized candidates is in a plan - //TODO the compaction selection policy should consider if compaction workload is high + // TODO the compaction selection policy should consider if compaction workload is high for len(prioritizedCandidates) > 0 { var bucket []*SegmentInfo // pop out the first element diff --git a/internal/datacoord/compaction_trigger_test.go b/internal/datacoord/compaction_trigger_test.go index cf0b3234fe..c00f460514 100644 --- a/internal/datacoord/compaction_trigger_test.go +++ b/internal/datacoord/compaction_trigger_test.go @@ -22,7 +22,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/suite" @@ -32,6 +31,7 @@ import ( "github.com/milvus-io/milvus/internal/metastore/model" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/common" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) type spyCompactionHandler struct { @@ -490,7 +490,7 @@ func Test_compactionTrigger_force(t *testing.T) { }) t.Run(tt.name+" with allocate ts error", func(t *testing.T) { - //indexCood := newMockIndexCoord() + // indexCood := newMockIndexCoord() tr := &compactionTrigger{ meta: tt.fields.meta, handler: newMockHandlerWithMeta(tt.fields.meta), @@ -926,7 +926,6 @@ func Test_compactionTrigger_noplan(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - tr := &compactionTrigger{ meta: tt.fields.meta, handler: newMockHandlerWithMeta(tt.fields.meta), @@ -1659,7 +1658,6 @@ func Test_compactionTrigger_noplan_random_size(t *testing.T) { } for _, plan := range plans { - size := int64(0) for _, log := range plan.SegmentBinlogs { size += log.FieldBinlogs[0].GetBinlogs()[0].LogSize @@ -1708,7 +1706,7 @@ func Test_compactionTrigger_shouldDoSingleCompaction(t *testing.T) { couldDo := trigger.ShouldDoSingleCompaction(info, false, &compactTime{}) assert.True(t, couldDo) - //Test too many stats log + // Test too many stats log info = &SegmentInfo{ SegmentInfo: &datapb.SegmentInfo{ ID: 1, @@ -1736,12 +1734,12 @@ func Test_compactionTrigger_shouldDoSingleCompaction(t *testing.T) { couldDo = trigger.ShouldDoSingleCompaction(info, true, &compactTime{}) assert.False(t, couldDo) - //Test too many stats log but compacted + // Test too many stats log but compacted info.CompactionFrom = []int64{0, 1} couldDo = trigger.ShouldDoSingleCompaction(info, false, &compactTime{}) assert.False(t, couldDo) - //Test expire triggered compaction + // Test expire triggered compaction var binlogs2 []*datapb.FieldBinlog for i := UniqueID(0); i < 100; i++ { binlogs2 = append(binlogs2, &datapb.FieldBinlog{ @@ -1974,40 +1972,41 @@ func (s *CompactionTriggerSuite) SetupTest() { s.indexID = 300 s.vecFieldID = 400 s.channel = "dml_0_100v0" - s.meta = &meta{segments: &SegmentsInfo{ - map[int64]*SegmentInfo{ - 1: { - SegmentInfo: s.genSeg(1, 60), - lastFlushTime: time.Now().Add(-100 * time.Minute), - segmentIndexes: s.genSegIndex(1, indexID, 60), - }, - 2: { - SegmentInfo: s.genSeg(2, 60), - lastFlushTime: time.Now(), - segmentIndexes: s.genSegIndex(2, indexID, 60), - }, - 3: { - SegmentInfo: s.genSeg(3, 60), 
- lastFlushTime: time.Now(), - segmentIndexes: s.genSegIndex(3, indexID, 60), - }, - 4: { - SegmentInfo: s.genSeg(4, 60), - lastFlushTime: time.Now(), - segmentIndexes: s.genSegIndex(4, indexID, 60), - }, - 5: { - SegmentInfo: s.genSeg(5, 26), - lastFlushTime: time.Now(), - segmentIndexes: s.genSegIndex(5, indexID, 26), - }, - 6: { - SegmentInfo: s.genSeg(6, 26), - lastFlushTime: time.Now(), - segmentIndexes: s.genSegIndex(6, indexID, 26), + s.meta = &meta{ + segments: &SegmentsInfo{ + map[int64]*SegmentInfo{ + 1: { + SegmentInfo: s.genSeg(1, 60), + lastFlushTime: time.Now().Add(-100 * time.Minute), + segmentIndexes: s.genSegIndex(1, indexID, 60), + }, + 2: { + SegmentInfo: s.genSeg(2, 60), + lastFlushTime: time.Now(), + segmentIndexes: s.genSegIndex(2, indexID, 60), + }, + 3: { + SegmentInfo: s.genSeg(3, 60), + lastFlushTime: time.Now(), + segmentIndexes: s.genSegIndex(3, indexID, 60), + }, + 4: { + SegmentInfo: s.genSeg(4, 60), + lastFlushTime: time.Now(), + segmentIndexes: s.genSegIndex(4, indexID, 60), + }, + 5: { + SegmentInfo: s.genSeg(5, 26), + lastFlushTime: time.Now(), + segmentIndexes: s.genSegIndex(5, indexID, 26), + }, + 6: { + SegmentInfo: s.genSeg(6, 26), + lastFlushTime: time.Now(), + segmentIndexes: s.genSegIndex(6, indexID, 26), + }, }, }, - }, collections: map[int64]*collectionInfo{ s.collectionID: { ID: s.collectionID, @@ -2061,7 +2060,7 @@ func (s *CompactionTriggerSuite) TestHandleSignal() { defer s.SetupTest() tr := s.tr s.compactionHandler.EXPECT().isFull().Return(false) - //s.allocator.EXPECT().allocTimestamp(mock.Anything).Return(10000, nil) + // s.allocator.EXPECT().allocTimestamp(mock.Anything).Return(10000, nil) s.handler.EXPECT().GetCollection(mock.Anything, int64(100)).Return(nil, errors.New("mocked")) tr.handleSignal(&compactionSignal{ segmentID: 1, @@ -2078,7 +2077,7 @@ func (s *CompactionTriggerSuite) TestHandleSignal() { defer s.SetupTest() tr := s.tr s.compactionHandler.EXPECT().isFull().Return(false) - //s.allocator.EXPECT().allocTimestamp(mock.Anything).Return(10000, nil) + // s.allocator.EXPECT().allocTimestamp(mock.Anything).Return(10000, nil) s.handler.EXPECT().GetCollection(mock.Anything, int64(100)).Return(&collectionInfo{ Properties: map[string]string{ common.CollectionAutoCompactionKey: "bad_value", diff --git a/internal/datacoord/coordinator_broker.go b/internal/datacoord/coordinator_broker.go index 91252d2ec4..a4f1bf7a11 100644 --- a/internal/datacoord/coordinator_broker.go +++ b/internal/datacoord/coordinator_broker.go @@ -20,6 +20,8 @@ import ( "time" "github.com/cockroachdb/errors" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/types" @@ -27,7 +29,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/commonpbutil" "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/zap" ) const ( diff --git a/internal/datacoord/errors.go b/internal/datacoord/errors.go index 05497cb514..e74b672646 100644 --- a/internal/datacoord/errors.go +++ b/internal/datacoord/errors.go @@ -29,9 +29,11 @@ var errNilKvClient = errors.New("kv client not initialized") const serverNotServingErrMsg = "DataCoord is not serving" // errors for VerifyResponse -var errNilResponse = errors.New("response is nil") -var errNilStatusResponse = errors.New("response has nil status") -var errUnknownResponseType = errors.New("unknown response type") +var ( + errNilResponse = errors.New("response is nil") + 
errNilStatusResponse = errors.New("response has nil status") + errUnknownResponseType = errors.New("unknown response type") +) func msgDataCoordIsUnhealthy(coordID UniqueID) string { return fmt.Sprintf("DataCoord %d is not ready", coordID) diff --git a/internal/datacoord/errors_test.go b/internal/datacoord/errors_test.go index 581df1f412..5e1d722b44 100644 --- a/internal/datacoord/errors_test.go +++ b/internal/datacoord/errors_test.go @@ -19,9 +19,10 @@ package datacoord import ( "testing" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) func TestMsgDataCoordIsUnhealthy(t *testing.T) { diff --git a/internal/datacoord/garbage_collector.go b/internal/datacoord/garbage_collector.go index b8a0220931..a5e38dd03e 100644 --- a/internal/datacoord/garbage_collector.go +++ b/internal/datacoord/garbage_collector.go @@ -201,7 +201,8 @@ func (gc *garbageCollector) scan() { func (gc *garbageCollector) checkDroppedSegmentGC(segment *SegmentInfo, childSegment *SegmentInfo, indexSet typeutil.UniqueSet, - cpTimestamp Timestamp) bool { + cpTimestamp Timestamp, +) bool { log := log.With(zap.Int64("segmentID", segment.ID)) isCompacted := childSegment != nil || segment.GetCompacted() @@ -246,7 +247,7 @@ func (gc *garbageCollector) clearEtcd() { if segment.GetState() == commonpb.SegmentState_Dropped { drops[segment.GetID()] = segment channels.Insert(segment.GetInsertChannel()) - //continue + // continue // A(indexed), B(indexed) -> C(no indexed), D(no indexed) -> E(no indexed), A, B can not be GC } for _, from := range segment.GetCompactionFrom() { diff --git a/internal/datacoord/garbage_collector_test.go b/internal/datacoord/garbage_collector_test.go index a8aac6fd3a..86367f2d13 100644 --- a/internal/datacoord/garbage_collector_test.go +++ b/internal/datacoord/garbage_collector_test.go @@ -26,8 +26,6 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/common" - "github.com/cockroachdb/errors" minio "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" @@ -47,6 +45,7 @@ import ( "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/storage" + "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" ) @@ -54,7 +53,7 @@ import ( func Test_garbageCollector_basic(t *testing.T) { bucketName := `datacoord-ut` + strings.ToLower(funcutil.RandomString(8)) rootPath := `gc` + funcutil.RandomString(8) - //TODO change to Params + // TODO change to Params cli, _, _, _, _, err := initUtOSSEnv(bucketName, rootPath, 0) require.NoError(t, err) @@ -93,7 +92,6 @@ func Test_garbageCollector_basic(t *testing.T) { gc.close() }) }) - } func validateMinioPrefixElements(t *testing.T, cli *minio.Client, bucketName string, prefix string, elements []string) { @@ -107,7 +105,7 @@ func validateMinioPrefixElements(t *testing.T, cli *minio.Client, bucketName str func Test_garbageCollector_scan(t *testing.T) { bucketName := `datacoord-ut` + strings.ToLower(funcutil.RandomString(8)) rootPath := `gc` + funcutil.RandomString(8) - //TODO change to Params + // TODO change to Params cli, inserts, stats, delta, others, err := initUtOSSEnv(bucketName, rootPath, 4) require.NoError(t, err) @@ -334,7 +332,7 @@ func createMetaForRecycleUnusedIndexes(catalog metastore.DataCoordCatalog) *meta var ( ctx = context.Background() collID = UniqueID(100) - //partID = UniqueID(200) + 
// partID = UniqueID(200) fieldID = UniqueID(300) indexID = UniqueID(400) ) @@ -428,7 +426,7 @@ func createMetaForRecycleUnusedSegIndexes(catalog metastore.DataCoordCatalog) *m ctx = context.Background() collID = UniqueID(100) partID = UniqueID(200) - //fieldID = UniqueID(300) + // fieldID = UniqueID(300) indexID = UniqueID(400) segID = UniqueID(500) ) @@ -571,7 +569,7 @@ func createMetaTableForRecycleUnusedIndexFiles(catalog *datacoord.Catalog) *meta ctx = context.Background() collID = UniqueID(100) partID = UniqueID(200) - //fieldID = UniqueID(300) + // fieldID = UniqueID(300) indexID = UniqueID(400) segID = UniqueID(500) buildID = UniqueID(600) diff --git a/internal/datacoord/handler.go b/internal/datacoord/handler.go index a344e36944..4db996cda0 100644 --- a/internal/datacoord/handler.go +++ b/internal/datacoord/handler.go @@ -20,7 +20,6 @@ import ( "context" "time" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/samber/lo" "go.uber.org/zap" @@ -30,6 +29,7 @@ import ( "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/retry" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) diff --git a/internal/datacoord/index_builder.go b/internal/datacoord/index_builder.go index 825a3d6cd0..2bdc618a87 100644 --- a/internal/datacoord/index_builder.go +++ b/internal/datacoord/index_builder.go @@ -23,7 +23,6 @@ import ( "time" "github.com/cockroachdb/errors" - "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" diff --git a/internal/datacoord/index_builder_test.go b/internal/datacoord/index_builder_test.go index d1719c4286..6d28d1a550 100644 --- a/internal/datacoord/index_builder_test.go +++ b/internal/datacoord/index_builder_test.go @@ -22,7 +22,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" diff --git a/internal/datacoord/index_meta.go b/internal/datacoord/index_meta.go index 54018be7c7..b811a59be9 100644 --- a/internal/datacoord/index_meta.go +++ b/internal/datacoord/index_meta.go @@ -22,6 +22,7 @@ import ( "strconv" "github.com/golang/protobuf/proto" + "github.com/prometheus/client_golang/prometheus" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" @@ -30,7 +31,6 @@ import ( "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" - "github.com/prometheus/client_golang/prometheus" ) func (m *meta) updateCollectionIndex(index *model.Index) { diff --git a/internal/datacoord/index_meta_test.go b/internal/datacoord/index_meta_test.go index bdd75e2966..65f5a973e4 100644 --- a/internal/datacoord/index_meta_test.go +++ b/internal/datacoord/index_meta_test.go @@ -41,7 +41,7 @@ import ( func TestMeta_CanCreateIndex(t *testing.T) { var ( collID = UniqueID(1) - //partID = UniqueID(2) + // partID = UniqueID(2) indexID = UniqueID(10) fieldID = UniqueID(100) indexName = "_default_idx" @@ -162,7 +162,7 @@ func TestMeta_CanCreateIndex(t *testing.T) { func TestMeta_HasSameReq(t *testing.T) { var ( collID = UniqueID(1) - //partID = UniqueID(2) + // partID = UniqueID(2) indexID = UniqueID(10) fieldID = UniqueID(100) indexName = "_default_idx" @@ -371,7 +371,7 @@ func TestMeta_AddSegmentIndex(t *testing.T) { func TestMeta_GetIndexIDByName(t *testing.T) { var ( collID = UniqueID(1) - //partID = UniqueID(2) + // partID = UniqueID(2) indexID = UniqueID(10) fieldID = UniqueID(100) indexName = 
"_default_idx" @@ -425,7 +425,6 @@ func TestMeta_GetIndexIDByName(t *testing.T) { indexID2CreateTS := m.GetIndexIDByName(collID, indexName) assert.Contains(t, indexID2CreateTS, indexID) }) - } func TestMeta_GetSegmentIndexState(t *testing.T) { diff --git a/internal/datacoord/index_service_test.go b/internal/datacoord/index_service_test.go index 5dfc182996..68b84792c4 100644 --- a/internal/datacoord/index_service_test.go +++ b/internal/datacoord/index_service_test.go @@ -48,7 +48,7 @@ func TestServer_CreateIndex(t *testing.T) { var ( collID = UniqueID(1) fieldID = UniqueID(10) - //indexID = UniqueID(100) + // indexID = UniqueID(100) indexName = "default_idx" typeParams = []*commonpb.KeyValuePair{ { @@ -706,7 +706,7 @@ func TestServer_DescribeIndex(t *testing.T) { catalog: catalog, indexes: map[UniqueID]map[UniqueID]*model.Index{ collID: { - //finished + // finished indexID: { TenantID: "", CollectionID: collID, @@ -1067,7 +1067,7 @@ func TestServer_GetIndexStatistics(t *testing.T) { catalog: catalog, indexes: map[UniqueID]map[UniqueID]*model.Index{ collID: { - //finished + // finished indexID: { TenantID: "", CollectionID: collID, @@ -1347,7 +1347,7 @@ func TestServer_DropIndex(t *testing.T) { catalog: catalog, indexes: map[UniqueID]map[UniqueID]*model.Index{ collID: { - //finished + // finished indexID: { TenantID: "", CollectionID: collID, @@ -1542,7 +1542,7 @@ func TestServer_GetIndexInfos(t *testing.T) { catalog: &datacoord.Catalog{MetaKv: mocks.NewMetaKv(t)}, indexes: map[UniqueID]map[UniqueID]*model.Index{ collID: { - //finished + // finished indexID: { TenantID: "", CollectionID: collID, diff --git a/internal/datacoord/indexnode_manager_test.go b/internal/datacoord/indexnode_manager_test.go index 698129a797..43a25bb3a6 100644 --- a/internal/datacoord/indexnode_manager_test.go +++ b/internal/datacoord/indexnode_manager_test.go @@ -22,6 +22,7 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/indexnode" @@ -29,7 +30,6 @@ import ( "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/stretchr/testify/assert" ) func TestIndexNodeManager_AddNode(t *testing.T) { diff --git a/internal/datacoord/meta.go b/internal/datacoord/meta.go index 7e5d6ff120..324bda0f6c 100644 --- a/internal/datacoord/meta.go +++ b/internal/datacoord/meta.go @@ -26,7 +26,6 @@ import ( "github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/samber/lo" "go.uber.org/zap" "golang.org/x/exp/maps" @@ -44,6 +43,7 @@ import ( "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/util/metautil" "github.com/milvus-io/milvus/pkg/util/timerecord" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -491,7 +491,7 @@ func (m *meta) UpdateFlushSegmentsInfo( } // TODO add diff encoding and compression currBinlogs := clonedSegment.GetBinlogs() - var getFieldBinlogs = func(id UniqueID, binlogs []*datapb.FieldBinlog) *datapb.FieldBinlog { + getFieldBinlogs := func(id UniqueID, binlogs []*datapb.FieldBinlog) *datapb.FieldBinlog { for _, binlog := range binlogs { if id == binlog.GetFieldID() { return binlog @@ -532,7 +532,7 @@ func (m *meta) UpdateFlushSegmentsInfo( } clonedSegment.Deltalogs = currDeltaLogs modSegments[segmentID] = clonedSegment - var 
getClonedSegment = func(segmentID UniqueID) *SegmentInfo { + getClonedSegment := func(segmentID UniqueID) *SegmentInfo { if s, ok := modSegments[segmentID]; ok { return s } @@ -686,7 +686,7 @@ func (m *meta) mergeDropSegment(seg2Drop *SegmentInfo) (*SegmentInfo, *segMetric currBinlogs := clonedSegment.GetBinlogs() - var getFieldBinlogs = func(id UniqueID, binlogs []*datapb.FieldBinlog) *datapb.FieldBinlog { + getFieldBinlogs := func(id UniqueID, binlogs []*datapb.FieldBinlog) *datapb.FieldBinlog { for _, binlog := range binlogs { if id == binlog.GetFieldID() { return binlog @@ -983,7 +983,8 @@ func (m *meta) SetSegmentCompacting(segmentID UniqueID, compacting bool) { // - the segment info of compactedTo segment after compaction to add // The compactedTo segment could contain 0 numRows func (m *meta) PrepareCompleteCompactionMutation(plan *datapb.CompactionPlan, - result *datapb.CompactionResult) ([]*SegmentInfo, []*SegmentInfo, *SegmentInfo, *segMetricMutation, error) { + result *datapb.CompactionResult, +) ([]*SegmentInfo, []*SegmentInfo, *SegmentInfo, *segMetricMutation, error) { log.Info("meta update: prepare for complete compaction mutation") compactionLogs := plan.GetSegmentBinlogs() m.Lock() diff --git a/internal/datacoord/meta_test.go b/internal/datacoord/meta_test.go index ef9a8962df..87b122c3df 100644 --- a/internal/datacoord/meta_test.go +++ b/internal/datacoord/meta_test.go @@ -31,6 +31,7 @@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/internal/kv" + mockkv "github.com/milvus-io/milvus/internal/kv/mocks" "github.com/milvus-io/milvus/internal/metastore/kv/datacoord" "github.com/milvus-io/milvus/internal/metastore/model" "github.com/milvus-io/milvus/internal/mocks" @@ -39,8 +40,6 @@ import ( "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/testutils" - - mockkv "github.com/milvus-io/milvus/internal/kv/mocks" ) // MetaReloadSuite tests meta reload & meta creation related logic @@ -311,7 +310,6 @@ func TestMeta_Basic(t *testing.T) { info1_1 = meta.GetHealthySegment(segID1_1) assert.NotNil(t, info1_1) assert.Equal(t, false, info1_1.GetIsImporting()) - }) t.Run("Test segment with kv fails", func(t *testing.T) { @@ -495,8 +493,10 @@ func TestUpdateFlushSegmentsInfo(t *testing.T) { meta, err := newMemoryMeta() assert.NoError(t, err) - segment1 := &SegmentInfo{SegmentInfo: &datapb.SegmentInfo{ID: 1, State: commonpb.SegmentState_Growing, Binlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, getInsertLogPath("binlog0", 1))}, - Statslogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, getStatsLogPath("statslog0", 1))}}} + segment1 := &SegmentInfo{SegmentInfo: &datapb.SegmentInfo{ + ID: 1, State: commonpb.SegmentState_Growing, Binlogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, getInsertLogPath("binlog0", 1))}, + Statslogs: []*datapb.FieldBinlog{getFieldBinlogPaths(1, getStatsLogPath("statslog0", 1))}, + }} err = meta.AddSegment(context.TODO(), segment1) assert.NoError(t, err) @@ -524,7 +524,6 @@ func TestUpdateFlushSegmentsInfo(t *testing.T) { assert.Equal(t, updated.State, expected.State) assert.Equal(t, updated.size.Load(), expected.size.Load()) assert.Equal(t, updated.NumOfRows, expected.NumOfRows) - }) t.Run("update non-existed segment", func(t *testing.T) { diff --git a/internal/datacoord/metrics_info.go b/internal/datacoord/metrics_info.go index c889df2c5a..e8c869e058 100644 --- 
a/internal/datacoord/metrics_info.go +++ b/internal/datacoord/metrics_info.go @@ -20,13 +20,11 @@ import ( "context" "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus/internal/types" - "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + "github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/hardware" "github.com/milvus-io/milvus/pkg/util/metricsinfo" diff --git a/internal/datacoord/metrics_info_test.go b/internal/datacoord/metrics_info_test.go index b8f5f69470..376f6e8a7c 100644 --- a/internal/datacoord/metrics_info_test.go +++ b/internal/datacoord/metrics_info_test.go @@ -21,6 +21,7 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" @@ -28,7 +29,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" ) type mockMetricDataNodeClient struct { @@ -122,7 +122,6 @@ func TestGetDataNodeMetrics(t *testing.T) { info, err = svr.getDataNodeMetrics(ctx, req, NewSession(&NodeInfo{}, mockFailClientCreator)) assert.NoError(t, err) assert.True(t, info.HasError) - } func TestGetIndexNodeMetrics(t *testing.T) { diff --git a/internal/datacoord/mock_test.go b/internal/datacoord/mock_test.go index 82f87d9506..930d693196 100644 --- a/internal/datacoord/mock_test.go +++ b/internal/datacoord/mock_test.go @@ -22,8 +22,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/milvus-io/milvus/pkg/util/tsoutil" clientv3 "go.etcd.io/etcd/client/v3" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" @@ -36,7 +34,9 @@ import ( "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/pkg/common" + "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/metricsinfo" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -103,8 +103,7 @@ func (m *MockAllocator) allocID(ctx context.Context) (UniqueID, error) { return val, nil } -type MockAllocator0 struct { -} +type MockAllocator0 struct{} func (m *MockAllocator0) allocTimestamp(ctx context.Context) (Timestamp, error) { return Timestamp(0), nil @@ -307,7 +306,7 @@ type mockRootCoordService struct { } func (m *mockRootCoordService) RenameCollection(ctx context.Context, req *milvuspb.RenameCollectionRequest) (*commonpb.Status, error) { - //TODO implement me + // TODO implement me panic("implement me") } diff --git a/internal/datacoord/policy.go b/internal/datacoord/policy.go index ca58340b77..e675821c14 100644 --- a/internal/datacoord/policy.go +++ b/internal/datacoord/policy.go @@ -23,10 +23,11 @@ import ( "strconv" "time" - "github.com/milvus-io/milvus/pkg/log" "go.uber.org/zap" "go.uber.org/zap/zapcore" "stathat.com/c/consistent" + + "github.com/milvus-io/milvus/pkg/log" ) // RegisterPolicy decides the channels mapping after registering the nodeID @@ -443,7 +444,6 @@ func RoundRobinReassignPolicy(store ROChannelStore, reassigns []*NodeChannelInfo } else { addUpdates[targetID].Channels = append(addUpdates[targetID].Channels, ch) } - } } for _, update := range addUpdates { diff --git a/internal/datacoord/policy_test.go 
b/internal/datacoord/policy_test.go index 343b4ab0f0..17db93a16d 100644 --- a/internal/datacoord/policy_test.go +++ b/internal/datacoord/policy_test.go @@ -387,7 +387,7 @@ func TestBgCheckForChannelBalance(t *testing.T) { }, time.Now(), }, - //there should be no reallocate + // there should be no reallocate []*NodeChannelInfo{}, nil, }, @@ -409,8 +409,11 @@ func TestBgCheckForChannelBalance(t *testing.T) { "test uneven with zero", args{ []*NodeChannelInfo{ - {1, []*channel{{Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}, - {Name: "chan3", CollectionID: 1}}}, + {1, []*channel{ + {Name: "chan1", CollectionID: 1}, + {Name: "chan2", CollectionID: 1}, + {Name: "chan3", CollectionID: 1}, + }}, {2, []*channel{}}, }, time.Now(), @@ -450,7 +453,7 @@ func TestAvgReassignPolicy(t *testing.T) { }, []*NodeChannelInfo{{1, []*channel{{Name: "chan1", CollectionID: 1}}}}, }, - //as there's no available nodes except the input node, there's no reassign plan generated + // as there's no available nodes except the input node, there's no reassign plan generated []*ChannelOp{}, }, { @@ -468,10 +471,11 @@ func TestAvgReassignPolicy(t *testing.T) { []*NodeChannelInfo{{1, []*channel{{Name: "chan1", CollectionID: 1}}}}, }, []*ChannelOp{ - //as we use ceil to calculate the wanted average number, there should be one reassign - //though the average num less than 1 + // as we use ceil to calculate the wanted average number, there should be one reassign + // though the average num less than 1 {Delete, 1, []*channel{{Name: "chan1", CollectionID: 1}}, nil}, - {Add, 2, []*channel{{Name: "chan1", CollectionID: 1}}, nil}}, + {Add, 2, []*channel{{Name: "chan1", CollectionID: 1}}, nil}, + }, }, { "test_normal_reassigning_for_one_available_nodes", @@ -487,7 +491,8 @@ func TestAvgReassignPolicy(t *testing.T) { }, []*ChannelOp{ {Delete, 1, []*channel{{Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}}, nil}, - {Add, 2, []*channel{{Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}}, nil}}, + {Add, 2, []*channel{{Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}}, nil}, + }, }, { "test_normal_reassigning_for_multiple_available_nodes", @@ -499,7 +504,8 @@ func TestAvgReassignPolicy(t *testing.T) { {Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}, {Name: "chan3", CollectionID: 1}, - {Name: "chan4", CollectionID: 1}}}, + {Name: "chan4", CollectionID: 1}, + }}, 2: {2, []*channel{}}, 3: {3, []*channel{}}, 4: {4, []*channel{}}, @@ -512,11 +518,15 @@ func TestAvgReassignPolicy(t *testing.T) { }}}, }, []*ChannelOp{ - {Delete, 1, []*channel{ - {Name: "chan1", CollectionID: 1}, - {Name: "chan2", CollectionID: 1}, - {Name: "chan3", CollectionID: 1}}, - nil}, + { + Delete, 1, + []*channel{ + {Name: "chan1", CollectionID: 1}, + {Name: "chan2", CollectionID: 1}, + {Name: "chan3", CollectionID: 1}, + }, + nil, + }, {Add, 2, []*channel{{Name: "chan1", CollectionID: 1}}, nil}, {Add, 3, []*channel{{Name: "chan2", CollectionID: 1}}, nil}, {Add, 4, []*channel{{Name: "chan3", CollectionID: 1}}, nil}, @@ -529,12 +539,18 @@ func TestAvgReassignPolicy(t *testing.T) { memkv.NewMemoryKV(), map[int64]*NodeChannelInfo{ 1: {1, []*channel{ - {Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}, - {Name: "chan3", CollectionID: 1}, {Name: "chan4", CollectionID: 1}, - {Name: "chan5", CollectionID: 1}, {Name: "chan6", CollectionID: 1}, - {Name: "chan7", CollectionID: 1}, {Name: "chan8", CollectionID: 1}, - {Name: "chan9", CollectionID: 1}, {Name: "chan10", 
CollectionID: 1}, - {Name: "chan11", CollectionID: 1}, {Name: "chan12", CollectionID: 1}, + {Name: "chan1", CollectionID: 1}, + {Name: "chan2", CollectionID: 1}, + {Name: "chan3", CollectionID: 1}, + {Name: "chan4", CollectionID: 1}, + {Name: "chan5", CollectionID: 1}, + {Name: "chan6", CollectionID: 1}, + {Name: "chan7", CollectionID: 1}, + {Name: "chan8", CollectionID: 1}, + {Name: "chan9", CollectionID: 1}, + {Name: "chan10", CollectionID: 1}, + {Name: "chan11", CollectionID: 1}, + {Name: "chan12", CollectionID: 1}, }}, 2: {2, []*channel{ {Name: "chan13", CollectionID: 1}, {Name: "chan14", CollectionID: 1}, @@ -544,33 +560,51 @@ func TestAvgReassignPolicy(t *testing.T) { }, }, []*NodeChannelInfo{{1, []*channel{ - {Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}, - {Name: "chan3", CollectionID: 1}, {Name: "chan4", CollectionID: 1}, - {Name: "chan5", CollectionID: 1}, {Name: "chan6", CollectionID: 1}, - {Name: "chan7", CollectionID: 1}, {Name: "chan8", CollectionID: 1}, - {Name: "chan9", CollectionID: 1}, {Name: "chan10", CollectionID: 1}, - {Name: "chan11", CollectionID: 1}, {Name: "chan12", CollectionID: 1}, + {Name: "chan1", CollectionID: 1}, + {Name: "chan2", CollectionID: 1}, + {Name: "chan3", CollectionID: 1}, + {Name: "chan4", CollectionID: 1}, + {Name: "chan5", CollectionID: 1}, + {Name: "chan6", CollectionID: 1}, + {Name: "chan7", CollectionID: 1}, + {Name: "chan8", CollectionID: 1}, + {Name: "chan9", CollectionID: 1}, + {Name: "chan10", CollectionID: 1}, + {Name: "chan11", CollectionID: 1}, + {Name: "chan12", CollectionID: 1}, }}}, }, []*ChannelOp{ {Delete, 1, []*channel{ - {Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}, - {Name: "chan3", CollectionID: 1}, {Name: "chan4", CollectionID: 1}, - {Name: "chan5", CollectionID: 1}, {Name: "chan6", CollectionID: 1}, - {Name: "chan7", CollectionID: 1}, {Name: "chan8", CollectionID: 1}, - {Name: "chan9", CollectionID: 1}, {Name: "chan10", CollectionID: 1}, - {Name: "chan11", CollectionID: 1}, {Name: "chan12", CollectionID: 1}, + {Name: "chan1", CollectionID: 1}, + {Name: "chan2", CollectionID: 1}, + {Name: "chan3", CollectionID: 1}, + {Name: "chan4", CollectionID: 1}, + {Name: "chan5", CollectionID: 1}, + {Name: "chan6", CollectionID: 1}, + {Name: "chan7", CollectionID: 1}, + {Name: "chan8", CollectionID: 1}, + {Name: "chan9", CollectionID: 1}, + {Name: "chan10", CollectionID: 1}, + {Name: "chan11", CollectionID: 1}, + {Name: "chan12", CollectionID: 1}, }, nil}, {Add, 4, []*channel{ - {Name: "chan1", CollectionID: 1}, {Name: "chan2", CollectionID: 1}, - {Name: "chan3", CollectionID: 1}, {Name: "chan4", CollectionID: 1}, - {Name: "chan5", CollectionID: 1}}, nil}, + {Name: "chan1", CollectionID: 1}, + {Name: "chan2", CollectionID: 1}, + {Name: "chan3", CollectionID: 1}, + {Name: "chan4", CollectionID: 1}, + {Name: "chan5", CollectionID: 1}, + }, nil}, {Add, 3, []*channel{ - {Name: "chan6", CollectionID: 1}, {Name: "chan7", CollectionID: 1}, - {Name: "chan8", CollectionID: 1}, {Name: "chan9", CollectionID: 1}, + {Name: "chan6", CollectionID: 1}, + {Name: "chan7", CollectionID: 1}, + {Name: "chan8", CollectionID: 1}, + {Name: "chan9", CollectionID: 1}, }, nil}, {Add, 2, []*channel{ - {Name: "chan10", CollectionID: 1}, {Name: "chan11", CollectionID: 1}, + {Name: "chan10", CollectionID: 1}, + {Name: "chan11", CollectionID: 1}, {Name: "chan12", CollectionID: 1}, }, nil}, }, diff --git a/internal/datacoord/segment_allocation_policy.go b/internal/datacoord/segment_allocation_policy.go index 
1071691273..0a0271c2db 100644 --- a/internal/datacoord/segment_allocation_policy.go +++ b/internal/datacoord/segment_allocation_policy.go @@ -21,10 +21,10 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -68,7 +68,8 @@ type AllocatePolicy func(segments []*SegmentInfo, count int64, // AllocatePolicyV1 v1 policy simple allocation policy using Greedy Algorithm func AllocatePolicyV1(segments []*SegmentInfo, count int64, - maxCountPerSegment int64) ([]*Allocation, []*Allocation) { + maxCountPerSegment int64, +) ([]*Allocation, []*Allocation) { newSegmentAllocations := make([]*Allocation, 0) existedSegmentAllocations := make([]*Allocation, 0) // create new segment if count >= max num diff --git a/internal/datacoord/segment_allocation_policy_test.go b/internal/datacoord/segment_allocation_policy_test.go index ad45216bf3..250d4b55b3 100644 --- a/internal/datacoord/segment_allocation_policy_test.go +++ b/internal/datacoord/segment_allocation_policy_test.go @@ -21,13 +21,13 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/pkg/common" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) func TestUpperLimitCalBySchema(t *testing.T) { diff --git a/internal/datacoord/segment_info.go b/internal/datacoord/segment_info.go index 95f3e86f74..6d630e5127 100644 --- a/internal/datacoord/segment_info.go +++ b/internal/datacoord/segment_info.go @@ -238,7 +238,7 @@ func (s *SegmentInfo) Clone(opts ...SegmentInfoOption) *SegmentInfo { allocations: s.allocations, lastFlushTime: s.lastFlushTime, isCompacting: s.isCompacting, - //cannot copy size, since binlog may be changed + // cannot copy size, since binlog may be changed lastWrittenTime: s.lastWrittenTime, } for _, opt := range opts { diff --git a/internal/datacoord/segment_manager.go b/internal/datacoord/segment_manager.go index 90aeb21cdb..08b2206755 100644 --- a/internal/datacoord/segment_manager.go +++ b/internal/datacoord/segment_manager.go @@ -23,7 +23,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.opentelemetry.io/otel" "go.uber.org/zap" @@ -31,17 +30,16 @@ import ( "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/retry" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) -var ( - // allocPool pool of Allocation, to reduce allocation of Allocation - allocPool = sync.Pool{ - New: func() interface{} { - return &Allocation{} - }, - } -) +// allocPool pool of Allocation, to reduce allocation of Allocation +var allocPool = sync.Pool{ + New: func() interface{} { + return &Allocation{} + }, +} // getAllocation unifies way to retrieve allocation struct func getAllocation(numOfRows int64) *Allocation { @@ -230,7 +228,7 @@ func (s *SegmentManager) loadSegmentsFromMeta() { } func (s *SegmentManager) maybeResetLastExpireForSegments() error { - //for all sealed and growing segments, need to reset last expire + // for all sealed and 
growing segments, need to reset last expire if len(s.segments) > 0 { var latestTs uint64 allocateErr := retry.Do(context.Background(), func() error { @@ -257,7 +255,8 @@ func (s *SegmentManager) maybeResetLastExpireForSegments() error { // AllocSegment allocate segment per request collcation, partication, channel and rows func (s *SegmentManager) AllocSegment(ctx context.Context, collectionID UniqueID, - partitionID UniqueID, channelName string, requestRows int64) ([]*Allocation, error) { + partitionID UniqueID, channelName string, requestRows int64, +) ([]*Allocation, error) { log := log.Ctx(ctx). With(zap.Int64("collectionID", collectionID)). With(zap.Int64("partitionID", partitionID)). @@ -322,7 +321,8 @@ func (s *SegmentManager) AllocSegment(ctx context.Context, collectionID UniqueID // allocSegmentForImport allocates one segment allocation for bulk insert. func (s *SegmentManager) allocSegmentForImport(ctx context.Context, collectionID UniqueID, - partitionID UniqueID, channelName string, requestRows int64, importTaskID int64) (*Allocation, error) { + partitionID UniqueID, channelName string, requestRows int64, importTaskID int64, +) (*Allocation, error) { _, sp := otel.Tracer(typeutil.DataCoordRole).Start(ctx, "Alloc-ImportSegment") defer sp.End() s.mu.Lock() @@ -375,7 +375,8 @@ func (s *SegmentManager) genExpireTs(ctx context.Context, isImported bool) (Time } func (s *SegmentManager) openNewSegment(ctx context.Context, collectionID UniqueID, partitionID UniqueID, - channelName string, segmentState commonpb.SegmentState) (*SegmentInfo, error) { + channelName string, segmentState commonpb.SegmentState, +) (*SegmentInfo, error) { log := log.Ctx(ctx) ctx, sp := otel.Tracer(typeutil.DataCoordRole).Start(ctx, "open-Segment") defer sp.End() diff --git a/internal/datacoord/segment_manager_test.go b/internal/datacoord/segment_manager_test.go index ec5fc66859..41c9c81121 100644 --- a/internal/datacoord/segment_manager_test.go +++ b/internal/datacoord/segment_manager_test.go @@ -56,7 +56,7 @@ func TestManagerOptions(t *testing.T) { opt := withCalUpperLimitPolicy(defaultCalUpperLimitPolicy()) assert.NotNil(t, opt) - //manual set nil`` + // manual set nil`` segmentManager.estimatePolicy = nil opt.apply(segmentManager) assert.True(t, segmentManager.estimatePolicy != nil) @@ -144,7 +144,7 @@ func TestAllocSegment(t *testing.T) { } func TestLastExpireReset(t *testing.T) { - //set up meta on dc + // set up meta on dc ctx := context.Background() paramtable.Init() Params.Save(Params.DataCoordCfg.AllocLatestExpireAttempt.Key, "1") @@ -179,7 +179,7 @@ func TestLastExpireReset(t *testing.T) { } meta.AddSegment(context.TODO(), initSegment) - //assign segments, set max segment to only 1MB, equalling to 10485 rows + // assign segments, set max segment to only 1MB, equalling to 10485 rows var bigRows, smallRows int64 = 10000, 1000 segmentManager, _ := newSegmentManager(meta, mockAllocator) initSegment.SegmentInfo.State = commonpb.SegmentState_Dropped @@ -193,7 +193,7 @@ func TestLastExpireReset(t *testing.T) { allocs, _ = segmentManager.AllocSegment(context.Background(), collID, 0, channelName, smallRows) segmentID3, expire3 := allocs[0].SegmentID, allocs[0].ExpireTime - //simulate handleTimeTick op on dataCoord + // simulate handleTimeTick op on dataCoord meta.SetCurrentRows(segmentID1, bigRows) meta.SetCurrentRows(segmentID2, bigRows) meta.SetCurrentRows(segmentID3, smallRows) @@ -202,11 +202,11 @@ func TestLastExpireReset(t *testing.T) { assert.Equal(t, commonpb.SegmentState_Sealed, 
meta.GetSegment(segmentID2).GetState()) assert.Equal(t, commonpb.SegmentState_Growing, meta.GetSegment(segmentID3).GetState()) - //pretend that dataCoord break down + // pretend that dataCoord break down metaKV.Close() etcdCli.Close() - //dataCoord restart + // dataCoord restart newEtcdCli, _ := etcd.GetEtcdClient(Params.EtcdCfg.UseEmbedEtcd.GetAsBool(), Params.EtcdCfg.EtcdUseSSL.GetAsBool(), Params.EtcdCfg.Endpoints.GetAsStrings(), Params.EtcdCfg.EtcdTLSCert.GetValue(), Params.EtcdCfg.EtcdTLSKey.GetValue(), Params.EtcdCfg.EtcdTLSCACert.GetValue(), Params.EtcdCfg.EtcdTLSMinVersion.GetValue()) @@ -217,14 +217,14 @@ func TestLastExpireReset(t *testing.T) { restartedMeta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) assert.Nil(t, err) newSegmentManager, _ := newSegmentManager(restartedMeta, mockAllocator) - //reset row number to avoid being cleaned by empty segment + // reset row number to avoid being cleaned by empty segment restartedMeta.SetCurrentRows(segmentID1, bigRows) restartedMeta.SetCurrentRows(segmentID2, bigRows) restartedMeta.SetCurrentRows(segmentID3, smallRows) - //verify lastExpire of growing and sealed segments + // verify lastExpire of growing and sealed segments segment1, segment2, segment3 := restartedMeta.GetSegment(segmentID1), restartedMeta.GetSegment(segmentID2), restartedMeta.GetSegment(segmentID3) - //segmentState should not be altered but growing segment's lastExpire has been reset to the latest + // segmentState should not be altered but growing segment's lastExpire has been reset to the latest assert.Equal(t, commonpb.SegmentState_Sealed, segment1.GetState()) assert.Equal(t, commonpb.SegmentState_Sealed, segment2.GetState()) assert.Equal(t, commonpb.SegmentState_Growing, segment3.GetState()) @@ -408,7 +408,7 @@ func TestAllocRowsLargerThanOneSegment(t *testing.T) { assert.NoError(t, err) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) - var mockPolicy = func(schema *schemapb.CollectionSchema) (int, error) { + mockPolicy := func(schema *schemapb.CollectionSchema) (int, error) { return 1, nil } segmentManager, _ := newSegmentManager(meta, mockAllocator, withCalUpperLimitPolicy(mockPolicy)) @@ -430,7 +430,7 @@ func TestExpireAllocation(t *testing.T) { assert.NoError(t, err) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) - var mockPolicy = func(schema *schemapb.CollectionSchema) (int, error) { + mockPolicy := func(schema *schemapb.CollectionSchema) (int, error) { return 10000000, nil } segmentManager, _ := newSegmentManager(meta, mockAllocator, withCalUpperLimitPolicy(mockPolicy)) @@ -548,7 +548,7 @@ func TestTryToSealSegment(t *testing.T) { collID, err := mockAllocator.allocID(context.Background()) assert.NoError(t, err) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) - segmentManager, _ := newSegmentManager(meta, mockAllocator, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) //always seal + segmentManager, _ := newSegmentManager(meta, mockAllocator, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) // always seal allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) assert.NoError(t, err) assert.EqualValues(t, 1, len(allocations)) @@ -573,7 +573,7 @@ func TestTryToSealSegment(t *testing.T) { collID, err := mockAllocator.allocID(context.Background()) assert.NoError(t, err) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) - segmentManager, _ := newSegmentManager(meta, mockAllocator, 
withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal + segmentManager, _ := newSegmentManager(meta, mockAllocator, withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) // always seal allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) assert.NoError(t, err) assert.EqualValues(t, 1, len(allocations)) @@ -600,7 +600,7 @@ func TestTryToSealSegment(t *testing.T) { meta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) segmentManager, _ := newSegmentManager(meta, mockAllocator, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64)), - withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal + withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) // always seal allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) assert.NoError(t, err) assert.EqualValues(t, 1, len(allocations)) @@ -712,7 +712,7 @@ func TestTryToSealSegment(t *testing.T) { collID, err := mockAllocator.allocID(context.Background()) assert.NoError(t, err) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) - segmentManager, _ := newSegmentManager(meta, mockAllocator, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) //always seal + segmentManager, _ := newSegmentManager(meta, mockAllocator, withSegmentSealPolices(sealByLifetimePolicy(math.MinInt64))) // always seal allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) assert.NoError(t, err) assert.EqualValues(t, 1, len(allocations)) @@ -741,7 +741,7 @@ func TestTryToSealSegment(t *testing.T) { collID, err := mockAllocator.allocID(context.Background()) assert.NoError(t, err) meta.AddCollection(&collectionInfo{ID: collID, Schema: schema}) - segmentManager, _ := newSegmentManager(meta, mockAllocator, withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) //always seal + segmentManager, _ := newSegmentManager(meta, mockAllocator, withChannelSealPolices(getChannelOpenSegCapacityPolicy(-1))) // always seal allocations, err := segmentManager.AllocSegment(context.TODO(), collID, 0, "c1", 2) assert.NoError(t, err) assert.EqualValues(t, 1, len(allocations)) @@ -800,7 +800,6 @@ func TestAllocationPool(t *testing.T) { assert.EqualValues(t, 100, allo.NumOfRows) assert.EqualValues(t, 0, allo.ExpireTime) assert.EqualValues(t, 0, allo.SegmentID) - }) } diff --git a/internal/datacoord/server.go b/internal/datacoord/server.go index c77afcbf7b..451b934ee8 100644 --- a/internal/datacoord/server.go +++ b/internal/datacoord/server.go @@ -134,7 +134,7 @@ type Server struct { icSession *sessionutil.Session dnEventCh <-chan *sessionutil.SessionEvent inEventCh <-chan *sessionutil.SessionEvent - //qcEventCh <-chan *sessionutil.SessionEvent + // qcEventCh <-chan *sessionutil.SessionEvent enableActiveStandBy bool activateFunc func() error @@ -142,9 +142,9 @@ type Server struct { dataNodeCreator dataNodeCreatorFunc indexNodeCreator indexNodeCreatorFunc rootCoordClientCreator rootCoordCreatorFunc - //indexCoord types.IndexCoord + // indexCoord types.IndexCoord - //segReferManager *SegmentReferenceManager + // segReferManager *SegmentReferenceManager indexBuilder *indexBuilder indexNodeManager *IndexNodeManager @@ -902,7 +902,7 @@ func (s *Server) startFlushLoop(ctx context.Context) { logutil.Logger(s.ctx).Info("flush loop shutdown") return case segmentID := <-s.flushCh: - //Ignore return error + // Ignore return error log.Info("flush successfully", zap.Any("segmentID", segmentID)) err := s.postFlush(ctx, segmentID) if err != 
nil { diff --git a/internal/datacoord/server_test.go b/internal/datacoord/server_test.go index 623577c408..de7f4b5938 100644 --- a/internal/datacoord/server_test.go +++ b/internal/datacoord/server_test.go @@ -331,14 +331,14 @@ func TestFlush(t *testing.T) { }) } -//func TestGetComponentStates(t *testing.T) { -//svr := newTestServer(t) -//defer closeTestServer(t, svr) -//cli := newMockDataNodeClient(1) -//err := cli.Init() -//assert.NoError(t, err) -//err = cli.Start() -//assert.NoError(t, err) +// func TestGetComponentStates(t *testing.T) { +// svr := newTestServer(t) +// defer closeTestServer(t, svr) +// cli := newMockDataNodeClient(1) +// err := cli.Init() +// assert.NoError(t, err) +// err = cli.Start() +// assert.NoError(t, err) //err = svr.cluster.Register(&dataNode{ //id: 1, @@ -503,7 +503,6 @@ func TestGetInsertBinlogPaths(t *testing.T) { resp, err := svr.GetInsertBinlogPaths(svr.ctx, req) assert.NoError(t, err) assert.EqualValues(t, commonpb.ErrorCode_UnexpectedError, resp.GetStatus().GetErrorCode()) - }) t.Run("with closed server", func(t *testing.T) { @@ -529,7 +528,6 @@ func TestGetCollectionStatistics(t *testing.T) { resp, err := svr.GetCollectionStatistics(svr.ctx, req) assert.NoError(t, err) assert.EqualValues(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode()) - }) t.Run("with closed server", func(t *testing.T) { svr := newTestServer(t, nil) @@ -1743,11 +1741,10 @@ func TestDropVirtualChannel(t *testing.T) { err = svr.channelManager.Watch(&channel{Name: "ch1", CollectionID: 0}) require.Nil(t, err) - //resend + // resend resp, err = svr.DropVirtualChannel(ctx, req) assert.NoError(t, err) assert.Equal(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode()) - }) t.Run("with channel not matched", func(t *testing.T) { @@ -1798,35 +1795,45 @@ func TestGetChannelSeekPosition(t *testing.T) { channelName string expectedPos *msgpb.MsgPosition }{ - {"test-with-channelCP", + { + "test-with-channelCP", &msgpb.MsgPosition{ChannelName: "ch1", Timestamp: 100, MsgID: msgID}, []*msgpb.MsgPosition{{ChannelName: "ch1", Timestamp: 50, MsgID: msgID}, {ChannelName: "ch1", Timestamp: 200, MsgID: msgID}}, startPos1, - "ch1", &msgpb.MsgPosition{ChannelName: "ch1", Timestamp: 100, MsgID: msgID}}, + "ch1", &msgpb.MsgPosition{ChannelName: "ch1", Timestamp: 100, MsgID: msgID}, + }, - {"test-with-segmentDMLPos", + { + "test-with-segmentDMLPos", nil, []*msgpb.MsgPosition{{ChannelName: "ch1", Timestamp: 50, MsgID: msgID}, {ChannelName: "ch1", Timestamp: 200, MsgID: msgID}}, startPos1, - "ch1", &msgpb.MsgPosition{ChannelName: "ch1", Timestamp: 50, MsgID: msgID}}, + "ch1", &msgpb.MsgPosition{ChannelName: "ch1", Timestamp: 50, MsgID: msgID}, + }, - {"test-with-collStartPos", + { + "test-with-collStartPos", nil, nil, startPos1, - "ch1", &msgpb.MsgPosition{ChannelName: "ch1", MsgID: startPos1[0].Data}}, + "ch1", &msgpb.MsgPosition{ChannelName: "ch1", MsgID: startPos1[0].Data}, + }, - {"test-non-exist-channel-1", + { + "test-non-exist-channel-1", nil, nil, startPosNonExist, - "ch1", nil}, + "ch1", nil, + }, - {"test-non-exist-channel-2", + { + "test-non-exist-channel-2", nil, nil, nil, - "ch1", nil}, + "ch1", nil, + }, } for _, test := range tests { t.Run(test.testName, func(t *testing.T) { @@ -1858,7 +1865,8 @@ func TestGetChannelSeekPosition(t *testing.T) { seekPos := svr.handler.(*ServerHandler).GetChannelSeekPosition(&channel{ Name: test.channelName, - CollectionID: 0}, allPartitionID) + CollectionID: 0, + }, allPartitionID) if test.expectedPos == nil { assert.True(t, seekPos == nil) } 
else { @@ -2460,7 +2468,7 @@ func TestShouldDropChannel(t *testing.T) { }) t.Run("channel name not in kv, collection not exist", func(t *testing.T) { - //myRoot.code = commonpb.ErrorCode_CollectionNotExists + // myRoot.code = commonpb.ErrorCode_CollectionNotExists myRoot.EXPECT().DescribeCollection(mock.Anything, mock.Anything). Return(&milvuspb.DescribeCollectionResponse{ Status: merr.Status(merr.WrapErrCollectionNotFound(-1)), @@ -2509,7 +2517,6 @@ func TestShouldDropChannel(t *testing.T) { } func TestGetRecoveryInfo(t *testing.T) { - t.Run("test get recovery info with no segments", func(t *testing.T) { svr := newTestServer(t, nil) defer closeTestServer(t, svr) @@ -2531,7 +2538,8 @@ func TestGetRecoveryInfo(t *testing.T) { }) createSegment := func(id, collectionID, partitionID, numOfRows int64, posTs uint64, - channel string, state commonpb.SegmentState) *datapb.SegmentInfo { + channel string, state commonpb.SegmentState, + ) *datapb.SegmentInfo { return &datapb.SegmentInfo{ ID: id, CollectionID: collectionID, @@ -2718,7 +2726,7 @@ func TestGetRecoveryInfo(t *testing.T) { assert.NoError(t, err) err = svr.meta.AddSegment(context.TODO(), NewSegmentInfo(seg2)) assert.NoError(t, err) - //svr.indexCoord.(*mocks.MockIndexCoord).EXPECT().GetIndexInfos(mock.Anything, mock.Anything).Return(nil, nil) + // svr.indexCoord.(*mocks.MockIndexCoord).EXPECT().GetIndexInfos(mock.Anything, mock.Anything).Return(nil, nil) req := &datapb.GetRecoveryInfoRequest{ CollectionID: 0, @@ -3228,7 +3236,7 @@ func TestOptions(t *testing.T) { }) t.Run("WithDataNodeCreator", func(t *testing.T) { var target int64 - var val = rand.Int63() + val := rand.Int63() opt := WithDataNodeCreator(func(context.Context, string, int64) (types.DataNode, error) { target = val return nil, nil @@ -3596,9 +3604,7 @@ func TestGetFlushState(t *testing.T) { svr := newTestServerWithMeta(t, nil, meta) defer closeTestServer(t, svr) - var ( - collection = int64(0) - ) + collection := int64(0) resp, err := svr.GetFlushState(context.Background(), &datapb.GetFlushStateRequest{ FlushTs: 11, @@ -3624,18 +3630,34 @@ func TestGetFlushAllState(t *testing.T) { ExpectedSuccess bool ExpectedFlushed bool }{ - {"test FlushAll flushed", []Timestamp{100, 200}, 99, - true, false, false, false, true, true}, - {"test FlushAll not flushed", []Timestamp{100, 200}, 150, - true, false, false, false, true, false}, - {"test Sever is not healthy", nil, 0, - false, false, false, false, false, false}, - {"test ListDatabase failed", nil, 0, - true, true, false, false, false, false}, - {"test ShowCollections failed", nil, 0, - true, false, true, false, false, false}, - {"test DescribeCollection failed", nil, 0, - true, false, false, true, false, false}, + { + "test FlushAll flushed", + []Timestamp{100, 200}, + 99, + true, false, false, false, true, true, + }, + { + "test FlushAll not flushed", + []Timestamp{100, 200}, + 150, + true, false, false, false, true, false, + }, + { + "test Sever is not healthy", nil, 0, + false, false, false, false, false, false, + }, + { + "test ListDatabase failed", nil, 0, + true, true, false, false, false, false, + }, + { + "test ShowCollections failed", nil, 0, + true, false, true, false, false, false, + }, + { + "test DescribeCollection failed", nil, 0, + true, false, false, true, false, false, + }, } for _, test := range tests { t.Run(test.testName, func(t *testing.T) { @@ -4238,9 +4260,9 @@ func newTestServerWithMeta(t *testing.T, receiveCh chan any, meta *meta, opts .. 
svr.rootCoordClientCreator = func(ctx context.Context, metaRootPath string, etcdCli *clientv3.Client) (types.RootCoord, error) { return newMockRootCoordService(), nil } - //indexCoord := mocks.NewMockIndexCoord(t) - //indexCoord.EXPECT().GetIndexInfos(mock.Anything, mock.Anything).Return(nil, nil).Maybe() - //svr.indexCoord = indexCoord + // indexCoord := mocks.NewMockIndexCoord(t) + // indexCoord.EXPECT().GetIndexInfos(mock.Anything, mock.Anything).Return(nil, nil).Maybe() + // svr.indexCoord = indexCoord err = svr.Init() assert.NoError(t, err) @@ -4327,7 +4349,8 @@ func Test_CheckHealth(t *testing.T) { svr.stateCode.Store(commonpb.StateCode_Healthy) healthClient := &mockDataNodeClient{ id: 1, - state: commonpb.StateCode_Healthy} + state: commonpb.StateCode_Healthy, + } sm := NewSessionManager() sm.sessions = struct { sync.RWMutex @@ -4352,7 +4375,8 @@ func Test_CheckHealth(t *testing.T) { svr.stateCode.Store(commonpb.StateCode_Healthy) unhealthClient := &mockDataNodeClient{ id: 1, - state: commonpb.StateCode_Abnormal} + state: commonpb.StateCode_Abnormal, + } sm := NewSessionManager() sm.sessions = struct { sync.RWMutex diff --git a/internal/datacoord/services_test.go b/internal/datacoord/services_test.go index 3ef562a23a..3f980a8d1d 100644 --- a/internal/datacoord/services_test.go +++ b/internal/datacoord/services_test.go @@ -98,7 +98,6 @@ func TestServer_GcConfirm(t *testing.T) { } func TestGetRecoveryInfoV2(t *testing.T) { - t.Run("test get recovery info with no segments", func(t *testing.T) { svr := newTestServer(t, nil) defer closeTestServer(t, svr) @@ -119,7 +118,8 @@ func TestGetRecoveryInfoV2(t *testing.T) { }) createSegment := func(id, collectionID, partitionID, numOfRows int64, posTs uint64, - channel string, state commonpb.SegmentState) *datapb.SegmentInfo { + channel string, state commonpb.SegmentState, + ) *datapb.SegmentInfo { return &datapb.SegmentInfo{ ID: id, CollectionID: collectionID, diff --git a/internal/datacoord/session_manager.go b/internal/datacoord/session_manager.go index b4e2186103..2c6942dab5 100644 --- a/internal/datacoord/session_manager.go +++ b/internal/datacoord/session_manager.go @@ -22,6 +22,8 @@ import ( "sync" "time" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" grpcdatanodeclient "github.com/milvus-io/milvus/internal/distributed/datanode/client" "github.com/milvus-io/milvus/internal/proto/datapb" @@ -32,7 +34,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/retry" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) const ( diff --git a/internal/datanode/allocator/allocator.go b/internal/datanode/allocator/allocator.go index 0ec6a9c5a9..355974075b 100644 --- a/internal/datanode/allocator/allocator.go +++ b/internal/datanode/allocator/allocator.go @@ -58,7 +58,6 @@ func (a *Impl) GetIDAlloactor() *gAllocator.IDAllocator { } func (a *Impl) GetGenerator(count int, done <-chan struct{}) (<-chan UniqueID, error) { - idStart, _, err := a.Alloc(uint32(count)) if err != nil { return nil, err diff --git a/internal/datanode/allocator/allocator_test.go b/internal/datanode/allocator/allocator_test.go index fe82b1c40b..b3bce9b325 100644 --- a/internal/datanode/allocator/allocator_test.go +++ b/internal/datanode/allocator/allocator_test.go @@ -85,6 +85,7 @@ func (m *RootCoordFactory) AllocID(ctx context.Context, in *rootcoordpb.AllocIDR resp := &rootcoordpb.AllocIDResponse{ ID: m.ID, Count: in.GetCount(), - Status: merr.Status(nil)} + Status: merr.Status(nil), + } 
return resp, nil } diff --git a/internal/datanode/binlog_io.go b/internal/datanode/binlog_io.go index 7708ab0d7e..0ac8acef31 100644 --- a/internal/datanode/binlog_io.go +++ b/internal/datanode/binlog_io.go @@ -23,6 +23,7 @@ import ( "time" "github.com/cockroachdb/errors" + "go.uber.org/zap" "github.com/milvus-io/milvus/internal/datanode/allocator" "github.com/milvus-io/milvus/internal/proto/datapb" @@ -32,7 +33,6 @@ import ( "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/conc" "github.com/milvus-io/milvus/pkg/util/metautil" - "go.uber.org/zap" ) var ( @@ -64,8 +64,10 @@ type binlogIO struct { allocator.Allocator } -var _ downloader = (*binlogIO)(nil) -var _ uploader = (*binlogIO)(nil) +var ( + _ downloader = (*binlogIO)(nil) + _ uploader = (*binlogIO)(nil) +) func (b *binlogIO) download(ctx context.Context, paths []string) ([]*Blob, error) { log.Debug("down load", zap.Strings("path", paths)) @@ -78,7 +80,7 @@ func (b *binlogIO) download(ctx context.Context, paths []string) ([]*Blob, error localPath := path future := getMultiReadPool().Submit(func() (any, error) { var vs []byte - var err = errStart + err := errStart for err != nil { select { case <-ctx.Done(): @@ -111,7 +113,8 @@ func (b *binlogIO) uploadSegmentFiles( ctx context.Context, CollectionID UniqueID, segID UniqueID, - kvs map[string][]byte) error { + kvs map[string][]byte, +) error { log.Debug("update", zap.Int64("collectionID", CollectionID), zap.Int64("segmentID", segID)) if len(kvs) == 0 { return nil @@ -121,7 +124,7 @@ func (b *binlogIO) uploadSegmentFiles( localPath := key localVal := val future := getMultiReadPool().Submit(func() (any, error) { - var err = errStart + err := errStart for err != nil { select { case <-ctx.Done(): @@ -242,7 +245,8 @@ func (b *binlogIO) uploadStatsLog( iData *InsertData, stats *storage.PrimaryKeyStats, totRows int64, - meta *etcdpb.CollectionMeta) (map[UniqueID]*datapb.FieldBinlog, map[UniqueID]*datapb.FieldBinlog, error) { + meta *etcdpb.CollectionMeta, +) (map[UniqueID]*datapb.FieldBinlog, map[UniqueID]*datapb.FieldBinlog, error) { var inPaths map[int64]*datapb.FieldBinlog var err error @@ -278,8 +282,8 @@ func (b *binlogIO) uploadInsertLog( segID UniqueID, partID UniqueID, iData *InsertData, - meta *etcdpb.CollectionMeta) (map[UniqueID]*datapb.FieldBinlog, error) { - + meta *etcdpb.CollectionMeta, +) (map[UniqueID]*datapb.FieldBinlog, error) { iCodec := storage.NewInsertCodecWithSchema(meta) kvs := make(map[string][]byte) @@ -309,7 +313,8 @@ func (b *binlogIO) uploadDeltaLog( segID UniqueID, partID UniqueID, dData *DeleteData, - meta *etcdpb.CollectionMeta) ([]*datapb.FieldBinlog, error) { + meta *etcdpb.CollectionMeta, +) ([]*datapb.FieldBinlog, error) { var ( deltaInfo = make([]*datapb.FieldBinlog, 0) kvs = make(map[string][]byte) diff --git a/internal/datanode/binlog_io_test.go b/internal/datanode/binlog_io_test.go index 7084d819e5..3df83685ef 100644 --- a/internal/datanode/binlog_io_test.go +++ b/internal/datanode/binlog_io_test.go @@ -24,16 +24,16 @@ import ( "time" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/datanode/allocator" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - 
"github.com/stretchr/testify/require" - "go.uber.org/zap" ) var binlogTestDir = "/tmp/milvus_test/test_binlog_io" @@ -201,7 +201,6 @@ func TestBinlogIOInnerMethods(t *testing.T) { for _, test := range tests { t.Run(test.description, func(t *testing.T) { if test.isvalid { - k, v, err := b.genDeltaBlobs(&DeleteData{ Pks: []primaryKey{test.deletepk}, Tss: []uint64{test.ts}, @@ -237,7 +236,6 @@ func TestBinlogIOInnerMethods(t *testing.T) { assert.Error(t, err) assert.Empty(t, k) assert.Empty(t, v) - }) }) diff --git a/internal/datanode/buffer.go b/internal/datanode/buffer.go index 6615ee2905..48e16988b5 100644 --- a/internal/datanode/buffer.go +++ b/internal/datanode/buffer.go @@ -62,7 +62,6 @@ func (m *DeltaBufferManager) GetEntriesNum(segID UniqueID) int64 { func (m *DeltaBufferManager) UpdateCompactedSegments() { compactedTo2From := m.channel.listCompactedSegmentIDs() for compactedTo, compactedFrom := range compactedTo2From { - // if the compactedTo segment has 0 numRows, there'll be no segments // in the channel meta, so remove all compacted from segments related if !m.channel.hasSegment(compactedTo, true) { @@ -87,7 +86,6 @@ func (m *DeltaBufferManager) UpdateCompactedSegments() { // only store delBuf if EntriesNum > 0 if compactToDelBuff.EntriesNum > 0 { - m.pushOrFixHeap(compactedTo, compactToDelBuff) // We need to re-add the memorySize because m.Delete(segID) sub them all. m.usedMemory.Add(compactToDelBuff.GetMemorySize()) @@ -129,7 +127,8 @@ func (m *DeltaBufferManager) deleteFromHeap(buffer *DelDataBuf) { } func (m *DeltaBufferManager) StoreNewDeletes(segID UniqueID, pks []primaryKey, - tss []Timestamp, tr TimeRange, startPos, endPos *msgpb.MsgPosition) { + tss []Timestamp, tr TimeRange, startPos, endPos *msgpb.MsgPosition, +) { buffer, loaded := m.Load(segID) if !loaded { buffer = newDelDataBuf(segID) @@ -154,7 +153,6 @@ func (m *DeltaBufferManager) Delete(segID UniqueID) { m.usedMemory.Sub(buffer.GetMemorySize()) m.deleteFromHeap(buffer) m.channel.rollDeleteBuffer(segID) - } } @@ -165,7 +163,7 @@ func (m *DeltaBufferManager) popHeapItem() *Item { } func (m *DeltaBufferManager) ShouldFlushSegments() []UniqueID { - var memUsage = m.usedMemory.Load() + memUsage := m.usedMemory.Load() if memUsage < Params.DataNodeCfg.FlushDeleteBufferBytes.GetAsInt64() { return nil } @@ -181,12 +179,11 @@ func (m *DeltaBufferManager) ShouldFlushSegments() []UniqueID { memUsage -= segItem.memorySize if memUsage < Params.DataNodeCfg.FlushDeleteBufferBytes.GetAsInt64() { break - } } - //here we push all selected segment back into the heap - //in order to keep the heap semantically correct + // here we push all selected segment back into the heap + // in order to keep the heap semantically correct m.heapGuard.Lock() for _, segMem := range poppedItems { heap.Push(m.delBufHeap, segMem) @@ -334,7 +331,7 @@ func (ddb *DelDataBuf) Buffer(pks []primaryKey, tss []Timestamp, tr TimeRange, s varCharPk := pks[i].(*varCharPrimaryKey) bufSize += int64(len(varCharPk.Value)) } - //accumulate buf size for timestamp, which is 8 bytes + // accumulate buf size for timestamp, which is 8 bytes bufSize += 8 } @@ -430,13 +427,14 @@ func newBufferData(collSchema *schemapb.CollectionSchema) (*BufferData, error) { limit++ } - //TODO::xige-16 eval vec and string field + // TODO::xige-16 eval vec and string field return &BufferData{ buffer: &InsertData{Data: make(map[UniqueID]storage.FieldData)}, size: 0, limit: limit, tsFrom: math.MaxUint64, - tsTo: 0}, nil + tsTo: 0, + }, nil } func newDelDataBuf(segmentID UniqueID) *DelDataBuf { 
diff --git a/internal/datanode/buffer_test.go b/internal/datanode/buffer_test.go index 742e6ac7f6..784169e407 100644 --- a/internal/datanode/buffer_test.go +++ b/internal/datanode/buffer_test.go @@ -170,7 +170,7 @@ func Test_CompactSegBuff(t *testing.T) { }, delBufHeap: &PriorityQueue{}, } - //1. set compactTo and compactFrom + // 1. set compactTo and compactFrom targetSeg := &Segment{segmentID: 3333} targetSeg.setType(datapb.SegmentType_Flushed) @@ -190,7 +190,7 @@ func Test_CompactSegBuff(t *testing.T) { channelSegments[seg2.segmentID] = seg2 channelSegments[targetSeg.segmentID] = targetSeg - //2. set up deleteDataBuf for seg1 and seg2 + // 2. set up deleteDataBuf for seg1 and seg2 delDataBuf1 := newDelDataBuf(seg1.segmentID) delDataBuf1.EntriesNum++ delDataBuf1.updateStartAndEndPosition(nil, &msgpb.MsgPosition{Timestamp: 50}) @@ -203,12 +203,12 @@ func Test_CompactSegBuff(t *testing.T) { delBufferManager.updateMeta(seg2.segmentID, delDataBuf2) heap.Push(delBufferManager.delBufHeap, delDataBuf2.item) - //3. test compact + // 3. test compact delBufferManager.UpdateCompactedSegments() - //4. expect results in two aspects: - //4.1 compactedFrom segments are removed from delBufferManager - //4.2 compactedTo seg is set properly with correct entriesNum + // 4. expect results in two aspects: + // 4.1 compactedFrom segments are removed from delBufferManager + // 4.2 compactedTo seg is set properly with correct entriesNum _, seg1Exist := delBufferManager.Load(seg1.segmentID) _, seg2Exist := delBufferManager.Load(seg2.segmentID) assert.False(t, seg1Exist) @@ -221,7 +221,7 @@ func Test_CompactSegBuff(t *testing.T) { assert.NotNil(t, targetSegBuf.item) assert.Equal(t, targetSeg.segmentID, targetSegBuf.item.segmentID) - //5. test roll and evict (https://github.com/milvus-io/milvus/issues/20501) + // 5. 
test roll and evict (https://github.com/milvus-io/milvus/issues/20501) delBufferManager.channel.rollDeleteBuffer(targetSeg.segmentID) _, segCompactedToExist := delBufferManager.Load(targetSeg.segmentID) assert.False(t, segCompactedToExist) @@ -271,25 +271,61 @@ func TestUpdateCompactedSegments(t *testing.T) { expectedSegsRemain []UniqueID }{ - {"zero segments", false, - []UniqueID{}, []UniqueID{}, []UniqueID{}}, - {"segment no compaction", false, - []UniqueID{}, []UniqueID{}, []UniqueID{100, 101}}, - {"segment compacted", true, - []UniqueID{200}, []UniqueID{103}, []UniqueID{100, 101}}, - {"segment compacted 100>201", true, - []UniqueID{201}, []UniqueID{100}, []UniqueID{101, 201}}, - {"segment compacted 100+101>201", true, - []UniqueID{201, 201}, []UniqueID{100, 101}, []UniqueID{201}}, - {"segment compacted 100>201, 101>202", true, - []UniqueID{201, 202}, []UniqueID{100, 101}, []UniqueID{201, 202}}, + { + "zero segments", false, + []UniqueID{}, + []UniqueID{}, + []UniqueID{}, + }, + { + "segment no compaction", false, + []UniqueID{}, + []UniqueID{}, + []UniqueID{100, 101}, + }, + { + "segment compacted", true, + []UniqueID{200}, + []UniqueID{103}, + []UniqueID{100, 101}, + }, + { + "segment compacted 100>201", true, + []UniqueID{201}, + []UniqueID{100}, + []UniqueID{101, 201}, + }, + { + "segment compacted 100+101>201", true, + []UniqueID{201, 201}, + []UniqueID{100, 101}, + []UniqueID{201}, + }, + { + "segment compacted 100>201, 101>202", true, + []UniqueID{201, 202}, + []UniqueID{100, 101}, + []UniqueID{201, 202}, + }, // false - {"segment compacted 100>201", false, - []UniqueID{201}, []UniqueID{100}, []UniqueID{101}}, - {"segment compacted 100+101>201", false, - []UniqueID{201, 201}, []UniqueID{100, 101}, []UniqueID{}}, - {"segment compacted 100>201, 101>202", false, - []UniqueID{201, 202}, []UniqueID{100, 101}, []UniqueID{}}, + { + "segment compacted 100>201", false, + []UniqueID{201}, + []UniqueID{100}, + []UniqueID{101}, + }, + { + "segment compacted 100+101>201", false, + []UniqueID{201, 201}, + []UniqueID{100, 101}, + []UniqueID{}, + }, + { + "segment compacted 100>201, 101>202", false, + []UniqueID{201, 202}, + []UniqueID{100, 101}, + []UniqueID{}, + }, } for _, test := range tests { diff --git a/internal/datanode/channel_meta.go b/internal/datanode/channel_meta.go index 48e0d9d88c..e22cf2c40a 100644 --- a/internal/datanode/channel_meta.go +++ b/internal/datanode/channel_meta.go @@ -693,7 +693,6 @@ func (c *ChannelMeta) mergeFlushedSegments(ctx context.Context, seg *Segment, pl if !c.hasSegment(ID, true) || c.hasSegment(ID, false) { inValidSegments = append(inValidSegments, ID) } - } if len(inValidSegments) > 0 { diff --git a/internal/datanode/channel_meta_test.go b/internal/datanode/channel_meta_test.go index aeed646b9a..ec2e095817 100644 --- a/internal/datanode/channel_meta_test.go +++ b/internal/datanode/channel_meta_test.go @@ -203,7 +203,8 @@ func TestChannelMeta_getCollectionAndPartitionID(t *testing.T) { seg.setType(test.segType) channel := &ChannelMeta{ segments: map[UniqueID]*Segment{ - test.segID: &seg}, + test.segID: &seg, + }, } collID, parID, err := channel.getCollectionAndPartitionID(test.segID) @@ -703,11 +704,9 @@ func TestChannelMeta_InterfaceMethod(t *testing.T) { } else { assert.False(t, channel.hasSegment(3, true)) } - }) } }) - } func TestChannelMeta_loadStats(t *testing.T) { @@ -730,7 +729,7 @@ func TestChannelMeta_loadStats(t *testing.T) { partitionID: 2, } - //gen pk stats bytes + // gen pk stats bytes stats := storage.NewPrimaryKeyStats(106, 
int64(schemapb.DataType_Int64), 10) iCodec := storage.NewInsertCodecWithSchema(meta) @@ -752,7 +751,8 @@ func TestChannelMeta_loadStats(t *testing.T) { Binlogs: []*datapb.Binlog{{ /////// LogPath: path.Join(common.SegmentStatslogPath, metautil.JoinIDPath(1, 2, 1, 106, 10)), - }}}}, 0) + }}, + }}, 0) assert.NoError(t, err) // load flushed stats log @@ -767,7 +767,8 @@ func TestChannelMeta_loadStats(t *testing.T) { Binlogs: []*datapb.Binlog{{ /////// LogPath: path.Join(common.SegmentStatslogPath, metautil.JoinIDPath(1, 2, 2, 106), storage.CompoundStatsType.LogIdx()), - }}}}, 0) + }}, + }}, 0) assert.NoError(t, err) }) } @@ -830,7 +831,6 @@ func TestChannelMeta_UpdatePKRange(t *testing.T) { assert.True(t, segNew.isPKExist(pk)) assert.True(t, segNormal.isPKExist(pk)) } - } func TestChannelMeta_ChannelCP(t *testing.T) { @@ -866,7 +866,8 @@ func TestChannelMeta_ChannelCP(t *testing.T) { t.Run("set insertBuffer&deleteBuffer then get", func(t *testing.T) { run := func(curInsertPos, curDeletePos *msgpb.MsgPosition, hisInsertPoss, hisDeletePoss []*msgpb.MsgPosition, - ttPos, expectedPos *msgpb.MsgPosition) { + ttPos, expectedPos *msgpb.MsgPosition, + ) { segmentID := UniqueID(1) channel := newChannel(mockVChannel, collID, nil, rc, cm) channel.chunkManager = &mockDataCM{} diff --git a/internal/datanode/compaction_executor_test.go b/internal/datanode/compaction_executor_test.go index b3b92a7702..107eddcd16 100644 --- a/internal/datanode/compaction_executor_test.go +++ b/internal/datanode/compaction_executor_test.go @@ -20,8 +20,9 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/internal/proto/datapb" ) func TestCompactionExecutor(t *testing.T) { @@ -114,7 +115,6 @@ func TestCompactionExecutor(t *testing.T) { t.FailNow() } }) - } func newMockCompactor(isvalid bool) *mockCompactor { @@ -143,7 +143,6 @@ func (mc *mockCompactor) complete() { } func (mc *mockCompactor) injectDone(success bool) { - } func (mc *mockCompactor) compact() (*datapb.CompactionResult, error) { diff --git a/internal/datanode/compactor.go b/internal/datanode/compactor.go index 25e30663a8..3c5e20e4f5 100644 --- a/internal/datanode/compactor.go +++ b/internal/datanode/compactor.go @@ -25,6 +25,8 @@ import ( "time" "github.com/cockroachdb/errors" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/datanode/allocator" "github.com/milvus-io/milvus/internal/proto/datapb" @@ -40,7 +42,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/timerecord" "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) var ( @@ -96,8 +97,8 @@ func newCompactionTask( fm flushManager, alloc allocator.Allocator, plan *datapb.CompactionPlan, - chunkManager storage.ChunkManager) *compactionTask { - + chunkManager storage.ChunkManager, +) *compactionTask { ctx1, cancel := context.WithCancel(ctx) return &compactionTask{ ctx: ctx1, @@ -152,7 +153,7 @@ func (t *compactionTask) mergeDeltalogs(dBlobs map[UniqueID][]*Blob) (map[interf mergeStart := time.Now() dCodec := storage.NewDeleteCodec() - var pk2ts = make(map[interface{}]Timestamp) + pk2ts := make(map[interface{}]Timestamp) for _, blobs := range dBlobs { _, _, dData, err := dCodec.Deserialize(blobs) @@ -184,7 +185,8 @@ func (t *compactionTask) uploadRemainLog( stats *storage.PrimaryKeyStats, totRows int64, fID2Content map[UniqueID][]interface{}, - fID2Type 
map[UniqueID]schemapb.DataType) (map[UniqueID]*datapb.FieldBinlog, map[UniqueID]*datapb.FieldBinlog, error) { + fID2Type map[UniqueID]schemapb.DataType, +) (map[UniqueID]*datapb.FieldBinlog, map[UniqueID]*datapb.FieldBinlog, error) { var iData *InsertData // remain insert data @@ -220,9 +222,11 @@ func (t *compactionTask) uploadSingleInsertLog( partID UniqueID, meta *etcdpb.CollectionMeta, fID2Content map[UniqueID][]interface{}, - fID2Type map[UniqueID]schemapb.DataType) (map[UniqueID]*datapb.FieldBinlog, error) { + fID2Type map[UniqueID]schemapb.DataType, +) (map[UniqueID]*datapb.FieldBinlog, error) { iData := &InsertData{ - Data: make(map[storage.FieldID]storage.FieldData)} + Data: make(map[storage.FieldID]storage.FieldData), + } for fID, content := range fID2Content { tp, ok := fID2Type[fID] @@ -253,7 +257,8 @@ func (t *compactionTask) merge( targetSegID UniqueID, partID UniqueID, meta *etcdpb.CollectionMeta, - delta map[interface{}]Timestamp) ([]*datapb.FieldBinlog, []*datapb.FieldBinlog, int64, error) { + delta map[interface{}]Timestamp, +) ([]*datapb.FieldBinlog, []*datapb.FieldBinlog, int64, error) { log := log.With(zap.Int64("planID", t.getPlanID())) mergeStart := time.Now() @@ -416,7 +421,7 @@ func (t *compactionTask) merge( } fID2Content[fID] = append(fID2Content[fID], vInter) } - //update pk to new stats log + // update pk to new stats log stats.Update(v.PK) currentRows++ @@ -490,7 +495,6 @@ func (t *compactionTask) compact() (*datapb.CompactionResult, error) { var targetSegID UniqueID var err error switch { - case t.plan.GetType() == datapb.CompactionType_UndefinedCompaction: log.Warn("compact wrong, compaction type undefined") return nil, errCompactionTypeUndifined @@ -624,12 +628,11 @@ func (t *compactionTask) compact() (*datapb.CompactionResult, error) { <-ti.Injected() log.Info("compact inject elapse", zap.Duration("elapse", time.Since(injectStart))) - var dblobs = make(map[UniqueID][]*Blob) + dblobs := make(map[UniqueID][]*Blob) allPath := make([][]string, 0) downloadStart := time.Now() for _, s := range t.plan.GetSegmentBinlogs() { - // Get the number of field binlog files from non-empty segment var binlogNum int for _, b := range s.GetFieldBinlogs() { @@ -728,7 +731,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} var rst storage.FieldData switch schemaDataType { case schemapb.DataType_Bool: - var data = &storage.BoolFieldData{ + data := &storage.BoolFieldData{ Data: make([]bool, 0, len(content)), } @@ -742,7 +745,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_Int8: - var data = &storage.Int8FieldData{ + data := &storage.Int8FieldData{ Data: make([]int8, 0, len(content)), } @@ -756,7 +759,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_Int16: - var data = &storage.Int16FieldData{ + data := &storage.Int16FieldData{ Data: make([]int16, 0, len(content)), } @@ -770,7 +773,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_Int32: - var data = &storage.Int32FieldData{ + data := &storage.Int32FieldData{ Data: make([]int32, 0, len(content)), } @@ -784,7 +787,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_Int64: - var data = &storage.Int64FieldData{ + data := &storage.Int64FieldData{ Data: make([]int64, 0, len(content)), } @@ -798,7 +801,7 @@ func 
interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_Float: - var data = &storage.FloatFieldData{ + data := &storage.FloatFieldData{ Data: make([]float32, 0, len(content)), } @@ -812,7 +815,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_Double: - var data = &storage.DoubleFieldData{ + data := &storage.DoubleFieldData{ Data: make([]float64, 0, len(content)), } @@ -826,7 +829,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_String, schemapb.DataType_VarChar: - var data = &storage.StringFieldData{ + data := &storage.StringFieldData{ Data: make([]string, 0, len(content)), } @@ -840,7 +843,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_JSON: - var data = &storage.JSONFieldData{ + data := &storage.JSONFieldData{ Data: make([][]byte, 0, len(content)), } @@ -854,7 +857,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_FloatVector: - var data = &storage.FloatVectorFieldData{ + data := &storage.FloatVectorFieldData{ Data: []float32{}, } @@ -870,7 +873,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_Float16Vector: - var data = &storage.Float16VectorFieldData{ + data := &storage.Float16VectorFieldData{ Data: []byte{}, } @@ -886,7 +889,7 @@ func interface2FieldData(schemaDataType schemapb.DataType, content []interface{} rst = data case schemapb.DataType_BinaryVector: - var data = &storage.BinaryVectorFieldData{ + data := &storage.BinaryVectorFieldData{ Data: []byte{}, } diff --git a/internal/datanode/compactor_test.go b/internal/datanode/compactor_test.go index 81e8caae09..2822d8caeb 100644 --- a/internal/datanode/compactor_test.go +++ b/internal/datanode/compactor_test.go @@ -139,7 +139,6 @@ func TestCompactionTaskInnerMethods(t *testing.T) { } }) } - }) t.Run("Test mergeDeltalogs", func(t *testing.T) { @@ -219,14 +218,24 @@ func TestCompactionTaskInnerMethods(t *testing.T) { }{ { 0, nil, nil, - 100, []UniqueID{1, 2, 3}, []Timestamp{20000, 30000, 20005}, - 200, []UniqueID{4, 5, 6}, []Timestamp{50000, 50001, 50002}, + 100, + []UniqueID{1, 2, 3}, + []Timestamp{20000, 30000, 20005}, + 200, + []UniqueID{4, 5, 6}, + []Timestamp{50000, 50001, 50002}, 6, "2 segments", }, { - 300, []UniqueID{10, 20}, []Timestamp{20001, 40001}, - 100, []UniqueID{1, 2, 3}, []Timestamp{20000, 30000, 20005}, - 200, []UniqueID{4, 5, 6}, []Timestamp{50000, 50001, 50002}, + 300, + []UniqueID{10, 20}, + []Timestamp{20001, 40001}, + 100, + []UniqueID{1, 2, 3}, + []Timestamp{20000, 30000, 20005}, + 200, + []UniqueID{4, 5, 6}, + []Timestamp{50000, 50001, 50002}, 8, "3 segments", }, } @@ -259,7 +268,6 @@ func TestCompactionTaskInnerMethods(t *testing.T) { }) } }) - }) t.Run("Test merge", func(t *testing.T) { @@ -278,7 +286,6 @@ func TestCompactionTaskInnerMethods(t *testing.T) { alloc.EXPECT().GetGenerator(mock.Anything, mock.Anything).Call.Return(validGeneratorFn, nil) alloc.EXPECT().AllocOne().Return(0, nil) t.Run("Merge without expiration", func(t *testing.T) { - mockbIO := &binlogIO{cm, alloc} paramtable.Get().Save(Params.CommonCfg.EntityExpirationTTL.Key, "0") iData := genInsertDataWithExpiredTS() @@ -306,8 +313,10 @@ func TestCompactionTaskInnerMethods(t *testing.T) { Channel: channel, downloader: mockbIO, uploader: 
mockbIO, done: make(chan struct{}, 1), plan: &datapb.CompactionPlan{ SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ - {SegmentID: 1}}, - }} + {SegmentID: 1}, + }, + }, + } inPaths, statsPaths, numOfRow, err := ct.merge(context.Background(), allPaths, 2, 0, meta, dm) assert.NoError(t, err) assert.Equal(t, int64(2), numOfRow) @@ -348,8 +357,10 @@ func TestCompactionTaskInnerMethods(t *testing.T) { Channel: channel, downloader: mockbIO, uploader: mockbIO, done: make(chan struct{}, 1), plan: &datapb.CompactionPlan{ SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ - {SegmentID: 1}}, - }} + {SegmentID: 1}, + }, + }, + } inPaths, statsPaths, numOfRow, err := ct.merge(context.Background(), allPaths, 2, 0, meta, dm) assert.NoError(t, err) assert.Equal(t, int64(2), numOfRow) @@ -361,7 +372,6 @@ func TestCompactionTaskInnerMethods(t *testing.T) { }) // set Params.DataNodeCfg.BinLogMaxSize.Key = 1 to generate multi binlogs, each has only one row t.Run("Merge without expiration3", func(t *testing.T) { - mockbIO := &binlogIO{cm, alloc} paramtable.Get().Save(Params.CommonCfg.EntityExpirationTTL.Key, "0") BinLogMaxSize := Params.DataNodeCfg.BinLogMaxSize.GetAsInt() @@ -394,8 +404,10 @@ func TestCompactionTaskInnerMethods(t *testing.T) { Channel: channel, downloader: mockbIO, uploader: mockbIO, done: make(chan struct{}, 1), plan: &datapb.CompactionPlan{ SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ - {SegmentID: 1}}, - }} + {SegmentID: 1}, + }, + }, + } inPaths, statsPaths, numOfRow, err := ct.merge(context.Background(), allPaths, 2, 0, meta, dm) assert.NoError(t, err) assert.Equal(t, int64(2), numOfRow) @@ -442,7 +454,8 @@ func TestCompactionTaskInnerMethods(t *testing.T) { plan: &datapb.CompactionPlan{ CollectionTtl: 864000, SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ - {SegmentID: 1}}, + {SegmentID: 1}, + }, }, done: make(chan struct{}, 1), } @@ -482,8 +495,10 @@ func TestCompactionTaskInnerMethods(t *testing.T) { Channel: channel, downloader: mockbIO, uploader: mockbIO, done: make(chan struct{}, 1), plan: &datapb.CompactionPlan{ SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ - {SegmentID: 1}}, - }} + {SegmentID: 1}, + }, + }, + } _, _, _, err = ct.merge(context.Background(), allPaths, 2, 0, &etcdpb.CollectionMeta{ Schema: &schemapb.CollectionSchema{Fields: []*schemapb.FieldSchema{ {DataType: schemapb.DataType_FloatVector, TypeParams: []*commonpb.KeyValuePair{ @@ -526,7 +541,8 @@ func TestCompactionTaskInnerMethods(t *testing.T) { {DataType: schemapb.DataType_FloatVector, TypeParams: []*commonpb.KeyValuePair{ {Key: common.DimKey, Value: "bad_dim"}, }}, - }}}, dm) + }}, + }, dm) assert.Error(t, err) }) }) @@ -615,12 +631,13 @@ func TestCompactionTaskInnerMethods(t *testing.T) { SegmentBinlogs: []*datapb.CompactionSegmentBinlogs{ { SegmentID: 1, - }}, + }, + }, }, done: make(chan struct{}, 1), } - //segment not in channel + // segment not in channel _, err := ct.getNumRows() assert.Error(t, err) }) @@ -1019,7 +1036,7 @@ func (mfm *mockFlushManager) isFull() bool { func (mfm *mockFlushManager) injectFlush(injection *taskInjection, segments ...UniqueID) { go func() { time.Sleep(time.Second * time.Duration(mfm.sleepSeconds)) - //injection.injected <- struct{}{} + // injection.injected <- struct{}{} close(injection.injected) <-injection.injectOver mfm.injectOverCount.Lock() diff --git a/internal/datanode/data_node.go b/internal/datanode/data_node.go index c5a874f9a7..de98ccd2ad 100644 --- a/internal/datanode/data_node.go +++ b/internal/datanode/data_node.go @@ -98,7 +98,7 @@ type 
DataNode struct { rootCoord types.RootCoord dataCoord types.DataCoord - //call once + // call once initOnce sync.Once startOnce sync.Once stopOnce sync.Once @@ -257,7 +257,6 @@ func (node *DataNode) Init() error { node.factory.Init(Params) log.Info("DataNode server init succeeded", zap.String("MsgChannelSubName", Params.CommonCfg.DataNodeSubName.GetValue())) - }) return initError } @@ -340,7 +339,6 @@ func (node *DataNode) Start() error { } chunkManager, err := node.factory.NewPersistentStorageChunkManager(node.ctx) - if err != nil { startErr = err return @@ -365,7 +363,6 @@ func (node *DataNode) Start() error { go node.flowgraphManager.start() node.UpdateStateCode(commonpb.StateCode_Healthy) - }) return startErr } diff --git a/internal/datanode/data_sync_service.go b/internal/datanode/data_sync_service.go index 584128f520..ef76455a0f 100644 --- a/internal/datanode/data_sync_service.go +++ b/internal/datanode/data_sync_service.go @@ -87,7 +87,6 @@ func newDataSyncService( serverID int64, timetickSender *timeTickSender, ) (*dataSyncService, error) { - if channel == nil { return nil, errors.New("Nil input") } @@ -223,7 +222,7 @@ func (dsService *dataSyncService) initNodes(initCtx context.Context, vchanInfo * return err } - //tickler will update addSegment progress to watchInfo + // tickler will update addSegment progress to watchInfo tickler.watch() defer tickler.stop() futures := make([]*conc.Future[any], 0, len(unflushedSegmentInfos)+len(flushedSegmentInfos)) @@ -258,7 +257,8 @@ func (dsService *dataSyncService) initNodes(initCtx context.Context, vchanInfo * statsBinLogs: segment.Statslogs, binLogs: segment.GetBinlogs(), endPos: segment.GetDmlPosition(), - recoverTs: vchanInfo.GetSeekPosition().GetTimestamp()}); err != nil { + recoverTs: vchanInfo.GetSeekPosition().GetTimestamp(), + }); err != nil { return nil, err } tickler.inc() @@ -400,7 +400,7 @@ func (dsService *dataSyncService) initNodes(initCtx context.Context, vchanInfo * return err } - //deleteNode + // deleteNode err = dsService.fg.SetEdges(deleteNode.Name(), []string{ttNode.Name()}, ) diff --git a/internal/datanode/data_sync_service_test.go b/internal/datanode/data_sync_service_test.go index e339bcea54..93f8621a7b 100644 --- a/internal/datanode/data_sync_service_test.go +++ b/internal/datanode/data_sync_service_test.go @@ -27,7 +27,6 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" @@ -45,6 +44,7 @@ import ( "github.com/milvus-io/milvus/pkg/mq/msgdispatcher" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -118,35 +118,44 @@ type testInfo struct { } func TestDataSyncService_newDataSyncService(t *testing.T) { - ctx := context.Background() tests := []*testInfo{ - {true, false, &mockMsgStreamFactory{false, true}, + { + true, false, &mockMsgStreamFactory{false, true}, 0, "by-dev-rootcoord-dml-test_v0", 0, 0, "", 0, 0, 0, "", 0, - "SetParamsReturnError"}, - {true, false, &mockMsgStreamFactory{true, true}, + "SetParamsReturnError", + }, + { + true, false, &mockMsgStreamFactory{true, true}, 0, "by-dev-rootcoord-dml-test_v0", 1, 0, "", 0, 1, 1, "", 0, - "CollID 0 mismach with seginfo collID 1"}, - {true, false, &mockMsgStreamFactory{true, true}, + "CollID 0 mismach with seginfo collID 1", + }, + { + true, false, &mockMsgStreamFactory{true, true}, 1, 
"by-dev-rootcoord-dml-test_v1", 1, 0, "by-dev-rootcoord-dml-test_v2", 0, 1, 1, "by-dev-rootcoord-dml-test_v3", 0, - "chanName c1 mismach with seginfo chanName c2"}, - {true, false, &mockMsgStreamFactory{true, true}, + "chanName c1 mismach with seginfo chanName c2", + }, + { + true, false, &mockMsgStreamFactory{true, true}, 1, "by-dev-rootcoord-dml-test_v1", 1, 0, "by-dev-rootcoord-dml-test_v1", 0, 1, 1, "by-dev-rootcoord-dml-test_v2", 0, - "add normal segments"}, - {true, false, &mockMsgStreamFactory{true, true}, + "add normal segments", + }, + { + true, false, &mockMsgStreamFactory{true, true}, 1, "by-dev-rootcoord-dml-test_v1", 1, 1, "by-dev-rootcoord-dml-test_v1", 0, 1, 2, "by-dev-rootcoord-dml-test_v1", 0, - "add un-flushed and flushed segments"}, + "add un-flushed and flushed segments", + }, } cm := storage.NewLocalChunkManager(storage.RootPath(dataSyncServiceTestDir)) defer cm.RemoveWithPrefix(ctx, cm.RootPath()) @@ -194,7 +203,6 @@ func TestDataSyncService_newDataSyncService(t *testing.T) { } }) } - } // NOTE: start pulsar before test @@ -558,7 +566,7 @@ func genBytes() (rawData []byte) { const N = 1 // Float vector - var fvector = [DIM]float32{1, 2} + fvector := [DIM]float32{1, 2} for _, ele := range fvector { buf := make([]byte, 4) common.Endian.PutUint32(buf, math.Float32bits(ele)) @@ -568,11 +576,11 @@ func genBytes() (rawData []byte) { // Binary vector // Dimension of binary vector is 32 // size := 4, = 32 / 8 - var bvector = []byte{255, 255, 255, 0} + bvector := []byte{255, 255, 255, 0} rawData = append(rawData, bvector...) // Bool - var fieldBool = true + fieldBool := true buf := new(bytes.Buffer) if err := binary.Write(buf, common.Endian, fieldBool); err != nil { panic(err) @@ -597,12 +605,12 @@ func TestBytesReader(t *testing.T) { // Bytes Reader is able to recording the position rawDataReader := bytes.NewReader(rawData) - var fvector = make([]float32, 2) + fvector := make([]float32, 2) err := binary.Read(rawDataReader, common.Endian, &fvector) assert.NoError(t, err) assert.ElementsMatch(t, fvector, []float32{1, 2}) - var bvector = make([]byte, 4) + bvector := make([]byte, 4) err = binary.Read(rawDataReader, common.Endian, &bvector) assert.NoError(t, err) assert.ElementsMatch(t, bvector, []byte{255, 255, 255, 0}) @@ -623,7 +631,7 @@ func TestGetSegmentInfos(t *testing.T) { dsService := &dataSyncService{ dataCoord: dataCoord, } - var ctx = context.Background() + ctx := context.Background() segmentInfos, err := dsService.getSegmentInfos(ctx, []int64{1}) assert.NoError(t, err) assert.Equal(t, 1, len(segmentInfos)) @@ -680,7 +688,8 @@ func TestClearGlobalFlushingCache(t *testing.T) { collID: 1, partitionID: 1, startPos: &msgpb.MsgPosition{}, - endPos: &msgpb.MsgPosition{}}) + endPos: &msgpb.MsgPosition{}, + }) assert.NoError(t, err) err = channel.addSegment( diff --git a/internal/datanode/event_manager.go b/internal/datanode/event_manager.go index 3c44648ab6..e1c0e6e09c 100644 --- a/internal/datanode/event_manager.go +++ b/internal/datanode/event_manager.go @@ -239,7 +239,8 @@ const ( ) func newChannelEventManager(handlePut func(*datapb.ChannelWatchInfo, int64) error, - handleDel func(string), retryInterval time.Duration) *channelEventManager { + handleDel func(string), retryInterval time.Duration, +) *channelEventManager { return &channelEventManager{ eventChan: make(chan event, 10), closeChan: make(chan struct{}), diff --git a/internal/datanode/event_manager_test.go b/internal/datanode/event_manager_test.go index 9a1bbc5d17..cd1732c8e0 100644 --- 
a/internal/datanode/event_manager_test.go +++ b/internal/datanode/event_manager_test.go @@ -161,12 +161,11 @@ func TestWatchChannel(t *testing.T) { err = kv.RemoveWithPrefix(fmt.Sprintf("%s/%d", Params.CommonCfg.DataCoordWatchSubPath.GetValue(), paramtable.GetNodeID())) assert.NoError(t, err) - //TODO there is not way to sync Release done, use sleep for now + // TODO there is not way to sync Release done, use sleep for now time.Sleep(100 * time.Millisecond) exist = node.flowgraphManager.exist(ch) assert.False(t, exist) - }) t.Run("handle watch info failed", func(t *testing.T) { @@ -414,7 +413,6 @@ func parseWatchInfo(key string, data []byte) (*datapb.ChannelWatchInfo, error) { watchInfo := datapb.ChannelWatchInfo{} if err := proto.Unmarshal(data, &watchInfo); err != nil { return nil, fmt.Errorf("invalid event data: fail to parse ChannelWatchInfo, key: %s, err: %v", key, err) - } if watchInfo.Vchan == nil { @@ -457,7 +455,6 @@ func TestEventTickler(t *testing.T) { } } } - }() tickler.inc() diff --git a/internal/datanode/flow_graph_dd_node.go b/internal/datanode/flow_graph_dd_node.go index ddb21c1aa2..5487eebe36 100644 --- a/internal/datanode/flow_graph_dd_node.go +++ b/internal/datanode/flow_graph_dd_node.go @@ -99,7 +99,7 @@ func (ddn *ddNode) Operate(in []Msg) []Msg { } if msMsg.IsCloseMsg() { - var fgMsg = flowGraphMsg{ + fgMsg := flowGraphMsg{ BaseMsg: flowgraph.NewBaseMsg(true), insertMessages: make([]*msgstream.InsertMsg, 0), timeRange: TimeRange{ @@ -133,7 +133,7 @@ func (ddn *ddNode) Operate(in []Msg) []Msg { } }() - var fgMsg = flowGraphMsg{ + fgMsg := flowGraphMsg{ insertMessages: make([]*msgstream.InsertMsg, 0), timeRange: TimeRange{ timestampMin: msMsg.TimestampMin(), @@ -279,8 +279,8 @@ func (ddn *ddNode) isDropped(segID UniqueID) bool { func (ddn *ddNode) Close() {} func newDDNode(ctx context.Context, collID UniqueID, vChannelName string, droppedSegmentIDs []UniqueID, - sealedSegments []*datapb.SegmentInfo, growingSegments []*datapb.SegmentInfo, compactor *compactionExecutor) (*ddNode, error) { - + sealedSegments []*datapb.SegmentInfo, growingSegments []*datapb.SegmentInfo, compactor *compactionExecutor, +) (*ddNode, error) { baseNode := BaseNode{} baseNode.SetMaxQueueLength(Params.DataNodeCfg.FlowGraphMaxQueueLength.GetAsInt32()) baseNode.SetMaxParallelism(Params.DataNodeCfg.FlowGraphMaxParallelism.GetAsInt32()) diff --git a/internal/datanode/flow_graph_dd_node_test.go b/internal/datanode/flow_graph_dd_node_test.go index 9a8a3dd4e8..f191c34e8e 100644 --- a/internal/datanode/flow_graph_dd_node_test.go +++ b/internal/datanode/flow_graph_dd_node_test.go @@ -48,9 +48,11 @@ func TestFlowGraph_DDNode_newDDNode(t *testing.T) { []*datapb.SegmentInfo{ getSegmentInfo(100, 10000), getSegmentInfo(101, 10000), - getSegmentInfo(102, 10000)}, + getSegmentInfo(102, 10000), + }, []*datapb.SegmentInfo{ - getSegmentInfo(200, 10000)}, + getSegmentInfo(200, 10000), + }, }, { "0 sealed segments and 0 growing segment", @@ -94,12 +96,18 @@ func TestFlowGraph_DDNode_Operate(t *testing.T) { in []Msg description string }{ - {[]Msg{}, - "Invalid input length == 0"}, - {[]Msg{&flowGraphMsg{}, &flowGraphMsg{}, &flowGraphMsg{}}, - "Invalid input length == 3"}, - {[]Msg{&flowGraphMsg{}}, - "Invalid input length == 1 but input message is not msgStreamMsg"}, + { + []Msg{}, + "Invalid input length == 0", + }, + { + []Msg{&flowGraphMsg{}, &flowGraphMsg{}, &flowGraphMsg{}}, + "Invalid input length == 3", + }, + { + []Msg{&flowGraphMsg{}}, + "Invalid input length == 1 but input message is not msgStreamMsg", + }, 
} for _, test := range invalidInTests { @@ -117,10 +125,14 @@ func TestFlowGraph_DDNode_Operate(t *testing.T) { description string }{ - {1, 1, 1, - "DropCollectionMsg collID == ddNode collID"}, - {1, 2, 0, - "DropCollectionMsg collID != ddNode collID"}, + { + 1, 1, 1, + "DropCollectionMsg collID == ddNode collID", + }, + { + 1, 2, 0, + "DropCollectionMsg collID != ddNode collID", + }, } for _, test := range tests { @@ -164,10 +176,16 @@ func TestFlowGraph_DDNode_Operate(t *testing.T) { description string }{ - {1, 1, 101, []UniqueID{101}, - "DropCollectionMsg collID == ddNode collID"}, - {1, 2, 101, []UniqueID{}, - "DropCollectionMsg collID != ddNode collID"}, + { + 1, 1, 101, + []UniqueID{101}, + "DropCollectionMsg collID == ddNode collID", + }, + { + 1, 2, 101, + []UniqueID{}, + "DropCollectionMsg collID != ddNode collID", + }, } for _, test := range tests { @@ -195,15 +213,12 @@ func TestFlowGraph_DDNode_Operate(t *testing.T) { fgMsg, ok := rt[0].(*flowGraphMsg) assert.True(t, ok) assert.ElementsMatch(t, test.expectOutput, fgMsg.dropPartitions) - }) } }) t.Run("Test DDNode Operate and filter insert msg", func(t *testing.T) { - var ( - collectionID UniqueID = 1 - ) + var collectionID UniqueID = 1 // Prepare ddNode states ddn := ddNode{ ctx: context.Background(), @@ -260,7 +275,6 @@ func TestFlowGraph_DDNode_Operate(t *testing.T) { }) } }) - } func TestFlowGraph_DDNode_filterMessages(t *testing.T) { @@ -274,19 +288,24 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { inMsg *msgstream.InsertMsg expected bool }{ - {"test dropped segments true", + { + "test dropped segments true", []UniqueID{100}, nil, nil, getInsertMsg(100, 10000), - true}, - {"test dropped segments true 2", + true, + }, + { + "test dropped segments true 2", []UniqueID{100, 101, 102}, nil, nil, getInsertMsg(102, 10000), - true}, - {"test sealed segments msgTs <= segmentTs true", + true, + }, + { + "test sealed segments msgTs <= segmentTs true", []UniqueID{}, map[UniqueID]*datapb.SegmentInfo{ 200: getSegmentInfo(200, 50000), @@ -294,8 +313,10 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { }, nil, getInsertMsg(200, 10000), - true}, - {"test sealed segments msgTs <= segmentTs true", + true, + }, + { + "test sealed segments msgTs <= segmentTs true", []UniqueID{}, map[UniqueID]*datapb.SegmentInfo{ 200: getSegmentInfo(200, 50000), @@ -303,8 +324,10 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { }, nil, getInsertMsg(200, 50000), - true}, - {"test sealed segments msgTs > segmentTs false", + true, + }, + { + "test sealed segments msgTs > segmentTs false", []UniqueID{}, map[UniqueID]*datapb.SegmentInfo{ 200: getSegmentInfo(200, 50000), @@ -312,8 +335,10 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { }, nil, getInsertMsg(222, 70000), - false}, - {"test growing segments msgTs <= segmentTs true", + false, + }, + { + "test growing segments msgTs <= segmentTs true", []UniqueID{}, nil, map[UniqueID]*datapb.SegmentInfo{ @@ -321,8 +346,10 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { 300: getSegmentInfo(300, 50000), }, getInsertMsg(200, 10000), - true}, - {"test growing segments msgTs > segmentTs false", + true, + }, + { + "test growing segments msgTs > segmentTs false", []UniqueID{}, nil, map[UniqueID]*datapb.SegmentInfo{ @@ -330,8 +357,10 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { 300: getSegmentInfo(300, 50000), }, getInsertMsg(200, 70000), - false}, - {"test not exist", + false, + }, + { + "test not exist", []UniqueID{}, map[UniqueID]*datapb.SegmentInfo{ 400: 
getSegmentInfo(500, 50000), @@ -342,14 +371,17 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { 300: getSegmentInfo(300, 50000), }, getInsertMsg(111, 70000), - false}, + false, + }, // for pChannel reuse on same collection - {"test insert msg with different channelName", + { + "test insert msg with different channelName", []UniqueID{100}, nil, nil, getInsertMsgWithChannel(100, 10000, anotherChannelName), - true}, + true, + }, } for _, test := range tests { @@ -364,7 +396,6 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { // Test got := ddn.tryToFilterSegmentInsertMessages(test.inMsg) assert.Equal(t, test.expected, got) - }) } @@ -380,33 +411,39 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { inMsg *msgstream.InsertMsg msgFiltered bool }{ - {"msgTssegTs", + { + "msgTs>segTs", false, 50000, 10000, map[UniqueID]*datapb.SegmentInfo{ 100: getSegmentInfo(100, 70000), - 101: getSegmentInfo(101, 50000)}, + 101: getSegmentInfo(101, 50000), + }, getInsertMsg(300, 60000), false, }, @@ -440,27 +477,33 @@ func TestFlowGraph_DDNode_filterMessages(t *testing.T) { inMsg *msgstream.InsertMsg msgFiltered bool }{ - {"msgTssegTs", + { + "msgTs>segTs", false, map[UniqueID]*datapb.SegmentInfo{ 100: getSegmentInfo(100, 50000), - 101: getSegmentInfo(101, 50000)}, + 101: getSegmentInfo(101, 50000), + }, getInsertMsg(100, 60000), false, }, @@ -497,16 +540,31 @@ func TestFlowGraph_DDNode_isDropped(t *testing.T) { description string }{ - {[]*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, 1, true, - "Input seg 1 in droppedSegs{1,2,3}"}, - {[]*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, 2, true, - "Input seg 2 in droppedSegs{1,2,3}"}, - {[]*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, 3, true, - "Input seg 3 in droppedSegs{1,2,3}"}, - {[]*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, 4, false, - "Input seg 4 not in droppedSegs{1,2,3}"}, - {[]*datapb.SegmentInfo{}, 5, false, - "Input seg 5, no droppedSegs {}"}, + { + []*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, + 1, true, + "Input seg 1 in droppedSegs{1,2,3}", + }, + { + []*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, + 2, true, + "Input seg 2 in droppedSegs{1,2,3}", + }, + { + []*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, + 3, true, + "Input seg 3 in droppedSegs{1,2,3}", + }, + { + []*datapb.SegmentInfo{getSegmentInfo(1, 0), getSegmentInfo(2, 0), getSegmentInfo(3, 0)}, + 4, false, + "Input seg 4 not in droppedSegs{1,2,3}", + }, + { + []*datapb.SegmentInfo{}, + 5, false, + "Input seg 5, no droppedSegs {}", + }, } for _, test := range tests { diff --git a/internal/datanode/flow_graph_delete_node.go b/internal/datanode/flow_graph_delete_node.go index 19b18a3bc7..8bed430d23 100644 --- a/internal/datanode/flow_graph_delete_node.go +++ b/internal/datanode/flow_graph_delete_node.go @@ -178,7 +178,8 @@ func (dn *deleteNode) bufferDeleteMsg(msg *msgstream.DeleteMsg, tr TimeRange, st // If the key may exist in the segment, returns it in map. // If the key not exist in the segment, the segment is filter out. 
func (dn *deleteNode) filterSegmentByPK(partID UniqueID, pks []primaryKey, tss []Timestamp) ( - map[UniqueID][]primaryKey, map[UniqueID][]uint64) { + map[UniqueID][]primaryKey, map[UniqueID][]uint64, +) { segID2Pks := make(map[UniqueID][]primaryKey) segID2Tss := make(map[UniqueID][]uint64) segments := dn.channel.filterSegments(partID) diff --git a/internal/datanode/flow_graph_delete_node_test.go b/internal/datanode/flow_graph_delete_node_test.go index 0d237f1efc..87faf6806a 100644 --- a/internal/datanode/flow_graph_delete_node_test.go +++ b/internal/datanode/flow_graph_delete_node_test.go @@ -140,12 +140,18 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { in []Msg desc string }{ - {[]Msg{}, - "Invalid input length == 0"}, - {[]Msg{&flowGraphMsg{}, &flowGraphMsg{}, &flowGraphMsg{}}, - "Invalid input length == 3"}, - {[]Msg{&flowgraph.MsgStreamMsg{}}, - "Invalid input length == 1 but input message is not flowGraphMsg"}, + { + []Msg{}, + "Invalid input length == 0", + }, + { + []Msg{&flowGraphMsg{}, &flowGraphMsg{}, &flowGraphMsg{}}, + "Invalid input length == 3", + }, + { + []Msg{&flowgraph.MsgStreamMsg{}}, + "Invalid input length == 1 but input message is not flowGraphMsg", + }, } for _, test := range invalidInTests { @@ -399,7 +405,7 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { }) t.Run("Test deleteNode auto flush function", func(t *testing.T) { - //for issue + // for issue ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second) defer cancel() @@ -422,8 +428,8 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { delNode, err := newDeleteNode(ctx, mockFlushManager, delBufManager, make(chan string, 1), c) assert.NoError(t, err) - //2. here we set flushing segments inside fgmsg to empty - //in order to verify the validity of auto flush function + // 2. here we set flushing segments inside fgmsg to empty + // in order to verify the validity of auto flush function msg := genFlowGraphDeleteMsg(int64Pks, chanName) // delete has to match segment partition ID @@ -433,9 +439,9 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { msg.segmentsToSync = []UniqueID{} var fgMsg flowgraph.Msg = &msg - //1. here we set buffer bytes to a relatively high level - //and the sum of memory consumption in this case is 208 - //so no segments will be flushed + // 1. here we set buffer bytes to a relatively high level + // and the sum of memory consumption in this case is 208 + // so no segments will be flushed paramtable.Get().Save(Params.DataNodeCfg.FlushDeleteBufferBytes.Key, "300") fgMsg.(*flowGraphMsg).segmentsToSync = delNode.delBufferManager.ShouldFlushSegments() delNode.Operate([]flowgraph.Msg{fgMsg}) @@ -443,10 +449,10 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { assert.Equal(t, int64(208), delNode.delBufferManager.usedMemory.Load()) assert.Equal(t, 5, delNode.delBufferManager.delBufHeap.Len()) - //3. note that the whole memory size used by 5 segments will be 208 - //so when setting delete buffer size equal to 200 - //there will only be one segment to be flushed then the - //memory consumption will be reduced to 160(under 200) + // 3. 
note that the whole memory size used by 5 segments will be 208 + // so when setting delete buffer size equal to 200 + // there will only be one segment to be flushed then the + // memory consumption will be reduced to 160(under 200) msg.deleteMessages = []*msgstream.DeleteMsg{} msg.segmentsToSync = []UniqueID{} paramtable.Get().Save(Params.DataNodeCfg.FlushDeleteBufferBytes.Key, "200") @@ -456,17 +462,17 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { assert.Equal(t, int64(160), delNode.delBufferManager.usedMemory.Load()) assert.Equal(t, 4, delNode.delBufferManager.delBufHeap.Len()) - //4. there is no new delete msg and delBufferSize is still 200 - //we expect there will not be any auto flush del + // 4. there is no new delete msg and delBufferSize is still 200 + // we expect there will not be any auto flush del fgMsg.(*flowGraphMsg).segmentsToSync = delNode.delBufferManager.ShouldFlushSegments() delNode.Operate([]flowgraph.Msg{fgMsg}) assert.Equal(t, 1, len(mockFlushManager.flushedSegIDs)) assert.Equal(t, int64(160), delNode.delBufferManager.usedMemory.Load()) assert.Equal(t, 4, delNode.delBufferManager.delBufHeap.Len()) - //5. we reset buffer bytes to 150, then we expect there would be one more - //segment which is 48 in size to be flushed, so the remained del memory size - //will be 112 + // 5. we reset buffer bytes to 150, then we expect there would be one more + // segment which is 48 in size to be flushed, so the remained del memory size + // will be 112 paramtable.Get().Save(Params.DataNodeCfg.FlushDeleteBufferBytes.Key, "150") fgMsg.(*flowGraphMsg).segmentsToSync = delNode.delBufferManager.ShouldFlushSegments() delNode.Operate([]flowgraph.Msg{fgMsg}) @@ -474,8 +480,8 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { assert.Equal(t, int64(112), delNode.delBufferManager.usedMemory.Load()) assert.Equal(t, 3, delNode.delBufferManager.delBufHeap.Len()) - //6. we reset buffer bytes to 60, then most of the segments will be flushed - //except for the smallest entry with size equaling to 32 + // 6. we reset buffer bytes to 60, then most of the segments will be flushed + // except for the smallest entry with size equaling to 32 paramtable.Get().Save(Params.DataNodeCfg.FlushDeleteBufferBytes.Key, "60") fgMsg.(*flowGraphMsg).segmentsToSync = delNode.delBufferManager.ShouldFlushSegments() delNode.Operate([]flowgraph.Msg{fgMsg}) @@ -483,9 +489,9 @@ func TestFlowGraphDeleteNode_Operate(t *testing.T) { assert.Equal(t, int64(32), delNode.delBufferManager.usedMemory.Load()) assert.Equal(t, 1, delNode.delBufferManager.delBufHeap.Len()) - //7. we reset buffer bytes to 20, then as all segment-memory consumption - //is more than 20, so all five segments will be flushed and the remained - //del memory will be lowered to zero + // 7. 
we reset buffer bytes to 20, then as all segment-memory consumption + // is more than 20, so all five segments will be flushed and the remained + // del memory will be lowered to zero paramtable.Get().Save(Params.DataNodeCfg.FlushDeleteBufferBytes.Key, "20") fgMsg.(*flowGraphMsg).segmentsToSync = delNode.delBufferManager.ShouldFlushSegments() delNode.Operate([]flowgraph.Msg{fgMsg}) diff --git a/internal/datanode/flow_graph_dmstream_input_node_test.go b/internal/datanode/flow_graph_dmstream_input_node_test.go index a604e7c2fa..045b411575 100644 --- a/internal/datanode/flow_graph_dmstream_input_node_test.go +++ b/internal/datanode/flow_graph_dmstream_input_node_test.go @@ -60,8 +60,7 @@ func (mm *mockMsgStreamFactory) NewMsgStreamDisposer(ctx context.Context) func([ return nil } -type mockTtMsgStream struct { -} +type mockTtMsgStream struct{} func (mtm *mockTtMsgStream) Close() {} diff --git a/internal/datanode/flow_graph_insert_buffer_node.go b/internal/datanode/flow_graph_insert_buffer_node.go index 8a65747677..46bcffa337 100644 --- a/internal/datanode/flow_graph_insert_buffer_node.go +++ b/internal/datanode/flow_graph_insert_buffer_node.go @@ -310,7 +310,7 @@ type syncTask struct { } func (ibNode *insertBufferNode) FillInSyncTasks(fgMsg *flowGraphMsg, seg2Upload []UniqueID) map[UniqueID]*syncTask { - var syncTasks = make(map[UniqueID]*syncTask) + syncTasks := make(map[UniqueID]*syncTask) if fgMsg.dropCollection { // All segments in the collection will be synced, not matter empty buffer or not @@ -378,10 +378,10 @@ func (ibNode *insertBufferNode) FillInSyncTasks(fgMsg *flowGraphMsg, seg2Upload } // sync delete - //here we adopt a quite radical strategy: - //every time we make sure that the N biggest delDataBuf can be flushed - //when memsize usage reaches a certain level - //the aim for taking all these actions is to guarantee that the memory consumed by delBuf will not exceed a limit + // here we adopt a quite radical strategy: + // every time we make sure that the N biggest delDataBuf can be flushed + // when memsize usage reaches a certain level + // the aim for taking all these actions is to guarantee that the memory consumed by delBuf will not exceed a limit segmentsToFlush := ibNode.delBufferManager.ShouldFlushSegments() for _, segID := range segmentsToFlush { syncTasks[segID] = &syncTask{ @@ -544,7 +544,6 @@ func (ibNode *insertBufferNode) Sync(fgMsg *flowGraphMsg, seg2Upload []UniqueID, func (ibNode *insertBufferNode) addSegmentAndUpdateRowNum(insertMsgs []*msgstream.InsertMsg, startPos, endPos *msgpb.MsgPosition) (seg2Upload []UniqueID, err error) { uniqueSeg := make(map[UniqueID]int64) for _, msg := range insertMsgs { - currentSegID := msg.GetSegmentID() collID := msg.GetCollectionID() partitionID := msg.GetPartitionID() @@ -669,7 +668,6 @@ func (ibNode *insertBufferNode) getTimestampRange(tsData *storage.Int64FieldData // WriteTimeTick writes timetick once insertBufferNode operates. 
func (ibNode *insertBufferNode) WriteTimeTick(ts Timestamp, segmentIDs []int64) { - select { case resendTTMsg := <-ibNode.resendTTChan: log.Info("resend TT msg received in insertBufferNode", @@ -702,8 +700,8 @@ func (ibNode *insertBufferNode) getCollectionandPartitionIDbySegID(segmentID Uni } func newInsertBufferNode(ctx context.Context, collID UniqueID, delBufManager *DeltaBufferManager, flushCh <-chan flushMsg, resendTTCh <-chan resendTTMsg, - fm flushManager, flushingSegCache *Cache, config *nodeConfig, timeTickManager *timeTickSender) (*insertBufferNode, error) { - + fm flushManager, flushingSegCache *Cache, config *nodeConfig, timeTickManager *timeTickSender, +) (*insertBufferNode, error) { baseNode := BaseNode{} baseNode.SetMaxQueueLength(config.maxQueueLength) baseNode.SetMaxParallelism(config.maxParallelism) @@ -726,7 +724,7 @@ func newInsertBufferNode(ctx context.Context, collID UniqueID, delBufManager *De }, nil } - //input stream, data node time tick + // input stream, data node time tick wTt, err := config.msFactory.NewMsgStream(ctx) if err != nil { return nil, err diff --git a/internal/datanode/flow_graph_insert_buffer_node_test.go b/internal/datanode/flow_graph_insert_buffer_node_test.go index 88945c4b51..d1a005d639 100644 --- a/internal/datanode/flow_graph_insert_buffer_node_test.go +++ b/internal/datanode/flow_graph_insert_buffer_node_test.go @@ -142,12 +142,18 @@ func TestFlowGraphInsertBufferNode_Operate(t *testing.T) { in []Msg description string }{ - {[]Msg{}, - "Invalid input length == 0"}, - {[]Msg{&flowGraphMsg{}, &flowGraphMsg{}, &flowGraphMsg{}}, - "Invalid input length == 3"}, - {[]Msg{&mockMsg{}}, - "Invalid input length == 1 but input message is not flowGraphMsg"}, + { + []Msg{}, + "Invalid input length == 0", + }, + { + []Msg{&flowGraphMsg{}, &flowGraphMsg{}, &flowGraphMsg{}}, + "Invalid input length == 3", + }, + { + []Msg{&mockMsg{}}, + "Invalid input length == 1 but input message is not flowGraphMsg", + }, } for _, test := range invalidInTests { @@ -490,7 +496,6 @@ func TestFlowGraphInsertBufferNode_AutoFlush(t *testing.T) { // // assert.Equal(t, int64(1), iBNode.insertBuffer.size(UniqueID(i+1))) // } } - }) t.Run("Auto with manual flush", func(t *testing.T) { @@ -568,7 +573,6 @@ func TestFlowGraphInsertBufferNode_AutoFlush(t *testing.T) { assert.Equal(t, false, pack.flushed) } } - }) } @@ -929,7 +933,6 @@ func (s *InsertBufferNodeSuite) TestFillInSyncTasks() { s.Assert().True(task.auto) } }) - } func TestInsertBufferNodeSuite(t *testing.T) { @@ -1032,7 +1035,7 @@ func TestInsertBufferNode_bufferInsertMsg(t *testing.T) { for _, msg := range inMsg.insertMessages { msg.EndTimestamp = 101 // ts valid - msg.RowIDs = []int64{} //misaligned data + msg.RowIDs = []int64{} // misaligned data err = iBNode.bufferInsertMsg(msg, &msgpb.MsgPosition{}, &msgpb.MsgPosition{}) assert.Error(t, err) } @@ -1078,7 +1081,6 @@ func TestInsertBufferNode_updateSegmentStates(te *testing.T) { } func TestInsertBufferNode_getTimestampRange(t *testing.T) { - type testCase struct { tag string diff --git a/internal/datanode/flow_graph_manager.go b/internal/datanode/flow_graph_manager.go index 5eaaeea842..9213a60458 100644 --- a/internal/datanode/flow_graph_manager.go +++ b/internal/datanode/flow_graph_manager.go @@ -124,7 +124,8 @@ func (fm *flowgraphManager) addAndStart(dn *DataNode, vchan *datapb.VchannelInfo dataSyncService, err := getDataSyncService(context.TODO(), dn, &datapb.ChannelWatchInfo{ Schema: schema, - Vchan: vchan}, tickler) + Vchan: vchan, + }, tickler) if err != nil { 
log.Warn("fail to create new DataSyncService", zap.Error(err)) return err diff --git a/internal/datanode/flow_graph_manager_test.go b/internal/datanode/flow_graph_manager_test.go index cff42e84f7..22ffcbf925 100644 --- a/internal/datanode/flow_graph_manager_test.go +++ b/internal/datanode/flow_graph_manager_test.go @@ -204,10 +204,16 @@ func TestFlowGraphManager(t *testing.T) { memorySizes []int64 expectNeedToSync []bool }{ - {"test over the watermark", 100, 0.5, - []int64{15, 16, 17, 18}, []bool{false, false, false, true}}, - {"test below the watermark", 100, 0.5, - []int64{1, 2, 3, 4}, []bool{false, false, false, false}}, + { + "test over the watermark", 100, 0.5, + []int64{15, 16, 17, 18}, + []bool{false, false, false, true}, + }, + { + "test below the watermark", 100, 0.5, + []int64{1, 2, 3, 4}, + []bool{false, false, false, false}, + }, } fm.dropAll() diff --git a/internal/datanode/flow_graph_message.go b/internal/datanode/flow_graph_message.go index ce1d5bbb9b..c146035299 100644 --- a/internal/datanode/flow_graph_message.go +++ b/internal/datanode/flow_graph_message.go @@ -49,7 +49,7 @@ type flowGraphMsg struct { timeRange TimeRange startPositions []*msgpb.MsgPosition endPositions []*msgpb.MsgPosition - //segmentsToSync is the signal used by insertBufferNode to notify deleteNode to flush + // segmentsToSync is the signal used by insertBufferNode to notify deleteNode to flush segmentsToSync []UniqueID dropCollection bool dropPartitions []UniqueID @@ -69,7 +69,7 @@ type flushMsg struct { timestamp Timestamp segmentID UniqueID collectionID UniqueID - //isFlush illustrates if this is a flush or normal sync + // isFlush illustrates if this is a flush or normal sync isFlush bool } diff --git a/internal/datanode/flow_graph_message_test.go b/internal/datanode/flow_graph_message_test.go index 70b9e93491..d5d9dbbd6c 100644 --- a/internal/datanode/flow_graph_message_test.go +++ b/internal/datanode/flow_graph_message_test.go @@ -38,5 +38,4 @@ func TestInsertMsg_TimeTick(te *testing.T) { assert.Equal(t, test.timeTimestanpMax, fgMsg.TimeTick()) }) } - } diff --git a/internal/datanode/flow_graph_time_ticker.go b/internal/datanode/flow_graph_time_ticker.go index 34dabf9958..50db66bfb6 100644 --- a/internal/datanode/flow_graph_time_ticker.go +++ b/internal/datanode/flow_graph_time_ticker.go @@ -21,9 +21,8 @@ import ( "time" "github.com/samber/lo" - "golang.org/x/exp/maps" - "go.uber.org/zap" + "golang.org/x/exp/maps" "github.com/milvus-io/milvus/pkg/log" ) diff --git a/internal/datanode/flush_manager.go b/internal/datanode/flush_manager.go index 6634e60f97..c3f16c15c9 100644 --- a/internal/datanode/flush_manager.go +++ b/internal/datanode/flush_manager.go @@ -510,8 +510,8 @@ func (m *rendezvousFlushManager) flushBufferData(data *BufferData, segmentID Uni kvs[key] = pkStatsBlob.Value field2Stats[fieldID] = &datapb.Binlog{ EntriesNum: 0, - TimestampFrom: 0, //TODO - TimestampTo: 0, //TODO, + TimestampFrom: 0, // TODO + TimestampTo: 0, // TODO, LogPath: key, LogSize: int64(len(pkStatsBlob.Value)), } @@ -528,8 +528,8 @@ func (m *rendezvousFlushManager) flushBufferData(data *BufferData, segmentID Uni // notify flush manager del buffer data func (m *rendezvousFlushManager) flushDelData(data *DelDataBuf, segmentID UniqueID, - pos *msgpb.MsgPosition) error { - + pos *msgpb.MsgPosition, +) error { // del signal with empty data if data == nil || data.delData == nil { m.handleDeleteTask(segmentID, &flushBufferDeleteTask{}, nil, pos) @@ -648,7 +648,7 @@ func getSyncTaskID(pos *msgpb.MsgPosition) string { // 
close cleans up all the left members func (m *rendezvousFlushManager) close() { m.dispatcher.Range(func(segmentID int64, queue *orderFlushQueue) bool { - //assertion ok + // assertion ok queue.injectMut.Lock() for i := 0; i < len(queue.injectCh); i++ { go queue.handleInject(<-queue.injectCh) @@ -744,8 +744,8 @@ func dropVirtualChannelFunc(dsService *dataSyncService, opts ...retry.Option) fl return func(packs []*segmentFlushPack) { req := &datapb.DropVirtualChannelRequest{ Base: commonpbutil.NewMsgBase( - commonpbutil.WithMsgType(0), //TODO msg type - commonpbutil.WithMsgID(0), //TODO msg id + commonpbutil.WithMsgType(0), // TODO msg type + commonpbutil.WithMsgID(0), // TODO msg id commonpbutil.WithSourceID(paramtable.GetNodeID()), ), ChannelName: dsService.vchannelName, diff --git a/internal/datanode/flush_manager_test.go b/internal/datanode/flush_manager_test.go index a9cdf07eab..e7667c7102 100644 --- a/internal/datanode/flush_manager_test.go +++ b/internal/datanode/flush_manager_test.go @@ -329,7 +329,6 @@ func TestRendezvousFlushManager_Inject(t *testing.T) { }) assert.Eventually(t, func() bool { return counter.Load() == int64(size+3) }, 3*time.Second, 100*time.Millisecond) assert.EqualValues(t, 4, packs[size+1].segmentID) - } func TestRendezvousFlushManager_getSegmentMeta(t *testing.T) { @@ -455,7 +454,7 @@ func TestRendezvousFlushManager_dropMode(t *testing.T) { channel := newTestChannel() targets := make(map[int64]struct{}) - //init failed segment + // init failed segment testSeg := &Segment{ collectionID: 1, segmentID: -1, @@ -463,7 +462,7 @@ func TestRendezvousFlushManager_dropMode(t *testing.T) { testSeg.setType(datapb.SegmentType_New) channel.segments[testSeg.segmentID] = testSeg - //init target segment + // init target segment for i := 1; i < 11; i++ { targets[int64(i)] = struct{}{} testSeg := &Segment{ @@ -474,7 +473,7 @@ func TestRendezvousFlushManager_dropMode(t *testing.T) { channel.segments[testSeg.segmentID] = testSeg } - //init flush manager + // init flush manager m := NewRendezvousFlushManager(allocator.NewMockAllocator(t), cm, channel, func(pack *segmentFlushPack) { }, func(packs []*segmentFlushPack) { mut.Lock() @@ -532,7 +531,7 @@ func TestRendezvousFlushManager_dropMode(t *testing.T) { var result []*segmentFlushPack signal := make(chan struct{}) channel := newTestChannel() - //init failed segment + // init failed segment testSeg := &Segment{ collectionID: 1, segmentID: -1, @@ -540,7 +539,7 @@ func TestRendezvousFlushManager_dropMode(t *testing.T) { testSeg.setType(datapb.SegmentType_New) channel.segments[testSeg.segmentID] = testSeg - //init target segment + // init target segment for i := 1; i < 11; i++ { seg := &Segment{ collectionID: 1, @@ -558,14 +557,14 @@ func TestRendezvousFlushManager_dropMode(t *testing.T) { close(signal) }) - //flush failed segment before start drop mode + // flush failed segment before start drop mode halfMsgID := []byte{1, 1, 1} _, err := m.flushBufferData(nil, -1, true, false, &msgpb.MsgPosition{ MsgID: halfMsgID, }) assert.NoError(t, err) - //inject target segment + // inject target segment injFunc := func(pack *segmentFlushPack) { pack.segmentID = 100 } @@ -617,7 +616,7 @@ func TestRendezvousFlushManager_close(t *testing.T) { channel := newTestChannel() - //init test segment + // init test segment testSeg := &Segment{ collectionID: 1, segmentID: 1, @@ -758,7 +757,8 @@ func TestDropVirtualChannelFunc(t *testing.T) { ChannelName: vchanName, MsgID: []byte{1, 2, 3}, Timestamp: 10, - }, endPos: nil}) + }, endPos: nil, + }) 
assert.NotPanics(t, func() { dropFunc([]*segmentFlushPack{ { @@ -794,7 +794,6 @@ func TestDropVirtualChannelFunc(t *testing.T) { }) t.Run("datacoord call error", func(t *testing.T) { - dataCoord.DropVirtualChannelStatus = commonpb.ErrorCode_UnexpectedError dataCoord.DropVirtualChannelError = true assert.Panics(t, func() { @@ -811,5 +810,4 @@ func TestDropVirtualChannelFunc(t *testing.T) { dropFunc(nil) }) }) - } diff --git a/internal/datanode/flush_task.go b/internal/datanode/flush_task.go index 47923eba7e..2f0fdee37d 100644 --- a/internal/datanode/flush_task.go +++ b/internal/datanode/flush_task.go @@ -21,7 +21,6 @@ import ( "sync" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" @@ -29,6 +28,7 @@ import ( "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/retry" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) // errStart used for retry start @@ -125,7 +125,8 @@ func (t *flushTaskRunner) init(f notifyMetaFunc, postFunc taskPostFunc, signal < // runFlushInsert executes flush insert task with once and retry func (t *flushTaskRunner) runFlushInsert(task flushInsertTask, - binlogs, statslogs map[UniqueID]*datapb.Binlog, flushed bool, dropped bool, pos *msgpb.MsgPosition, opts ...retry.Option) { + binlogs, statslogs map[UniqueID]*datapb.Binlog, flushed bool, dropped bool, pos *msgpb.MsgPosition, opts ...retry.Option, +) { t.insertOnce.Do(func() { t.insertLogs = binlogs t.statsLogs = statslogs diff --git a/internal/datanode/flush_task_test.go b/internal/datanode/flush_task_test.go index bc4489bbe4..0bdcc06895 100644 --- a/internal/datanode/flush_task_test.go +++ b/internal/datanode/flush_task_test.go @@ -19,8 +19,9 @@ package datanode import ( "testing" - "github.com/milvus-io/milvus/pkg/util/retry" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/retry" ) func TestFlushTaskRunner(t *testing.T) { @@ -91,7 +92,6 @@ func TestFlushTaskRunner_FailError(t *testing.T) { assert.True(t, errFlag) assert.True(t, nextFlag) - } func TestFlushTaskRunner_Injection(t *testing.T) { diff --git a/internal/datanode/io_pool.go b/internal/datanode/io_pool.go index fdac4d3d5e..049f687e09 100644 --- a/internal/datanode/io_pool.go +++ b/internal/datanode/io_pool.go @@ -7,11 +7,15 @@ import ( "github.com/milvus-io/milvus/pkg/util/conc" ) -var ioPool *conc.Pool[any] -var ioPoolInitOnce sync.Once +var ( + ioPool *conc.Pool[any] + ioPoolInitOnce sync.Once +) -var statsPool *conc.Pool[any] -var statsPoolInitOnce sync.Once +var ( + statsPool *conc.Pool[any] + statsPoolInitOnce sync.Once +) func initIOPool() { capacity := Params.DataNodeCfg.IOConcurrency.GetAsInt() diff --git a/internal/datanode/meta_service.go b/internal/datanode/meta_service.go index 4a7cfea7fa..7be2442cca 100644 --- a/internal/datanode/meta_service.go +++ b/internal/datanode/meta_service.go @@ -64,7 +64,7 @@ func (mService *metaService) getCollectionInfo(ctx context.Context, collID Uniqu req := &milvuspb.DescribeCollectionRequest{ Base: commonpbutil.NewMsgBase( commonpbutil.WithMsgType(commonpb.MsgType_DescribeCollection), - commonpbutil.WithMsgID(0), //GOOSE TODO + commonpbutil.WithMsgID(0), // GOOSE TODO commonpbutil.WithSourceID(paramtable.GetNodeID()), ), // please do not specify the collection name alone after database feature. 
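
The test-file hunks in this patch (and the ones that follow) repeatedly move third-party imports such as testify and zap out of the milvus-io group and push `github.com/milvus-io/...` packages to the end. As a quick reference, here is a hypothetical file (package name and function are made up) showing the import layout those hunks converge on: standard library first, third-party modules second, milvus-io packages last, each group separated by a blank line.

```go
package datanode

import (
	"context"
	"fmt"

	"go.uber.org/zap"

	"github.com/milvus-io/milvus/pkg/log"
)

// noteImportOrder exists only to exercise the imports above; the point of the
// example is the three blank-line-separated import groups.
func noteImportOrder(ctx context.Context) {
	log.Info("import order",
		zap.String("groups", fmt.Sprintf("std, third-party, milvus-io (ctx err: %v)", ctx.Err())))
}
```
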
diff --git a/internal/datanode/meta_service_test.go b/internal/datanode/meta_service_test.go index 297d32cfed..bef5a6c5c8 100644 --- a/internal/datanode/meta_service_test.go +++ b/internal/datanode/meta_service_test.go @@ -21,11 +21,11 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/stretchr/testify/assert" ) const ( @@ -47,7 +47,6 @@ func TestMetaService_All(t *testing.T) { ms := newMetaService(mFactory, collectionID0) t.Run("Test getCollectionSchema", func(t *testing.T) { - sch, err := ms.getCollectionSchema(ctx, collectionID0, 0) assert.NoError(t, err) assert.NotNil(t, sch) @@ -84,7 +83,6 @@ func (rc *RootCoordFails2) DescribeCollectionInternal(ctx context.Context, req * } func TestMetaServiceRootCoodFails(t *testing.T) { - t.Run("Test Describe with error", func(t *testing.T) { rc := &RootCoordFails1{} rc.setCollectionID(collectionID0) diff --git a/internal/datanode/mock_test.go b/internal/datanode/mock_test.go index e36d6b03e5..43fc167c0f 100644 --- a/internal/datanode/mock_test.go +++ b/internal/datanode/mock_test.go @@ -25,8 +25,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" @@ -47,7 +45,9 @@ import ( "github.com/milvus-io/milvus/pkg/mq/msgdispatcher" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/util/etcd" + "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -145,11 +145,9 @@ func clearEtcd(rootPath string) error { } log.Debug("Clear ETCD with prefix writer/ddl") return nil - } -type MetaFactory struct { -} +type MetaFactory struct{} func NewMetaFactory() *MetaFactory { return &MetaFactory{} @@ -489,7 +487,7 @@ func GenRowData() (rawData []byte) { const DIM = 2 // Float vector - var fvector = [DIM]float32{1, 2} + fvector := [DIM]float32{1, 2} for _, ele := range fvector { buf := make([]byte, 4) common.Endian.PutUint32(buf, math.Float32bits(ele)) @@ -499,11 +497,11 @@ func GenRowData() (rawData []byte) { // Binary vector // Dimension of binary vector is 32 // size := 4, = 32 / 8 - var bvector = []byte{255, 255, 255, 0} + bvector := []byte{255, 255, 255, 0} rawData = append(rawData, bvector...) // Bool - var fieldBool = true + fieldBool := true buf := new(bytes.Buffer) if err := binary.Write(buf, common.Endian, fieldBool); err != nil { panic(err) @@ -552,7 +550,7 @@ func GenRowData() (rawData []byte) { rawData = append(rawData, bfloat32.Bytes()...) 
// float64 - var datafloat64 = 2.2 + datafloat64 := 2.2 bfloat64 := new(bytes.Buffer) if err := binary.Write(bfloat64, common.Endian, datafloat64); err != nil { panic(err) @@ -564,7 +562,7 @@ func GenRowData() (rawData []byte) { func GenColumnData() (fieldsData []*schemapb.FieldData) { // Float vector - var fVector = []float32{1, 2} + fVector := []float32{1, 2} floatVectorData := &schemapb.FieldData{ Type: schemapb.DataType_FloatVector, FieldName: "float_vector_field", @@ -709,7 +707,7 @@ func GenColumnData() (fieldsData []*schemapb.FieldData) { } fieldsData = append(fieldsData, floatFieldData) - //double + // double doubleData := []float64{2.2} doubleFieldData := &schemapb.FieldData{ Type: schemapb.DataType_Double, @@ -727,7 +725,7 @@ func GenColumnData() (fieldsData []*schemapb.FieldData) { } fieldsData = append(fieldsData, doubleFieldData) - //var char + // var char varCharData := []string{"test"} varCharFieldData := &schemapb.FieldData{ Type: schemapb.DataType_VarChar, @@ -749,7 +747,7 @@ func GenColumnData() (fieldsData []*schemapb.FieldData) { } func (df *DataFactory) GenMsgStreamInsertMsg(idx int, chanName string) *msgstream.InsertMsg { - var msg = &msgstream.InsertMsg{ + msg := &msgstream.InsertMsg{ BaseMsg: msgstream.BaseMsg{ HashValues: []uint32{uint32(idx)}, }, @@ -777,7 +775,7 @@ func (df *DataFactory) GenMsgStreamInsertMsg(idx int, chanName string) *msgstrea } func (df *DataFactory) GenMsgStreamInsertMsgWithTs(idx int, chanName string, ts Timestamp) *msgstream.InsertMsg { - var msg = &msgstream.InsertMsg{ + msg := &msgstream.InsertMsg{ BaseMsg: msgstream.BaseMsg{ HashValues: []uint32{uint32(idx)}, BeginTimestamp: ts, @@ -808,7 +806,7 @@ func (df *DataFactory) GenMsgStreamInsertMsgWithTs(idx int, chanName string, ts func (df *DataFactory) GetMsgStreamTsInsertMsgs(n int, chanName string, ts Timestamp) (inMsgs []msgstream.TsMsg) { for i := 0; i < n; i++ { - var msg = df.GenMsgStreamInsertMsgWithTs(i, chanName, ts) + msg := df.GenMsgStreamInsertMsgWithTs(i, chanName, ts) var tsMsg msgstream.TsMsg = msg inMsgs = append(inMsgs, tsMsg) } @@ -817,7 +815,7 @@ func (df *DataFactory) GetMsgStreamTsInsertMsgs(n int, chanName string, ts Times func (df *DataFactory) GetMsgStreamInsertMsgs(n int) (msgs []*msgstream.InsertMsg) { for i := 0; i < n; i++ { - var msg = df.GenMsgStreamInsertMsg(i, "") + msg := df.GenMsgStreamInsertMsg(i, "") msgs = append(msgs, msg) } return @@ -829,7 +827,7 @@ func (df *DataFactory) GenMsgStreamDeleteMsg(pks []primaryKey, chanName string) for i := 0; i < len(pks); i++ { timestamps[i] = Timestamp(i) + 1000 } - var msg = &msgstream.DeleteMsg{ + msg := &msgstream.DeleteMsg{ BaseMsg: msgstream.BaseMsg{ HashValues: []uint32{uint32(idx)}, }, @@ -853,7 +851,7 @@ func (df *DataFactory) GenMsgStreamDeleteMsg(pks []primaryKey, chanName string) } func (df *DataFactory) GenMsgStreamDeleteMsgWithTs(idx int, pks []primaryKey, chanName string, ts Timestamp) *msgstream.DeleteMsg { - var msg = &msgstream.DeleteMsg{ + msg := &msgstream.DeleteMsg{ BaseMsg: msgstream.BaseMsg{ HashValues: []uint32{uint32(idx)}, BeginTimestamp: ts, @@ -893,7 +891,7 @@ func genFlowGraphInsertMsg(chanName string) flowGraphMsg { }, } - var fgMsg = &flowGraphMsg{ + fgMsg := &flowGraphMsg{ insertMessages: make([]*msgstream.InsertMsg, 0), timeRange: TimeRange{ timestampMin: timeRange.timestampMin, @@ -923,7 +921,7 @@ func genFlowGraphDeleteMsg(pks []primaryKey, chanName string) flowGraphMsg { }, } - var fgMsg = &flowGraphMsg{ + fgMsg := &flowGraphMsg{ insertMessages: make([]*msgstream.InsertMsg, 0), 
timeRange: TimeRange{ timestampMin: timeRange.timestampMin, @@ -951,7 +949,8 @@ func (m *RootCoordFactory) AllocID(ctx context.Context, in *rootcoordpb.AllocIDR resp := &rootcoordpb.AllocIDResponse{ Status: &commonpb.Status{ ErrorCode: commonpb.ErrorCode_UnexpectedError, - }} + }, + } if in.Count == 12 { resp.Status.ErrorCode = commonpb.ErrorCode_Success @@ -995,7 +994,6 @@ func (m *RootCoordFactory) ShowCollections(ctx context.Context, in *milvuspb.Sho CollectionNames: []string{m.collectionName}, } return resp, nil - } func (m *RootCoordFactory) DescribeCollectionInternal(ctx context.Context, in *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error) { @@ -1101,7 +1099,7 @@ func genInsertDataWithPKs(PKs [2]primaryKey, dataType schemapb.DataType) *Insert } iD.Data[109].(*storage.StringFieldData).Data = values default: - //TODO:: + // TODO:: } return iD } @@ -1158,7 +1156,8 @@ func genInsertData() *InsertData { 109: &storage.StringFieldData{ Data: []string{"test1", "test2"}, }, - }} + }, + } } func genEmptyInsertData() *InsertData { @@ -1202,7 +1201,8 @@ func genEmptyInsertData() *InsertData { 109: &storage.StringFieldData{ Data: []string{}, }, - }} + }, + } } func genInsertDataWithExpiredTS() *InsertData { @@ -1246,7 +1246,8 @@ func genInsertDataWithExpiredTS() *InsertData { 109: &storage.StringFieldData{ Data: []string{"test1", "test2"}, }, - }} + }, + } } func genTimestamp() typeutil.Timestamp { diff --git a/internal/datanode/rate_collector.go b/internal/datanode/rate_collector.go index 96d5e59150..b7052c3bb1 100644 --- a/internal/datanode/rate_collector.go +++ b/internal/datanode/rate_collector.go @@ -24,8 +24,10 @@ import ( ) // rateCol is global rateCollector in DataNode. -var rateCol *rateCollector -var initOnce sync.Once +var ( + rateCol *rateCollector + initOnce sync.Once +) // rateCollector helps to collect and calculate values (like rate, timeTick and etc...). type rateCollector struct { diff --git a/internal/datanode/segment.go b/internal/datanode/segment.go index f0432c7a6c..57fbf2820e 100644 --- a/internal/datanode/segment.go +++ b/internal/datanode/segment.go @@ -24,7 +24,6 @@ import ( "sync/atomic" "github.com/bits-and-blooms/bloom/v3" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" @@ -34,6 +33,7 @@ import ( "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) // Segment contains the latest segment infos from channel. 
diff --git a/internal/datanode/segment_sync_policy.go b/internal/datanode/segment_sync_policy.go index 65f29a2ec5..72498efb4b 100644 --- a/internal/datanode/segment_sync_policy.go +++ b/internal/datanode/segment_sync_policy.go @@ -21,10 +21,11 @@ import ( "sort" "time" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/samber/lo" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) const minSyncSize = 0.5 * 1024 * 1024 diff --git a/internal/datanode/segment_sync_policy_test.go b/internal/datanode/segment_sync_policy_test.go index 74f3631649..bc6a14533d 100644 --- a/internal/datanode/segment_sync_policy_test.go +++ b/internal/datanode/segment_sync_policy_test.go @@ -22,9 +22,10 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/util/tsoutil" - "github.com/stretchr/testify/assert" ) func TestSyncPeriodically(t *testing.T) { @@ -68,18 +69,36 @@ func TestSyncMemoryTooHigh(t *testing.T) { memorySizesInMB []float64 shouldSyncSegs []UniqueID }{ - {"test normal 1", 3, true, - []float64{1, 2, 3, 4, 5}, []UniqueID{5, 4, 3}}, - {"test normal 2", 2, true, - []float64{1, 2, 3, 4, 5}, []UniqueID{5, 4}}, - {"test normal 3", 5, true, - []float64{1, 2, 3, 4, 5}, []UniqueID{5, 4, 3, 2, 1}}, - {"test isHighMemory false", 3, false, - []float64{1, 2, 3, 4, 5}, []UniqueID{}}, - {"test syncSegmentNum 1", 1, true, - []float64{1, 2, 3, 4, 5}, []UniqueID{5}}, - {"test with small segment", 3, true, - []float64{0.1, 0.1, 0.1, 4, 5}, []UniqueID{5, 4}}, + { + "test normal 1", 3, true, + []float64{1, 2, 3, 4, 5}, + []UniqueID{5, 4, 3}, + }, + { + "test normal 2", 2, true, + []float64{1, 2, 3, 4, 5}, + []UniqueID{5, 4}, + }, + { + "test normal 3", 5, true, + []float64{1, 2, 3, 4, 5}, + []UniqueID{5, 4, 3, 2, 1}, + }, + { + "test isHighMemory false", 3, false, + []float64{1, 2, 3, 4, 5}, + []UniqueID{}, + }, + { + "test syncSegmentNum 1", 1, true, + []float64{1, 2, 3, 4, 5}, + []UniqueID{5}, + }, + { + "test with small segment", 3, true, + []float64{0.1, 0.1, 0.1, 4, 5}, + []UniqueID{5, 4}, + }, } for _, test := range tests { diff --git a/internal/datanode/segment_test.go b/internal/datanode/segment_test.go index b21130b2f3..fef988d771 100644 --- a/internal/datanode/segment_test.go +++ b/internal/datanode/segment_test.go @@ -20,9 +20,10 @@ import ( "math/rand" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" ) func TestSegment_UpdatePKRange(t *testing.T) { diff --git a/internal/datanode/services.go b/internal/datanode/services.go index 6bd92c376c..ee132459d5 100644 --- a/internal/datanode/services.go +++ b/internal/datanode/services.go @@ -27,7 +27,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/samber/lo" "go.uber.org/zap" @@ -51,6 +50,7 @@ import ( "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/retry" "github.com/milvus-io/milvus/pkg/util/timerecord" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -784,7 +784,8 @@ func createBinLogsFunc(node *DataNode, req *datapb.ImportTaskRequest, schema *sc func saveSegmentFunc(node *DataNode, req *datapb.ImportTaskRequest, res *rootcoordpb.ImportResult, ts Timestamp) 
importutil.SaveSegmentFunc { importTaskID := req.GetImportTask().GetTaskId() return func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, segmentID int64, - targetChName string, rowCount int64, partID int64) error { + targetChName string, rowCount int64, partID int64, + ) error { logFields := []zap.Field{ zap.Int64("task ID", importTaskID), zap.Int64("partitionID", partID), @@ -854,7 +855,8 @@ func saveSegmentFunc(node *DataNode, req *datapb.ImportTaskRequest, res *rootcoo } func composeAssignSegmentIDRequest(rowNum int, shardID int, chNames []string, - collID int64, partID int64) *datapb.AssignSegmentIDRequest { + collID int64, partID int64, +) *datapb.AssignSegmentIDRequest { // use the first field's row count as segment row count // all the fields row count are same, checked by ImportWrapper // ask DataCoord to alloc a new segment @@ -876,8 +878,8 @@ func composeAssignSegmentIDRequest(rowNum int, shardID int, chNames []string, } func createBinLogs(rowNum int, schema *schemapb.CollectionSchema, ts Timestamp, - fields map[storage.FieldID]storage.FieldData, node *DataNode, segmentID, colID, partID UniqueID) ([]*datapb.FieldBinlog, []*datapb.FieldBinlog, error) { - + fields map[storage.FieldID]storage.FieldData, node *DataNode, segmentID, colID, partID UniqueID, +) ([]*datapb.FieldBinlog, []*datapb.FieldBinlog, error) { ctx, cancel := context.WithCancel(context.Background()) defer cancel() diff --git a/internal/datanode/services_test.go b/internal/datanode/services_test.go index 154679b51f..8b2fbfc353 100644 --- a/internal/datanode/services_test.go +++ b/internal/datanode/services_test.go @@ -319,7 +319,7 @@ func (s *DataNodeServicesSuite) TestShowConfigurations() { Pattern: pattern, } - //test closed server + // test closed server node := &DataNode{} node.SetSession(&sessionutil.Session{ServerID: 1}) node.stateCode.Store(commonpb.StateCode_Abnormal) @@ -653,7 +653,6 @@ func (s *DataNodeServicesSuite) TestAddImportSegment() { s.Assert().False(merr.Ok(resp.GetStatus())) // s.Assert().Equal(merr.Code(merr.ErrChannelNotFound), stat.GetStatus().GetCode()) }) - } func (s *DataNodeServicesSuite) TestSyncSegments() { diff --git a/internal/datanode/util.go b/internal/datanode/util.go index 2d9f7c3f42..dc1fbdab2c 100644 --- a/internal/datanode/util.go +++ b/internal/datanode/util.go @@ -19,10 +19,11 @@ package datanode import ( "context" - "github.com/milvus-io/milvus/pkg/mq/msgstream" - "github.com/milvus-io/milvus/pkg/util/typeutil" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/trace" + + "github.com/milvus-io/milvus/pkg/mq/msgstream" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) type ( diff --git a/internal/distributed/connection_manager.go b/internal/distributed/connection_manager.go index e43d1817e9..682a751dae 100644 --- a/internal/distributed/connection_manager.go +++ b/internal/distributed/connection_manager.go @@ -24,24 +24,23 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/sessionutil" - "github.com/milvus-io/milvus/pkg/tracer" - grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" grpc_retry "github.com/grpc-ecosystem/go-grpc-middleware/retry" - "github.com/milvus-io/milvus/internal/proto/datapb" - "github.com/milvus-io/milvus/internal/proto/indexpb" - "github.com/milvus-io/milvus/internal/proto/querypb" - "github.com/milvus-io/milvus/internal/proto/rootcoordpb" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/retry" - "github.com/milvus-io/milvus/pkg/util/typeutil" 
"go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" + "go.uber.org/zap" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/credentials/insecure" - "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/internal/proto/indexpb" + "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/proto/rootcoordpb" + "github.com/milvus-io/milvus/internal/util/sessionutil" + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" + "github.com/milvus-io/milvus/pkg/util/retry" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) // ConnectionManager handles connection to other components of the system @@ -123,6 +122,7 @@ func (cm *ConnectionManager) AddDependency(roleName string) error { return nil } + func (cm *ConnectionManager) Start() { go cm.receiveFinishTask() } @@ -369,7 +369,6 @@ func newBuildClientTask(session *sessionutil.Session, notify chan int64, retryOp notify: notify, } - } func (bct *buildClientTask) Run() { @@ -420,6 +419,7 @@ func (bct *buildClientTask) Run() { } }() } + func (bct *buildClientTask) Stop() { bct.cancel() } diff --git a/internal/distributed/connection_manager_test.go b/internal/distributed/connection_manager_test.go index 30703f814c..1175f4eb2e 100644 --- a/internal/distributed/connection_manager_test.go +++ b/internal/distributed/connection_manager_test.go @@ -27,6 +27,10 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "go.uber.org/zap" + "google.golang.org/grpc" + "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/querypb" @@ -36,9 +40,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" - "go.uber.org/zap" - "google.golang.org/grpc" ) func TestMain(t *testing.M) { diff --git a/internal/distributed/datacoord/client/client.go b/internal/distributed/datacoord/client/client.go index 59ead5d950..4511a8b450 100644 --- a/internal/distributed/datacoord/client/client.go +++ b/internal/distributed/datacoord/client/client.go @@ -20,8 +20,6 @@ import ( "context" "fmt" - "github.com/milvus-io/milvus/internal/util/grpcclient" - "github.com/milvus-io/milvus/internal/util/sessionutil" clientv3 "go.etcd.io/etcd/client/v3" "go.uber.org/zap" "google.golang.org/grpc" @@ -32,6 +30,8 @@ import ( "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/grpcclient" + "github.com/milvus-io/milvus/internal/util/sessionutil" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/commonpbutil" "github.com/milvus-io/milvus/pkg/util/funcutil" diff --git a/internal/distributed/datacoord/client/client_test.go b/internal/distributed/datacoord/client/client_test.go index 67fe3b25e3..ea98f744a8 100644 --- a/internal/distributed/datacoord/client/client_test.go +++ b/internal/distributed/datacoord/client/client_test.go @@ -25,16 +25,16 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/mock" + "github.com/stretchr/testify/assert" "go.uber.org/zap" + "google.golang.org/grpc" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proxy" + 
"github.com/milvus-io/milvus/internal/util/mock" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" - "google.golang.org/grpc" ) func TestMain(m *testing.M) { diff --git a/internal/distributed/datacoord/service.go b/internal/distributed/datacoord/service.go index 6312e12c44..eae1478bfb 100644 --- a/internal/distributed/datacoord/service.go +++ b/internal/distributed/datacoord/service.go @@ -24,11 +24,6 @@ import ( "sync" "time" - "github.com/milvus-io/milvus/internal/proto/indexpb" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/pkg/tracer" - "github.com/milvus-io/milvus/pkg/util/interceptor" - grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" "github.com/tikv/client-go/v2/txnkv" clientv3 "go.etcd.io/etcd/client/v3" @@ -42,12 +37,16 @@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/datacoord" "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" "github.com/milvus-io/milvus/pkg/util" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/milvus-io/milvus/pkg/util/logutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/tikv" @@ -154,12 +153,12 @@ func (s *Server) startGrpcLoop(grpcPort int) { ctx, cancel := context.WithCancel(s.ctx) defer cancel() - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } diff --git a/internal/distributed/datacoord/service_test.go b/internal/distributed/datacoord/service_test.go index 674b6d579e..c4a46efdda 100644 --- a/internal/distributed/datacoord/service_test.go +++ b/internal/distributed/datacoord/service_test.go @@ -22,6 +22,9 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" + "github.com/tikv/client-go/v2/txnkv" + clientv3 "go.etcd.io/etcd/client/v3" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" @@ -32,9 +35,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/tikv" - "github.com/stretchr/testify/assert" - "github.com/tikv/client-go/v2/txnkv" - clientv3 "go.etcd.io/etcd/client/v3" ) type MockDataCoord struct { @@ -307,9 +307,9 @@ func Test_NewServer(t *testing.T) { t.Run("Run", func(t *testing.T) { server.dataCoord = &MockDataCoord{} - //indexCoord := mocks.NewMockIndexCoord(t) - //indexCoord.EXPECT().Init().Return(nil) - //server.indexCoord = indexCoord + // indexCoord := mocks.NewMockIndexCoord(t) + // 
indexCoord.EXPECT().Init().Return(nil) + // server.indexCoord = indexCoord err := server.Run() assert.NoError(t, err) diff --git a/internal/distributed/datanode/client/client.go b/internal/distributed/datanode/client/client.go index 7c571827da..41a0dfb682 100644 --- a/internal/distributed/datanode/client/client.go +++ b/internal/distributed/datanode/client/client.go @@ -20,6 +20,8 @@ import ( "context" "fmt" + "google.golang.org/grpc" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/datapb" @@ -29,7 +31,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "google.golang.org/grpc" ) var Params *paramtable.ComponentParam = paramtable.Get() diff --git a/internal/distributed/datanode/client/client_test.go b/internal/distributed/datanode/client/client_test.go index 4b50a1f09d..cef93e7cd4 100644 --- a/internal/distributed/datanode/client/client_test.go +++ b/internal/distributed/datanode/client/client_test.go @@ -21,13 +21,12 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/mock" - "github.com/milvus-io/milvus/pkg/util/paramtable" - - "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/stretchr/testify/assert" "google.golang.org/grpc" - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/internal/util/mock" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func Test_NewClient(t *testing.T) { diff --git a/internal/distributed/datanode/service.go b/internal/distributed/datanode/service.go index 7055516d18..0ecd8b09cf 100644 --- a/internal/distributed/datanode/service.go +++ b/internal/distributed/datanode/service.go @@ -25,11 +25,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/componentutil" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/pkg/tracer" - "github.com/milvus-io/milvus/pkg/util/interceptor" - grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" clientv3 "go.etcd.io/etcd/client/v3" "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" @@ -46,9 +41,13 @@ import ( "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/componentutil" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/milvus-io/milvus/pkg/util/logutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/retry" @@ -76,7 +75,7 @@ type Server struct { // NewServer new DataNode grpc server func NewServer(ctx context.Context, factory dependency.Factory) (*Server, error) { ctx1, cancel := context.WithCancel(ctx) - var s = &Server{ + s := &Server{ ctx: ctx1, cancel: cancel, factory: factory, @@ -107,12 +106,12 @@ func (s *Server) startGrpc() error { func (s *Server) startGrpcLoop(grpcPort int) { defer s.wg.Done() Params := &paramtable.Get().DataNodeGrpcServerCfg - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime:
5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } @@ -124,7 +123,6 @@ func (s *Server) startGrpcLoop(grpcPort int) { lis, err = net.Listen("tcp", addr) return err }, retry.Attempts(10)) - if err != nil { log.Error("DataNode GrpcServer:failed to listen", zap.Error(err)) s.grpcErrChan <- err @@ -169,7 +167,6 @@ func (s *Server) startGrpcLoop(grpcPort int) { log.Warn("DataNode failed to start gRPC") s.grpcErrChan <- err } - } func (s *Server) SetEtcdClient(client *clientv3.Client) { diff --git a/internal/distributed/datanode/service_test.go b/internal/distributed/datanode/service_test.go index de411a4fdc..77c7d9bed9 100644 --- a/internal/distributed/datanode/service_test.go +++ b/internal/distributed/datanode/service_test.go @@ -22,6 +22,8 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" + clientv3 "go.etcd.io/etcd/client/v3" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" @@ -31,8 +33,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" - clientv3 "go.etcd.io/etcd/client/v3" ) // ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -161,9 +161,11 @@ type mockDataCoord struct { func (m *mockDataCoord) Init() error { return nil } + func (m *mockDataCoord) Start() error { return nil } + func (m *mockDataCoord) GetComponentStates(ctx context.Context) (*milvuspb.ComponentStates, error) { return &milvuspb.ComponentStates{ State: &milvuspb.ComponentInfo{ @@ -177,6 +179,7 @@ func (m *mockDataCoord) GetComponentStates(ctx context.Context) (*milvuspb.Compo }, }, nil } + func (m *mockDataCoord) Stop() error { return fmt.Errorf("stop error") } @@ -189,9 +192,11 @@ type mockRootCoord struct { func (m *mockRootCoord) Init() error { return nil } + func (m *mockRootCoord) Start() error { return nil } + func (m *mockRootCoord) GetComponentStates(ctx context.Context) (*milvuspb.ComponentStates, error) { return &milvuspb.ComponentStates{ State: &milvuspb.ComponentInfo{ @@ -205,6 +210,7 @@ func (m *mockRootCoord) GetComponentStates(ctx context.Context) (*milvuspb.Compo }, }, nil } + func (m *mockRootCoord) Stop() error { return fmt.Errorf("stop error") } diff --git a/internal/distributed/indexnode/client/client.go b/internal/distributed/indexnode/client/client.go index 41f7aaba13..9de851e61f 100644 --- a/internal/distributed/indexnode/client/client.go +++ b/internal/distributed/indexnode/client/client.go @@ -20,13 +20,13 @@ import ( "context" "fmt" - "github.com/milvus-io/milvus/internal/util/grpcclient" "google.golang.org/grpc" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/util/grpcclient" "github.com/milvus-io/milvus/pkg/util/commonpbutil" 
"github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" diff --git a/internal/distributed/indexnode/client/client_test.go b/internal/distributed/indexnode/client/client_test.go index 7ba169fc67..a468765482 100644 --- a/internal/distributed/indexnode/client/client_test.go +++ b/internal/distributed/indexnode/client/client_test.go @@ -21,9 +21,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/internal/util/mock" - "github.com/stretchr/testify/assert" "google.golang.org/grpc" @@ -32,6 +29,8 @@ import ( "github.com/milvus-io/milvus/internal/indexnode" "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/util/dependency" + "github.com/milvus-io/milvus/internal/util/mock" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/paramtable" diff --git a/internal/distributed/indexnode/service.go b/internal/distributed/indexnode/service.go index 0e0249fd58..62187d626d 100644 --- a/internal/distributed/indexnode/service.go +++ b/internal/distributed/indexnode/service.go @@ -25,8 +25,6 @@ import ( "time" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/pkg/tracer" clientv3 "go.etcd.io/etcd/client/v3" "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" "go.uber.org/atomic" @@ -40,7 +38,9 @@ import ( "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/interceptor" @@ -93,12 +93,12 @@ func (s *Server) startGrpcLoop(grpcPort int) { ctx, cancel := context.WithCancel(s.loopCtx) defer cancel() - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } diff --git a/internal/distributed/indexnode/service_test.go b/internal/distributed/indexnode/service_test.go index 8015bd0613..4e168dfc8c 100644 --- a/internal/distributed/indexnode/service_test.go +++ b/internal/distributed/indexnode/service_test.go @@ -20,7 +20,6 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/internal/util/dependency" "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" @@ -28,6 +27,7 @@ import ( "github.com/milvus-io/milvus/internal/indexnode" "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/metricsinfo" 
"github.com/milvus-io/milvus/pkg/util/paramtable" diff --git a/internal/distributed/proxy/client/client.go b/internal/distributed/proxy/client/client.go index c33e123d25..17d7f4f9b9 100644 --- a/internal/distributed/proxy/client/client.go +++ b/internal/distributed/proxy/client/client.go @@ -20,6 +20,8 @@ import ( "context" "fmt" + "google.golang.org/grpc" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/internalpb" @@ -29,7 +31,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "google.golang.org/grpc" ) var Params *paramtable.ComponentParam = paramtable.Get() diff --git a/internal/distributed/proxy/client/client_test.go b/internal/distributed/proxy/client/client_test.go index bece027060..9f7734d060 100644 --- a/internal/distributed/proxy/client/client_test.go +++ b/internal/distributed/proxy/client/client_test.go @@ -22,12 +22,12 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/mock" - "github.com/milvus-io/milvus/pkg/util/paramtable" - - "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/stretchr/testify/assert" "google.golang.org/grpc" + + "github.com/milvus-io/milvus/internal/proto/proxypb" + "github.com/milvus-io/milvus/internal/util/mock" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func Test_NewClient(t *testing.T) { diff --git a/internal/distributed/proxy/httpserver/handler.go b/internal/distributed/proxy/httpserver/handler.go index 8a99c7fa40..2685448874 100644 --- a/internal/distributed/proxy/httpserver/handler.go +++ b/internal/distributed/proxy/httpserver/handler.go @@ -5,6 +5,7 @@ import ( "github.com/gin-gonic/gin" "github.com/golang/protobuf/proto" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/types" ) @@ -79,7 +80,6 @@ func (h *Handlers) RegisterRoutesTo(router gin.IRouter) { router.PATCH("/credential", wrapHandler(h.handleUpdateCredential)) router.DELETE("/credential", wrapHandler(h.handleDeleteCredential)) router.GET("/credential/users", wrapHandler(h.handleListCredUsers)) - } func (h *Handlers) handleGetHealth(c *gin.Context) (interface{}, error) { diff --git a/internal/distributed/proxy/httpserver/handler_test.go b/internal/distributed/proxy/httpserver/handler_test.go index 95cfd651f2..7957888e22 100644 --- a/internal/distributed/proxy/httpserver/handler_test.go +++ b/internal/distributed/proxy/httpserver/handler_test.go @@ -10,14 +10,14 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/gin-gonic/gin" "github.com/gin-gonic/gin/binding" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/types" - "github.com/stretchr/testify/assert" ) func Test_WrappedInsertRequest_JSONMarshal_AsInsertRequest(t *testing.T) { @@ -46,8 +46,10 @@ func (m *mockProxyComponent) Dummy(ctx context.Context, request *milvuspb.DummyR return nil, nil } -var emptyBody = &gin.H{} -var testStatus = &commonpb.Status{Reason: "ok"} +var ( + emptyBody = &gin.H{} + testStatus = &commonpb.Status{Reason: "ok"} +) func (m *mockProxyComponent) CreateDatabase(ctx context.Context, in *milvuspb.CreateDatabaseRequest) (*commonpb.Status, error) { return testStatus, nil @@ -112,9 
+114,11 @@ func (m *mockProxyComponent) HasPartition(ctx context.Context, request *milvuspb func (m *mockProxyComponent) LoadPartitions(ctx context.Context, request *milvuspb.LoadPartitionsRequest) (*commonpb.Status, error) { return testStatus, nil } + func (m *mockProxyComponent) ReleasePartitions(ctx context.Context, request *milvuspb.ReleasePartitionsRequest) (*commonpb.Status, error) { return testStatus, nil } + func (m *mockProxyComponent) GetPartitionStatistics(ctx context.Context, request *milvuspb.GetPartitionStatisticsRequest) (*milvuspb.GetPartitionStatisticsResponse, error) { return &milvuspb.GetPartitionStatisticsResponse{Status: testStatus}, nil } @@ -413,7 +417,8 @@ func TestHandlers(t *testing.T) { }, { http.MethodGet, "/partition/statistics", emptyBody, - http.StatusOK, milvuspb.GetPartitionStatisticsResponse{Status: testStatus}, + http.StatusOK, + milvuspb.GetPartitionStatisticsResponse{Status: testStatus}, }, { http.MethodGet, "/partitions", emptyBody, @@ -456,26 +461,32 @@ func TestHandlers(t *testing.T) { http.StatusOK, &milvuspb.MutationResult{Acknowledged: true}, }, { - http.MethodDelete, "/entities", milvuspb.DeleteRequest{Expr: "some expr"}, + http.MethodDelete, "/entities", + milvuspb.DeleteRequest{Expr: "some expr"}, http.StatusOK, &milvuspb.MutationResult{Acknowledged: true}, }, { - http.MethodPost, "/search", milvuspb.SearchRequest{Dsl: "some dsl"}, + http.MethodPost, "/search", + milvuspb.SearchRequest{Dsl: "some dsl"}, http.StatusOK, &searchResult, }, { - http.MethodPost, "/query", milvuspb.QueryRequest{Expr: "some expr"}, + http.MethodPost, "/query", + milvuspb.QueryRequest{Expr: "some expr"}, http.StatusOK, &queryResult, }, { - http.MethodPost, "/persist", milvuspb.FlushRequest{CollectionNames: []string{"c1"}}, + http.MethodPost, "/persist", + milvuspb.FlushRequest{CollectionNames: []string{"c1"}}, http.StatusOK, flushResult, }, { - http.MethodGet, "/distance", milvuspb.CalcDistanceRequest{ + http.MethodGet, "/distance", + milvuspb.CalcDistanceRequest{ Params: []*commonpb.KeyValuePair{ {Key: "key", Value: "val"}, - }}, + }, + }, http.StatusOK, calcDistanceResult, }, { diff --git a/internal/distributed/proxy/httpserver/handler_v1.go b/internal/distributed/proxy/httpserver/handler_v1.go index 2f15842473..f3075bafbd 100644 --- a/internal/distributed/proxy/httpserver/handler_v1.go +++ b/internal/distributed/proxy/httpserver/handler_v1.go @@ -5,21 +5,20 @@ import ( "net/http" "strconv" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/cockroachdb/errors" - "github.com/gin-gonic/gin" "github.com/gin-gonic/gin/binding" "github.com/golang/protobuf/proto" + "github.com/tidwall/gjson" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proxy" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" - "github.com/tidwall/gjson" - "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/util/merr" ) func checkAuthorization(c *gin.Context, req interface{}) error { @@ -577,7 +576,7 @@ func (h *Handlers) search(c *gin.Context) { c.AbortWithStatusJSON(http.StatusOK, gin.H{HTTPReturnCode: Code(merr.ErrMissingRequiredParameters), HTTPReturnMessage: merr.ErrMissingRequiredParameters.Error()}) return } - params := map[string]interface{}{ //auto generated mapping + params := map[string]interface{}{ // auto generated mapping "level": int(commonpb.ConsistencyLevel_Bounded), } bs, _ 
:= json.Marshal(params) diff --git a/internal/distributed/proxy/httpserver/handler_v1_test.go b/internal/distributed/proxy/httpserver/handler_v1_test.go index d83fa80d21..1f09b36007 100644 --- a/internal/distributed/proxy/httpserver/handler_v1_test.go +++ b/internal/distributed/proxy/httpserver/handler_v1_test.go @@ -8,13 +8,11 @@ import ( "net/http/httptest" "testing" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" @@ -22,8 +20,8 @@ import ( "github.com/milvus-io/milvus/internal/proxy" "github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/pkg/util" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" + "github.com/milvus-io/milvus/pkg/util/merr" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) const ( @@ -55,7 +53,8 @@ var DefaultDescCollectionResp = milvuspb.DescribeCollectionResponse{ CollectionName: DefaultCollectionName, Schema: generateCollectionSchema(false), ShardsNum: ShardNumDefault, - Status: &StatusSuccess} + Status: &StatusSuccess, +} var DefaultLoadStateResp = milvuspb.GetLoadStateResponse{ Status: &StatusSuccess, @@ -1112,7 +1111,6 @@ func TestAuthorization(t *testing.T) { }) } } - } func TestDatabaseNotFound(t *testing.T) { @@ -1390,7 +1388,8 @@ func Test_Handles_VectorCollectionsDescribe(t *testing.T) { mp.EXPECT(). DescribeCollection(mock.Anything, mock.Anything). Return(&milvuspb.DescribeCollectionResponse{ - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}}, nil). + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}, + }, nil). Once() req := httptest.NewRequest(http.MethodGet, "/vector/collections/describe?collectionName=book", nil) req.SetBasicAuth(util.UserRoot, util.DefaultRootPassword) @@ -1405,7 +1404,8 @@ func Test_Handles_VectorCollectionsDescribe(t *testing.T) { DescribeCollection(mock.Anything, mock.Anything). Return(&milvuspb.DescribeCollectionResponse{ Schema: getCollectionSchema("collectionName"), - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil). + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil). Once() mp.EXPECT(). GetLoadState(mock.Anything, mock.Anything). @@ -1428,17 +1428,20 @@ func Test_Handles_VectorCollectionsDescribe(t *testing.T) { DescribeCollection(mock.Anything, mock.Anything). Return(&milvuspb.DescribeCollectionResponse{ Schema: getCollectionSchema("collectionName"), - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil). + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil). Once() mp.EXPECT(). GetLoadState(mock.Anything, mock.Anything). Return(&milvuspb.GetLoadStateResponse{ - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}}, nil). + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}, + }, nil). Once() mp.EXPECT(). DescribeIndex(mock.Anything, mock.Anything). Return(&milvuspb.DescribeIndexResponse{ - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}}, nil). + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}, + }, nil). 
Once() req := httptest.NewRequest(http.MethodGet, "/vector/collections/describe?collectionName=book", nil) req.SetBasicAuth(util.UserRoot, util.DefaultRootPassword) @@ -1453,7 +1456,8 @@ func Test_Handles_VectorCollectionsDescribe(t *testing.T) { DescribeCollection(mock.Anything, mock.Anything). Return(&milvuspb.DescribeCollectionResponse{ Schema: getCollectionSchema("collectionName"), - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil). + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil). Once() mp.EXPECT(). GetLoadState(mock.Anything, mock.Anything). @@ -1477,7 +1481,8 @@ func Test_Handles_VectorCollectionsDescribe(t *testing.T) { }, }, }, - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}}, nil). + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_UnexpectedError}, + }, nil). Once() req := httptest.NewRequest(http.MethodGet, "/vector/collections/describe?collectionName=book", nil) req.SetBasicAuth(util.UserRoot, util.DefaultRootPassword) diff --git a/internal/distributed/proxy/httpserver/utils.go b/internal/distributed/proxy/httpserver/utils.go index f6f138cda5..e7ca9f89d0 100644 --- a/internal/distributed/proxy/httpserver/utils.go +++ b/internal/distributed/proxy/httpserver/utils.go @@ -10,25 +10,21 @@ import ( "strconv" "strings" - "github.com/milvus-io/milvus/pkg/util/parameterutil.go" - "go.uber.org/zap" - - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/cockroachdb/errors" - "github.com/gin-gonic/gin" "github.com/golang/protobuf/proto" "github.com/spf13/cast" "github.com/tidwall/gjson" + "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/merr" + "github.com/milvus-io/milvus/pkg/util/parameterutil.go" ) func ParseUsernamePassword(c *gin.Context) (string, string, bool) { @@ -834,7 +830,6 @@ func buildQueryResp(rowsNum int64, needFields []string, fieldDataList []*schemap } } } - } default: row[fieldDataList[j].FieldName] = "" diff --git a/internal/distributed/proxy/httpserver/utils_test.go b/internal/distributed/proxy/httpserver/utils_test.go index 62249d30ca..0fe057b00b 100644 --- a/internal/distributed/proxy/httpserver/utils_test.go +++ b/internal/distributed/proxy/httpserver/utils_test.go @@ -5,12 +5,13 @@ import ( "testing" "github.com/gin-gonic/gin" + "github.com/stretchr/testify/assert" + "github.com/tidwall/gjson" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" - "github.com/tidwall/gjson" ) const ( @@ -115,7 +116,8 @@ func generateIndexes() []*milvuspb.IndexDescription { { Key: "index_type", Value: "IVF_FLAT", - }, { + }, + { Key: Params, Value: "{\"nlist\":1024}", }, @@ -252,25 +254,29 @@ func TestPrintCollectionDetails(t *testing.T) { HTTPReturnFieldType: "Int64", HTTPReturnFieldPrimaryKey: true, HTTPReturnFieldAutoID: false, - HTTPReturnDescription: ""}, + HTTPReturnDescription: "", + }, { HTTPReturnFieldName: FieldWordCount, HTTPReturnFieldType: "Int64", HTTPReturnFieldPrimaryKey: false, HTTPReturnFieldAutoID: false, - HTTPReturnDescription: ""}, + HTTPReturnDescription: "", + }, { 
HTTPReturnFieldName: FieldBookIntro, HTTPReturnFieldType: "FloatVector(2)", HTTPReturnFieldPrimaryKey: false, HTTPReturnFieldAutoID: false, - HTTPReturnDescription: ""}, + HTTPReturnDescription: "", + }, }) assert.Equal(t, printIndexes(indexes), []gin.H{ { HTTPReturnIndexName: DefaultIndexName, HTTPReturnIndexField: FieldBookIntro, - HTTPReturnIndexMetricsType: DefaultMetricType}, + HTTPReturnIndexMetricsType: DefaultMetricType, + }, }) assert.Equal(t, getMetricType(indexes[0].Params), DefaultMetricType) assert.Equal(t, getMetricType(nil), DefaultMetricType) @@ -286,7 +292,8 @@ func TestPrintCollectionDetails(t *testing.T) { HTTPReturnFieldType: "VarChar(10)", HTTPReturnFieldPrimaryKey: false, HTTPReturnFieldAutoID: false, - HTTPReturnDescription: ""}, + HTTPReturnDescription: "", + }, }) } @@ -341,8 +348,8 @@ func TestInsertWithDynamicFields(t *testing.T) { func TestSerialize(t *testing.T) { parameters := []float32{0.11111, 0.22222} - //assert.Equal(t, string(serialize(parameters)), "\ufffd\ufffd\ufffd=\ufffd\ufffdc\u003e") - //assert.Equal(t, string(vector2PlaceholderGroupBytes(parameters)), "vector2PlaceholderGroupBytes") // todo + // assert.Equal(t, string(serialize(parameters)), "\ufffd\ufffd\ufffd=\ufffd\ufffdc\u003e") + // assert.Equal(t, string(vector2PlaceholderGroupBytes(parameters)), "vector2PlaceholderGroupBytes") // todo assert.Equal(t, string(serialize(parameters)), "\xa4\x8d\xe3=\xa4\x8dc>") assert.Equal(t, string(vector2PlaceholderGroupBytes(parameters)), "\n\x10\n\x02$0\x10e\x1a\b\xa4\x8d\xe3=\xa4\x8dc>") // todo } @@ -373,7 +380,6 @@ func compareRow64(m1 map[string]interface{}, m2 map[string]interface{}) bool { } } return true - } func compareRow(m1 map[string]interface{}, m2 map[string]interface{}) bool { @@ -413,7 +419,6 @@ func compareRow(m1 map[string]interface{}, m2 map[string]interface{}) bool { } } return true - } type CompareFunc func(map[string]interface{}, map[string]interface{}) bool @@ -785,11 +790,13 @@ func TestBuildQueryResps(t *testing.T) { assert.Equal(t, compareRows(rows, exceptRows, compareRow), true) } - dataTypes := []schemapb.DataType{schemapb.DataType_FloatVector, schemapb.DataType_BinaryVector, + dataTypes := []schemapb.DataType{ + schemapb.DataType_FloatVector, schemapb.DataType_BinaryVector, schemapb.DataType_Bool, schemapb.DataType_Int8, schemapb.DataType_Int16, schemapb.DataType_Int32, schemapb.DataType_Float, schemapb.DataType_Double, schemapb.DataType_String, schemapb.DataType_VarChar, - schemapb.DataType_JSON, schemapb.DataType_Array} + schemapb.DataType_JSON, schemapb.DataType_Array, + } for _, dateType := range dataTypes { _, err := buildQueryResp(int64(0), outputFields, newFieldData([]*schemapb.FieldData{}, dateType), generateIds(3), []float32{0.01, 0.04, 0.09}) assert.Equal(t, err, nil) diff --git a/internal/distributed/proxy/httpserver/wrap_request.go b/internal/distributed/proxy/httpserver/wrap_request.go index b9d463731c..a8f5eec8b9 100644 --- a/internal/distributed/proxy/httpserver/wrap_request.go +++ b/internal/distributed/proxy/httpserver/wrap_request.go @@ -7,8 +7,8 @@ import ( "math" "github.com/cockroachdb/errors" - "github.com/golang/protobuf/proto" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" diff --git a/internal/distributed/proxy/httpserver/wrap_request_test.go b/internal/distributed/proxy/httpserver/wrap_request_test.go index 92cde8d4d9..defddf831a 100644 --- 
a/internal/distributed/proxy/httpserver/wrap_request_test.go +++ b/internal/distributed/proxy/httpserver/wrap_request_test.go @@ -4,9 +4,10 @@ import ( "encoding/json" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/stretchr/testify/assert" ) func TestFieldData_AsSchemapb(t *testing.T) { diff --git a/internal/distributed/proxy/httpserver/wrapper.go b/internal/distributed/proxy/httpserver/wrapper.go index 267d719abc..69bf66b219 100644 --- a/internal/distributed/proxy/httpserver/wrapper.go +++ b/internal/distributed/proxy/httpserver/wrapper.go @@ -5,15 +5,13 @@ import ( "net/http" "github.com/cockroachdb/errors" - "github.com/gin-gonic/gin" "github.com/gin-gonic/gin/binding" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) -var ( - errBadRequest = errors.New("bad request") -) +var errBadRequest = errors.New("bad request") // handlerFunc handles http request with gin context type handlerFunc func(c *gin.Context) (interface{}, error) diff --git a/internal/distributed/proxy/httpserver/wrapper_test.go b/internal/distributed/proxy/httpserver/wrapper_test.go index f62e040ea9..6e591ab3d0 100644 --- a/internal/distributed/proxy/httpserver/wrapper_test.go +++ b/internal/distributed/proxy/httpserver/wrapper_test.go @@ -6,13 +6,12 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/gin-gonic/gin" "github.com/stretchr/testify/assert" ) func TestWrapHandler(t *testing.T) { - var testWrapFunc = func(c *gin.Context) (interface{}, error) { + testWrapFunc := func(c *gin.Context) (interface{}, error) { Case := c.Param("case") switch Case { case "0": @@ -55,5 +54,4 @@ func TestWrapHandler(t *testing.T) { testEngine.ServeHTTP(w, req) assert.Equal(t, http.StatusInternalServerError, w.Code) }) - } diff --git a/internal/distributed/proxy/service.go b/internal/distributed/proxy/service.go index ee37d7859a..b43b3e33f0 100644 --- a/internal/distributed/proxy/service.go +++ b/internal/distributed/proxy/service.go @@ -30,21 +30,21 @@ import ( "sync" "time" - "google.golang.org/grpc/credentials" - - management "github.com/milvus-io/milvus/internal/http" - "github.com/milvus-io/milvus/internal/proxy/accesslog" - "github.com/milvus-io/milvus/internal/util/componentutil" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/pkg/tracer" - "github.com/milvus-io/milvus/pkg/util/interceptor" - "github.com/milvus-io/milvus/pkg/util/metricsinfo" - "github.com/soheilhy/cmux" - "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" - "github.com/gin-gonic/gin" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" grpc_auth "github.com/grpc-ecosystem/go-grpc-middleware/auth" + "github.com/soheilhy/cmux" + clientv3 "go.etcd.io/etcd/client/v3" + "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" + "go.uber.org/atomic" + "go.uber.org/zap" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/health/grpc_health_v1" + "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/status" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/federpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" @@ -52,24 +52,23 @@ import ( "github.com/milvus-io/milvus/internal/distributed/proxy/httpserver" qcc "github.com/milvus-io/milvus/internal/distributed/querycoord/client" rcc 
"github.com/milvus-io/milvus/internal/distributed/rootcoord/client" + management "github.com/milvus-io/milvus/internal/http" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/milvus-io/milvus/internal/proxy" + "github.com/milvus-io/milvus/internal/proxy/accesslog" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/componentutil" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/milvus-io/milvus/pkg/util/logutil" "github.com/milvus-io/milvus/pkg/util/merr" + "github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/paramtable" - clientv3 "go.etcd.io/etcd/client/v3" - "go.uber.org/atomic" - "go.uber.org/zap" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/health/grpc_health_v1" - "google.golang.org/grpc/keepalive" - "google.golang.org/grpc/status" ) var ( @@ -103,7 +102,6 @@ type Server struct { // NewServer create a Proxy server. func NewServer(ctx context.Context, factory dependency.Factory) (*Server, error) { - var err error server := &Server{ ctx: ctx, @@ -192,12 +190,12 @@ func (s *Server) startExternalRPCServer(grpcExternalPort int, errChan chan error func (s *Server) startExternalGrpc(grpcPort int, errChan chan error) { defer s.wg.Done() Params := ¶mtable.Get().ProxyGrpcServerCfg - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } @@ -287,12 +285,12 @@ func (s *Server) startExternalGrpc(grpcPort int, errChan chan error) { func (s *Server) startInternalGrpc(grpcPort int, errChan chan error) { defer s.wg.Done() Params := ¶mtable.Get().ProxyGrpcServerCfg - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } @@ -491,7 +489,6 @@ func (s *Server) init() error { } } } - } { s.startExternalRPCServer(Params.Port.GetAsInt(), errChan) @@ -857,7 +854,6 @@ func (s *Server) GetPersistentSegmentInfo(ctx context.Context, request *milvuspb // GetQuerySegmentInfo notifies Proxy to get query segment info. 
func (s *Server) GetQuerySegmentInfo(ctx context.Context, request *milvuspb.GetQuerySegmentInfoRequest) (*milvuspb.GetQuerySegmentInfoResponse, error) { return s.proxy.GetQuerySegmentInfo(ctx, request) - } func (s *Server) Dummy(ctx context.Context, request *milvuspb.DummyRequest) (*milvuspb.DummyResponse, error) { diff --git a/internal/distributed/proxy/service_test.go b/internal/distributed/proxy/service_test.go index 59a3769991..fe1a83f2ce 100644 --- a/internal/distributed/proxy/service_test.go +++ b/internal/distributed/proxy/service_test.go @@ -28,10 +28,6 @@ import ( "time" "github.com/cockroachdb/errors" - milvusmock "github.com/milvus-io/milvus/internal/util/mock" - - "github.com/milvus-io/milvus/internal/proto/indexpb" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" clientv3 "go.etcd.io/etcd/client/v3" @@ -46,11 +42,13 @@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/mocks" "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/internal/proxy" "github.com/milvus-io/milvus/internal/types" + milvusmock "github.com/milvus-io/milvus/internal/util/mock" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/paramtable" @@ -730,19 +728,15 @@ func (m *MockProxy) GetProxyMetrics(ctx context.Context, request *milvuspb.GetMe } func (m *MockProxy) SetRootCoordClient(rootCoord types.RootCoord) { - } func (m *MockProxy) SetDataCoordClient(dataCoord types.DataCoord) { - } func (m *MockProxy) SetQueryCoordClient(queryCoord types.QueryCoord) { - } func (m *MockProxy) SetQueryNodeCreator(func(ctx context.Context, addr string, nodeID int64) (types.QueryNode, error)) { - } func (m *MockProxy) GetRateLimiter() (types.Limiter, error) { @@ -750,7 +744,6 @@ func (m *MockProxy) GetRateLimiter() (types.Limiter, error) { } func (m *MockProxy) UpdateStateCode(stateCode commonpb.StateCode) { - } func (m *MockProxy) SetAddress(address string) { @@ -997,9 +990,11 @@ func waitForGrpcReady(opt *WaitOption) { } // TODO: should tls-related configurations be hard code here? -var waitDuration = time.Second * 1 -var clientPemPath = "../../../configs/cert/client.pem" -var clientKeyPath = "../../../configs/cert/client.key" +var ( + waitDuration = time.Second * 1 + clientPemPath = "../../../configs/cert/client.pem" + clientKeyPath = "../../../configs/cert/client.key" +) // waitForServerReady wait for internal grpc service and external service to be ready, according to the params. 
func waitForServerReady() { @@ -1461,7 +1456,7 @@ func TestServer_Watch(t *testing.T) { watchServer := milvusmock.NewGrpcHealthWatchServer() resultChan := watchServer.Chan() req := &grpc_health_v1.HealthCheckRequest{Service: ""} - //var ret *grpc_health_v1.HealthCheckResponse + // var ret *grpc_health_v1.HealthCheckResponse err := server.Watch(req, watchServer) ret := <-resultChan diff --git a/internal/distributed/querycoord/client/client.go b/internal/distributed/querycoord/client/client.go index 655445c5ae..909b6324e3 100644 --- a/internal/distributed/querycoord/client/client.go +++ b/internal/distributed/querycoord/client/client.go @@ -20,6 +20,10 @@ import ( "context" "fmt" + clientv3 "go.etcd.io/etcd/client/v3" + "go.uber.org/zap" + "google.golang.org/grpc" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/internalpb" @@ -31,9 +35,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - clientv3 "go.etcd.io/etcd/client/v3" - "go.uber.org/zap" - "google.golang.org/grpc" ) var Params *paramtable.ComponentParam = paramtable.Get() diff --git a/internal/distributed/querycoord/client/client_test.go b/internal/distributed/querycoord/client/client_test.go index ead3d65eaa..4e3dcca5e8 100644 --- a/internal/distributed/querycoord/client/client_test.go +++ b/internal/distributed/querycoord/client/client_test.go @@ -25,17 +25,16 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/mock" + "github.com/stretchr/testify/assert" "go.uber.org/zap" - - "github.com/milvus-io/milvus/internal/proto/querypb" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/paramtable" "google.golang.org/grpc" + "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/proxy" + "github.com/milvus-io/milvus/internal/util/mock" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/etcd" - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestMain(m *testing.M) { diff --git a/internal/distributed/querycoord/service.go b/internal/distributed/querycoord/service.go index ae4d8d4047..d21b14e21e 100644 --- a/internal/distributed/querycoord/service.go +++ b/internal/distributed/querycoord/service.go @@ -24,10 +24,6 @@ import ( "time" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" - "github.com/milvus-io/milvus/internal/util/componentutil" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/pkg/tracer" - "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/tikv/client-go/v2/txnkv" clientv3 "go.etcd.io/etcd/client/v3" "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" @@ -44,10 +40,14 @@ import ( "github.com/milvus-io/milvus/internal/proto/querypb" qc "github.com/milvus-io/milvus/internal/querycoordv2" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/componentutil" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" "github.com/milvus-io/milvus/pkg/util" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/milvus-io/milvus/pkg/util/logutil" 
"github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/tikv" @@ -95,7 +95,6 @@ func NewServer(ctx context.Context, factory dependency.Factory) (*Server, error) // Run initializes and starts QueryCoord's grpc service. func (s *Server) Run() error { - if err := s.init(); err != nil { return err } @@ -219,12 +218,12 @@ func (s *Server) init() error { func (s *Server) startGrpcLoop(grpcPort int) { defer s.wg.Done() Params := ¶mtable.Get().QueryCoordGrpcServerCfg - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } diff --git a/internal/distributed/querycoord/service_test.go b/internal/distributed/querycoord/service_test.go index 156d2c7d40..4251ecf1f5 100644 --- a/internal/distributed/querycoord/service_test.go +++ b/internal/distributed/querycoord/service_test.go @@ -22,20 +22,19 @@ import ( "testing" "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus/internal/mocks" - "github.com/milvus-io/milvus/internal/types" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/milvus-io/milvus/pkg/util/paramtable" - - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus/internal/proto/internalpb" - "github.com/milvus-io/milvus/internal/proto/querypb" - "github.com/milvus-io/milvus/pkg/util/tikv" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/tikv/client-go/v2/txnkv" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + "github.com/milvus-io/milvus/internal/mocks" + "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/pkg/util/merr" + "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/tikv" ) // ///////////////////////////////////////////////////////////////////////////////////////////////////////////////////// @@ -418,7 +417,6 @@ func TestServer_Run3(t *testing.T) { err = server.Stop() assert.NoError(t, err) } - } func TestServer_Run4(t *testing.T) { diff --git a/internal/distributed/querynode/client/client.go b/internal/distributed/querynode/client/client.go index e8853db7d8..47335dd996 100644 --- a/internal/distributed/querynode/client/client.go +++ b/internal/distributed/querynode/client/client.go @@ -20,13 +20,13 @@ import ( "context" "fmt" - "github.com/milvus-io/milvus/internal/util/grpcclient" "google.golang.org/grpc" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/util/grpcclient" "github.com/milvus-io/milvus/internal/util/streamrpc" "github.com/milvus-io/milvus/pkg/util/commonpbutil" "github.com/milvus-io/milvus/pkg/util/funcutil" diff --git 
a/internal/distributed/querynode/client/client_test.go b/internal/distributed/querynode/client/client_test.go index 26381475d0..8d56ae9251 100644 --- a/internal/distributed/querynode/client/client_test.go +++ b/internal/distributed/querynode/client/client_test.go @@ -21,13 +21,13 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/mock" - "github.com/milvus-io/milvus/internal/util/streamrpc" - - "github.com/milvus-io/milvus/internal/proto/querypb" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" "google.golang.org/grpc" + + "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/util/mock" + "github.com/milvus-io/milvus/internal/util/streamrpc" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func Test_NewClient(t *testing.T) { @@ -115,7 +115,7 @@ func Test_NewClient(t *testing.T) { r20, err := client.SearchSegments(ctx, nil) retCheck(retNotNil, r20, err) - //stream rpc + // stream rpc streamer1 := streamrpc.NewGrpcQueryStreamer() err = client.QueryStream(ctx, nil, streamer1) retCheck(retNotNil, streamer1.AsClient(), err) diff --git a/internal/distributed/querynode/service.go b/internal/distributed/querynode/service.go index 451e05798a..46529afc62 100644 --- a/internal/distributed/querynode/service.go +++ b/internal/distributed/querynode/service.go @@ -25,10 +25,6 @@ import ( "time" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/internal/util/streamrpc" - "github.com/milvus-io/milvus/pkg/tracer" - "github.com/milvus-io/milvus/pkg/util/interceptor" clientv3 "go.etcd.io/etcd/client/v3" "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" "go.uber.org/atomic" @@ -42,9 +38,13 @@ import ( "github.com/milvus-io/milvus/internal/proto/querypb" qn "github.com/milvus-io/milvus/internal/querynodev2" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/dependency" + "github.com/milvus-io/milvus/internal/util/streamrpc" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/milvus-io/milvus/pkg/util/logutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/retry" @@ -149,12 +149,12 @@ func (s *Server) start() error { func (s *Server) startGrpcLoop(grpcPort int) { defer s.wg.Done() Params := &paramtable.Get().QueryNodeGrpcServerCfg - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } @@ -215,12 +215,10 @@ func (s *Server) startGrpcLoop(grpcPort int) { log.Debug("QueryNode Start Grpc Failed!!!!") s.grpcErrChan <- err } - } // Run initializes and starts QueryNode's grpc service.
func (s *Server) Run() error { - if err := s.init(); err != nil { return err } diff --git a/internal/distributed/querynode/service_test.go b/internal/distributed/querynode/service_test.go index 4260091b21..57fac8a2dc 100644 --- a/internal/distributed/querynode/service_test.go +++ b/internal/distributed/querynode/service_test.go @@ -22,20 +22,18 @@ import ( "testing" "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus/internal/mocks" - "github.com/milvus-io/milvus/internal/types" - streamMocks "github.com/milvus-io/milvus/internal/util/streamrpc/mocks" - "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" clientv3 "go.etcd.io/etcd/client/v3" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + "github.com/milvus-io/milvus/internal/mocks" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/types" + streamMocks "github.com/milvus-io/milvus/internal/util/streamrpc/mocks" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) type MockRootCoord struct { @@ -105,7 +103,8 @@ func Test_NewServer(t *testing.T) { mockQN.EXPECT().GetComponentStates(mock.Anything).Return(&milvuspb.ComponentStates{ State: &milvuspb.ComponentInfo{ StateCode: commonpb.StateCode_Healthy, - }}, nil) + }, + }, nil) req := &milvuspb.GetComponentStatesRequest{} states, err := server.GetComponentStates(ctx, req) assert.NoError(t, err) @@ -178,7 +177,8 @@ func Test_NewServer(t *testing.T) { t.Run("GetSegmentInfo", func(t *testing.T) { mockQN.EXPECT().GetSegmentInfo(mock.Anything, mock.Anything).Return(&querypb.GetSegmentInfoResponse{ - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil) + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil) req := &querypb.GetSegmentInfoRequest{} resp, err := server.GetSegmentInfo(ctx, req) assert.NoError(t, err) @@ -198,7 +198,8 @@ func Test_NewServer(t *testing.T) { t.Run("Search", func(t *testing.T) { mockQN.EXPECT().Search(mock.Anything, mock.Anything).Return(&internalpb.SearchResults{ - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil) + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil) req := &querypb.SearchRequest{} resp, err := server.Search(ctx, req) assert.NoError(t, err) @@ -207,7 +208,8 @@ func Test_NewServer(t *testing.T) { t.Run("SearchSegments", func(t *testing.T) { mockQN.EXPECT().SearchSegments(mock.Anything, mock.Anything).Return(&internalpb.SearchResults{ - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil) + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil) req := &querypb.SearchRequest{} resp, err := server.SearchSegments(ctx, req) assert.NoError(t, err) @@ -216,7 +218,8 @@ func Test_NewServer(t *testing.T) { t.Run("Query", func(t *testing.T) { mockQN.EXPECT().Query(mock.Anything, mock.Anything).Return(&internalpb.RetrieveResults{ - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil) + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil) req := &querypb.QueryRequest{} resp, err := server.Query(ctx, req) assert.NoError(t, err) @@ -235,7 +238,8 @@ func Test_NewServer(t *testing.T) { t.Run("QuerySegments", func(t *testing.T) { mockQN.EXPECT().QuerySegments(mock.Anything, mock.Anything).Return(&internalpb.RetrieveResults{ - Status: &commonpb.Status{ErrorCode: 
commonpb.ErrorCode_Success}}, nil) + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil) req := &querypb.QueryRequest{} resp, err := server.QuerySegments(ctx, req) assert.NoError(t, err) diff --git a/internal/distributed/rootcoord/client/client.go b/internal/distributed/rootcoord/client/client.go index 72b6f6173e..75abafb77c 100644 --- a/internal/distributed/rootcoord/client/client.go +++ b/internal/distributed/rootcoord/client/client.go @@ -20,6 +20,12 @@ import ( "context" "fmt" + clientv3 "go.etcd.io/etcd/client/v3" + "go.uber.org/zap" + "google.golang.org/grpc" + grpcCodes "google.golang.org/grpc/codes" + grpcStatus "google.golang.org/grpc/status" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/internalpb" @@ -32,11 +38,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - clientv3 "go.etcd.io/etcd/client/v3" - "go.uber.org/zap" - "google.golang.org/grpc" - grpcCodes "google.golang.org/grpc/codes" - grpcStatus "google.golang.org/grpc/status" ) var Params *paramtable.ComponentParam = paramtable.Get() diff --git a/internal/distributed/rootcoord/client/client_test.go b/internal/distributed/rootcoord/client/client_test.go index 3d339a4356..999638088c 100644 --- a/internal/distributed/rootcoord/client/client_test.go +++ b/internal/distributed/rootcoord/client/client_test.go @@ -25,17 +25,16 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/mock" + "github.com/stretchr/testify/assert" "go.uber.org/zap" - - "github.com/milvus-io/milvus/internal/proto/rootcoordpb" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/paramtable" "google.golang.org/grpc" + "github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/internal/proxy" + "github.com/milvus-io/milvus/internal/util/mock" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/etcd" - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestMain(m *testing.M) { diff --git a/internal/distributed/rootcoord/service.go b/internal/distributed/rootcoord/service.go index 3cd8d7a22f..5032d68299 100644 --- a/internal/distributed/rootcoord/service.go +++ b/internal/distributed/rootcoord/service.go @@ -24,9 +24,6 @@ import ( "time" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/pkg/tracer" - "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/tikv/client-go/v2/txnkv" clientv3 "go.etcd.io/etcd/client/v3" "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" @@ -37,21 +34,23 @@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + dcc "github.com/milvus-io/milvus/internal/distributed/datacoord/client" + qcc "github.com/milvus-io/milvus/internal/distributed/querycoord/client" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/internal/rootcoord" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/log" + 
"github.com/milvus-io/milvus/pkg/tracer" "github.com/milvus-io/milvus/pkg/util" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/interceptor" "github.com/milvus-io/milvus/pkg/util/logutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/tikv" - - dcc "github.com/milvus-io/milvus/internal/distributed/datacoord/client" - qcc "github.com/milvus-io/milvus/internal/distributed/querycoord/client" ) // Server grpc wrapper @@ -246,12 +245,12 @@ func (s *Server) startGrpc(port int) error { func (s *Server) startGrpcLoop(port int) { defer s.wg.Done() Params := ¶mtable.Get().RootCoordGrpcServerCfg - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } diff --git a/internal/distributed/rootcoord/service_test.go b/internal/distributed/rootcoord/service_test.go index dfb2b407f9..c91c4d1c04 100644 --- a/internal/distributed/rootcoord/service_test.go +++ b/internal/distributed/rootcoord/service_test.go @@ -25,17 +25,15 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/sessionutil" - + "github.com/stretchr/testify/assert" "github.com/tikv/client-go/v2/txnkv" clientv3 "go.etcd.io/etcd/client/v3" - "github.com/stretchr/testify/assert" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/rootcoord" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/sessionutil" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" @@ -118,9 +116,11 @@ type mockDataCoord struct { func (m *mockDataCoord) Init() error { return m.initErr } + func (m *mockDataCoord) Start() error { return m.startErr } + func (m *mockDataCoord) GetComponentStates(ctx context.Context) (*milvuspb.ComponentStates, error) { return &milvuspb.ComponentStates{ State: &milvuspb.ComponentInfo{ @@ -134,6 +134,7 @@ func (m *mockDataCoord) GetComponentStates(ctx context.Context) (*milvuspb.Compo }, }, nil } + func (m *mockDataCoord) Stop() error { return fmt.Errorf("stop error") } diff --git a/internal/http/healthz/healthz_handler.go b/internal/http/healthz/healthz_handler.go index f0de6c3d7d..7509710851 100644 --- a/internal/http/healthz/healthz_handler.go +++ b/internal/http/healthz/healthz_handler.go @@ -22,10 +22,10 @@ import ( "fmt" "net/http" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/pkg/log" ) diff --git a/internal/http/server_test.go b/internal/http/server_test.go index 357f1ef0b3..353f62a57b 100644 --- a/internal/http/server_test.go +++ b/internal/http/server_test.go @@ -25,12 +25,12 @@ import ( "net/http/httptest" "testing" - 
"github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus/internal/http/healthz" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus/internal/http/healthz" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/paramtable" ) @@ -54,7 +54,6 @@ type HTTPServerTestSuite struct { func (suite *HTTPServerTestSuite) SetupSuite() { suite.server = httptest.NewServer(nil) registerDefaults() - } func (suite *HTTPServerTestSuite) TearDownSuite() { diff --git a/internal/indexnode/chunkmgr_mock.go b/internal/indexnode/chunkmgr_mock.go index ee554fe796..f911372b3e 100644 --- a/internal/indexnode/chunkmgr_mock.go +++ b/internal/indexnode/chunkmgr_mock.go @@ -72,9 +72,7 @@ var ( } ) -var ( - mockChunkMgrIns = &mockChunkmgr{} -) +var mockChunkMgrIns = &mockChunkmgr{} type mockStorageFactory struct{} diff --git a/internal/indexnode/indexnode.go b/internal/indexnode/indexnode.go index 64e3677231..37d99dd9bd 100644 --- a/internal/indexnode/indexnode.go +++ b/internal/indexnode/indexnode.go @@ -40,7 +40,6 @@ import ( "unsafe" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/sessionutil" clientv3 "go.etcd.io/etcd/client/v3" "go.uber.org/zap" @@ -50,6 +49,7 @@ import ( "github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/internal/util/initcore" + "github.com/milvus-io/milvus/internal/util/sessionutil" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" @@ -127,7 +127,7 @@ func (i *IndexNode) Register() error { i.session.Register() metrics.NumNodes.WithLabelValues(fmt.Sprint(paramtable.GetNodeID()), typeutil.IndexNodeRole).Inc() - //start liveness check + // start liveness check i.session.LivenessCheck(i.loopCtx, func() { log.Error("Index Node disconnected from etcd, process will exit", zap.Int64("Server Id", i.session.ServerID)) if err := i.Stop(); err != nil { @@ -158,7 +158,7 @@ func (i *IndexNode) initSegcore() { cIndexSliceSize := C.int64_t(Params.CommonCfg.IndexSliceSize.GetAsInt64()) C.InitIndexSliceSize(cIndexSliceSize) - //set up thread pool for different priorities + // set up thread pool for different priorities cHighPriorityThreadCoreCoefficient := C.int64_t(paramtable.Get().CommonCfg.HighPriorityThreadCoreCoefficient.GetAsInt64()) C.InitHighPriorityThreadCoreCoefficient(cHighPriorityThreadCoreCoefficient) cMiddlePriorityThreadCoreCoefficient := C.int64_t(paramtable.Get().CommonCfg.MiddlePriorityThreadCoreCoefficient.GetAsInt64()) diff --git a/internal/indexnode/indexnode_service.go b/internal/indexnode/indexnode_service.go index 3b94c10de7..a2d71e7e89 100644 --- a/internal/indexnode/indexnode_service.go +++ b/internal/indexnode/indexnode_service.go @@ -74,7 +74,8 @@ func (i *IndexNode) CreateJob(ctx context.Context, req *indexpb.CreateJobRequest taskCtx, taskCancel := context.WithCancel(i.loopCtx) if oldInfo := i.loadOrStoreTask(req.GetClusterID(), req.GetBuildID(), &taskInfo{ cancel: taskCancel, - state: commonpb.IndexState_InProgress}); oldInfo != nil { + state: commonpb.IndexState_InProgress, + }); oldInfo != nil { log.Ctx(ctx).Warn("duplicated index build task", zap.String("clusterID", req.GetClusterID()), zap.Int64("buildID", req.GetBuildID())) metrics.IndexNodeBuildIndexTaskCounter.WithLabelValues(fmt.Sprint(paramtable.GetNodeID()), 
metrics.FailLabel).Inc() return &commonpb.Status{ diff --git a/internal/indexnode/indexnode_service_test.go b/internal/indexnode/indexnode_service_test.go index a8bd18dc33..26a632f004 100644 --- a/internal/indexnode/indexnode_service_test.go +++ b/internal/indexnode/indexnode_service_test.go @@ -75,9 +75,7 @@ func TestGetMetrics(t *testing.T) { } func TestGetMetricsError(t *testing.T) { - var ( - ctx = context.TODO() - ) + ctx := context.TODO() in, err := NewMockIndexNodeComponent(ctx) assert.NoError(t, err) diff --git a/internal/indexnode/indexnode_test.go b/internal/indexnode/indexnode_test.go index 14c4afd123..e813564f39 100644 --- a/internal/indexnode/indexnode_test.go +++ b/internal/indexnode/indexnode_test.go @@ -22,9 +22,10 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" ) //func TestRegister(t *testing.T) { diff --git a/internal/indexnode/task.go b/internal/indexnode/task.go index 1c26eab006..8ab483c932 100644 --- a/internal/indexnode/task.go +++ b/internal/indexnode/task.go @@ -356,7 +356,6 @@ func (it *indexBuildTask) BuildIndex(ctx context.Context) error { } func (it *indexBuildTask) SaveIndexFiles(ctx context.Context) error { - gcIndex := func() { if err := it.index.Delete(); err != nil { log.Ctx(ctx).Error("IndexNode indexBuildTask Execute CIndexDelete failed", zap.Error(err)) diff --git a/internal/indexnode/task_scheduler.go b/internal/indexnode/task_scheduler.go index 0a42493461..4e712b5245 100644 --- a/internal/indexnode/task_scheduler.go +++ b/internal/indexnode/task_scheduler.go @@ -24,7 +24,6 @@ import ( "sync" "github.com/cockroachdb/errors" - "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" diff --git a/internal/indexnode/task_scheduler_test.go b/internal/indexnode/task_scheduler_test.go index c97aab0d28..2393fd2b7e 100644 --- a/internal/indexnode/task_scheduler_test.go +++ b/internal/indexnode/task_scheduler_test.go @@ -7,9 +7,10 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" ) type fakeTaskState int diff --git a/internal/indexnode/taskinfo_ops.go b/internal/indexnode/taskinfo_ops.go index 4f17edb77f..553dea2b51 100644 --- a/internal/indexnode/taskinfo_ops.go +++ b/internal/indexnode/taskinfo_ops.go @@ -5,11 +5,12 @@ import ( "time" "github.com/golang/protobuf/proto" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) func (i *IndexNode) loadOrStoreTask(ClusterID string, buildID UniqueID, info *taskInfo) *taskInfo { diff --git a/internal/kv/etcd/embed_etcd_config_test.go b/internal/kv/etcd/embed_etcd_config_test.go index f75aff241f..1a121f6a09 100644 --- a/internal/kv/etcd/embed_etcd_config_test.go +++ b/internal/kv/etcd/embed_etcd_config_test.go @@ -37,7 +37,7 @@ func TestEtcdConfigLoad(te *testing.T) { te.Setenv("etcd.data.dir", "etcd.test.data.dir") param.Init(paramtable.NewBaseTable()) - //clean up data + // clean up data defer func() { os.RemoveAll("etcd.test.data.dir") }() diff --git a/internal/kv/etcd/embed_etcd_kv.go b/internal/kv/etcd/embed_etcd_kv.go index 5cfec20c61..15155c42d8 100644 --- a/internal/kv/etcd/embed_etcd_kv.go +++ 
b/internal/kv/etcd/embed_etcd_kv.go @@ -60,7 +60,7 @@ func NewEmbededEtcdKV(cfg *embed.Config, rootPath string) (*EmbedEtcdKV, error) etcd: e, } - //wait until embed etcd is ready + // wait until embed etcd is ready select { case <-e.Server.ReadyNotify(): log.Info("Embedded etcd is ready!") @@ -77,7 +77,6 @@ func (kv *EmbedEtcdKV) Close() { kv.client.Close() kv.etcd.Close() }) - } // GetPath returns the full path by given key diff --git a/internal/kv/etcd/embed_etcd_kv_test.go b/internal/kv/etcd/embed_etcd_kv_test.go index 03205a992c..5d6c599d71 100644 --- a/internal/kv/etcd/embed_etcd_kv_test.go +++ b/internal/kv/etcd/embed_etcd_kv_test.go @@ -98,17 +98,20 @@ func TestEmbedEtcd(te *testing.T) { metaKv.GetPath("test1"), metaKv.GetPath("test2"), metaKv.GetPath("test1/a"), - metaKv.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + metaKv.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1", []string{ metaKv.GetPath("test1"), metaKv.GetPath("test1/a"), - metaKv.GetPath("test1/b")}, []string{"value1", "value_a", "value_b"}, nil}, + metaKv.GetPath("test1/b"), + }, []string{"value1", "value_a", "value_b"}, nil}, {"test2", []string{metaKv.GetPath("test2")}, []string{"value2"}, nil}, {"", []string{ metaKv.GetPath("test1"), metaKv.GetPath("test2"), metaKv.GetPath("test1/a"), - metaKv.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + metaKv.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1/a", []string{metaKv.GetPath("test1/a")}, []string{"value_a"}, nil}, {"a", []string{}, []string{}, nil}, {"root", []string{}, []string{}, nil}, @@ -203,17 +206,20 @@ func TestEmbedEtcd(te *testing.T) { metaKv.GetPath("test1"), metaKv.GetPath("test2"), metaKv.GetPath("test1/a"), - metaKv.GetPath("test1/b")}, [][]byte{[]byte("value1"), []byte("value2"), []byte("value_a"), []byte("value_b")}, nil}, + metaKv.GetPath("test1/b"), + }, [][]byte{[]byte("value1"), []byte("value2"), []byte("value_a"), []byte("value_b")}, nil}, {"test1", []string{ metaKv.GetPath("test1"), metaKv.GetPath("test1/a"), - metaKv.GetPath("test1/b")}, [][]byte{[]byte("value1"), []byte("value_a"), []byte("value_b")}, nil}, + metaKv.GetPath("test1/b"), + }, [][]byte{[]byte("value1"), []byte("value_a"), []byte("value_b")}, nil}, {"test2", []string{metaKv.GetPath("test2")}, [][]byte{[]byte("value2")}, nil}, {"", []string{ metaKv.GetPath("test1"), metaKv.GetPath("test2"), metaKv.GetPath("test1/a"), - metaKv.GetPath("test1/b")}, [][]byte{[]byte("value1"), []byte("value2"), []byte("value_a"), []byte("value_b")}, nil}, + metaKv.GetPath("test1/b"), + }, [][]byte{[]byte("value1"), []byte("value2"), []byte("value_a"), []byte("value_b")}, nil}, {"test1/a", []string{metaKv.GetPath("test1/a")}, [][]byte{[]byte("value_a")}, nil}, {"a", []string{}, [][]byte{}, nil}, {"root", []string{}, [][]byte{}, nil}, @@ -300,7 +306,6 @@ func TestEmbedEtcd(te *testing.T) { assert.ElementsMatch(t, test.expectedValues, values) assert.NotZero(t, revision) } - }) te.Run("etcdKV MultiSaveAndMultiLoad", func(t *testing.T) { diff --git a/internal/kv/etcd/embed_etcd_restart_test.go b/internal/kv/etcd/embed_etcd_restart_test.go index 85d918d8e5..97fa8600a3 100644 --- a/internal/kv/etcd/embed_etcd_restart_test.go +++ b/internal/kv/etcd/embed_etcd_restart_test.go @@ -36,7 +36,7 @@ func TestEtcdRestartLoad(te *testing.T) { param.Init(paramtable.NewBaseTable()) param.Save("etcd.config.path", "../../../configs/advanced/etcd.yaml") 
param.Save("etcd.data.dir", etcdDataDir) - //clean up data + // clean up data defer func() { err := os.RemoveAll(etcdDataDir) assert.NoError(te, err) @@ -79,7 +79,7 @@ func TestEtcdRestartLoad(te *testing.T) { embed := metaKv.(*embed_etcd_kv.EmbedEtcdKV) embed.Close() - //restart and check test result + // restart and check test result metaKv, _ = embed_etcd_kv.NewMetaKvFactory(rootPath, ¶m.EtcdCfg) for _, test := range saveAndLoadTests { diff --git a/internal/kv/etcd/etcd_kv_test.go b/internal/kv/etcd/etcd_kv_test.go index 7cabcb8017..300568c147 100644 --- a/internal/kv/etcd/etcd_kv_test.go +++ b/internal/kv/etcd/etcd_kv_test.go @@ -108,17 +108,20 @@ func TestEtcdKV_Load(te *testing.T) { etcdKV.GetPath("test1"), etcdKV.GetPath("test2"), etcdKV.GetPath("test1/a"), - etcdKV.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + etcdKV.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1", []string{ etcdKV.GetPath("test1"), etcdKV.GetPath("test1/a"), - etcdKV.GetPath("test1/b")}, []string{"value1", "value_a", "value_b"}, nil}, + etcdKV.GetPath("test1/b"), + }, []string{"value1", "value_a", "value_b"}, nil}, {"test2", []string{etcdKV.GetPath("test2")}, []string{"value2"}, nil}, {"", []string{ etcdKV.GetPath("test1"), etcdKV.GetPath("test2"), etcdKV.GetPath("test1/a"), - etcdKV.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + etcdKV.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1/a", []string{etcdKV.GetPath("test1/a")}, []string{"value_a"}, nil}, {"a", []string{}, []string{}, nil}, {"root", []string{}, []string{}, nil}, @@ -211,17 +214,20 @@ func TestEtcdKV_Load(te *testing.T) { etcdKV.GetPath("test1"), etcdKV.GetPath("test2"), etcdKV.GetPath("test1/a"), - etcdKV.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + etcdKV.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1", []string{ etcdKV.GetPath("test1"), etcdKV.GetPath("test1/a"), - etcdKV.GetPath("test1/b")}, []string{"value1", "value_a", "value_b"}, nil}, + etcdKV.GetPath("test1/b"), + }, []string{"value1", "value_a", "value_b"}, nil}, {"test2", []string{etcdKV.GetPath("test2")}, []string{"value2"}, nil}, {"", []string{ etcdKV.GetPath("test1"), etcdKV.GetPath("test2"), etcdKV.GetPath("test1/a"), - etcdKV.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + etcdKV.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1/a", []string{etcdKV.GetPath("test1/a")}, []string{"value_a"}, nil}, {"a", []string{}, []string{}, nil}, {"root", []string{}, []string{}, nil}, @@ -318,7 +324,6 @@ func TestEtcdKV_Load(te *testing.T) { assert.ElementsMatch(t, test.expectedValues, stringValues) assert.NotZero(t, revision) } - }) te.Run("etcdKV MultiSaveAndMultiLoad", func(t *testing.T) { diff --git a/internal/kv/mem/mem_kv.go b/internal/kv/mem/mem_kv.go index 5a4a7d86ef..1607184c8a 100644 --- a/internal/kv/mem/mem_kv.go +++ b/internal/kv/mem/mem_kv.go @@ -21,6 +21,7 @@ import ( "sync" "github.com/google/btree" + "github.com/milvus-io/milvus/pkg/common" ) diff --git a/internal/kv/rocksdb/rocksdb_kv_test.go b/internal/kv/rocksdb/rocksdb_kv_test.go index aa6c9ca497..2b0dc2507a 100644 --- a/internal/kv/rocksdb/rocksdb_kv_test.go +++ b/internal/kv/rocksdb/rocksdb_kv_test.go @@ -188,7 +188,6 @@ func TestRocksdbKV_Prefix(t *testing.T) { val, err = rocksdbKV.Load("abcd") assert.NoError(t, err) 
assert.Equal(t, val, "123") - } func TestRocksdbKV_Txn(t *testing.T) { diff --git a/internal/kv/tikv/main_test.go b/internal/kv/tikv/main_test.go index a014c72375..78a4fbd4ec 100644 --- a/internal/kv/tikv/main_test.go +++ b/internal/kv/tikv/main_test.go @@ -29,14 +29,17 @@ import ( "github.com/milvus-io/milvus/pkg/util/paramtable" ) -var txnClient *txnkv.Client -var rawClient *rawkv.Client +var ( + txnClient *txnkv.Client + rawClient *rawkv.Client +) // creates a local TiKV Store for testing purpose. func setupLocalTiKV() { setupLocalTxn() setupLocalRaw() } + func setupLocalTxn() { client, cluster, pdClient, err := testutils.NewMockTiKV("", nil) if err != nil { diff --git a/internal/kv/tikv/txn_tikv.go b/internal/kv/tikv/txn_tikv.go index dfa45b25a2..0c95702e38 100644 --- a/internal/kv/tikv/txn_tikv.go +++ b/internal/kv/tikv/txn_tikv.go @@ -81,9 +81,11 @@ func tiTxnSnapshot(txn *txnkv.Client, paginationSize int) *txnsnapshot.KVSnapsho return ss } -var beginTxn = tiTxnBegin -var commitTxn = tiTxnCommit -var getSnapshot = tiTxnSnapshot +var ( + beginTxn = tiTxnBegin + commitTxn = tiTxnCommit + getSnapshot = tiTxnSnapshot +) // implementation assertion var _ kv.MetaKv = (*txnTiKV)(nil) @@ -621,7 +623,6 @@ func (kv *txnTiKV) getTiKVMeta(ctx context.Context, key string) (string, error) if err == tikverr.ErrNotExist { // If key is missing return "", common.NewKeyNotExistError(key) - } else { // If call to tikv fails return "", errors.Wrap(err, fmt.Sprintf("Failed to get value for key %s in getTiKVMeta", key)) diff --git a/internal/kv/tikv/txn_tikv_test.go b/internal/kv/tikv/txn_tikv_test.go index 66b7d20fc3..55c995766a 100644 --- a/internal/kv/tikv/txn_tikv_test.go +++ b/internal/kv/tikv/txn_tikv_test.go @@ -87,17 +87,20 @@ func TestTiKVLoad(te *testing.T) { kv.GetPath("test1"), kv.GetPath("test2"), kv.GetPath("test1/a"), - kv.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + kv.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1", []string{ kv.GetPath("test1"), kv.GetPath("test1/a"), - kv.GetPath("test1/b")}, []string{"value1", "value_a", "value_b"}, nil}, + kv.GetPath("test1/b"), + }, []string{"value1", "value_a", "value_b"}, nil}, {"test2", []string{kv.GetPath("test2")}, []string{"value2"}, nil}, {"", []string{ kv.GetPath("test1"), kv.GetPath("test2"), kv.GetPath("test1/a"), - kv.GetPath("test1/b")}, []string{"value1", "value2", "value_a", "value_b"}, nil}, + kv.GetPath("test1/b"), + }, []string{"value1", "value2", "value_a", "value_b"}, nil}, {"test1/a", []string{kv.GetPath("test1/a")}, []string{"value_a"}, nil}, {"a", []string{}, []string{}, nil}, {"root", []string{}, []string{}, nil}, @@ -553,11 +556,10 @@ func TestEmptyKey(t *testing.T) { val, err = kv.Load("key3") assert.NoError(t, err) assert.Equal(t, val, "") - } func TestScanSize(t *testing.T) { - var scan_size = SnapshotScanSize + scan_size := SnapshotScanSize kv := NewTiKV(txnClient, "/") err := kv.RemoveWithPrefix("") require.NoError(t, err) diff --git a/internal/metastore/kv/datacoord/kv_catalog.go b/internal/metastore/kv/datacoord/kv_catalog.go index 52249431bf..4cdd076807 100644 --- a/internal/metastore/kv/datacoord/kv_catalog.go +++ b/internal/metastore/kv/datacoord/kv_catalog.go @@ -78,7 +78,7 @@ func (kc *Catalog) ListSegments(ctx context.Context) ([]*datapb.SegmentInfo, err }) } - //execute list segment meta + // execute list segment meta executeFn(storage.InsertBinlog, insertLogs) executeFn(storage.DeleteBinlog, deltaLogs) executeFn(storage.StatsBinlog, 
statsLogs) @@ -211,7 +211,8 @@ func (kc *Catalog) listBinlogs(binlogType storage.BinlogType) (map[typeutil.Uniq } func (kc *Catalog) applyBinlogInfo(segments []*datapb.SegmentInfo, insertLogs, deltaLogs, - statsLogs map[typeutil.UniqueID][]*datapb.FieldBinlog) { + statsLogs map[typeutil.UniqueID][]*datapb.FieldBinlog, +) { for _, segmentInfo := range segments { if len(segmentInfo.Binlogs) == 0 { segmentInfo.Binlogs = insertLogs[segmentInfo.ID] @@ -529,7 +530,8 @@ func (kc *Catalog) DropChannelCheckpoint(ctx context.Context, vChannel string) e } func (kc *Catalog) getBinlogsWithPrefix(binlogType storage.BinlogType, collectionID, partitionID, - segmentID typeutil.UniqueID) ([]string, []string, error) { + segmentID typeutil.UniqueID, +) ([]string, []string, error) { var binlogPrefix string switch binlogType { case storage.InsertBinlog: @@ -727,7 +729,8 @@ func (kc *Catalog) GcConfirm(ctx context.Context, collectionID, partitionID type } func fillLogPathByLogID(chunkManagerRootPath string, binlogType storage.BinlogType, collectionID, partitionID, - segmentID typeutil.UniqueID, fieldBinlog *datapb.FieldBinlog) error { + segmentID typeutil.UniqueID, fieldBinlog *datapb.FieldBinlog, +) error { for _, binlog := range fieldBinlog.Binlogs { path, err := buildLogPath(chunkManagerRootPath, binlogType, collectionID, partitionID, segmentID, fieldBinlog.GetFieldID(), binlog.GetLogID()) @@ -815,8 +818,8 @@ func hasSepcialStatslog(logs *datapb.FieldBinlog) bool { } func buildBinlogKvsWithLogID(collectionID, partitionID, segmentID typeutil.UniqueID, - binlogs, deltalogs, statslogs []*datapb.FieldBinlog, ignoreNumberCheck bool) (map[string]string, error) { - + binlogs, deltalogs, statslogs []*datapb.FieldBinlog, ignoreNumberCheck bool, +) (map[string]string, error) { checkBinlogs(storage.InsertBinlog, segmentID, binlogs) checkBinlogs(storage.DeleteBinlog, segmentID, deltalogs) checkBinlogs(storage.StatsBinlog, segmentID, statslogs) diff --git a/internal/metastore/kv/datacoord/kv_catalog_test.go b/internal/metastore/kv/datacoord/kv_catalog_test.go index de7fa38bbd..6943461875 100644 --- a/internal/metastore/kv/datacoord/kv_catalog_test.go +++ b/internal/metastore/kv/datacoord/kv_catalog_test.go @@ -122,7 +122,8 @@ var ( { EntriesNum: 5, LogPath: deltalogPath, - }}, + }, + }, }, } statslogs = []*datapb.FieldBinlog{ @@ -258,7 +259,6 @@ func Test_ListSegments(t *testing.T) { } if strings.HasPrefix(k3, s) { return f([]byte(k3), []byte(savedKvs[k3])) - } return errors.New("should not reach here") }) @@ -1136,7 +1136,8 @@ func addSegment(rootPath string, collectionID, partitionID, segmentID, fieldID i { EntriesNum: 5, LogPath: metautil.BuildDeltaLogPath(rootPath, collectionID, partitionID, segmentID, int64(rand.Int())), - }}, + }, + }, }, } statslogs = []*datapb.FieldBinlog{ diff --git a/internal/metastore/kv/querycoord/kv_catalog.go b/internal/metastore/kv/querycoord/kv_catalog.go index 75753ec237..eb1dce1e74 100644 --- a/internal/metastore/kv/querycoord/kv_catalog.go +++ b/internal/metastore/kv/querycoord/kv_catalog.go @@ -12,9 +12,7 @@ import ( "github.com/milvus-io/milvus/internal/proto/querypb" ) -var ( - ErrInvalidKey = errors.New("invalid load info key") -) +var ErrInvalidKey = errors.New("invalid load info key") const ( CollectionLoadInfoPrefix = "querycoord-collection-loadinfo" diff --git a/internal/metastore/kv/rootcoord/kv_catalog.go b/internal/metastore/kv/rootcoord/kv_catalog.go index ea2b028ec5..44a7e3b47d 100644 --- a/internal/metastore/kv/rootcoord/kv_catalog.go +++ 
b/internal/metastore/kv/rootcoord/kv_catalog.go @@ -6,6 +6,8 @@ import ( "fmt" "github.com/golang/protobuf/proto" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/kv" @@ -21,7 +23,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) const ( @@ -341,8 +342,8 @@ func (kc *Catalog) listFieldsAfter210(ctx context.Context, collectionID typeutil } func (kc *Catalog) appendPartitionAndFieldsInfo(ctx context.Context, collMeta *pb.CollectionInfo, - ts typeutil.Timestamp) (*model.Collection, error) { - + ts typeutil.Timestamp, +) (*model.Collection, error) { collection := model.UnmarshalCollectionModel(collMeta) if !partitionVersionAfter210(collMeta) && !fieldVersionAfter210(collMeta) { diff --git a/internal/metastore/kv/rootcoord/kv_catalog_test.go b/internal/metastore/kv/rootcoord/kv_catalog_test.go index 70b622f322..6d0612d5b2 100644 --- a/internal/metastore/kv/rootcoord/kv_catalog_test.go +++ b/internal/metastore/kv/rootcoord/kv_catalog_test.go @@ -10,6 +10,12 @@ import ( "github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/require" + "go.uber.org/atomic" + "golang.org/x/exp/maps" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" @@ -24,11 +30,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/crypto" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/require" - "go.uber.org/atomic" - "golang.org/x/exp/maps" ) var ( @@ -1423,7 +1424,6 @@ func TestRBAC_Credential(t *testing.T) { fmt.Sprintf("%s/%s", CredentialPrefix, "user3"), "random", } - } return nil }, @@ -1717,9 +1717,7 @@ func TestRBAC_Role(t *testing.T) { }) t.Run("test ListRole", func(t *testing.T) { - var ( - loadWithPrefixReturn atomic.Bool - ) + var loadWithPrefixReturn atomic.Bool t.Run("test entity!=nil", func(t *testing.T) { var ( @@ -1984,10 +1982,8 @@ func TestRBAC_Role(t *testing.T) { assert.Error(t, err) assert.Empty(t, res) } - }) } - }) }) t.Run("test ListUserRole", func(t *testing.T) { @@ -2176,7 +2172,8 @@ func TestRBAC_Grant(t *testing.T) { DbName: util.DefaultDBName, Grantor: &milvuspb.GrantorEntity{ User: &milvuspb.UserEntity{Name: test.userName}, - Privilege: &milvuspb.PrivilegeEntity{Name: test.privilegeName}}, + Privilege: &milvuspb.PrivilegeEntity{Name: test.privilegeName}, + }, }, milvuspb.OperatePrivilegeType_Grant) if test.isValid { @@ -2195,7 +2192,7 @@ func TestRBAC_Grant(t *testing.T) { }) t.Run("test Revoke", func(t *testing.T) { - var invalidPrivilegeRemove = "p-remove" + invalidPrivilegeRemove := "p-remove" invalidPrivilegeRemoveKey := funcutil.HandleTenantForEtcdKey(GranteeIDPrefix, tenant, fmt.Sprintf("%s/%s", validRoleValue, invalidPrivilegeRemove)) kvmock.EXPECT().Load(invalidPrivilegeRemoveKey).Call.Return("", nil) @@ -2233,7 +2230,8 @@ func TestRBAC_Grant(t *testing.T) { DbName: util.DefaultDBName, Grantor: &milvuspb.GrantorEntity{ User: &milvuspb.UserEntity{Name: test.userName}, - Privilege: &milvuspb.PrivilegeEntity{Name: test.privilegeName}}, + Privilege: 
&milvuspb.PrivilegeEntity{Name: test.privilegeName}, + }, }, milvuspb.OperatePrivilegeType_Revoke) if test.isValid { @@ -2306,7 +2304,6 @@ func TestRBAC_Grant(t *testing.T) { kvmock.EXPECT().LoadWithPrefix(invalidRoleKey).Call.Return(nil, nil, errors.New("mock loadWithPrefix error")) kvmock.EXPECT().LoadWithPrefix(mock.Anything).Call.Return( func(key string) []string { - // Mock kv_catalog.go:ListGrant:L871 if strings.Contains(key, GranteeIDPrefix) { return []string{ @@ -2344,21 +2341,25 @@ func TestRBAC_Grant(t *testing.T) { {false, &milvuspb.GrantEntity{ Object: &milvuspb.ObjectEntity{Name: "random"}, ObjectName: "random2", - Role: &milvuspb.RoleEntity{Name: "role1"}}, "valid role with not exist entity"}, + Role: &milvuspb.RoleEntity{Name: "role1"}, + }, "valid role with not exist entity"}, {true, &milvuspb.GrantEntity{ Object: &milvuspb.ObjectEntity{Name: "obj1"}, ObjectName: "obj_name1", - Role: &milvuspb.RoleEntity{Name: "role1"}}, "valid role with valid entity"}, + Role: &milvuspb.RoleEntity{Name: "role1"}, + }, "valid role with valid entity"}, {true, &milvuspb.GrantEntity{ Object: &milvuspb.ObjectEntity{Name: "obj1"}, ObjectName: "obj_name2", DbName: "foo", - Role: &milvuspb.RoleEntity{Name: "role1"}}, "valid role and dbName with valid entity"}, + Role: &milvuspb.RoleEntity{Name: "role1"}, + }, "valid role and dbName with valid entity"}, {false, &milvuspb.GrantEntity{ Object: &milvuspb.ObjectEntity{Name: "obj1"}, ObjectName: "obj_name2", DbName: "foo2", - Role: &milvuspb.RoleEntity{Name: "role1"}}, "valid role and invalid dbName with valid entity"}, + Role: &milvuspb.RoleEntity{Name: "role1"}, + }, "valid role and invalid dbName with valid entity"}, } for _, test := range tests { @@ -2375,7 +2376,6 @@ func TestRBAC_Grant(t *testing.T) { } else { assert.Error(t, err) } - }) } }) diff --git a/internal/metastore/kv/rootcoord/suffix_snapshot.go b/internal/metastore/kv/rootcoord/suffix_snapshot.go index 2f9cc48c46..45171a97ae 100644 --- a/internal/metastore/kv/rootcoord/suffix_snapshot.go +++ b/internal/metastore/kv/rootcoord/suffix_snapshot.go @@ -28,8 +28,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" - "go.uber.org/zap" "github.com/milvus-io/milvus/internal/kv" @@ -37,6 +35,7 @@ import ( "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/retry" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -254,7 +253,7 @@ func binarySearchRecords(records []tsv, ts typeutil.Timestamp) (string, bool) { i, j := 0, len(records) for i+1 < j { k := (i + j) / 2 - //log.Warn("", zap.Int("i", i), zap.Int("j", j), zap.Int("k", k)) + // log.Warn("", zap.Int("i", i), zap.Int("j", j), zap.Int("k", k)) if records[k].ts == ts { return records[k].value, true } @@ -362,7 +361,7 @@ func (ss *SuffixSnapshot) Load(key string, ts typeutil.Timestamp) (string, error // 3. 
find i which ts[i] <= ts && ts[i+1] > ts // corner cases like len(records)==0, ts < records[0].ts is covered in binarySearch - //binary search + // binary search value, found := binarySearchRecords(records, ts) if !found { log.Warn("not found") @@ -435,8 +434,8 @@ func (ss *SuffixSnapshot) LoadWithPrefix(key string, ts typeutil.Timestamp) ([]s // ts 0 case shall be treated as fetch latest/current value if ts == 0 { keys, values, err := ss.MetaKv.LoadWithPrefix(key) - fks := keys[:0] //make([]string, 0, len(keys)) - fvs := values[:0] //make([]string, 0, len(values)) + fks := keys[:0] // make([]string, 0, len(keys)) + fvs := values[:0] // make([]string, 0, len(values)) // hide rootPrefix from return value for i, k := range keys { // filters tombstone @@ -494,7 +493,6 @@ func (ss *SuffixSnapshot) LoadWithPrefix(key string, ts typeutil.Timestamp) ([]s latestOriginalKey = curOriginalKey return nil }) - if err != nil { return nil, nil, err } @@ -656,7 +654,6 @@ func (ss *SuffixSnapshot) removeExpiredKvs(now time.Time) error { return nil }) - if err != nil { return err } diff --git a/internal/metastore/kv/rootcoord/suffix_snapshot_test.go b/internal/metastore/kv/rootcoord/suffix_snapshot_test.go index 0c3d4538b1..5efc00680d 100644 --- a/internal/metastore/kv/rootcoord/suffix_snapshot_test.go +++ b/internal/metastore/kv/rootcoord/suffix_snapshot_test.go @@ -24,8 +24,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/stretchr/testify/require" @@ -34,12 +32,11 @@ import ( "github.com/milvus-io/milvus/internal/kv/mocks" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) -var ( - snapshotPrefix = "snapshots" -) +var snapshotPrefix = "snapshots" var Params = paramtable.Get() @@ -272,7 +269,6 @@ func Test_SuffixSnaphotIsTSOfKey(t *testing.T) { assert.EqualValues(t, c.expected, ts) assert.Equal(t, c.shouldFound, found) } - } func Test_SuffixSnapshotLoad(t *testing.T) { diff --git a/internal/metastore/model/alias_test.go b/internal/metastore/model/alias_test.go index ab9d9061ba..172f5d6dce 100644 --- a/internal/metastore/model/alias_test.go +++ b/internal/metastore/model/alias_test.go @@ -3,8 +3,9 @@ package model import ( "testing" - "github.com/milvus-io/milvus/internal/proto/etcdpb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/internal/proto/etcdpb" ) func TestAlias_Available(t *testing.T) { diff --git a/internal/metastore/model/collection.go b/internal/metastore/model/collection.go index 5b03ad1976..66acf68cf2 100644 --- a/internal/metastore/model/collection.go +++ b/internal/metastore/model/collection.go @@ -1,11 +1,12 @@ package model import ( + "github.com/samber/lo" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" pb "github.com/milvus-io/milvus/internal/proto/etcdpb" "github.com/milvus-io/milvus/pkg/common" - "github.com/samber/lo" ) type Collection struct { diff --git a/internal/metastore/model/collection_test.go b/internal/metastore/model/collection_test.go index 75c627e092..7ddde61e9f 100644 --- a/internal/metastore/model/collection_test.go +++ b/internal/metastore/model/collection_test.go @@ -3,11 +3,12 @@ package model import ( "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" 
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb" pb "github.com/milvus-io/milvus/internal/proto/etcdpb" "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" ) var ( diff --git a/internal/metastore/model/field.go b/internal/metastore/model/field.go index 51938533c2..10d44604d2 100644 --- a/internal/metastore/model/field.go +++ b/internal/metastore/model/field.go @@ -1,10 +1,9 @@ package model import ( - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/common" ) type Field struct { diff --git a/internal/metastore/model/field_test.go b/internal/metastore/model/field_test.go index c519e9915a..b72c71ec08 100644 --- a/internal/metastore/model/field_test.go +++ b/internal/metastore/model/field_test.go @@ -3,10 +3,10 @@ package model import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) var ( diff --git a/internal/metastore/model/index.go b/internal/metastore/model/index.go index 052cbb0f91..ffbecd13a5 100644 --- a/internal/metastore/model/index.go +++ b/internal/metastore/model/index.go @@ -2,6 +2,7 @@ package model import ( "github.com/golang/protobuf/proto" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/proto/indexpb" ) diff --git a/internal/metastore/model/index_test.go b/internal/metastore/model/index_test.go index 11b5a02619..10cde6c136 100644 --- a/internal/metastore/model/index_test.go +++ b/internal/metastore/model/index_test.go @@ -3,11 +3,10 @@ package model import ( "testing" - "github.com/milvus-io/milvus/internal/proto/indexpb" - "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus/internal/proto/indexpb" ) var ( diff --git a/internal/metastore/model/segment.go b/internal/metastore/model/segment.go index ca428a4787..5c119ec2ad 100644 --- a/internal/metastore/model/segment.go +++ b/internal/metastore/model/segment.go @@ -15,7 +15,7 @@ type Segment struct { CompactionFrom []int64 CreatedByCompaction bool SegmentState commonpb.SegmentState - //IndexInfos []*SegmentIndex + // IndexInfos []*SegmentIndex ReplicaIds []int64 NodeIds []int64 } diff --git a/internal/metastore/model/segment_index_test.go b/internal/metastore/model/segment_index_test.go index 8a86cae641..a3c056ec6e 100644 --- a/internal/metastore/model/segment_index_test.go +++ b/internal/metastore/model/segment_index_test.go @@ -3,11 +3,10 @@ package model import ( "testing" - "github.com/milvus-io/milvus/internal/proto/indexpb" - "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus/internal/proto/indexpb" ) var ( diff --git a/internal/mocks/mock_datacoord.go b/internal/mocks/mock_datacoord.go index d30fd66f05..c4e83aec45 100644 --- a/internal/mocks/mock_datacoord.go +++ b/internal/mocks/mock_datacoord.go @@ -68,8 +68,8 @@ type MockDataCoord_AssignSegmentID_Call struct { } // AssignSegmentID is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.AssignSegmentIDRequest +// - ctx context.Context +// - req *datapb.AssignSegmentIDRequest func (_e *MockDataCoord_Expecter) AssignSegmentID(ctx interface{}, req 
interface{}) *MockDataCoord_AssignSegmentID_Call { return &MockDataCoord_AssignSegmentID_Call{Call: _e.mock.On("AssignSegmentID", ctx, req)} } @@ -123,8 +123,8 @@ type MockDataCoord_BroadcastAlteredCollection_Call struct { } // BroadcastAlteredCollection is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.AlterCollectionRequest +// - ctx context.Context +// - req *datapb.AlterCollectionRequest func (_e *MockDataCoord_Expecter) BroadcastAlteredCollection(ctx interface{}, req interface{}) *MockDataCoord_BroadcastAlteredCollection_Call { return &MockDataCoord_BroadcastAlteredCollection_Call{Call: _e.mock.On("BroadcastAlteredCollection", ctx, req)} } @@ -178,8 +178,8 @@ type MockDataCoord_CheckHealth_Call struct { } // CheckHealth is a helper method to define mock.On call -// - ctx context.Context -// - req *milvuspb.CheckHealthRequest +// - ctx context.Context +// - req *milvuspb.CheckHealthRequest func (_e *MockDataCoord_Expecter) CheckHealth(ctx interface{}, req interface{}) *MockDataCoord_CheckHealth_Call { return &MockDataCoord_CheckHealth_Call{Call: _e.mock.On("CheckHealth", ctx, req)} } @@ -233,8 +233,8 @@ type MockDataCoord_CreateIndex_Call struct { } // CreateIndex is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.CreateIndexRequest +// - ctx context.Context +// - req *indexpb.CreateIndexRequest func (_e *MockDataCoord_Expecter) CreateIndex(ctx interface{}, req interface{}) *MockDataCoord_CreateIndex_Call { return &MockDataCoord_CreateIndex_Call{Call: _e.mock.On("CreateIndex", ctx, req)} } @@ -288,8 +288,8 @@ type MockDataCoord_DescribeIndex_Call struct { } // DescribeIndex is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.DescribeIndexRequest +// - ctx context.Context +// - req *indexpb.DescribeIndexRequest func (_e *MockDataCoord_Expecter) DescribeIndex(ctx interface{}, req interface{}) *MockDataCoord_DescribeIndex_Call { return &MockDataCoord_DescribeIndex_Call{Call: _e.mock.On("DescribeIndex", ctx, req)} } @@ -343,8 +343,8 @@ type MockDataCoord_DropIndex_Call struct { } // DropIndex is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.DropIndexRequest +// - ctx context.Context +// - req *indexpb.DropIndexRequest func (_e *MockDataCoord_Expecter) DropIndex(ctx interface{}, req interface{}) *MockDataCoord_DropIndex_Call { return &MockDataCoord_DropIndex_Call{Call: _e.mock.On("DropIndex", ctx, req)} } @@ -398,8 +398,8 @@ type MockDataCoord_DropVirtualChannel_Call struct { } // DropVirtualChannel is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.DropVirtualChannelRequest +// - ctx context.Context +// - req *datapb.DropVirtualChannelRequest func (_e *MockDataCoord_Expecter) DropVirtualChannel(ctx interface{}, req interface{}) *MockDataCoord_DropVirtualChannel_Call { return &MockDataCoord_DropVirtualChannel_Call{Call: _e.mock.On("DropVirtualChannel", ctx, req)} } @@ -453,8 +453,8 @@ type MockDataCoord_Flush_Call struct { } // Flush is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.FlushRequest +// - ctx context.Context +// - req *datapb.FlushRequest func (_e *MockDataCoord_Expecter) Flush(ctx interface{}, req interface{}) *MockDataCoord_Flush_Call { return &MockDataCoord_Flush_Call{Call: _e.mock.On("Flush", ctx, req)} } @@ -508,8 +508,8 @@ type MockDataCoord_GcConfirm_Call struct { } // GcConfirm is a helper method to define mock.On call -// - ctx context.Context -// - request 
*datapb.GcConfirmRequest +// - ctx context.Context +// - request *datapb.GcConfirmRequest func (_e *MockDataCoord_Expecter) GcConfirm(ctx interface{}, request interface{}) *MockDataCoord_GcConfirm_Call { return &MockDataCoord_GcConfirm_Call{Call: _e.mock.On("GcConfirm", ctx, request)} } @@ -563,8 +563,8 @@ type MockDataCoord_GetCollectionStatistics_Call struct { } // GetCollectionStatistics is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetCollectionStatisticsRequest +// - ctx context.Context +// - req *datapb.GetCollectionStatisticsRequest func (_e *MockDataCoord_Expecter) GetCollectionStatistics(ctx interface{}, req interface{}) *MockDataCoord_GetCollectionStatistics_Call { return &MockDataCoord_GetCollectionStatistics_Call{Call: _e.mock.On("GetCollectionStatistics", ctx, req)} } @@ -618,8 +618,8 @@ type MockDataCoord_GetCompactionState_Call struct { } // GetCompactionState is a helper method to define mock.On call -// - ctx context.Context -// - req *milvuspb.GetCompactionStateRequest +// - ctx context.Context +// - req *milvuspb.GetCompactionStateRequest func (_e *MockDataCoord_Expecter) GetCompactionState(ctx interface{}, req interface{}) *MockDataCoord_GetCompactionState_Call { return &MockDataCoord_GetCompactionState_Call{Call: _e.mock.On("GetCompactionState", ctx, req)} } @@ -673,8 +673,8 @@ type MockDataCoord_GetCompactionStateWithPlans_Call struct { } // GetCompactionStateWithPlans is a helper method to define mock.On call -// - ctx context.Context -// - req *milvuspb.GetCompactionPlansRequest +// - ctx context.Context +// - req *milvuspb.GetCompactionPlansRequest func (_e *MockDataCoord_Expecter) GetCompactionStateWithPlans(ctx interface{}, req interface{}) *MockDataCoord_GetCompactionStateWithPlans_Call { return &MockDataCoord_GetCompactionStateWithPlans_Call{Call: _e.mock.On("GetCompactionStateWithPlans", ctx, req)} } @@ -728,7 +728,7 @@ type MockDataCoord_GetComponentStates_Call struct { } // GetComponentStates is a helper method to define mock.On call -// - ctx context.Context +// - ctx context.Context func (_e *MockDataCoord_Expecter) GetComponentStates(ctx interface{}) *MockDataCoord_GetComponentStates_Call { return &MockDataCoord_GetComponentStates_Call{Call: _e.mock.On("GetComponentStates", ctx)} } @@ -782,8 +782,8 @@ type MockDataCoord_GetFlushAllState_Call struct { } // GetFlushAllState is a helper method to define mock.On call -// - ctx context.Context -// - req *milvuspb.GetFlushAllStateRequest +// - ctx context.Context +// - req *milvuspb.GetFlushAllStateRequest func (_e *MockDataCoord_Expecter) GetFlushAllState(ctx interface{}, req interface{}) *MockDataCoord_GetFlushAllState_Call { return &MockDataCoord_GetFlushAllState_Call{Call: _e.mock.On("GetFlushAllState", ctx, req)} } @@ -837,8 +837,8 @@ type MockDataCoord_GetFlushState_Call struct { } // GetFlushState is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetFlushStateRequest +// - ctx context.Context +// - req *datapb.GetFlushStateRequest func (_e *MockDataCoord_Expecter) GetFlushState(ctx interface{}, req interface{}) *MockDataCoord_GetFlushState_Call { return &MockDataCoord_GetFlushState_Call{Call: _e.mock.On("GetFlushState", ctx, req)} } @@ -892,8 +892,8 @@ type MockDataCoord_GetFlushedSegments_Call struct { } // GetFlushedSegments is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetFlushedSegmentsRequest +// - ctx context.Context +// - req *datapb.GetFlushedSegmentsRequest func (_e 
*MockDataCoord_Expecter) GetFlushedSegments(ctx interface{}, req interface{}) *MockDataCoord_GetFlushedSegments_Call { return &MockDataCoord_GetFlushedSegments_Call{Call: _e.mock.On("GetFlushedSegments", ctx, req)} } @@ -947,8 +947,8 @@ type MockDataCoord_GetIndexBuildProgress_Call struct { } // GetIndexBuildProgress is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.GetIndexBuildProgressRequest +// - ctx context.Context +// - req *indexpb.GetIndexBuildProgressRequest func (_e *MockDataCoord_Expecter) GetIndexBuildProgress(ctx interface{}, req interface{}) *MockDataCoord_GetIndexBuildProgress_Call { return &MockDataCoord_GetIndexBuildProgress_Call{Call: _e.mock.On("GetIndexBuildProgress", ctx, req)} } @@ -1002,8 +1002,8 @@ type MockDataCoord_GetIndexInfos_Call struct { } // GetIndexInfos is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.GetIndexInfoRequest +// - ctx context.Context +// - req *indexpb.GetIndexInfoRequest func (_e *MockDataCoord_Expecter) GetIndexInfos(ctx interface{}, req interface{}) *MockDataCoord_GetIndexInfos_Call { return &MockDataCoord_GetIndexInfos_Call{Call: _e.mock.On("GetIndexInfos", ctx, req)} } @@ -1057,8 +1057,8 @@ type MockDataCoord_GetIndexState_Call struct { } // GetIndexState is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.GetIndexStateRequest +// - ctx context.Context +// - req *indexpb.GetIndexStateRequest func (_e *MockDataCoord_Expecter) GetIndexState(ctx interface{}, req interface{}) *MockDataCoord_GetIndexState_Call { return &MockDataCoord_GetIndexState_Call{Call: _e.mock.On("GetIndexState", ctx, req)} } @@ -1112,8 +1112,8 @@ type MockDataCoord_GetIndexStatistics_Call struct { } // GetIndexStatistics is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.GetIndexStatisticsRequest +// - ctx context.Context +// - req *indexpb.GetIndexStatisticsRequest func (_e *MockDataCoord_Expecter) GetIndexStatistics(ctx interface{}, req interface{}) *MockDataCoord_GetIndexStatistics_Call { return &MockDataCoord_GetIndexStatistics_Call{Call: _e.mock.On("GetIndexStatistics", ctx, req)} } @@ -1167,8 +1167,8 @@ type MockDataCoord_GetInsertBinlogPaths_Call struct { } // GetInsertBinlogPaths is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetInsertBinlogPathsRequest +// - ctx context.Context +// - req *datapb.GetInsertBinlogPathsRequest func (_e *MockDataCoord_Expecter) GetInsertBinlogPaths(ctx interface{}, req interface{}) *MockDataCoord_GetInsertBinlogPaths_Call { return &MockDataCoord_GetInsertBinlogPaths_Call{Call: _e.mock.On("GetInsertBinlogPaths", ctx, req)} } @@ -1222,8 +1222,8 @@ type MockDataCoord_GetMetrics_Call struct { } // GetMetrics is a helper method to define mock.On call -// - ctx context.Context -// - req *milvuspb.GetMetricsRequest +// - ctx context.Context +// - req *milvuspb.GetMetricsRequest func (_e *MockDataCoord_Expecter) GetMetrics(ctx interface{}, req interface{}) *MockDataCoord_GetMetrics_Call { return &MockDataCoord_GetMetrics_Call{Call: _e.mock.On("GetMetrics", ctx, req)} } @@ -1277,8 +1277,8 @@ type MockDataCoord_GetPartitionStatistics_Call struct { } // GetPartitionStatistics is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetPartitionStatisticsRequest +// - ctx context.Context +// - req *datapb.GetPartitionStatisticsRequest func (_e *MockDataCoord_Expecter) GetPartitionStatistics(ctx interface{}, req interface{}) 
*MockDataCoord_GetPartitionStatistics_Call { return &MockDataCoord_GetPartitionStatistics_Call{Call: _e.mock.On("GetPartitionStatistics", ctx, req)} } @@ -1332,8 +1332,8 @@ type MockDataCoord_GetRecoveryInfo_Call struct { } // GetRecoveryInfo is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetRecoveryInfoRequest +// - ctx context.Context +// - req *datapb.GetRecoveryInfoRequest func (_e *MockDataCoord_Expecter) GetRecoveryInfo(ctx interface{}, req interface{}) *MockDataCoord_GetRecoveryInfo_Call { return &MockDataCoord_GetRecoveryInfo_Call{Call: _e.mock.On("GetRecoveryInfo", ctx, req)} } @@ -1387,8 +1387,8 @@ type MockDataCoord_GetRecoveryInfoV2_Call struct { } // GetRecoveryInfoV2 is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetRecoveryInfoRequestV2 +// - ctx context.Context +// - req *datapb.GetRecoveryInfoRequestV2 func (_e *MockDataCoord_Expecter) GetRecoveryInfoV2(ctx interface{}, req interface{}) *MockDataCoord_GetRecoveryInfoV2_Call { return &MockDataCoord_GetRecoveryInfoV2_Call{Call: _e.mock.On("GetRecoveryInfoV2", ctx, req)} } @@ -1442,8 +1442,8 @@ type MockDataCoord_GetSegmentIndexState_Call struct { } // GetSegmentIndexState is a helper method to define mock.On call -// - ctx context.Context -// - req *indexpb.GetSegmentIndexStateRequest +// - ctx context.Context +// - req *indexpb.GetSegmentIndexStateRequest func (_e *MockDataCoord_Expecter) GetSegmentIndexState(ctx interface{}, req interface{}) *MockDataCoord_GetSegmentIndexState_Call { return &MockDataCoord_GetSegmentIndexState_Call{Call: _e.mock.On("GetSegmentIndexState", ctx, req)} } @@ -1497,8 +1497,8 @@ type MockDataCoord_GetSegmentInfo_Call struct { } // GetSegmentInfo is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetSegmentInfoRequest +// - ctx context.Context +// - req *datapb.GetSegmentInfoRequest func (_e *MockDataCoord_Expecter) GetSegmentInfo(ctx interface{}, req interface{}) *MockDataCoord_GetSegmentInfo_Call { return &MockDataCoord_GetSegmentInfo_Call{Call: _e.mock.On("GetSegmentInfo", ctx, req)} } @@ -1552,7 +1552,7 @@ type MockDataCoord_GetSegmentInfoChannel_Call struct { } // GetSegmentInfoChannel is a helper method to define mock.On call -// - ctx context.Context +// - ctx context.Context func (_e *MockDataCoord_Expecter) GetSegmentInfoChannel(ctx interface{}) *MockDataCoord_GetSegmentInfoChannel_Call { return &MockDataCoord_GetSegmentInfoChannel_Call{Call: _e.mock.On("GetSegmentInfoChannel", ctx)} } @@ -1606,8 +1606,8 @@ type MockDataCoord_GetSegmentStates_Call struct { } // GetSegmentStates is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetSegmentStatesRequest +// - ctx context.Context +// - req *datapb.GetSegmentStatesRequest func (_e *MockDataCoord_Expecter) GetSegmentStates(ctx interface{}, req interface{}) *MockDataCoord_GetSegmentStates_Call { return &MockDataCoord_GetSegmentStates_Call{Call: _e.mock.On("GetSegmentStates", ctx, req)} } @@ -1661,8 +1661,8 @@ type MockDataCoord_GetSegmentsByStates_Call struct { } // GetSegmentsByStates is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.GetSegmentsByStatesRequest +// - ctx context.Context +// - req *datapb.GetSegmentsByStatesRequest func (_e *MockDataCoord_Expecter) GetSegmentsByStates(ctx interface{}, req interface{}) *MockDataCoord_GetSegmentsByStates_Call { return &MockDataCoord_GetSegmentsByStates_Call{Call: _e.mock.On("GetSegmentsByStates", ctx, req)} } @@ 
-1716,7 +1716,7 @@ type MockDataCoord_GetStatisticsChannel_Call struct { } // GetStatisticsChannel is a helper method to define mock.On call -// - ctx context.Context +// - ctx context.Context func (_e *MockDataCoord_Expecter) GetStatisticsChannel(ctx interface{}) *MockDataCoord_GetStatisticsChannel_Call { return &MockDataCoord_GetStatisticsChannel_Call{Call: _e.mock.On("GetStatisticsChannel", ctx)} } @@ -1770,7 +1770,7 @@ type MockDataCoord_GetTimeTickChannel_Call struct { } // GetTimeTickChannel is a helper method to define mock.On call -// - ctx context.Context +// - ctx context.Context func (_e *MockDataCoord_Expecter) GetTimeTickChannel(ctx interface{}) *MockDataCoord_GetTimeTickChannel_Call { return &MockDataCoord_GetTimeTickChannel_Call{Call: _e.mock.On("GetTimeTickChannel", ctx)} } @@ -1824,8 +1824,8 @@ type MockDataCoord_Import_Call struct { } // Import is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.ImportTaskRequest +// - ctx context.Context +// - req *datapb.ImportTaskRequest func (_e *MockDataCoord_Expecter) Import(ctx interface{}, req interface{}) *MockDataCoord_Import_Call { return &MockDataCoord_Import_Call{Call: _e.mock.On("Import", ctx, req)} } @@ -1920,8 +1920,8 @@ type MockDataCoord_ManualCompaction_Call struct { } // ManualCompaction is a helper method to define mock.On call -// - ctx context.Context -// - req *milvuspb.ManualCompactionRequest +// - ctx context.Context +// - req *milvuspb.ManualCompactionRequest func (_e *MockDataCoord_Expecter) ManualCompaction(ctx interface{}, req interface{}) *MockDataCoord_ManualCompaction_Call { return &MockDataCoord_ManualCompaction_Call{Call: _e.mock.On("ManualCompaction", ctx, req)} } @@ -1975,8 +1975,8 @@ type MockDataCoord_MarkSegmentsDropped_Call struct { } // MarkSegmentsDropped is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.MarkSegmentsDroppedRequest +// - ctx context.Context +// - req *datapb.MarkSegmentsDroppedRequest func (_e *MockDataCoord_Expecter) MarkSegmentsDropped(ctx interface{}, req interface{}) *MockDataCoord_MarkSegmentsDropped_Call { return &MockDataCoord_MarkSegmentsDropped_Call{Call: _e.mock.On("MarkSegmentsDropped", ctx, req)} } @@ -2071,8 +2071,8 @@ type MockDataCoord_ReportDataNodeTtMsgs_Call struct { } // ReportDataNodeTtMsgs is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.ReportDataNodeTtMsgsRequest +// - ctx context.Context +// - req *datapb.ReportDataNodeTtMsgsRequest func (_e *MockDataCoord_Expecter) ReportDataNodeTtMsgs(ctx interface{}, req interface{}) *MockDataCoord_ReportDataNodeTtMsgs_Call { return &MockDataCoord_ReportDataNodeTtMsgs_Call{Call: _e.mock.On("ReportDataNodeTtMsgs", ctx, req)} } @@ -2126,8 +2126,8 @@ type MockDataCoord_SaveBinlogPaths_Call struct { } // SaveBinlogPaths is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.SaveBinlogPathsRequest +// - ctx context.Context +// - req *datapb.SaveBinlogPathsRequest func (_e *MockDataCoord_Expecter) SaveBinlogPaths(ctx interface{}, req interface{}) *MockDataCoord_SaveBinlogPaths_Call { return &MockDataCoord_SaveBinlogPaths_Call{Call: _e.mock.On("SaveBinlogPaths", ctx, req)} } @@ -2181,8 +2181,8 @@ type MockDataCoord_SaveImportSegment_Call struct { } // SaveImportSegment is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.SaveImportSegmentRequest +// - ctx context.Context +// - req *datapb.SaveImportSegmentRequest func (_e *MockDataCoord_Expecter) 
SaveImportSegment(ctx interface{}, req interface{}) *MockDataCoord_SaveImportSegment_Call { return &MockDataCoord_SaveImportSegment_Call{Call: _e.mock.On("SaveImportSegment", ctx, req)} } @@ -2215,7 +2215,7 @@ type MockDataCoord_SetAddress_Call struct { } // SetAddress is a helper method to define mock.On call -// - address string +// - address string func (_e *MockDataCoord_Expecter) SetAddress(address interface{}) *MockDataCoord_SetAddress_Call { return &MockDataCoord_SetAddress_Call{Call: _e.mock.On("SetAddress", address)} } @@ -2248,7 +2248,7 @@ type MockDataCoord_SetDataNodeCreator_Call struct { } // SetDataNodeCreator is a helper method to define mock.On call -// - _a0 func(context.Context , string , int64)(types.DataNode , error) +// - _a0 func(context.Context , string , int64)(types.DataNode , error) func (_e *MockDataCoord_Expecter) SetDataNodeCreator(_a0 interface{}) *MockDataCoord_SetDataNodeCreator_Call { return &MockDataCoord_SetDataNodeCreator_Call{Call: _e.mock.On("SetDataNodeCreator", _a0)} } @@ -2281,7 +2281,7 @@ type MockDataCoord_SetEtcdClient_Call struct { } // SetEtcdClient is a helper method to define mock.On call -// - etcdClient *clientv3.Client +// - etcdClient *clientv3.Client func (_e *MockDataCoord_Expecter) SetEtcdClient(etcdClient interface{}) *MockDataCoord_SetEtcdClient_Call { return &MockDataCoord_SetEtcdClient_Call{Call: _e.mock.On("SetEtcdClient", etcdClient)} } @@ -2314,7 +2314,7 @@ type MockDataCoord_SetIndexNodeCreator_Call struct { } // SetIndexNodeCreator is a helper method to define mock.On call -// - _a0 func(context.Context , string , int64)(types.IndexNode , error) +// - _a0 func(context.Context , string , int64)(types.IndexNode , error) func (_e *MockDataCoord_Expecter) SetIndexNodeCreator(_a0 interface{}) *MockDataCoord_SetIndexNodeCreator_Call { return &MockDataCoord_SetIndexNodeCreator_Call{Call: _e.mock.On("SetIndexNodeCreator", _a0)} } @@ -2347,7 +2347,7 @@ type MockDataCoord_SetRootCoord_Call struct { } // SetRootCoord is a helper method to define mock.On call -// - rootCoord types.RootCoord +// - rootCoord types.RootCoord func (_e *MockDataCoord_Expecter) SetRootCoord(rootCoord interface{}) *MockDataCoord_SetRootCoord_Call { return &MockDataCoord_SetRootCoord_Call{Call: _e.mock.On("SetRootCoord", rootCoord)} } @@ -2401,8 +2401,8 @@ type MockDataCoord_SetSegmentState_Call struct { } // SetSegmentState is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.SetSegmentStateRequest +// - ctx context.Context +// - req *datapb.SetSegmentStateRequest func (_e *MockDataCoord_Expecter) SetSegmentState(ctx interface{}, req interface{}) *MockDataCoord_SetSegmentState_Call { return &MockDataCoord_SetSegmentState_Call{Call: _e.mock.On("SetSegmentState", ctx, req)} } @@ -2489,8 +2489,8 @@ type MockDataCoord_ShowConfigurations_Call struct { } // ShowConfigurations is a helper method to define mock.On call -// - ctx context.Context -// - req *internalpb.ShowConfigurationsRequest +// - ctx context.Context +// - req *internalpb.ShowConfigurationsRequest func (_e *MockDataCoord_Expecter) ShowConfigurations(ctx interface{}, req interface{}) *MockDataCoord_ShowConfigurations_Call { return &MockDataCoord_ShowConfigurations_Call{Call: _e.mock.On("ShowConfigurations", ctx, req)} } @@ -2626,8 +2626,8 @@ type MockDataCoord_UnsetIsImportingState_Call struct { } // UnsetIsImportingState is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.UnsetIsImportingStateRequest +// - ctx context.Context +// - req 
*datapb.UnsetIsImportingStateRequest func (_e *MockDataCoord_Expecter) UnsetIsImportingState(ctx interface{}, req interface{}) *MockDataCoord_UnsetIsImportingState_Call { return &MockDataCoord_UnsetIsImportingState_Call{Call: _e.mock.On("UnsetIsImportingState", ctx, req)} } @@ -2681,8 +2681,8 @@ type MockDataCoord_UpdateChannelCheckpoint_Call struct { } // UpdateChannelCheckpoint is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.UpdateChannelCheckpointRequest +// - ctx context.Context +// - req *datapb.UpdateChannelCheckpointRequest func (_e *MockDataCoord_Expecter) UpdateChannelCheckpoint(ctx interface{}, req interface{}) *MockDataCoord_UpdateChannelCheckpoint_Call { return &MockDataCoord_UpdateChannelCheckpoint_Call{Call: _e.mock.On("UpdateChannelCheckpoint", ctx, req)} } @@ -2736,8 +2736,8 @@ type MockDataCoord_UpdateSegmentStatistics_Call struct { } // UpdateSegmentStatistics is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.UpdateSegmentStatisticsRequest +// - ctx context.Context +// - req *datapb.UpdateSegmentStatisticsRequest func (_e *MockDataCoord_Expecter) UpdateSegmentStatistics(ctx interface{}, req interface{}) *MockDataCoord_UpdateSegmentStatistics_Call { return &MockDataCoord_UpdateSegmentStatistics_Call{Call: _e.mock.On("UpdateSegmentStatistics", ctx, req)} } @@ -2791,8 +2791,8 @@ type MockDataCoord_WatchChannels_Call struct { } // WatchChannels is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.WatchChannelsRequest +// - ctx context.Context +// - req *datapb.WatchChannelsRequest func (_e *MockDataCoord_Expecter) WatchChannels(ctx interface{}, req interface{}) *MockDataCoord_WatchChannels_Call { return &MockDataCoord_WatchChannels_Call{Call: _e.mock.On("WatchChannels", ctx, req)} } @@ -2819,7 +2819,8 @@ func (_c *MockDataCoord_WatchChannels_Call) RunAndReturn(run func(context.Contex func NewMockDataCoord(t interface { mock.TestingT Cleanup(func()) -}) *MockDataCoord { +}, +) *MockDataCoord { mock := &MockDataCoord{} mock.Mock.Test(t) diff --git a/internal/mocks/mock_datanode.go b/internal/mocks/mock_datanode.go index 0824259f05..2de723ff0b 100644 --- a/internal/mocks/mock_datanode.go +++ b/internal/mocks/mock_datanode.go @@ -64,8 +64,8 @@ type MockDataNode_AddImportSegment_Call struct { } // AddImportSegment is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.AddImportSegmentRequest +// - ctx context.Context +// - req *datapb.AddImportSegmentRequest func (_e *MockDataNode_Expecter) AddImportSegment(ctx interface{}, req interface{}) *MockDataNode_AddImportSegment_Call { return &MockDataNode_AddImportSegment_Call{Call: _e.mock.On("AddImportSegment", ctx, req)} } @@ -119,8 +119,8 @@ type MockDataNode_Compaction_Call struct { } // Compaction is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.CompactionPlan +// - ctx context.Context +// - req *datapb.CompactionPlan func (_e *MockDataNode_Expecter) Compaction(ctx interface{}, req interface{}) *MockDataNode_Compaction_Call { return &MockDataNode_Compaction_Call{Call: _e.mock.On("Compaction", ctx, req)} } @@ -174,8 +174,8 @@ type MockDataNode_FlushChannels_Call struct { } // FlushChannels is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.FlushChannelsRequest +// - ctx context.Context +// - req *datapb.FlushChannelsRequest func (_e *MockDataNode_Expecter) FlushChannels(ctx interface{}, req interface{}) 
*MockDataNode_FlushChannels_Call { return &MockDataNode_FlushChannels_Call{Call: _e.mock.On("FlushChannels", ctx, req)} } @@ -229,8 +229,8 @@ type MockDataNode_FlushSegments_Call struct { } // FlushSegments is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.FlushSegmentsRequest +// - ctx context.Context +// - req *datapb.FlushSegmentsRequest func (_e *MockDataNode_Expecter) FlushSegments(ctx interface{}, req interface{}) *MockDataNode_FlushSegments_Call { return &MockDataNode_FlushSegments_Call{Call: _e.mock.On("FlushSegments", ctx, req)} } @@ -325,8 +325,8 @@ type MockDataNode_GetCompactionState_Call struct { } // GetCompactionState is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.CompactionStateRequest +// - ctx context.Context +// - req *datapb.CompactionStateRequest func (_e *MockDataNode_Expecter) GetCompactionState(ctx interface{}, req interface{}) *MockDataNode_GetCompactionState_Call { return &MockDataNode_GetCompactionState_Call{Call: _e.mock.On("GetCompactionState", ctx, req)} } @@ -380,7 +380,7 @@ type MockDataNode_GetComponentStates_Call struct { } // GetComponentStates is a helper method to define mock.On call -// - ctx context.Context +// - ctx context.Context func (_e *MockDataNode_Expecter) GetComponentStates(ctx interface{}) *MockDataNode_GetComponentStates_Call { return &MockDataNode_GetComponentStates_Call{Call: _e.mock.On("GetComponentStates", ctx)} } @@ -434,8 +434,8 @@ type MockDataNode_GetMetrics_Call struct { } // GetMetrics is a helper method to define mock.On call -// - ctx context.Context -// - req *milvuspb.GetMetricsRequest +// - ctx context.Context +// - req *milvuspb.GetMetricsRequest func (_e *MockDataNode_Expecter) GetMetrics(ctx interface{}, req interface{}) *MockDataNode_GetMetrics_Call { return &MockDataNode_GetMetrics_Call{Call: _e.mock.On("GetMetrics", ctx, req)} } @@ -530,7 +530,7 @@ type MockDataNode_GetStatisticsChannel_Call struct { } // GetStatisticsChannel is a helper method to define mock.On call -// - ctx context.Context +// - ctx context.Context func (_e *MockDataNode_Expecter) GetStatisticsChannel(ctx interface{}) *MockDataNode_GetStatisticsChannel_Call { return &MockDataNode_GetStatisticsChannel_Call{Call: _e.mock.On("GetStatisticsChannel", ctx)} } @@ -584,8 +584,8 @@ type MockDataNode_Import_Call struct { } // Import is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.ImportTaskRequest +// - ctx context.Context +// - req *datapb.ImportTaskRequest func (_e *MockDataNode_Expecter) Import(ctx interface{}, req interface{}) *MockDataNode_Import_Call { return &MockDataNode_Import_Call{Call: _e.mock.On("Import", ctx, req)} } @@ -721,8 +721,8 @@ type MockDataNode_ResendSegmentStats_Call struct { } // ResendSegmentStats is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.ResendSegmentStatsRequest +// - ctx context.Context +// - req *datapb.ResendSegmentStatsRequest func (_e *MockDataNode_Expecter) ResendSegmentStats(ctx interface{}, req interface{}) *MockDataNode_ResendSegmentStats_Call { return &MockDataNode_ResendSegmentStats_Call{Call: _e.mock.On("ResendSegmentStats", ctx, req)} } @@ -755,7 +755,7 @@ type MockDataNode_SetAddress_Call struct { } // SetAddress is a helper method to define mock.On call -// - address string +// - address string func (_e *MockDataNode_Expecter) SetAddress(address interface{}) *MockDataNode_SetAddress_Call { return &MockDataNode_SetAddress_Call{Call: _e.mock.On("SetAddress", 
address)} } @@ -797,7 +797,7 @@ type MockDataNode_SetDataCoord_Call struct { } // SetDataCoord is a helper method to define mock.On call -// - dataCoord types.DataCoord +// - dataCoord types.DataCoord func (_e *MockDataNode_Expecter) SetDataCoord(dataCoord interface{}) *MockDataNode_SetDataCoord_Call { return &MockDataNode_SetDataCoord_Call{Call: _e.mock.On("SetDataCoord", dataCoord)} } @@ -830,7 +830,7 @@ type MockDataNode_SetEtcdClient_Call struct { } // SetEtcdClient is a helper method to define mock.On call -// - etcdClient *clientv3.Client +// - etcdClient *clientv3.Client func (_e *MockDataNode_Expecter) SetEtcdClient(etcdClient interface{}) *MockDataNode_SetEtcdClient_Call { return &MockDataNode_SetEtcdClient_Call{Call: _e.mock.On("SetEtcdClient", etcdClient)} } @@ -872,7 +872,7 @@ type MockDataNode_SetRootCoord_Call struct { } // SetRootCoord is a helper method to define mock.On call -// - rootCoord types.RootCoord +// - rootCoord types.RootCoord func (_e *MockDataNode_Expecter) SetRootCoord(rootCoord interface{}) *MockDataNode_SetRootCoord_Call { return &MockDataNode_SetRootCoord_Call{Call: _e.mock.On("SetRootCoord", rootCoord)} } @@ -926,8 +926,8 @@ type MockDataNode_ShowConfigurations_Call struct { } // ShowConfigurations is a helper method to define mock.On call -// - ctx context.Context -// - req *internalpb.ShowConfigurationsRequest +// - ctx context.Context +// - req *internalpb.ShowConfigurationsRequest func (_e *MockDataNode_Expecter) ShowConfigurations(ctx interface{}, req interface{}) *MockDataNode_ShowConfigurations_Call { return &MockDataNode_ShowConfigurations_Call{Call: _e.mock.On("ShowConfigurations", ctx, req)} } @@ -1063,8 +1063,8 @@ type MockDataNode_SyncSegments_Call struct { } // SyncSegments is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.SyncSegmentsRequest +// - ctx context.Context +// - req *datapb.SyncSegmentsRequest func (_e *MockDataNode_Expecter) SyncSegments(ctx interface{}, req interface{}) *MockDataNode_SyncSegments_Call { return &MockDataNode_SyncSegments_Call{Call: _e.mock.On("SyncSegments", ctx, req)} } @@ -1097,7 +1097,7 @@ type MockDataNode_UpdateStateCode_Call struct { } // UpdateStateCode is a helper method to define mock.On call -// - stateCode commonpb.StateCode +// - stateCode commonpb.StateCode func (_e *MockDataNode_Expecter) UpdateStateCode(stateCode interface{}) *MockDataNode_UpdateStateCode_Call { return &MockDataNode_UpdateStateCode_Call{Call: _e.mock.On("UpdateStateCode", stateCode)} } @@ -1151,8 +1151,8 @@ type MockDataNode_WatchDmChannels_Call struct { } // WatchDmChannels is a helper method to define mock.On call -// - ctx context.Context -// - req *datapb.WatchDmChannelsRequest +// - ctx context.Context +// - req *datapb.WatchDmChannelsRequest func (_e *MockDataNode_Expecter) WatchDmChannels(ctx interface{}, req interface{}) *MockDataNode_WatchDmChannels_Call { return &MockDataNode_WatchDmChannels_Call{Call: _e.mock.On("WatchDmChannels", ctx, req)} } @@ -1179,7 +1179,8 @@ func (_c *MockDataNode_WatchDmChannels_Call) RunAndReturn(run func(context.Conte func NewMockDataNode(t interface { mock.TestingT Cleanup(func()) -}) *MockDataNode { +}, +) *MockDataNode { mock := &MockDataNode{} mock.Mock.Test(t) diff --git a/internal/mq/mqimpl/rocksmq/client/client_impl.go b/internal/mq/mqimpl/rocksmq/client/client_impl.go index b32fc79435..b01e63fa45 100644 --- a/internal/mq/mqimpl/rocksmq/client/client_impl.go +++ b/internal/mq/mqimpl/rocksmq/client/client_impl.go @@ -32,7 +32,6 @@ type client struct { } 
func newClient(options Options) (*client, error) { - if options.Server == nil { return nil, newError(InvalidConfiguration, "options.Server is nil") } @@ -50,7 +49,6 @@ func newClient(options Options) (*client, error) { func (c *client) CreateProducer(options ProducerOptions) (Producer, error) { // Create a producer producer, err := newProducer(c, options) - if err != nil { return nil, err } @@ -171,7 +169,8 @@ func (c *client) deliver(consumer *consumer) { MsgID: msg.MsgID, Payload: msg.Payload, Properties: msg.Properties, - Topic: consumer.Topic()}: + Topic: consumer.Topic(), + }: case <-c.closeCh: return } diff --git a/internal/mq/mqimpl/rocksmq/client/consumer_impl_test.go b/internal/mq/mqimpl/rocksmq/client/consumer_impl_test.go index 9afd9d39ef..feeb689139 100644 --- a/internal/mq/mqimpl/rocksmq/client/consumer_impl_test.go +++ b/internal/mq/mqimpl/rocksmq/client/consumer_impl_test.go @@ -117,7 +117,7 @@ func TestConsumer_Subscription(t *testing.T) { }) assert.Nil(t, consumer) assert.Error(t, err) - //assert.Equal(t, consumerName, consumer.Subscription()) + // assert.Equal(t, consumerName, consumer.Subscription()) } func TestConsumer_Seek(t *testing.T) { diff --git a/internal/mq/mqimpl/rocksmq/client/producer_impl_test.go b/internal/mq/mqimpl/rocksmq/client/producer_impl_test.go index d04aa02359..f372ab1b97 100644 --- a/internal/mq/mqimpl/rocksmq/client/producer_impl_test.go +++ b/internal/mq/mqimpl/rocksmq/client/producer_impl_test.go @@ -40,5 +40,5 @@ func TestProducerTopic(t *testing.T) { }) assert.Nil(t, producer) assert.Error(t, err) - //assert.Equal(t, topicName, producer.Topic()) + // assert.Equal(t, topicName, producer.Topic()) } diff --git a/internal/mq/mqimpl/rocksmq/server/rocksmq_impl.go b/internal/mq/mqimpl/rocksmq/server/rocksmq_impl.go index 0a380c66eb..18a9685cb4 100644 --- a/internal/mq/mqimpl/rocksmq/server/rocksmq_impl.go +++ b/internal/mq/mqimpl/rocksmq/server/rocksmq_impl.go @@ -622,7 +622,6 @@ func (rmq *rocksmq) Produce(topicName string, messages []ProducerMessage) ([]Uni msgLen := len(messages) idStart, idEnd, err := rmq.idAllocator.Alloc(uint32(msgLen)) - if err != nil { return []UniqueID{}, err } @@ -901,7 +900,7 @@ func (rmq *rocksmq) seek(topicName string, groupName string, msgID UniqueID) err log.Warn("RocksMQ: trying to seek to no exist position, reset current id", zap.String("topic", topicName), zap.String("group", groupName), zap.Int64("msgId", msgID)) err := rmq.moveConsumePos(topicName, groupName, DefaultMessageID) - //skip seek if key is not found, this is the behavior as pulsar + // skip seek if key is not found, this is the behavior as pulsar return err } /* Step II: update current_id */ @@ -921,7 +920,7 @@ func (rmq *rocksmq) moveConsumePos(topicName string, groupName string, msgID Uni panic("move consume position backward") } - //update ack if position move forward + // update ack if position move forward err := rmq.updateAckedInfo(topicName, groupName, oldPos, msgID-1) if err != nil { log.Warn("failed to update acked info ", zap.String("topic", topicName), @@ -1053,7 +1052,6 @@ func (rmq *rocksmq) getLatestMsg(topicName string) (int64, error) { } msgID, err := strconv.ParseInt(seekMsgID[len(topicName)+1:], 10, 64) - if err != nil { return DefaultMessageID, err } diff --git a/internal/mq/mqimpl/rocksmq/server/rocksmq_impl_test.go b/internal/mq/mqimpl/rocksmq/server/rocksmq_impl_test.go index 3c89f7381b..f6c765ac89 100644 --- a/internal/mq/mqimpl/rocksmq/server/rocksmq_impl_test.go +++ b/internal/mq/mqimpl/rocksmq/server/rocksmq_impl_test.go @@ 
-102,7 +102,6 @@ func (rmq *rocksmq) produceBefore2(topicName string, messages []producerMessageB msgLen := len(messages) idStart, idEnd, err := rmq.idAllocator.Alloc(uint32(msgLen)) - if err != nil { return []UniqueID{}, err } @@ -150,7 +149,6 @@ func (rmq *rocksmq) produceBefore2(topicName string, messages []producerMessageB getProduceTime := time.Since(start).Milliseconds() if getProduceTime > 200 { - log.Warn("rocksmq produce too slowly", zap.String("topic", topicName), zap.Int64("get lock elapse", getLockTime), zap.Int64("alloc elapse", allocTime-getLockTime), @@ -183,7 +181,6 @@ func (rmq *rocksmq) produceIn2(topicName string, messages []ProducerMessage) ([] msgLen := len(messages) idStart, idEnd, err := rmq.idAllocator.Alloc(uint32(msgLen)) - if err != nil { return []UniqueID{}, err } @@ -663,7 +660,6 @@ func TestRocksmq_Seek(t *testing.T) { assert.Equal(t, messages[0].MsgID, seekID2) _ = rmq.DestroyConsumerGroup(channelName, groupName1) - } func TestRocksmq_Loop(t *testing.T) { @@ -1371,7 +1367,7 @@ func TestRocksmq_updateAckedInfoErr(t *testing.T) { rmq.CreateTopic(topicName) defer rmq.DestroyTopic(topicName) - //add message, make sure rmq has more than one page + // add message, make sure rmq has more than one page msgNum := 100 pMsgs := make([]ProducerMessage, msgNum) for i := 0; i < msgNum; i++ { @@ -1390,9 +1386,9 @@ func TestRocksmq_updateAckedInfoErr(t *testing.T) { GroupName: groupName + strconv.Itoa(i), MsgMutex: make(chan struct{}), } - //make sure consumer not in rmq.consumersID + // make sure consumer not in rmq.consumersID rmq.DestroyConsumerGroup(topicName, groupName+strconv.Itoa(i)) - //add consumer to rmq.consumers + // add consumer to rmq.consumers rmq.RegisterConsumer(consumer) } @@ -1445,7 +1441,7 @@ func TestRocksmq_Info(t *testing.T) { assert.True(t, rmq.Info()) - //test error + // test error rmq.kv = &rocksdbkv.RocksdbKV{} assert.False(t, rmq.Info()) } diff --git a/internal/mq/mqimpl/rocksmq/server/rocksmq_retention_test.go b/internal/mq/mqimpl/rocksmq/server/rocksmq_retention_test.go index af42fbee4b..ecaf612cdb 100644 --- a/internal/mq/mqimpl/rocksmq/server/rocksmq_retention_test.go +++ b/internal/mq/mqimpl/rocksmq/server/rocksmq_retention_test.go @@ -391,7 +391,6 @@ func TestRmqRetention_MultipleTopic(t *testing.T) { newRes, err = rmq.Consume(topicName, groupName, 1) assert.NoError(t, err) assert.Equal(t, len(newRes), 0) - } func TestRetentionInfo_InitRetentionInfo(t *testing.T) { diff --git a/internal/mq/msgstream/mq_factory.go b/internal/mq/msgstream/mq_factory.go index 60f1dea3f0..6d19d2abf0 100644 --- a/internal/mq/msgstream/mq_factory.go +++ b/internal/mq/msgstream/mq_factory.go @@ -1,12 +1,13 @@ package msgstream import ( + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/server" "github.com/milvus-io/milvus/internal/mq/msgstream/mqwrapper/rmq" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/zap" ) // NewRocksmqFactory creates a new message stream factory based on rocksmq. 
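The file-by-file import rewrites above all converge on the same three-group layout: standard-library packages first, third-party modules second, and github.com/milvus-io packages last, with a blank line between groups. The snippet below is illustrative only; the package, imports, and function are hypothetical and are not taken from any file touched by this change.

package example

import (
	// standard library group
	"context"
	"fmt"

	// third-party group
	"go.uber.org/zap"

	// github.com/milvus-io group, always last
	"github.com/milvus-io/milvus/pkg/log"
)

// logStart exists only so the imports above are referenced and the
// illustration stays compilable.
func logStart(ctx context.Context) {
	_, hasDeadline := ctx.Deadline()
	log.Info(fmt.Sprintf("context deadline set: %v", hasDeadline))
	zap.L().Debug("import-grouping illustration only")
}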
diff --git a/internal/mq/msgstream/mq_factory_test.go b/internal/mq/msgstream/mq_factory_test.go index c66d6a3310..dc0e9213c1 100644 --- a/internal/mq/msgstream/mq_factory_test.go +++ b/internal/mq/msgstream/mq_factory_test.go @@ -40,5 +40,4 @@ func TestRmsFactory(t *testing.T) { _, err = rmsFactory.NewTtMsgStream(ctx) assert.NoError(t, err) - } diff --git a/internal/mq/msgstream/mqwrapper/rmq/rmq_client.go b/internal/mq/msgstream/mqwrapper/rmq/rmq_client.go index 32421c62ce..3c28752779 100644 --- a/internal/mq/msgstream/mqwrapper/rmq/rmq_client.go +++ b/internal/mq/msgstream/mqwrapper/rmq/rmq_client.go @@ -21,10 +21,10 @@ import ( "strconv" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/client" - "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/server" "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/client" + "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/server" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" diff --git a/internal/mq/msgstream/mqwrapper/rmq/rmq_client_test.go b/internal/mq/msgstream/mqwrapper/rmq/rmq_client_test.go index 11bed58150..361db2e0b0 100644 --- a/internal/mq/msgstream/mqwrapper/rmq/rmq_client_test.go +++ b/internal/mq/msgstream/mqwrapper/rmq/rmq_client_test.go @@ -25,11 +25,11 @@ import ( "time" "github.com/apache/pulsar-client-go/pulsar" - rocksmqimplclient "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/client" - rocksmqimplserver "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/server" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" + rocksmqimplclient "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/client" + rocksmqimplserver "github.com/milvus-io/milvus/internal/mq/mqimpl/rocksmq/server" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" pulsarwrapper "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper/pulsar" "github.com/milvus-io/milvus/pkg/util/paramtable" diff --git a/internal/mq/msgstream/mqwrapper/rmq/rmq_id_test.go b/internal/mq/msgstream/mqwrapper/rmq/rmq_id_test.go index e92db5a93f..0ad7b349f6 100644 --- a/internal/mq/msgstream/mqwrapper/rmq/rmq_id_test.go +++ b/internal/mq/msgstream/mqwrapper/rmq/rmq_id_test.go @@ -79,7 +79,6 @@ func Test_Equal(t *testing.T) { ret, err := rid1.Equal(rid1.Serialize()) assert.NoError(t, err) assert.True(t, ret) - } { diff --git a/internal/mq/msgstream/mqwrapper/rmq/rmq_producer.go b/internal/mq/msgstream/mqwrapper/rmq/rmq_producer.go index 408fe3810f..fc391afd37 100644 --- a/internal/mq/msgstream/mqwrapper/rmq/rmq_producer.go +++ b/internal/mq/msgstream/mqwrapper/rmq/rmq_producer.go @@ -52,6 +52,6 @@ func (rp *rmqProducer) Send(ctx context.Context, message *mqwrapper.ProducerMess // Close does nothing currently func (rp *rmqProducer) Close() { - //TODO: close producer. Now it has bug - //rp.p.Close() + // TODO: close producer. 
Now it has bug + // rp.p.Close() } diff --git a/internal/parser/planparserv2/check_identical_test.go b/internal/parser/planparserv2/check_identical_test.go index a59ae52d73..9f48aec504 100644 --- a/internal/parser/planparserv2/check_identical_test.go +++ b/internal/parser/planparserv2/check_identical_test.go @@ -3,10 +3,10 @@ package planparserv2 import ( "testing" - "github.com/milvus-io/milvus/internal/proto/planpb" - - "github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/internal/proto/planpb" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) func TestCheckIdentical(t *testing.T) { @@ -122,37 +122,45 @@ func TestCheckVectorANNSIdentical(t *testing.T) { }, { args: args{ - node1: &planpb.VectorANNS{VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, + node1: &planpb.VectorANNS{ + VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, Predicates: &planpb.Expr{ Expr: &planpb.Expr_ColumnExpr{ ColumnExpr: &planpb.ColumnExpr{ Info: &planpb.ColumnInfo{}, }, }, - }}, - node2: &planpb.VectorANNS{VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, + }, + }, + node2: &planpb.VectorANNS{ + VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, Predicates: &planpb.Expr{ Expr: &planpb.Expr_ValueExpr{ ValueExpr: &planpb.ValueExpr{Value: NewInt(100)}, }, - }}, + }, + }, }, want: false, }, { args: args{ - node1: &planpb.VectorANNS{VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, + node1: &planpb.VectorANNS{ + VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, Predicates: &planpb.Expr{ Expr: &planpb.Expr_ValueExpr{ ValueExpr: &planpb.ValueExpr{Value: NewInt(100)}, }, - }}, - node2: &planpb.VectorANNS{VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, + }, + }, + node2: &planpb.VectorANNS{ + VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, Predicates: &planpb.Expr{ Expr: &planpb.Expr_ValueExpr{ ValueExpr: &planpb.ValueExpr{Value: NewInt(100)}, }, - }}, + }, + }, }, want: true, }, @@ -214,23 +222,27 @@ func TestCheckPlanNodeIdentical(t *testing.T) { args: args{ node1: &planpb.PlanNode{ Node: &planpb.PlanNode_VectorAnns{ - VectorAnns: &planpb.VectorANNS{VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, + VectorAnns: &planpb.VectorANNS{ + VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: 
"L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, Predicates: &planpb.Expr{ Expr: &planpb.Expr_ValueExpr{ ValueExpr: &planpb.ValueExpr{Value: NewInt(100)}, }, - }}, + }, + }, }, OutputFieldIds: []int64{100}, }, node2: &planpb.PlanNode{ Node: &planpb.PlanNode_VectorAnns{ - VectorAnns: &planpb.VectorANNS{VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, + VectorAnns: &planpb.VectorANNS{ + VectorType: planpb.VectorType_FloatVector, FieldId: 100, PlaceholderTag: "$0", QueryInfo: &planpb.QueryInfo{Topk: 1, MetricType: "L2", SearchParams: `{"nprobe": 10}`, RoundDecimal: 6}, Predicates: &planpb.Expr{ Expr: &planpb.Expr_ValueExpr{ ValueExpr: &planpb.ValueExpr{Value: NewInt(100)}, }, - }}, + }, + }, }, OutputFieldIds: []int64{100}, }, diff --git a/internal/parser/planparserv2/parser_visitor.go b/internal/parser/planparserv2/parser_visitor.go index 1de20b5423..3476b2d44c 100644 --- a/internal/parser/planparserv2/parser_visitor.go +++ b/internal/parser/planparserv2/parser_visitor.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/antlr/antlr4/runtime/Go/antlr" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" parser "github.com/milvus-io/milvus/internal/parser/planparserv2/generated" "github.com/milvus-io/milvus/internal/proto/planpb" diff --git a/internal/parser/planparserv2/plan_parser_v2.go b/internal/parser/planparserv2/plan_parser_v2.go index 81ac651e2f..91de906c1e 100644 --- a/internal/parser/planparserv2/plan_parser_v2.go +++ b/internal/parser/planparserv2/plan_parser_v2.go @@ -3,9 +3,9 @@ package planparserv2 import ( "fmt" + "github.com/antlr/antlr4/runtime/Go/antlr" "go.uber.org/zap" - "github.com/antlr/antlr4/runtime/Go/antlr" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/planpb" "github.com/milvus-io/milvus/pkg/log" diff --git a/internal/parser/planparserv2/plan_parser_v2_test.go b/internal/parser/planparserv2/plan_parser_v2_test.go index c23b7e8724..fce00dc72b 100644 --- a/internal/parser/planparserv2/plan_parser_v2_test.go +++ b/internal/parser/planparserv2/plan_parser_v2_test.go @@ -4,11 +4,12 @@ import ( "sync" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/planpb" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" ) func newTestSchema() *schemapb.CollectionSchema { diff --git a/internal/parser/planparserv2/pool.go b/internal/parser/planparserv2/pool.go index 005884b60e..73433572ee 100644 --- a/internal/parser/planparserv2/pool.go +++ b/internal/parser/planparserv2/pool.go @@ -4,6 +4,7 @@ import ( "sync" "github.com/antlr/antlr4/runtime/Go/antlr" + antlrparser "github.com/milvus-io/milvus/internal/parser/planparserv2/generated" ) diff --git a/internal/parser/planparserv2/pool_test.go b/internal/parser/planparserv2/pool_test.go index 37b64e12cb..bea97fb9b6 100644 --- a/internal/parser/planparserv2/pool_test.go +++ b/internal/parser/planparserv2/pool_test.go @@ -4,8 +4,9 @@ import ( "testing" "github.com/antlr/antlr4/runtime/Go/antlr" - antlrparser "github.com/milvus-io/milvus/internal/parser/planparserv2/generated" "github.com/stretchr/testify/assert" + + antlrparser "github.com/milvus-io/milvus/internal/parser/planparserv2/generated" ) func genNaiveInputStream() *antlr.InputStream { diff --git 
a/internal/parser/planparserv2/show_visitor.go b/internal/parser/planparserv2/show_visitor.go index 6930e8fe76..b9b263b6e0 100644 --- a/internal/parser/planparserv2/show_visitor.go +++ b/internal/parser/planparserv2/show_visitor.go @@ -9,8 +9,7 @@ import ( "github.com/milvus-io/milvus/pkg/log" ) -type ShowExprVisitor struct { -} +type ShowExprVisitor struct{} func extractColumnInfo(info *planpb.ColumnInfo) interface{} { js := make(map[string]interface{}) diff --git a/internal/parser/planparserv2/utils_test.go b/internal/parser/planparserv2/utils_test.go index a8c68a1814..843bd2d34b 100644 --- a/internal/parser/planparserv2/utils_test.go +++ b/internal/parser/planparserv2/utils_test.go @@ -3,10 +3,10 @@ package planparserv2 import ( "testing" - "github.com/milvus-io/milvus/internal/proto/planpb" "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/internal/proto/planpb" ) func Test_relationalCompatible(t *testing.T) { diff --git a/internal/proxy/accesslog/access_log.go b/internal/proxy/accesslog/access_log.go index 8541a94d27..6cb7961b70 100644 --- a/internal/proxy/accesslog/access_log.go +++ b/internal/proxy/accesslog/access_log.go @@ -23,24 +23,27 @@ import ( "sync" "sync/atomic" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/paramtable" "go.uber.org/zap" "go.uber.org/zap/zapcore" - "google.golang.org/grpc" + + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) const ( clientRequestIDKey = "client_request_id" ) -var _globalL, _globalW atomic.Value -var once sync.Once +var ( + _globalL, _globalW atomic.Value + once sync.Once +) func A() *zap.Logger { return _globalL.Load().(*zap.Logger) } + func W() *RotateLogger { return _globalW.Load().(*RotateLogger) } @@ -109,11 +112,11 @@ func PrintAccessInfo(ctx context.Context, resp interface{}, err error, rpcInfo * } fields := []zap.Field{ - //format time cost of task + // format time cost of task zap.String("timeCost", fmt.Sprintf("%d ms", timeCost)), } - //get trace ID of task + // get trace ID of task traceID, ok := getTraceID(ctx) if !ok { log.Warn("access log print failed: could not get trace ID") @@ -121,7 +124,7 @@ func PrintAccessInfo(ctx context.Context, resp interface{}, err error, rpcInfo * } fields = append(fields, zap.String("traceId", traceID)) - //get response size of task + // get response size of task responseSize, ok := getResponseSize(resp) if !ok { log.Warn("access log print failed: could not get response size") @@ -129,7 +132,7 @@ func PrintAccessInfo(ctx context.Context, resp interface{}, err error, rpcInfo * } fields = append(fields, zap.Int("responseSize", responseSize)) - //get err code of task + // get err code of task errCode, ok := getErrCode(resp) if !ok { // unknown error code @@ -137,13 +140,13 @@ func PrintAccessInfo(ctx context.Context, resp interface{}, err error, rpcInfo * } fields = append(fields, zap.Int("errorCode", errCode)) - //get status of grpc + // get status of grpc Status := getGrpcStatus(err) if Status == "OK" && errCode > 0 { Status = "TaskFailed" } - //get method name of grpc + // get method name of grpc _, methodName := path.Split(rpcInfo.FullMethod) A().Info(fmt.Sprintf("%v: %s-%s", Status, getAccessAddr(ctx), methodName), fields...) 
diff --git a/internal/proxy/accesslog/access_log_test.go b/internal/proxy/accesslog/access_log_test.go index 755b9f20c5..c16204f9e7 100644 --- a/internal/proxy/accesslog/access_log_test.go +++ b/internal/proxy/accesslog/access_log_test.go @@ -23,13 +23,14 @@ import ( "testing" "time" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" "google.golang.org/grpc" "google.golang.org/grpc/metadata" "google.golang.org/grpc/peer" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestAccessLogger_NotEnable(t *testing.T) { @@ -126,6 +127,7 @@ func TestAccessLogger_Stdout(t *testing.T) { ok := PrintAccessInfo(ctx, resp, nil, rpcInfo, 0) assert.True(t, ok) } + func TestAccessLogger_WithMinio(t *testing.T) { var Params paramtable.ComponentParam diff --git a/internal/proxy/accesslog/log_writer.go b/internal/proxy/accesslog/log_writer.go index f49042bc29..ab8d887188 100644 --- a/internal/proxy/accesslog/log_writer.go +++ b/internal/proxy/accesslog/log_writer.go @@ -25,30 +25,33 @@ import ( "sync" "time" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/zap" ) const megabyte = 1024 * 1024 -var CheckBucketRetryAttempts uint = 20 -var timeFormat = ".2006-01-02T15-04-05.000" +var ( + CheckBucketRetryAttempts uint = 20 + timeFormat = ".2006-01-02T15-04-05.000" +) // a rotated file logger for zap.log and could upload sealed log file to minIO type RotateLogger struct { - //local path is the path to save log before update to minIO - //use os.TempDir()/accesslog if empty + // local path is the path to save log before update to minIO + // use os.TempDir()/accesslog if empty localPath string fileName string - //the interval time of update log to minIO + // the interval time of update log to minIO rotatedTime int64 - //the max size(Mb) of log file - //if local file large than maxSize will update immediately - //close if empty(zero) + // the max size(Mb) of log file + // if local file large than maxSize will update immediately + // close if empty(zero) maxSize int - //MaxBackups is the maximum number of old log files to retain - //close retention limit if empty(zero) + // MaxBackups is the maximum number of old log files to retain + // close retention limit if empty(zero) maxBackups int handler *minioHandler @@ -162,7 +165,7 @@ func (l *RotateLogger) openFileExistingOrNew() error { return fmt.Errorf("file to get log file info: %s", err) } - file, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY, 0644) + file, err := os.OpenFile(filename, os.O_APPEND|os.O_WRONLY, 0o644) if err != nil { return l.openNewFile() } @@ -173,13 +176,13 @@ func (l *RotateLogger) openFileExistingOrNew() error { } func (l *RotateLogger) openNewFile() error { - err := os.MkdirAll(l.dir(), 0744) + err := os.MkdirAll(l.dir(), 0o744) if err != nil { return fmt.Errorf("make directories for new log file filed: %s", err) } name := l.filename() - mode := os.FileMode(0644) + mode := os.FileMode(0o644) info, err := os.Stat(name) if err == nil { mode = info.Mode() @@ -269,7 +272,6 @@ func (l *RotateLogger) timeRotating() { case <-ticker.C: l.Rotate() } - } } diff --git a/internal/proxy/accesslog/log_writer_test.go b/internal/proxy/accesslog/log_writer_test.go index e5476c897f..98eea96468 100644 --- 
a/internal/proxy/accesslog/log_writer_test.go +++ b/internal/proxy/accesslog/log_writer_test.go @@ -22,20 +22,22 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func getText(size int) []byte { - var text = make([]byte, size) + text := make([]byte, size) for i := 0; i < size; i++ { text[i] = byte('-') } return text } + func TestRotateLogger_Basic(t *testing.T) { var Params paramtable.ComponentParam Params.Init(paramtable.NewBaseTable(paramtable.SkipRemote(true))) @@ -145,7 +147,6 @@ func TestRotateLogger_LocalRetention(t *testing.T) { logFiles, err := logger.oldLogFiles() assert.NoError(t, err) assert.Equal(t, 1, len(logFiles)) - } func TestRotateLogger_BasicError(t *testing.T) { @@ -161,7 +162,7 @@ func TestRotateLogger_BasicError(t *testing.T) { logger.openFileExistingOrNew() - os.Mkdir(path.Join(logger.dir(), "test"), 0744) + os.Mkdir(path.Join(logger.dir(), "test"), 0o744) logfile, err := logger.oldLogFiles() assert.NoError(t, err) assert.Equal(t, 0, len(logfile)) @@ -179,7 +180,7 @@ func TestRotateLogger_InitError(t *testing.T) { params.Save(params.ProxyCfg.AccessLog.LocalPath.Key, testPath) params.Save(params.ProxyCfg.AccessLog.MinioEnable.Key, "true") params.Save(params.MinioCfg.Address.Key, "") - //init err with invalid minio address + // init err with invalid minio address _, err := NewRotateLogger(¶ms.ProxyCfg.AccessLog, ¶ms.MinioCfg) assert.Error(t, err) } diff --git a/internal/proxy/accesslog/minio_handler.go b/internal/proxy/accesslog/minio_handler.go index 2a6372d443..df16387412 100644 --- a/internal/proxy/accesslog/minio_handler.go +++ b/internal/proxy/accesslog/minio_handler.go @@ -24,12 +24,13 @@ import ( "sync" "time" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/milvus-io/milvus/pkg/util/retry" "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/milvus-io/milvus/pkg/util/retry" ) type config struct { @@ -43,14 +44,16 @@ type config struct { iamEndpoint string } -//minIO client for upload access log -//TODO file retention on minio +// minIO client for upload access log +// TODO file retention on minio +type ( + RetentionFunc func(object minio.ObjectInfo) bool + task struct { + objectName string + filePath string + } +) -type RetentionFunc func(object minio.ObjectInfo) bool -type task struct { - objectName string - filePath string -} type minioHandler struct { bucketName string rootPath string @@ -152,7 +155,6 @@ func (c *minioHandler) scheduler() { log.Warn("close minio logger handler") return } - } } diff --git a/internal/proxy/accesslog/minio_handler_test.go b/internal/proxy/accesslog/minio_handler_test.go index a298f506c8..d5c09333df 100644 --- a/internal/proxy/accesslog/minio_handler_test.go +++ b/internal/proxy/accesslog/minio_handler_test.go @@ -23,8 +23,9 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestMinioHandler_ConnectError(t *testing.T) { @@ -54,14 +55,14 @@ func TestMinHandler_Basic(t *testing.T) { Params.Save(Params.ProxyCfg.AccessLog.MinioEnable.Key, "true") 
Params.Save(Params.ProxyCfg.AccessLog.RemotePath.Key, "accesslog") Params.Save(Params.ProxyCfg.AccessLog.MaxBackups.Key, "8") - //close retention + // close retention Params.Save(Params.ProxyCfg.AccessLog.RemoteMaxTime.Key, "0") - err := os.MkdirAll(testPath, 0744) + err := os.MkdirAll(testPath, 0o744) assert.NoError(t, err) defer os.RemoveAll(testPath) - //init MinioHandler + // init MinioHandler handler, err := NewMinioHandler( context.Background(), &Params.MinioCfg, @@ -72,22 +73,22 @@ func TestMinHandler_Basic(t *testing.T) { defer handler.Clean() prefix, ext := "accesslog", ".log" - //create a log file to upload + // create a log file to upload err = createAndUpdateFile(handler, time.Now(), testPath, prefix, ext) assert.NoError(t, err) time.Sleep(500 * time.Millisecond) - //check if upload success + // check if upload success lists, err := handler.listAll() assert.NoError(t, err) assert.Equal(t, 1, len(lists)) - //delete file from minio + // delete file from minio err = handler.removeWithPrefix(prefix) assert.NoError(t, err) time.Sleep(500 * time.Millisecond) - //check if delete success + // check if delete success lists, err = handler.listAll() assert.NoError(t, err) assert.Equal(t, 0, len(lists)) @@ -102,7 +103,7 @@ func TestMinioHandler_WithTimeRetention(t *testing.T) { Params.Save(Params.ProxyCfg.AccessLog.MaxBackups.Key, "8") Params.Save(Params.ProxyCfg.AccessLog.RemoteMaxTime.Key, "168") - err := os.MkdirAll(testPath, 0744) + err := os.MkdirAll(testPath, 0o744) assert.NoError(t, err) defer os.RemoveAll(testPath) @@ -118,16 +119,16 @@ func TestMinioHandler_WithTimeRetention(t *testing.T) { prefix, ext := "accesslog", ".log" handler.retentionPolicy = getTimeRetentionFunc(Params.ProxyCfg.AccessLog.RemoteMaxTime.GetAsInt(), prefix, ext) - //create a log file + // create a log file err = createAndUpdateFile(handler, time.Now(), testPath, prefix, ext) assert.NoError(t, err) - //mock a log file like time interval was large than RemoteMaxTime + // mock a log file like time interval was large than RemoteMaxTime oldTime := time.Now().Add(-1 * time.Duration(Params.ProxyCfg.AccessLog.RemoteMaxTime.GetAsInt()+1) * time.Hour) err = createAndUpdateFile(handler, oldTime, testPath, prefix, ext) assert.NoError(t, err) - //create a irrelevant file + // create a irrelevant file err = createAndUpdateFile(handler, time.Now(), testPath, "irrelevant", ext) assert.NoError(t, err) @@ -139,17 +140,16 @@ func TestMinioHandler_WithTimeRetention(t *testing.T) { handler.Retention() time.Sleep(500 * time.Millisecond) - //after retention the old file will be removed + // after retention the old file will be removed lists, err = handler.listAll() assert.NoError(t, err) assert.Equal(t, 2, len(lists)) } func createAndUpdateFile(handler *minioHandler, t time.Time, rootPath, prefix, ext string) error { - oldFileName := prefix + t.Format(timeFormat) + ext oldFilePath := path.Join(rootPath, oldFileName) - oldFileMode := os.FileMode(0644) + oldFileMode := os.FileMode(0o644) _, err := os.OpenFile(oldFilePath, os.O_CREATE|os.O_WRONLY|os.O_TRUNC, oldFileMode) if err != nil { return err diff --git a/internal/proxy/accesslog/util.go b/internal/proxy/accesslog/util.go index 5cb034823c..9de75fc80d 100644 --- a/internal/proxy/accesslog/util.go +++ b/internal/proxy/accesslog/util.go @@ -23,15 +23,15 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/golang/protobuf/proto" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "go.opentelemetry.io/otel/trace" "google.golang.org/grpc" 
"google.golang.org/grpc/codes" "google.golang.org/grpc/metadata" "google.golang.org/grpc/peer" "google.golang.org/grpc/status" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) type BaseResponse interface { diff --git a/internal/proxy/accesslog/util_test.go b/internal/proxy/accesslog/util_test.go index dfec23c6c9..2d832255d8 100644 --- a/internal/proxy/accesslog/util_test.go +++ b/internal/proxy/accesslog/util_test.go @@ -21,16 +21,17 @@ import ( "net" "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/tracer" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" "go.opentelemetry.io/otel" "go.uber.org/zap" "google.golang.org/grpc/metadata" "google.golang.org/grpc/peer" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/tracer" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestGetAccessAddr(t *testing.T) { diff --git a/internal/proxy/authentication_interceptor.go b/internal/proxy/authentication_interceptor.go index d1c1e2152a..09aced667b 100644 --- a/internal/proxy/authentication_interceptor.go +++ b/internal/proxy/authentication_interceptor.go @@ -21,7 +21,7 @@ func parseMD(authorization []string) (username, password string) { return } // token format: base64 - //token := strings.TrimPrefix(authorization[0], "Bearer ") + // token := strings.TrimPrefix(authorization[0], "Bearer ") token := authorization[0] rawToken, err := crypto.Base64Decode(token) if err != nil { @@ -40,7 +40,7 @@ func parseMD(authorization []string) (username, password string) { func validSourceID(ctx context.Context, authorization []string) bool { if len(authorization) < 1 { - //log.Warn("key not found in header", zap.String("key", util.HeaderSourceID)) + // log.Warn("key not found in header", zap.String("key", util.HeaderSourceID)) return false } // token format: base64 diff --git a/internal/proxy/channels_mgr.go b/internal/proxy/channels_mgr.go index a33721febe..159c79418f 100644 --- a/internal/proxy/channels_mgr.go +++ b/internal/proxy/channels_mgr.go @@ -24,6 +24,7 @@ import ( "sync" "github.com/cockroachdb/errors" + "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" @@ -33,8 +34,6 @@ import ( "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/util/commonpbutil" "github.com/milvus-io/milvus/pkg/util/paramtable" - - "go.uber.org/zap" ) // channelsMgr manages the pchans, vchans and related message stream of collections. 
diff --git a/internal/proxy/channels_mgr_test.go b/internal/proxy/channels_mgr_test.go index 39c1bd14db..089c30abff 100644 --- a/internal/proxy/channels_mgr_test.go +++ b/internal/proxy/channels_mgr_test.go @@ -21,14 +21,12 @@ import ( "testing" "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus/pkg/mq/msgstream" - "github.com/milvus-io/milvus/pkg/util/paramtable" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/pkg/mq/msgstream" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func Test_removeDuplicate(t *testing.T) { @@ -82,7 +80,8 @@ func Test_getDmlChannelsFunc(t *testing.T) { return &milvuspb.DescribeCollectionResponse{ VirtualChannelNames: []string{"111", "222"}, PhysicalChannelNames: []string{"111", "111"}, - Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}}, nil + Status: &commonpb.Status{ErrorCode: commonpb.ErrorCode_Success}, + }, nil } f := getDmlChannelsFunc(ctx, rc) got, err := f(100) diff --git a/internal/proxy/channels_time_ticker.go b/internal/proxy/channels_time_ticker.go index 4ce450603a..c229010989 100644 --- a/internal/proxy/channels_time_ticker.go +++ b/internal/proxy/channels_time_ticker.go @@ -219,7 +219,6 @@ func newChannelsTimeTicker( getStatisticsFunc getPChanStatisticsFuncType, tso tsoAllocator, ) *channelsTimeTickerImpl { - ctx1, cancel := context.WithCancel(ctx) ticker := &channelsTimeTickerImpl{ diff --git a/internal/proxy/channels_time_ticker_test.go b/internal/proxy/channels_time_ticker_test.go index 6c6d10d274..a5600e9a5f 100644 --- a/internal/proxy/channels_time_ticker_test.go +++ b/internal/proxy/channels_time_ticker_test.go @@ -23,12 +23,12 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/typeutil" - - "github.com/stretchr/testify/assert" - "go.uber.org/zap" ) func newGetStatisticsFunc(pchans []pChan) getPChanStatisticsFuncType { diff --git a/internal/proxy/client_info.go b/internal/proxy/client_info.go index a6bfcecffd..5b60c92cce 100644 --- a/internal/proxy/client_info.go +++ b/internal/proxy/client_info.go @@ -4,9 +4,10 @@ import ( "context" "time" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) type clientInfo struct { diff --git a/internal/proxy/condition_test.go b/internal/proxy/condition_test.go index 90ceea3095..13dba5b651 100644 --- a/internal/proxy/condition_test.go +++ b/internal/proxy/condition_test.go @@ -23,11 +23,10 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" + "go.uber.org/zap" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) func TestTaskCondition_Ctx(t *testing.T) { diff --git a/internal/proxy/connection_manager.go b/internal/proxy/connection_manager.go index 581d24e6a4..de47517fb1 100644 --- a/internal/proxy/connection_manager.go +++ b/internal/proxy/connection_manager.go @@ -9,7 +9,6 @@ import ( "github.com/golang/protobuf/proto" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus/pkg/log" ) diff --git a/internal/proxy/connection_manager_test.go b/internal/proxy/connection_manager_test.go index 346542b2c0..25f8c98b27 100644 --- a/internal/proxy/connection_manager_test.go +++ 
b/internal/proxy/connection_manager_test.go @@ -5,9 +5,9 @@ import ( "testing" "time" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) func Test_withDuration(t *testing.T) { diff --git a/internal/proxy/count_reducer.go b/internal/proxy/count_reducer.go index af0e5d6161..41a476ab99 100644 --- a/internal/proxy/count_reducer.go +++ b/internal/proxy/count_reducer.go @@ -6,8 +6,7 @@ import ( "github.com/milvus-io/milvus/internal/util/funcutil" ) -type cntReducer struct { -} +type cntReducer struct{} func (r *cntReducer) Reduce(results []*internalpb.RetrieveResults) (*milvuspb.QueryResults, error) { cnt := int64(0) diff --git a/internal/proxy/count_reducer_test.go b/internal/proxy/count_reducer_test.go index d7a2f10d88..4e1fd43677 100644 --- a/internal/proxy/count_reducer_test.go +++ b/internal/proxy/count_reducer_test.go @@ -3,11 +3,11 @@ package proxy import ( "testing" - "github.com/milvus-io/milvus/internal/util/funcutil" "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/util/funcutil" ) func Test_cntReducer_Reduce(t *testing.T) { diff --git a/internal/proxy/data_coord_mock_test.go b/internal/proxy/data_coord_mock_test.go index 9870bcc364..11bb4786b1 100644 --- a/internal/proxy/data_coord_mock_test.go +++ b/internal/proxy/data_coord_mock_test.go @@ -19,6 +19,8 @@ package proxy import ( "context" + "go.uber.org/atomic" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/datapb" @@ -29,7 +31,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/uniquegenerator" - "go.uber.org/atomic" ) type DataCoordMock struct { diff --git a/internal/proxy/database_interceptor_test.go b/internal/proxy/database_interceptor_test.go index f1ea2ae300..77f62c6843 100644 --- a/internal/proxy/database_interceptor_test.go +++ b/internal/proxy/database_interceptor_test.go @@ -133,5 +133,4 @@ func TestDatabaseInterceptor(t *testing.T) { } } }) - } diff --git a/internal/proxy/dummyreq_test.go b/internal/proxy/dummyreq_test.go index ee1d1fa552..a86ec6ad83 100644 --- a/internal/proxy/dummyreq_test.go +++ b/internal/proxy/dummyreq_test.go @@ -20,10 +20,10 @@ import ( "encoding/json" "testing" - "github.com/milvus-io/milvus/pkg/log" + "github.com/stretchr/testify/assert" "go.uber.org/zap" - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/pkg/log" ) func Test_parseDummyRequestType(t *testing.T) { diff --git a/internal/proxy/hook_interceptor.go b/internal/proxy/hook_interceptor.go index a86e640cf6..a65f61296d 100644 --- a/internal/proxy/hook_interceptor.go +++ b/internal/proxy/hook_interceptor.go @@ -7,17 +7,17 @@ import ( "strconv" "strings" + "go.uber.org/zap" + "google.golang.org/grpc" + "github.com/milvus-io/milvus-proto/go-api/v2/hook" "github.com/milvus-io/milvus/pkg/config" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/zap" - "google.golang.org/grpc" ) -type defaultHook struct { -} +type defaultHook struct{} func (d defaultHook) Init(params map[string]string) error { return nil diff --git a/internal/proxy/hook_interceptor_test.go b/internal/proxy/hook_interceptor_test.go 
index b0341fdcee..918f260897 100644 --- a/internal/proxy/hook_interceptor_test.go +++ b/internal/proxy/hook_interceptor_test.go @@ -5,11 +5,10 @@ import ( "testing" "github.com/cockroachdb/errors" - + "github.com/stretchr/testify/assert" "google.golang.org/grpc" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" ) func TestInitHook(t *testing.T) { diff --git a/internal/proxy/impl.go b/internal/proxy/impl.go index 8b8462541c..f7f3c1e62a 100644 --- a/internal/proxy/impl.go +++ b/internal/proxy/impl.go @@ -1102,7 +1102,7 @@ func (node *Proxy) HasPartition(ctx context.Context, request *milvuspb.HasPartit defer sp.End() method := "HasPartition" tr := timerecord.NewTimeRecorder(method) - //TODO: use collectionID instead of collectionName + // TODO: use collectionID instead of collectionName metrics.ProxyFunctionCall.WithLabelValues(strconv.FormatInt(paramtable.GetNodeID(), 10), method, metrics.TotalLabel).Inc() @@ -1402,7 +1402,7 @@ func (node *Proxy) ShowPartitions(ctx context.Context, request *milvuspb.ShowPar method := "ShowPartitions" tr := timerecord.NewTimeRecorder(method) - //TODO: use collectionID instead of collectionName + // TODO: use collectionID instead of collectionName metrics.ProxyFunctionCall.WithLabelValues(strconv.FormatInt(paramtable.GetNodeID(), 10), method, metrics.TotalLabel).Inc() @@ -3284,7 +3284,7 @@ func (node *Proxy) RegisterLink(ctx context.Context, req *milvuspb.RegisterLinkR }, }, nil } - //metrics.ProxyLinkedSDKs.WithLabelValues(strconv.FormatInt(paramtable.GetNodeID(), 10)).Inc() + // metrics.ProxyLinkedSDKs.WithLabelValues(strconv.FormatInt(paramtable.GetNodeID(), 10)).Inc() return &milvuspb.RegisterLinkResponse{ Address: nil, Status: &commonpb.Status{ @@ -3416,7 +3416,7 @@ func (node *Proxy) GetProxyMetrics(ctx context.Context, req *milvuspb.GetMetrics }, nil } - //log.Debug("Proxy.GetProxyMetrics", + // log.Debug("Proxy.GetProxyMetrics", // zap.String("metricType", metricType)) return proxyMetrics, nil @@ -4387,7 +4387,8 @@ func (node *Proxy) CheckHealth(ctx context.Context, request *milvuspb.CheckHealt return &milvuspb.CheckHealthResponse{ Status: unhealthyStatus(), IsHealthy: false, - Reasons: []string{reason}}, nil + Reasons: []string{reason}, + }, nil } group, ctx := errgroup.WithContext(ctx) diff --git a/internal/proxy/impl_test.go b/internal/proxy/impl_test.go index 0c0696f7d2..78fbe2ee32 100644 --- a/internal/proxy/impl_test.go +++ b/internal/proxy/impl_test.go @@ -96,7 +96,8 @@ func TestProxy_CheckHealth(t *testing.T) { t.Run("proxy health check is fail", func(t *testing.T) { checkHealthFunc1 := func(ctx context.Context, - req *milvuspb.CheckHealthRequest) (*milvuspb.CheckHealthResponse, error) { + req *milvuspb.CheckHealthRequest, + ) (*milvuspb.CheckHealthResponse, error) { return &milvuspb.CheckHealthResponse{ IsHealthy: false, Reasons: []string{"unHealth"}, @@ -114,7 +115,8 @@ func TestProxy_CheckHealth(t *testing.T) { mock.checkHealthFunc = checkHealthFunc1 }), queryCoord: qc, - dataCoord: dataCoordMock} + dataCoord: dataCoordMock, + } node.multiRateLimiter = NewMultiRateLimiter() node.stateCode.Store(commonpb.StateCode_Healthy) ctx := context.Background() @@ -792,7 +794,6 @@ func TestProxy_ListClientInfos(t *testing.T) { resp, err := node.ListClientInfos(context.TODO(), nil) assert.NoError(t, err) assert.Equal(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode()) - }) } @@ -948,7 +949,8 @@ func TestProxyListDatabase(t *testing.T) { rc := mocks.NewRootCoord(t) rc.On("ListDatabases", mock.Anything, 
mock.Anything). Return(&milvuspb.ListDatabasesResponse{ - Status: merr.Status(nil)}, nil) + Status: merr.Status(nil), + }, nil) node.rootCoord = rc node.stateCode.Store(commonpb.StateCode_Healthy) ctx := context.Background() diff --git a/internal/proxy/keep_active_interceptor.go b/internal/proxy/keep_active_interceptor.go index 0359279517..8e536f8cbb 100644 --- a/internal/proxy/keep_active_interceptor.go +++ b/internal/proxy/keep_active_interceptor.go @@ -5,13 +5,11 @@ import ( "fmt" "strconv" - "github.com/milvus-io/milvus/pkg/util" - - "github.com/milvus-io/milvus/pkg/util/funcutil" - + "google.golang.org/grpc" "google.golang.org/grpc/metadata" - "google.golang.org/grpc" + "github.com/milvus-io/milvus/pkg/util" + "github.com/milvus-io/milvus/pkg/util/funcutil" ) func getIdentifierFromContext(ctx context.Context) (int64, error) { diff --git a/internal/proxy/keep_active_interceptor_test.go b/internal/proxy/keep_active_interceptor_test.go index 1fc5139f3f..3de5e19f0b 100644 --- a/internal/proxy/keep_active_interceptor_test.go +++ b/internal/proxy/keep_active_interceptor_test.go @@ -4,9 +4,8 @@ import ( "context" "testing" - "google.golang.org/grpc" - "github.com/stretchr/testify/assert" + "google.golang.org/grpc" "google.golang.org/grpc/metadata" ) diff --git a/internal/proxy/look_aside_balancer.go b/internal/proxy/look_aside_balancer.go index 99a47dd1af..8afce328a0 100644 --- a/internal/proxy/look_aside_balancer.go +++ b/internal/proxy/look_aside_balancer.go @@ -24,6 +24,9 @@ import ( "sync" "time" + "go.uber.org/atomic" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/pkg/log" @@ -32,8 +35,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/atomic" - "go.uber.org/zap" ) type LookAsideBalancer struct { diff --git a/internal/proxy/look_aside_balancer_test.go b/internal/proxy/look_aside_balancer_test.go index 84360de537..97f6ec0b35 100644 --- a/internal/proxy/look_aside_balancer_test.go +++ b/internal/proxy/look_aside_balancer_test.go @@ -23,14 +23,15 @@ import ( "time" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "go.uber.org/atomic" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/mocks" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" - "go.uber.org/atomic" ) type LookAsideBalancerSuite struct { diff --git a/internal/proxy/meta_cache.go b/internal/proxy/meta_cache.go index c5d7f073bf..1ba40b9ef8 100644 --- a/internal/proxy/meta_cache.go +++ b/internal/proxy/meta_cache.go @@ -497,7 +497,6 @@ func (m *MetaCache) GetPartitions(ctx context.Context, database, collectionName ret[k] = v.partitionID } return ret, nil - } defer m.mu.RUnlock() diff --git a/internal/proxy/metrics_info.go b/internal/proxy/metrics_info.go index d368dcf1f3..26e9c0bed9 100644 --- a/internal/proxy/metrics_info.go +++ b/internal/proxy/metrics_info.go @@ -30,8 +30,10 @@ import ( "github.com/milvus-io/milvus/pkg/util/typeutil" ) -type getMetricsFuncType func(ctx context.Context, request *milvuspb.GetMetricsRequest) (*milvuspb.GetMetricsResponse, error) -type showConfigurationsFuncType func(ctx context.Context, 
request *internalpb.ShowConfigurationsRequest) (*internalpb.ShowConfigurationsResponse, error) +type ( + getMetricsFuncType func(ctx context.Context, request *milvuspb.GetMetricsRequest) (*milvuspb.GetMetricsResponse, error) + showConfigurationsFuncType func(ctx context.Context, request *internalpb.ShowConfigurationsRequest) (*internalpb.ShowConfigurationsResponse, error) +) // getQuotaMetrics returns ProxyQuotaMetrics. func getQuotaMetrics() (*metricsinfo.ProxyQuotaMetrics, error) { diff --git a/internal/proxy/metrics_info_test.go b/internal/proxy/metrics_info_test.go index 73309f09ed..c6d8d9953c 100644 --- a/internal/proxy/metrics_info_test.go +++ b/internal/proxy/metrics_info_test.go @@ -20,18 +20,16 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/internal/util/sessionutil" - "github.com/milvus-io/milvus/pkg/util/funcutil" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/milvus-io/milvus/pkg/util/uniquegenerator" - - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + "github.com/milvus-io/milvus/internal/util/sessionutil" + "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/metricsinfo" + "github.com/milvus-io/milvus/pkg/util/typeutil" + "github.com/milvus-io/milvus/pkg/util/uniquegenerator" ) func TestProxy_metrics(t *testing.T) { @@ -200,7 +198,6 @@ func TestProxy_metrics(t *testing.T) { Response: resp, ComponentName: metricsinfo.ConstructComponentName(typeutil.DataCoordRole, id), }, nil - } req, _ := metricsinfo.ConstructRequestByMetricType(metricsinfo.SystemInfoMetrics) diff --git a/internal/proxy/mock_channels_mgr_test.go b/internal/proxy/mock_channels_mgr_test.go index 062b62ed42..88bfe35236 100644 --- a/internal/proxy/mock_channels_mgr_test.go +++ b/internal/proxy/mock_channels_mgr_test.go @@ -1,7 +1,9 @@ package proxy -type getVChannelsFuncType = func(collectionID UniqueID) ([]vChan, error) -type removeDMLStreamFuncType = func(collectionID UniqueID) error +type ( + getVChannelsFuncType = func(collectionID UniqueID) ([]vChan, error) + removeDMLStreamFuncType = func(collectionID UniqueID) error +) type mockChannelsMgr struct { channelsMgr diff --git a/internal/proxy/mock_test.go b/internal/proxy/mock_test.go index 67ee524dce..ab7b5b21df 100644 --- a/internal/proxy/mock_test.go +++ b/internal/proxy/mock_test.go @@ -79,8 +79,7 @@ func newMockTsoAllocator() tsoAllocator { return &mockTsoAllocator{} } -type mockIDAllocatorInterface struct { -} +type mockIDAllocatorInterface struct{} func (m *mockIDAllocatorInterface) AllocOne() (UniqueID, error) { return UniqueID(uniquegenerator.GetUniqueIntGeneratorIns().GetInt()), nil @@ -312,8 +311,7 @@ func newSimpleMockMsgStream() *simpleMockMsgStream { } } -type simpleMockMsgStreamFactory struct { -} +type simpleMockMsgStreamFactory struct{} func (factory *simpleMockMsgStreamFactory) Init(param *paramtable.ComponentParam) error { return nil @@ -430,7 +428,7 @@ func generateFieldData(dataType schemapb.DataType, fieldName string, numRows int }, } default: - //TODO:: + // TODO:: } return fieldData diff --git a/internal/proxy/msg_pack.go b/internal/proxy/msg_pack.go index 543127095a..7d1d58b213 100644 --- a/internal/proxy/msg_pack.go +++ b/internal/proxy/msg_pack.go @@ -44,7 +44,8 @@ func genInsertMsgsByPartition(ctx context.Context, partitionName string, 
rowOffsets []int, channelName string, - insertMsg *msgstream.InsertMsg) ([]msgstream.TsMsg, error) { + insertMsg *msgstream.InsertMsg, +) ([]msgstream.TsMsg, error) { threshold := Params.PulsarCfg.MaxMessageSize.GetAsInt() // create empty insert message @@ -108,7 +109,8 @@ func repackInsertDataByPartition(ctx context.Context, rowOffsets []int, channelName string, insertMsg *msgstream.InsertMsg, - segIDAssigner *segIDAssigner) ([]msgstream.TsMsg, error) { + segIDAssigner *segIDAssigner, +) ([]msgstream.TsMsg, error) { res := make([]msgstream.TsMsg, 0) maxTs := Timestamp(0) @@ -155,7 +157,8 @@ func repackInsertDataByPartition(ctx context.Context, func setMsgID(ctx context.Context, msgs []msgstream.TsMsg, - idAllocator *allocator.IDAllocator) error { + idAllocator *allocator.IDAllocator, +) error { var idBegin int64 var err error @@ -180,7 +183,8 @@ func repackInsertData(ctx context.Context, insertMsg *msgstream.InsertMsg, result *milvuspb.MutationResult, idAllocator *allocator.IDAllocator, - segIDAssigner *segIDAssigner) (*msgstream.MsgPack, error) { + segIDAssigner *segIDAssigner, +) (*msgstream.MsgPack, error) { msgPack := &msgstream.MsgPack{ BeginTs: insertMsg.BeginTs(), EndTs: insertMsg.EndTs(), @@ -219,7 +223,8 @@ func repackInsertDataWithPartitionKey(ctx context.Context, insertMsg *msgstream.InsertMsg, result *milvuspb.MutationResult, idAllocator *allocator.IDAllocator, - segIDAssigner *segIDAssigner) (*msgstream.MsgPack, error) { + segIDAssigner *segIDAssigner, +) (*msgstream.MsgPack, error) { msgPack := &msgstream.MsgPack{ BeginTs: insertMsg.BeginTs(), EndTs: insertMsg.EndTs(), diff --git a/internal/proxy/msg_pack_test.go b/internal/proxy/msg_pack_test.go index 194777a1a9..34fe910355 100644 --- a/internal/proxy/msg_pack_test.go +++ b/internal/proxy/msg_pack_test.go @@ -171,7 +171,8 @@ func TestRepackInsertDataWithPartitionKey(t *testing.T) { fieldName2Types := map[string]schemapb.DataType{ testInt64Field: schemapb.DataType_Int64, testVarCharField: schemapb.DataType_VarChar, - testFloatVecField: schemapb.DataType_FloatVector} + testFloatVecField: schemapb.DataType_FloatVector, + } t.Run("create collection with partition key", func(t *testing.T) { schema := ConstructCollectionSchemaWithPartitionKey(collectionName, fieldName2Types, testInt64Field, testVarCharField, false) diff --git a/internal/proxy/multi_rate_limiter_test.go b/internal/proxy/multi_rate_limiter_test.go index 87083efaca..a41176af57 100644 --- a/internal/proxy/multi_rate_limiter_test.go +++ b/internal/proxy/multi_rate_limiter_test.go @@ -24,6 +24,8 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/internalpb" @@ -32,7 +34,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/ratelimitutil" - "github.com/stretchr/testify/assert" ) func TestMultiRateLimiter(t *testing.T) { @@ -65,7 +66,6 @@ func TestMultiRateLimiter(t *testing.T) { err = multiLimiter.Check(collectionID, internalpb.RateType(rt), math.MaxInt) assert.ErrorIs(t, err, merr.ErrServiceRateLimit) } - } Params.Save(Params.QuotaConfig.QuotaAndLimitsEnabled.Key, bak) }) diff --git a/internal/proxy/plan_parser.go b/internal/proxy/plan_parser.go index de48831587..aa730f885f 100644 --- a/internal/proxy/plan_parser.go +++ b/internal/proxy/plan_parser.go @@ -23,6 +23,7 @@ import ( ant_ast 
"github.com/antonmedv/expr/ast" ant_parser "github.com/antonmedv/expr/parser" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/planpb" "github.com/milvus-io/milvus/pkg/util/typeutil" diff --git a/internal/proxy/plan_parser_test.go b/internal/proxy/plan_parser_test.go index 2b8be9d1b4..b28a9381d1 100644 --- a/internal/proxy/plan_parser_test.go +++ b/internal/proxy/plan_parser_test.go @@ -20,15 +20,13 @@ import ( "fmt" "testing" - "github.com/milvus-io/milvus/internal/parser/planparserv2" - ant_ast "github.com/antonmedv/expr/ast" ant_parser "github.com/antonmedv/expr/parser" - "github.com/golang/protobuf/proto" "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/internal/parser/planparserv2" "github.com/milvus-io/milvus/internal/proto/planpb" "github.com/milvus-io/milvus/pkg/util/typeutil" ) diff --git a/internal/proxy/privilege_interceptor.go b/internal/proxy/privilege_interceptor.go index 5bff2fb200..3a4c8882c9 100644 --- a/internal/proxy/privilege_interceptor.go +++ b/internal/proxy/privilege_interceptor.go @@ -92,6 +92,7 @@ func PrivilegeInterceptorWithUsername(ctx context.Context, username string, req } return privilegeInterceptor(ctx, privilegeExt, username, req) } + func privilegeInterceptor(ctx context.Context, privilegeExt commonpb.PrivilegeExt, username string, req interface{}) (context.Context, error) { if username == util.UserRoot { return ctx, nil diff --git a/internal/proxy/privilege_interceptor_test.go b/internal/proxy/privilege_interceptor_test.go index dcfcbc2f8d..deab633923 100644 --- a/internal/proxy/privilege_interceptor_test.go +++ b/internal/proxy/privilege_interceptor_test.go @@ -5,6 +5,8 @@ import ( "sync" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/mocks" @@ -12,7 +14,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" ) func TestUnaryServerInterceptor(t *testing.T) { @@ -161,7 +162,6 @@ func TestPrivilegeInterceptor(t *testing.T) { getPolicyModel("foo") }) }) - } func TestResourceGroupPrivilege(t *testing.T) { @@ -220,5 +220,4 @@ func TestResourceGroupPrivilege(t *testing.T) { _, err = PrivilegeInterceptor(GetContext(context.Background(), "fooo:123456"), &milvuspb.TransferReplicaRequest{}) assert.NoError(t, err) }) - } diff --git a/internal/proxy/proxy.go b/internal/proxy/proxy.go index db8e9f37cf..901ec2889d 100644 --- a/internal/proxy/proxy.go +++ b/internal/proxy/proxy.go @@ -28,18 +28,16 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/allocator" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/internal/util/sessionutil" - "github.com/milvus-io/milvus/pkg/util/tsoutil" - clientv3 "go.etcd.io/etcd/client/v3" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus/internal/allocator" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proxy/accesslog" "github.com/milvus-io/milvus/internal/types" + "github.com/milvus-io/milvus/internal/util/dependency" + "github.com/milvus-io/milvus/internal/util/sessionutil" "github.com/milvus-io/milvus/pkg/log" 
"github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/util/commonpbutil" @@ -47,6 +45,7 @@ import ( "github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/ratelimitutil" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -154,7 +153,7 @@ func (node *Proxy) Register() error { } }) // TODO Reset the logger - //Params.initLogCfg() + // Params.initLogCfg() return nil } diff --git a/internal/proxy/proxy_rpc_test.go b/internal/proxy/proxy_rpc_test.go index 2566a35e00..e260e1a9ed 100644 --- a/internal/proxy/proxy_rpc_test.go +++ b/internal/proxy/proxy_rpc_test.go @@ -7,6 +7,8 @@ import ( "sync" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" grpcproxyclient "github.com/milvus-io/milvus/internal/distributed/proxy/client" "github.com/milvus-io/milvus/internal/proto/internalpb" @@ -17,7 +19,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" ) func TestProxyRpcLimit(t *testing.T) { diff --git a/internal/proxy/proxy_test.go b/internal/proxy/proxy_test.go index b00014b129..97b44e3711 100644 --- a/internal/proxy/proxy_test.go +++ b/internal/proxy/proxy_test.go @@ -273,12 +273,12 @@ func (s *proxyTestServer) GetStatisticsChannel(ctx context.Context, request *int func (s *proxyTestServer) startGrpc(ctx context.Context, wg *sync.WaitGroup, p *paramtable.GrpcServerConfig) { defer wg.Done() - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, // If a client pings more than once every 5 seconds, terminate the connection PermitWithoutStream: true, // Allow pings even when there are no active streams } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, // Ping the client if it is idle for 60 seconds to ensure the connection is still active Timeout: 10 * time.Second, // Wait 10 second for the ping ack before assuming the connection is dead } @@ -678,7 +678,6 @@ func TestProxy(t *testing.T) { resp, err = proxy.CreateCollection(ctx, reqInvalidField) assert.NoError(t, err) assert.NotEqual(t, commonpb.ErrorCode_Success, resp.ErrorCode) - }) wg.Add(1) @@ -765,7 +764,6 @@ func TestProxy(t *testing.T) { DbName: dbName, CollectionName: collectionName, }) - }) wg.Add(1) @@ -1026,7 +1024,7 @@ func TestProxy(t *testing.T) { assert.Equal(t, int64(rowNum), resp.InsertCnt) }) - //TODO(dragondriver): proxy.Delete() + // TODO(dragondriver): proxy.Delete() flushed := true wg.Add(1) @@ -1471,7 +1469,7 @@ func TestProxy(t *testing.T) { }, } - //resp, err := proxy.CalcDistance(ctx, &milvuspb.CalcDistanceRequest{ + // resp, err := proxy.CalcDistance(ctx, &milvuspb.CalcDistanceRequest{ _, err := proxy.CalcDistance(ctx, &milvuspb.CalcDistanceRequest{ Base: nil, OpLeft: opLeft, @@ -3370,7 +3368,6 @@ func TestProxy(t *testing.T) { resp, err = proxy.CreateCollection(ctx, reqInvalidField) assert.NoError(t, err) assert.NotEqual(t, commonpb.ErrorCode_Success, resp.ErrorCode) - }) wg.Add(1) @@ -3633,7 +3630,6 @@ func testProxyRole(ctx context.Context, t *testing.T, proxy *Proxy) { resp, err := proxy.OperateUserRole(ctx, &milvuspb.OperateUserRoleRequest{Username: username, RoleName: roleName}) assert.NoError(t, err) assert.Equal(t, commonpb.ErrorCode_Success, resp.ErrorCode) - } { 
resp, err := proxy.OperateUserRole(ctx, &milvuspb.OperateUserRoleRequest{Username: username, RoleName: "admin"}) @@ -4229,7 +4225,6 @@ func TestProxy_ListImportTasks(t *testing.T) { } func TestProxy_GetStatistics(t *testing.T) { - } func TestProxy_GetLoadState(t *testing.T) { diff --git a/internal/proxy/reducer_test.go b/internal/proxy/reducer_test.go index 58795450cf..668a94ce97 100644 --- a/internal/proxy/reducer_test.go +++ b/internal/proxy/reducer_test.go @@ -4,8 +4,9 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/internal/proto/planpb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/internal/proto/planpb" ) func Test_createMilvusReducer(t *testing.T) { diff --git a/internal/proxy/repack_func.go b/internal/proxy/repack_func.go index 618f6aa0ba..2c88bce0e9 100644 --- a/internal/proxy/repack_func.go +++ b/internal/proxy/repack_func.go @@ -27,7 +27,6 @@ func insertRepackFunc( tsMsgs []msgstream.TsMsg, hashKeys [][]int32, ) (map[int32]*msgstream.MsgPack, error) { - if len(hashKeys) < len(tsMsgs) { return nil, fmt.Errorf( "the length of hash keys (%d) is less than the length of messages (%d)", @@ -59,7 +58,6 @@ func defaultInsertRepackFunc( tsMsgs []msgstream.TsMsg, hashKeys [][]int32, ) (map[int32]*msgstream.MsgPack, error) { - if len(hashKeys) < len(tsMsgs) { return nil, fmt.Errorf( "the length of hash keys (%d) is less than the length of messages (%d)", diff --git a/internal/proxy/repack_func_test.go b/internal/proxy/repack_func_test.go index e5cf96d64d..ffc01e4b79 100644 --- a/internal/proxy/repack_func_test.go +++ b/internal/proxy/repack_func_test.go @@ -20,9 +20,9 @@ import ( "math/rand" "testing" - "github.com/milvus-io/milvus/pkg/mq/msgstream" - "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/mq/msgstream" ) func Test_insertRepackFunc(t *testing.T) { diff --git a/internal/proxy/rootcoord_mock_test.go b/internal/proxy/rootcoord_mock_test.go index 4e3ea6ee37..1dafa56fe3 100644 --- a/internal/proxy/rootcoord_mock_test.go +++ b/internal/proxy/rootcoord_mock_test.go @@ -1131,7 +1131,6 @@ func (m *mockRootCoord) GetCredential(ctx context.Context, request *rootcoordpb. 
return m.GetGetCredentialFunc(ctx, request) } return nil, errors.New("mock") - } func (m *mockRootCoord) DescribeCollection(ctx context.Context, request *milvuspb.DescribeCollectionRequest) (*milvuspb.DescribeCollectionResponse, error) { diff --git a/internal/proxy/roundrobin_balancer.go b/internal/proxy/roundrobin_balancer.go index cd0f49cbcf..bd54f0f82a 100644 --- a/internal/proxy/roundrobin_balancer.go +++ b/internal/proxy/roundrobin_balancer.go @@ -18,10 +18,11 @@ package proxy import ( "context" + "go.uber.org/atomic" + "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/atomic" ) type RoundRobinBalancer struct { diff --git a/internal/proxy/segment.go b/internal/proxy/segment.go index c7e1a73f93..a3e21e0e0b 100644 --- a/internal/proxy/segment.go +++ b/internal/proxy/segment.go @@ -319,7 +319,6 @@ func (sa *segIDAssigner) syncSegments() (bool, error) { log.Debug("syncSegments call dataCoord.AssignSegmentID", zap.String("request", req.String())) resp, err := sa.dataCoord.AssignSegmentID(context.Background(), req) - if err != nil { return false, fmt.Errorf("syncSegmentID Failed:%w", err) } diff --git a/internal/proxy/segment_test.go b/internal/proxy/segment_test.go index 214e0d1ed9..54adbbd503 100644 --- a/internal/proxy/segment_test.go +++ b/internal/proxy/segment_test.go @@ -71,7 +71,6 @@ type mockDataCoord2 struct { } func (mockD *mockDataCoord2) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) { - return &datapb.AssignSegmentIDResponse{ Status: &commonpb.Status{ ErrorCode: commonpb.ErrorCode_UnexpectedError, @@ -116,7 +115,6 @@ func TestSegmentAllocator1(t *testing.T) { _, err = segAllocator.GetSegmentID(1, 1, "abc", 10, 1001) assert.Error(t, err) wg.Wait() - } var curLastTick2 = Timestamp(200) @@ -156,7 +154,6 @@ func TestSegmentAllocator2(t *testing.T) { _, err = segAllocator.GetSegmentID(1, 1, "abc", segCountPerRPC-10, getLastTick2()) assert.Error(t, err) wg.Wait() - } func TestSegmentAllocator3(t *testing.T) { @@ -241,7 +238,6 @@ type mockDataCoord5 struct { } func (mockD *mockDataCoord5) AssignSegmentID(ctx context.Context, req *datapb.AssignSegmentIDRequest) (*datapb.AssignSegmentIDResponse, error) { - return &datapb.AssignSegmentIDResponse{ Status: &commonpb.Status{ ErrorCode: commonpb.ErrorCode_UnexpectedError, @@ -314,5 +310,4 @@ func TestSegmentAllocator6(t *testing.T) { } wg.Wait() assert.True(t, success) - } diff --git a/internal/proxy/shard_client_test.go b/internal/proxy/shard_client_test.go index da21ed512c..83c38e0fe7 100644 --- a/internal/proxy/shard_client_test.go +++ b/internal/proxy/shard_client_test.go @@ -4,9 +4,10 @@ import ( "context" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/internal/util/mock" - "github.com/stretchr/testify/assert" ) func genShardLeaderInfo(channel string, leaderIDs []UniqueID) map[string][]nodeInfo { diff --git a/internal/proxy/task_database_test.go b/internal/proxy/task_database_test.go index fd211ff518..966d9fa831 100644 --- a/internal/proxy/task_database_test.go +++ b/internal/proxy/task_database_test.go @@ -4,12 +4,12 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" + 
"github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestCreateDatabaseTask(t *testing.T) { diff --git a/internal/proxy/task_delete.go b/internal/proxy/task_delete.go index ec82202124..ef91c870d3 100644 --- a/internal/proxy/task_delete.go +++ b/internal/proxy/task_delete.go @@ -113,7 +113,7 @@ func (dt *deleteTask) getChannels() []pChan { } func getExpr(plan *planpb.PlanNode) (bool, *planpb.Expr_TermExpr) { - //simple delete request need expr with "pk in [a, b]" + // simple delete request need expr with "pk in [a, b]" termExpr, ok := plan.Node.(*planpb.PlanNode_Query).Query.Predicates.Expr.(*planpb.Expr_TermExpr) if !ok { return false, nil @@ -339,7 +339,6 @@ func (dt *deleteTask) complexDelete(ctx context.Context, plan *planpb.PlanNode, nq: 1, exec: dt.getStreamingQueryAndDelteFunc(stream, plan), }) - if err != nil { log.Warn("fail to get or create dml stream", zap.Error(err)) return err diff --git a/internal/proxy/task_delete_test.go b/internal/proxy/task_delete_test.go index ccaa2fb796..2862a0500e 100644 --- a/internal/proxy/task_delete_test.go +++ b/internal/proxy/task_delete_test.go @@ -6,7 +6,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ -137,7 +136,8 @@ func TestDeleteTask_PreExecute(t *testing.T) { dt := deleteTask{ req: &milvuspb.DeleteRequest{ CollectionName: "foo", - }} + }, + } cache := NewMockCache(t) cache.On("GetCollectionID", mock.Anything, // context.Context @@ -205,7 +205,6 @@ func TestDeleteTask_PreExecute(t *testing.T) { }) t.Run("invalie partition", func(t *testing.T) { - dt := deleteTask{ req: &milvuspb.DeleteRequest{ CollectionName: "foo", @@ -666,7 +665,9 @@ func TestDeleteTask_SimpleDelete(t *testing.T) { TermExpr: &planpb.TermExpr{ ColumnInfo: &planpb.ColumnInfo{ DataType: schemapb.DataType_BinaryVector, - }}} + }, + }, + } stream := msgstream.NewMockMsgStream(t) err := task.simpleDelete(ctx, expr, stream) assert.Error(t, err) diff --git a/internal/proxy/task_index.go b/internal/proxy/task_index.go index df66751ba7..8583cf229d 100644 --- a/internal/proxy/task_index.go +++ b/internal/proxy/task_index.go @@ -250,7 +250,7 @@ func (cit *createIndexTask) parseIndexParams() error { } for k, v := range indexParamsMap { - //Currently, it is required that type_params and index_params do not have same keys. + // Currently, it is required that type_params and index_params do not have same keys. 
if k == DimKey || k == common.MaxLengthKey { delete(indexParamsMap, k) continue @@ -608,7 +608,8 @@ func (dit *getIndexStatisticsTask) Execute(ctx context.Context) error { } resp, err := dit.datacoord.GetIndexStatistics(ctx, &indexpb.GetIndexStatisticsRequest{ - CollectionID: dit.collectionID, IndexName: dit.IndexName}) + CollectionID: dit.collectionID, IndexName: dit.IndexName, + }) if err != nil || resp == nil { return err } diff --git a/internal/proxy/task_index_test.go b/internal/proxy/task_index_test.go index f3ce98083e..2816424457 100644 --- a/internal/proxy/task_index_test.go +++ b/internal/proxy/task_index_test.go @@ -317,7 +317,8 @@ func Test_parseIndexParams(t *testing.T) { Key: MetricTypeKey, Value: "L2", }, - }}, + }, + }, } t.Run("parse index params", func(t *testing.T) { @@ -401,7 +402,8 @@ func Test_parseIndexParams(t *testing.T) { Key: MetricTypeKey, Value: "L2", }, - }}, + }, + }, } t.Run("parse index params 2", func(t *testing.T) { Params.Save(Params.AutoIndexConfig.Enable.Key, "true") diff --git a/internal/proxy/task_policies.go b/internal/proxy/task_policies.go index c910eb51c5..c25ee434e3 100644 --- a/internal/proxy/task_policies.go +++ b/internal/proxy/task_policies.go @@ -18,9 +18,7 @@ type queryFunc func(context.Context, UniqueID, types.QueryNode, ...string) error type pickShardPolicy func(context.Context, shardClientMgr, queryFunc, map[string][]nodeInfo) error -var ( - errInvalidShardLeaders = errors.New("Invalid shard leader") -) +var errInvalidShardLeaders = errors.New("Invalid shard leader") // RoundRobinPolicy do the query with multiple dml channels // if request failed, it finds shard leader for failed dml channels @@ -28,8 +26,8 @@ func RoundRobinPolicy( ctx context.Context, mgr shardClientMgr, query queryFunc, - dml2leaders map[string][]nodeInfo) error { - + dml2leaders map[string][]nodeInfo, +) error { queryChannel := func(ctx context.Context, channel string) error { var combineErr error leaders := dml2leaders[channel] diff --git a/internal/proxy/task_policies_test.go b/internal/proxy/task_policies_test.go index ec0ba5fa90..130d40dcf1 100644 --- a/internal/proxy/task_policies_test.go +++ b/internal/proxy/task_policies_test.go @@ -16,9 +16,7 @@ import ( func TestRoundRobinPolicy(t *testing.T) { var err error - var ( - ctx = context.TODO() - ) + ctx := context.TODO() mgr := newShardClientMgr() diff --git a/internal/proxy/task_query.go b/internal/proxy/task_query.go index 4f1807eaf2..0e6e9fe996 100644 --- a/internal/proxy/task_query.go +++ b/internal/proxy/task_query.go @@ -111,7 +111,6 @@ func translateToOutputFieldIDs(outputFields []string, schema *schemapb.Collectio if !pkFound { outputFieldIDs = append(outputFieldIDs, pkFieldID) } - } return outputFieldIDs, nil } diff --git a/internal/proxy/task_query_test.go b/internal/proxy/task_query_test.go index 70dd87dc4e..8c4f8bc310 100644 --- a/internal/proxy/task_query_test.go +++ b/internal/proxy/task_query_test.go @@ -424,7 +424,6 @@ func TestTaskQuery_functions(t *testing.T) { Key: test.inKey[i], Value: test.inValue[i], }) - } ret, err := parseQueryParams(inParams) if test.expectErr { @@ -519,7 +518,8 @@ func TestTaskQuery_functions(t *testing.T) { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0, - 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0} + 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0, + } t.Run("test limited", func(t *testing.T) { tests := []struct { @@ -541,7 +541,6 @@ func TestTaskQuery_functions(t *testing.T) { 
assert.NoError(t, err) }) } - }) t.Run("test unLimited and maxOutputSize", func(t *testing.T) { @@ -872,7 +871,6 @@ func Test_queryTask_createPlan(t *testing.T) { }) t.Run("query without expression", func(t *testing.T) { - tsk := &queryTask{ request: &milvuspb.QueryRequest{ OutputFields: []string{"a"}, @@ -883,7 +881,6 @@ func Test_queryTask_createPlan(t *testing.T) { }) t.Run("invalid expression", func(t *testing.T) { - schema := &schemapb.CollectionSchema{ Fields: []*schemapb.FieldSchema{ { @@ -907,7 +904,6 @@ func Test_queryTask_createPlan(t *testing.T) { }) t.Run("invalid output fields", func(t *testing.T) { - schema := &schemapb.CollectionSchema{ Fields: []*schemapb.FieldSchema{ { diff --git a/internal/proxy/task_scheduler.go b/internal/proxy/task_scheduler.go index 0467dfd4e9..89f13e983d 100644 --- a/internal/proxy/task_scheduler.go +++ b/internal/proxy/task_scheduler.go @@ -227,7 +227,7 @@ func (queue *dmTaskQueue) Enqueue(t task) error { // 1) Protect member pChanStatisticsInfos // 2) Serialize the timestamp allocation for dml tasks - //1. set the current pChannels for this dmTask + // 1. set the current pChannels for this dmTask dmt := t.(dmlTask) err := dmt.setChannels() if err != nil { @@ -235,19 +235,19 @@ func (queue *dmTaskQueue) Enqueue(t task) error { return err } - //2. enqueue dml task + // 2. enqueue dml task queue.statsLock.Lock() defer queue.statsLock.Unlock() err = queue.baseTaskQueue.Enqueue(t) if err != nil { return err } - //3. commit will use pChannels got previously when preAdding and will definitely succeed + // 3. commit will use pChannels got previously when preAdding and will definitely succeed pChannels := dmt.getChannels() queue.commitPChanStats(dmt, pChannels) - //there's indeed a possibility that the collection info cache was expired after preAddPChanStats - //but considering root coord knows everything about meta modification, invalid stats appended after the meta changed - //will be discarded by root coord and will not lead to inconsistent state + // there's indeed a possibility that the collection info cache was expired after preAddPChanStats + // but considering root coord knows everything about meta modification, invalid stats appended after the meta changed + // will be discarded by root coord and will not lead to inconsistent state return nil } @@ -269,7 +269,7 @@ func (queue *dmTaskQueue) PopActiveTask(taskID UniqueID) task { } func (queue *dmTaskQueue) commitPChanStats(dmt dmlTask, pChannels []pChan) { - //1. prepare new stat for all pChannels + // 1. prepare new stat for all pChannels newStats := make(map[pChan]pChanStatistics) beginTs := dmt.BeginTs() endTs := dmt.EndTs() @@ -279,7 +279,7 @@ func (queue *dmTaskQueue) commitPChanStats(dmt dmlTask, pChannels []pChan) { maxTs: endTs, } } - //2. update stats for all pChannels + // 2. 
update stats for all pChannels for cName, newStat := range newStats { currentStat, ok := queue.pChanStatisticsInfos[cName] if !ok { @@ -325,7 +325,6 @@ func (queue *dmTaskQueue) popPChanStats(t task) { } func (queue *dmTaskQueue) getPChanStatsInfo() (map[pChan]*pChanStatistics, error) { - ret := make(map[pChan]*pChanStatistics) queue.statsLock.RLock() defer queue.statsLock.RUnlock() diff --git a/internal/proxy/task_scheduler_test.go b/internal/proxy/task_scheduler_test.go index 4bba7033df..13db44ba11 100644 --- a/internal/proxy/task_scheduler_test.go +++ b/internal/proxy/task_scheduler_test.go @@ -34,7 +34,6 @@ import ( ) func TestBaseTaskQueue(t *testing.T) { - var err error var unissuedTask task var activeTask task @@ -111,7 +110,6 @@ func TestBaseTaskQueue(t *testing.T) { } func TestDdTaskQueue(t *testing.T) { - var err error var unissuedTask task var activeTask task @@ -189,7 +187,6 @@ func TestDdTaskQueue(t *testing.T) { // test the logic of queue func TestDmTaskQueue_Basic(t *testing.T) { - var err error var unissuedTask task var activeTask task @@ -266,7 +263,6 @@ func TestDmTaskQueue_Basic(t *testing.T) { // test the timestamp statistics func TestDmTaskQueue_TimestampStatistics(t *testing.T) { - var err error var unissuedTask task @@ -394,7 +390,7 @@ func TestDmTaskQueue_TimestampStatistics2(t *testing.T) { }() } wg.Wait() - //time.Sleep(time.Millisecond*100) + // time.Sleep(time.Millisecond*100) needLoop := true for needLoop { processCountMut.RLock() @@ -413,7 +409,6 @@ func TestDmTaskQueue_TimestampStatistics2(t *testing.T) { } func TestDqTaskQueue(t *testing.T) { - var err error var unissuedTask task var activeTask task @@ -490,7 +485,6 @@ func TestDqTaskQueue(t *testing.T) { } func TestTaskScheduler(t *testing.T) { - var err error ctx := context.Background() diff --git a/internal/proxy/task_search.go b/internal/proxy/task_search.go index 5f48df226e..c36d9813d9 100644 --- a/internal/proxy/task_search.go +++ b/internal/proxy/task_search.go @@ -832,7 +832,6 @@ func reduceSearchResultData(ctx context.Context, subSearchResultData []*schemapb maxOutputSize := paramtable.Get().QuotaConfig.MaxOutputSize.GetAsInt64() // reducing nq * topk results for i := int64(0); i < nq; i++ { - var ( // cursor of current data of each subSearch for merging the j-th data of TopK. 
// sum(cursors) == j diff --git a/internal/proxy/task_search_test.go b/internal/proxy/task_search_test.go index 84028d3348..220e35acf1 100644 --- a/internal/proxy/task_search_test.go +++ b/internal/proxy/task_search_test.go @@ -135,7 +135,8 @@ func getValidSearchParams() []*commonpb.KeyValuePair { { Key: IgnoreGrowingKey, Value: "false", - }} + }, + } } func getInvalidSearchParams(invalidName string) []*commonpb.KeyValuePair { @@ -273,7 +274,6 @@ func getQueryNode() *mocks.MockQueryNode { } func TestSearchTaskV2_Execute(t *testing.T) { - var ( err error @@ -1120,62 +1120,79 @@ func Test_checkSearchResultData(t *testing.T) { args args }{ - {"data.NumQueries != nq", true, + { + "data.NumQueries != nq", true, args{ data: &schemapb.SearchResultData{NumQueries: 100}, nq: 10, - }}, - {"data.TopK != topk", true, + }, + }, + { + "data.TopK != topk", true, args{ data: &schemapb.SearchResultData{NumQueries: 1, TopK: 1}, nq: 1, topk: 10, - }}, - {"size of IntId != NumQueries * TopK", true, + }, + }, + { + "size of IntId != NumQueries * TopK", true, args{ data: &schemapb.SearchResultData{ NumQueries: 1, TopK: 1, Ids: &schemapb.IDs{ - IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{1, 2}}}}, + IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{1, 2}}}, + }, }, nq: 1, topk: 1, - }}, - {"size of StrID != NumQueries * TopK", true, + }, + }, + { + "size of StrID != NumQueries * TopK", true, args{ data: &schemapb.SearchResultData{ NumQueries: 1, TopK: 1, Ids: &schemapb.IDs{ - IdField: &schemapb.IDs_StrId{StrId: &schemapb.StringArray{Data: []string{"1", "2"}}}}, + IdField: &schemapb.IDs_StrId{StrId: &schemapb.StringArray{Data: []string{"1", "2"}}}, + }, }, nq: 1, topk: 1, - }}, - {"size of score != nq * topK", true, + }, + }, + { + "size of score != nq * topK", true, args{ data: &schemapb.SearchResultData{ NumQueries: 1, TopK: 1, Ids: &schemapb.IDs{ - IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{1}}}}, + IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{1}}}, + }, Scores: []float32{0.99, 0.98}, }, nq: 1, topk: 1, - }}, - {"correct params", false, + }, + }, + { + "correct params", false, args{ data: &schemapb.SearchResultData{ NumQueries: 1, TopK: 1, Ids: &schemapb.IDs{ - IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{1}}}}, - Scores: []float32{0.99}}, + IdField: &schemapb.IDs_IntId{IntId: &schemapb.LongArray{Data: []int64{1}}}, + }, + Scores: []float32{0.99}, + }, nq: 1, topk: 1, - }}, + }, + }, } for _, test := range tests { @@ -1412,21 +1429,31 @@ func TestTaskSearch_reduceSearchResultData(t *testing.T) { outScore []float32 outData []int64 }{ - {"offset 0, limit 5", 0, 5, + { + "offset 0, limit 5", 0, 5, []float32{-50, -49, -48, -47, -46, -45, -44, -43, -42, -41}, - []int64{50, 49, 48, 47, 46, 45, 44, 43, 42, 41}}, - {"offset 1, limit 4", 1, 4, + []int64{50, 49, 48, 47, 46, 45, 44, 43, 42, 41}, + }, + { + "offset 1, limit 4", 1, 4, []float32{-49, -48, -47, -46, -44, -43, -42, -41}, - []int64{49, 48, 47, 46, 44, 43, 42, 41}}, - {"offset 2, limit 3", 2, 3, + []int64{49, 48, 47, 46, 44, 43, 42, 41}, + }, + { + "offset 2, limit 3", 2, 3, []float32{-48, -47, -46, -43, -42, -41}, - []int64{48, 47, 46, 43, 42, 41}}, - {"offset 3, limit 2", 3, 2, + []int64{48, 47, 46, 43, 42, 41}, + }, + { + "offset 3, limit 2", 3, 2, []float32{-47, -46, -42, -41}, - []int64{47, 46, 42, 41}}, - {"offset 4, limit 1", 4, 1, + []int64{47, 46, 42, 41}, + }, + { + "offset 4, limit 1", 4, 1, []float32{-46, -41}, - []int64{46, 
41}}, + []int64{46, 41}, + }, } var results []*schemapb.SearchResultData @@ -1460,24 +1487,36 @@ func TestTaskSearch_reduceSearchResultData(t *testing.T) { outScore []float32 outData []int64 }{ - {"offset 0, limit 6", 0, 6, 5, + { + "offset 0, limit 6", 0, 6, 5, []float32{-50, -49, -48, -47, -46, -45, -44, -43, -42, -41}, - []int64{50, 49, 48, 47, 46, 45, 44, 43, 42, 41}}, - {"offset 1, limit 5", 1, 5, 4, + []int64{50, 49, 48, 47, 46, 45, 44, 43, 42, 41}, + }, + { + "offset 1, limit 5", 1, 5, 4, []float32{-49, -48, -47, -46, -44, -43, -42, -41}, - []int64{49, 48, 47, 46, 44, 43, 42, 41}}, - {"offset 2, limit 4", 2, 4, 3, + []int64{49, 48, 47, 46, 44, 43, 42, 41}, + }, + { + "offset 2, limit 4", 2, 4, 3, []float32{-48, -47, -46, -43, -42, -41}, - []int64{48, 47, 46, 43, 42, 41}}, - {"offset 3, limit 3", 3, 3, 2, + []int64{48, 47, 46, 43, 42, 41}, + }, + { + "offset 3, limit 3", 3, 3, 2, []float32{-47, -46, -42, -41}, - []int64{47, 46, 42, 41}}, - {"offset 4, limit 2", 4, 2, 1, + []int64{47, 46, 42, 41}, + }, + { + "offset 4, limit 2", 4, 2, 1, []float32{-46, -41}, - []int64{46, 41}}, - {"offset 5, limit 1", 5, 1, 0, + []int64{46, 41}, + }, + { + "offset 5, limit 1", 5, 1, 0, []float32{}, - []int64{}}, + []int64{}, + }, } for _, test := range lessThanLimitTests { @@ -1544,7 +1583,6 @@ func TestTaskSearch_reduceSearchResultData(t *testing.T) { } func TestSearchTask_ErrExecute(t *testing.T) { - var ( err error ctx = context.TODO() @@ -1748,7 +1786,8 @@ func TestTaskSearch_parseQueryInfo(t *testing.T) { t.Run("parseSearchInfo error", func(t *testing.T) { spNoTopk := []*commonpb.KeyValuePair{{ Key: AnnsFieldKey, - Value: testFloatVecField}} + Value: testFloatVecField, + }} spInvalidTopk := append(spNoTopk, &commonpb.KeyValuePair{ Key: TopKKey, @@ -1868,19 +1907,20 @@ func TestSearchTask_Requery(t *testing.T) { node := mocks.NewMockProxy(t) node.EXPECT().Query(mock.Anything, mock.Anything). Return(&milvuspb.QueryResults{ - FieldsData: []*schemapb.FieldData{{ - Type: schemapb.DataType_Int64, - FieldName: pkField, - Field: &schemapb.FieldData_Scalars{ - Scalars: &schemapb.ScalarField{ - Data: &schemapb.ScalarField_LongData{ - LongData: &schemapb.LongArray{ - Data: ids, + FieldsData: []*schemapb.FieldData{ + { + Type: schemapb.DataType_Int64, + FieldName: pkField, + Field: &schemapb.FieldData_Scalars{ + Scalars: &schemapb.ScalarField{ + Data: &schemapb.ScalarField_LongData{ + LongData: &schemapb.LongArray{ + Data: ids, + }, }, }, }, }, - }, newFloatVectorFieldData(vecField, rows, dim), }, }, nil) @@ -2031,19 +2071,20 @@ func TestSearchTask_Requery(t *testing.T) { node := mocks.NewMockProxy(t) node.EXPECT().Query(mock.Anything, mock.Anything). 
Return(&milvuspb.QueryResults{ - FieldsData: []*schemapb.FieldData{{ - Type: schemapb.DataType_Int64, - FieldName: pkField, - Field: &schemapb.FieldData_Scalars{ - Scalars: &schemapb.ScalarField{ - Data: &schemapb.ScalarField_LongData{ - LongData: &schemapb.LongArray{ - Data: ids[:len(ids)-1], + FieldsData: []*schemapb.FieldData{ + { + Type: schemapb.DataType_Int64, + FieldName: pkField, + Field: &schemapb.FieldData_Scalars{ + Scalars: &schemapb.ScalarField{ + Data: &schemapb.ScalarField_LongData{ + LongData: &schemapb.LongArray{ + Data: ids[:len(ids)-1], + }, }, }, }, }, - }, newFloatVectorFieldData(vecField, rows, dim), }, }, nil) diff --git a/internal/proxy/task_statistic.go b/internal/proxy/task_statistic.go index 462582d19f..bbd84fd986 100644 --- a/internal/proxy/task_statistic.go +++ b/internal/proxy/task_statistic.go @@ -266,7 +266,6 @@ func (g *getStatisticsTask) getStatisticsFromQueryNode(ctx context.Context) erro nq: 1, exec: g.getStatisticsShard, }) - if err != nil { return errors.Wrap(err, "failed to statistic") } diff --git a/internal/proxy/task_test.go b/internal/proxy/task_test.go index 825aa2f9fa..43b66a01b9 100644 --- a/internal/proxy/task_test.go +++ b/internal/proxy/task_test.go @@ -75,7 +75,6 @@ func constructCollectionSchema( dim int, collectionName string, ) *schemapb.CollectionSchema { - pk := &schemapb.FieldSchema{ FieldID: 0, Name: int64Field, @@ -117,7 +116,6 @@ func constructCollectionSchemaEnableDynamicSchema( dim int, collectionName string, ) *schemapb.CollectionSchema { - pk := &schemapb.FieldSchema{ FieldID: 0, Name: int64Field, @@ -210,7 +208,6 @@ func constructCollectionSchemaWithAllType( dim int, collectionName string, ) *schemapb.CollectionSchema { - b := &schemapb.FieldSchema{ FieldID: 0, Name: boolField, @@ -572,7 +569,6 @@ func TestTranslateOutputFields(t *testing.T) { } func TestCreateCollectionTask(t *testing.T) { - rc := NewRootCoordMock() rc.Start() defer rc.Stop() @@ -921,7 +917,7 @@ func TestHasCollectionTask(t *testing.T) { ShardsNum: shardsNum, } - //CreateCollection + // CreateCollection task := &hasCollectionTask{ Condition: NewTaskCondition(ctx), HasCollectionRequest: &milvuspb.HasCollectionRequest{ @@ -968,7 +964,6 @@ func TestHasCollectionTask(t *testing.T) { assert.NoError(t, err) err = task.Execute(ctx) assert.Error(t, err) - } func TestDescribeCollectionTask(t *testing.T) { @@ -985,7 +980,7 @@ func TestDescribeCollectionTask(t *testing.T) { dbName := "" collectionName := prefix + funcutil.GenRandomStr() - //CreateCollection + // CreateCollection task := &describeCollectionTask{ Condition: NewTaskCondition(ctx), DescribeCollectionRequest: &milvuspb.DescribeCollectionRequest{ @@ -1071,7 +1066,7 @@ func TestDescribeCollectionTask_ShardsNum1(t *testing.T) { rc.CreateCollection(ctx, createColReq) globalMetaCache.GetCollectionID(ctx, GetCurDBNameFromContextOrDefault(ctx), collectionName) - //CreateCollection + // CreateCollection task := &describeCollectionTask{ Condition: NewTaskCondition(ctx), DescribeCollectionRequest: &milvuspb.DescribeCollectionRequest{ @@ -1135,7 +1130,7 @@ func TestDescribeCollectionTask_EnableDynamicSchema(t *testing.T) { rc.CreateCollection(ctx, createColReq) globalMetaCache.GetCollectionID(ctx, dbName, collectionName) - //CreateCollection + // CreateCollection task := &describeCollectionTask{ Condition: NewTaskCondition(ctx), DescribeCollectionRequest: &milvuspb.DescribeCollectionRequest{ @@ -1198,7 +1193,7 @@ func TestDescribeCollectionTask_ShardsNum2(t *testing.T) { rc.CreateCollection(ctx, createColReq) 
globalMetaCache.GetCollectionID(ctx, GetCurDBNameFromContextOrDefault(ctx), collectionName) - //CreateCollection + // CreateCollection task := &describeCollectionTask{ Condition: NewTaskCondition(ctx), DescribeCollectionRequest: &milvuspb.DescribeCollectionRequest{ @@ -1518,7 +1513,6 @@ func TestShowPartitionsTask(t *testing.T) { task.ShowPartitionsRequest.Type = milvuspb.ShowType_InMemory err = task.Execute(ctx) assert.Error(t, err) - } func TestTask_Int64PrimaryKey(t *testing.T) { @@ -1549,7 +1543,8 @@ func TestTask_Int64PrimaryKey(t *testing.T) { testInt64Field: schemapb.DataType_Int64, testFloatField: schemapb.DataType_Float, testDoubleField: schemapb.DataType_Double, - testFloatVecField: schemapb.DataType_FloatVector} + testFloatVecField: schemapb.DataType_FloatVector, + } if enableMultipleVectorFields { fieldName2Types[testBinaryVecField] = schemapb.DataType_BinaryVector } @@ -1794,7 +1789,8 @@ func TestTask_VarCharPrimaryKey(t *testing.T) { testFloatField: schemapb.DataType_Float, testDoubleField: schemapb.DataType_Double, testVarCharField: schemapb.DataType_VarChar, - testFloatVecField: schemapb.DataType_FloatVector} + testFloatVecField: schemapb.DataType_FloatVector, + } if enableMultipleVectorFields { fieldName2Types[testBinaryVecField] = schemapb.DataType_BinaryVector } @@ -2125,7 +2121,6 @@ func TestDropAlias_all(t *testing.T) { assert.NoError(t, task.PreExecute(ctx)) assert.NoError(t, task.Execute(ctx)) assert.NoError(t, task.PostExecute(ctx)) - } func TestAlterAlias_all(t *testing.T) { @@ -2541,7 +2536,7 @@ func Test_loadCollectionTask_Execute(t *testing.T) { dbName := funcutil.GenRandomStr() collectionName := funcutil.GenRandomStr() collectionID := UniqueID(1) - //fieldName := funcutil.GenRandomStr() + // fieldName := funcutil.GenRandomStr() indexName := funcutil.GenRandomStr() ctx := context.Background() indexID := int64(1000) @@ -2642,7 +2637,7 @@ func Test_loadPartitionTask_Execute(t *testing.T) { dbName := funcutil.GenRandomStr() collectionName := funcutil.GenRandomStr() collectionID := UniqueID(1) - //fieldName := funcutil.GenRandomStr() + // fieldName := funcutil.GenRandomStr() indexName := funcutil.GenRandomStr() ctx := context.Background() indexID := int64(1000) diff --git a/internal/proxy/timestamp_test.go b/internal/proxy/timestamp_test.go index ba52eb5a74..635e2c86ea 100644 --- a/internal/proxy/timestamp_test.go +++ b/internal/proxy/timestamp_test.go @@ -21,9 +21,9 @@ import ( "math/rand" "testing" - "github.com/milvus-io/milvus/pkg/util/uniquegenerator" - "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/uniquegenerator" ) func TestNewTimestampAllocator(t *testing.T) { diff --git a/internal/proxy/util.go b/internal/proxy/util.go index f095ed267a..bba74886b4 100644 --- a/internal/proxy/util.go +++ b/internal/proxy/util.go @@ -34,7 +34,6 @@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/parser/planparserv2" "github.com/milvus-io/milvus/internal/proto/planpb" - "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/types" typeutil2 "github.com/milvus-io/milvus/internal/util/typeutil" diff --git a/internal/proxy/util_test.go b/internal/proxy/util_test.go index a0aa7c1a31..2d9f46aa04 100644 --- a/internal/proxy/util_test.go +++ b/internal/proxy/util_test.go @@ -25,8 +25,6 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/log" - "github.com/cockroachdb/errors" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/mock" @@ 
-41,6 +39,7 @@ import ( "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" "github.com/milvus-io/milvus/pkg/common" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/util" "github.com/milvus-io/milvus/pkg/util/crypto" diff --git a/internal/proxy/validate_util.go b/internal/proxy/validate_util.go index 0ed9fe87fd..8ce6c57fc8 100644 --- a/internal/proxy/validate_util.go +++ b/internal/proxy/validate_util.go @@ -4,6 +4,8 @@ import ( "fmt" "math" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" @@ -11,7 +13,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/parameterutil.go" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) type validateUtil struct { diff --git a/internal/proxy/validate_util_test.go b/internal/proxy/validate_util_test.go index 2d42d2ddae..9b43d26de0 100644 --- a/internal/proxy/validate_util_test.go +++ b/internal/proxy/validate_util_test.go @@ -2543,7 +2543,6 @@ func Test_validateUtil_fillWithDefaultValue(t *testing.T) { flag := checkFillWithDefaultValueData(data[0].GetScalars().GetStringData().Data, stringData[0], 1) assert.True(t, flag) }) - } func Test_verifyOverflowByRange(t *testing.T) { @@ -2617,7 +2616,6 @@ func Test_validateUtil_checkIntegerFieldData(t *testing.T) { }) t.Run("tiny int, overflow", func(t *testing.T) { - v := newValidateUtil(withOverflowCheck()) f := &schemapb.FieldSchema{ @@ -2640,7 +2638,6 @@ func Test_validateUtil_checkIntegerFieldData(t *testing.T) { }) t.Run("tiny int, normal case", func(t *testing.T) { - v := newValidateUtil(withOverflowCheck()) f := &schemapb.FieldSchema{ @@ -2663,7 +2660,6 @@ func Test_validateUtil_checkIntegerFieldData(t *testing.T) { }) t.Run("small int, overflow", func(t *testing.T) { - v := newValidateUtil(withOverflowCheck()) f := &schemapb.FieldSchema{ @@ -2686,7 +2682,6 @@ func Test_validateUtil_checkIntegerFieldData(t *testing.T) { }) t.Run("small int, normal case", func(t *testing.T) { - v := newValidateUtil(withOverflowCheck()) f := &schemapb.FieldSchema{ @@ -2707,7 +2702,6 @@ func Test_validateUtil_checkIntegerFieldData(t *testing.T) { err := v.checkIntegerFieldData(data, f) assert.NoError(t, err) }) - } func Test_validateUtil_checkJSONData(t *testing.T) { diff --git a/internal/querycoordv2/balance/balance.go b/internal/querycoordv2/balance/balance.go index 20a15e9c66..f06ff77ddc 100644 --- a/internal/querycoordv2/balance/balance.go +++ b/internal/querycoordv2/balance/balance.go @@ -113,7 +113,7 @@ func (b *RoundRobinBalancer) AssignChannel(channels []*meta.DmChannel, nodes []i } func (b *RoundRobinBalancer) BalanceReplica(replica *meta.Replica) ([]SegmentAssignPlan, []ChannelAssignPlan) { - //TODO by chun.han + // TODO by chun.han return nil, nil } diff --git a/internal/querycoordv2/balance/balance_test.go b/internal/querycoordv2/balance/balance_test.go index f528834c41..4a9e8a8415 100644 --- a/internal/querycoordv2/balance/balance_test.go +++ b/internal/querycoordv2/balance/balance_test.go @@ -19,11 +19,12 @@ package balance import ( "testing" + "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/querycoordv2/meta" "github.com/milvus-io/milvus/internal/querycoordv2/session" "github.com/milvus-io/milvus/internal/querycoordv2/task" - 
"github.com/stretchr/testify/suite" ) type BalanceTestSuite struct { diff --git a/internal/querycoordv2/balance/rowcount_based_balancer.go b/internal/querycoordv2/balance/rowcount_based_balancer.go index 383b42856f..171f0fe21b 100644 --- a/internal/querycoordv2/balance/rowcount_based_balancer.go +++ b/internal/querycoordv2/balance/rowcount_based_balancer.go @@ -149,7 +149,6 @@ func (b *RowCountBasedBalancer) BalanceReplica(replica *meta.Replica) ([]Segment } segmentsToMove = append(segmentsToMove, s) - } if rowCount < average { item := newNodeItem(rowCount, node) diff --git a/internal/querycoordv2/balance/rowcount_based_balancer_test.go b/internal/querycoordv2/balance/rowcount_based_balancer_test.go index 207ee8153d..72eacc976f 100644 --- a/internal/querycoordv2/balance/rowcount_based_balancer_test.go +++ b/internal/querycoordv2/balance/rowcount_based_balancer_test.go @@ -397,7 +397,6 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalance() { suite.ElementsMatch(c.expectPlans, segmentPlans) }) } - } func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnPartStopping() { @@ -596,7 +595,6 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnPartStopping() { suite.ElementsMatch(c.expectPlans, segmentPlans) }) } - } func (suite *RowCountBasedBalancerTestSuite) TestBalanceOutboundNodes() { @@ -752,11 +750,11 @@ func (suite *RowCountBasedBalancerTestSuite) TestBalanceOnLoadingCollection() { suite.ElementsMatch(c.expectPlans, segmentPlans) }) } - } func (suite *RowCountBasedBalancerTestSuite) getCollectionBalancePlans(balancer *RowCountBasedBalancer, - collectionID int64) ([]SegmentAssignPlan, []ChannelAssignPlan) { + collectionID int64, +) ([]SegmentAssignPlan, []ChannelAssignPlan) { replicas := balancer.meta.ReplicaManager.GetByCollection(collectionID) segmentPlans, channelPlans := make([]SegmentAssignPlan, 0), make([]ChannelAssignPlan, 0) for _, replica := range replicas { diff --git a/internal/querycoordv2/balance/score_based_balancer.go b/internal/querycoordv2/balance/score_based_balancer.go index 428ae14604..64bbd47162 100644 --- a/internal/querycoordv2/balance/score_based_balancer.go +++ b/internal/querycoordv2/balance/score_based_balancer.go @@ -39,7 +39,8 @@ func NewScoreBasedBalancer(scheduler task.Scheduler, nodeManager *session.NodeManager, dist *meta.DistributionManager, meta *meta.Meta, - targetMgr *meta.TargetManager) *ScoreBasedBalancer { + targetMgr *meta.TargetManager, +) *ScoreBasedBalancer { return &ScoreBasedBalancer{ RowCountBasedBalancer: NewRowCountBasedBalancer(scheduler, nodeManager, dist, meta, targetMgr), } @@ -162,7 +163,7 @@ func (b *ScoreBasedBalancer) BalanceReplica(replica *meta.Replica) ([]SegmentAss ) return nil, nil } - //print current distribution before generating plans + // print current distribution before generating plans segmentPlans, channelPlans := make([]SegmentAssignPlan, 0), make([]ChannelAssignPlan, 0) if len(stoppingNodesSegments) != 0 { log.Info("Handle stopping nodes", @@ -268,7 +269,7 @@ func (b *ScoreBasedBalancer) getNormalSegmentPlan(replica *meta.Replica, nodesSe break } if targetSegmentToMove == nil { - //the node with the highest score doesn't have any segments suitable for balancing, stop balancing this round + // the node with the highest score doesn't have any segments suitable for balancing, stop balancing this round break } @@ -277,7 +278,7 @@ func (b *ScoreBasedBalancer) getNormalSegmentPlan(replica *meta.Replica, nodesSe nextToPriority := toPriority + int(targetSegmentToMove.GetNumOfRows()) + 
int(float64(targetSegmentToMove.GetNumOfRows())* params.Params.QueryCoordCfg.GlobalRowCountFactor.GetAsFloat()) - //still unbalanced after this balance plan is executed + // still unbalanced after this balance plan is executed if nextToPriority <= nextFromPriority { plan := SegmentAssignPlan{ ReplicaID: replica.GetID(), @@ -287,9 +288,9 @@ func (b *ScoreBasedBalancer) getNormalSegmentPlan(replica *meta.Replica, nodesSe } segmentPlans = append(segmentPlans, plan) } else { - //if unbalance reverted after balance action, we will consider the benefit - //only trigger following balance when the generated reverted balance - //is far smaller than the original unbalance + // if unbalance reverted after balance action, we will consider the benefit + // only trigger following balance when the generated reverted balance + // is far smaller than the original unbalance nextUnbalance := nextToPriority - nextFromPriority if float64(nextUnbalance)*params.Params.QueryCoordCfg.ReverseUnbalanceTolerationFactor.GetAsFloat() < unbalance { plan := SegmentAssignPlan{ @@ -300,14 +301,14 @@ func (b *ScoreBasedBalancer) getNormalSegmentPlan(replica *meta.Replica, nodesSe } segmentPlans = append(segmentPlans, plan) } else { - //if the tiniest segment movement between the highest scored node and lowest scored node will - //not provide sufficient balance benefit, we will seize balancing in this round + // if the tiniest segment movement between the highest scored node and lowest scored node will + // not provide sufficient balance benefit, we will seize balancing in this round break } } havingMovedSegments.Insert(targetSegmentToMove.GetID()) - //update node priority + // update node priority toNode.setPriority(nextToPriority) fromNode.setPriority(nextFromPriority) // if toNode and fromNode can not find segment to balance, break, else try to balance the next round diff --git a/internal/querycoordv2/balance/score_based_balancer_test.go b/internal/querycoordv2/balance/score_based_balancer_test.go index 14aa5c5dff..cb1367f90b 100644 --- a/internal/querycoordv2/balance/score_based_balancer_test.go +++ b/internal/querycoordv2/balance/score_based_balancer_test.go @@ -106,14 +106,20 @@ func (suite *ScoreBasedBalancerTestSuite) TestAssignSegment() { segmentCnts: []int{0, 0, 0}, expectPlans: [][]SegmentAssignPlan{ { - //as assign segments is used while loading collection, - //all assignPlan should have weight equal to 1(HIGH PRIORITY) - {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 3, NumOfRows: 15, - CollectionID: 1}}, From: -1, To: 1}, - {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 2, NumOfRows: 10, - CollectionID: 1}}, From: -1, To: 3}, - {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 1, NumOfRows: 5, - CollectionID: 1}}, From: -1, To: 2}, + // as assign segments is used while loading collection, + // all assignPlan should have weight equal to 1(HIGH PRIORITY) + {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ + ID: 3, NumOfRows: 15, + CollectionID: 1, + }}, From: -1, To: 1}, + {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ + ID: 2, NumOfRows: 10, + CollectionID: 1, + }}, From: -1, To: 3}, + {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ + ID: 1, NumOfRows: 5, + CollectionID: 1, + }}, From: -1, To: 2}, }, }, }, @@ -125,20 +131,20 @@ func (suite *ScoreBasedBalancerTestSuite) TestAssignSegment() { 1: { {SegmentInfo: &datapb.SegmentInfo{ID: 1, NumOfRows: 10, CollectionID: 1}, Node: 1}, {SegmentInfo: &datapb.SegmentInfo{ID: 2, NumOfRows: 300, 
CollectionID: 2}, Node: 1}, - //base: collection1-node1-priority is 10 + 0.1 * 310 = 41 - //assign3: collection1-node1-priority is 15 + 0.1 * 315 = 46.5 + // base: collection1-node1-priority is 10 + 0.1 * 310 = 41 + // assign3: collection1-node1-priority is 15 + 0.1 * 315 = 46.5 }, 2: { {SegmentInfo: &datapb.SegmentInfo{ID: 3, NumOfRows: 20, CollectionID: 1}, Node: 2}, {SegmentInfo: &datapb.SegmentInfo{ID: 4, NumOfRows: 180, CollectionID: 2}, Node: 2}, - //base: collection1-node2-priority is 20 + 0.1 * 200 = 40 - //assign2: collection1-node2-priority is 30 + 0.1 * 210 = 51 + // base: collection1-node2-priority is 20 + 0.1 * 200 = 40 + // assign2: collection1-node2-priority is 30 + 0.1 * 210 = 51 }, 3: { {SegmentInfo: &datapb.SegmentInfo{ID: 5, NumOfRows: 30, CollectionID: 1}, Node: 3}, {SegmentInfo: &datapb.SegmentInfo{ID: 6, NumOfRows: 20, CollectionID: 2}, Node: 3}, - //base: collection1-node2-priority is 30 + 0.1 * 50 = 35 - //assign1: collection1-node2-priority is 45 + 0.1 * 65 = 51.5 + // base: collection1-node2-priority is 30 + 0.1 * 50 = 35 + // assign1: collection1-node2-priority is 45 + 0.1 * 65 = 51.5 }, }, assignments: [][]*meta.Segment{ @@ -190,10 +196,10 @@ func (suite *ScoreBasedBalancerTestSuite) TestAssignSegment() { states: []session.State{session.NodeStateNormal, session.NodeStateNormal, session.NodeStateNormal}, segmentCnts: []int{0, 0, 0}, expectPlans: [][]SegmentAssignPlan{ - //note that these two segments plans are absolutely unbalanced globally, - //as if the assignment for collection1 could succeed, node1 and node2 will both have 70 rows - //much more than node3, but following assignment will still assign segment based on [10,20,40] - //rather than [70,70,40], this flaw will be mitigated by balance process and maybe fixed in the later versions + // note that these two segments plans are absolutely unbalanced globally, + // as if the assignment for collection1 could succeed, node1 and node2 will both have 70 rows + // much more than node3, but following assignment will still assign segment based on [10,20,40] + // rather than [70,70,40], this flaw will be mitigated by balance process and maybe fixed in the later versions { {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 4, NumOfRows: 60, CollectionID: 1}}, From: -1, To: 1}, {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 5, NumOfRows: 50, CollectionID: 1}}, From: -1, To: 2}, @@ -292,7 +298,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceOneRound() { defer suite.TearDownTest() balancer := suite.balancer - //1. set up target for multi collections + // 1. set up target for multi collections collection := utils.CreateTestCollection(c.collectionID, int32(c.replicaID)) suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, c.collectionID).Return( nil, c.collectionsSegments, nil) @@ -305,7 +311,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceOneRound() { balancer.targetMgr.UpdateCollectionNextTarget(c.collectionID) balancer.targetMgr.UpdateCollectionCurrentTarget(c.collectionID) - //2. set up target for distribution for multi collections + // 2. set up target for distribution for multi collections for node, s := range c.distributions { balancer.dist.SegmentDistManager.Update(node, s...) } @@ -313,7 +319,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceOneRound() { balancer.dist.ChannelDistManager.Update(node, v...) } - //3. set up nodes info and resourceManager for balancer + // 3. 
set up nodes info and resourceManager for balancer for i := range c.nodes { nodeInfo := session.NewNodeInfo(c.nodes[i], "127.0.0.1:0") nodeInfo.UpdateStats(session.WithChannelCnt(len(c.distributionChannels[c.nodes[i]]))) @@ -322,7 +328,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceOneRound() { suite.balancer.meta.ResourceManager.AssignNode(meta.DefaultResourceGroupName, c.nodes[i]) } - //4. balance and verify result + // 4. balance and verify result segmentPlans, channelPlans := suite.getCollectionBalancePlans(balancer, c.collectionID) suite.ElementsMatch(c.expectChannelPlans, channelPlans) suite.ElementsMatch(c.expectPlans, segmentPlans) @@ -384,8 +390,11 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceMultiRound() { }, expectPlans: [][]SegmentAssignPlan{ { - {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 20}, - Node: 2}, From: 2, To: 3, ReplicaID: 1, + { + Segment: &meta.Segment{ + SegmentInfo: &datapb.SegmentInfo{ID: 3, CollectionID: 1, NumOfRows: 20}, + Node: 2, + }, From: 2, To: 3, ReplicaID: 1, }, }, {}, @@ -396,7 +405,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceMultiRound() { defer suite.TearDownTest() balancer := suite.balancer - //1. set up target for multi collections + // 1. set up target for multi collections for i := range balanceCase.collectionIDs { collection := utils.CreateTestCollection(balanceCase.collectionIDs[i], int32(balanceCase.replicaIDs[i])) suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, balanceCase.collectionIDs[i]).Return( @@ -413,12 +422,12 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceMultiRound() { balancer.targetMgr.UpdateCollectionCurrentTarget(balanceCase.collectionIDs[i]) } - //2. set up target for distribution for multi collections + // 2. set up target for distribution for multi collections for node, s := range balanceCase.distributions[0] { balancer.dist.SegmentDistManager.Update(node, s...) } - //3. set up nodes info and resourceManager for balancer + // 3. set up nodes info and resourceManager for balancer for i := range balanceCase.nodes { nodeInfo := session.NewNodeInfo(balanceCase.nodes[i], "127.0.0.1:0") nodeInfo.SetState(balanceCase.states[i]) @@ -426,16 +435,16 @@ func (suite *ScoreBasedBalancerTestSuite) TestBalanceMultiRound() { suite.balancer.meta.ResourceManager.AssignNode(meta.DefaultResourceGroupName, balanceCase.nodes[i]) } - //4. first round balance + // 4. first round balance segmentPlans, _ := suite.getCollectionBalancePlans(balancer, balanceCase.collectionIDs[0]) suite.ElementsMatch(balanceCase.expectPlans[0], segmentPlans) - //5. update segment distribution to simulate balance effect + // 5. update segment distribution to simulate balance effect for node, s := range balanceCase.distributions[1] { balancer.dist.SegmentDistManager.Update(node, s...) } - //6. balance again + // 6. 
balance again segmentPlans, _ = suite.getCollectionBalancePlans(balancer, balanceCase.collectionIDs[1]) suite.ElementsMatch(balanceCase.expectPlans[1], segmentPlans) } @@ -475,10 +484,14 @@ func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() { }, }, expectPlans: []SegmentAssignPlan{ - {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, - Node: 1}, From: 1, To: 3, ReplicaID: 1}, - {Segment: &meta.Segment{SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, - Node: 1}, From: 1, To: 3, ReplicaID: 1}, + {Segment: &meta.Segment{ + SegmentInfo: &datapb.SegmentInfo{ID: 2, CollectionID: 1, NumOfRows: 20}, + Node: 1, + }, From: 1, To: 3, ReplicaID: 1}, + {Segment: &meta.Segment{ + SegmentInfo: &datapb.SegmentInfo{ID: 1, CollectionID: 1, NumOfRows: 10}, + Node: 1, + }, From: 1, To: 3, ReplicaID: 1}, }, expectChannelPlans: []ChannelAssignPlan{}, }, @@ -536,7 +549,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() { defer suite.TearDownTest() balancer := suite.balancer - //1. set up target for multi collections + // 1. set up target for multi collections collection := utils.CreateTestCollection(c.collectionID, int32(c.replicaID)) suite.broker.EXPECT().GetRecoveryInfoV2(mock.Anything, c.collectionID).Return( nil, c.collectionsSegments, nil) @@ -549,7 +562,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() { balancer.targetMgr.UpdateCollectionNextTarget(c.collectionID) balancer.targetMgr.UpdateCollectionCurrentTarget(c.collectionID) - //2. set up target for distribution for multi collections + // 2. set up target for distribution for multi collections for node, s := range c.distributions { balancer.dist.SegmentDistManager.Update(node, s...) } @@ -557,7 +570,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() { balancer.dist.ChannelDistManager.Update(node, v...) } - //3. set up nodes info and resourceManager for balancer + // 3. set up nodes info and resourceManager for balancer for i := range c.nodes { nodeInfo := session.NewNodeInfo(c.nodes[i], "127.0.0.1:0") nodeInfo.UpdateStats(session.WithChannelCnt(len(c.distributionChannels[c.nodes[i]]))) @@ -570,7 +583,7 @@ func (suite *ScoreBasedBalancerTestSuite) TestStoppedBalance() { suite.balancer.meta.ResourceManager.UnassignNode(meta.DefaultResourceGroupName, c.outBoundNodes[i]) } - //4. balance and verify result + // 4. 
balance and verify result segmentPlans, channelPlans := suite.getCollectionBalancePlans(suite.balancer, c.collectionID) suite.ElementsMatch(c.expectChannelPlans, channelPlans) suite.ElementsMatch(c.expectPlans, segmentPlans) @@ -583,7 +596,8 @@ func TestScoreBasedBalancerSuite(t *testing.T) { } func (suite *ScoreBasedBalancerTestSuite) getCollectionBalancePlans(balancer *ScoreBasedBalancer, - collectionID int64) ([]SegmentAssignPlan, []ChannelAssignPlan) { + collectionID int64, +) ([]SegmentAssignPlan, []ChannelAssignPlan) { replicas := balancer.meta.ReplicaManager.GetByCollection(collectionID) segmentPlans, channelPlans := make([]SegmentAssignPlan, 0), make([]ChannelAssignPlan, 0) for _, replica := range replicas { diff --git a/internal/querycoordv2/balance/utils.go b/internal/querycoordv2/balance/utils.go index 185db59f4b..16d5ede8d7 100644 --- a/internal/querycoordv2/balance/utils.go +++ b/internal/querycoordv2/balance/utils.go @@ -78,7 +78,7 @@ func CreateSegmentTasksFromPlans(ctx context.Context, checkerID int64, timeout t // from balance checker t.SetPriority(task.TaskPriorityLow) } else { - //from segment checker + // from segment checker t.SetPriority(task.TaskPriorityNormal) } ret = append(ret, t) @@ -124,7 +124,8 @@ func CreateChannelTasksFromPlans(ctx context.Context, checkerID int64, timeout t } func PrintNewBalancePlans(collectionID int64, replicaID int64, segmentPlans []SegmentAssignPlan, - channelPlans []ChannelAssignPlan) { + channelPlans []ChannelAssignPlan, +) { balanceInfo := fmt.Sprintf("%s new plans:{collectionID:%d, replicaID:%d, ", PlanInfoPrefix, collectionID, replicaID) for _, segmentPlan := range segmentPlans { balanceInfo += segmentPlan.ToString() @@ -138,9 +139,10 @@ func PrintNewBalancePlans(collectionID int64, replicaID int64, segmentPlans []Se func PrintCurrentReplicaDist(replica *meta.Replica, stoppingNodesSegments map[int64][]*meta.Segment, nodeSegments map[int64][]*meta.Segment, - channelManager *meta.ChannelDistManager, segmentDistMgr *meta.SegmentDistManager) { + channelManager *meta.ChannelDistManager, segmentDistMgr *meta.SegmentDistManager, +) { distInfo := fmt.Sprintf("%s {collectionID:%d, replicaID:%d, ", DistInfoPrefix, replica.CollectionID, replica.GetID()) - //1. print stopping nodes segment distribution + // 1. print stopping nodes segment distribution distInfo += "[stoppingNodesSegmentDist:" for stoppingNodeID, stoppedSegments := range stoppingNodesSegments { distInfo += fmt.Sprintf("[nodeID:%d, ", stoppingNodeID) @@ -151,7 +153,7 @@ func PrintCurrentReplicaDist(replica *meta.Replica, distInfo += "]]" } distInfo += "]" - //2. print normal nodes segment distribution + // 2. print normal nodes segment distribution distInfo += "[normalNodesSegmentDist:" for normalNodeID, normalNodeCollectionSegments := range nodeSegments { distInfo += fmt.Sprintf("[nodeID:%d, ", normalNodeID) @@ -171,7 +173,7 @@ func PrintCurrentReplicaDist(replica *meta.Replica, } distInfo += "]" - //3. print stopping nodes channel distribution + // 3. print stopping nodes channel distribution distInfo += "[stoppingNodesChannelDist:" for stoppingNodeID := range stoppingNodesSegments { stoppingNodeChannels := channelManager.GetByCollectionAndNode(replica.GetCollectionID(), stoppingNodeID) @@ -184,7 +186,7 @@ func PrintCurrentReplicaDist(replica *meta.Replica, } distInfo += "]" - //4. print normal nodes channel distribution + // 4. 
print normal nodes channel distribution distInfo += "[normalNodesChannelDist:" for normalNodeID := range nodeSegments { normalNodeChannels := channelManager.GetByCollectionAndNode(replica.GetCollectionID(), normalNodeID) diff --git a/internal/querycoordv2/checkers/balance_checker.go b/internal/querycoordv2/checkers/balance_checker.go index eb417ea161..8444392fd6 100644 --- a/internal/querycoordv2/checkers/balance_checker.go +++ b/internal/querycoordv2/checkers/balance_checker.go @@ -21,6 +21,9 @@ import ( "sort" "time" + "github.com/samber/lo" + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querycoordv2/balance" "github.com/milvus-io/milvus/internal/querycoordv2/meta" @@ -29,9 +32,6 @@ import ( "github.com/milvus-io/milvus/internal/querycoordv2/task" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - - "github.com/samber/lo" - "go.uber.org/zap" ) // BalanceChecker checks the cluster distribution and generates balance tasks. @@ -84,12 +84,12 @@ func (b *BalanceChecker) replicasToBalance() []int64 { } } } - //do stopping balance only in this round + // do stopping balance only in this round if len(stoppingReplicas) > 0 { return stoppingReplicas } - //no stopping balance and auto balance is disabled, return empty collections for balance + // no stopping balance and auto balance is disabled, return empty collections for balance if !Params.QueryCoordCfg.AutoBalance.GetAsBool() { return nil } @@ -98,7 +98,7 @@ func (b *BalanceChecker) replicasToBalance() []int64 { return nil } - //iterator one normal collection in one round + // iterator one normal collection in one round normalReplicasToBalance := make([]int64, 0) hasUnbalancedCollection := false for _, cid := range loadedCollections { diff --git a/internal/querycoordv2/checkers/balance_checker_test.go b/internal/querycoordv2/checkers/balance_checker_test.go index 9b0451b120..f15bb2b494 100644 --- a/internal/querycoordv2/checkers/balance_checker_test.go +++ b/internal/querycoordv2/checkers/balance_checker_test.go @@ -20,6 +20,9 @@ import ( "context" "testing" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus/internal/kv" etcdkv "github.com/milvus-io/milvus/internal/kv/etcd" "github.com/milvus-io/milvus/internal/metastore/kv/querycoord" @@ -32,9 +35,6 @@ import ( "github.com/milvus-io/milvus/internal/querycoordv2/utils" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/paramtable" - - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" ) type BalanceCheckerTestSuite struct { @@ -83,7 +83,7 @@ func (suite *BalanceCheckerTestSuite) TearDownTest() { } func (suite *BalanceCheckerTestSuite) TestAutoBalanceConf() { - //set up nodes info + // set up nodes info nodeID1, nodeID2 := 1, 2 suite.nodeMgr.Add(session.NewNodeInfo(int64(nodeID1), "localhost")) suite.nodeMgr.Add(session.NewNodeInfo(int64(nodeID2), "localhost")) @@ -105,7 +105,7 @@ func (suite *BalanceCheckerTestSuite) TestAutoBalanceConf() { suite.checker.meta.CollectionManager.PutCollection(collection2) suite.checker.meta.ReplicaManager.Put(replica2) - //test disable auto balance + // test disable auto balance paramtable.Get().Save(Params.QueryCoordCfg.AutoBalance.Key, "false") suite.scheduler.EXPECT().GetSegmentTaskNum().Maybe().Return(func() int { return 0 @@ -115,22 +115,22 @@ func (suite *BalanceCheckerTestSuite) TestAutoBalanceConf() { segPlans, _ := 
suite.checker.balanceReplicas(replicasToBalance) suite.Empty(segPlans) - //test enable auto balance + // test enable auto balance paramtable.Get().Save(Params.QueryCoordCfg.AutoBalance.Key, "true") idsToBalance := []int64{int64(replicaID1)} replicasToBalance = suite.checker.replicasToBalance() suite.ElementsMatch(idsToBalance, replicasToBalance) - //next round + // next round idsToBalance = []int64{int64(replicaID2)} replicasToBalance = suite.checker.replicasToBalance() suite.ElementsMatch(idsToBalance, replicasToBalance) - //final round + // final round replicasToBalance = suite.checker.replicasToBalance() suite.Empty(replicasToBalance) } func (suite *BalanceCheckerTestSuite) TestBusyScheduler() { - //set up nodes info + // set up nodes info nodeID1, nodeID2 := 1, 2 suite.nodeMgr.Add(session.NewNodeInfo(int64(nodeID1), "localhost")) suite.nodeMgr.Add(session.NewNodeInfo(int64(nodeID2), "localhost")) @@ -152,7 +152,7 @@ func (suite *BalanceCheckerTestSuite) TestBusyScheduler() { suite.checker.meta.CollectionManager.PutCollection(collection2) suite.checker.meta.ReplicaManager.Put(replica2) - //test scheduler busy + // test scheduler busy paramtable.Get().Save(Params.QueryCoordCfg.AutoBalance.Key, "true") suite.scheduler.EXPECT().GetSegmentTaskNum().Maybe().Return(func() int { return 1 @@ -164,7 +164,7 @@ func (suite *BalanceCheckerTestSuite) TestBusyScheduler() { } func (suite *BalanceCheckerTestSuite) TestStoppingBalance() { - //set up nodes info, stopping node1 + // set up nodes info, stopping node1 nodeID1, nodeID2 := 1, 2 suite.nodeMgr.Add(session.NewNodeInfo(int64(nodeID1), "localhost")) suite.nodeMgr.Add(session.NewNodeInfo(int64(nodeID2), "localhost")) @@ -187,12 +187,12 @@ func (suite *BalanceCheckerTestSuite) TestStoppingBalance() { suite.checker.meta.CollectionManager.PutCollection(collection2) suite.checker.meta.ReplicaManager.Put(replica2) - //test stopping balance + // test stopping balance idsToBalance := []int64{int64(replicaID1), int64(replicaID2)} replicasToBalance := suite.checker.replicasToBalance() suite.ElementsMatch(idsToBalance, replicasToBalance) - //checker check + // checker check segPlans, chanPlans := make([]balance.SegmentAssignPlan, 0), make([]balance.ChannelAssignPlan, 0) mockPlan := balance.SegmentAssignPlan{ Segment: utils.CreateTestSegment(1, 1, 1, 1, 1, "1"), diff --git a/internal/querycoordv2/checkers/channel_checker.go b/internal/querycoordv2/checkers/channel_checker.go index bcb1689693..046cbc45fc 100644 --- a/internal/querycoordv2/checkers/channel_checker.go +++ b/internal/querycoordv2/checkers/channel_checker.go @@ -110,7 +110,8 @@ func (c *ChannelChecker) checkReplica(ctx context.Context, replica *meta.Replica // GetDmChannelDiff get channel diff between target and dist func (c *ChannelChecker) getDmChannelDiff(collectionID int64, - replicaID int64) (toLoad, toRelease []*meta.DmChannel) { + replicaID int64, +) (toLoad, toRelease []*meta.DmChannel) { replica := c.meta.Get(replicaID) if replica == nil { log.Info("replica does not exist, skip it") @@ -135,7 +136,7 @@ func (c *ChannelChecker) getDmChannelDiff(collectionID int64, } } - //get channels which exists on next target, but not on dist + // get channels which exists on next target, but not on dist for name, channel := range nextTargetMap { _, existOnDist := distMap[name] if !existOnDist { diff --git a/internal/querycoordv2/checkers/controller.go b/internal/querycoordv2/checkers/controller.go index 8b602683f9..8f54c1166a 100644 --- a/internal/querycoordv2/checkers/controller.go +++ 
b/internal/querycoordv2/checkers/controller.go @@ -21,18 +21,17 @@ import ( "sync" "time" + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/querycoordv2/balance" "github.com/milvus-io/milvus/internal/querycoordv2/meta" . "github.com/milvus-io/milvus/internal/querycoordv2/params" "github.com/milvus-io/milvus/internal/querycoordv2/session" "github.com/milvus-io/milvus/internal/querycoordv2/task" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) -var ( - checkRoundTaskNumLimit = 256 -) +var checkRoundTaskNumLimit = 256 var ( Segment_Checker = "segment_checker" @@ -67,7 +66,6 @@ func NewCheckerController( scheduler task.Scheduler, broker meta.Broker, ) *CheckerController { - // CheckerController runs checkers with the order, // the former checker has higher priority checkers := map[string]Checker{ @@ -119,7 +117,6 @@ func getCheckerInterval(checkerType string) time.Duration { default: return Params.QueryCoordCfg.CheckInterval.GetAsDuration(time.Millisecond) } - } func (controller *CheckerController) StartChecker(ctx context.Context, checkerType string) { diff --git a/internal/querycoordv2/checkers/controller_test.go b/internal/querycoordv2/checkers/controller_test.go index eca3e01047..b12428ff65 100644 --- a/internal/querycoordv2/checkers/controller_test.go +++ b/internal/querycoordv2/checkers/controller_test.go @@ -21,6 +21,10 @@ import ( "testing" "time" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "go.uber.org/atomic" + "github.com/milvus-io/milvus/internal/kv" etcdkv "github.com/milvus-io/milvus/internal/kv/etcd" "github.com/milvus-io/milvus/internal/metastore/kv/querycoord" @@ -33,9 +37,6 @@ import ( "github.com/milvus-io/milvus/internal/querycoordv2/utils" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" - "go.uber.org/atomic" ) type CheckerControllerSuite struct { diff --git a/internal/querycoordv2/checkers/index_checker.go b/internal/querycoordv2/checkers/index_checker.go index fd935ee4eb..48471f928b 100644 --- a/internal/querycoordv2/checkers/index_checker.go +++ b/internal/querycoordv2/checkers/index_checker.go @@ -20,14 +20,15 @@ import ( "context" "time" + "github.com/samber/lo" + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querycoordv2/meta" "github.com/milvus-io/milvus/internal/querycoordv2/params" "github.com/milvus-io/milvus/internal/querycoordv2/task" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/samber/lo" - "go.uber.org/zap" ) var _ Checker = (*IndexChecker)(nil) diff --git a/internal/querycoordv2/checkers/index_checker_test.go b/internal/querycoordv2/checkers/index_checker_test.go index f17fc453f1..fe1b9774a8 100644 --- a/internal/querycoordv2/checkers/index_checker_test.go +++ b/internal/querycoordv2/checkers/index_checker_test.go @@ -21,6 +21,9 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus/internal/kv" etcdkv "github.com/milvus-io/milvus/internal/kv/etcd" "github.com/milvus-io/milvus/internal/metastore/kv/querycoord" @@ -32,8 +35,6 @@ import ( "github.com/milvus-io/milvus/internal/querycoordv2/utils" "github.com/milvus-io/milvus/pkg/util/etcd" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" ) type 
IndexCheckerSuite struct { diff --git a/internal/querycoordv2/checkers/segment_checker.go b/internal/querycoordv2/checkers/segment_checker.go index a19e3974c0..9e80984d64 100644 --- a/internal/querycoordv2/checkers/segment_checker.go +++ b/internal/querycoordv2/checkers/segment_checker.go @@ -139,7 +139,8 @@ func (c *SegmentChecker) checkReplica(ctx context.Context, replica *meta.Replica // GetStreamingSegmentDiff get streaming segment diff between leader view and target func (c *SegmentChecker) getStreamingSegmentDiff(collectionID int64, - replicaID int64) (toLoad []*datapb.SegmentInfo, toRelease []*meta.Segment) { + replicaID int64, +) (toLoad []*datapb.SegmentInfo, toRelease []*meta.Segment) { replica := c.meta.Get(replicaID) if replica == nil { log.Info("replica does not exist, skip it") @@ -198,7 +199,8 @@ func (c *SegmentChecker) getStreamingSegmentDiff(collectionID int64, // GetHistoricalSegmentDiff get historical segment diff between target and dist func (c *SegmentChecker) getHistoricalSegmentDiff( collectionID int64, - replicaID int64) (toLoad []*datapb.SegmentInfo, toRelease []*meta.Segment) { + replicaID int64, +) (toLoad []*datapb.SegmentInfo, toRelease []*meta.Segment) { replica := c.meta.Get(replicaID) if replica == nil { log.Info("replica does not exist, skip it") @@ -344,7 +346,6 @@ func (c *SegmentChecker) createSegmentReduceTasks(ctx context.Context, segments replicaID, action, ) - if err != nil { log.Warn("create segment reduce task failed", zap.Int64("collection", s.GetCollectionID()), diff --git a/internal/querycoordv2/dist/dist_handler.go b/internal/querycoordv2/dist/dist_handler.go index b2f4bccd63..c9f3bde2ff 100644 --- a/internal/querycoordv2/dist/dist_handler.go +++ b/internal/querycoordv2/dist/dist_handler.go @@ -233,7 +233,6 @@ func (dh *distHandler) getDistribution(ctx context.Context) (*querypb.GetDataDis ), Checkpoints: channels, }) - if err != nil { return nil, err } diff --git a/internal/querycoordv2/handlers.go b/internal/querycoordv2/handlers.go index e7d5c3577e..635826207a 100644 --- a/internal/querycoordv2/handlers.go +++ b/internal/querycoordv2/handlers.go @@ -67,12 +67,12 @@ func (s *Server) getCollectionSegmentInfo(collection int64) []*querypb.SegmentIn infos := make(map[int64]*querypb.SegmentInfo) for _, segment := range segments { if _, existCurrentTarget := currentTargetSegmentsMap[segment.GetID()]; !existCurrentTarget { - //if one segment exists in distMap but doesn't exist in currentTargetMap - //in order to guarantee that get segment request launched by sdk could get - //consistent result, for example - //sdk insert three segments:A, B, D, then A + B----compact--> C - //In this scenario, we promise that clients see either 2 segments(C,D) or 3 segments(A, B, D) - //rather than 4 segments(A, B, C, D), in which query nodes are loading C but have completed loading process + // if one segment exists in distMap but doesn't exist in currentTargetMap + // in order to guarantee that get segment request launched by sdk could get + // consistent result, for example + // sdk insert three segments:A, B, D, then A + B----compact--> C + // In this scenario, we promise that clients see either 2 segments(C,D) or 3 segments(A, B, D) + // rather than 4 segments(A, B, C, D), in which query nodes are loading C but have completed loading process log.Info("filtered segment being in the intermediate status", zap.Int64("segmentID", segment.GetID())) continue @@ -149,7 +149,6 @@ func (s *Server) balanceSegments(ctx context.Context, req *querypb.LoadBalanceRe 
task.NewSegmentActionWithScope(plan.To, task.ActionTypeGrow, plan.Segment.GetInsertChannel(), plan.Segment.GetID(), querypb.DataScope_Historical), task.NewSegmentActionWithScope(srcNode, task.ActionTypeReduce, plan.Segment.GetInsertChannel(), plan.Segment.GetID(), querypb.DataScope_Historical), ) - if err != nil { log.Warn("create segment task for balance failed", zap.Int64("collection", req.GetCollectionID()), @@ -174,8 +173,8 @@ func (s *Server) balanceSegments(ctx context.Context, req *querypb.LoadBalanceRe // TODO(dragondriver): add more detail metrics func (s *Server) getSystemInfoMetrics( ctx context.Context, - req *milvuspb.GetMetricsRequest) (string, error) { - + req *milvuspb.GetMetricsRequest, +) (string, error) { clusterTopology := metricsinfo.QueryClusterTopology{ Self: metricsinfo.QueryCoordInfos{ BaseComponentInfos: metricsinfo.BaseComponentInfos{ diff --git a/internal/querycoordv2/job/job_sync.go b/internal/querycoordv2/job/job_sync.go index 4b6c8eb435..72a25b9a67 100644 --- a/internal/querycoordv2/job/job_sync.go +++ b/internal/querycoordv2/job/job_sync.go @@ -20,9 +20,9 @@ import ( "context" "time" + "github.com/cockroachdb/errors" "go.uber.org/zap" - "github.com/cockroachdb/errors" "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querycoordv2/meta" "github.com/milvus-io/milvus/internal/querycoordv2/session" diff --git a/internal/querycoordv2/job/undo.go b/internal/querycoordv2/job/undo.go index 5ea53e62ad..64b89bb78c 100644 --- a/internal/querycoordv2/job/undo.go +++ b/internal/querycoordv2/job/undo.go @@ -19,11 +19,12 @@ package job import ( "context" + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/querycoordv2/meta" "github.com/milvus-io/milvus/internal/querycoordv2/observers" "github.com/milvus-io/milvus/internal/querycoordv2/session" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) type UndoList struct { @@ -42,7 +43,8 @@ type UndoList struct { } func NewUndoList(ctx context.Context, meta *meta.Meta, - cluster session.Cluster, targetMgr *meta.TargetManager, targetObserver *observers.TargetObserver) *UndoList { + cluster session.Cluster, targetMgr *meta.TargetManager, targetObserver *observers.TargetObserver, +) *UndoList { return &UndoList{ ctx: ctx, meta: meta, diff --git a/internal/querycoordv2/job/utils.go b/internal/querycoordv2/job/utils.go index 7e06d80fc6..c6a9b26cfc 100644 --- a/internal/querycoordv2/job/utils.go +++ b/internal/querycoordv2/job/utils.go @@ -68,7 +68,8 @@ func loadPartitions(ctx context.Context, broker meta.Broker, withSchema bool, collection int64, - partitions ...int64) error { + partitions ...int64, +) error { var err error var schema *schemapb.CollectionSchema if withSchema { @@ -113,7 +114,8 @@ func releasePartitions(ctx context.Context, meta *meta.Meta, cluster session.Cluster, collection int64, - partitions ...int64) { + partitions ...int64, +) { log := log.Ctx(ctx).With(zap.Int64("collection", collection), zap.Int64s("partitions", partitions)) replicas := meta.ReplicaManager.GetByCollection(collection) releaseReq := &querypb.ReleasePartitionsRequest{ diff --git a/internal/querycoordv2/meta/coordinator_broker.go b/internal/querycoordv2/meta/coordinator_broker.go index 0bc856852e..50237682be 100644 --- a/internal/querycoordv2/meta/coordinator_broker.go +++ b/internal/querycoordv2/meta/coordinator_broker.go @@ -55,7 +55,8 @@ type CoordinatorBroker struct { func NewCoordinatorBroker( dataCoord types.DataCoord, - rootCoord types.RootCoord) *CoordinatorBroker { + rootCoord 
types.RootCoord, +) *CoordinatorBroker { return &CoordinatorBroker{ dataCoord, rootCoord, diff --git a/internal/querycoordv2/meta/resource_manager.go b/internal/querycoordv2/meta/resource_manager.go index e859e62acf..b05600efc6 100644 --- a/internal/querycoordv2/meta/resource_manager.go +++ b/internal/querycoordv2/meta/resource_manager.go @@ -481,7 +481,6 @@ func (rm *ResourceManager) HandleNodeDown(node int64) (string, error) { rgName, err := rm.findResourceGroupByNode(node) if err != nil { return "", ErrNodeNotAssignToRG - } newNodes := []int64{} @@ -528,7 +527,7 @@ func (rm *ResourceManager) TransferNode(from string, to string, numNode int) ([] return nil, ErrNodeNotEnough } - //todo: a better way to choose a node with least balance cost + // todo: a better way to choose a node with least balance cost movedNodes, err := rm.transferNodeInStore(from, to, numNode) if err != nil { return nil, err @@ -627,7 +626,7 @@ func (rm *ResourceManager) AutoRecoverResourceGroup(rgName string) ([]int64, err lackNodesNum := rm.groups[rgName].LackOfNodes() nodesInDefault := rm.groups[DefaultResourceGroupName].GetNodes() for i := 0; i < len(nodesInDefault) && i < lackNodesNum; i++ { - //todo: a better way to choose a node with least balance cost + // todo: a better way to choose a node with least balance cost node := nodesInDefault[i] err := rm.unassignNode(DefaultResourceGroupName, node) if err != nil { diff --git a/internal/querycoordv2/meta/target_manager.go b/internal/querycoordv2/meta/target_manager.go index 2d2c9d2b3c..d034b34c57 100644 --- a/internal/querycoordv2/meta/target_manager.go +++ b/internal/querycoordv2/meta/target_manager.go @@ -20,14 +20,15 @@ import ( "context" "sync" + "github.com/samber/lo" + "go.uber.org/zap" + "google.golang.org/grpc/codes" + "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/retry" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/samber/lo" - "go.uber.org/zap" - "google.golang.org/grpc/codes" ) type TargetScope = int32 @@ -324,7 +325,8 @@ func (mgr *TargetManager) getTarget(scope TargetScope) *target { } func (mgr *TargetManager) GetStreamingSegmentsByCollection(collectionID int64, - scope TargetScope) typeutil.UniqueSet { + scope TargetScope, +) typeutil.UniqueSet { mgr.rwMutex.RLock() defer mgr.rwMutex.RUnlock() @@ -345,7 +347,8 @@ func (mgr *TargetManager) GetStreamingSegmentsByCollection(collectionID int64, func (mgr *TargetManager) GetStreamingSegmentsByChannel(collectionID int64, channelName string, - scope TargetScope) typeutil.UniqueSet { + scope TargetScope, +) typeutil.UniqueSet { mgr.rwMutex.RLock() defer mgr.rwMutex.RUnlock() @@ -367,7 +370,8 @@ func (mgr *TargetManager) GetStreamingSegmentsByChannel(collectionID int64, } func (mgr *TargetManager) GetHistoricalSegmentsByCollection(collectionID int64, - scope TargetScope) map[int64]*datapb.SegmentInfo { + scope TargetScope, +) map[int64]*datapb.SegmentInfo { mgr.rwMutex.RLock() defer mgr.rwMutex.RUnlock() @@ -382,7 +386,8 @@ func (mgr *TargetManager) GetHistoricalSegmentsByCollection(collectionID int64, func (mgr *TargetManager) GetHistoricalSegmentsByChannel(collectionID int64, channelName string, - scope TargetScope) map[int64]*datapb.SegmentInfo { + scope TargetScope, +) map[int64]*datapb.SegmentInfo { mgr.rwMutex.RLock() defer mgr.rwMutex.RUnlock() @@ -405,7 +410,8 @@ func (mgr *TargetManager) GetHistoricalSegmentsByChannel(collectionID int64, func (mgr 
*TargetManager) GetDroppedSegmentsByChannel(collectionID int64, channelName string, - scope TargetScope) []int64 { + scope TargetScope, +) []int64 { mgr.rwMutex.RLock() defer mgr.rwMutex.RUnlock() @@ -425,7 +431,8 @@ func (mgr *TargetManager) GetDroppedSegmentsByChannel(collectionID int64, } func (mgr *TargetManager) GetHistoricalSegmentsByPartition(collectionID int64, - partitionID int64, scope TargetScope) map[int64]*datapb.SegmentInfo { + partitionID int64, scope TargetScope, +) map[int64]*datapb.SegmentInfo { mgr.rwMutex.RLock() defer mgr.rwMutex.RUnlock() diff --git a/internal/querycoordv2/meta/target_manager_test.go b/internal/querycoordv2/meta/target_manager_test.go index b57449c431..090fdd4d26 100644 --- a/internal/querycoordv2/meta/target_manager_test.go +++ b/internal/querycoordv2/meta/target_manager_test.go @@ -138,7 +138,8 @@ func (suite *TargetManagerSuite) SetupTest() { suite.meta.PutCollection(&Collection{ CollectionLoadInfo: &querypb.CollectionLoadInfo{ CollectionID: collection, - ReplicaNumber: 1}, + ReplicaNumber: 1, + }, }) for _, partition := range suite.partitions[collection] { suite.meta.PutPartition(&Partition{ @@ -183,7 +184,8 @@ func (suite *TargetManagerSuite) TestUpdateNextTarget() { suite.meta.PutCollection(&Collection{ CollectionLoadInfo: &querypb.CollectionLoadInfo{ CollectionID: collectionID, - ReplicaNumber: 1}, + ReplicaNumber: 1, + }, }) suite.meta.PutPartition(&Partition{ PartitionLoadInfo: &querypb.PartitionLoadInfo{ @@ -251,7 +253,6 @@ func (suite *TargetManagerSuite) TestUpdateNextTarget() { err = suite.mgr.UpdateCollectionNextTarget(collectionID) suite.NoError(err) - } func (suite *TargetManagerSuite) TestRemovePartition() { @@ -365,7 +366,8 @@ func (suite *TargetManagerSuite) TestGetSegmentByChannel() { suite.meta.PutCollection(&Collection{ CollectionLoadInfo: &querypb.CollectionLoadInfo{ CollectionID: collectionID, - ReplicaNumber: 1}, + ReplicaNumber: 1, + }, }) suite.meta.PutPartition(&Partition{ PartitionLoadInfo: &querypb.PartitionLoadInfo{ diff --git a/internal/querycoordv2/observers/leader_observer.go b/internal/querycoordv2/observers/leader_observer.go index ceb50d25e1..4fb2a36c25 100644 --- a/internal/querycoordv2/observers/leader_observer.go +++ b/internal/querycoordv2/observers/leader_observer.go @@ -21,6 +21,7 @@ import ( "sync" "time" + "github.com/samber/lo" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" @@ -30,7 +31,6 @@ import ( "github.com/milvus-io/milvus/internal/querycoordv2/utils" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/commonpbutil" - "github.com/samber/lo" ) const ( diff --git a/internal/querycoordv2/observers/leader_observer_test.go b/internal/querycoordv2/observers/leader_observer_test.go index ab240a41d7..840f5422ab 100644 --- a/internal/querycoordv2/observers/leader_observer_test.go +++ b/internal/querycoordv2/observers/leader_observer_test.go @@ -471,7 +471,6 @@ func (suite *LeaderObserverTestSuite) TestSyncRemovedSegments() { } func (suite *LeaderObserverTestSuite) TestIgnoreSyncRemovedSegments() { - observer := suite.observer observer.meta.CollectionManager.PutCollection(utils.CreateTestCollection(1, 1)) observer.meta.CollectionManager.PutPartition(utils.CreateTestPartition(1, 1)) diff --git a/internal/querycoordv2/observers/replica_observer_test.go b/internal/querycoordv2/observers/replica_observer_test.go index e6181a2346..ccf00942f1 100644 --- a/internal/querycoordv2/observers/replica_observer_test.go +++ 
b/internal/querycoordv2/observers/replica_observer_test.go @@ -39,7 +39,7 @@ type ReplicaObserverSuite struct { suite.Suite kv kv.MetaKv - //dependency + // dependency meta *meta.Meta distMgr *meta.DistributionManager diff --git a/internal/querycoordv2/observers/resource_observer_test.go b/internal/querycoordv2/observers/resource_observer_test.go index 3bbe78b262..fbf93cb579 100644 --- a/internal/querycoordv2/observers/resource_observer_test.go +++ b/internal/querycoordv2/observers/resource_observer_test.go @@ -41,7 +41,7 @@ type ResourceObserverSuite struct { suite.Suite kv kv.MetaKv - //dependency + // dependency store *mocks.QueryCoordCatalog meta *meta.Meta observer *ResourceObserver @@ -122,7 +122,7 @@ func (suite *ResourceObserverSuite) TestCheckNodesInReplica() { suite.meta.ResourceManager.HandleNodeDown(100) suite.meta.ResourceManager.HandleNodeDown(101) - //before auto recover rg + // before auto recover rg suite.Eventually(func() bool { lackNodesNum := suite.meta.ResourceManager.CheckLackOfNode("rg") nodesInReplica := suite.meta.ReplicaManager.Get(2).GetNodes() @@ -189,7 +189,7 @@ func (suite *ResourceObserverSuite) TestRecoverReplicaFailed() { suite.meta.ResourceManager.HandleNodeDown(100) suite.meta.ResourceManager.HandleNodeDown(101) - //before auto recover rg + // before auto recover rg suite.Eventually(func() bool { lackNodesNum := suite.meta.ResourceManager.CheckLackOfNode("rg") nodesInReplica := suite.meta.ReplicaManager.Get(2).GetNodes() diff --git a/internal/querycoordv2/observers/target_observer_test.go b/internal/querycoordv2/observers/target_observer_test.go index 9a09f1b1fb..59d17839a8 100644 --- a/internal/querycoordv2/observers/target_observer_test.go +++ b/internal/querycoordv2/observers/target_observer_test.go @@ -41,7 +41,7 @@ type TargetObserverSuite struct { suite.Suite kv kv.MetaKv - //dependency + // dependency meta *meta.Meta targetMgr *meta.TargetManager distMgr *meta.DistributionManager diff --git a/internal/querycoordv2/params/params.go b/internal/querycoordv2/params/params.go index e544051873..1b7fe5aa9f 100644 --- a/internal/querycoordv2/params/params.go +++ b/internal/querycoordv2/params/params.go @@ -29,9 +29,7 @@ import ( var Params *paramtable.ComponentParam = paramtable.Get() -var ( - ErrFailedAllocateID = errors.New("failed to allocate ID") -) +var ErrFailedAllocateID = errors.New("failed to allocate ID") // GenerateEtcdConfig returns a etcd config with a random root path, // NOTE: for test only diff --git a/internal/querycoordv2/server.go b/internal/querycoordv2/server.go index e8d272d3c5..1d096605fa 100644 --- a/internal/querycoordv2/server.go +++ b/internal/querycoordv2/server.go @@ -62,10 +62,8 @@ import ( "github.com/milvus-io/milvus/pkg/util/typeutil" ) -var ( - // Only for re-export - Params = params.Params -) +// Only for re-export +var Params = params.Params type Server struct { ctx context.Context @@ -525,7 +523,7 @@ func (s *Server) GetComponentStates(ctx context.Context) (*milvuspb.ComponentSta return &milvuspb.ComponentStates{ Status: merr.Status(nil), State: serviceComponentInfo, - //SubcomponentStates: subComponentInfos, + // SubcomponentStates: subComponentInfos, }, nil } diff --git a/internal/querycoordv2/server_test.go b/internal/querycoordv2/server_test.go index 9c53bc6159..192ab65e97 100644 --- a/internal/querycoordv2/server_test.go +++ b/internal/querycoordv2/server_test.go @@ -328,7 +328,7 @@ func (suite *ServerSuite) TestEnableActiveStandby() { suite.NoError(err) err = suite.server.SetDataCoord(mockDataCoord) 
suite.NoError(err) - //suite.hackServer() + // suite.hackServer() states1, err := suite.server.GetComponentStates(context.Background()) suite.NoError(err) suite.Equal(commonpb.StateCode_StandBy, states1.GetState().GetStateCode()) diff --git a/internal/querycoordv2/services_test.go b/internal/querycoordv2/services_test.go index a42b7f45d4..113266a10f 100644 --- a/internal/querycoordv2/services_test.go +++ b/internal/querycoordv2/services_test.go @@ -118,8 +118,10 @@ func (suite *ServiceSuite) SetupSuite() { 1000: 1, 1001: 3, } - suite.nodes = []int64{1, 2, 3, 4, 5, - 101, 102, 103, 104, 105} + suite.nodes = []int64{ + 1, 2, 3, 4, 5, + 101, 102, 103, 104, 105, + } } func (suite *ServiceSuite) SetupTest() { @@ -389,14 +391,16 @@ func (suite *ServiceSuite) TestResourceGroup() { ID: 1, CollectionID: 1, Nodes: []int64{1011, 1013}, - ResourceGroup: "rg11"}, + ResourceGroup: "rg11", + }, typeutil.NewUniqueSet(1011, 1013)), ) server.meta.ReplicaManager.Put(meta.NewReplica(&querypb.Replica{ ID: 2, CollectionID: 2, Nodes: []int64{1012, 1014}, - ResourceGroup: "rg12"}, + ResourceGroup: "rg12", + }, typeutil.NewUniqueSet(1012, 1014)), ) diff --git a/internal/querycoordv2/session/cluster.go b/internal/querycoordv2/session/cluster.go index a06569105e..32f6e3bf1b 100644 --- a/internal/querycoordv2/session/cluster.go +++ b/internal/querycoordv2/session/cluster.go @@ -23,15 +23,15 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/golang/protobuf/proto" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" grpcquerynodeclient "github.com/milvus-io/milvus/internal/distributed/querynode/client" "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/types" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) const ( @@ -40,9 +40,7 @@ const ( bufferFlushPeriod = 500 * time.Millisecond ) -var ( - ErrNodeNotFound = errors.New("NodeNotFound") -) +var ErrNodeNotFound = errors.New("NodeNotFound") func WrapErrNodeNotFound(nodeID int64) error { return fmt.Errorf("%w(%v)", ErrNodeNotFound, nodeID) diff --git a/internal/querycoordv2/session/cluster_test.go b/internal/querycoordv2/session/cluster_test.go index 6a3b84feb1..027edf4150 100644 --- a/internal/querycoordv2/session/cluster_test.go +++ b/internal/querycoordv2/session/cluster_test.go @@ -23,16 +23,17 @@ import ( "testing" "time" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "google.golang.org/grpc" + "google.golang.org/grpc/credentials/insecure" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querycoordv2/mocks" "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" - "google.golang.org/grpc" - "google.golang.org/grpc/credentials/insecure" ) const bufSize = 1024 * 1024 @@ -65,6 +66,7 @@ func (suite *ClusterTestSuite) SetupTest() { func (suite *ClusterTestSuite) TearDownTest() { suite.cluster.Stop() } + func (suite *ClusterTestSuite) setupServers() { svrs := suite.createTestServers() for _, svr := range svrs { diff --git a/internal/querycoordv2/session/node_manager.go b/internal/querycoordv2/session/node_manager.go index bb0f51f8d4..451a043f3a 100644 --- a/internal/querycoordv2/session/node_manager.go +++ 
b/internal/querycoordv2/session/node_manager.go @@ -21,8 +21,9 @@ import ( "sync" "time" - "github.com/milvus-io/milvus/pkg/metrics" "go.uber.org/atomic" + + "github.com/milvus-io/milvus/pkg/metrics" ) type Manager interface { diff --git a/internal/querycoordv2/task/executor.go b/internal/querycoordv2/task/executor.go index 8621327748..0d1e5ccdd3 100644 --- a/internal/querycoordv2/task/executor.go +++ b/internal/querycoordv2/task/executor.go @@ -22,8 +22,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" - "github.com/milvus-io/milvus/pkg/util/typeutil" "go.uber.org/atomic" "go.uber.org/zap" @@ -35,6 +33,8 @@ import ( "github.com/milvus-io/milvus/internal/querycoordv2/utils" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/merr" + "github.com/milvus-io/milvus/pkg/util/tsoutil" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) type Executor struct { @@ -59,7 +59,8 @@ func NewExecutor(meta *meta.Meta, broker meta.Broker, targetMgr *meta.TargetManager, cluster session.Cluster, - nodeMgr *session.NodeManager) *Executor { + nodeMgr *session.NodeManager, +) *Executor { return &Executor{ doneCh: make(chan struct{}), meta: meta, @@ -285,7 +286,7 @@ func (ex *Executor) loadSegment(task *SegmentTask, step int) error { } log = log.With(zap.Int64("shardLeader", leader)) - //Get collection index info + // Get collection index info indexInfo, err := ex.broker.DescribeIndex(ctx, task.CollectionID()) if err != nil { log.Warn("fail to get index meta of collection") diff --git a/internal/querycoordv2/task/scheduler.go b/internal/querycoordv2/task/scheduler.go index b3ea654c2c..74c3c974c2 100644 --- a/internal/querycoordv2/task/scheduler.go +++ b/internal/querycoordv2/task/scheduler.go @@ -23,10 +23,11 @@ import ( "sync" "time" + "github.com/cockroachdb/errors" + "github.com/samber/lo" "go.uber.org/atomic" "go.uber.org/zap" - "github.com/cockroachdb/errors" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querycoordv2/meta" @@ -37,7 +38,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/merr" . "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/samber/lo" ) const ( @@ -165,7 +165,8 @@ func NewScheduler(ctx context.Context, targetMgr *meta.TargetManager, broker meta.Broker, cluster session.Cluster, - nodeMgr *session.NodeManager) *taskScheduler { + nodeMgr *session.NodeManager, +) *taskScheduler { id := time.Now().UnixMilli() return &taskScheduler{ ctx: ctx, diff --git a/internal/querycoordv2/task/task.go b/internal/querycoordv2/task/task.go index 6095bdc395..5146efa4e0 100644 --- a/internal/querycoordv2/task/task.go +++ b/internal/querycoordv2/task/task.go @@ -22,17 +22,20 @@ import ( "time" "github.com/cockroachdb/errors" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/trace" + "go.uber.org/atomic" + "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querycoordv2/meta" "github.com/milvus-io/milvus/pkg/util/merr" . 
"github.com/milvus-io/milvus/pkg/util/typeutil" - "go.opentelemetry.io/otel" - "go.opentelemetry.io/otel/trace" - "go.uber.org/atomic" ) -type Status = int32 -type Priority int32 +type ( + Status = int32 + Priority int32 +) const ( TaskStatusCreated Status = iota + 1 @@ -58,10 +61,8 @@ func (p Priority) String() string { return TaskPriorityName[p] } -var ( - // All task priorities from low to high - TaskPriorities = []Priority{TaskPriorityLow, TaskPriorityNormal, TaskPriorityHigh} -) +// All task priorities from low to high +var TaskPriorities = []Priority{TaskPriorityLow, TaskPriorityNormal, TaskPriorityHigh} type Task interface { Context() context.Context @@ -286,7 +287,8 @@ func NewSegmentTask(ctx context.Context, sourceID, collectionID, replicaID UniqueID, - actions ...Action) (*SegmentTask, error) { + actions ...Action, +) (*SegmentTask, error) { if len(actions) == 0 { return nil, errors.WithStack(merr.WrapErrParameterInvalid("non-empty actions", "no action")) } @@ -342,7 +344,8 @@ func NewChannelTask(ctx context.Context, sourceID, collectionID, replicaID UniqueID, - actions ...Action) (*ChannelTask, error) { + actions ...Action, +) (*ChannelTask, error) { if len(actions) == 0 { return nil, errors.WithStack(merr.WrapErrParameterInvalid("non-empty actions", "no action")) } diff --git a/internal/querycoordv2/task/task_test.go b/internal/querycoordv2/task/task_test.go index 6c1779867f..088b385afb 100644 --- a/internal/querycoordv2/task/task_test.go +++ b/internal/querycoordv2/task/task_test.go @@ -205,13 +205,15 @@ func (suite *TaskSuite) TestSubscribeChannelTask() { }, nil) for channel, segment := range suite.growingSegments { suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment). - Return(&datapb.GetSegmentInfoResponse{Infos: []*datapb.SegmentInfo{ - { - ID: segment, - CollectionID: suite.collection, - PartitionID: partitions[0], - InsertChannel: channel, - }}, + Return(&datapb.GetSegmentInfoResponse{ + Infos: []*datapb.SegmentInfo{ + { + ID: segment, + CollectionID: suite.collection, + PartitionID: partitions[0], + InsertChannel: channel, + }, + }, }, nil) } suite.broker.EXPECT().DescribeIndex(mock.Anything, suite.collection).Return([]*indexpb.IndexInfo{ @@ -394,13 +396,15 @@ func (suite *TaskSuite) TestLoadSegmentTask() { }, }, nil) for _, segment := range suite.loadSegments { - suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{Infos: []*datapb.SegmentInfo{ - { - ID: segment, - CollectionID: suite.collection, - PartitionID: partition, - InsertChannel: channel.ChannelName, - }}, + suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{ + Infos: []*datapb.SegmentInfo{ + { + ID: segment, + CollectionID: suite.collection, + PartitionID: partition, + InsertChannel: channel.ChannelName, + }, + }, }, nil) suite.broker.EXPECT().GetIndexInfo(mock.Anything, suite.collection, segment).Return(nil, nil) } @@ -488,13 +492,15 @@ func (suite *TaskSuite) TestLoadSegmentTaskNotIndex() { }, }, nil) for _, segment := range suite.loadSegments { - suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{Infos: []*datapb.SegmentInfo{ - { - ID: segment, - CollectionID: suite.collection, - PartitionID: partition, - InsertChannel: channel.ChannelName, - }}, + suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{ + Infos: []*datapb.SegmentInfo{ + { + ID: segment, + CollectionID: suite.collection, + PartitionID: partition, + 
InsertChannel: channel.ChannelName, + }, + }, }, nil) suite.broker.EXPECT().GetIndexInfo(mock.Anything, suite.collection, segment).Return(nil, merr.WrapErrIndexNotFound()) } @@ -577,13 +583,15 @@ func (suite *TaskSuite) TestLoadSegmentTaskFailed() { }, }, nil) for _, segment := range suite.loadSegments { - suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{Infos: []*datapb.SegmentInfo{ - { - ID: segment, - CollectionID: suite.collection, - PartitionID: partition, - InsertChannel: channel.ChannelName, - }}, + suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{ + Infos: []*datapb.SegmentInfo{ + { + ID: segment, + CollectionID: suite.collection, + PartitionID: partition, + InsertChannel: channel.ChannelName, + }, + }, }, nil) suite.broker.EXPECT().GetIndexInfo(mock.Anything, suite.collection, segment).Return(nil, errors.New("index not ready")) } @@ -778,13 +786,15 @@ func (suite *TaskSuite) TestMoveSegmentTask() { }, }, nil) for _, segment := range suite.moveSegments { - suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{Infos: []*datapb.SegmentInfo{ - { - ID: segment, - CollectionID: suite.collection, - PartitionID: partition, - InsertChannel: channel.ChannelName, - }}, + suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{ + Infos: []*datapb.SegmentInfo{ + { + ID: segment, + CollectionID: suite.collection, + PartitionID: partition, + InsertChannel: channel.ChannelName, + }, + }, }, nil) suite.broker.EXPECT().GetIndexInfo(mock.Anything, suite.collection, segment).Return(nil, nil) } @@ -946,13 +956,15 @@ func (suite *TaskSuite) TestTaskCanceled() { }, }, nil) for _, segment := range suite.loadSegments { - suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{Infos: []*datapb.SegmentInfo{ - { - ID: segment, - CollectionID: suite.collection, - PartitionID: partition, - InsertChannel: channel.ChannelName, - }}, + suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{ + Infos: []*datapb.SegmentInfo{ + { + ID: segment, + CollectionID: suite.collection, + PartitionID: partition, + InsertChannel: channel.ChannelName, + }, + }, }, nil) suite.broker.EXPECT().GetIndexInfo(mock.Anything, suite.collection, segment).Return(nil, nil) } @@ -1031,13 +1043,15 @@ func (suite *TaskSuite) TestSegmentTaskStale() { }, }, nil) for _, segment := range suite.loadSegments { - suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{Infos: []*datapb.SegmentInfo{ - { - ID: segment, - CollectionID: suite.collection, - PartitionID: partition, - InsertChannel: channel.ChannelName, - }}, + suite.broker.EXPECT().GetSegmentInfo(mock.Anything, segment).Return(&datapb.GetSegmentInfoResponse{ + Infos: []*datapb.SegmentInfo{ + { + ID: segment, + CollectionID: suite.collection, + PartitionID: partition, + InsertChannel: channel.ChannelName, + }, + }, }, nil) suite.broker.EXPECT().GetIndexInfo(mock.Anything, suite.collection, segment).Return(nil, nil) } diff --git a/internal/querycoordv2/utils/types.go b/internal/querycoordv2/utils/types.go index a1777d6a19..7920f10f64 100644 --- a/internal/querycoordv2/utils/types.go +++ b/internal/querycoordv2/utils/types.go @@ -19,8 +19,6 @@ package utils import ( "time" - "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/zap" 
"github.com/milvus-io/milvus-proto/go-api/v2/commonpb" @@ -29,6 +27,7 @@ import ( "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querycoordv2/meta" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/tsoutil" ) diff --git a/internal/querycoordv2/utils/types_test.go b/internal/querycoordv2/utils/types_test.go index fb55c7bcc6..f9bc9d9489 100644 --- a/internal/querycoordv2/utils/types_test.go +++ b/internal/querycoordv2/utils/types_test.go @@ -21,11 +21,11 @@ import ( "time" "github.com/golang/protobuf/proto" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) func Test_packLoadSegmentRequest(t *testing.T) { diff --git a/internal/querynodev2/cluster/manager.go b/internal/querynodev2/cluster/manager.go index 3a4387fc23..b239f24736 100644 --- a/internal/querynodev2/cluster/manager.go +++ b/internal/querynodev2/cluster/manager.go @@ -39,7 +39,7 @@ type WorkerBuilder func(ctx context.Context, nodeID int64) (Worker, error) type grpcWorkerManager struct { workers *typeutil.ConcurrentMap[int64, Worker] builder WorkerBuilder - sf conc.Singleflight[Worker] //singleflight.Group + sf conc.Singleflight[Worker] // singleflight.Group } // GetWorker returns worker with specified nodeID. diff --git a/internal/querynodev2/cluster/manager_test.go b/internal/querynodev2/cluster/manager_test.go index 3009c2677a..c953d5d1c5 100644 --- a/internal/querynodev2/cluster/manager_test.go +++ b/internal/querynodev2/cluster/manager_test.go @@ -21,7 +21,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" ) diff --git a/internal/querynodev2/cluster/worker_test.go b/internal/querynodev2/cluster/worker_test.go index 0890e2fe71..3f9979e070 100644 --- a/internal/querynodev2/cluster/worker_test.go +++ b/internal/querynodev2/cluster/worker_test.go @@ -24,6 +24,9 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" @@ -34,9 +37,6 @@ import ( querypb "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/util/streamrpc" "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" ) type RemoteWorkerSuite struct { diff --git a/internal/querynodev2/collector/average_test.go b/internal/querynodev2/collector/average_test.go index e1eaa1bdca..9306ec7fac 100644 --- a/internal/querynodev2/collector/average_test.go +++ b/internal/querynodev2/collector/average_test.go @@ -34,17 +34,17 @@ func (suite *AverageCollectorTestSuite) SetupSuite() { } func (suite *AverageCollectorTestSuite) TestBasic() { - //Get average not register + // Get average not register _, err := suite.average.Average(suite.label) suite.Error(err) - //register and get + // register and get suite.average.Register(suite.label) value, err := suite.average.Average(suite.label) suite.Equal(float64(0), value) suite.NoError(err) - //add and get + // add and get sum := 4 for i := 0; i <= sum; i++ { suite.average.Add(suite.label, float64(i)) diff --git a/internal/querynodev2/collector/collector.go 
b/internal/querynodev2/collector/collector.go index c39fde724e..797a29d319 100644 --- a/internal/querynodev2/collector/collector.go +++ b/internal/querynodev2/collector/collector.go @@ -17,10 +17,11 @@ package collector import ( + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/ratelimitutil" - "go.uber.org/zap" ) var Average *averageCollector @@ -65,11 +66,11 @@ func init() { Average = newAverageCollector() Counter = newCounter() - //init rate Metric + // init rate Metric for _, label := range RateMetrics() { Rate.Register(label) } - //init average metric + // init average metric for _, label := range AverageMetrics() { Average.Register(label) diff --git a/internal/querynodev2/collector/counter_test.go b/internal/querynodev2/collector/counter_test.go index 74ce05ff08..731dd6477b 100644 --- a/internal/querynodev2/collector/counter_test.go +++ b/internal/querynodev2/collector/counter_test.go @@ -34,26 +34,26 @@ func (suite *CounterTestSuite) SetupSuite() { } func (suite *CounterTestSuite) TestBasic() { - //get default value(zero) + // get default value(zero) value := suite.counter.Get(suite.label) suite.Equal(int64(0), value) - //get after inc + // get after inc suite.counter.Inc(suite.label, 3) value = suite.counter.Get(suite.label) suite.Equal(int64(3), value) - //remove + // remove suite.counter.Remove(suite.label) value = suite.counter.Get(suite.label) suite.Equal(int64(0), value) - //get after dec + // get after dec suite.counter.Dec(suite.label, 3) value = suite.counter.Get(suite.label) suite.Equal(int64(-3), value) - //remove + // remove suite.counter.Remove(suite.label) value = suite.counter.Get(suite.label) suite.Equal(int64(0), value) diff --git a/internal/querynodev2/delegator/delegator.go b/internal/querynodev2/delegator/delegator.go index 0c77fd183d..4d9c562255 100644 --- a/internal/querynodev2/delegator/delegator.go +++ b/internal/querynodev2/delegator/delegator.go @@ -61,7 +61,7 @@ type ShardDelegator interface { QueryStream(ctx context.Context, req *querypb.QueryRequest, srv streamrpc.QueryStreamServer) error GetStatistics(ctx context.Context, req *querypb.GetStatisticsRequest) ([]*internalpb.GetStatisticsResponse, error) - //data + // data ProcessInsert(insertRecords map[int64]*InsertData) ProcessDelete(deleteData []*DeleteData, ts uint64) LoadGrowing(ctx context.Context, infos []*querypb.SegmentLoadInfo, version int64) error @@ -113,7 +113,7 @@ type shardDelegator struct { // L0 delete buffer deleteMut sync.Mutex deleteBuffer deletebuffer.DeleteBuffer[*deletebuffer.Item] - //dispatcherClient msgdispatcher.Client + // dispatcherClient msgdispatcher.Client factory msgstream.Factory sf conc.Singleflight[struct{}] @@ -634,7 +634,8 @@ func (sd *shardDelegator) Close() { // NewShardDelegator creates a new ShardDelegator instance with all fields initialized. 
func NewShardDelegator(collectionID UniqueID, replicaID UniqueID, channel string, version int64, workerManager cluster.Manager, manager *segments.Manager, tsafeManager tsafe.Manager, loader segments.Loader, - factory msgstream.Factory, startTs uint64) (ShardDelegator, error) { + factory msgstream.Factory, startTs uint64, +) (ShardDelegator, error) { log := log.With(zap.Int64("collectionID", collectionID), zap.Int64("replicaID", replicaID), zap.String("channel", channel), diff --git a/internal/querynodev2/delegator/delegator_data.go b/internal/querynodev2/delegator/delegator_data.go index 5accd82202..15a051e486 100644 --- a/internal/querynodev2/delegator/delegator_data.go +++ b/internal/querynodev2/delegator/delegator_data.go @@ -505,7 +505,6 @@ func (sd *shardDelegator) loadStreamDelete(ctx context.Context, } func (sd *shardDelegator) readDeleteFromMsgstream(ctx context.Context, position *msgpb.MsgPosition, safeTs uint64, candidate *pkoracle.BloomFilterSet) (*storage.DeleteData, error) { - log := sd.getLogger(ctx).With( zap.String("channel", position.ChannelName), zap.Int64("segmentID", candidate.ID()), @@ -657,7 +656,8 @@ func (sd *shardDelegator) ReleaseSegments(ctx context.Context, req *querypb.Rele } func (sd *shardDelegator) SyncTargetVersion(newVersion int64, growingInTarget []int64, - sealedInTarget []int64, droppedInTarget []int64) { + sealedInTarget []int64, droppedInTarget []int64, +) { growings := sd.segmentManager.GetBy( segments.WithType(segments.SegmentTypeGrowing), segments.WithChannel(sd.vchannelName), diff --git a/internal/querynodev2/delegator/delegator_test.go b/internal/querynodev2/delegator/delegator_test.go index 06bb2a5f05..0ef707b44a 100644 --- a/internal/querynodev2/delegator/delegator_test.go +++ b/internal/querynodev2/delegator/delegator_test.go @@ -71,7 +71,6 @@ func (s *DelegatorSuite) SetupSuite() { } func (s *DelegatorSuite) TearDownSuite() { - } func (s *DelegatorSuite) SetupTest() { @@ -309,7 +308,7 @@ func (s *DelegatorSuite) TestSearch() { _, err := s.delegator.Search(ctx, &querypb.SearchRequest{ Req: &internalpb.SearchRequest{ Base: commonpbutil.NewMsgBase(), - //not load partation -1,will return error + // not load partation -1,will return error PartitionIDs: []int64{-1}, }, DmlChannels: []string{s.vchannelName}, @@ -512,7 +511,7 @@ func (s *DelegatorSuite) TestQuery() { _, err := s.delegator.Query(ctx, &querypb.QueryRequest{ Req: &internalpb.RetrieveRequest{ Base: commonpbutil.NewMsgBase(), - //not load partation -1,will return error + // not load partation -1,will return error PartitionIDs: []int64{-1}, }, DmlChannels: []string{s.vchannelName}, @@ -727,7 +726,7 @@ func (s *DelegatorSuite) TestQueryStream() { err := s.delegator.QueryStream(ctx, &querypb.QueryRequest{ Req: &internalpb.RetrieveRequest{ Base: commonpbutil.NewMsgBase(), - //not load partation -1,will return error + // not load partation -1,will return error PartitionIDs: []int64{-1}, }, DmlChannels: []string{s.vchannelName}, @@ -882,7 +881,6 @@ func (s *DelegatorSuite) TestQueryStream() { s.Error(err) }) - } func (s *DelegatorSuite) TestGetStats() { diff --git a/internal/querynodev2/delegator/deletebuffer/delete_buffer.go b/internal/querynodev2/delegator/deletebuffer/delete_buffer.go index 970c9b2355..c652ae1f27 100644 --- a/internal/querynodev2/delegator/deletebuffer/delete_buffer.go +++ b/internal/querynodev2/delegator/deletebuffer/delete_buffer.go @@ -23,9 +23,7 @@ import ( "github.com/cockroachdb/errors" ) -var ( - errBufferFull = errors.New("buffer full") -) +var errBufferFull = 
errors.New("buffer full") type timed interface { Timestamp() uint64 diff --git a/internal/querynodev2/delegator/deletebuffer/delete_buffer_test.go b/internal/querynodev2/delegator/deletebuffer/delete_buffer_test.go index 86b9217fd1..e580916ac7 100644 --- a/internal/querynodev2/delegator/deletebuffer/delete_buffer_test.go +++ b/internal/querynodev2/delegator/deletebuffer/delete_buffer_test.go @@ -19,9 +19,10 @@ package deletebuffer import ( "testing" - "github.com/milvus-io/milvus/internal/storage" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + + "github.com/milvus-io/milvus/internal/storage" ) func TestSkipListDeleteBuffer(t *testing.T) { diff --git a/internal/querynodev2/delegator/deletebuffer/delete_item.go b/internal/querynodev2/delegator/deletebuffer/delete_item.go index df85f7b37c..abc89baa0c 100644 --- a/internal/querynodev2/delegator/deletebuffer/delete_item.go +++ b/internal/querynodev2/delegator/deletebuffer/delete_item.go @@ -1,8 +1,9 @@ package deletebuffer import ( - "github.com/milvus-io/milvus/internal/storage" "github.com/samber/lo" + + "github.com/milvus-io/milvus/internal/storage" ) // Item wraps cache item as `timed`. diff --git a/internal/querynodev2/delegator/deletebuffer/delete_item_test.go b/internal/querynodev2/delegator/deletebuffer/delete_item_test.go index a35cab2688..59bf9d9793 100644 --- a/internal/querynodev2/delegator/deletebuffer/delete_item_test.go +++ b/internal/querynodev2/delegator/deletebuffer/delete_item_test.go @@ -3,8 +3,9 @@ package deletebuffer import ( "testing" - "github.com/milvus-io/milvus/internal/storage" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/internal/storage" ) func TestDeleteBufferItem(t *testing.T) { diff --git a/internal/querynodev2/delegator/distribution.go b/internal/querynodev2/delegator/distribution.go index fc949b6f22..a982f17f67 100644 --- a/internal/querynodev2/delegator/distribution.go +++ b/internal/querynodev2/delegator/distribution.go @@ -19,12 +19,12 @@ package delegator import ( "sync" + "github.com/samber/lo" "go.uber.org/atomic" "go.uber.org/zap" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/samber/lo" ) const ( diff --git a/internal/querynodev2/delegator/mock_delegator.go b/internal/querynodev2/delegator/mock_delegator.go index 118012eaee..279eb88ee7 100644 --- a/internal/querynodev2/delegator/mock_delegator.go +++ b/internal/querynodev2/delegator/mock_delegator.go @@ -120,7 +120,7 @@ type MockShardDelegator_GetSegmentInfo_Call struct { } // GetSegmentInfo is a helper method to define mock.On call -// - readable bool +// - readable bool func (_e *MockShardDelegator_Expecter) GetSegmentInfo(readable interface{}) *MockShardDelegator_GetSegmentInfo_Call { return &MockShardDelegator_GetSegmentInfo_Call{Call: _e.mock.On("GetSegmentInfo", readable)} } @@ -166,8 +166,8 @@ type MockShardDelegator_GetStatistics_Call struct { } // GetStatistics is a helper method to define mock.On call -// - ctx context.Context -// - req *querypb.GetStatisticsRequest +// - ctx context.Context +// - req *querypb.GetStatisticsRequest func (_e *MockShardDelegator_Expecter) GetStatistics(ctx interface{}, req interface{}) *MockShardDelegator_GetStatistics_Call { return &MockShardDelegator_GetStatistics_Call{Call: _e.mock.On("GetStatistics", ctx, req)} } @@ -240,9 +240,9 @@ type MockShardDelegator_LoadGrowing_Call struct { } // LoadGrowing is a helper method to define mock.On call -// - ctx context.Context -// - infos []*querypb.SegmentLoadInfo 
-// - version int64 +// - ctx context.Context +// - infos []*querypb.SegmentLoadInfo +// - version int64 func (_e *MockShardDelegator_Expecter) LoadGrowing(ctx interface{}, infos interface{}, version interface{}) *MockShardDelegator_LoadGrowing_Call { return &MockShardDelegator_LoadGrowing_Call{Call: _e.mock.On("LoadGrowing", ctx, infos, version)} } @@ -279,8 +279,8 @@ type MockShardDelegator_LoadSegments_Call struct { } // LoadSegments is a helper method to define mock.On call -// - ctx context.Context -// - req *querypb.LoadSegmentsRequest +// - ctx context.Context +// - req *querypb.LoadSegmentsRequest func (_e *MockShardDelegator_Expecter) LoadSegments(ctx interface{}, req interface{}) *MockShardDelegator_LoadSegments_Call { return &MockShardDelegator_LoadSegments_Call{Call: _e.mock.On("LoadSegments", ctx, req)} } @@ -308,8 +308,8 @@ type MockShardDelegator_ProcessDelete_Call struct { } // ProcessDelete is a helper method to define mock.On call -// - deleteData []*DeleteData -// - ts uint64 +// - deleteData []*DeleteData +// - ts uint64 func (_e *MockShardDelegator_Expecter) ProcessDelete(deleteData interface{}, ts interface{}) *MockShardDelegator_ProcessDelete_Call { return &MockShardDelegator_ProcessDelete_Call{Call: _e.mock.On("ProcessDelete", deleteData, ts)} } @@ -337,7 +337,7 @@ type MockShardDelegator_ProcessInsert_Call struct { } // ProcessInsert is a helper method to define mock.On call -// - insertRecords map[int64]*InsertData +// - insertRecords map[int64]*InsertData func (_e *MockShardDelegator_Expecter) ProcessInsert(insertRecords interface{}) *MockShardDelegator_ProcessInsert_Call { return &MockShardDelegator_ProcessInsert_Call{Call: _e.mock.On("ProcessInsert", insertRecords)} } @@ -383,8 +383,8 @@ type MockShardDelegator_Query_Call struct { } // Query is a helper method to define mock.On call -// - ctx context.Context -// - req *querypb.QueryRequest +// - ctx context.Context +// - req *querypb.QueryRequest func (_e *MockShardDelegator_Expecter) Query(ctx interface{}, req interface{}) *MockShardDelegator_Query_Call { return &MockShardDelegator_Query_Call{Call: _e.mock.On("Query", ctx, req)} } @@ -421,9 +421,9 @@ type MockShardDelegator_QueryStream_Call struct { } // QueryStream is a helper method to define mock.On call -// - ctx context.Context -// - req *querypb.QueryRequest -// - srv streamrpc.QueryStreamServer +// - ctx context.Context +// - req *querypb.QueryRequest +// - srv streamrpc.QueryStreamServer func (_e *MockShardDelegator_Expecter) QueryStream(ctx interface{}, req interface{}, srv interface{}) *MockShardDelegator_QueryStream_Call { return &MockShardDelegator_QueryStream_Call{Call: _e.mock.On("QueryStream", ctx, req, srv)} } @@ -460,9 +460,9 @@ type MockShardDelegator_ReleaseSegments_Call struct { } // ReleaseSegments is a helper method to define mock.On call -// - ctx context.Context -// - req *querypb.ReleaseSegmentsRequest -// - force bool +// - ctx context.Context +// - req *querypb.ReleaseSegmentsRequest +// - force bool func (_e *MockShardDelegator_Expecter) ReleaseSegments(ctx interface{}, req interface{}, force interface{}) *MockShardDelegator_ReleaseSegments_Call { return &MockShardDelegator_ReleaseSegments_Call{Call: _e.mock.On("ReleaseSegments", ctx, req, force)} } @@ -508,8 +508,8 @@ type MockShardDelegator_Search_Call struct { } // Search is a helper method to define mock.On call -// - ctx context.Context -// - req *querypb.SearchRequest +// - ctx context.Context +// - req *querypb.SearchRequest func (_e *MockShardDelegator_Expecter) Search(ctx 
interface{}, req interface{}) *MockShardDelegator_Search_Call { return &MockShardDelegator_Search_Call{Call: _e.mock.On("Search", ctx, req)} } @@ -607,8 +607,8 @@ type MockShardDelegator_SyncDistribution_Call struct { } // SyncDistribution is a helper method to define mock.On call -// - ctx context.Context -// - entries ...SegmentEntry +// - ctx context.Context +// - entries ...SegmentEntry func (_e *MockShardDelegator_Expecter) SyncDistribution(ctx interface{}, entries ...interface{}) *MockShardDelegator_SyncDistribution_Call { return &MockShardDelegator_SyncDistribution_Call{Call: _e.mock.On("SyncDistribution", append([]interface{}{ctx}, entries...)...)} @@ -719,7 +719,8 @@ func (_c *MockShardDelegator_Version_Call) RunAndReturn(run func() int64) *MockS func NewMockShardDelegator(t interface { mock.TestingT Cleanup(func()) -}) *MockShardDelegator { +}, +) *MockShardDelegator { mock := &MockShardDelegator{} mock.Mock.Test(t) diff --git a/internal/querynodev2/delegator/types.go b/internal/querynodev2/delegator/types.go index 597ee6af95..b981cbbfbd 100644 --- a/internal/querynodev2/delegator/types.go +++ b/internal/querynodev2/delegator/types.go @@ -50,10 +50,8 @@ type TSafeUpdater interface { UnregisterChannel(string) error } -var ( - // ErrTsLagTooLarge serviceable and guarantee lag too large. - ErrTsLagTooLarge = errors.New("Timestamp lag too large") -) +// ErrTsLagTooLarge serviceable and guarantee lag too large. +var ErrTsLagTooLarge = errors.New("Timestamp lag too large") // WrapErrTsLagTooLarge wraps ErrTsLagTooLarge with lag and max value. func WrapErrTsLagTooLarge(duration time.Duration, maxLag time.Duration) error { diff --git a/internal/querynodev2/local_worker.go b/internal/querynodev2/local_worker.go index 394fdfb5d6..0ef6af5013 100644 --- a/internal/querynodev2/local_worker.go +++ b/internal/querynodev2/local_worker.go @@ -20,6 +20,9 @@ import ( "context" "fmt" + "github.com/samber/lo" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/querypb" @@ -27,8 +30,6 @@ import ( "github.com/milvus-io/milvus/internal/querynodev2/segments" "github.com/milvus-io/milvus/internal/util/streamrpc" "github.com/milvus-io/milvus/pkg/log" - "github.com/samber/lo" - "go.uber.org/zap" ) var _ cluster.Worker = &LocalWorker{} diff --git a/internal/querynodev2/local_worker_test.go b/internal/querynodev2/local_worker_test.go index 3a68525cfb..67a55c0490 100644 --- a/internal/querynodev2/local_worker_test.go +++ b/internal/querynodev2/local_worker_test.go @@ -18,7 +18,6 @@ package querynodev2 import ( "context" - "testing" "github.com/samber/lo" diff --git a/internal/querynodev2/mock_data.go b/internal/querynodev2/mock_data.go index d0387ab680..ef884a3234 100644 --- a/internal/querynodev2/mock_data.go +++ b/internal/querynodev2/mock_data.go @@ -108,7 +108,7 @@ func genPlaceHolderGroup(nq int64) ([]byte, error) { Values: make([][]byte, 0), } for i := int64(0); i < nq; i++ { - var vec = make([]float32, defaultDim) + vec := make([]float32, defaultDim) for j := 0; j < defaultDim; j++ { vec[j] = rand.Float32() } diff --git a/internal/querynodev2/pipeline/delete_node.go b/internal/querynodev2/pipeline/delete_node.go index 91c3ce9bce..a408f98f91 100644 --- a/internal/querynodev2/pipeline/delete_node.go +++ b/internal/querynodev2/pipeline/delete_node.go @@ -74,11 +74,11 @@ func (dNode *deleteNode) Operate(in Msg) Msg { } if len(deleteDatas) > 0 { - //do Delete, use ts range max as ts + 
// do Delete, use ts range max as ts dNode.delegator.ProcessDelete(lo.Values(deleteDatas), nodeMsg.timeRange.timestampMax) } - //update tSafe + // update tSafe err := dNode.tSafeManager.Set(dNode.channel, nodeMsg.timeRange.timestampMax) if err != nil { // should not happen, QueryNode should addTSafe before start pipeline diff --git a/internal/querynodev2/pipeline/delete_node_test.go b/internal/querynodev2/pipeline/delete_node_test.go index 52b430ac57..1c4fd45d6f 100644 --- a/internal/querynodev2/pipeline/delete_node_test.go +++ b/internal/querynodev2/pipeline/delete_node_test.go @@ -19,18 +19,19 @@ package pipeline import ( "testing" + "github.com/samber/lo" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus/internal/querynodev2/delegator" "github.com/milvus-io/milvus/internal/querynodev2/segments" "github.com/milvus-io/milvus/internal/querynodev2/tsafe" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/samber/lo" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" ) type DeleteNodeSuite struct { suite.Suite - //datas + // datas collectionID int64 collectionName string partitionIDs []int64 @@ -38,9 +39,9 @@ type DeleteNodeSuite struct { channel string timeRange TimeRange - //dependency + // dependency tSafeManager TSafeManager - //mocks + // mocks manager *segments.Manager delegator *delegator.MockShardDelegator } @@ -51,7 +52,7 @@ func (suite *DeleteNodeSuite) SetupSuite() { suite.collectionName = "test-collection" suite.partitionIDs = []int64{11, 22} suite.channel = "test-channel" - //segment own data row which‘s pk same with segment‘s ID + // segment own data row which‘s pk same with segment‘s ID suite.deletePKs = []int64{1, 2, 3, 4} suite.timeRange = TimeRange{ timestampMin: 0, @@ -74,7 +75,7 @@ func (suite *DeleteNodeSuite) buildDeleteNodeMsg() *deleteNodeMsg { } func (suite *DeleteNodeSuite) TestBasic() { - //mock + // mock mockCollectionManager := segments.NewMockCollectionManager(suite.T()) mockSegmentManager := segments.NewMockSegmentManager(suite.T()) suite.manager = &segments.Manager{ @@ -90,16 +91,16 @@ func (suite *DeleteNodeSuite) TestBasic() { } } }) - //init dependency + // init dependency suite.tSafeManager = tsafe.NewTSafeReplica() suite.tSafeManager.Add(suite.channel, 0) - //build delete node and data + // build delete node and data node := newDeleteNode(suite.collectionID, suite.channel, suite.manager, suite.tSafeManager, suite.delegator, 8) in := suite.buildDeleteNodeMsg() - //run + // run out := node.Operate(in) suite.Nil(out) - //check tsafe + // check tsafe tt, err := suite.tSafeManager.Get(suite.channel) suite.NoError(err) suite.Equal(suite.timeRange.timestampMax, tt) diff --git a/internal/querynodev2/pipeline/filter_node.go b/internal/querynodev2/pipeline/filter_node.go index a523e9f838..ca77022d10 100644 --- a/internal/querynodev2/pipeline/filter_node.go +++ b/internal/querynodev2/pipeline/filter_node.go @@ -69,7 +69,7 @@ func (fNode *filterNode) Operate(in Msg) Msg { WithLabelValues(fmt.Sprint(paramtable.GetNodeID()), metrics.InsertLabel, fmt.Sprint(fNode.collectionID)). 
Set(float64(tsoutil.SubByNow(streamMsgPack.EndTs))) - //Get collection from collection manager + // Get collection from collection manager collection := fNode.manager.Collection.Get(fNode.collectionID) if collection == nil { log.Fatal("collection not found in meta", zap.Int64("collectionID", fNode.collectionID)) @@ -84,7 +84,7 @@ func (fNode *filterNode) Operate(in Msg) Msg { }, } - //add msg to out if msg pass check of filter + // add msg to out if msg pass check of filter for _, msg := range streamMsgPack.Msgs { err := fNode.filtrate(collection, msg) if err != nil { @@ -105,7 +105,6 @@ func (fNode *filterNode) Operate(in Msg) Msg { // filtrate message with filter policy func (fNode *filterNode) filtrate(c *Collection, msg msgstream.TsMsg) error { - switch msg.Type() { case commonpb.MsgType_Insert: insertMsg := msg.(*msgstream.InsertMsg) diff --git a/internal/querynodev2/pipeline/filter_node_test.go b/internal/querynodev2/pipeline/filter_node_test.go index 2bd51a8318..8d5eda9f33 100644 --- a/internal/querynodev2/pipeline/filter_node_test.go +++ b/internal/querynodev2/pipeline/filter_node_test.go @@ -34,7 +34,7 @@ import ( // test of filter node type FilterNodeSuite struct { suite.Suite - //datas + // datas collectionID int64 partitionIDs []int64 channel string @@ -44,10 +44,10 @@ type FilterNodeSuite struct { excludedSegmentIDs []int64 insertSegmentIDs []int64 deleteSegmentSum int - //segmentID of msg invalid because empty of not aligned + // segmentID of msg invalid because empty of not aligned errSegmentID int64 - //mocks + // mocks manager *segments.Manager } @@ -63,7 +63,7 @@ func (suite *FilterNodeSuite) SetupSuite() { suite.deleteSegmentSum = 4 suite.errSegmentID = 7 - //init excludedSegment + // init excludedSegment suite.excludedSegments = typeutil.NewConcurrentMap[int64, *datapb.SegmentInfo]() for _, id := range suite.excludedSegmentIDs { suite.excludedSegments.Insert(id, &datapb.SegmentInfo{ @@ -76,10 +76,10 @@ func (suite *FilterNodeSuite) SetupSuite() { // test filter node with collection load collection func (suite *FilterNodeSuite) TestWithLoadCollection() { - //data + // data suite.validSegmentIDs = []int64{2, 3, 4, 5, 6} - //mock + // mock collection := segments.NewCollectionWithoutSchema(suite.collectionID, querypb.LoadType_LoadCollection) for _, partitionID := range suite.partitionIDs { collection.AddPartition(partitionID) @@ -111,10 +111,10 @@ func (suite *FilterNodeSuite) TestWithLoadCollection() { // test filter node with collection load partition func (suite *FilterNodeSuite) TestWithLoadPartation() { - //data + // data suite.validSegmentIDs = []int64{2, 3, 4, 5, 6} - //mock + // mock collection := segments.NewCollectionWithoutSchema(suite.collectionID, querypb.LoadType_LoadPartition) collection.AddPartition(suite.partitionIDs[0]) @@ -149,43 +149,43 @@ func (suite *FilterNodeSuite) buildMsgPack() *msgstream.MsgPack { Msgs: []msgstream.TsMsg{}, } - //add valid insert + // add valid insert for _, id := range suite.insertSegmentIDs { insertMsg := buildInsertMsg(suite.collectionID, suite.partitionIDs[id%2], id, suite.channel, 1) msgPack.Msgs = append(msgPack.Msgs, insertMsg) } - //add valid delete + // add valid delete for i := 0; i < suite.deleteSegmentSum; i++ { deleteMsg := buildDeleteMsg(suite.collectionID, suite.partitionIDs[i%2], suite.channel, 1) msgPack.Msgs = append(msgPack.Msgs, deleteMsg) } - //add invalid msg + // add invalid msg - //segment in excludedSegments - //some one end timestamp befroe dmlPosition timestamp will be invalid + // segment in 
excludedSegments + // some one end timestamp befroe dmlPosition timestamp will be invalid for _, id := range suite.excludedSegmentIDs { insertMsg := buildInsertMsg(suite.collectionID, suite.partitionIDs[id%2], id, suite.channel, 1) insertMsg.EndTimestamp = uint64(id) msgPack.Msgs = append(msgPack.Msgs, insertMsg) } - //empty msg + // empty msg insertMsg := buildInsertMsg(suite.collectionID, suite.partitionIDs[0], suite.errSegmentID, suite.channel, 0) msgPack.Msgs = append(msgPack.Msgs, insertMsg) deleteMsg := buildDeleteMsg(suite.collectionID, suite.partitionIDs[0], suite.channel, 0) msgPack.Msgs = append(msgPack.Msgs, deleteMsg) - //msg not target + // msg not target insertMsg = buildInsertMsg(suite.collectionID+1, 1, 0, "Unknown", 1) msgPack.Msgs = append(msgPack.Msgs, insertMsg) deleteMsg = buildDeleteMsg(suite.collectionID+1, 1, "Unknown", 1) msgPack.Msgs = append(msgPack.Msgs, deleteMsg) - //msg not aligned + // msg not aligned insertMsg = buildInsertMsg(suite.collectionID, suite.partitionIDs[0], suite.errSegmentID, suite.channel, 1) insertMsg.Timestamps = []uint64{} msgPack.Msgs = append(msgPack.Msgs, insertMsg) diff --git a/internal/querynodev2/pipeline/insert_node.go b/internal/querynodev2/pipeline/insert_node.go index 8e7c060a6b..16c588bbec 100644 --- a/internal/querynodev2/pipeline/insert_node.go +++ b/internal/querynodev2/pipeline/insert_node.go @@ -101,7 +101,7 @@ func (iNode *insertNode) Operate(in Msg) Msg { panic("insertNode with collection not exist") } - //get InsertData and merge datas of same segment + // get InsertData and merge datas of same segment for _, msg := range nodeMsg.insertMsgs { iNode.addInsertData(insertDatas, msg, collection) } diff --git a/internal/querynodev2/pipeline/insert_node_test.go b/internal/querynodev2/pipeline/insert_node_test.go index a3b58aabbc..6d6979fa9b 100644 --- a/internal/querynodev2/pipeline/insert_node_test.go +++ b/internal/querynodev2/pipeline/insert_node_test.go @@ -19,26 +19,27 @@ package pipeline import ( "testing" + "github.com/samber/lo" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/querynodev2/delegator" "github.com/milvus-io/milvus/internal/querynodev2/segments" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/samber/lo" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" ) type InsertNodeSuite struct { suite.Suite - //datas + // datas collectionName string collectionID int64 partitionID int64 channel string insertSegmentIDs []int64 deleteSegmentSum int - //mocks + // mocks manager *segments.Manager delegator *delegator.MockShardDelegator } @@ -56,14 +57,14 @@ func (suite *InsertNodeSuite) SetupSuite() { } func (suite *InsertNodeSuite) TestBasic() { - //data + // data schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64) in := suite.buildInsertNodeMsg(schema) collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), querypb.LoadType_LoadCollection) collection.AddPartition(suite.partitionID) - //init mock + // init mock mockCollectionManager := segments.NewMockCollectionManager(suite.T()) mockCollectionManager.EXPECT().Get(suite.collectionID).Return(collection) @@ -81,7 +82,7 @@ func (suite *InsertNodeSuite) TestBasic() { } }) - //TODO mock a delgator for test + // TODO mock a delgator for test node := 
newInsertNode(suite.collectionID, suite.channel, suite.manager, suite.delegator, 8) out := node.Operate(in) @@ -97,7 +98,7 @@ func (suite *InsertNodeSuite) TestDataTypeNotSupported() { collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), querypb.LoadType_LoadCollection) collection.AddPartition(suite.partitionID) - //init mock + // init mock mockCollectionManager := segments.NewMockCollectionManager(suite.T()) mockCollectionManager.EXPECT().Get(suite.collectionID).Return(collection) @@ -116,7 +117,7 @@ func (suite *InsertNodeSuite) TestDataTypeNotSupported() { } } - //TODO mock a delgator for test + // TODO mock a delgator for test node := newInsertNode(suite.collectionID, suite.channel, suite.manager, suite.delegator, 8) suite.Panics(func() { node.Operate(in) diff --git a/internal/querynodev2/pipeline/manager.go b/internal/querynodev2/pipeline/manager.go index 1d486e9fc3..cf4a746d7d 100644 --- a/internal/querynodev2/pipeline/manager.go +++ b/internal/querynodev2/pipeline/manager.go @@ -77,7 +77,7 @@ func (m *manager) Add(collectionID UniqueID, channel string) (Pipeline, error) { return pipeline, nil } - //get shard delegator for add growing in pipeline + // get shard delegator for add growing in pipeline delegator, ok := m.delegators.Get(channel) if !ok { return nil, merr.WrapErrChannelNotFound(channel, "delegator not found") @@ -132,7 +132,7 @@ func (m *manager) Start(channels ...string) error { m.mu.Lock() defer m.mu.Unlock() - //check pipelie all exist before start + // check pipelie all exist before start for _, channel := range channels { if _, ok := m.channel2Pipeline[channel]; !ok { reason := fmt.Sprintf("pipeline with channel %s not exist", channel) diff --git a/internal/querynodev2/pipeline/manager_test.go b/internal/querynodev2/pipeline/manager_test.go index 38c0cfec70..849857c1fb 100644 --- a/internal/querynodev2/pipeline/manager_test.go +++ b/internal/querynodev2/pipeline/manager_test.go @@ -35,14 +35,14 @@ import ( type PipelineManagerTestSuite struct { suite.Suite - //data + // data collectionID int64 channel string - //dependencies + // dependencies tSafeManager TSafeManager delegators *typeutil.ConcurrentMap[string, delegator.ShardDelegator] - //mocks + // mocks segmentManager *segments.MockSegmentManager collectionManager *segments.MockCollectionManager delegator *delegator.MockShardDelegator @@ -57,13 +57,13 @@ func (suite *PipelineManagerTestSuite) SetupSuite() { func (suite *PipelineManagerTestSuite) SetupTest() { paramtable.Init() - //init dependency + // init dependency // init tsafeManager suite.tSafeManager = tsafe.NewTSafeReplica() suite.tSafeManager.Add(suite.channel, 0) suite.delegators = typeutil.NewConcurrentMap[string, delegator.ShardDelegator]() - //init mock + // init mock // init manager suite.collectionManager = segments.NewMockCollectionManager(suite.T()) suite.segmentManager = segments.NewMockSegmentManager(suite.T()) @@ -75,14 +75,14 @@ func (suite *PipelineManagerTestSuite) SetupTest() { } func (suite *PipelineManagerTestSuite) TestBasic() { - //init mock + // init mock // mock collection manager suite.collectionManager.EXPECT().Get(suite.collectionID).Return(&segments.Collection{}) // mock mq factory suite.msgDispatcher.EXPECT().Register(mock.Anything, suite.channel, mock.Anything, mqwrapper.SubscriptionPositionUnknown).Return(suite.msgChan, nil) suite.msgDispatcher.EXPECT().Deregister(suite.channel) - //build manager + // build manager manager := &segments.Manager{ Collection: 
suite.collectionManager, Segment: suite.segmentManager, @@ -90,24 +90,24 @@ func (suite *PipelineManagerTestSuite) TestBasic() { pipelineManager := NewManager(manager, suite.tSafeManager, suite.msgDispatcher, suite.delegators) defer pipelineManager.Close() - //Add pipeline + // Add pipeline _, err := pipelineManager.Add(suite.collectionID, suite.channel) suite.NoError(err) suite.Equal(1, pipelineManager.Num()) - //Get pipeline + // Get pipeline pipeline := pipelineManager.Get(suite.channel) suite.NotNil(pipeline) - //Init Consumer + // Init Consumer err = pipeline.ConsumeMsgStream(&msgpb.MsgPosition{}) suite.NoError(err) - //Start pipeline + // Start pipeline err = pipelineManager.Start(suite.channel) suite.NoError(err) - //Remove pipeline + // Remove pipeline pipelineManager.Remove(suite.channel) suite.Equal(0, pipelineManager.Num()) } diff --git a/internal/querynodev2/pipeline/pipeline_test.go b/internal/querynodev2/pipeline/pipeline_test.go index d32c7e6bfa..a1e13e2c77 100644 --- a/internal/querynodev2/pipeline/pipeline_test.go +++ b/internal/querynodev2/pipeline/pipeline_test.go @@ -37,7 +37,7 @@ import ( type PipelineTestSuite struct { suite.Suite - //datas + // datas collectionName string collectionID int64 partitionIDs []int64 @@ -45,10 +45,10 @@ type PipelineTestSuite struct { insertSegmentIDs []int64 deletePKs []int64 - //dependencies + // dependencies tSafeManager TSafeManager - //mocks + // mocks segmentManager *segments.MockSegmentManager collectionManager *segments.MockCollectionManager delegator *delegator.MockShardDelegator @@ -89,7 +89,7 @@ func (suite *PipelineTestSuite) buildMsgPack(schema *schemapb.CollectionSchema) func (suite *PipelineTestSuite) SetupTest() { paramtable.Init() - //init mock + // init mock // init manager suite.collectionManager = segments.NewMockCollectionManager(suite.T()) suite.segmentManager = segments.NewMockSegmentManager(suite.T()) @@ -98,14 +98,14 @@ func (suite *PipelineTestSuite) SetupTest() { // init mq dispatcher suite.msgDispatcher = msgdispatcher.NewMockClient(suite.T()) - //init dependency + // init dependency // init tsafeManager suite.tSafeManager = tsafe.NewTSafeReplica() suite.tSafeManager.Add(suite.channel, 0) } func (suite *PipelineTestSuite) TestBasic() { - //init mock + // init mock // mock collection manager schema := segments.GenTestCollectionSchema(suite.collectionName, schemapb.DataType_Int64) collection := segments.NewCollection(suite.collectionID, schema, segments.GenTestIndexMeta(suite.collectionID, schema), querypb.LoadType_LoadCollection) @@ -131,7 +131,7 @@ func (suite *PipelineTestSuite) TestBasic() { } } }) - //build pipleine + // build pipleine manager := &segments.Manager{ Collection: suite.collectionManager, Segment: suite.segmentManager, @@ -139,7 +139,7 @@ func (suite *PipelineTestSuite) TestBasic() { pipeline, err := NewPipeLine(suite.collectionID, suite.channel, manager, suite.tSafeManager, suite.msgDispatcher, suite.delegator) suite.NoError(err) - //Init Consumer + // Init Consumer err = pipeline.ConsumeMsgStream(&msgpb.MsgPosition{}) suite.NoError(err) @@ -157,7 +157,7 @@ func (suite *PipelineTestSuite) TestBasic() { // wait pipeline work <-listener.On() - //check tsafe + // check tsafe tsafe, err := suite.tSafeManager.Get(suite.channel) suite.NoError(err) suite.Equal(in.EndTs, tsafe) diff --git a/internal/querynodev2/segments/bloom_filter_set_test.go b/internal/querynodev2/segments/bloom_filter_set_test.go index 25f2be58b5..a427737b4d 100644 --- a/internal/querynodev2/segments/bloom_filter_set_test.go +++ 
b/internal/querynodev2/segments/bloom_filter_set_test.go @@ -19,8 +19,9 @@ package segments import ( "testing" - "github.com/milvus-io/milvus/internal/storage" "github.com/stretchr/testify/suite" + + "github.com/milvus-io/milvus/internal/storage" ) type BloomFilterSetSuite struct { diff --git a/internal/querynodev2/segments/count_reducer.go b/internal/querynodev2/segments/count_reducer.go index 758030b34e..70a5f0dfb8 100644 --- a/internal/querynodev2/segments/count_reducer.go +++ b/internal/querynodev2/segments/count_reducer.go @@ -8,8 +8,7 @@ import ( "github.com/milvus-io/milvus/internal/util/funcutil" ) -type cntReducer struct { -} +type cntReducer struct{} func (r *cntReducer) Reduce(ctx context.Context, results []*internalpb.RetrieveResults) (*internalpb.RetrieveResults, error) { cnt := int64(0) @@ -23,8 +22,7 @@ func (r *cntReducer) Reduce(ctx context.Context, results []*internalpb.RetrieveR return funcutil.WrapCntToInternalResult(cnt), nil } -type cntReducerSegCore struct { -} +type cntReducerSegCore struct{} func (r *cntReducerSegCore) Reduce(ctx context.Context, results []*segcorepb.RetrieveResults) (*segcorepb.RetrieveResults, error) { cnt := int64(0) diff --git a/internal/querynodev2/segments/count_reducer_test.go b/internal/querynodev2/segments/count_reducer_test.go index 3dd9094bc6..ba33c2d305 100644 --- a/internal/querynodev2/segments/count_reducer_test.go +++ b/internal/querynodev2/segments/count_reducer_test.go @@ -6,11 +6,10 @@ import ( "github.com/stretchr/testify/suite" - "github.com/milvus-io/milvus/internal/proto/segcorepb" - "github.com/milvus-io/milvus/internal/util/funcutil" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/proto/segcorepb" + "github.com/milvus-io/milvus/internal/util/funcutil" ) type InternalCntReducerSuite struct { diff --git a/internal/querynodev2/segments/load_field_data_info.go b/internal/querynodev2/segments/load_field_data_info.go index f465b600ae..5904457853 100644 --- a/internal/querynodev2/segments/load_field_data_info.go +++ b/internal/querynodev2/segments/load_field_data_info.go @@ -21,6 +21,7 @@ package segments #include "segcore/load_field_data_c.h" */ import "C" + import ( "unsafe" diff --git a/internal/querynodev2/segments/manager.go b/internal/querynodev2/segments/manager.go index 2f620d5564..baf728396f 100644 --- a/internal/querynodev2/segments/manager.go +++ b/internal/querynodev2/segments/manager.go @@ -29,6 +29,8 @@ import ( "fmt" "sync" + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/pkg/eventlog" "github.com/milvus-io/milvus/pkg/log" @@ -36,7 +38,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" . 
"github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) type SegmentFilter func(segment Segment) bool @@ -523,7 +524,7 @@ func (mgr *segmentManager) Clear() { func (mgr *segmentManager) updateMetric() { // update collection and partiation metric - var collections, partiations = make(Set[int64]), make(Set[int64]) + collections, partiations := make(Set[int64]), make(Set[int64]) for _, seg := range mgr.growingSegments { collections.Insert(seg.Collection()) partiations.Insert(seg.Partition()) diff --git a/internal/querynodev2/segments/mock_data.go b/internal/querynodev2/segments/mock_data.go index 8ecc23159f..a98e63307b 100644 --- a/internal/querynodev2/segments/mock_data.go +++ b/internal/querynodev2/segments/mock_data.go @@ -394,6 +394,7 @@ func generateStringArray(numRows int) []string { } return ret } + func generateArrayArray(numRows int) []*schemapb.ScalarField { ret := make([]*schemapb.ScalarField, 0, numRows) for i := 0; i < numRows; i++ { @@ -407,6 +408,7 @@ func generateArrayArray(numRows int) []*schemapb.ScalarField { } return ret } + func generateJSONArray(numRows int) [][]byte { ret := make([][]byte, 0, numRows) for i := 0; i < numRows; i++ { @@ -568,8 +570,10 @@ func GenTestScalarFieldData(dType schemapb.DataType, fieldName string, fieldID i Data: &schemapb.ScalarField_JsonData{ JsonData: &schemapb.JSONArray{ Data: generateJSONArray(numRows), - }}, - }} + }, + }, + }, + } default: panic("data type not supported") @@ -667,7 +671,7 @@ func SaveBinLog(ctx context.Context, } k := JoinIDPath(collectionID, partitionID, segmentID, fieldID) - //key := path.Join(defaultLocalStorage, "insert-log", k) + // key := path.Join(defaultLocalStorage, "insert-log", k) key := path.Join(chunkManager.RootPath(), "insert-log", k) kvs[key] = blob.Value fieldBinlog = append(fieldBinlog, &datapb.FieldBinlog{ @@ -690,7 +694,7 @@ func SaveBinLog(ctx context.Context, } k := JoinIDPath(collectionID, partitionID, segmentID, fieldID) - //key := path.Join(defaultLocalStorage, "stats-log", k) + // key := path.Join(defaultLocalStorage, "stats-log", k) key := path.Join(chunkManager.RootPath(), "stats-log", k) kvs[key] = blob.Value[:] statsBinlog = append(statsBinlog, &datapb.FieldBinlog{ @@ -708,7 +712,8 @@ func genStorageBlob(collectionID int64, partitionID int64, segmentID int64, msgLength int, - schema *schemapb.CollectionSchema) ([]*storage.Blob, []*storage.Blob, error) { + schema *schemapb.CollectionSchema, +) ([]*storage.Blob, []*storage.Blob, error) { tmpSchema := &schemapb.CollectionSchema{ Name: schema.Name, AutoID: schema.AutoID, @@ -842,7 +847,6 @@ func SaveDeltaLog(collectionID int64, segmentID int64, cm storage.ChunkManager, ) ([]*datapb.FieldBinlog, error) { - binlogWriter := storage.NewDeleteBinlogWriter(schemapb.DataType_String, collectionID, partitionID, segmentID) eventWriter, _ := binlogWriter.NextDeleteEventWriter() dData := &storage.DeleteData{ @@ -874,7 +878,7 @@ func SaveDeltaLog(collectionID int64, fieldBinlog := make([]*datapb.FieldBinlog, 0) log.Debug("[query node unittest] save delta log", zap.Int64("fieldID", pkFieldID)) key := JoinIDPath(collectionID, partitionID, segmentID, pkFieldID) - //keyPath := path.Join(defaultLocalStorage, "delta-log", key) + // keyPath := path.Join(defaultLocalStorage, "delta-log", key) keyPath := path.Join(cm.RootPath(), "delta-log", key) kvs[keyPath] = blob.Value[:] fieldBinlog = append(fieldBinlog, &datapb.FieldBinlog{ @@ -930,7 +934,7 @@ func GenAndSaveIndex(collectionID, partitionID, segmentID, fieldID int64, msgLen indexPaths := 
make([]string, 0) for _, index := range serializedIndexBlobs { - //indexPath := filepath.Join(defaultLocalStorage, strconv.Itoa(int(segmentID)), index.Key) + // indexPath := filepath.Join(defaultLocalStorage, strconv.Itoa(int(segmentID)), index.Key) indexPath := filepath.Join(cm.RootPath(), "index_files", strconv.Itoa(int(segmentID)), index.Key) indexPaths = append(indexPaths, indexPath) @@ -970,13 +974,13 @@ func genIndexParams(indexType, metricType string) (map[string]string, map[string } else if indexType == IndexHNSW { indexParams["M"] = strconv.Itoa(16) indexParams["efConstruction"] = strconv.Itoa(efConstruction) - //indexParams["ef"] = strconv.Itoa(ef) + // indexParams["ef"] = strconv.Itoa(ef) } else if indexType == IndexFaissBinIVFFlat { // binary vector indexParams["nlist"] = strconv.Itoa(nlist) indexParams["m"] = strconv.Itoa(m) indexParams["nbits"] = strconv.Itoa(nbits) } else if indexType == IndexFaissBinIDMap { - //indexParams[common.DimKey] = strconv.Itoa(defaultDim) + // indexParams[common.DimKey] = strconv.Itoa(defaultDim) } else { panic("") } @@ -1039,7 +1043,7 @@ func genPlaceHolderGroup(nq int64) ([]byte, error) { Values: make([][]byte, 0), } for i := int64(0); i < nq; i++ { - var vec = make([]float32, defaultDim) + vec := make([]float32, defaultDim) for j := 0; j < defaultDim; j++ { vec[j] = rand.Float32() } @@ -1190,7 +1194,6 @@ func checkSearchResult(nq int64, plan *SearchPlan, searchResult *SearchResult) e } func genSearchPlanAndRequests(collection *Collection, segments []int64, indexType string, nq int64) (*SearchRequest, error) { - iReq, _ := genSearchRequest(nq, indexType, collection) queryReq := &querypb.SearchRequest{ Req: iReq, diff --git a/internal/querynodev2/segments/plan.go b/internal/querynodev2/segments/plan.go index 81d081eed4..a9f07bfc78 100644 --- a/internal/querynodev2/segments/plan.go +++ b/internal/querynodev2/segments/plan.go @@ -53,7 +53,7 @@ func createSearchPlanByExpr(col *Collection, expr []byte, metricType string) (*S return nil, err1 } - var newPlan = &SearchPlan{cSearchPlan: cPlan} + newPlan := &SearchPlan{cSearchPlan: cPlan} if len(metricType) != 0 { newPlan.setMetricType(metricType) } else { @@ -106,7 +106,7 @@ func NewSearchRequest(collection *Collection, req *querypb.SearchRequest, placeh return nil, errors.New("empty search request") } - var blobPtr = unsafe.Pointer(&placeholderGrp[0]) + blobPtr := unsafe.Pointer(&placeholderGrp[0]) blobSize := C.int64_t(len(placeholderGrp)) var cPlaceholderGroup C.CPlaceholderGroup status := C.ParsePlaceholderGroup(plan.cSearchPlan, blobPtr, blobSize, &cPlaceholderGroup) @@ -153,7 +153,7 @@ func parseSearchRequest(plan *SearchPlan, searchRequestBlob []byte) (*SearchRequ if len(searchRequestBlob) == 0 { return nil, fmt.Errorf("empty search request") } - var blobPtr = unsafe.Pointer(&searchRequestBlob[0]) + blobPtr := unsafe.Pointer(&searchRequestBlob[0]) blobSize := C.int64_t(len(searchRequestBlob)) var cPlaceholderGroup C.CPlaceholderGroup status := C.ParsePlaceholderGroup(plan.cSearchPlan, blobPtr, blobSize, &cPlaceholderGroup) @@ -162,7 +162,7 @@ func parseSearchRequest(plan *SearchPlan, searchRequestBlob []byte) (*SearchRequ return nil, err } - var ret = &SearchRequest{cPlaceholderGroup: cPlaceholderGroup, plan: plan} + ret := &SearchRequest{cPlaceholderGroup: cPlaceholderGroup, plan: plan} return ret, nil } @@ -189,7 +189,7 @@ func NewRetrievePlan(col *Collection, expr []byte, timestamp Timestamp, msgID Un return nil, err } - var newPlan = &RetrievePlan{ + newPlan := &RetrievePlan{ cRetrievePlan: 
cPlan, Timestamp: timestamp, msgID: msgID, diff --git a/internal/querynodev2/segments/pool.go b/internal/querynodev2/segments/pool.go index fc34ccc0e4..a6196da59e 100644 --- a/internal/querynodev2/segments/pool.go +++ b/internal/querynodev2/segments/pool.go @@ -21,9 +21,10 @@ import ( "runtime" "sync" + "go.uber.org/atomic" + "github.com/milvus-io/milvus/pkg/util/conc" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/atomic" ) var ( diff --git a/internal/querynodev2/segments/reduce.go b/internal/querynodev2/segments/reduce.go index 4a55190950..7e8ec94441 100644 --- a/internal/querynodev2/segments/reduce.go +++ b/internal/querynodev2/segments/reduce.go @@ -23,6 +23,7 @@ package segments #include "segcore/reduce_c.h" */ import "C" + import ( "fmt" ) @@ -70,7 +71,8 @@ func ParseSliceInfo(originNQs []int64, originTopKs []int64, nqPerSlice int64) *S } func ReduceSearchResultsAndFillData(plan *SearchPlan, searchResults []*SearchResult, - numSegments int64, sliceNQs []int64, sliceTopKs []int64) (searchResultDataBlobs, error) { + numSegments int64, sliceNQs []int64, sliceTopKs []int64, +) (searchResultDataBlobs, error) { if plan.cSearchPlan == nil { return nil, fmt.Errorf("nil search plan") } @@ -92,9 +94,9 @@ func ReduceSearchResultsAndFillData(plan *SearchPlan, searchResults []*SearchRes } cSearchResultPtr := &cSearchResults[0] cNumSegments := C.int64_t(numSegments) - var cSliceNQSPtr = (*C.int64_t)(&sliceNQs[0]) - var cSliceTopKSPtr = (*C.int64_t)(&sliceTopKs[0]) - var cNumSlices = C.int64_t(len(sliceNQs)) + cSliceNQSPtr := (*C.int64_t)(&sliceNQs[0]) + cSliceTopKSPtr := (*C.int64_t)(&sliceTopKs[0]) + cNumSlices := C.int64_t(len(sliceNQs)) var cSearchResultDataBlobs searchResultDataBlobs status := C.ReduceSearchResultsAndFillData(&cSearchResultDataBlobs, plan.cSearchPlan, cSearchResultPtr, cNumSegments, cSliceNQSPtr, cSliceTopKSPtr, cNumSlices) diff --git a/internal/querynodev2/segments/reducer.go b/internal/querynodev2/segments/reducer.go index 24ba1ef622..f6e2f2b1d4 100644 --- a/internal/querynodev2/segments/reducer.go +++ b/internal/querynodev2/segments/reducer.go @@ -3,11 +3,10 @@ package segments import ( "context" - "github.com/milvus-io/milvus/internal/proto/segcorepb" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/proto/segcorepb" ) type internalReducer interface { diff --git a/internal/querynodev2/segments/reducer_test.go b/internal/querynodev2/segments/reducer_test.go index 94fd22769c..2c1940014e 100644 --- a/internal/querynodev2/segments/reducer_test.go +++ b/internal/querynodev2/segments/reducer_test.go @@ -43,7 +43,6 @@ func (suite *ReducerFactorySuite) TestCreateInternalReducer() { } func (suite *ReducerFactorySuite) TestCreateSegCoreReducer() { - req := &querypb.QueryRequest{ Req: &internalpb.RetrieveRequest{ IsCount: false, diff --git a/internal/querynodev2/segments/result.go b/internal/querynodev2/segments/result.go index 0d66d9c989..d9c78317c8 100644 --- a/internal/querynodev2/segments/result.go +++ b/internal/querynodev2/segments/result.go @@ -128,7 +128,7 @@ func ReduceSearchResultData(ctx context.Context, searchResultData []*schemapb.Se for i := int64(0); i < nq; i++ { offsets := make([]int64, len(searchResultData)) - var idSet = make(map[interface{}]struct{}) + idSet := make(map[interface{}]struct{}) var j int64 for j = 0; j < topk; { sel := SelectSearchResultData(searchResultData, resultOffsets, 
offsets, i) @@ -418,7 +418,6 @@ func mergeInternalRetrieveResultsAndFillIfEmpty( retrieveResults []*internalpb.RetrieveResults, param *mergeParam, ) (*internalpb.RetrieveResults, error) { - mergedResult, err := MergeInternalRetrieveResult(ctx, retrieveResults, param) if err != nil { return nil, err @@ -436,7 +435,6 @@ func mergeSegcoreRetrieveResultsAndFillIfEmpty( retrieveResults []*segcorepb.RetrieveResults, param *mergeParam, ) (*segcorepb.RetrieveResults, error) { - mergedResult, err := MergeSegcoreRetrieveResults(ctx, retrieveResults, param) if err != nil { return nil, err diff --git a/internal/querynodev2/segments/result_test.go b/internal/querynodev2/segments/result_test.go index de1ed9470d..867611dc1f 100644 --- a/internal/querynodev2/segments/result_test.go +++ b/internal/querynodev2/segments/result_test.go @@ -144,7 +144,8 @@ func (suite *ResultSuite) TestResult_MergeSegcoreRetrieveResults() { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0, - 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0} + 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0, + } suite.Run("test limited", func() { tests := []struct { @@ -217,13 +218,17 @@ func (suite *ResultSuite) TestResult_MergeSegcoreRetrieveResults() { IdField: &schemapb.IDs_StrId{ StrId: &schemapb.StringArray{ Data: []string{"a", "c"}, - }}} + }, + }, + } r2.Ids = &schemapb.IDs{ IdField: &schemapb.IDs_StrId{ StrId: &schemapb.StringArray{ Data: []string{"b", "d"}, - }}} + }, + }, + } result, err := MergeSegcoreRetrieveResults(context.Background(), []*segcorepb.RetrieveResults{r1, r2}, NewMergeParam(typeutil.Unlimited, make([]int64, 0), nil, false)) @@ -301,7 +306,8 @@ func (suite *ResultSuite) TestResult_MergeInternalRetrieveResults() { IdField: &schemapb.IDs_IntId{ IntId: &schemapb.LongArray{ Data: []int64{0, 1}, - }}, + }, + }, }, FieldsData: []*schemapb.FieldData{ genFieldData(common.TimeStampFieldName, common.TimeStampField, schemapb.DataType_Int64, @@ -315,7 +321,8 @@ func (suite *ResultSuite) TestResult_MergeInternalRetrieveResults() { IdField: &schemapb.IDs_IntId{ IntId: &schemapb.LongArray{ Data: []int64{0, 1}, - }}, + }, + }, }, FieldsData: []*schemapb.FieldData{ genFieldData(common.TimeStampFieldName, common.TimeStampField, schemapb.DataType_Int64, @@ -358,7 +365,8 @@ func (suite *ResultSuite) TestResult_MergeInternalRetrieveResults() { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0, - 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0} + 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0, + } suite.Run("test limited", func() { tests := []struct { @@ -450,7 +458,6 @@ func (suite *ResultSuite) TestResult_MergeInternalRetrieveResults() { suite.InDeltaSlice(resultFloat, result.FieldsData[1].GetVectors().GetFloatVector().Data, 10e-10) suite.NoError(err) }) - }) } @@ -506,8 +513,8 @@ func (suite *ResultSuite) TestResult_MergeStopForBestResult() { suite.NoError(err) suite.Equal(2, len(result.GetFieldsData())) suite.Equal([]int64{0, 1, 2, 3, 4}, result.GetIds().GetIntId().GetData()) - //here, we can only get best result from 0 to 4 without 6, because we can never know whether there is - //one potential 5 in following result1 + // here, we can only get best result from 0 to 4 without 6, because we can never know whether there is + // one potential 5 in following result1 suite.Equal([]int64{11, 22, 11, 22, 33}, result.GetFieldsData()[0].GetScalars().GetLongData().Data) 
suite.InDeltaSlice([]float32{1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 11, 22, 33, 44}, result.FieldsData[1].GetVectors().GetFloatVector().Data, 10e-10) @@ -518,8 +525,8 @@ func (suite *ResultSuite) TestResult_MergeStopForBestResult() { suite.NoError(err) suite.Equal(2, len(result.GetFieldsData())) suite.Equal([]int64{0, 1, 2, 3, 4, 6}, result.GetIds().GetIntId().GetData()) - //here, we can only get best result from 0 to 4 without 6, because we can never know whether there is - //one potential 5 in following result1 + // here, we can only get best result from 0 to 4 without 6, because we can never know whether there is + // one potential 5 in following result1 suite.Equal([]int64{11, 22, 11, 22, 33, 33}, result.GetFieldsData()[0].GetScalars().GetLongData().Data) suite.InDeltaSlice([]float32{1, 2, 3, 4, 5, 6, 7, 8, 1, 2, 3, 4, 5, 6, 7, 8, 11, 22, 33, 44, 11, 22, 33, 44}, result.FieldsData[1].GetVectors().GetFloatVector().Data, 10e-10) @@ -712,7 +719,8 @@ func (suite *ResultSuite) TestSort() { IdField: &schemapb.IDs_IntId{ IntId: &schemapb.LongArray{ Data: []int64{5, 4, 3, 2, 9, 8, 7, 6}, - }}, + }, + }, }, Offset: []int64{5, 4, 3, 2, 9, 8, 7, 6}, FieldsData: []*schemapb.FieldData{ @@ -733,8 +741,10 @@ func (suite *ResultSuite) TestSort() { genFieldData("binary vector field", 107, schemapb.DataType_BinaryVector, []byte{5, 4, 3, 2, 9, 8, 7, 6}, 8), genFieldData("json field", 108, schemapb.DataType_JSON, - [][]byte{[]byte("{\"5\": 5}"), []byte("{\"4\": 4}"), []byte("{\"3\": 3}"), []byte("{\"2\": 2}"), - []byte("{\"9\": 9}"), []byte("{\"8\": 8}"), []byte("{\"7\": 7}"), []byte("{\"6\": 6}")}, 1), + [][]byte{ + []byte("{\"5\": 5}"), []byte("{\"4\": 4}"), []byte("{\"3\": 3}"), []byte("{\"2\": 2}"), + []byte("{\"9\": 9}"), []byte("{\"8\": 8}"), []byte("{\"7\": 7}"), []byte("{\"6\": 6}"), + }, 1), genFieldData("json field", 108, schemapb.DataType_Array, []*schemapb.ScalarField{ {Data: &schemapb.ScalarField_IntData{IntData: &schemapb.IntArray{Data: []int32{5, 6, 7}}}}, @@ -761,8 +771,10 @@ func (suite *ResultSuite) TestSort() { suite.Equal([]int32{2, 3, 4, 5, 6, 7, 8, 9}, result.FieldsData[5].GetScalars().GetIntData().Data) suite.InDeltaSlice([]float32{2, 3, 4, 5, 6, 7, 8, 9}, result.FieldsData[6].GetVectors().GetFloatVector().GetData(), 10e-10) suite.Equal([]byte{2, 3, 4, 5, 6, 7, 8, 9}, result.FieldsData[7].GetVectors().GetBinaryVector()) - suite.Equal([][]byte{[]byte("{\"2\": 2}"), []byte("{\"3\": 3}"), []byte("{\"4\": 4}"), []byte("{\"5\": 5}"), - []byte("{\"6\": 6}"), []byte("{\"7\": 7}"), []byte("{\"8\": 8}"), []byte("{\"9\": 9}")}, result.FieldsData[8].GetScalars().GetJsonData().GetData()) + suite.Equal([][]byte{ + []byte("{\"2\": 2}"), []byte("{\"3\": 3}"), []byte("{\"4\": 4}"), []byte("{\"5\": 5}"), + []byte("{\"6\": 6}"), []byte("{\"7\": 7}"), []byte("{\"8\": 8}"), []byte("{\"9\": 9}"), + }, result.FieldsData[8].GetScalars().GetJsonData().GetData()) suite.Equal([]*schemapb.ScalarField{ {Data: &schemapb.ScalarField_IntData{IntData: &schemapb.IntArray{Data: []int32{2, 3, 4}}}}, {Data: &schemapb.ScalarField_IntData{IntData: &schemapb.IntArray{Data: []int32{3, 4, 5}}}}, diff --git a/internal/querynodev2/segments/segment.go b/internal/querynodev2/segments/segment.go index 09a7650f5a..f755182256 100644 --- a/internal/querynodev2/segments/segment.go +++ b/internal/querynodev2/segments/segment.go @@ -31,10 +31,6 @@ import ( "sync" "unsafe" - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus/pkg/util/funcutil" - "github.com/milvus-io/milvus/pkg/util/merr" - 
"github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" "go.opentelemetry.io/otel/trace" @@ -49,8 +45,11 @@ import ( "github.com/milvus-io/milvus/internal/proto/segcorepb" pkoracle "github.com/milvus-io/milvus/internal/querynodev2/pkoracle" "github.com/milvus-io/milvus/internal/storage" + "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" + "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/timerecord" "github.com/milvus-io/milvus/pkg/util/typeutil" @@ -63,9 +62,7 @@ const ( SegmentTypeSealed = commonpb.SegmentState_Sealed ) -var ( - ErrSegmentUnhealthy = errors.New("segment unhealthy") -) +var ErrSegmentUnhealthy = errors.New("segment unhealthy") // IndexedFieldInfo contains binlog info of vector field type IndexedFieldInfo struct { @@ -185,7 +182,7 @@ func NewSegment(collection *Collection, zap.Int64("segmentID", segmentID), zap.String("segmentType", segmentType.String())) - var segment = &LocalSegment{ + segment := &LocalSegment{ baseSegment: newBaseSegment(segmentID, partitionID, collectionID, shard, segmentType, version, startPosition), ptr: segmentPtr, lastDeltaTimestamp: atomic.NewUint64(0), @@ -510,11 +507,11 @@ func (s *LocalSegment) Insert(rowIDs []int64, timestamps []typeutil.Timestamp, r return fmt.Errorf("failed to marshal insert record: %s", err) } - var numOfRow = len(rowIDs) - var cOffset = C.int64_t(offset) - var cNumOfRows = C.int64_t(numOfRow) - var cEntityIdsPtr = (*C.int64_t)(&(rowIDs)[0]) - var cTimestampsPtr = (*C.uint64_t)(&(timestamps)[0]) + numOfRow := len(rowIDs) + cOffset := C.int64_t(offset) + cNumOfRows := C.int64_t(numOfRow) + cEntityIdsPtr := (*C.int64_t)(&(rowIDs)[0]) + cTimestampsPtr := (*C.uint64_t)(&(timestamps)[0]) var status C.CStatus @@ -559,9 +556,9 @@ func (s *LocalSegment) Delete(primaryKeys []storage.PrimaryKey, timestamps []typ return merr.WrapErrSegmentNotLoaded(s.segmentID, "segment released") } - var cOffset = C.int64_t(0) // depre - var cSize = C.int64_t(len(primaryKeys)) - var cTimestampsPtr = (*C.uint64_t)(&(timestamps)[0]) + cOffset := C.int64_t(0) // depre + cSize := C.int64_t(len(primaryKeys)) + cTimestampsPtr := (*C.uint64_t)(&(timestamps)[0]) ids := &schemapb.IDs{} pkType := primaryKeys[0].Type() diff --git a/internal/querynodev2/segments/segment_loader.go b/internal/querynodev2/segments/segment_loader.go index d0ec6fb469..b99cd6063e 100644 --- a/internal/querynodev2/segments/segment_loader.go +++ b/internal/querynodev2/segments/segment_loader.go @@ -54,9 +54,7 @@ const ( UsedDiskMemoryRatio = 4 ) -var ( - ErrReadDeltaMsgFailed = errors.New("ReadDeltaMsgFailed") -) +var ErrReadDeltaMsgFailed = errors.New("ReadDeltaMsgFailed") type Loader interface { // Load loads binlogs, and spawn segments, @@ -319,7 +317,6 @@ func (loader *segmentLoader) unregister(segments ...*querypb.SegmentLoadInfo) { } func (loader *segmentLoader) notifyLoadFinish(segments ...*querypb.SegmentLoadInfo) { - for _, loadInfo := range segments { result, ok := loader.loadingSegments.Get(loadInfo.GetSegmentID()) if ok { @@ -660,7 +657,8 @@ func (loader *segmentLoader) loadFieldsIndex(ctx context.Context, schema *schemapb.CollectionSchema, segment *LocalSegment, numRows int64, - vecFieldInfos map[int64]*IndexedFieldInfo) error { + vecFieldInfos map[int64]*IndexedFieldInfo, +) error { schemaHelper, _ := typeutil.CreateSchemaHelper(schema) for fieldID, fieldInfo := 
range vecFieldInfos { @@ -715,8 +713,8 @@ func (loader *segmentLoader) loadFieldIndex(ctx context.Context, segment *LocalS } func (loader *segmentLoader) loadBloomFilter(ctx context.Context, segmentID int64, bfs *pkoracle.BloomFilterSet, - binlogPaths []string, logType storage.StatsLogType) error { - + binlogPaths []string, logType storage.StatsLogType, +) error { log := log.Ctx(ctx).With( zap.Int64("segmentID", segmentID), ) diff --git a/internal/querynodev2/segments/segment_loader_test.go b/internal/querynodev2/segments/segment_loader_test.go index e4d2b7b190..31524f7651 100644 --- a/internal/querynodev2/segments/segment_loader_test.go +++ b/internal/querynodev2/segments/segment_loader_test.go @@ -68,7 +68,7 @@ func (suite *SegmentLoaderSuite) SetupTest() { suite.manager = NewManager() ctx := context.Background() // TODO:: cpp chunk manager not support local chunk manager - //suite.chunkManager = storage.NewLocalChunkManager(storage.RootPath( + // suite.chunkManager = storage.NewLocalChunkManager(storage.RootPath( // fmt.Sprintf("/tmp/milvus-ut/%d", rand.Int63()))) chunkManagerFactory := NewTestChunkManagerFactory(paramtable.Get(), suite.rootPath) suite.chunkManager, _ = chunkManagerFactory.NewPersistentStorageChunkManager(ctx) @@ -212,7 +212,6 @@ func (suite *SegmentLoaderSuite) TestLoadMultipleSegments() { suite.True(exist) } } - } func (suite *SegmentLoaderSuite) TestLoadWithIndex() { @@ -432,7 +431,6 @@ func (suite *SegmentLoaderSuite) TestLoadIndex() { err := suite.loader.LoadIndex(ctx, segment, loadInfo, 0) suite.ErrorIs(err, merr.ErrIndexNotFound) - } func (suite *SegmentLoaderSuite) TestLoadWithMmap() { @@ -676,7 +674,6 @@ func (suite *SegmentLoaderDetailSuite) TestWaitSegmentLoadDone() { }) suite.Run("wait_failure", func() { - suite.SetupTest() var idx int @@ -705,7 +702,6 @@ func (suite *SegmentLoaderDetailSuite) TestWaitSegmentLoadDone() { }) suite.Run("wait_timeout", func() { - suite.SetupTest() suite.segmentManager.EXPECT().GetBy(mock.Anything, mock.Anything).Return(nil) diff --git a/internal/querynodev2/segments/segment_test.go b/internal/querynodev2/segments/segment_test.go index 49d643bc3e..46f4e3f34f 100644 --- a/internal/querynodev2/segments/segment_test.go +++ b/internal/querynodev2/segments/segment_test.go @@ -154,7 +154,8 @@ func (suite *SegmentSuite) TestValidateIndexedFieldsData() { IdField: &schemapb.IDs_IntId{ IntId: &schemapb.LongArray{ Data: []int64{5, 4, 3, 2, 9, 8, 7, 6}, - }}, + }, + }, }, Offset: []int64{5, 4, 3, 2, 9, 8, 7, 6}, FieldsData: []*schemapb.FieldData{ diff --git a/internal/querynodev2/segments/validate.go b/internal/querynodev2/segments/validate.go index b83ad77b76..2003e99cab 100644 --- a/internal/querynodev2/segments/validate.go +++ b/internal/querynodev2/segments/validate.go @@ -37,7 +37,7 @@ func validate(ctx context.Context, manager *Manager, collectionID int64, partiti return nil, merr.WrapErrCollectionNotFound(collectionID) } - //validate partition + // validate partition // no partition id specified, get all partition ids in collection if len(partitionIDs) == 0 { searchPartIDs = collection.GetPartitions() @@ -59,7 +59,7 @@ func validate(ctx context.Context, manager *Manager, collectionID int64, partiti return []Segment{}, nil } - //validate segment + // validate segment segments := make([]Segment, 0, len(segmentIDs)) var err error if len(segmentIDs) == 0 { diff --git a/internal/querynodev2/server.go b/internal/querynodev2/server.go index b2ce5055a6..e4e355c453 100644 --- a/internal/querynodev2/server.go +++ 
b/internal/querynodev2/server.go @@ -41,6 +41,7 @@ import ( "time" "unsafe" + "github.com/samber/lo" clientv3 "go.etcd.io/etcd/client/v3" "go.uber.org/zap" @@ -66,7 +67,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/lifetime" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/samber/lo" ) // make sure QueryNode implements types.QueryNode @@ -207,7 +207,7 @@ func (node *QueryNode) InitSegcore() error { cIndexSliceSize := C.int64_t(paramtable.Get().CommonCfg.IndexSliceSize.GetAsInt64()) C.InitIndexSliceSize(cIndexSliceSize) - //set up thread pool for different priorities + // set up thread pool for different priorities cHighPriorityThreadCoreCoefficient := C.int64_t(paramtable.Get().CommonCfg.HighPriorityThreadCoreCoefficient.GetAsInt64()) C.InitHighPriorityThreadCoreCoefficient(cHighPriorityThreadCoreCoefficient) cMiddlePriorityThreadCoreCoefficient := C.int64_t(paramtable.Get().CommonCfg.MiddlePriorityThreadCoreCoefficient.GetAsInt64()) @@ -437,7 +437,6 @@ func (node *QueryNode) Stop() error { case <-time.After(time.Second): } } - } node.UpdateStateCode(commonpb.StateCode_Abnormal) diff --git a/internal/querynodev2/server_test.go b/internal/querynodev2/server_test.go index 413bb9af63..a682a7e030 100644 --- a/internal/querynodev2/server_test.go +++ b/internal/querynodev2/server_test.go @@ -56,7 +56,6 @@ type QueryNodeSuite struct { func (suite *QueryNodeSuite) SetupSuite() { suite.address = "test-address" - } func (suite *QueryNodeSuite) SetupTest() { diff --git a/internal/querynodev2/services.go b/internal/querynodev2/services.go index 6c08ee449e..b8b0de5aeb 100644 --- a/internal/querynodev2/services.go +++ b/internal/querynodev2/services.go @@ -352,7 +352,6 @@ func (node *QueryNode) UnsubDmChannel(ctx context.Context, req *querypb.UnsubDmC // check node healthy if !node.lifetime.Add(commonpbutil.IsHealthy) { - msg := fmt.Sprintf("query node %d is not ready", paramtable.GetNodeID()) err := merr.WrapErrServiceNotReady(msg) return merr.Status(err), nil diff --git a/internal/querynodev2/services_test.go b/internal/querynodev2/services_test.go index 474f0e2ffc..bfd15ab1ba 100644 --- a/internal/querynodev2/services_test.go +++ b/internal/querynodev2/services_test.go @@ -113,7 +113,7 @@ func (suite *ServiceSuite) SetupTest() { suite.factory = dependency.NewMockFactory(suite.T()) suite.msgStream = msgstream.NewMockMsgStream(suite.T()) // TODO:: cpp chunk manager not support local chunk manager - //suite.chunkManagerFactory = storage.NewChunkManagerFactory("local", storage.RootPath("/tmp/milvus-test")) + // suite.chunkManagerFactory = storage.NewChunkManagerFactory("local", storage.RootPath("/tmp/milvus-test")) suite.chunkManagerFactory = segments.NewTestChunkManagerFactory(paramtable.Get(), suite.rootPath) suite.factory.EXPECT().Init(mock.Anything).Return() suite.factory.EXPECT().NewPersistentStorageChunkManager(mock.Anything).Return(suite.chunkManagerFactory.NewPersistentStorageChunkManager(ctx)) @@ -1467,7 +1467,6 @@ func (suite *ServiceSuite) TestQueryStream_Failed() { if err != nil { suite.ErrorIs(err, merr.ErrChannelNotFound) } - } wg.Wait() }) @@ -1597,7 +1596,6 @@ func (suite *ServiceSuite) TestSyncReplicaSegments_Normal() { status, err := suite.node.SyncReplicaSegments(ctx, req) suite.NoError(err) suite.Equal(commonpb.ErrorCode_Success, status.ErrorCode) - } func (suite *ServiceSuite) TestShowConfigurations_Normal() { diff --git a/internal/querynodev2/tasks/concurrent_safe_scheduler.go 
b/internal/querynodev2/tasks/concurrent_safe_scheduler.go index fe054c02e3..ab954115db 100644 --- a/internal/querynodev2/tasks/concurrent_safe_scheduler.go +++ b/internal/querynodev2/tasks/concurrent_safe_scheduler.go @@ -4,14 +4,15 @@ import ( "context" "fmt" + "go.uber.org/atomic" + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/querynodev2/collector" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/util/conc" "github.com/milvus-io/milvus/pkg/util/metricsinfo" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/atomic" - "go.uber.org/zap" ) const ( diff --git a/internal/querynodev2/tasks/concurrent_safe_scheduler_test.go b/internal/querynodev2/tasks/concurrent_safe_scheduler_test.go index fd0ed081df..167eb3af26 100644 --- a/internal/querynodev2/tasks/concurrent_safe_scheduler_test.go +++ b/internal/querynodev2/tasks/concurrent_safe_scheduler_test.go @@ -7,10 +7,10 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" "go.uber.org/atomic" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" ) func TestScheduler(t *testing.T) { diff --git a/internal/querynodev2/tasks/policy_test.go b/internal/querynodev2/tasks/policy_test.go index fd4fcef719..03ce1a811f 100644 --- a/internal/querynodev2/tasks/policy_test.go +++ b/internal/querynodev2/tasks/policy_test.go @@ -4,8 +4,9 @@ import ( "fmt" "testing" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestUserTaskPollingPolicy(t *testing.T) { diff --git a/internal/querynodev2/tsafe/manager.go b/internal/querynodev2/tsafe/manager.go index 6e2448f0d9..c3da4e009b 100644 --- a/internal/querynodev2/tsafe/manager.go +++ b/internal/querynodev2/tsafe/manager.go @@ -20,10 +20,11 @@ import ( "fmt" "sync" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/tsoutil" . "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) // Manager is the interface for tsafe manager. 
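The import reshuffling in the hunks above and below follows one pattern: standard-library imports first, third-party modules second, and github.com/milvus-io packages last, with a blank line between each group. A minimal sketch of a file already in that shape; the package and function names here are hypothetical and chosen only so that every import is used:

```go
package example

import (
	"context"

	"go.uber.org/zap"

	"github.com/milvus-io/milvus/pkg/log"
)

// logStart emits a single log line so each of the three import groups
// (standard library, third-party, milvus-local) is actually referenced.
func logStart(ctx context.Context, segmentID int64) {
	// Same call shape used throughout the patched files: attach fields
	// via With, then log through the returned logger.
	logger := log.Ctx(ctx).With(zap.Int64("segmentID", segmentID))
	logger.Info("segment load starting")
}
```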
diff --git a/internal/querynodev2/tsafe/tsafe_test.go b/internal/querynodev2/tsafe/tsafe_test.go index fa9c1c8064..8edbd2de8a 100644 --- a/internal/querynodev2/tsafe/tsafe_test.go +++ b/internal/querynodev2/tsafe/tsafe_test.go @@ -48,19 +48,19 @@ func (suite *TSafeTestSuite) TestBasic() { suite.NoError(err) suite.Equal(ZeroTimestamp, t) - //Add listener + // Add listener globalWatcher := suite.tSafeReplica.WatchChannel(suite.channel) channelWatcher := suite.tSafeReplica.Watch() defer globalWatcher.Close() defer channelWatcher.Close() - //Test Set tSafe + // Test Set tSafe suite.tSafeReplica.Set(suite.channel, suite.time) t, err = suite.tSafeReplica.Get(suite.channel) suite.NoError(err) suite.Equal(suite.time, t) - //Test listener + // Test listener select { case <-globalWatcher.On(): default: diff --git a/internal/rootcoord/alter_alias_task_test.go b/internal/rootcoord/alter_alias_task_test.go index c4c714c072..8eefe721c7 100644 --- a/internal/rootcoord/alter_alias_task_test.go +++ b/internal/rootcoord/alter_alias_task_test.go @@ -23,7 +23,6 @@ import ( "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" ) diff --git a/internal/rootcoord/alter_collection_task_test.go b/internal/rootcoord/alter_collection_task_test.go index 4dd9d97a24..20c31cc4f6 100644 --- a/internal/rootcoord/alter_collection_task_test.go +++ b/internal/rootcoord/alter_collection_task_test.go @@ -220,6 +220,5 @@ func Test_alterCollectionTask_Execute(t *testing.T) { Key: common.CollectionAutoCompactionKey, Value: "true", }) - }) } diff --git a/internal/rootcoord/broker.go b/internal/rootcoord/broker.go index 82e56fc6e1..b7d4b6ce40 100644 --- a/internal/rootcoord/broker.go +++ b/internal/rootcoord/broker.go @@ -307,7 +307,6 @@ func (b *ServerBroker) GcConfirm(ctx context.Context, collectionID, partitionID req := &datapb.GcConfirmRequest{CollectionId: collectionID, PartitionId: partitionID} resp, err := b.s.dataCoord.GcConfirm(ctx, req) - if err != nil { log.Warn("gc is not finished", zap.Error(err)) return false diff --git a/internal/rootcoord/broker_test.go b/internal/rootcoord/broker_test.go index f57a087338..6f4ef30f2b 100644 --- a/internal/rootcoord/broker_test.go +++ b/internal/rootcoord/broker_test.go @@ -21,18 +21,16 @@ import ( "testing" "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/metastore/model" "github.com/milvus-io/milvus/internal/mocks" + "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/indexpb" mockrootcoord "github.com/milvus-io/milvus/internal/rootcoord/mocks" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" ) func TestServerBroker_ReleaseCollection(t *testing.T) { diff --git a/internal/rootcoord/create_alias_task.go b/internal/rootcoord/create_alias_task.go index 88ec00d270..7cd8334bd7 100644 --- a/internal/rootcoord/create_alias_task.go +++ b/internal/rootcoord/create_alias_task.go @@ -20,7 +20,6 @@ import ( "context" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" ) diff --git a/internal/rootcoord/create_alias_task_test.go b/internal/rootcoord/create_alias_task_test.go index 7bea4d775d..77d8a16f74 100644 --- 
a/internal/rootcoord/create_alias_task_test.go +++ b/internal/rootcoord/create_alias_task_test.go @@ -20,9 +20,9 @@ import ( "context" "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" ) diff --git a/internal/rootcoord/ddl_ts_lock_manager_test.go b/internal/rootcoord/ddl_ts_lock_manager_test.go index b7e7f9a45b..7fe9ccd9b9 100644 --- a/internal/rootcoord/ddl_ts_lock_manager_test.go +++ b/internal/rootcoord/ddl_ts_lock_manager_test.go @@ -20,7 +20,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" ) diff --git a/internal/rootcoord/dml_channels.go b/internal/rootcoord/dml_channels.go index 618df6a2e9..66689e31e3 100644 --- a/internal/rootcoord/dml_channels.go +++ b/internal/rootcoord/dml_channels.go @@ -25,12 +25,10 @@ import ( "sync" "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus/pkg/metrics" - "go.uber.org/zap" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/paramtable" diff --git a/internal/rootcoord/dml_channels_test.go b/internal/rootcoord/dml_channels_test.go index abb7125323..db61ff1327 100644 --- a/internal/rootcoord/dml_channels_test.go +++ b/internal/rootcoord/dml_channels_test.go @@ -24,20 +24,18 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/util/dependency" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) func TestDmlMsgStream(t *testing.T) { t.Run("RefCnt", func(t *testing.T) { - dms := &dmlMsgStream{refcnt: 0} assert.Equal(t, int64(0), dms.RefCnt()) assert.Equal(t, int64(0), dms.Used()) diff --git a/internal/rootcoord/drop_alias_task_test.go b/internal/rootcoord/drop_alias_task_test.go index f8796e5bc0..199a583107 100644 --- a/internal/rootcoord/drop_alias_task_test.go +++ b/internal/rootcoord/drop_alias_task_test.go @@ -54,7 +54,6 @@ func Test_dropAliasTask_Execute(t *testing.T) { task := &dropAliasTask{ baseTask: newBaseTask(context.Background(), core), Req: &milvuspb.DropAliasRequest{ - Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_DropAlias}, Alias: alias, }, @@ -69,7 +68,6 @@ func Test_dropAliasTask_Execute(t *testing.T) { task := &dropAliasTask{ baseTask: newBaseTask(context.Background(), core), Req: &milvuspb.DropAliasRequest{ - Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_DropAlias}, Alias: alias, }, @@ -92,7 +90,6 @@ func Test_dropAliasTask_Execute(t *testing.T) { task := &dropAliasTask{ baseTask: newBaseTask(context.Background(), core), Req: &milvuspb.DropAliasRequest{ - Base: &commonpb.MsgBase{MsgType: commonpb.MsgType_DropAlias}, Alias: alias, }, diff --git a/internal/rootcoord/drop_collection_task.go b/internal/rootcoord/drop_collection_task.go index 5440186d38..31699ee1c2 100644 --- a/internal/rootcoord/drop_collection_task.go +++ b/internal/rootcoord/drop_collection_task.go @@ -20,9 +20,9 @@ import ( "context" "fmt" + 
"github.com/cockroachdb/errors" "go.uber.org/zap" - "github.com/cockroachdb/errors" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" pb "github.com/milvus-io/milvus/internal/proto/etcdpb" diff --git a/internal/rootcoord/expire_cache.go b/internal/rootcoord/expire_cache.go index fe6f50b4d2..eccfa6e7c2 100644 --- a/internal/rootcoord/expire_cache.go +++ b/internal/rootcoord/expire_cache.go @@ -70,8 +70,8 @@ func (c *Core) ExpireMetaCache(ctx context.Context, dbName string, collNames []s for _, collName := range collNames { req := proxypb.InvalidateCollMetaCacheRequest{ Base: commonpbutil.NewMsgBase( - commonpbutil.WithMsgType(0), //TODO, msg type - commonpbutil.WithMsgID(0), //TODO, msg id + commonpbutil.WithMsgType(0), // TODO, msg type + commonpbutil.WithMsgID(0), // TODO, msg id commonpbutil.WithTimeStamp(ts), commonpbutil.WithSourceID(c.session.ServerID), ), diff --git a/internal/rootcoord/expire_cache_test.go b/internal/rootcoord/expire_cache_test.go index 61974b17dd..82782c6753 100644 --- a/internal/rootcoord/expire_cache_test.go +++ b/internal/rootcoord/expire_cache_test.go @@ -19,10 +19,10 @@ package rootcoord import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - - "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus/internal/proto/proxypb" ) func Test_expireCacheConfig_apply(t *testing.T) { diff --git a/internal/rootcoord/import_manager.go b/internal/rootcoord/import_manager.go index c48d4cd25a..b15c618775 100644 --- a/internal/rootcoord/import_manager.go +++ b/internal/rootcoord/import_manager.go @@ -91,7 +91,8 @@ func newImportManager(ctx context.Context, client kv.TxnKV, importService func(ctx context.Context, req *datapb.ImportTaskRequest) (*datapb.ImportTaskResponse, error), getSegmentStates func(ctx context.Context, req *datapb.GetSegmentStatesRequest) (*datapb.GetSegmentStatesResponse, error), getCollectionName func(dbName string, collID, partitionID typeutil.UniqueID) (string, string, error), - unsetIsImportingState func(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error)) *importManager { + unsetIsImportingState func(context.Context, *datapb.UnsetIsImportingStateRequest) (*commonpb.Status, error), +) *importManager { mgr := &importManager{ ctx: ctx, taskStore: client, @@ -334,7 +335,6 @@ func (m *importManager) flipTaskFlushedState(ctx context.Context, importTask *mi log.Info("a DataNode is no longer busy after processing task", zap.Int64("dataNode ID", dataNodeID), zap.Int64("task ID", importTask.GetId())) - }() // Unset isImporting flag. 
if m.callUnsetIsImportingState == nil { @@ -625,7 +625,6 @@ func (m *importManager) updateTaskInfo(ir *rootcoordpb.ImportResult) (*datapb.Im return toPersistImportTaskInfo, nil }() - if err != nil { return nil, err } @@ -1070,10 +1069,9 @@ func tryUpdateErrMsg(errReason string, toPersistImportTaskInfo *datapb.ImportTas if toPersistImportTaskInfo.GetState().GetErrorMessage() == "" { toPersistImportTaskInfo.State.ErrorMessage = errReason } else { - toPersistImportTaskInfo.State.ErrorMessage = - fmt.Sprintf("%s; %s", - toPersistImportTaskInfo.GetState().GetErrorMessage(), - errReason) + toPersistImportTaskInfo.State.ErrorMessage = fmt.Sprintf("%s; %s", + toPersistImportTaskInfo.GetState().GetErrorMessage(), + errReason) } } } diff --git a/internal/rootcoord/import_manager_test.go b/internal/rootcoord/import_manager_test.go index 13ca2912c3..e218c92948 100644 --- a/internal/rootcoord/import_manager_test.go +++ b/internal/rootcoord/import_manager_test.go @@ -42,9 +42,9 @@ import ( func TestImportManager_NewImportManager(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -228,9 +228,9 @@ func TestImportManager_NewImportManager(t *testing.T) { func TestImportManager_TestSetImportTaskState(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -297,9 +297,9 @@ func TestImportManager_TestSetImportTaskState(t *testing.T) { func TestImportManager_TestEtcdCleanUp(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -381,9 +381,9 @@ func TestImportManager_TestEtcdCleanUp(t *testing.T) { func TestImportManager_TestFlipTaskStateLoop(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -501,9 +501,9 @@ func TestImportManager_TestFlipTaskStateLoop(t *testing.T) { func TestImportManager_ImportJob(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -631,9 +631,9 @@ func TestImportManager_ImportJob(t *testing.T) { func TestImportManager_AllDataNodesBusy(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count 
uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -725,9 +725,9 @@ func TestImportManager_AllDataNodesBusy(t *testing.T) { func TestImportManager_TaskState(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -834,7 +834,7 @@ func TestImportManager_TaskState(t *testing.T) { } func TestImportManager_AllocFail(t *testing.T) { - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { return 0, 0, errors.New("injected failure") } paramtable.Get().Save(Params.RootCoordCfg.ImportTaskSubPath.Key, "test_import_task") @@ -865,9 +865,9 @@ func TestImportManager_AllocFail(t *testing.T) { func TestImportManager_ListAllTasks(t *testing.T) { var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) + globalCount := typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ diff --git a/internal/rootcoord/list_db_task_test.go b/internal/rootcoord/list_db_task_test.go index f2797338ea..79eea20c5e 100644 --- a/internal/rootcoord/list_db_task_test.go +++ b/internal/rootcoord/list_db_task_test.go @@ -20,12 +20,13 @@ import ( "context" "testing" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/metastore/model" mockrootcoord "github.com/milvus-io/milvus/internal/rootcoord/mocks" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" ) func Test_ListDBTask(t *testing.T) { diff --git a/internal/rootcoord/meta_table.go b/internal/rootcoord/meta_table.go index aa90d14a0c..37928de18a 100644 --- a/internal/rootcoord/meta_table.go +++ b/internal/rootcoord/meta_table.go @@ -730,7 +730,7 @@ func (mt *MetaTable) RenameCollection(ctx context.Context, dbName string, oldNam return fmt.Errorf("target database:%s not found", newDBName) } - //old collection should not be an alias + // old collection should not be an alias _, ok = mt.aliases.get(dbName, oldName) if ok { log.Warn("unsupported use a alias to rename collection") @@ -862,7 +862,7 @@ func (mt *MetaTable) RemovePartition(ctx context.Context, dbID int64, collection if !ok { return nil } - var loc = -1 + loc := -1 for idx, part := range coll.Partitions { if part.PartitionID == partitionID { loc = idx diff --git a/internal/rootcoord/meta_table_test.go b/internal/rootcoord/meta_table_test.go index 0a1635797e..59ad1222cb 100644 --- a/internal/rootcoord/meta_table_test.go +++ b/internal/rootcoord/meta_table_test.go @@ -165,7 +165,6 @@ func TestRbacOperateRole(t *testing.T) { assert.Error(t, err) }) } - } func TestRbacSelect(t *testing.T) { @@ -263,7 +262,6 @@ func TestRbacSelect(t *testing.T) { if test.isValid { assert.NoError(t, err) assert.Equal(t, test.expectedOutLength, len(res)) - } else { assert.Error(t, err) } @@ -283,57 +281,71 @@ func TestRbacOperatePrivilege(t *testing.T) { {"empty objectName", &milvuspb.GrantEntity{ObjectName: 
""}, milvuspb.OperatePrivilegeType_Grant}, {"nil Object", &milvuspb.GrantEntity{ Object: nil, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"empty Object name", &milvuspb.GrantEntity{ Object: &milvuspb.ObjectEntity{Name: ""}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"nil Role", &milvuspb.GrantEntity{ Role: nil, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"empty Role name", &milvuspb.GrantEntity{ Role: &milvuspb.RoleEntity{Name: ""}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"nil grantor", &milvuspb.GrantEntity{ Grantor: nil, Role: &milvuspb.RoleEntity{Name: "role_name"}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"nil grantor privilege", &milvuspb.GrantEntity{ Grantor: &milvuspb.GrantorEntity{ Privilege: nil, }, Role: &milvuspb.RoleEntity{Name: "role_name"}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"empty grantor privilege name", &milvuspb.GrantEntity{ Grantor: &milvuspb.GrantorEntity{ - Privilege: &milvuspb.PrivilegeEntity{Name: ""}}, + Privilege: &milvuspb.PrivilegeEntity{Name: ""}, + }, Role: &milvuspb.RoleEntity{Name: "role_name"}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"nil grantor user", &milvuspb.GrantEntity{ Grantor: &milvuspb.GrantorEntity{ User: nil, - Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}}, + Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}, + }, Role: &milvuspb.RoleEntity{Name: "role_name"}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"empty grantor user name", &milvuspb.GrantEntity{ Grantor: &milvuspb.GrantorEntity{ User: &milvuspb.UserEntity{Name: ""}, - Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}}, + Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}, + }, Role: &milvuspb.RoleEntity{Name: "role_name"}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType_Grant}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType_Grant}, {"invalid operateType", &milvuspb.GrantEntity{ Grantor: &milvuspb.GrantorEntity{ User: &milvuspb.UserEntity{Name: "user_name"}, - Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}}, + Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}, + }, Role: &milvuspb.RoleEntity{Name: "role_name"}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"}, milvuspb.OperatePrivilegeType(-1)}, + ObjectName: "obj_name", + }, milvuspb.OperatePrivilegeType(-1)}, } for _, test := range tests { @@ -346,10 +358,12 @@ func TestRbacOperatePrivilege(t *testing.T) { validEntity := 
milvuspb.GrantEntity{ Grantor: &milvuspb.GrantorEntity{ User: &milvuspb.UserEntity{Name: "user_name"}, - Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}}, + Privilege: &milvuspb.PrivilegeEntity{Name: "privilege_name"}, + }, Role: &milvuspb.RoleEntity{Name: "role_name"}, Object: &milvuspb.ObjectEntity{Name: "obj_name"}, - ObjectName: "obj_name"} + ObjectName: "obj_name", + } err := mt.OperatePrivilege(util.DefaultTenant, &validEntity, milvuspb.OperatePrivilegeType_Grant) assert.NoError(t, err) @@ -366,11 +380,14 @@ func TestRbacSelectGrant(t *testing.T) { }{ {"nil Entity", false, nil}, {"nil entity Role", false, &milvuspb.GrantEntity{ - Role: nil}}, + Role: nil, + }}, {"empty entity Role name", false, &milvuspb.GrantEntity{ - Role: &milvuspb.RoleEntity{Name: ""}}}, + Role: &milvuspb.RoleEntity{Name: ""}, + }}, {"valid", true, &milvuspb.GrantEntity{ - Role: &milvuspb.RoleEntity{Name: "role"}}}, + Role: &milvuspb.RoleEntity{Name: "role"}, + }}, } for _, test := range tests { diff --git a/internal/rootcoord/mock_test.go b/internal/rootcoord/mock_test.go index d4fd3389e4..10065bcf01 100644 --- a/internal/rootcoord/mock_test.go +++ b/internal/rootcoord/mock_test.go @@ -22,6 +22,9 @@ import ( "os" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/mock" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/allocator" @@ -43,8 +46,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/retry" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/mock" - "go.uber.org/zap" ) const ( diff --git a/internal/rootcoord/quota_center.go b/internal/rootcoord/quota_center.go index a804aa2b8c..f8eb772fa0 100644 --- a/internal/rootcoord/quota_center.go +++ b/internal/rootcoord/quota_center.go @@ -264,7 +264,7 @@ func (q *QuotaCenter) syncMetrics() error { if err != nil { return err } - //log.Debug("QuotaCenter sync metrics done", + // log.Debug("QuotaCenter sync metrics done", // zap.Any("dataNodeMetrics", q.dataNodeMetrics), // zap.Any("queryNodeMetrics", q.queryNodeMetrics), // zap.Any("proxyMetrics", q.proxyMetrics), diff --git a/internal/rootcoord/quota_center_test.go b/internal/rootcoord/quota_center_test.go index b9d65ba089..7f45dfb525 100644 --- a/internal/rootcoord/quota_center_test.go +++ b/internal/rootcoord/quota_center_test.go @@ -289,7 +289,8 @@ func TestQuotaCenter(t *testing.T) { 1: {Rms: []metricsinfo.RateMetric{ {Label: internalpb.RateType_DQLSearch.String(), Rate: 100}, {Label: internalpb.RateType_DQLQuery.String(), Rate: 100}, - }}} + }}, + } paramtable.Get().Save(Params.QuotaConfig.ForceDenyReading.Key, "false") paramtable.Get().Save(Params.QuotaConfig.QueueProtectionEnabled.Key, "true") @@ -304,7 +305,8 @@ func TestQuotaCenter(t *testing.T) { }, Effect: metricsinfo.NodeEffect{ NodeID: 1, CollectionIDs: []int64{1, 2, 3}, - }}} + }}, + } quotaCenter.calculateReadRates() for _, collection := range quotaCenter.readableCollections { assert.Equal(t, Limit(100.0*0.9), quotaCenter.currentRates[collection][internalpb.RateType_DQLSearch]) @@ -315,7 +317,8 @@ func TestQuotaCenter(t *testing.T) { quotaCenter.queryNodeMetrics = map[UniqueID]*metricsinfo.QueryNodeQuotaMetrics{ 1: {SearchQueue: metricsinfo.ReadInfoInQueue{ UnsolvedQueue: Params.QuotaConfig.NQInQueueThreshold.GetAsInt64(), - }}} + }}, + } quotaCenter.calculateReadRates() for _, collection := range quotaCenter.readableCollections { 
assert.Equal(t, Limit(100.0*0.9), quotaCenter.currentRates[collection][internalpb.RateType_DQLSearch]) @@ -329,7 +332,8 @@ func TestQuotaCenter(t *testing.T) { {Label: internalpb.RateType_DQLSearch.String(), Rate: 100}, {Label: internalpb.RateType_DQLQuery.String(), Rate: 100}, {Label: metricsinfo.ReadResultThroughput, Rate: 1.2}, - }}} + }}, + } quotaCenter.queryNodeMetrics = map[UniqueID]*metricsinfo.QueryNodeQuotaMetrics{1: {SearchQueue: metricsinfo.ReadInfoInQueue{}}} quotaCenter.calculateReadRates() for _, collection := range quotaCenter.readableCollections { @@ -500,7 +504,8 @@ func TestQuotaCenter(t *testing.T) { paramtable.Get().Save(Params.QuotaConfig.DiskQuota.Key, "99") quotaCenter.dataCoordMetrics = &metricsinfo.DataCoordQuotaMetrics{ TotalBinlogSize: 200 * 1024 * 1024, - CollectionBinlogSize: map[int64]int64{1: 100 * 1024 * 1024}} + CollectionBinlogSize: map[int64]int64{1: 100 * 1024 * 1024}, + } quotaCenter.writableCollections = []int64{1, 2, 3} quotaCenter.resetAllCurrentRates() quotaCenter.checkDiskQuota() @@ -515,7 +520,8 @@ func TestQuotaCenter(t *testing.T) { colQuotaBackup := Params.QuotaConfig.DiskQuotaPerCollection.GetValue() paramtable.Get().Save(Params.QuotaConfig.DiskQuotaPerCollection.Key, "30") quotaCenter.dataCoordMetrics = &metricsinfo.DataCoordQuotaMetrics{CollectionBinlogSize: map[int64]int64{ - 1: 20 * 1024 * 1024, 2: 30 * 1024 * 1024, 3: 60 * 1024 * 1024}} + 1: 20 * 1024 * 1024, 2: 30 * 1024 * 1024, 3: 60 * 1024 * 1024, + }} quotaCenter.writableCollections = []int64{1, 2, 3} quotaCenter.resetAllCurrentRates() quotaCenter.checkDiskQuota() diff --git a/internal/rootcoord/redo.go b/internal/rootcoord/redo.go index 8c9b412485..72406ee296 100644 --- a/internal/rootcoord/redo.go +++ b/internal/rootcoord/redo.go @@ -19,8 +19,9 @@ package rootcoord import ( "context" - "github.com/milvus-io/milvus/pkg/log" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" ) type baseRedoTask struct { diff --git a/internal/rootcoord/redo_test.go b/internal/rootcoord/redo_test.go index 7c80558a18..a01e897fcd 100644 --- a/internal/rootcoord/redo_test.go +++ b/internal/rootcoord/redo_test.go @@ -21,7 +21,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" ) diff --git a/internal/rootcoord/rename_collection_task_test.go b/internal/rootcoord/rename_collection_task_test.go index caac2a03a2..dd4be07ab9 100644 --- a/internal/rootcoord/rename_collection_task_test.go +++ b/internal/rootcoord/rename_collection_task_test.go @@ -21,11 +21,9 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" ) diff --git a/internal/rootcoord/root_coord.go b/internal/rootcoord/root_coord.go index 5557d2e123..2b30fec295 100644 --- a/internal/rootcoord/root_coord.go +++ b/internal/rootcoord/root_coord.go @@ -1976,7 +1976,6 @@ func (c *Core) ReportImport(ctx context.Context, ir *rootcoordpb.ImportResult) ( log.Info("a DataNode is no longer busy after processing task", zap.Int64("dataNode ID", ir.GetDatanodeId()), zap.Int64("task ID", ir.GetTaskId())) - }() err := c.importManager.sendOutTasks(c.importManager.ctx) if err != nil { @@ -2028,8 +2027,8 @@ func (c *Core) ReportImport(ctx context.Context, ir *rootcoordpb.ImportResult) ( func (c *Core) ExpireCredCache(ctx context.Context, username string) error { req := proxypb.InvalidateCredCacheRequest{ Base: commonpbutil.NewMsgBase( - commonpbutil.WithMsgType(0), 
//TODO, msg type - commonpbutil.WithMsgID(0), //TODO, msg id + commonpbutil.WithMsgType(0), // TODO, msg type + commonpbutil.WithMsgID(0), // TODO, msg id commonpbutil.WithSourceID(c.session.ServerID), ), Username: username, @@ -2041,8 +2040,8 @@ func (c *Core) ExpireCredCache(ctx context.Context, username string) error { func (c *Core) UpdateCredCache(ctx context.Context, credInfo *internalpb.CredentialInfo) error { req := proxypb.UpdateCredCacheRequest{ Base: commonpbutil.NewMsgBase( - commonpbutil.WithMsgType(0), //TODO, msg type - commonpbutil.WithMsgID(0), //TODO, msg id + commonpbutil.WithMsgType(0), // TODO, msg type + commonpbutil.WithMsgID(0), // TODO, msg id commonpbutil.WithSourceID(c.session.ServerID), ), Username: credInfo.Username, diff --git a/internal/rootcoord/root_coord_test.go b/internal/rootcoord/root_coord_test.go index 8ea497d15d..93a5412eea 100644 --- a/internal/rootcoord/root_coord_test.go +++ b/internal/rootcoord/root_coord_test.go @@ -1340,8 +1340,8 @@ func TestCore_ListImportTasks(t *testing.T) { func TestCore_ReportImport(t *testing.T) { paramtable.Get().Save(Params.RootCoordCfg.ImportTaskSubPath.Key, "importtask") var countLock sync.RWMutex - var globalCount = typeutil.UniqueID(0) - var idAlloc = func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { + globalCount := typeutil.UniqueID(0) + idAlloc := func(count uint32) (typeutil.UniqueID, typeutil.UniqueID, error) { countLock.Lock() defer countLock.Unlock() globalCount++ @@ -1674,7 +1674,8 @@ func TestRootcoord_EnableActiveStandby(t *testing.T) { Timestamp: 0, SourceID: paramtable.GetNodeID(), }, - CollectionName: "unexist"}) + CollectionName: "unexist", + }) assert.NoError(t, err) assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode()) err = core.Stop() @@ -1724,7 +1725,8 @@ func TestRootcoord_DisableActiveStandby(t *testing.T) { Timestamp: 0, SourceID: paramtable.GetNodeID(), }, - CollectionName: "unexist"}) + CollectionName: "unexist", + }) assert.NoError(t, err) assert.NotEqual(t, commonpb.ErrorCode_Success, resp.GetStatus().GetErrorCode()) err = core.Stop() @@ -2045,7 +2047,8 @@ func (s *RootCoordSuite) TestRestore() { meta.EXPECT().ListDatabases(mock.Anything, mock.Anything). Return([]*model.Database{ {Name: "available_colls_db"}, - {Name: "not_available_colls_db"}}, nil) + {Name: "not_available_colls_db"}, + }, nil) meta.EXPECT().ListCollections(mock.Anything, "available_colls_db", mock.Anything, false). 
Return([]*model.Collection{ diff --git a/internal/rootcoord/show_collection_task_test.go b/internal/rootcoord/show_collection_task_test.go index 9cf72af2f9..3929b86d2b 100644 --- a/internal/rootcoord/show_collection_task_test.go +++ b/internal/rootcoord/show_collection_task_test.go @@ -20,10 +20,11 @@ import ( "context" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/metastore/model" - "github.com/stretchr/testify/assert" ) func Test_showCollectionTask_Prepare(t *testing.T) { diff --git a/internal/rootcoord/show_partition_task_test.go b/internal/rootcoord/show_partition_task_test.go index de606dd8a8..074ec5c283 100644 --- a/internal/rootcoord/show_partition_task_test.go +++ b/internal/rootcoord/show_partition_task_test.go @@ -20,11 +20,12 @@ import ( "context" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/metastore/model" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" ) func Test_showPartitionTask_Prepare(t *testing.T) { diff --git a/internal/rootcoord/step.go b/internal/rootcoord/step.go index 60ba42f737..5868f3a1f4 100644 --- a/internal/rootcoord/step.go +++ b/internal/rootcoord/step.go @@ -382,8 +382,7 @@ func (s *removePartitionMetaStep) Weight() stepPriority { return stepPriorityNormal } -type nullStep struct { -} +type nullStep struct{} func (s *nullStep) Execute(ctx context.Context) ([]nestedStep, error) { return nil, nil diff --git a/internal/rootcoord/step_executor.go b/internal/rootcoord/step_executor.go index 03da036cec..f28b51d2ad 100644 --- a/internal/rootcoord/step_executor.go +++ b/internal/rootcoord/step_executor.go @@ -22,9 +22,10 @@ import ( "sync" "time" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/retry" - "go.uber.org/zap" ) const ( diff --git a/internal/rootcoord/step_executor_test.go b/internal/rootcoord/step_executor_test.go index 9d8d85c265..29d3d47848 100644 --- a/internal/rootcoord/step_executor_test.go +++ b/internal/rootcoord/step_executor_test.go @@ -23,13 +23,12 @@ import ( "time" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus/pkg/util/retry" - "github.com/stretchr/testify/assert" ) -type mockChildStep struct { -} +type mockChildStep struct{} func (m *mockChildStep) Execute(ctx context.Context) ([]nestedStep, error) { return nil, nil @@ -47,8 +46,7 @@ func newMockChildStep() *mockChildStep { return &mockChildStep{} } -type mockStepWithChild struct { -} +type mockStepWithChild struct{} func (m *mockStepWithChild) Execute(ctx context.Context) ([]nestedStep, error) { return []nestedStep{newMockChildStep()}, nil diff --git a/internal/rootcoord/step_test.go b/internal/rootcoord/step_test.go index 384f0196d7..4979be3937 100644 --- a/internal/rootcoord/step_test.go +++ b/internal/rootcoord/step_test.go @@ -25,8 +25,8 @@ import ( ) func Test_waitForTsSyncedStep_Execute(t *testing.T) { - //Params.InitOnce() - //Params.ProxyCfg.TimeTickInterval = time.Millisecond + // Params.InitOnce() + // Params.ProxyCfg.TimeTickInterval = time.Millisecond ticker := newRocksMqTtSynchronizer() core := newTestCore(withTtSynchronizer(ticker)) diff --git a/internal/rootcoord/timestamp_bench_test.go b/internal/rootcoord/timestamp_bench_test.go 
index 92e5232ddb..e8526af5de 100644 --- a/internal/rootcoord/timestamp_bench_test.go +++ b/internal/rootcoord/timestamp_bench_test.go @@ -87,7 +87,6 @@ func Benchmark_RootCoord_AllocTimestamp(b *testing.B) { } _, err := c.AllocTimestamp(ctx, &req) assert.Nil(b, err) - } b.StopTimer() } diff --git a/internal/rootcoord/timeticksync_test.go b/internal/rootcoord/timeticksync_test.go index 26a4ee0ebf..ec9572f513 100644 --- a/internal/rootcoord/timeticksync_test.go +++ b/internal/rootcoord/timeticksync_test.go @@ -221,7 +221,6 @@ func TestTimetickSyncWithExistChannels(t *testing.T) { }) // test get new channels - } func TestTimetickSyncInvalidName(t *testing.T) { diff --git a/internal/rootcoord/undo.go b/internal/rootcoord/undo.go index 8827f98823..29e9b7dfac 100644 --- a/internal/rootcoord/undo.go +++ b/internal/rootcoord/undo.go @@ -20,8 +20,9 @@ import ( "context" "fmt" - "github.com/milvus-io/milvus/pkg/log" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" ) type baseUndoTask struct { diff --git a/internal/rootcoord/util_test.go b/internal/rootcoord/util_test.go index 7c9ea2c472..8023f6ee31 100644 --- a/internal/rootcoord/util_test.go +++ b/internal/rootcoord/util_test.go @@ -19,13 +19,14 @@ package rootcoord import ( "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/metastore/model" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" ) func Test_EqualKeyPairArray(t *testing.T) { diff --git a/internal/storage/aliyun/aliyun_test.go b/internal/storage/aliyun/aliyun_test.go index 8e9660eb6e..b05f3b645b 100644 --- a/internal/storage/aliyun/aliyun_test.go +++ b/internal/storage/aliyun/aliyun_test.go @@ -5,10 +5,11 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/internal/storage/aliyun/mocks" "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/internal/storage/aliyun/mocks" ) func TestNewMinioClient(t *testing.T) { diff --git a/internal/storage/azure_object_storage.go b/internal/storage/azure_object_storage.go index ef08362ba1..2cb2da0420 100644 --- a/internal/storage/azure_object_storage.go +++ b/internal/storage/azure_object_storage.go @@ -23,18 +23,15 @@ import ( "os" "time" - "github.com/Azure/azure-sdk-for-go/sdk/azidentity" - - "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/bloberror" - "github.com/Azure/azure-sdk-for-go/sdk/azcore" - "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/container" - - "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/blob" - "github.com/milvus-io/milvus/pkg/util/retry" - + "github.com/Azure/azure-sdk-for-go/sdk/azidentity" "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob" + "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/blob" + "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/bloberror" + "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/container" "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/service" + + "github.com/milvus-io/milvus/pkg/util/retry" ) type AzureObjectStorage struct { @@ -121,7 +118,7 @@ func (AzureObjectStorage *AzureObjectStorage) StatObject(ctx context.Context, bu } func (AzureObjectStorage *AzureObjectStorage) ListObjects(ctx context.Context, bucketName string, prefix string, recursive 
bool) (map[string]time.Time, error) { - var pager = AzureObjectStorage.Client.NewContainerClient(bucketName).NewListBlobsFlatPager(&azblob.ListBlobsFlatOptions{ + pager := AzureObjectStorage.Client.NewContainerClient(bucketName).NewListBlobsFlatPager(&azblob.ListBlobsFlatOptions{ Prefix: &prefix, }) objects := map[string]time.Time{} diff --git a/internal/storage/azure_object_storage_test.go b/internal/storage/azure_object_storage_test.go index a2538465c7..4af7eff35c 100644 --- a/internal/storage/azure_object_storage_test.go +++ b/internal/storage/azure_object_storage_test.go @@ -49,7 +49,6 @@ func TestAzureObjectStorage(t *testing.T) { }) t.Run("test load", func(t *testing.T) { - testCM, err := newAzureObjectStorageWithConfig(ctx, &config) assert.Equal(t, err, nil) defer testCM.DeleteContainer(ctx, config.bucketName, &azblob.DeleteContainerOptions{}) @@ -137,7 +136,6 @@ func TestAzureObjectStorage(t *testing.T) { } }) } - }) t.Run("test useIAM", func(t *testing.T) { diff --git a/internal/storage/binlog_iterator.go b/internal/storage/binlog_iterator.go index c3445d3a86..fad450b8ad 100644 --- a/internal/storage/binlog_iterator.go +++ b/internal/storage/binlog_iterator.go @@ -66,7 +66,6 @@ func NewInsertBinlogIterator(blobs []*Blob, PKfieldID UniqueID, pkType schemapb. reader := NewInsertCodecWithSchema(nil) _, _, serData, err := reader.Deserialize(blobs) - if err != nil { return nil, err } diff --git a/internal/storage/binlog_iterator_test.go b/internal/storage/binlog_iterator_test.go index 7a91316af4..d62218aec8 100644 --- a/internal/storage/binlog_iterator_test.go +++ b/internal/storage/binlog_iterator_test.go @@ -19,11 +19,11 @@ package storage import ( "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/etcdpb" "github.com/milvus-io/milvus/pkg/common" - - "github.com/stretchr/testify/assert" ) func generateTestData(t *testing.T, num int) []*Blob { diff --git a/internal/storage/binlog_test.go b/internal/storage/binlog_test.go index 4e1833139a..15454bfb71 100644 --- a/internal/storage/binlog_test.go +++ b/internal/storage/binlog_test.go @@ -25,14 +25,14 @@ import ( "time" "unsafe" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/milvus-io/milvus/pkg/util/uniquegenerator" - - "github.com/stretchr/testify/assert" ) /* #nosec G103 */ @@ -89,63 +89,63 @@ func TestInsertBinlog(t *testing.T) { assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(ts)) - //descriptor header, type code + // descriptor header, type code tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(tc), DescriptorEventType) pos += int(unsafe.Sizeof(tc)) - //descriptor header, event length + // descriptor header, event length descEventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(descEventLen)) - //descriptor header, next position + // descriptor header, next position descNxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descEventLen+int32(unsafe.Sizeof(MagicNumber)), descNxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //descriptor data fix, collection id + // descriptor data fix, collection id collID := UnsafeReadInt64(buf, pos) assert.Equal(t, collID, int64(10)) pos += int(unsafe.Sizeof(collID)) - //descriptor data fix, partition id + // descriptor data fix, 
partition id partID := UnsafeReadInt64(buf, pos) assert.Equal(t, partID, int64(20)) pos += int(unsafe.Sizeof(partID)) - //descriptor data fix, segment id + // descriptor data fix, segment id segID := UnsafeReadInt64(buf, pos) assert.Equal(t, segID, int64(30)) pos += int(unsafe.Sizeof(segID)) - //descriptor data fix, field id + // descriptor data fix, field id fieldID := UnsafeReadInt64(buf, pos) assert.Equal(t, fieldID, int64(40)) pos += int(unsafe.Sizeof(fieldID)) - //descriptor data fix, start time stamp + // descriptor data fix, start time stamp startts := UnsafeReadInt64(buf, pos) assert.Equal(t, startts, int64(1000)) pos += int(unsafe.Sizeof(startts)) - //descriptor data fix, end time stamp + // descriptor data fix, end time stamp endts := UnsafeReadInt64(buf, pos) assert.Equal(t, endts, int64(2000)) pos += int(unsafe.Sizeof(endts)) - //descriptor data fix, payload type + // descriptor data fix, payload type colType := UnsafeReadInt32(buf, pos) assert.Equal(t, schemapb.DataType(colType), schemapb.DataType_Int64) pos += int(unsafe.Sizeof(colType)) - //descriptor data, post header lengths + // descriptor data, post header lengths for i := DescriptorEventType; i < EventTypeEnd; i++ { size := getEventFixPartSize(i) assert.Equal(t, uint8(size), buf[pos]) pos++ } - //descriptor data, extra length + // descriptor data, extra length extraLength := UnsafeReadInt32(buf, pos) assert.Equal(t, extraLength, w.baseBinlogWriter.descriptorEventData.ExtraLength) pos += int(unsafe.Sizeof(extraLength)) @@ -166,40 +166,40 @@ func TestInsertBinlog(t *testing.T) { assert.True(t, ok) assert.Equal(t, fmt.Sprintf("%v", sizeTotal), fmt.Sprintf("%v", size)) - //start of e1 + // start of e1 assert.Equal(t, pos, int(descNxtPos)) - //insert e1 header, Timestamp + // insert e1 header, Timestamp e1ts := UnsafeReadInt64(buf, pos) diffts = curts - e1ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e1ts)) - //insert e1 header, type code + // insert e1 header, type code e1tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(e1tc), InsertEventType) pos += int(unsafe.Sizeof(e1tc)) - //insert e1 header, event length + // insert e1 header, event length e1EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e1EventLen)) - //insert e1 header, next position + // insert e1 header, next position e1NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descNxtPos+e1EventLen, e1NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e1 data, start time stamp + // insert e1 data, start time stamp e1st := UnsafeReadInt64(buf, pos) assert.Equal(t, e1st, int64(100)) pos += int(unsafe.Sizeof(e1st)) - //insert e1 data, end time stamp + // insert e1 data, end time stamp e1et := UnsafeReadInt64(buf, pos) assert.Equal(t, e1et, int64(200)) pos += int(unsafe.Sizeof(e1et)) - //insert e1, payload + // insert e1, payload e1Payload := buf[pos:e1NxtPos] e1r, err := NewPayloadReader(schemapb.DataType_Int64, e1Payload) assert.NoError(t, err) @@ -208,40 +208,40 @@ func TestInsertBinlog(t *testing.T) { assert.Equal(t, e1a, []int64{1, 2, 3, 4, 5, 6}) e1r.Close() - //start of e2 + // start of e2 pos = int(e1NxtPos) - //insert e2 header, Timestamp + // insert e2 header, Timestamp e2ts := UnsafeReadInt64(buf, pos) diffts = curts - e2ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e2ts)) - //insert e2 header, type code + // insert e2 header, type code e2tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(e2tc), InsertEventType) pos += int(unsafe.Sizeof(e2tc)) - //insert e2 header, event length + 
// insert e2 header, event length e2EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e2EventLen)) - //insert e2 header, next position + // insert e2 header, next position e2NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, e1NxtPos+e2EventLen, e2NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e2 data, start time stamp + // insert e2 data, start time stamp e2st := UnsafeReadInt64(buf, pos) assert.Equal(t, e2st, int64(300)) pos += int(unsafe.Sizeof(e2st)) - //insert e2 data, end time stamp + // insert e2 data, end time stamp e2et := UnsafeReadInt64(buf, pos) assert.Equal(t, e2et, int64(400)) pos += int(unsafe.Sizeof(e2et)) - //insert e2, payload + // insert e2, payload e2Payload := buf[pos:] e2r, err := NewPayloadReader(schemapb.DataType_Int64, e2Payload) assert.NoError(t, err) @@ -252,7 +252,7 @@ func TestInsertBinlog(t *testing.T) { assert.Equal(t, int(e2NxtPos), len(buf)) - //read binlog + // read binlog r, err := NewBinlogReader(buf) assert.NoError(t, err) event1, err := r.NextEventReader() @@ -321,12 +321,12 @@ func TestDeleteBinlog(t *testing.T) { w.Close() - //magic number + // magic number magicNum := UnsafeReadInt32(buf, 0) assert.Equal(t, magicNum, MagicNumber) pos := int(unsafe.Sizeof(MagicNumber)) - //descriptor header, timestamp + // descriptor header, timestamp ts := UnsafeReadInt64(buf, pos) assert.Greater(t, ts, int64(0)) curts := time.Now().UnixNano() / int64(time.Millisecond) @@ -336,63 +336,63 @@ func TestDeleteBinlog(t *testing.T) { assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(ts)) - //descriptor header, type code + // descriptor header, type code tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(tc), DescriptorEventType) pos += int(unsafe.Sizeof(tc)) - //descriptor header, event length + // descriptor header, event length descEventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(descEventLen)) - //descriptor header, next position + // descriptor header, next position descNxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descEventLen+int32(unsafe.Sizeof(MagicNumber)), descNxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //descriptor data fix, collection id + // descriptor data fix, collection id collID := UnsafeReadInt64(buf, pos) assert.Equal(t, collID, int64(50)) pos += int(unsafe.Sizeof(collID)) - //descriptor data fix, partition id + // descriptor data fix, partition id partID := UnsafeReadInt64(buf, pos) assert.Equal(t, partID, int64(1)) pos += int(unsafe.Sizeof(partID)) - //descriptor data fix, segment id + // descriptor data fix, segment id segID := UnsafeReadInt64(buf, pos) assert.Equal(t, segID, int64(1)) pos += int(unsafe.Sizeof(segID)) - //descriptor data fix, field id + // descriptor data fix, field id fieldID := UnsafeReadInt64(buf, pos) assert.Equal(t, fieldID, int64(-1)) pos += int(unsafe.Sizeof(fieldID)) - //descriptor data fix, start time stamp + // descriptor data fix, start time stamp startts := UnsafeReadInt64(buf, pos) assert.Equal(t, startts, int64(1000)) pos += int(unsafe.Sizeof(startts)) - //descriptor data fix, end time stamp + // descriptor data fix, end time stamp endts := UnsafeReadInt64(buf, pos) assert.Equal(t, endts, int64(2000)) pos += int(unsafe.Sizeof(endts)) - //descriptor data fix, payload type + // descriptor data fix, payload type colType := UnsafeReadInt32(buf, pos) assert.Equal(t, schemapb.DataType(colType), schemapb.DataType_Int64) pos += int(unsafe.Sizeof(colType)) - //descriptor data, post header lengths + // descriptor data, post header lengths for i := 
DescriptorEventType; i < EventTypeEnd; i++ { size := getEventFixPartSize(i) assert.Equal(t, uint8(size), buf[pos]) pos++ } - //descriptor data, extra length + // descriptor data, extra length extraLength := UnsafeReadInt32(buf, pos) assert.Equal(t, extraLength, w.baseBinlogWriter.descriptorEventData.ExtraLength) pos += int(unsafe.Sizeof(extraLength)) @@ -413,40 +413,40 @@ func TestDeleteBinlog(t *testing.T) { assert.True(t, ok) assert.Equal(t, fmt.Sprintf("%v", sizeTotal), fmt.Sprintf("%v", size)) - //start of e1 + // start of e1 assert.Equal(t, pos, int(descNxtPos)) - //insert e1 header, Timestamp + // insert e1 header, Timestamp e1ts := UnsafeReadInt64(buf, pos) diffts = curts - e1ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e1ts)) - //insert e1 header, type code + // insert e1 header, type code e1tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(e1tc), DeleteEventType) pos += int(unsafe.Sizeof(e1tc)) - //insert e1 header, event length + // insert e1 header, event length e1EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e1EventLen)) - //insert e1 header, next position + // insert e1 header, next position e1NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descNxtPos+e1EventLen, e1NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e1 data, start time stamp + // insert e1 data, start time stamp e1st := UnsafeReadInt64(buf, pos) assert.Equal(t, e1st, int64(100)) pos += int(unsafe.Sizeof(e1st)) - //insert e1 data, end time stamp + // insert e1 data, end time stamp e1et := UnsafeReadInt64(buf, pos) assert.Equal(t, e1et, int64(200)) pos += int(unsafe.Sizeof(e1et)) - //insert e1, payload + // insert e1, payload e1Payload := buf[pos:e1NxtPos] e1r, err := NewPayloadReader(schemapb.DataType_Int64, e1Payload) assert.NoError(t, err) @@ -455,40 +455,40 @@ func TestDeleteBinlog(t *testing.T) { assert.Equal(t, e1a, []int64{1, 2, 3, 4, 5, 6}) e1r.Close() - //start of e2 + // start of e2 pos = int(e1NxtPos) - //insert e2 header, Timestamp + // insert e2 header, Timestamp e2ts := UnsafeReadInt64(buf, pos) diffts = curts - e2ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e2ts)) - //insert e2 header, type code + // insert e2 header, type code e2tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(e2tc), DeleteEventType) pos += int(unsafe.Sizeof(e2tc)) - //insert e2 header, event length + // insert e2 header, event length e2EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e2EventLen)) - //insert e2 header, next position + // insert e2 header, next position e2NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, e1NxtPos+e2EventLen, e2NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e2 data, start time stamp + // insert e2 data, start time stamp e2st := UnsafeReadInt64(buf, pos) assert.Equal(t, e2st, int64(300)) pos += int(unsafe.Sizeof(e2st)) - //insert e2 data, end time stamp + // insert e2 data, end time stamp e2et := UnsafeReadInt64(buf, pos) assert.Equal(t, e2et, int64(400)) pos += int(unsafe.Sizeof(e2et)) - //insert e2, payload + // insert e2, payload e2Payload := buf[pos:] e2r, err := NewPayloadReader(schemapb.DataType_Int64, e2Payload) assert.NoError(t, err) @@ -499,7 +499,7 @@ func TestDeleteBinlog(t *testing.T) { assert.Equal(t, int(e2NxtPos), len(buf)) - //read binlog + // read binlog r, err := NewBinlogReader(buf) assert.NoError(t, err) event1, err := r.NextEventReader() @@ -568,12 +568,12 @@ func TestDDLBinlog1(t *testing.T) { w.Close() - //magic number + // magic number magicNum := 
UnsafeReadInt32(buf, 0) assert.Equal(t, magicNum, MagicNumber) pos := int(unsafe.Sizeof(MagicNumber)) - //descriptor header, timestamp + // descriptor header, timestamp ts := UnsafeReadInt64(buf, pos) assert.Greater(t, ts, int64(0)) curts := time.Now().UnixNano() / int64(time.Millisecond) @@ -583,63 +583,63 @@ func TestDDLBinlog1(t *testing.T) { assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(ts)) - //descriptor header, type code + // descriptor header, type code tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(tc), DescriptorEventType) pos += int(unsafe.Sizeof(tc)) - //descriptor header, event length + // descriptor header, event length descEventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(descEventLen)) - //descriptor header, next position + // descriptor header, next position descNxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descEventLen+int32(unsafe.Sizeof(MagicNumber)), descNxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //descriptor data fix, collection id + // descriptor data fix, collection id collID := UnsafeReadInt64(buf, pos) assert.Equal(t, collID, int64(50)) pos += int(unsafe.Sizeof(collID)) - //descriptor data fix, partition id + // descriptor data fix, partition id partID := UnsafeReadInt64(buf, pos) assert.Equal(t, partID, int64(-1)) pos += int(unsafe.Sizeof(partID)) - //descriptor data fix, segment id + // descriptor data fix, segment id segID := UnsafeReadInt64(buf, pos) assert.Equal(t, segID, int64(-1)) pos += int(unsafe.Sizeof(segID)) - //descriptor data fix, field id + // descriptor data fix, field id fieldID := UnsafeReadInt64(buf, pos) assert.Equal(t, fieldID, int64(-1)) pos += int(unsafe.Sizeof(fieldID)) - //descriptor data fix, start time stamp + // descriptor data fix, start time stamp startts := UnsafeReadInt64(buf, pos) assert.Equal(t, startts, int64(1000)) pos += int(unsafe.Sizeof(startts)) - //descriptor data fix, end time stamp + // descriptor data fix, end time stamp endts := UnsafeReadInt64(buf, pos) assert.Equal(t, endts, int64(2000)) pos += int(unsafe.Sizeof(endts)) - //descriptor data fix, payload type + // descriptor data fix, payload type colType := UnsafeReadInt32(buf, pos) assert.Equal(t, schemapb.DataType(colType), schemapb.DataType_Int64) pos += int(unsafe.Sizeof(colType)) - //descriptor data, post header lengths + // descriptor data, post header lengths for i := DescriptorEventType; i < EventTypeEnd; i++ { size := getEventFixPartSize(i) assert.Equal(t, uint8(size), buf[pos]) pos++ } - //descriptor data, extra length + // descriptor data, extra length extraLength := UnsafeReadInt32(buf, pos) assert.Equal(t, extraLength, w.baseBinlogWriter.descriptorEventData.ExtraLength) pos += int(unsafe.Sizeof(extraLength)) @@ -660,40 +660,40 @@ func TestDDLBinlog1(t *testing.T) { assert.True(t, ok) assert.Equal(t, fmt.Sprintf("%v", sizeTotal), fmt.Sprintf("%v", size)) - //start of e1 + // start of e1 assert.Equal(t, pos, int(descNxtPos)) - //insert e1 header, Timestamp + // insert e1 header, Timestamp e1ts := UnsafeReadInt64(buf, pos) diffts = curts - e1ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e1ts)) - //insert e1 header, type code + // insert e1 header, type code e1tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(e1tc), CreateCollectionEventType) pos += int(unsafe.Sizeof(e1tc)) - //insert e1 header, event length + // insert e1 header, event length e1EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e1EventLen)) - //insert e1 header, next position + // insert e1 
header, next position e1NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descNxtPos+e1EventLen, e1NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e1 data, start time stamp + // insert e1 data, start time stamp e1st := UnsafeReadInt64(buf, pos) assert.Equal(t, e1st, int64(100)) pos += int(unsafe.Sizeof(e1st)) - //insert e1 data, end time stamp + // insert e1 data, end time stamp e1et := UnsafeReadInt64(buf, pos) assert.Equal(t, e1et, int64(200)) pos += int(unsafe.Sizeof(e1et)) - //insert e1, payload + // insert e1, payload e1Payload := buf[pos:e1NxtPos] e1r, err := NewPayloadReader(schemapb.DataType_Int64, e1Payload) assert.NoError(t, err) @@ -702,40 +702,40 @@ func TestDDLBinlog1(t *testing.T) { assert.Equal(t, e1a, []int64{1, 2, 3, 4, 5, 6}) e1r.Close() - //start of e2 + // start of e2 pos = int(e1NxtPos) - //insert e2 header, Timestamp + // insert e2 header, Timestamp e2ts := UnsafeReadInt64(buf, pos) diffts = curts - e2ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e2ts)) - //insert e2 header, type code + // insert e2 header, type code e2tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(e2tc), DropCollectionEventType) pos += int(unsafe.Sizeof(e2tc)) - //insert e2 header, event length + // insert e2 header, event length e2EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e2EventLen)) - //insert e2 header, next position + // insert e2 header, next position e2NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, e1NxtPos+e2EventLen, e2NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e2 data, start time stamp + // insert e2 data, start time stamp e2st := UnsafeReadInt64(buf, pos) assert.Equal(t, e2st, int64(300)) pos += int(unsafe.Sizeof(e2st)) - //insert e2 data, end time stamp + // insert e2 data, end time stamp e2et := UnsafeReadInt64(buf, pos) assert.Equal(t, e2et, int64(400)) pos += int(unsafe.Sizeof(e2et)) - //insert e2, payload + // insert e2, payload e2Payload := buf[pos:] e2r, err := NewPayloadReader(schemapb.DataType_Int64, e2Payload) assert.NoError(t, err) @@ -746,7 +746,7 @@ func TestDDLBinlog1(t *testing.T) { assert.Equal(t, int(e2NxtPos), len(buf)) - //read binlog + // read binlog r, err := NewBinlogReader(buf) assert.NoError(t, err) event1, err := r.NextEventReader() @@ -814,12 +814,12 @@ func TestDDLBinlog2(t *testing.T) { assert.NoError(t, err) w.Close() - //magic number + // magic number magicNum := UnsafeReadInt32(buf, 0) assert.Equal(t, magicNum, MagicNumber) pos := int(unsafe.Sizeof(MagicNumber)) - //descriptor header, timestamp + // descriptor header, timestamp ts := UnsafeReadInt64(buf, pos) assert.Greater(t, ts, int64(0)) curts := time.Now().UnixNano() / int64(time.Millisecond) @@ -829,63 +829,63 @@ func TestDDLBinlog2(t *testing.T) { assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(ts)) - //descriptor header, type code + // descriptor header, type code tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(tc), DescriptorEventType) pos += int(unsafe.Sizeof(tc)) - //descriptor header, event length + // descriptor header, event length descEventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(descEventLen)) - //descriptor header, next position + // descriptor header, next position descNxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descEventLen+int32(unsafe.Sizeof(MagicNumber)), descNxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //descriptor data fix, collection id + // descriptor data fix, collection id collID := UnsafeReadInt64(buf, pos) assert.Equal(t, collID, int64(50)) 
pos += int(unsafe.Sizeof(collID)) - //descriptor data fix, partition id + // descriptor data fix, partition id partID := UnsafeReadInt64(buf, pos) assert.Equal(t, partID, int64(-1)) pos += int(unsafe.Sizeof(partID)) - //descriptor data fix, segment id + // descriptor data fix, segment id segID := UnsafeReadInt64(buf, pos) assert.Equal(t, segID, int64(-1)) pos += int(unsafe.Sizeof(segID)) - //descriptor data fix, field id + // descriptor data fix, field id fieldID := UnsafeReadInt64(buf, pos) assert.Equal(t, fieldID, int64(-1)) pos += int(unsafe.Sizeof(fieldID)) - //descriptor data fix, start time stamp + // descriptor data fix, start time stamp startts := UnsafeReadInt64(buf, pos) assert.Equal(t, startts, int64(1000)) pos += int(unsafe.Sizeof(startts)) - //descriptor data fix, end time stamp + // descriptor data fix, end time stamp endts := UnsafeReadInt64(buf, pos) assert.Equal(t, endts, int64(2000)) pos += int(unsafe.Sizeof(endts)) - //descriptor data fix, payload type + // descriptor data fix, payload type colType := UnsafeReadInt32(buf, pos) assert.Equal(t, schemapb.DataType(colType), schemapb.DataType_Int64) pos += int(unsafe.Sizeof(colType)) - //descriptor data, post header lengths + // descriptor data, post header lengths for i := DescriptorEventType; i < EventTypeEnd; i++ { size := getEventFixPartSize(i) assert.Equal(t, uint8(size), buf[pos]) pos++ } - //descriptor data, extra length + // descriptor data, extra length extraLength := UnsafeReadInt32(buf, pos) assert.Equal(t, extraLength, w.baseBinlogWriter.descriptorEventData.ExtraLength) pos += int(unsafe.Sizeof(extraLength)) @@ -906,40 +906,40 @@ func TestDDLBinlog2(t *testing.T) { assert.True(t, ok) assert.Equal(t, fmt.Sprintf("%v", sizeTotal), fmt.Sprintf("%v", size)) - //start of e1 + // start of e1 assert.Equal(t, pos, int(descNxtPos)) - //insert e1 header, Timestamp + // insert e1 header, Timestamp e1ts := UnsafeReadInt64(buf, pos) diffts = curts - e1ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e1ts)) - //insert e1 header, type code + // insert e1 header, type code e1tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(e1tc), CreatePartitionEventType) pos += int(unsafe.Sizeof(e1tc)) - //insert e1 header, event length + // insert e1 header, event length e1EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e1EventLen)) - //insert e1 header, next position + // insert e1 header, next position e1NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descNxtPos+e1EventLen, e1NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e1 data, start time stamp + // insert e1 data, start time stamp e1st := UnsafeReadInt64(buf, pos) assert.Equal(t, e1st, int64(100)) pos += int(unsafe.Sizeof(e1st)) - //insert e1 data, end time stamp + // insert e1 data, end time stamp e1et := UnsafeReadInt64(buf, pos) assert.Equal(t, e1et, int64(200)) pos += int(unsafe.Sizeof(e1et)) - //insert e1, payload + // insert e1, payload e1Payload := buf[pos:e1NxtPos] e1r, err := NewPayloadReader(schemapb.DataType_Int64, e1Payload) assert.NoError(t, err) @@ -948,40 +948,40 @@ func TestDDLBinlog2(t *testing.T) { assert.Equal(t, e1a, []int64{1, 2, 3, 4, 5, 6}) e1r.Close() - //start of e2 + // start of e2 pos = int(e1NxtPos) - //insert e2 header, Timestamp + // insert e2 header, Timestamp e2ts := UnsafeReadInt64(buf, pos) diffts = curts - e2ts assert.LessOrEqual(t, diffts, maxdiff) pos += int(unsafe.Sizeof(e2ts)) - //insert e2 header, type code + // insert e2 header, type code e2tc := UnsafeReadInt8(buf, pos) assert.Equal(t, 
EventTypeCode(e2tc), DropPartitionEventType) pos += int(unsafe.Sizeof(e2tc)) - //insert e2 header, event length + // insert e2 header, event length e2EventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(e2EventLen)) - //insert e2 header, next position + // insert e2 header, next position e2NxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, e1NxtPos+e2EventLen, e2NxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //insert e2 data, start time stamp + // insert e2 data, start time stamp e2st := UnsafeReadInt64(buf, pos) assert.Equal(t, e2st, int64(300)) pos += int(unsafe.Sizeof(e2st)) - //insert e2 data, end time stamp + // insert e2 data, end time stamp e2et := UnsafeReadInt64(buf, pos) assert.Equal(t, e2et, int64(400)) pos += int(unsafe.Sizeof(e2et)) - //insert e2, payload + // insert e2, payload e2Payload := buf[pos:] e2r, err := NewPayloadReader(schemapb.DataType_Int64, e2Payload) assert.NoError(t, err) @@ -992,7 +992,7 @@ func TestDDLBinlog2(t *testing.T) { assert.Equal(t, int(e2NxtPos), len(buf)) - //read binlog + // read binlog r, err := NewBinlogReader(buf) assert.NoError(t, err) event1, err := r.NextEventReader() @@ -1060,73 +1060,73 @@ func TestIndexFileBinlog(t *testing.T) { w.Close() - //magic number + // magic number magicNum := UnsafeReadInt32(buf, 0) assert.Equal(t, magicNum, MagicNumber) pos := int(unsafe.Sizeof(MagicNumber)) - //descriptor header, timestamp + // descriptor header, timestamp ts := UnsafeReadInt64(buf, pos) assert.Greater(t, ts, int64(0)) pos += int(unsafe.Sizeof(ts)) - //descriptor header, type code + // descriptor header, type code tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(tc), DescriptorEventType) pos += int(unsafe.Sizeof(tc)) - //descriptor header, event length + // descriptor header, event length descEventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(descEventLen)) - //descriptor header, next position + // descriptor header, next position descNxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descEventLen+int32(unsafe.Sizeof(MagicNumber)), descNxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //descriptor data fix, collection id + // descriptor data fix, collection id collID := UnsafeReadInt64(buf, pos) assert.Equal(t, collID, collectionID) pos += int(unsafe.Sizeof(collID)) - //descriptor data fix, partition id + // descriptor data fix, partition id partID := UnsafeReadInt64(buf, pos) assert.Equal(t, partID, partitionID) pos += int(unsafe.Sizeof(partID)) - //descriptor data fix, segment id + // descriptor data fix, segment id segID := UnsafeReadInt64(buf, pos) assert.Equal(t, segID, segmentID) pos += int(unsafe.Sizeof(segID)) - //descriptor data fix, field id + // descriptor data fix, field id fID := UnsafeReadInt64(buf, pos) assert.Equal(t, fieldID, fID) pos += int(unsafe.Sizeof(fID)) - //descriptor data fix, start time stamp + // descriptor data fix, start time stamp startts := UnsafeReadInt64(buf, pos) assert.Equal(t, startts, int64(timestamp)) pos += int(unsafe.Sizeof(startts)) - //descriptor data fix, end time stamp + // descriptor data fix, end time stamp endts := UnsafeReadInt64(buf, pos) assert.Equal(t, endts, int64(timestamp)) pos += int(unsafe.Sizeof(endts)) - //descriptor data fix, payload type + // descriptor data fix, payload type colType := UnsafeReadInt32(buf, pos) assert.Equal(t, schemapb.DataType(colType), schemapb.DataType_Int8) pos += int(unsafe.Sizeof(colType)) - //descriptor data, post header lengths + // descriptor data, post header lengths for i := DescriptorEventType; i < EventTypeEnd; i++ { 
size := getEventFixPartSize(i) assert.Equal(t, uint8(size), buf[pos]) pos++ } - //descriptor data, extra length + // descriptor data, extra length extraLength := UnsafeReadInt32(buf, pos) assert.Equal(t, extraLength, w.baseBinlogWriter.descriptorEventData.ExtraLength) pos += int(unsafe.Sizeof(extraLength)) @@ -1189,73 +1189,73 @@ func TestIndexFileBinlogV2(t *testing.T) { w.Close() - //magic number + // magic number magicNum := UnsafeReadInt32(buf, 0) assert.Equal(t, magicNum, MagicNumber) pos := int(unsafe.Sizeof(MagicNumber)) - //descriptor header, timestamp + // descriptor header, timestamp ts := UnsafeReadInt64(buf, pos) assert.Greater(t, ts, int64(0)) pos += int(unsafe.Sizeof(ts)) - //descriptor header, type code + // descriptor header, type code tc := UnsafeReadInt8(buf, pos) assert.Equal(t, EventTypeCode(tc), DescriptorEventType) pos += int(unsafe.Sizeof(tc)) - //descriptor header, event length + // descriptor header, event length descEventLen := UnsafeReadInt32(buf, pos) pos += int(unsafe.Sizeof(descEventLen)) - //descriptor header, next position + // descriptor header, next position descNxtPos := UnsafeReadInt32(buf, pos) assert.Equal(t, descEventLen+int32(unsafe.Sizeof(MagicNumber)), descNxtPos) pos += int(unsafe.Sizeof(descNxtPos)) - //descriptor data fix, collection id + // descriptor data fix, collection id collID := UnsafeReadInt64(buf, pos) assert.Equal(t, collID, collectionID) pos += int(unsafe.Sizeof(collID)) - //descriptor data fix, partition id + // descriptor data fix, partition id partID := UnsafeReadInt64(buf, pos) assert.Equal(t, partID, partitionID) pos += int(unsafe.Sizeof(partID)) - //descriptor data fix, segment id + // descriptor data fix, segment id segID := UnsafeReadInt64(buf, pos) assert.Equal(t, segID, segmentID) pos += int(unsafe.Sizeof(segID)) - //descriptor data fix, field id + // descriptor data fix, field id fID := UnsafeReadInt64(buf, pos) assert.Equal(t, fieldID, fID) pos += int(unsafe.Sizeof(fID)) - //descriptor data fix, start time stamp + // descriptor data fix, start time stamp startts := UnsafeReadInt64(buf, pos) assert.Equal(t, startts, int64(timestamp)) pos += int(unsafe.Sizeof(startts)) - //descriptor data fix, end time stamp + // descriptor data fix, end time stamp endts := UnsafeReadInt64(buf, pos) assert.Equal(t, endts, int64(timestamp)) pos += int(unsafe.Sizeof(endts)) - //descriptor data fix, payload type + // descriptor data fix, payload type colType := UnsafeReadInt32(buf, pos) assert.Equal(t, schemapb.DataType(colType), schemapb.DataType_String) pos += int(unsafe.Sizeof(colType)) - //descriptor data, post header lengths + // descriptor data, post header lengths for i := DescriptorEventType; i < EventTypeEnd; i++ { size := getEventFixPartSize(i) assert.Equal(t, uint8(size), buf[pos]) pos++ } - //descriptor data, extra length + // descriptor data, extra length extraLength := UnsafeReadInt32(buf, pos) assert.Equal(t, extraLength, w.baseBinlogWriter.descriptorEventData.ExtraLength) pos += int(unsafe.Sizeof(extraLength)) @@ -1482,6 +1482,7 @@ func (e *testEvent) GetMemoryUsageInBytes() (int32, error) { } return 0, nil } + func (e *testEvent) GetPayloadLengthFromWriter() (int, error) { if e.getPayloadLengthError { return -1, fmt.Errorf("getPayloadLength error") @@ -1493,7 +1494,6 @@ func (e *testEvent) ReleasePayloadWriter() { } func (e *testEvent) SetOffset(offset int32) { - } var _ EventWriter = (*testEvent)(nil) diff --git a/internal/storage/binlog_util_test.go b/internal/storage/binlog_util_test.go index f1c4c9e2ed..de3c1de5d3 100644 
--- a/internal/storage/binlog_util_test.go +++ b/internal/storage/binlog_util_test.go @@ -7,7 +7,6 @@ import ( ) func TestParseSegmentIDByBinlog(t *testing.T) { - type testCase struct { name string input string diff --git a/internal/storage/binlog_writer.go b/internal/storage/binlog_writer.go index dffed7cc7f..0a38c68854 100644 --- a/internal/storage/binlog_writer.go +++ b/internal/storage/binlog_writer.go @@ -41,6 +41,7 @@ const ( // StatsBinlog BinlogType for stats data StatsBinlog ) + const ( // MagicNumber used in binlog MagicNumber int32 = 0xfffabc diff --git a/internal/storage/binlog_writer_test.go b/internal/storage/binlog_writer_test.go index 69c473357b..8bc80f6658 100644 --- a/internal/storage/binlog_writer_test.go +++ b/internal/storage/binlog_writer_test.go @@ -20,9 +20,9 @@ import ( "fmt" "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) func TestBinlogWriterReader(t *testing.T) { diff --git a/internal/storage/data_codec.go b/internal/storage/data_codec.go index 832b861b5a..32b2c3b5e7 100644 --- a/internal/storage/data_codec.go +++ b/internal/storage/data_codec.go @@ -182,9 +182,11 @@ func (data *JSONFieldData) GetRow(i int) any { return data.Data[i] } func (data *BinaryVectorFieldData) GetRow(i int) any { return data.Data[i*data.Dim/8 : (i+1)*data.Dim/8] } + func (data *FloatVectorFieldData) GetRow(i int) any { return data.Data[i*data.Dim : (i+1)*data.Dim] } + func (data *Float16VectorFieldData) GetRow(i int) any { return data.Data[i*data.Dim*2 : (i+1)*data.Dim*2] } @@ -331,7 +333,7 @@ func (insertCodec *InsertCodec) SerializePkStats(stats *PrimaryKeyStats, rowNum return nil, fmt.Errorf("sericalize empty pk stats") } - //Serialize by pk stats + // Serialize by pk stats blobKey := fmt.Sprintf("%d", stats.FieldID) statsWriter := &StatsWriter{} err := statsWriter.Generate(stats) @@ -1055,8 +1057,7 @@ func (data *DeleteData) Append(pk PrimaryKey, ts Timestamp) { } // DeleteCodec serializes and deserializes the delete data -type DeleteCodec struct { -} +type DeleteCodec struct{} // NewDeleteCodec returns a DeleteCodec func NewDeleteCodec() *DeleteCodec { @@ -1187,7 +1188,6 @@ func (deleteCodec *DeleteCodec) Deserialize(blobs []*Blob) (partitionID UniqueID } eventReader.Close() binlogReader.Close() - } result.RowCount = int64(len(result.Pks)) @@ -1377,7 +1377,6 @@ func (dataDefinitionCodec *DataDefinitionCodec) Deserialize(blobs []*Blob) (ts [ eventReader.Close() } binlogReader.Close() - } return resultTs, requestsStrings, nil diff --git a/internal/storage/data_codec_test.go b/internal/storage/data_codec_test.go index 04eefbd442..00aa943535 100644 --- a/internal/storage/data_codec_test.go +++ b/internal/storage/data_codec_test.go @@ -21,12 +21,12 @@ import ( "fmt" "testing" + "github.com/stretchr/testify/assert" "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/etcdpb" "github.com/milvus-io/milvus/pkg/log" - "github.com/stretchr/testify/assert" ) const ( @@ -683,5 +683,4 @@ func TestMemorySize(t *testing.T) { assert.Equal(t, insertDataEmpty.Data[StringField].GetMemorySize(), 0) assert.Equal(t, insertDataEmpty.Data[BinaryVectorField].GetMemorySize(), 4) assert.Equal(t, insertDataEmpty.Data[FloatVectorField].GetMemorySize(), 4) - } diff --git a/internal/storage/data_sorter_test.go b/internal/storage/data_sorter_test.go index a0ca71b376..8a9ed44b85 100644 --- a/internal/storage/data_sorter_test.go +++ 
b/internal/storage/data_sorter_test.go @@ -20,9 +20,10 @@ import ( "sort" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/etcdpb" - "github.com/stretchr/testify/assert" ) func TestDataSorter(t *testing.T) { diff --git a/internal/storage/event_data.go b/internal/storage/event_data.go index a4a28d0b8a..2b0c9baa6f 100644 --- a/internal/storage/event_data.go +++ b/internal/storage/event_data.go @@ -65,7 +65,6 @@ func (data *descriptorEventData) GetEventDataFixPartSize() int32 { // GetMemoryUsageInBytes returns the memory size of DescriptorEventDataFixPart. func (data *descriptorEventData) GetMemoryUsageInBytes() int32 { return data.GetEventDataFixPartSize() + int32(binary.Size(data.PostHeaderLengths)) + int32(binary.Size(data.ExtraLength)) + data.ExtraLength - } // AddExtra add extra params to description event. @@ -368,36 +367,42 @@ func newInsertEventData() *insertEventData { EndTimestamp: 0, } } + func newDeleteEventData() *deleteEventData { return &deleteEventData{ StartTimestamp: 0, EndTimestamp: 0, } } + func newCreateCollectionEventData() *createCollectionEventData { return &createCollectionEventData{ StartTimestamp: 0, EndTimestamp: 0, } } + func newDropCollectionEventData() *dropCollectionEventData { return &dropCollectionEventData{ StartTimestamp: 0, EndTimestamp: 0, } } + func newCreatePartitionEventData() *createPartitionEventData { return &createPartitionEventData{ StartTimestamp: 0, EndTimestamp: 0, } } + func newDropPartitionEventData() *dropPartitionEventData { return &dropPartitionEventData{ StartTimestamp: 0, EndTimestamp: 0, } } + func newIndexFileEventData() *indexFileEventData { return &indexFileEventData{ StartTimestamp: 0, diff --git a/internal/storage/event_test.go b/internal/storage/event_test.go index 848188a6e6..e432e3a829 100644 --- a/internal/storage/event_test.go +++ b/internal/storage/event_test.go @@ -24,12 +24,13 @@ import ( "time" "unsafe" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" ) /* #nosec G103 */ @@ -832,7 +833,6 @@ func TestDropPartitionEvent(t *testing.T) { r.Close() }) - } /* #nosec G103 */ @@ -1081,7 +1081,6 @@ func TestEventReaderError(t *testing.T) { r, err = newEventReader(schemapb.DataType_Int64, buf) assert.Nil(t, r) assert.Error(t, err) - } func TestEventClose(t *testing.T) { diff --git a/internal/storage/event_writer_test.go b/internal/storage/event_writer_test.go index 2c1d2a3d72..a6b6456159 100644 --- a/internal/storage/event_writer_test.go +++ b/internal/storage/event_writer_test.go @@ -21,9 +21,10 @@ import ( "encoding/binary" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" ) func TestEventTypeCode_String(t *testing.T) { diff --git a/internal/storage/gcp/gcp_test.go b/internal/storage/gcp/gcp_test.go index 9695316a3d..990166f0be 100644 --- a/internal/storage/gcp/gcp_test.go +++ b/internal/storage/gcp/gcp_test.go @@ -5,7 +5,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/minio/minio-go/v7" "github.com/minio/minio-go/v7/pkg/credentials" "github.com/stretchr/testify/assert" @@ -100,5 +99,4 @@ func 
TestGCPWrappedHTTPTransport_RoundTrip(t *testing.T) { _, err = ts.RoundTrip(req) assert.Error(t, err) }) - } diff --git a/internal/storage/index_data_codec.go b/internal/storage/index_data_codec.go index 55343eb2b9..0e928c8223 100644 --- a/internal/storage/index_data_codec.go +++ b/internal/storage/index_data_codec.go @@ -23,7 +23,6 @@ import ( "time" "github.com/cockroachdb/errors" - "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" @@ -31,8 +30,7 @@ import ( "github.com/milvus-io/milvus/pkg/util/typeutil" ) -type IndexFileBinlogCodec struct { -} +type IndexFileBinlogCodec struct{} // NewIndexFileBinlogCodec is constructor for IndexFileBinlogCodec func NewIndexFileBinlogCodec() *IndexFileBinlogCodec { @@ -85,7 +83,7 @@ func (codec *IndexFileBinlogCodec) serializeImpl( return &Blob{ Key: key, - //Key: strconv.Itoa(len(datas)), + // Key: strconv.Itoa(len(datas)), Value: buffer, }, nil } @@ -100,7 +98,8 @@ func (codec *IndexFileBinlogCodec) SerializeIndexParams( fieldID UniqueID, indexParams map[string]string, indexName string, - indexID UniqueID) (*Blob, error) { + indexID UniqueID, +) (*Blob, error) { ts := Timestamp(time.Now().UnixNano()) // save index params. @@ -126,7 +125,6 @@ func (codec *IndexFileBinlogCodec) Serialize( indexID UniqueID, datas []*Blob, ) ([]*Blob, error) { - var err error var blobs []*Blob @@ -268,7 +266,6 @@ func (codec *IndexFileBinlogCodec) DeserializeImpl(blobs []*Blob) ( eventReader.Close() } binlogReader.Close() - } return indexBuildID, version, collectionID, partitionID, segmentID, fieldID, indexParams, indexName, indexID, datas, nil @@ -286,8 +283,7 @@ func (codec *IndexFileBinlogCodec) Deserialize(blobs []*Blob) ( } // IndexCodec can serialize and deserialize index -type IndexCodec struct { -} +type IndexCodec struct{} // NewIndexCodec creates IndexCodec func NewIndexCodec() *IndexCodec { diff --git a/internal/storage/minio_chunk_manager.go b/internal/storage/minio_chunk_manager.go index be983f05d5..d5c660d968 100644 --- a/internal/storage/minio_chunk_manager.go +++ b/internal/storage/minio_chunk_manager.go @@ -26,6 +26,12 @@ import ( "time" "github.com/cockroachdb/errors" + minio "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" + "go.uber.org/zap" + "golang.org/x/exp/mmap" + "golang.org/x/sync/errgroup" + "github.com/milvus-io/milvus/internal/storage/aliyun" "github.com/milvus-io/milvus/internal/storage/gcp" "github.com/milvus-io/milvus/pkg/log" @@ -33,18 +39,11 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/retry" "github.com/milvus-io/milvus/pkg/util/timerecord" - minio "github.com/minio/minio-go/v7" - "github.com/minio/minio-go/v7/pkg/credentials" - "go.uber.org/zap" - "golang.org/x/exp/mmap" - "golang.org/x/sync/errgroup" ) const NoSuchKey = "NoSuchKey" -var ( - ErrNoSuchKey = errors.New(NoSuchKey) -) +var ErrNoSuchKey = errors.New(NoSuchKey) func WrapErrNoSuchKey(key string) error { return fmt.Errorf("%w(key=%s)", ErrNoSuchKey, key) @@ -80,8 +79,8 @@ func NewMinioChunkManager(ctx context.Context, opts ...Option) (*MinioChunkManag func newMinioChunkManagerWithConfig(ctx context.Context, c *config) (*MinioChunkManager, error) { var creds *credentials.Credentials - var newMinioFn = minio.New - var bucketLookupType = minio.BucketLookupAuto + newMinioFn := minio.New + bucketLookupType := minio.BucketLookupAuto if c.useVirtualHost { bucketLookupType = minio.BucketLookupDNS @@ -208,7 +207,6 @@ func (mcm *MinioChunkManager) Size(ctx context.Context, filePath 
string) (int64, // Write writes the data to minio storage. func (mcm *MinioChunkManager) Write(ctx context.Context, filePath string, content []byte) error { _, err := mcm.putMinioObject(ctx, mcm.bucketName, filePath, bytes.NewReader(content), int64(len(content)), minio.PutObjectOptions{}) - if err != nil { log.Warn("failed to put object", zap.String("bucket", mcm.bucketName), zap.String("path", filePath), zap.Error(err)) return err @@ -414,7 +412,6 @@ func (mcm *MinioChunkManager) RemoveWithPrefix(ctx context.Context, prefix strin // calling `ListWithPrefix` with `prefix` = a && `recursive` = false will only returns [a, ab] // If caller needs all objects without level limitation, `recursive` shall be true. func (mcm *MinioChunkManager) ListWithPrefix(ctx context.Context, prefix string, recursive bool) ([]string, []time.Time, error) { - // cannot use ListObjects(ctx, bucketName, Opt{Prefix:prefix, Recursive:true}) // if minio has lots of objects under the provided path // recursive = true may timeout during the recursive browsing the objects. @@ -475,7 +472,8 @@ func Read(r io.Reader, size int64) ([]byte, error) { } func (mcm *MinioChunkManager) getMinioObject(ctx context.Context, bucketName, objectName string, - opts minio.GetObjectOptions) (*minio.Object, error) { + opts minio.GetObjectOptions, +) (*minio.Object, error) { start := timerecord.NewTimeRecorder("getMinioObject") reader, err := mcm.Client.GetObject(ctx, bucketName, objectName, opts) @@ -491,7 +489,8 @@ func (mcm *MinioChunkManager) getMinioObject(ctx context.Context, bucketName, ob } func (mcm *MinioChunkManager) putMinioObject(ctx context.Context, bucketName, objectName string, reader io.Reader, objectSize int64, - opts minio.PutObjectOptions) (minio.UploadInfo, error) { + opts minio.PutObjectOptions, +) (minio.UploadInfo, error) { start := timerecord.NewTimeRecorder("putMinioObject") info, err := mcm.Client.PutObject(ctx, bucketName, objectName, reader, objectSize, opts) @@ -507,7 +506,8 @@ func (mcm *MinioChunkManager) putMinioObject(ctx context.Context, bucketName, ob } func (mcm *MinioChunkManager) statMinioObject(ctx context.Context, bucketName, objectName string, - opts minio.StatObjectOptions) (minio.ObjectInfo, error) { + opts minio.StatObjectOptions, +) (minio.ObjectInfo, error) { start := timerecord.NewTimeRecorder("statMinioObject") info, err := mcm.Client.StatObject(ctx, bucketName, objectName, opts) @@ -523,7 +523,8 @@ func (mcm *MinioChunkManager) statMinioObject(ctx context.Context, bucketName, o } func (mcm *MinioChunkManager) listMinioObjects(ctx context.Context, bucketName string, - opts minio.ListObjectsOptions) <-chan minio.ObjectInfo { + opts minio.ListObjectsOptions, +) <-chan minio.ObjectInfo { start := timerecord.NewTimeRecorder("listMinioObjects") res := mcm.Client.ListObjects(ctx, bucketName, opts) @@ -535,7 +536,8 @@ func (mcm *MinioChunkManager) listMinioObjects(ctx context.Context, bucketName s } func (mcm *MinioChunkManager) removeMinioObject(ctx context.Context, bucketName, objectName string, - opts minio.RemoveObjectOptions) error { + opts minio.RemoveObjectOptions, +) error { start := timerecord.NewTimeRecorder("removeMinioObject") err := mcm.Client.RemoveObject(ctx, bucketName, objectName, opts) diff --git a/internal/storage/minio_chunk_manager_test.go b/internal/storage/minio_chunk_manager_test.go index 54feec3e1f..5aa4eaa533 100644 --- a/internal/storage/minio_chunk_manager_test.go +++ b/internal/storage/minio_chunk_manager_test.go @@ -76,7 +76,6 @@ func TestMinIOCMFail(t *testing.T) { ) 
assert.Error(t, err) assert.Nil(t, client) - } func TestMinIOCM(t *testing.T) { @@ -440,7 +439,6 @@ func TestMinIOCM(t *testing.T) { r, err := testCM.Mmap(ctx, key) assert.Error(t, err) assert.Nil(t, r) - }) t.Run("test Prefix", func(t *testing.T) { diff --git a/internal/storage/minio_object_storage.go b/internal/storage/minio_object_storage.go index a66e3b32ca..76a14a492f 100644 --- a/internal/storage/minio_object_storage.go +++ b/internal/storage/minio_object_storage.go @@ -22,14 +22,14 @@ import ( "io" "time" + minio "github.com/minio/minio-go/v7" + "github.com/minio/minio-go/v7/pkg/credentials" + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/storage/aliyun" "github.com/milvus-io/milvus/internal/storage/gcp" - "github.com/milvus-io/milvus/pkg/util/retry" - "github.com/minio/minio-go/v7/pkg/credentials" - "github.com/milvus-io/milvus/pkg/log" - minio "github.com/minio/minio-go/v7" - "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/util/retry" ) type MinioObjectStorage struct { @@ -38,8 +38,8 @@ type MinioObjectStorage struct { func newMinioObjectStorageWithConfig(ctx context.Context, c *config) (*MinioObjectStorage, error) { var creds *credentials.Credentials - var newMinioFn = minio.New - var bucketLookupType = minio.BucketLookupAuto + newMinioFn := minio.New + bucketLookupType := minio.BucketLookupAuto switch c.cloudProvider { case CloudProviderAliyun: diff --git a/internal/storage/minio_object_storage_test.go b/internal/storage/minio_object_storage_test.go index b3ea6bb68d..5e7968ddc3 100644 --- a/internal/storage/minio_object_storage_test.go +++ b/internal/storage/minio_object_storage_test.go @@ -23,7 +23,6 @@ import ( "testing" "github.com/minio/minio-go/v7" - "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -54,7 +53,6 @@ func TestMinioObjectStorage(t *testing.T) { }) t.Run("test load", func(t *testing.T) { - testCM, err := newMinioObjectStorageWithConfig(ctx, &config) assert.Equal(t, err, nil) defer testCM.RemoveBucket(ctx, config.bucketName) @@ -142,7 +140,6 @@ func TestMinioObjectStorage(t *testing.T) { } }) } - }) t.Run("test useIAM", func(t *testing.T) { diff --git a/internal/storage/options.go b/internal/storage/options.go index 83fe9f8848..a6ab260651 100644 --- a/internal/storage/options.go +++ b/internal/storage/options.go @@ -40,6 +40,7 @@ func AccessKeyID(accessKeyID string) Option { c.accessKeyID = accessKeyID } } + func SecretAccessKeyID(secretAccessKeyID string) Option { return func(c *config) { c.secretAccessKeyID = secretAccessKeyID diff --git a/internal/storage/payload_reader.go b/internal/storage/payload_reader.go index 64effc8ebe..019cabb767 100644 --- a/internal/storage/payload_reader.go +++ b/internal/storage/payload_reader.go @@ -4,12 +4,11 @@ import ( "bytes" "fmt" - "github.com/cockroachdb/errors" - "github.com/golang/protobuf/proto" - "github.com/apache/arrow/go/v8/arrow" "github.com/apache/arrow/go/v8/parquet" "github.com/apache/arrow/go/v8/parquet/file" + "github.com/cockroachdb/errors" + "github.com/golang/protobuf/proto" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) diff --git a/internal/storage/payload_reader_test.go b/internal/storage/payload_reader_test.go index 44a86fcbf0..4248da71c1 100644 --- a/internal/storage/payload_reader_test.go +++ b/internal/storage/payload_reader_test.go @@ -5,8 +5,9 @@ import ( "testing" "github.com/apache/arrow/go/v8/parquet/file" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/stretchr/testify/suite" + + 
"github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) type ReadDataFromAllRowGroupsSuite struct { diff --git a/internal/storage/payload_test.go b/internal/storage/payload_test.go index f3acb569ae..a9fe6177c6 100644 --- a/internal/storage/payload_test.go +++ b/internal/storage/payload_test.go @@ -26,7 +26,6 @@ import ( ) func TestPayload_ReaderAndWriter(t *testing.T) { - t.Run("TestBool", func(t *testing.T) { w, err := NewPayloadWriter(schemapb.DataType_Bool) require.Nil(t, err) @@ -60,7 +59,6 @@ func TestPayload_ReaderAndWriter(t *testing.T) { assert.NoError(t, err) assert.ElementsMatch(t, []bool{false, false, false, false, false, false, false, false}, bools) defer r.ReleasePayloadReader() - }) t.Run("TestInt8", func(t *testing.T) { diff --git a/internal/storage/payload_writer.go b/internal/storage/payload_writer.go index 2267f5519e..91f7c08b7f 100644 --- a/internal/storage/payload_writer.go +++ b/internal/storage/payload_writer.go @@ -30,6 +30,7 @@ import ( "github.com/apache/arrow/go/v8/parquet/pqarrow" "github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/typeutil" diff --git a/internal/storage/pk_statistics.go b/internal/storage/pk_statistics.go index 0f62ed90a8..2278ee22b7 100644 --- a/internal/storage/pk_statistics.go +++ b/internal/storage/pk_statistics.go @@ -19,9 +19,9 @@ package storage import ( "fmt" + "github.com/bits-and-blooms/bloom/v3" "github.com/cockroachdb/errors" - "github.com/bits-and-blooms/bloom/v3" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" ) @@ -102,7 +102,7 @@ func (st *PkStatistics) PkExist(pk PrimaryKey) bool { varCharPk := pk.(*VarCharPrimaryKey) return st.PkFilter.TestString(varCharPk.Value) default: - //TODO:: + // TODO:: } // no idea, just make it as false positive return true diff --git a/internal/storage/primary_key.go b/internal/storage/primary_key.go index afeb4d1f89..80f33bad89 100644 --- a/internal/storage/primary_key.go +++ b/internal/storage/primary_key.go @@ -372,7 +372,7 @@ func ParseIDs2PrimaryKeys(ids *schemapb.IDs) []PrimaryKey { ret = append(ret, pk) } default: - //TODO:: + // TODO:: } return ret @@ -405,7 +405,7 @@ func ParsePrimaryKeys2IDs(pks []PrimaryKey) *schemapb.IDs { }, } default: - //TODO:: + // TODO:: } return ret diff --git a/internal/storage/print_binlog.go b/internal/storage/print_binlog.go index 36f3f85b63..da3eafc968 100644 --- a/internal/storage/print_binlog.go +++ b/internal/storage/print_binlog.go @@ -23,11 +23,11 @@ import ( "github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "golang.org/x/exp/mmap" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/util/tsoutil" ) // PrintBinlogFiles call printBinlogFile in turn for the file list specified by parameter fileList. 
@@ -43,7 +43,7 @@ func PrintBinlogFiles(fileList []string) error { // nolint func printBinlogFile(filename string) error { - fd, err := os.OpenFile(filename, os.O_RDONLY, 0400) + fd, err := os.OpenFile(filename, os.O_RDONLY, 0o400) if err != nil { return err } diff --git a/internal/storage/print_binlog_test.go b/internal/storage/print_binlog_test.go index d3d84648b2..14effbd48f 100644 --- a/internal/storage/print_binlog_test.go +++ b/internal/storage/print_binlog_test.go @@ -24,15 +24,15 @@ import ( "time" "github.com/golang/protobuf/proto" - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/etcdpb" + "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/funcutil" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/uniquegenerator" ) @@ -81,7 +81,6 @@ func TestPrintBinlogFilesInt64(t *testing.T) { assert.Equal(t, num, len(buf)) err = fd.Close() assert.NoError(t, err) - } func TestPrintBinlogFiles(t *testing.T) { diff --git a/internal/storage/remote_chunk_manager.go b/internal/storage/remote_chunk_manager.go index c2ab2b5576..6ba546a57e 100644 --- a/internal/storage/remote_chunk_manager.go +++ b/internal/storage/remote_chunk_manager.go @@ -26,16 +26,16 @@ import ( "github.com/Azure/azure-sdk-for-go/sdk/azcore" "github.com/Azure/azure-sdk-for-go/sdk/storage/azblob/bloberror" - "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/metrics" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/milvus-io/milvus/pkg/util/timerecord" minio "github.com/minio/minio-go/v7" "go.uber.org/zap" "golang.org/x/exp/mmap" "golang.org/x/sync/errgroup" + + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/metrics" + "github.com/milvus-io/milvus/pkg/util/merr" + "github.com/milvus-io/milvus/pkg/util/timerecord" ) const ( @@ -125,7 +125,6 @@ func (mcm *RemoteChunkManager) Size(ctx context.Context, filePath string) (int64 // Write writes the data to minio storage. func (mcm *RemoteChunkManager) Write(ctx context.Context, filePath string, content []byte) error { err := mcm.putObject(ctx, mcm.bucketName, filePath, bytes.NewReader(content), int64(len(content))) - if err != nil { log.Warn("failed to put object", zap.String("bucket", mcm.bucketName), zap.String("path", filePath), zap.Error(err)) return err @@ -317,7 +316,6 @@ func (mcm *RemoteChunkManager) RemoveWithPrefix(ctx context.Context, prefix stri // calling `ListWithPrefix` with `prefix` = a && `recursive` = false will only returns [a, ab] // If caller needs all objects without level limitation, `recursive` shall be true. func (mcm *RemoteChunkManager) ListWithPrefix(ctx context.Context, prefix string, recursive bool) ([]string, []time.Time, error) { - // cannot use ListObjects(ctx, bucketName, Opt{Prefix:prefix, Recursive:true}) // if minio has lots of objects under the provided path // recursive = true may timeout during the recursive browsing the objects. 
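Two of the mechanical rewrites visible around here are the octal-literal change (0400 becomes 0o400) and the short-declaration change (var v = make(...) becomes v := make(...)). A small sketch of both styles on an invented helper, not code from the patch:

package example

import "os"

// openReadOnly uses the 0o-prefixed octal literal and a short variable
// declaration; the helper and its name are illustrative only.
func openReadOnly(name string) (*os.File, error) {
	perm := os.FileMode(0o400) // previously spelled 0400
	return os.OpenFile(name, os.O_RDONLY, perm)
}

Both spellings compile identically; the rewrite only normalizes style so the formatter stops flagging these lines.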
@@ -336,13 +334,11 @@ func (mcm *RemoteChunkManager) ListWithPrefix(ctx context.Context, prefix string // TODO add concurrent call if performance matters // only return current level per call objects, err := mcm.listObjects(ctx, mcm.bucketName, pre, false) - if err != nil { return nil, nil, err } for object, lastModified := range objects { - // with tailing "/", object is a "directory" if strings.HasSuffix(object, "/") && recursive { // enqueue when recursive is true @@ -360,7 +356,8 @@ func (mcm *RemoteChunkManager) ListWithPrefix(ctx context.Context, prefix string } func (mcm *RemoteChunkManager) getObject(ctx context.Context, bucketName, objectName string, - offset int64, size int64) (FileReader, error) { + offset int64, size int64, +) (FileReader, error) { start := timerecord.NewTimeRecorder("getObject") reader, err := mcm.client.GetObject(ctx, bucketName, objectName, offset, size) diff --git a/internal/storage/remote_chunk_manager_test.go b/internal/storage/remote_chunk_manager_test.go index be8ecbf7d5..527fe4225b 100644 --- a/internal/storage/remote_chunk_manager_test.go +++ b/internal/storage/remote_chunk_manager_test.go @@ -427,7 +427,6 @@ func TestMinioChunkManager(t *testing.T) { r, err := testCM.Mmap(ctx, key) assert.Error(t, err) assert.Nil(t, r) - }) t.Run("test Prefix", func(t *testing.T) { @@ -882,7 +881,6 @@ func TestAzureChunkManager(t *testing.T) { r, err := testCM.Mmap(ctx, key) assert.Error(t, err) assert.Nil(t, r) - }) t.Run("test Prefix", func(t *testing.T) { diff --git a/internal/storage/stats.go b/internal/storage/stats.go index c4d24eb998..19522a0423 100644 --- a/internal/storage/stats.go +++ b/internal/storage/stats.go @@ -21,6 +21,7 @@ import ( "fmt" "github.com/bits-and-blooms/bloom/v3" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" @@ -37,7 +38,7 @@ const ( type PrimaryKeyStats struct { FieldID int64 `json:"fieldID"` Max int64 `json:"max"` // useless, will delete - Min int64 `json:"min"` //useless, will delete + Min int64 `json:"min"` // useless, will delete BF *bloom.BloomFilter `json:"bf"` PkType int64 `json:"pkType"` MaxPk PrimaryKey `json:"maxPk"` @@ -154,7 +155,7 @@ func (stats *PrimaryKeyStats) UpdateByMsgs(msgs FieldData) { stats.BF.AddString(str) } default: - //TODO:: + // TODO:: } } @@ -172,7 +173,6 @@ func (stats *PrimaryKeyStats) Update(pk PrimaryKey) { default: log.Warn("Update pk stats with invalid data type") } - } // updatePk update minPk and maxPk value diff --git a/internal/storage/utils.go b/internal/storage/utils.go index 0e4d772d34..cc247aa88d 100644 --- a/internal/storage/utils.go +++ b/internal/storage/utils.go @@ -25,7 +25,6 @@ import ( "strconv" "github.com/cockroachdb/errors" - "github.com/golang/protobuf/proto" "go.uber.org/zap" @@ -207,7 +206,7 @@ func ReadBinary(reader io.Reader, receiver interface{}, dataType schemapb.DataTy func readFloatVectors(blobReaders []io.Reader, dim int) []float32 { ret := make([]float32, 0) for _, r := range blobReaders { - var v = make([]float32, dim) + v := make([]float32, dim) ReadBinary(r, &v, schemapb.DataType_FloatVector) ret = append(ret, v...) } @@ -217,7 +216,7 @@ func readFloatVectors(blobReaders []io.Reader, dim int) []float32 { func readBinaryVectors(blobReaders []io.Reader, dim int) []byte { ret := make([]byte, 0) for _, r := range blobReaders { - var v = make([]byte, dim/8) + v := make([]byte, dim/8) ReadBinary(r, &v, schemapb.DataType_BinaryVector) ret = append(ret, v...) 
} @@ -227,7 +226,7 @@ func readBinaryVectors(blobReaders []io.Reader, dim int) []byte { func readFloat16Vectors(blobReaders []io.Reader, dim int) []byte { ret := make([]byte, 0) for _, r := range blobReaders { - var v = make([]byte, dim*2) + v := make([]byte, dim*2) ReadBinary(r, &v, schemapb.DataType_Float16Vector) ret = append(ret, v...) } @@ -831,7 +830,7 @@ func GetPkFromInsertData(collSchema *schemapb.CollectionSchema, data *InsertData case schemapb.DataType_VarChar: realPfData, ok = pfData.(*StringFieldData) default: - //TODO + // TODO } if !ok { log.Warn("primary field not in Int64 or VarChar format", zap.Int64("fieldID", pf.FieldID)) diff --git a/internal/storage/vector_chunk_manager.go b/internal/storage/vector_chunk_manager.go index b1dd545c2c..738bf8cde1 100644 --- a/internal/storage/vector_chunk_manager.go +++ b/internal/storage/vector_chunk_manager.go @@ -24,7 +24,6 @@ import ( "time" "github.com/cockroachdb/errors" - "go.uber.org/zap" "golang.org/x/exp/mmap" @@ -33,9 +32,7 @@ import ( "github.com/milvus-io/milvus/pkg/util/cache" ) -var ( - defaultLocalCacheSize = 64 -) +var defaultLocalCacheSize = 64 // VectorChunkManager is responsible for read and write vector data. type VectorChunkManager struct { @@ -291,6 +288,7 @@ func (vcm *VectorChunkManager) ReadAt(ctx context.Context, filePath string, off } return p, nil } + func (vcm *VectorChunkManager) Remove(ctx context.Context, filePath string) error { err := vcm.vectorStorage.Remove(ctx, filePath) if err != nil { diff --git a/internal/storage/vector_chunk_manager_test.go b/internal/storage/vector_chunk_manager_test.go index 0c1c026610..89d5eb55e2 100644 --- a/internal/storage/vector_chunk_manager_test.go +++ b/internal/storage/vector_chunk_manager_test.go @@ -23,7 +23,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" @@ -132,8 +131,10 @@ func buildVectorChunkManager(ctx context.Context, localPath string, localCacheEn return vcm, nil } -var Params = paramtable.Get() -var localPath = "/tmp/milvus_test/chunkmanager/" +var ( + Params = paramtable.Get() + localPath = "/tmp/milvus_test/chunkmanager/" +) func TestMain(m *testing.M) { paramtable.Init() @@ -322,6 +323,7 @@ func (m *mockFailedChunkManager) RemoveWithPrefix(ctx context.Context, prefix st } return nil } + func (m *mockFailedChunkManager) MultiRemove(ctx context.Context, key []string) error { if m.fail { return errors.New("multi remove error") diff --git a/internal/tso/global_allocator.go b/internal/tso/global_allocator.go index 3b6424866a..7d737387a5 100644 --- a/internal/tso/global_allocator.go +++ b/internal/tso/global_allocator.go @@ -34,11 +34,11 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/log" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.uber.org/zap" "github.com/milvus-io/milvus/internal/kv" + "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -139,7 +139,7 @@ func (gta *GlobalTSOAllocator) GenerateTSO(count uint32) (uint64, error) { // Alloc allocates a batch of timestamps. What is returned is the starting timestamp. 
func (gta *GlobalTSOAllocator) Alloc(count uint32) (typeutil.Timestamp, error) { - //return gta.tso.SyncTimestamp() + // return gta.tso.SyncTimestamp() start, err := gta.GenerateTSO(count) if err != nil { return typeutil.ZeroTimestamp, err diff --git a/internal/tso/tso.go b/internal/tso/tso.go index 9d7f3f0874..495ec510d1 100644 --- a/internal/tso/tso.go +++ b/internal/tso/tso.go @@ -36,11 +36,11 @@ import ( "unsafe" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.uber.org/zap" "github.com/milvus-io/milvus/internal/kv" "github.com/milvus-io/milvus/pkg/log" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) @@ -81,7 +81,7 @@ func (t *timestampOracle) loadTimestamp() (time.Time, error) { return typeutil.ZeroTime, nil } - var binData = []byte(strData) + binData := []byte(strData) if len(binData) == 0 { return typeutil.ZeroTime, nil } @@ -91,7 +91,7 @@ func (t *timestampOracle) loadTimestamp() (time.Time, error) { // save timestamp, if lastTs is 0, we think the timestamp doesn't exist, so create it, // otherwise, update it. func (t *timestampOracle) saveTimestamp(ts time.Time) error { - //we use big endian here for compatibility issues + // we use big endian here for compatibility issues data := typeutil.Uint64ToBytesBigEndian(uint64(ts.UnixNano())) err := t.txnKV.Save(t.key, string(data)) if err != nil { diff --git a/internal/types/types.go b/internal/types/types.go index c5df9214b1..de2f98f8cc 100644 --- a/internal/types/types.go +++ b/internal/types/types.go @@ -19,9 +19,7 @@ package types import ( "context" - "github.com/milvus-io/milvus/internal/util/streamrpc" "github.com/tikv/client-go/v2/txnkv" - clientv3 "go.etcd.io/etcd/client/v3" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" @@ -32,6 +30,7 @@ import ( "github.com/milvus-io/milvus/internal/proto/proxypb" "github.com/milvus-io/milvus/internal/proto/querypb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" + "github.com/milvus-io/milvus/internal/util/streamrpc" ) // TimeTickProvider is the interface all services implement @@ -54,8 +53,8 @@ type Component interface { GetComponentStates(ctx context.Context) (*milvuspb.ComponentStates, error) GetStatisticsChannel(ctx context.Context) (*milvuspb.StringResponse, error) Register() error - //SetAddress(address string) - //GetAddress() string + // SetAddress(address string) + // GetAddress() string } // DataNode is the interface `datanode` package implements @@ -408,14 +407,14 @@ type DataCoordComponent interface { // SetDataNodeCreator set DataNode client creator func for DataCoord SetDataNodeCreator(func(context.Context, string, int64) (DataNode, error)) - //SetIndexNodeCreator set Index client creator func for DataCoord + // SetIndexNodeCreator set Index client creator func for DataCoord SetIndexNodeCreator(func(context.Context, string, int64) (IndexNode, error)) } // IndexNode is the interface `indexnode` package implements type IndexNode interface { Component - //TimeTickProvider + // TimeTickProvider // BuildIndex receives request from IndexCoordinator to build an index. // Index building is asynchronous, so when an index building request comes, IndexNode records the task and returns. 
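Elsewhere in this patch, adjacent single-line const, var, and type declarations are folded into one parenthesized block, and comments gain a space after the slashes. An illustrative snippet with invented identifiers, mirroring the None/Zstd grouping applied later in grpc_encoder.go:

package example

// Compression codec names, grouped into one const block instead of two
// separate const lines. The identifiers are invented for illustration.
const (
	compressionNone = ""
	compressionZstd = "zstd"
)

// TODO: extend with additional codecs.
func codecName(useZstd bool) string {
	if useZstd {
		return compressionZstd
	}
	return compressionNone
}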
@@ -631,7 +630,7 @@ type RootCoord interface { // error is always nil //DescribeIndex(ctx context.Context, req *milvuspb.DescribeIndexRequest) (*milvuspb.DescribeIndexResponse, error) - //GetIndexState(ctx context.Context, req *milvuspb.GetIndexStateRequest) (*milvuspb.GetIndexStateResponse, error) + // GetIndexState(ctx context.Context, req *milvuspb.GetIndexStateRequest) (*milvuspb.GetIndexStateResponse, error) // DropIndex notifies RootCoord to drop the specified index for the specified field // @@ -913,7 +912,7 @@ type ProxyComponent interface { // `etcdClient` is a client of etcd SetEtcdClient(etcdClient *clientv3.Client) - //SetRootCoordClient set RootCoord for Proxy + // SetRootCoordClient set RootCoord for Proxy // `rootCoord` is a client of root coordinator. SetRootCoordClient(rootCoord RootCoord) @@ -923,7 +922,7 @@ type ProxyComponent interface { // SetIndexCoordClient set IndexCoord for Proxy // `indexCoord` is a client of index coordinator. - //SetIndexCoordClient(indexCoord IndexCoord) + // SetIndexCoordClient(indexCoord IndexCoord) // SetQueryCoordClient set QueryCoord for Proxy // `queryCoord` is a client of query coordinator. diff --git a/internal/util/componentutil/componentutil_test.go b/internal/util/componentutil/componentutil_test.go index bfa7f56b8c..60d4abca66 100644 --- a/internal/util/componentutil/componentutil_test.go +++ b/internal/util/componentutil/componentutil_test.go @@ -22,11 +22,11 @@ import ( "time" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/pkg/util/funcutil" - "github.com/stretchr/testify/assert" ) type MockComponent struct { diff --git a/internal/util/dependency/factory.go b/internal/util/dependency/factory.go index 6dc6326380..7611434597 100644 --- a/internal/util/dependency/factory.go +++ b/internal/util/dependency/factory.go @@ -4,12 +4,13 @@ import ( "context" "github.com/cockroachdb/errors" + "go.uber.org/zap" + smsgstream "github.com/milvus-io/milvus/internal/mq/msgstream" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/zap" ) const ( diff --git a/internal/util/flowgraph/flow_graph_test.go b/internal/util/flowgraph/flow_graph_test.go index f79145e4b1..dbe87b3717 100644 --- a/internal/util/flowgraph/flow_graph_test.go +++ b/internal/util/flowgraph/flow_graph_test.go @@ -24,8 +24,9 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/paramtable" ) // Flow graph basic example: count `c = pow(a) + 2` @@ -137,7 +138,7 @@ func createExampleFlowGraph() (*TimeTickedFlowGraph, chan float64, chan float64, fg.AddNode(b) fg.AddNode(c) - var err = fg.SetEdges(a.Name(), + err := fg.SetEdges(a.Name(), []string{b.Name()}, ) if err != nil { diff --git a/internal/util/flowgraph/input_node.go b/internal/util/flowgraph/input_node.go index a847c98366..6100f7cdf2 100644 --- a/internal/util/flowgraph/input_node.go +++ b/internal/util/flowgraph/input_node.go @@ -21,17 +21,16 @@ import ( "fmt" "sync" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/trace" - - "github.com/milvus-io/milvus/pkg/util/typeutil" + "go.uber.org/atomic" + "go.uber.org/zap" "github.com/milvus-io/milvus/pkg/log" 
"github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/mq/msgstream" - "go.uber.org/atomic" - "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/util/tsoutil" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) const ( diff --git a/internal/util/flowgraph/input_node_test.go b/internal/util/flowgraph/input_node_test.go index f9440165d2..84c1c396ed 100644 --- a/internal/util/flowgraph/input_node_test.go +++ b/internal/util/flowgraph/input_node_test.go @@ -20,9 +20,9 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/internal/util/dependency" "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/internal/util/dependency" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" ) diff --git a/internal/util/flowgraph/message_test.go b/internal/util/flowgraph/message_test.go index adb35ac1a5..720aa631f2 100644 --- a/internal/util/flowgraph/message_test.go +++ b/internal/util/flowgraph/message_test.go @@ -20,8 +20,9 @@ import ( "context" "testing" - "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/mq/msgstream" ) type MockMsg struct { @@ -77,7 +78,6 @@ func (bm *MockMsg) Position() *MsgPosition { } func (bm *MockMsg) SetPosition(position *MsgPosition) { - } func (bm *MockMsg) Size() int { diff --git a/internal/util/flowgraph/node.go b/internal/util/flowgraph/node.go index 1e956fec95..9962b5c765 100644 --- a/internal/util/flowgraph/node.go +++ b/internal/util/flowgraph/node.go @@ -21,10 +21,10 @@ import ( "sync" "time" - "github.com/milvus-io/milvus/pkg/util/timerecord" + "go.uber.org/zap" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/util/timerecord" ) const ( diff --git a/internal/util/funcutil/count_util.go b/internal/util/funcutil/count_util.go index 9521f7fdef..f00b3c430d 100644 --- a/internal/util/funcutil/count_util.go +++ b/internal/util/funcutil/count_util.go @@ -3,9 +3,8 @@ package funcutil import ( "fmt" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/proto/segcorepb" diff --git a/internal/util/funcutil/count_util_test.go b/internal/util/funcutil/count_util_test.go index 620582c571..db4c430d72 100644 --- a/internal/util/funcutil/count_util_test.go +++ b/internal/util/funcutil/count_util_test.go @@ -3,11 +3,11 @@ package funcutil import ( "testing" - "github.com/milvus-io/milvus/internal/proto/segcorepb" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/proto/internalpb" - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/internal/proto/segcorepb" ) func TestCntOfInternalResult(t *testing.T) { @@ -20,7 +20,6 @@ func TestCntOfInternalResult(t *testing.T) { }) t.Run("normal case", func(t *testing.T) { - res := WrapCntToInternalResult(5) cnt, err := CntOfInternalResult(res) assert.NoError(t, err) @@ -38,7 +37,6 @@ func TestCntOfSegCoreResult(t *testing.T) { }) t.Run("normal case", func(t *testing.T) { - res := WrapCntToSegCoreResult(5) cnt, err := CntOfSegCoreResult(res) assert.NoError(t, err) @@ -56,7 +54,6 @@ func TestCntOfFieldData(t *testing.T) { }) t.Run("not long data", func(t *testing.T) { - f := &schemapb.FieldData{ Field: &schemapb.FieldData_Scalars{ 
Scalars: &schemapb.ScalarField{ @@ -69,7 +66,6 @@ func TestCntOfFieldData(t *testing.T) { }) t.Run("more than one row", func(t *testing.T) { - f := &schemapb.FieldData{ Field: &schemapb.FieldData_Scalars{ Scalars: &schemapb.ScalarField{ @@ -86,7 +82,6 @@ func TestCntOfFieldData(t *testing.T) { }) t.Run("more than one row", func(t *testing.T) { - f := WrapCntToFieldData(1000) cnt, err := CntOfFieldData(f) assert.NoError(t, err) diff --git a/internal/util/grpcclient/client.go b/internal/util/grpcclient/client.go index 933fecab9a..9f78e0a011 100644 --- a/internal/util/grpcclient/client.go +++ b/internal/util/grpcclient/client.go @@ -25,6 +25,16 @@ import ( "github.com/cockroachdb/errors" grpc_middleware "github.com/grpc-ecosystem/go-grpc-middleware" + "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" + "go.uber.org/atomic" + "go.uber.org/zap" + "google.golang.org/grpc" + "google.golang.org/grpc/backoff" + "google.golang.org/grpc/codes" + "google.golang.org/grpc/credentials" + "google.golang.org/grpc/credentials/insecure" + "google.golang.org/grpc/keepalive" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/util/sessionutil" @@ -38,15 +48,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/retry" - "go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc" - "go.uber.org/atomic" - "go.uber.org/zap" - "google.golang.org/grpc" - "google.golang.org/grpc/backoff" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/credentials" - "google.golang.org/grpc/credentials/insecure" - "google.golang.org/grpc/keepalive" ) // GrpcClient abstracts client of grpc @@ -461,7 +462,6 @@ func (c *ClientBase[T]) call(ctx context.Context, caller func(client T) (any, er retry.MaxSleepTime(time.Duration(c.MaxBackoff*1000)*time.Millisecond)) // default value list: MaxAttempts 10, InitialBackoff 0.2s, MaxBackoff 10s // and consume 52.8s if all retry failed - if err != nil { // make the error more friendly to user if IsCrossClusterRoutingErr(err) { diff --git a/internal/util/grpcclient/client_test.go b/internal/util/grpcclient/client_test.go index dd18c64985..7a403fdcd0 100644 --- a/internal/util/grpcclient/client_test.go +++ b/internal/util/grpcclient/client_test.go @@ -27,21 +27,19 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus/pkg/util/merr" - + "github.com/stretchr/testify/assert" + "google.golang.org/grpc" + "google.golang.org/grpc/codes" "google.golang.org/grpc/examples/helloworld/helloworld" "google.golang.org/grpc/keepalive" + "google.golang.org/grpc/reflection" + "google.golang.org/grpc/status" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/rootcoordpb" + "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/pkg/util/typeutil" - "google.golang.org/grpc" - "google.golang.org/grpc/reflection" - - "github.com/stretchr/testify/assert" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/status" ) func TestMain(m *testing.M) { @@ -309,7 +307,6 @@ func TestClientBase_Recall(t *testing.T) { assert.Error(t, err) assert.True(t, errors.Is(err, ErrConnect)) }) - } type server struct { @@ -335,11 +332,11 @@ func TestClientBase_RetryPolicy(t *testing.T) { if err != nil { log.Fatalf("failed to listen: %v", err) } - var kaep 
= keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, PermitWithoutStream: true, } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, Timeout: 60 * time.Second, } @@ -397,11 +394,11 @@ func TestClientBase_Compression(t *testing.T) { if err != nil { log.Fatalf("failed to listen: %v", err) } - var kaep = keepalive.EnforcementPolicy{ + kaep := keepalive.EnforcementPolicy{ MinTime: 5 * time.Second, PermitWithoutStream: true, } - var kasp = keepalive.ServerParameters{ + kasp := keepalive.ServerParameters{ Time: 60 * time.Second, Timeout: 60 * time.Second, } diff --git a/internal/util/grpcclient/grpc_encoder.go b/internal/util/grpcclient/grpc_encoder.go index fbc7c1369d..276008eae7 100644 --- a/internal/util/grpcclient/grpc_encoder.go +++ b/internal/util/grpcclient/grpc_encoder.go @@ -25,8 +25,10 @@ import ( "google.golang.org/grpc/encoding" ) -const None = "" -const Zstd = "zstd" +const ( + None = "" + Zstd = "zstd" +) type grpcCompressor struct { encoder *zstd.Encoder diff --git a/internal/util/importutil/binlog_adapter.go b/internal/util/importutil/binlog_adapter.go index 2b14094b10..a257ace5d9 100644 --- a/internal/util/importutil/binlog_adapter.go +++ b/internal/util/importutil/binlog_adapter.go @@ -24,13 +24,13 @@ import ( "strings" "github.com/cockroachdb/errors" + "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" ) // A struct to hold insert log paths and delta log paths of a segment @@ -74,7 +74,8 @@ func NewBinlogAdapter(ctx context.Context, chunkManager storage.ChunkManager, flushFunc ImportFlushFunc, tsStartPoint uint64, - tsEndPoint uint64) (*BinlogAdapter, error) { + tsEndPoint uint64, +) (*BinlogAdapter, error) { if collectionInfo == nil { log.Warn("Binlog adapter: collection schema is nil") return nil, errors.New("collection schema is nil") @@ -513,7 +514,8 @@ func (p *BinlogAdapter) readPrimaryKeys(logPath string) ([]int64, []string, erro func (p *BinlogAdapter) getShardingListByPrimaryInt64(primaryKeys []int64, timestampList []int64, memoryData []ShardData, - intDeletedList map[int64]uint64) ([]int32, error) { + intDeletedList map[int64]uint64, +) ([]int32, error) { if len(timestampList) != len(primaryKeys) { log.Warn("Binlog adapter: primary key length is not equal to timestamp list length", zap.Int("primaryKeysLen", len(primaryKeys)), zap.Int("timestampLen", len(timestampList))) @@ -566,7 +568,8 @@ func (p *BinlogAdapter) getShardingListByPrimaryInt64(primaryKeys []int64, func (p *BinlogAdapter) getShardingListByPrimaryVarchar(primaryKeys []string, timestampList []int64, memoryData []ShardData, - strDeletedList map[string]uint64) ([]int32, error) { + strDeletedList map[string]uint64, +) ([]int32, error) { if len(timestampList) != len(primaryKeys) { log.Warn("Binlog adapter: primary key length is not equal to timestamp list length", zap.Int("primaryKeysLen", len(primaryKeys)), zap.Int("timestampLen", len(timestampList))) @@ -637,7 +640,8 @@ func (p *BinlogAdapter) verifyField(fieldID storage.FieldID, memoryData []ShardD // the no.2, no.4, no.6, no.8, no.10 will be put into shard_1 // Note: the row count of insert log need to be equal to length of shardList func (p *BinlogAdapter) readInsertlog(fieldID storage.FieldID, logPath string, - memoryData []ShardData, shardList 
[]int32) error { + memoryData []ShardData, shardList []int32, +) error { err := p.verifyField(fieldID, memoryData) if err != nil { log.Warn("Binlog adapter: could not read binlog file", zap.String("logPath", logPath), zap.Error(err)) @@ -779,7 +783,8 @@ func (p *BinlogAdapter) readInsertlog(fieldID storage.FieldID, logPath string, } func (p *BinlogAdapter) dispatchBoolToShards(data []bool, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: bool field row count is not equal to shard list row count %d", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -813,7 +818,8 @@ func (p *BinlogAdapter) dispatchBoolToShards(data []bool, memoryData []ShardData } func (p *BinlogAdapter) dispatchInt8ToShards(data []int8, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: int8 field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -847,7 +853,8 @@ func (p *BinlogAdapter) dispatchInt8ToShards(data []int8, memoryData []ShardData } func (p *BinlogAdapter) dispatchInt16ToShards(data []int16, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: int16 field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -881,7 +888,8 @@ func (p *BinlogAdapter) dispatchInt16ToShards(data []int16, memoryData []ShardDa } func (p *BinlogAdapter) dispatchInt32ToShards(data []int32, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: int32 field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -915,7 +923,8 @@ func (p *BinlogAdapter) dispatchInt32ToShards(data []int32, memoryData []ShardDa } func (p *BinlogAdapter) dispatchInt64ToShards(data []int64, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: int64 field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -949,7 +958,8 @@ func (p *BinlogAdapter) dispatchInt64ToShards(data []int64, memoryData []ShardDa } func (p *BinlogAdapter) dispatchFloatToShards(data []float32, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: float field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -983,7 +993,8 @@ func (p *BinlogAdapter) dispatchFloatToShards(data []float32, memoryData []Shard } func (p *BinlogAdapter) dispatchDoubleToShards(data []float64, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID 
storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: double field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -1017,7 +1028,8 @@ func (p *BinlogAdapter) dispatchDoubleToShards(data []float64, memoryData []Shar } func (p *BinlogAdapter) dispatchVarcharToShards(data []string, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: varchar field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -1051,7 +1063,8 @@ func (p *BinlogAdapter) dispatchVarcharToShards(data []string, memoryData []Shar } func (p *BinlogAdapter) dispatchBytesToShards(data [][]byte, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count if len(data) != len(shardList) { log.Warn("Binlog adapter: JSON field row count is not equal to shard list row count", zap.Int("dataLen", len(data)), zap.Int("shardLen", len(shardList))) @@ -1085,7 +1098,8 @@ func (p *BinlogAdapter) dispatchBytesToShards(data [][]byte, memoryData []ShardD } func (p *BinlogAdapter) dispatchBinaryVecToShards(data []byte, dim int, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count bytesPerVector := dim / 8 count := len(data) / bytesPerVector @@ -1132,7 +1146,8 @@ func (p *BinlogAdapter) dispatchBinaryVecToShards(data []byte, dim int, memoryDa } func (p *BinlogAdapter) dispatchFloatVecToShards(data []float32, dim int, memoryData []ShardData, - shardList []int32, fieldID storage.FieldID) error { + shardList []int32, fieldID storage.FieldID, +) error { // verify row count count := len(data) / dim if count != len(shardList) { diff --git a/internal/util/importutil/binlog_adapter_test.go b/internal/util/importutil/binlog_adapter_test.go index 7fda1d3667..866169a797 100644 --- a/internal/util/importutil/binlog_adapter_test.go +++ b/internal/util/importutil/binlog_adapter_test.go @@ -23,11 +23,11 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" ) const ( diff --git a/internal/util/importutil/binlog_file.go b/internal/util/importutil/binlog_file.go index 36cff16660..2353de7299 100644 --- a/internal/util/importutil/binlog_file.go +++ b/internal/util/importutil/binlog_file.go @@ -21,11 +21,11 @@ import ( "fmt" "github.com/cockroachdb/errors" + "go.uber.org/zap" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) // BinlogFile class is a wrapper of storage.BinlogReader, to read binlog file, block by block. 
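The signature reflows in the binlog adapter hunks above wrap long parameter lists so the last parameter carries a trailing comma and the closing parenthesis sits on its own line before the return type. A sketch of the resulting shape on an invented function with a simplified body:

package example

import "fmt"

// dispatchToShards mirrors the wrapped-signature layout; the logic is a
// stand-in and does not reproduce the patch's dispatch functions.
func dispatchToShards(data []int64, shards [][]int64,
	shardList []int32, fieldID int64,
) error {
	if len(data) != len(shardList) {
		return fmt.Errorf("field %d: %d rows but %d shard entries", fieldID, len(data), len(shardList))
	}
	for i, v := range data {
		shards[shardList[i]] = append(shards[shardList[i]], v)
	}
	return nil
}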
diff --git a/internal/util/importutil/binlog_file_test.go b/internal/util/importutil/binlog_file_test.go index fcb753b0e0..4a80983d8f 100644 --- a/internal/util/importutil/binlog_file_test.go +++ b/internal/util/importutil/binlog_file_test.go @@ -21,10 +21,10 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/storage" - "github.com/stretchr/testify/assert" ) func createBinlogBuf(t *testing.T, dataType schemapb.DataType, data interface{}) []byte { diff --git a/internal/util/importutil/binlog_parser.go b/internal/util/importutil/binlog_parser.go index cd6d56e3bb..9ea2ce039a 100644 --- a/internal/util/importutil/binlog_parser.go +++ b/internal/util/importutil/binlog_parser.go @@ -25,10 +25,10 @@ import ( "strings" "github.com/cockroachdb/errors" + "go.uber.org/zap" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) type BinlogParser struct { @@ -60,7 +60,8 @@ func NewBinlogParser(ctx context.Context, flushFunc ImportFlushFunc, updateProgressFunc func(percent int64), tsStartPoint uint64, - tsEndPoint uint64) (*BinlogParser, error) { + tsEndPoint uint64, +) (*BinlogParser, error) { if collectionInfo == nil { log.Warn("Binlog parser: collection schema is nil") return nil, errors.New("collection schema is nil") diff --git a/internal/util/importutil/binlog_parser_test.go b/internal/util/importutil/binlog_parser_test.go index 8d7a152ee2..d2be3fd63f 100644 --- a/internal/util/importutil/binlog_parser_test.go +++ b/internal/util/importutil/binlog_parser_test.go @@ -23,9 +23,9 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/stretchr/testify/assert" ) func Test_BinlogParserNew(t *testing.T) { diff --git a/internal/util/importutil/collection_info.go b/internal/util/importutil/collection_info.go index ad0194a45f..00a38e5190 100644 --- a/internal/util/importutil/collection_info.go +++ b/internal/util/importutil/collection_info.go @@ -55,7 +55,8 @@ func DeduceTargetPartitions(partitions map[string]int64, collectionSchema *schem func NewCollectionInfo(collectionSchema *schemapb.CollectionSchema, shardNum int32, - partitionIDs []int64) (*CollectionInfo, error) { + partitionIDs []int64, +) (*CollectionInfo, error) { if shardNum <= 0 { return nil, fmt.Errorf("illegal shard number %d", shardNum) } diff --git a/internal/util/importutil/collection_info_test.go b/internal/util/importutil/collection_info_test.go index e1ec73c068..3ae97699eb 100644 --- a/internal/util/importutil/collection_info_test.go +++ b/internal/util/importutil/collection_info_test.go @@ -18,8 +18,9 @@ package importutil import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) func Test_DeduceTargetPartitions(t *testing.T) { diff --git a/internal/util/importutil/import_options_test.go b/internal/util/importutil/import_options_test.go index e7de6dfb0a..9bb58476a2 100644 --- a/internal/util/importutil/import_options_test.go +++ b/internal/util/importutil/import_options_test.go @@ -20,12 +20,12 @@ import ( "math" "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) func Test_ValidateOptions(t *testing.T) { - 
assert.NoError(t, ValidateOptions([]*commonpb.KeyValuePair{})) assert.NoError(t, ValidateOptions([]*commonpb.KeyValuePair{ {Key: "start_ts", Value: "1666007457"}, diff --git a/internal/util/importutil/import_util.go b/internal/util/importutil/import_util.go index 636f6beafd..bed5d8aaec 100644 --- a/internal/util/importutil/import_util.go +++ b/internal/util/importutil/import_util.go @@ -37,8 +37,10 @@ import ( "github.com/milvus-io/milvus/pkg/util/typeutil" ) -type BlockData map[storage.FieldID]storage.FieldData // a map of field ID to field data -type ShardData map[int64]BlockData // a map of partition ID to block data +type ( + BlockData map[storage.FieldID]storage.FieldData // a map of field ID to field data + ShardData map[int64]BlockData // a map of partition ID to block data +) func isCanceled(ctx context.Context) bool { // canceled? @@ -154,7 +156,7 @@ type Validator struct { isString bool // for string field dimension int // only for vector field fieldName string // field name - fieldID int64 //field ID + fieldID int64 // field ID } // initValidators constructs valiator methods and data conversion methods @@ -469,8 +471,8 @@ func tryFlushBlocks(ctx context.Context, callFlushFunc ImportFlushFunc, blockSize int64, maxTotalSize int64, - force bool) error { - + force bool, +) error { totalSize := 0 biggestSize := 0 biggestItem := -1 diff --git a/internal/util/importutil/import_util_test.go b/internal/util/importutil/import_util_test.go index 88bb820f1c..859b746f46 100644 --- a/internal/util/importutil/import_util_test.go +++ b/internal/util/importutil/import_util_test.go @@ -24,13 +24,13 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/stretchr/testify/assert" ) // sampleSchema() return a schema contains all supported data types with an int64 primary key @@ -381,14 +381,14 @@ func createBlockData(collectionSchema *schemapb.CollectionSchema, fieldsData map } func createShardsData(collectionSchema *schemapb.CollectionSchema, fieldsData map[storage.FieldID]interface{}, - shardNum int32, partitionIDs []int64) []ShardData { + shardNum int32, partitionIDs []int64, +) []ShardData { shardsData := make([]ShardData, 0, shardNum) for i := 0; i < int(shardNum); i++ { shardData := make(ShardData) for p := 0; p < len(partitionIDs); p++ { blockData := createBlockData(collectionSchema, fieldsData) shardData[partitionIDs[p]] = blockData - } shardsData = append(shardsData, shardData) } diff --git a/internal/util/importutil/import_wrapper.go b/internal/util/importutil/import_wrapper.go index 70ef5cfe1b..35fa92b678 100644 --- a/internal/util/importutil/import_wrapper.go +++ b/internal/util/importutil/import_wrapper.go @@ -68,11 +68,13 @@ const ( // ReportImportAttempts is the maximum # of attempts to retry when import fails. 
var ReportImportAttempts uint = 10 -type ImportFlushFunc func(fields BlockData, shardID int, partID int64) error -type AssignSegmentFunc func(shardID int, partID int64) (int64, string, error) -type CreateBinlogsFunc func(fields BlockData, segmentID int64, partID int64) ([]*datapb.FieldBinlog, []*datapb.FieldBinlog, error) -type SaveSegmentFunc func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, segmentID int64, targetChName string, rowCount int64, partID int64) error -type ReportFunc func(res *rootcoordpb.ImportResult) error +type ( + ImportFlushFunc func(fields BlockData, shardID int, partID int64) error + AssignSegmentFunc func(shardID int, partID int64) (int64, string, error) + CreateBinlogsFunc func(fields BlockData, segmentID int64, partID int64) ([]*datapb.FieldBinlog, []*datapb.FieldBinlog, error) + SaveSegmentFunc func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, segmentID int64, targetChName string, rowCount int64, partID int64) error + ReportFunc func(res *rootcoordpb.ImportResult) error +) type WorkingSegment struct { segmentID int64 // segment ID @@ -107,7 +109,8 @@ type ImportWrapper struct { func NewImportWrapper(ctx context.Context, collectionInfo *CollectionInfo, segmentSize int64, idAlloc *allocator.IDAllocator, cm storage.ChunkManager, importResult *rootcoordpb.ImportResult, - reportFunc func(res *rootcoordpb.ImportResult) error) *ImportWrapper { + reportFunc func(res *rootcoordpb.ImportResult) error, +) *ImportWrapper { if collectionInfo == nil || collectionInfo.Schema == nil { log.Warn("import wrapper: collection schema is nil") return nil @@ -424,7 +427,7 @@ func (p *ImportWrapper) parseRowBasedJSON(filePath string, onlyValidate bool) er } } else { flushFunc = func(fields BlockData, shardID int, partitionID int64) error { - var filePaths = []string{filePath} + filePaths := []string{filePath} printFieldsDataInfo(fields, "import wrapper: prepare to flush binlogs", filePaths) return p.flushFunc(fields, shardID, partitionID) } diff --git a/internal/util/importutil/import_wrapper_test.go b/internal/util/importutil/import_wrapper_test.go index ca74fc5643..d274ca8c78 100644 --- a/internal/util/importutil/import_wrapper_test.go +++ b/internal/util/importutil/import_wrapper_test.go @@ -29,7 +29,6 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" "golang.org/x/exp/mmap" @@ -171,7 +170,8 @@ func createMockCallbackFunctions(t *testing.T, rowCounter *rowCounterTest) (Assi } saveSegmentFunc := func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, - segmentID int64, targetChName string, rowCount int64, partID int64) error { + segmentID int64, targetChName string, rowCount int64, partID int64, + ) error { return nil } @@ -215,7 +215,8 @@ func Test_ImportWrapperNew(t *testing.T) { return nil, nil, nil } saveBinFunc := func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, - segmentID int64, targetChName string, rowCount int64, partID int64) error { + segmentID int64, targetChName string, rowCount int64, partID int64, + ) error { return nil } @@ -908,7 +909,8 @@ func Test_ImportWrapperReportPersisted(t *testing.T) { // error when closing segments wrapper.saveSegmentFunc = func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, - segmentID int64, targetChName string, rowCount int64, partID int64) error { + segmentID int64, targetChName string, rowCount int64, partID int64, + ) error { return errors.New("error") } 
wrapper.workingSegments[0] = map[int64]*WorkingSegment{ @@ -919,7 +921,8 @@ func Test_ImportWrapperReportPersisted(t *testing.T) { // failed to report wrapper.saveSegmentFunc = func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, - segmentID int64, targetChName string, rowCount int64, partID int64) error { + segmentID int64, targetChName string, rowCount int64, partID int64, + ) error { return nil } wrapper.reportFunc = func(res *rootcoordpb.ImportResult) error { @@ -995,7 +998,8 @@ func Test_ImportWrapperFlushFunc(t *testing.T) { t.Run("close segment, saveSegmentFunc returns error", func(t *testing.T) { wrapper.saveSegmentFunc = func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, - segmentID int64, targetChName string, rowCount int64, partID int64) error { + segmentID int64, targetChName string, rowCount int64, partID int64, + ) error { return errors.New("error") } wrapper.segmentSize = 1 @@ -1020,7 +1024,8 @@ func Test_ImportWrapperFlushFunc(t *testing.T) { t.Run("createBinlogsFunc returns error", func(t *testing.T) { wrapper.saveSegmentFunc = func(fieldsInsert []*datapb.FieldBinlog, fieldsStats []*datapb.FieldBinlog, - segmentID int64, targetChName string, rowCount int64, partID int64) error { + segmentID int64, targetChName string, rowCount int64, partID int64, + ) error { return nil } wrapper.assignSegmentFunc = func(shardID int, partID int64) (int64, string, error) { diff --git a/internal/util/importutil/json_handler.go b/internal/util/importutil/json_handler.go index 6cef1eeb24..eec84d3d84 100644 --- a/internal/util/importutil/json_handler.go +++ b/internal/util/importutil/json_handler.go @@ -23,7 +23,6 @@ import ( "strconv" "github.com/cockroachdb/errors" - "go.uber.org/zap" "github.com/milvus-io/milvus/internal/allocator" @@ -72,7 +71,8 @@ func NewJSONRowConsumer(ctx context.Context, collectionInfo *CollectionInfo, idAlloc *allocator.IDAllocator, blockSize int64, - flushFunc ImportFlushFunc) (*JSONRowConsumer, error) { + flushFunc ImportFlushFunc, +) (*JSONRowConsumer, error) { if collectionInfo == nil { log.Warn("JSON row consumer: collection schema is nil") return nil, errors.New("collection schema is nil") diff --git a/internal/util/importutil/json_handler_test.go b/internal/util/importutil/json_handler_test.go index 59188b2e81..0af9a7a991 100644 --- a/internal/util/importutil/json_handler_test.go +++ b/internal/util/importutil/json_handler_test.go @@ -23,7 +23,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" diff --git a/internal/util/importutil/json_parser.go b/internal/util/importutil/json_parser.go index 5667847dc1..d187b5c4b8 100644 --- a/internal/util/importutil/json_parser.go +++ b/internal/util/importutil/json_parser.go @@ -24,13 +24,13 @@ import ( "strings" "github.com/cockroachdb/errors" + "go.uber.org/zap" + "golang.org/x/exp/maps" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/zap" - "golang.org/x/exp/maps" ) const ( diff --git a/internal/util/importutil/json_parser_test.go b/internal/util/importutil/json_parser_test.go index ceadbec0c3..e241c1dc63 100644 --- a/internal/util/importutil/json_parser_test.go +++ b/internal/util/importutil/json_parser_test.go @@ -26,12 +26,12 @@ import ( "testing" "github.com/cockroachdb/errors" + "github.com/stretchr/testify/assert" 
"github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" ) // mock class of JSONRowCounsumer @@ -83,7 +83,6 @@ func Test_AdjustBufSize(t *testing.T) { assert.NotNil(t, parser) assert.Greater(t, parser.bufRowCount, 0) adjustBufSize(parser, schema) - } func Test_JSONParserParseRows_IntPK(t *testing.T) { diff --git a/internal/util/importutil/numpy_adapter.go b/internal/util/importutil/numpy_adapter.go index 17c6832717..690540187c 100644 --- a/internal/util/importutil/numpy_adapter.go +++ b/internal/util/importutil/numpy_adapter.go @@ -29,13 +29,13 @@ import ( "unicode/utf8" "github.com/cockroachdb/errors" - - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/milvus-io/milvus/pkg/log" "github.com/sbinet/npyio" "github.com/sbinet/npyio/npy" "go.uber.org/zap" "golang.org/x/text/encoding/unicode" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/log" ) var ( diff --git a/internal/util/importutil/numpy_adapter_test.go b/internal/util/importutil/numpy_adapter_test.go index 3e5f1fa58b..06ac172be5 100644 --- a/internal/util/importutil/numpy_adapter_test.go +++ b/internal/util/importutil/numpy_adapter_test.go @@ -25,13 +25,13 @@ import ( "strings" "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/sbinet/npyio/npy" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) -type MockReader struct { -} +type MockReader struct{} func (r *MockReader) Read(p []byte) (n int, err error) { return 0, io.EOF diff --git a/internal/util/importutil/numpy_parser.go b/internal/util/importutil/numpy_parser.go index 3b3f74b039..ef5b0f951a 100644 --- a/internal/util/importutil/numpy_parser.go +++ b/internal/util/importutil/numpy_parser.go @@ -72,7 +72,8 @@ func NewNumpyParser(ctx context.Context, blockSize int64, chunkManager storage.ChunkManager, flushFunc ImportFlushFunc, - updateProgressFunc func(percent int64)) (*NumpyParser, error) { + updateProgressFunc func(percent int64), +) (*NumpyParser, error) { if collectionInfo == nil { log.Warn("Numper parser: collection schema is nil") return nil, errors.New("collection schema is nil") @@ -600,7 +601,6 @@ func (p *NumpyParser) readData(columnReader *NumpyColumnReader, rowCount int) (s log.Warn("Numpy parser: illegal value in float vector array", zap.Error(err)) return nil, fmt.Errorf("illegal value in float vector array: %s", err.Error()) } - } else if elementType == schemapb.DataType_Double { data = make([]float32, 0, columnReader.rowCount) data64, err := columnReader.reader.ReadFloat64(rowCount * columnReader.dimension) diff --git a/internal/util/importutil/numpy_parser_test.go b/internal/util/importutil/numpy_parser_test.go index 6b687ef351..be545fe48a 100644 --- a/internal/util/importutil/numpy_parser_test.go +++ b/internal/util/importutil/numpy_parser_test.go @@ -24,7 +24,6 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" diff --git a/internal/util/indexcgowrapper/index.go b/internal/util/indexcgowrapper/index.go index c8ed6378ee..9aea7b1718 100644 --- a/internal/util/indexcgowrapper/index.go +++ b/internal/util/indexcgowrapper/index.go @@ -7,6 +7,7 @@ package indexcgowrapper #include "indexbuilder/index_c.h" */ import "C" + import ( "context" "fmt" @@ -18,7 +19,6 
@@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/milvus-io/milvus/internal/proto/indexcgopb" "github.com/milvus-io/milvus/internal/storage" "github.com/milvus-io/milvus/pkg/log" @@ -41,9 +41,7 @@ type CodecIndex interface { UpLoad() (map[string]int64, error) } -var ( - _ CodecIndex = (*CgoIndex)(nil) -) +var _ CodecIndex = (*CgoIndex)(nil) type CgoIndex struct { indexPtr C.CIndex diff --git a/internal/util/indexcgowrapper/index_test.go b/internal/util/indexcgowrapper/index_test.go index 97369bbc13..8678bc227f 100644 --- a/internal/util/indexcgowrapper/index_test.go +++ b/internal/util/indexcgowrapper/index_test.go @@ -185,9 +185,9 @@ func TestCIndex_Codec(t *testing.T) { err = copyIndex.Load(blobs) assert.Equal(t, err, nil) // IVF_FLAT_NM index don't support load and serialize - //copyBlobs, err := copyIndex.Serialize() - //assert.Equal(t, err, nil) - //assert.Equal(t, len(blobs), len(copyBlobs)) + // copyBlobs, err := copyIndex.Serialize() + // assert.Equal(t, err, nil) + // assert.Equal(t, len(blobs), len(copyBlobs)) // TODO: check key, value and more err = index.Delete() @@ -224,10 +224,11 @@ func TestCIndex_Error(t *testing.T) { }) t.Run("Load error", func(t *testing.T) { - blobs := []*Blob{{ - Key: "test", - Value: []byte("value"), - }, + blobs := []*Blob{ + { + Key: "test", + Value: []byte("value"), + }, } err = indexPtr.Load(blobs) assert.Error(t, err) diff --git a/internal/util/metrics/c_registry.go b/internal/util/metrics/c_registry.go index 3e8973fc75..89771aff13 100644 --- a/internal/util/metrics/c_registry.go +++ b/internal/util/metrics/c_registry.go @@ -26,18 +26,19 @@ package metrics */ import "C" + import ( "sort" "strings" "sync" "unsafe" - "github.com/milvus-io/milvus/pkg/log" "github.com/prometheus/client_golang/prometheus" + dto "github.com/prometheus/client_model/go" "github.com/prometheus/common/expfmt" "go.uber.org/zap" - dto "github.com/prometheus/client_model/go" + "github.com/milvus-io/milvus/pkg/log" ) // metricSorter is a sortable slice of *dto.Metric. @@ -119,9 +120,7 @@ type CRegistry struct { // Gather implements Gatherer. 
func (r *CRegistry) Gather() (res []*dto.MetricFamily, err error) { - var ( - parser expfmt.TextParser - ) + var parser expfmt.TextParser r.mtx.RLock() cMetricsStr := C.GetKnowhereMetrics() diff --git a/internal/util/mock/datacoord_client.go b/internal/util/mock/datacoord_client.go index 4f70c8f659..8276afaa33 100644 --- a/internal/util/mock/datacoord_client.go +++ b/internal/util/mock/datacoord_client.go @@ -19,12 +19,13 @@ package mock import ( "context" + "google.golang.org/grpc" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/internalpb" "github.com/milvus-io/milvus/internal/types" - "google.golang.org/grpc" ) // DataCoordClient mocks of DataCoordClient @@ -112,6 +113,7 @@ func (m *DataCoordClient) GetCompactionStateWithPlans(ctx context.Context, req * func (m *DataCoordClient) WatchChannels(ctx context.Context, req *datapb.WatchChannelsRequest, opts ...grpc.CallOption) (*datapb.WatchChannelsResponse, error) { return &datapb.WatchChannelsResponse{}, m.Err } + func (m *DataCoordClient) GetFlushState(ctx context.Context, req *datapb.GetFlushStateRequest, opts ...grpc.CallOption) (*milvuspb.GetFlushStateResponse, error) { return &milvuspb.GetFlushStateResponse{}, m.Err } diff --git a/internal/util/mock/grpc_datacoord_client.go b/internal/util/mock/grpc_datacoord_client.go index a04634d158..894f760535 100644 --- a/internal/util/mock/grpc_datacoord_client.go +++ b/internal/util/mock/grpc_datacoord_client.go @@ -19,13 +19,12 @@ package mock import ( "context" - "github.com/milvus-io/milvus/internal/proto/indexpb" - "google.golang.org/grpc" "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/internal/proto/indexpb" "github.com/milvus-io/milvus/internal/proto/internalpb" ) @@ -178,7 +177,6 @@ func (m *GrpcDataCoordClient) MarkSegmentsDropped(context.Context, *datapb.MarkS func (m *GrpcDataCoordClient) BroadcastAlteredCollection(ctx context.Context, in *datapb.AlterCollectionRequest, opts ...grpc.CallOption) (*commonpb.Status, error) { return &commonpb.Status{}, m.Err - } func (m *GrpcDataCoordClient) CreateIndex(ctx context.Context, req *indexpb.CreateIndexRequest, opts ...grpc.CallOption) (*commonpb.Status, error) { diff --git a/internal/util/mock/grpc_stream_client.go b/internal/util/mock/grpc_stream_client.go index d26863d08b..2cf33c476e 100644 --- a/internal/util/mock/grpc_stream_client.go +++ b/internal/util/mock/grpc_stream_client.go @@ -19,10 +19,11 @@ package mock import ( "context" - "github.com/milvus-io/milvus/internal/proto/internalpb" - "github.com/milvus-io/milvus/internal/proto/querypb" "google.golang.org/grpc" "google.golang.org/grpc/metadata" + + "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/proto/querypb" ) var _ grpc.ClientStream = &MockClientStream{} diff --git a/internal/util/mock/grpcclient.go b/internal/util/mock/grpcclient.go index 50d83a0a22..b466f097c3 100644 --- a/internal/util/mock/grpcclient.go +++ b/internal/util/mock/grpcclient.go @@ -58,7 +58,6 @@ func (c *GRPCClientBase[T]) SetRole(role string) { } func (c *GRPCClientBase[T]) EnableEncryption() { - } func (c *GRPCClientBase[T]) SetNewGrpcClientFunc(f func(cc *grpc.ClientConn) T) { diff --git a/internal/util/pipeline/node.go b/internal/util/pipeline/node.go index 
e4fc7242b5..ad42e6318f 100644 --- a/internal/util/pipeline/node.go +++ b/internal/util/pipeline/node.go @@ -20,9 +20,10 @@ import ( "fmt" "sync" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/timerecord" - "go.uber.org/zap" ) type Node interface { @@ -66,7 +67,7 @@ func (c *nodeCtx) work() { for { select { - //close + // close case <-c.closeCh: c.node.Close() close(c.inputChannel) diff --git a/internal/util/pipeline/pipeline.go b/internal/util/pipeline/pipeline.go index 17bb3eea51..047bf65f48 100644 --- a/internal/util/pipeline/pipeline.go +++ b/internal/util/pipeline/pipeline.go @@ -19,9 +19,10 @@ package pipeline import ( "time" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/timerecord" - "go.uber.org/zap" ) type Pipeline interface { diff --git a/internal/util/pipeline/pipeline_test.go b/internal/util/pipeline/pipeline_test.go index 82793091c6..8ddeb9c355 100644 --- a/internal/util/pipeline/pipeline_test.go +++ b/internal/util/pipeline/pipeline_test.go @@ -19,8 +19,9 @@ package pipeline import ( "testing" - "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/stretchr/testify/suite" + + "github.com/milvus-io/milvus/pkg/mq/msgstream" ) type testNode struct { diff --git a/internal/util/pipeline/stream_pipeline.go b/internal/util/pipeline/stream_pipeline.go index 575369ce62..6cb6b6900e 100644 --- a/internal/util/pipeline/stream_pipeline.go +++ b/internal/util/pipeline/stream_pipeline.go @@ -21,13 +21,14 @@ import ( "sync" "time" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/mq/msgdispatcher" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/tsoutil" - "go.uber.org/zap" ) type StreamPipeline interface { diff --git a/internal/util/pipeline/stream_pipeline_test.go b/internal/util/pipeline/stream_pipeline_test.go index 3c21644837..7bf28a5a0c 100644 --- a/internal/util/pipeline/stream_pipeline_test.go +++ b/internal/util/pipeline/stream_pipeline_test.go @@ -20,12 +20,13 @@ import ( "fmt" "testing" + "github.com/stretchr/testify/mock" + "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/mq/msgdispatcher" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" - "github.com/stretchr/testify/mock" - "github.com/stretchr/testify/suite" ) type StreamPipelineSuite struct { @@ -33,10 +34,10 @@ type StreamPipelineSuite struct { pipeline StreamPipeline inChannel chan *msgstream.MsgPack outChannel chan msgstream.Timestamp - //data + // data length int channel string - //mock + // mock msgDispatcher *msgdispatcher.MockClient } diff --git a/internal/util/segmentutil/utils.go b/internal/util/segmentutil/utils.go index de20278070..3f183ef32b 100644 --- a/internal/util/segmentutil/utils.go +++ b/internal/util/segmentutil/utils.go @@ -1,9 +1,10 @@ package segmentutil import ( + "go.uber.org/zap" + "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/pkg/log" - "go.uber.org/zap" ) // ReCalcRowCount re-calculates number of rows of `oldSeg` based on its bin log count, and correct its value in its diff --git a/internal/util/sessionutil/session_util.go b/internal/util/sessionutil/session_util.go index 2788cc4eb2..c45fc40b51 100644 --- a/internal/util/sessionutil/session_util.go 
+++ b/internal/util/sessionutil/session_util.go @@ -176,7 +176,6 @@ func (s *Session) UnmarshalJSON(data []byte) error { // MarshalJSON marshals session to bytes. func (s *Session) MarshalJSON() ([]byte, error) { - verStr := s.Version.String() return json.Marshal(&struct { ServerID int64 `json:"ServerID,omitempty"` @@ -197,7 +196,6 @@ func (s *Session) MarshalJSON() ([]byte, error) { Version: verStr, LeaseID: s.leaseID, }) - } // NewSession is a helper to build Session object. @@ -426,7 +424,6 @@ func (s *Session) registerService() (<-chan *clientv3.LeaseKeepAliveResponse, er "=", 0)). Then(clientv3.OpPut(completeKey, string(sessionJSON), clientv3.WithLease(resp.ID))).Commit() - if err != nil { log.Warn("compare and swap error, maybe the key has already been registered", zap.Error(err)) return err @@ -491,7 +488,6 @@ func (s *Session) processKeepAliveResponse(ch <-chan *clientv3.LeaseKeepAliveRes keepAliveOnceResp = resp return err }, retry.Attempts(3)) - if err != nil { log.Warn("fail to retry keepAliveOnce", zap.String("serverName", s.ServerName), zap.Int64("leaseID", int64(*s.leaseID)), zap.Error(err)) s.safeCloseLiveCh() @@ -758,7 +754,7 @@ func (w *sessionWatcher) handleWatchResponse(wresp clientv3.WatchResponse) { func (w *sessionWatcher) handleWatchErr(err error) error { // if not ErrCompacted, just close the channel if err != v3rpc.ErrCompacted { - //close event channel + // close event channel log.Warn("Watch service found error", zap.Error(err)) close(w.eventCh) return err @@ -827,7 +823,7 @@ func (s *Session) LivenessCheck(ctx context.Context, callback func()) { if resp.Err() != nil { // if not ErrCompacted, just close the channel if resp.Err() != v3rpc.ErrCompacted { - //close event channel + // close event channel log.Warn("Watch service found error", zap.Error(resp.Err())) s.cancelKeepAlive() return @@ -1048,7 +1044,7 @@ func (s *Session) ForceActiveStandby(activateFunc func() error) error { if len(sessions) != 0 { activeSess := sessions[s.ServerName] if activeSess == nil || activeSess.leaseID == nil { - //force delete all old sessions + // force delete all old sessions s.etcdCli.Delete(s.ctx, s.activeKey) for _, sess := range sessions { if sess.ServerID != s.ServerID { diff --git a/internal/util/sessionutil/session_util_test.go b/internal/util/sessionutil/session_util_test.go index e2e1801e21..24a5713941 100644 --- a/internal/util/sessionutil/session_util_test.go +++ b/internal/util/sessionutil/session_util_test.go @@ -54,7 +54,7 @@ func TestGetServerIDConcurrently(t *testing.T) { defer etcdKV.RemoveWithPrefix("") var wg sync.WaitGroup - var muList = sync.Mutex{} + muList := sync.Mutex{} s := NewSession(ctx, metaRoot, etcdCli) res := make([]int64, 0) @@ -122,7 +122,7 @@ func TestUpdateSessions(t *testing.T) { defer etcdKV.RemoveWithPrefix("") var wg sync.WaitGroup - var muList = sync.Mutex{} + muList := sync.Mutex{} s := NewSession(ctx, metaRoot, etcdCli, WithResueNodeID(false)) @@ -365,9 +365,7 @@ func TestWatcherHandleWatchResp(t *testing.T) { assert.Panics(t, func() { w.handleWatchResponse(wresp) }) - }) - } func TestSession_Registered(t *testing.T) { @@ -491,7 +489,6 @@ func (suite *SessionWithVersionSuite) SetupTest() { s3.Register() suite.sessions = append(suite.sessions, s3) - } func (suite *SessionWithVersionSuite) TearDownTest() { @@ -640,7 +637,7 @@ func TestSessionProcessActiveStandBy(t *testing.T) { }) assert.True(t, s2.isStandby.Load().(bool)) - //assert.True(t, s2.watchingPrimaryKeyLock) + // assert.True(t, s2.watchingPrimaryKeyLock) // stop session 1, session 
2 will take over primary service log.Debug("Stop session 1, session 2 will take over primary service") assert.False(t, flag) diff --git a/internal/util/typeutil/hash.go b/internal/util/typeutil/hash.go index bdf6f6bca9..8815768336 100644 --- a/internal/util/typeutil/hash.go +++ b/internal/util/typeutil/hash.go @@ -35,7 +35,6 @@ func HashKey2Partitions(fieldSchema *schemapb.FieldSchema, keys []*planpb.Generi } default: return nil, errors.New("currently only support DataType Int64 or VarChar as partition keys") - } result := make([]string, 0) diff --git a/internal/util/typeutil/result_helper_test.go b/internal/util/typeutil/result_helper_test.go index 6820f58f3f..8d1cca1902 100644 --- a/internal/util/typeutil/result_helper_test.go +++ b/internal/util/typeutil/result_helper_test.go @@ -3,19 +3,15 @@ package typeutil import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus/pkg/util/typeutil" - - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - - "github.com/milvus-io/milvus/internal/proto/internalpb" - - "github.com/milvus-io/milvus/internal/proto/segcorepb" - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/internal/proto/internalpb" + "github.com/milvus-io/milvus/internal/proto/segcorepb" + "github.com/milvus-io/milvus/pkg/common" + "github.com/milvus-io/milvus/pkg/util/typeutil" ) func fieldDataEmpty(data *schemapb.FieldData) bool { diff --git a/pkg/Makefile b/pkg/Makefile index b478d71d19..13b897298c 100644 --- a/pkg/Makefile +++ b/pkg/Makefile @@ -15,4 +15,3 @@ generate-mockery: getdeps $(INSTALL_PATH)/mockery --name=MsgStream --dir=$(PWD)/mq/msgstream --output=$(PWD)/mq/msgstream --filename=mock_msgstream.go --with-expecter --structname=MockMsgStream --outpkg=msgstream --inpackage $(INSTALL_PATH)/mockery --name=Client --dir=$(PWD)/mq/msgdispatcher --output=$(PWD)/mq/msgsdispatcher --filename=mock_client.go --with-expecter --structname=MockClient --outpkg=msgdispatcher --inpackage $(INSTALL_PATH)/mockery --name=Logger --dir=$(PWD)/eventlog --output=$(PWD)/eventlog --filename=mock_logger.go --with-expecter --structname=MockLogger --outpkg=eventlog --inpackage - diff --git a/pkg/common/byte_slice_test.go b/pkg/common/byte_slice_test.go index 46f0c492d1..8a6385f078 100644 --- a/pkg/common/byte_slice_test.go +++ b/pkg/common/byte_slice_test.go @@ -30,7 +30,8 @@ func TestCloneByteSlice(t *testing.T) { { args: args{s: []byte{0xf0}}, want: []byte{0xf0}, - }, { + }, + { args: args{s: []byte{0x0, 0xff, 0x0f, 0xf0}}, want: []byte{0x0, 0xff, 0x0f, 0xf0}, }, diff --git a/pkg/common/error.go b/pkg/common/error.go index 81d815160e..1bc267cbfa 100644 --- a/pkg/common/error.go +++ b/pkg/common/error.go @@ -22,10 +22,8 @@ import ( "github.com/cockroachdb/errors" ) -var ( - // ErrNodeIDNotMatch stands for the error that grpc target id and node session id not match. - ErrNodeIDNotMatch = errors.New("target node id not match") -) +// ErrNodeIDNotMatch stands for the error that grpc target id and node session id not match. +var ErrNodeIDNotMatch = errors.New("target node id not match") // WrapNodeIDNotMatchError wraps `ErrNodeIDNotMatch` with targetID and sessionID. 
func WrapNodeIDNotMatchError(targetID, nodeID int64) error { diff --git a/pkg/common/key_data_pairs_test.go b/pkg/common/key_data_pairs_test.go index 9beb7d0c31..ce9c74c1c0 100644 --- a/pkg/common/key_data_pairs_test.go +++ b/pkg/common/key_data_pairs_test.go @@ -3,8 +3,9 @@ package common import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) func TestCloneKeyDataPairs(t *testing.T) { diff --git a/pkg/common/key_value_pairs_test.go b/pkg/common/key_value_pairs_test.go index 0a18f2b5ea..761030921e 100644 --- a/pkg/common/key_value_pairs_test.go +++ b/pkg/common/key_value_pairs_test.go @@ -3,8 +3,9 @@ package common import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) func TestCloneKeyValuePairs(t *testing.T) { diff --git a/pkg/config/config.go b/pkg/config/config.go index de93941e6e..4e81220790 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -37,7 +37,6 @@ func Init(opts ...Option) (*Manager, error) { if o.FileInfo != nil { s := NewFileSource(o.FileInfo) sourceManager.AddSource(s) - } if o.EnvKeyFormatter != nil { sourceManager.AddSource(NewEnvSource(o.EnvKeyFormatter)) @@ -50,7 +49,6 @@ func Init(opts ...Option) (*Manager, error) { sourceManager.AddSource(s) } return sourceManager, nil - } func formatKey(key string) string { diff --git a/pkg/config/env_source.go b/pkg/config/env_source.go index 9c2138b5c3..abef8bb821 100644 --- a/pkg/config/env_source.go +++ b/pkg/config/env_source.go @@ -79,11 +79,10 @@ func (es EnvSource) GetSourceName() string { } func (es EnvSource) SetEventHandler(eh EventHandler) { - } + func (es EnvSource) UpdateOptions(opts Options) { } func (es EnvSource) Close() { - } diff --git a/pkg/config/event.go b/pkg/config/event.go index 993ac04e9b..e2f2173b7e 100644 --- a/pkg/config/event.go +++ b/pkg/config/event.go @@ -38,7 +38,6 @@ func newEvent(eventSource, eventType string, key string, value string) *Event { Value: value, HasUpdated: false, } - } func PopulateEvents(source string, currentConfig, updatedConfig map[string]string) ([]*Event, error) { diff --git a/pkg/config/manager.go b/pkg/config/manager.go index 11cf407a40..7a28797bbd 100644 --- a/pkg/config/manager.go +++ b/pkg/config/manager.go @@ -324,7 +324,6 @@ func (m *Manager) updateEvent(e *Event) error { m.keySourceMap[e.Key] = source.GetSourceName() } } - } log.Info("receive update event", zap.Any("event", e)) @@ -387,7 +386,7 @@ func (m *Manager) getHighPrioritySource(srcNameA, srcNameB string) Source { return sourceA } - if sourceA.GetPriority() < sourceB.GetPriority() { //less value has high priority + if sourceA.GetPriority() < sourceB.GetPriority() { // less value has high priority return sourceA } diff --git a/pkg/config/manager_test.go b/pkg/config/manager_test.go index 8f39cd5d34..2635f07979 100644 --- a/pkg/config/manager_test.go +++ b/pkg/config/manager_test.go @@ -38,8 +38,8 @@ func TestAllConfigFromManager(t *testing.T) { func TestConfigChangeEvent(t *testing.T) { dir, _ := os.MkdirTemp("", "milvus") - os.WriteFile(path.Join(dir, "milvus.yaml"), []byte("a.b: 1\nc.d: 2"), 0600) - os.WriteFile(path.Join(dir, "user.yaml"), []byte("a.b: 3"), 0600) + os.WriteFile(path.Join(dir, "milvus.yaml"), []byte("a.b: 1\nc.d: 2"), 0o600) + os.WriteFile(path.Join(dir, "user.yaml"), []byte("a.b: 3"), 0o600) fs := NewFileSource(&FileInfo{[]string{path.Join(dir, 
"milvus.yaml"), path.Join(dir, "user.yaml")}, 1}) mgr, _ := Init() @@ -48,7 +48,7 @@ func TestConfigChangeEvent(t *testing.T) { res, err := mgr.GetConfig("a.b") assert.NoError(t, err) assert.Equal(t, res, "3") - os.WriteFile(path.Join(dir, "user.yaml"), []byte("a.b: 6"), 0600) + os.WriteFile(path.Join(dir, "user.yaml"), []byte("a.b: 6"), 0o600) time.Sleep(3 * time.Second) res, err = mgr.GetConfig("a.b") assert.NoError(t, err) @@ -69,8 +69,7 @@ func TestAllDupliateSource(t *testing.T) { assert.Error(t, err, "invalid source or source not added") } -type ErrSource struct { -} +type ErrSource struct{} func (e ErrSource) Close() { } @@ -95,7 +94,6 @@ func (ErrSource) GetSourceName() string { } func (e ErrSource) SetEventHandler(eh EventHandler) { - } func (e ErrSource) UpdateOptions(opt Options) { diff --git a/pkg/config/refresher.go b/pkg/config/refresher.go index 353629cf00..4987d986b3 100644 --- a/pkg/config/refresher.go +++ b/pkg/config/refresher.go @@ -77,7 +77,6 @@ func (r *refresher) refreshPeriodically(name string) { return } } - } func (r *refresher) fireEvents(name string, source, target map[string]string) error { diff --git a/pkg/config/source.go b/pkg/config/source.go index 4095971e18..8382915797 100644 --- a/pkg/config/source.go +++ b/pkg/config/source.go @@ -44,7 +44,7 @@ type EtcdInfo struct { CaCertFile string MinVersion string - //Pull Configuration interval, unit is second + // Pull Configuration interval, unit is second RefreshInterval time.Duration } diff --git a/pkg/config/source_test.go b/pkg/config/source_test.go index 0836bc9be8..1b9068faf9 100644 --- a/pkg/config/source_test.go +++ b/pkg/config/source_test.go @@ -39,8 +39,8 @@ func TestLoadFromFileSource(t *testing.T) { t.Run("multiple files", func(t *testing.T) { dir, _ := os.MkdirTemp("", "milvus") - os.WriteFile(path.Join(dir, "milvus.yaml"), []byte("a.b: 1\nc.d: 2"), 0600) - os.WriteFile(path.Join(dir, "user.yaml"), []byte("a.b: 3"), 0600) + os.WriteFile(path.Join(dir, "milvus.yaml"), []byte("a.b: 1\nc.d: 2"), 0o600) + os.WriteFile(path.Join(dir, "user.yaml"), []byte("a.b: 3"), 0o600) fs := NewFileSource(&FileInfo{[]string{path.Join(dir, "milvus.yaml"), path.Join(dir, "user.yaml")}, -1}) fs.loadFromFile() diff --git a/pkg/eventlog/global.go b/pkg/eventlog/global.go index bd363d5096..13549ac234 100644 --- a/pkg/eventlog/global.go +++ b/pkg/eventlog/global.go @@ -17,9 +17,10 @@ package eventlog import ( + "go.uber.org/atomic" + "github.com/milvus-io/milvus/pkg/util/conc" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/atomic" ) var ( diff --git a/pkg/eventlog/grpc.go b/pkg/eventlog/grpc.go index ddb515869b..e3cd991180 100644 --- a/pkg/eventlog/grpc.go +++ b/pkg/eventlog/grpc.go @@ -21,11 +21,12 @@ import ( "sync" "time" + "go.uber.org/atomic" + "google.golang.org/grpc" + "github.com/milvus-io/milvus/pkg/util/conc" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/typeutil" - "go.uber.org/atomic" - "google.golang.org/grpc" ) var ( diff --git a/pkg/eventlog/handler.go b/pkg/eventlog/handler.go index 4ba5ea496d..7461d2c627 100644 --- a/pkg/eventlog/handler.go +++ b/pkg/eventlog/handler.go @@ -20,8 +20,9 @@ import ( "encoding/json" "net/http" - "github.com/milvus-io/milvus/pkg/log" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" ) const ( @@ -34,8 +35,7 @@ const ( ) // eventLogHandler is the event log http handler -type eventLogHandler struct { -} +type eventLogHandler struct{} func Handler() http.Handler { return &eventLogHandler{} diff --git 
a/pkg/eventlog/mock_logger.go b/pkg/eventlog/mock_logger.go index 0dc62c71eb..566126521a 100644 --- a/pkg/eventlog/mock_logger.go +++ b/pkg/eventlog/mock_logger.go @@ -69,7 +69,7 @@ type MockLogger_Record_Call struct { } // Record is a helper method to define mock.On call -// - _a0 Evt +// - _a0 Evt func (_e *MockLogger_Expecter) Record(_a0 interface{}) *MockLogger_Record_Call { return &MockLogger_Record_Call{Call: _e.mock.On("Record", _a0)} } @@ -102,8 +102,8 @@ type MockLogger_RecordFunc_Call struct { } // RecordFunc is a helper method to define mock.On call -// - _a0 Level -// - _a1 func() Evt +// - _a0 Level +// - _a1 func() Evt func (_e *MockLogger_Expecter) RecordFunc(_a0 interface{}, _a1 interface{}) *MockLogger_RecordFunc_Call { return &MockLogger_RecordFunc_Call{Call: _e.mock.On("RecordFunc", _a0, _a1)} } @@ -130,7 +130,8 @@ func (_c *MockLogger_RecordFunc_Call) RunAndReturn(run func(Level, func() Evt)) func NewMockLogger(t interface { mock.TestingT Cleanup(func()) -}) *MockLogger { +}, +) *MockLogger { mock := &MockLogger{} mock.Mock.Test(t) diff --git a/pkg/log/global.go b/pkg/log/global.go index 8fc1df50af..297c906fcf 100644 --- a/pkg/log/global.go +++ b/pkg/log/global.go @@ -22,9 +22,7 @@ import ( type ctxLogKeyType struct{} -var ( - CtxLogKey = ctxLogKeyType{} -) +var CtxLogKey = ctxLogKeyType{} // Debug logs a message at DebugLevel. The message includes any fields passed // at the log site, as well as any fields accumulated on the logger. diff --git a/pkg/log/log.go b/pkg/log/log.go index 25de71bde1..2a9d1947c1 100644 --- a/pkg/log/log.go +++ b/pkg/log/log.go @@ -64,7 +64,6 @@ func init() { r := utils.NewRateLimiter(1.0, 60.0) _globalR.Store(r) - } // InitLogger initializes a zap logger. @@ -218,8 +217,10 @@ func ReplaceGlobals(logger *zap.Logger, props *ZapProperties) { } func replaceLeveledLoggers(debugLogger *zap.Logger) { - levels := []zapcore.Level{zapcore.DebugLevel, zapcore.InfoLevel, zapcore.WarnLevel, zapcore.ErrorLevel, - zapcore.DPanicLevel, zapcore.PanicLevel, zapcore.FatalLevel} + levels := []zapcore.Level{ + zapcore.DebugLevel, zapcore.InfoLevel, zapcore.WarnLevel, zapcore.ErrorLevel, + zapcore.DPanicLevel, zapcore.PanicLevel, zapcore.FatalLevel, + } for _, level := range levels { levelL := debugLogger.WithOptions(zap.IncreaseLevel(level)) _globalLevelLogger.Store(level, levelL) diff --git a/pkg/log/log_test.go b/pkg/log/log_test.go index fa6d64897d..d42673244e 100644 --- a/pkg/log/log_test.go +++ b/pkg/log/log_test.go @@ -253,7 +253,6 @@ func TestLeveledLogger(t *testing.T) { SetLevel(zapcore.FatalLevel + 1) assert.Equal(t, ctxL(), L()) SetLevel(orgLevel) - } func TestStdAndFileLogger(t *testing.T) { diff --git a/pkg/metrics/datacoord_metrics.go b/pkg/metrics/datacoord_metrics.go index ab401dc583..61d33320d3 100644 --- a/pkg/metrics/datacoord_metrics.go +++ b/pkg/metrics/datacoord_metrics.go @@ -19,9 +19,10 @@ package metrics import ( "fmt" + "github.com/prometheus/client_golang/prometheus" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/util/typeutil" - "github.com/prometheus/client_golang/prometheus" ) const ( @@ -41,7 +42,7 @@ const ( ) var ( - //DataCoordNumDataNodes records the num of data nodes managed by DataCoord. + // DataCoordNumDataNodes records the num of data nodes managed by DataCoord. 
DataCoordNumDataNodes = prometheus.NewGaugeVec( prometheus.GaugeOpts{ Namespace: milvusNamespace, @@ -60,7 +61,7 @@ var ( segmentStateLabelName, }) - //DataCoordCollectionNum records the num of collections managed by DataCoord. + // DataCoordCollectionNum records the num of collections managed by DataCoord. DataCoordNumCollections = prometheus.NewGaugeVec( prometheus.GaugeOpts{ Namespace: milvusNamespace, diff --git a/pkg/metrics/proxy_metrics.go b/pkg/metrics/proxy_metrics.go index 32ecab9677..3b45aaf43f 100644 --- a/pkg/metrics/proxy_metrics.go +++ b/pkg/metrics/proxy_metrics.go @@ -334,26 +334,48 @@ func RegisterProxy(registry *prometheus.Registry) { } func CleanupCollectionMetrics(nodeID int64, collection string) { - ProxyCollectionSQLatency.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - queryTypeLabelName: SearchLabel, collectionName: collection}) - ProxyCollectionSQLatency.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - queryTypeLabelName: QueryLabel, collectionName: collection}) - ProxyCollectionMutationLatency.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - msgTypeLabelName: InsertLabel, collectionName: collection}) - ProxyCollectionMutationLatency.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - msgTypeLabelName: DeleteLabel, collectionName: collection}) - ProxyReceivedNQ.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - queryTypeLabelName: SearchLabel, collectionName: collection}) - ProxyReceivedNQ.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - queryTypeLabelName: QueryLabel, collectionName: collection}) - ProxyReceiveBytes.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - msgTypeLabelName: SearchLabel, collectionName: collection}) - ProxyReceiveBytes.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - msgTypeLabelName: QueryLabel, collectionName: collection}) - ProxyReceiveBytes.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - msgTypeLabelName: InsertLabel, collectionName: collection}) - ProxyReceiveBytes.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - msgTypeLabelName: DeleteLabel, collectionName: collection}) - ProxyReceiveBytes.Delete(prometheus.Labels{nodeIDLabelName: strconv.FormatInt(nodeID, 10), - msgTypeLabelName: UpsertLabel, collectionName: collection}) + ProxyCollectionSQLatency.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + queryTypeLabelName: SearchLabel, collectionName: collection, + }) + ProxyCollectionSQLatency.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + queryTypeLabelName: QueryLabel, collectionName: collection, + }) + ProxyCollectionMutationLatency.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + msgTypeLabelName: InsertLabel, collectionName: collection, + }) + ProxyCollectionMutationLatency.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + msgTypeLabelName: DeleteLabel, collectionName: collection, + }) + ProxyReceivedNQ.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + queryTypeLabelName: SearchLabel, collectionName: collection, + }) + ProxyReceivedNQ.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + queryTypeLabelName: QueryLabel, collectionName: collection, + }) + ProxyReceiveBytes.Delete(prometheus.Labels{ + nodeIDLabelName: 
strconv.FormatInt(nodeID, 10), + msgTypeLabelName: SearchLabel, collectionName: collection, + }) + ProxyReceiveBytes.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + msgTypeLabelName: QueryLabel, collectionName: collection, + }) + ProxyReceiveBytes.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + msgTypeLabelName: InsertLabel, collectionName: collection, + }) + ProxyReceiveBytes.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + msgTypeLabelName: DeleteLabel, collectionName: collection, + }) + ProxyReceiveBytes.Delete(prometheus.Labels{ + nodeIDLabelName: strconv.FormatInt(nodeID, 10), + msgTypeLabelName: UpsertLabel, collectionName: collection, + }) } diff --git a/pkg/metrics/rootcoord_metrics.go b/pkg/metrics/rootcoord_metrics.go index 1ce0e94a68..c73238f470 100644 --- a/pkg/metrics/rootcoord_metrics.go +++ b/pkg/metrics/rootcoord_metrics.go @@ -36,7 +36,7 @@ var ( Help: "count of DDL operations", }, []string{functionLabelName, statusLabelName}) - //RootCoordDDLReqLatency records the latency for read type of DDL operations. + // RootCoordDDLReqLatency records the latency for read type of DDL operations. RootCoordDDLReqLatency = prometheus.NewHistogramVec( prometheus.HistogramOpts{ Namespace: milvusNamespace, @@ -65,7 +65,7 @@ var ( Help: "count of ID allocated", }) - //RootCoordTimestamp records the number of timestamp allocations in RootCoord. + // RootCoordTimestamp records the number of timestamp allocations in RootCoord. RootCoordTimestamp = prometheus.NewGauge( prometheus.GaugeOpts{ Namespace: milvusNamespace, diff --git a/pkg/mq/msgdispatcher/client.go b/pkg/mq/msgdispatcher/client.go index 9247adfa1f..762bb1fe41 100644 --- a/pkg/mq/msgdispatcher/client.go +++ b/pkg/mq/msgdispatcher/client.go @@ -20,9 +20,9 @@ import ( "context" "sync" - "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" diff --git a/pkg/mq/msgdispatcher/dispatcher.go b/pkg/mq/msgdispatcher/dispatcher.go index 464b7cb433..ee552046dd 100644 --- a/pkg/mq/msgdispatcher/dispatcher.go +++ b/pkg/mq/msgdispatcher/dispatcher.go @@ -22,10 +22,10 @@ import ( "sync" "time" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "go.uber.org/atomic" "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/mq/msgstream" diff --git a/pkg/mq/msgdispatcher/manager.go b/pkg/mq/msgdispatcher/manager.go index d799536df8..ecd3d079ae 100644 --- a/pkg/mq/msgdispatcher/manager.go +++ b/pkg/mq/msgdispatcher/manager.go @@ -22,7 +22,6 @@ import ( "sync" "time" - "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/prometheus/client_golang/prometheus" "go.uber.org/zap" @@ -31,12 +30,11 @@ import ( "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/retry" + "github.com/milvus-io/milvus/pkg/util/tsoutil" "github.com/milvus-io/milvus/pkg/util/typeutil" ) -var ( - CheckPeriod = 1 * time.Second // TODO: dyh, move to config -) +var CheckPeriod = 1 * time.Second // TODO: dyh, move to config type DispatcherManager interface { Add(ctx context.Context, vchannel string, pos *Pos, subPos SubPos) (<-chan *MsgPack, error) diff --git 
a/pkg/mq/msgdispatcher/manager_test.go b/pkg/mq/msgdispatcher/manager_test.go index 96f5ea8f31..621767dd6e 100644 --- a/pkg/mq/msgdispatcher/manager_test.go +++ b/pkg/mq/msgdispatcher/manager_test.go @@ -25,10 +25,10 @@ import ( "testing" "time" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/mq/msgstream" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/typeutil" @@ -256,7 +256,7 @@ func (suite *SimulationSuite) consumeMsg(ctx context.Context, wg *sync.WaitGroup } func (suite *SimulationSuite) produceTimeTickOnly(ctx context.Context) { - var tt = 1 + tt := 1 ticker := time.NewTicker(10 * time.Millisecond) defer ticker.Stop() for { @@ -369,7 +369,6 @@ func (suite *SimulationSuite) TearDownTest() { } func (suite *SimulationSuite) TearDownSuite() { - } func TestSimulation(t *testing.T) { diff --git a/pkg/mq/msgdispatcher/mock_client.go b/pkg/mq/msgdispatcher/mock_client.go index 265d6d5f3b..4b99b5e8f4 100644 --- a/pkg/mq/msgdispatcher/mock_client.go +++ b/pkg/mq/msgdispatcher/mock_client.go @@ -69,7 +69,7 @@ type MockClient_Deregister_Call struct { } // Deregister is a helper method to define mock.On call -// - vchannel string +// - vchannel string func (_e *MockClient_Expecter) Deregister(vchannel interface{}) *MockClient_Deregister_Call { return &MockClient_Deregister_Call{Call: _e.mock.On("Deregister", vchannel)} } @@ -123,10 +123,10 @@ type MockClient_Register_Call struct { } // Register is a helper method to define mock.On call -// - ctx context.Context -// - vchannel string -// - pos *msgpb.MsgPosition -// - subPos mqwrapper.SubscriptionInitialPosition +// - ctx context.Context +// - vchannel string +// - pos *msgpb.MsgPosition +// - subPos mqwrapper.SubscriptionInitialPosition func (_e *MockClient_Expecter) Register(ctx interface{}, vchannel interface{}, pos interface{}, subPos interface{}) *MockClient_Register_Call { return &MockClient_Register_Call{Call: _e.mock.On("Register", ctx, vchannel, pos, subPos)} } @@ -153,7 +153,8 @@ func (_c *MockClient_Register_Call) RunAndReturn(run func(context.Context, strin func NewMockClient(t interface { mock.TestingT Cleanup(func()) -}) *MockClient { +}, +) *MockClient { mock := &MockClient{} mock.Mock.Test(t) diff --git a/pkg/mq/msgdispatcher/mock_test.go b/pkg/mq/msgdispatcher/mock_test.go index 6a64c34d7f..b1685cf0c3 100644 --- a/pkg/mq/msgdispatcher/mock_test.go +++ b/pkg/mq/msgdispatcher/mock_test.go @@ -199,7 +199,6 @@ func defaultInsertRepackFunc( tsMsgs []msgstream.TsMsg, hashKeys [][]int32, ) (map[int32]*msgstream.MsgPack, error) { - if len(hashKeys) < len(tsMsgs) { return nil, fmt.Errorf( "the length of hash keys (%d) is less than the length of messages (%d)", diff --git a/pkg/mq/msgstream/common_mq_factory.go b/pkg/mq/msgstream/common_mq_factory.go index c5e29e47ca..1e301f3943 100644 --- a/pkg/mq/msgstream/common_mq_factory.go +++ b/pkg/mq/msgstream/common_mq_factory.go @@ -4,6 +4,7 @@ import ( "context" "github.com/cockroachdb/errors" + "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" ) diff --git a/pkg/mq/msgstream/factory_stream_test.go b/pkg/mq/msgstream/factory_stream_test.go index 97ee0154fb..cb7ff8702c 100644 --- a/pkg/mq/msgstream/factory_stream_test.go +++ b/pkg/mq/msgstream/factory_stream_test.go @@ -8,12 +8,13 @@ import ( "runtime" "testing" + "github.com/stretchr/testify/assert" + 
"github.com/stretchr/testify/require" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/funcutil" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) type streamNewer func(ctx context.Context) (MsgStream, error) diff --git a/pkg/mq/msgstream/mock_msgstream.go b/pkg/mq/msgstream/mock_msgstream.go index f0bdca37fc..44adffd7da 100644 --- a/pkg/mq/msgstream/mock_msgstream.go +++ b/pkg/mq/msgstream/mock_msgstream.go @@ -44,10 +44,10 @@ type MockMsgStream_AsConsumer_Call struct { } // AsConsumer is a helper method to define mock.On call -// - ctx context.Context -// - channels []string -// - subName string -// - position mqwrapper.SubscriptionInitialPosition +// - ctx context.Context +// - channels []string +// - subName string +// - position mqwrapper.SubscriptionInitialPosition func (_e *MockMsgStream_Expecter) AsConsumer(ctx interface{}, channels interface{}, subName interface{}, position interface{}) *MockMsgStream_AsConsumer_Call { return &MockMsgStream_AsConsumer_Call{Call: _e.mock.On("AsConsumer", ctx, channels, subName, position)} } @@ -80,7 +80,7 @@ type MockMsgStream_AsProducer_Call struct { } // AsProducer is a helper method to define mock.On call -// - channels []string +// - channels []string func (_e *MockMsgStream_Expecter) AsProducer(channels interface{}) *MockMsgStream_AsProducer_Call { return &MockMsgStream_AsProducer_Call{Call: _e.mock.On("AsProducer", channels)} } @@ -134,7 +134,7 @@ type MockMsgStream_Broadcast_Call struct { } // Broadcast is a helper method to define mock.On call -// - _a0 *MsgPack +// - _a0 *MsgPack func (_e *MockMsgStream_Expecter) Broadcast(_a0 interface{}) *MockMsgStream_Broadcast_Call { return &MockMsgStream_Broadcast_Call{Call: _e.mock.On("Broadcast", _a0)} } @@ -219,7 +219,7 @@ type MockMsgStream_CheckTopicValid_Call struct { } // CheckTopicValid is a helper method to define mock.On call -// - channel string +// - channel string func (_e *MockMsgStream_Expecter) CheckTopicValid(channel interface{}) *MockMsgStream_CheckTopicValid_Call { return &MockMsgStream_CheckTopicValid_Call{Call: _e.mock.On("CheckTopicValid", channel)} } @@ -305,7 +305,7 @@ type MockMsgStream_GetLatestMsgID_Call struct { } // GetLatestMsgID is a helper method to define mock.On call -// - channel string +// - channel string func (_e *MockMsgStream_Expecter) GetLatestMsgID(channel interface{}) *MockMsgStream_GetLatestMsgID_Call { return &MockMsgStream_GetLatestMsgID_Call{Call: _e.mock.On("GetLatestMsgID", channel)} } @@ -390,7 +390,7 @@ type MockMsgStream_Produce_Call struct { } // Produce is a helper method to define mock.On call -// - _a0 *MsgPack +// - _a0 *MsgPack func (_e *MockMsgStream_Expecter) Produce(_a0 interface{}) *MockMsgStream_Produce_Call { return &MockMsgStream_Produce_Call{Call: _e.mock.On("Produce", _a0)} } @@ -432,8 +432,8 @@ type MockMsgStream_Seek_Call struct { } // Seek is a helper method to define mock.On call -// - ctx context.Context -// - offset []*msgpb.MsgPosition +// - ctx context.Context +// - offset []*msgpb.MsgPosition func (_e *MockMsgStream_Expecter) Seek(ctx interface{}, offset interface{}) *MockMsgStream_Seek_Call { return &MockMsgStream_Seek_Call{Call: _e.mock.On("Seek", ctx, offset)} } @@ -466,7 +466,7 @@ type MockMsgStream_SetRepackFunc_Call struct { } // SetRepackFunc is a helper method to define mock.On call -// - repackFunc RepackFunc +// - repackFunc 
RepackFunc func (_e *MockMsgStream_Expecter) SetRepackFunc(repackFunc interface{}) *MockMsgStream_SetRepackFunc_Call { return &MockMsgStream_SetRepackFunc_Call{Call: _e.mock.On("SetRepackFunc", repackFunc)} } @@ -493,7 +493,8 @@ func (_c *MockMsgStream_SetRepackFunc_Call) RunAndReturn(run func(RepackFunc)) * func NewMockMsgStream(t interface { mock.TestingT Cleanup(func()) -}) *MockMsgStream { +}, +) *MockMsgStream { mock := &MockMsgStream{} mock.Mock.Test(t) diff --git a/pkg/mq/msgstream/mq_factory_test.go b/pkg/mq/msgstream/mq_factory_test.go index 2b4a8d50cb..1f1d161335 100644 --- a/pkg/mq/msgstream/mq_factory_test.go +++ b/pkg/mq/msgstream/mq_factory_test.go @@ -100,7 +100,6 @@ func TestPmsFactoryWithAuth(t *testing.T) { _, err = pmsFactory.NewTtMsgStream(ctx) assert.Error(t, err) - } func TestKafkaFactory(t *testing.T) { diff --git a/pkg/mq/msgstream/mq_kafka_msgstream_test.go b/pkg/mq/msgstream/mq_kafka_msgstream_test.go index 7be61313b0..468d4e054a 100644 --- a/pkg/mq/msgstream/mq_kafka_msgstream_test.go +++ b/pkg/mq/msgstream/mq_kafka_msgstream_test.go @@ -23,10 +23,10 @@ import ( "testing" "github.com/confluentinc/confluent-kafka-go/kafka" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" kafkawrapper "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper/kafka" "github.com/milvus-io/milvus/pkg/util/funcutil" @@ -392,7 +392,6 @@ func TestStream_KafkaTtMsgStream_2(t *testing.T) { cnt1 := (len(msgPacks1)/2 - 1) * len(msgPacks1[0].Msgs) cnt2 := (len(msgPacks2)/2 - 1) * len(msgPacks2[0].Msgs) assert.Equal(t, (cnt1 + cnt2), msgCount) - } func TestStream_KafkaTtMsgStream_DataNodeTimetickMsgstream(t *testing.T) { diff --git a/pkg/mq/msgstream/mq_msgstream.go b/pkg/mq/msgstream/mq_msgstream.go index e7f3bcb6d2..13e03b905e 100644 --- a/pkg/mq/msgstream/mq_msgstream.go +++ b/pkg/mq/msgstream/mq_msgstream.go @@ -26,11 +26,11 @@ import ( "github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/samber/lo" "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/merr" @@ -67,8 +67,8 @@ func NewMqMsgStream(ctx context.Context, receiveBufSize int64, bufSize int64, client mqwrapper.Client, - unmarshal UnmarshalDispatcher) (*mqMsgStream, error) { - + unmarshal UnmarshalDispatcher, +) (*mqMsgStream, error) { streamCtx, streamCancel := context.WithCancel(ctx) producers := make(map[string]mqwrapper.Producer) consumers := make(map[string]mqwrapper.Consumer) @@ -214,7 +214,6 @@ func (ms *mqMsgStream) Close() { ms.client.Close() close(ms.receiveBuf) - } func (ms *mqMsgStream) ComputeProduceChannelIndexes(tsMsgs []TsMsg) [][]int32 { @@ -474,7 +473,8 @@ func NewMqTtMsgStream(ctx context.Context, receiveBufSize int64, bufSize int64, client mqwrapper.Client, - unmarshal UnmarshalDispatcher) (*MqTtMsgStream, error) { + unmarshal UnmarshalDispatcher, +) (*MqTtMsgStream, error) { msgStream, err := NewMqMsgStream(ctx, receiveBufSize, bufSize, client, unmarshal) if err != nil { return nil, err diff --git 
a/pkg/mq/msgstream/mq_msgstream_test.go b/pkg/mq/msgstream/mq_msgstream_test.go index afa2570227..b6396eb452 100644 --- a/pkg/mq/msgstream/mq_msgstream_test.go +++ b/pkg/mq/msgstream/mq_msgstream_test.go @@ -29,12 +29,12 @@ import ( "github.com/apache/pulsar-client-go/pulsar" "github.com/cockroachdb/errors" "github.com/confluentinc/confluent-kafka-go/kafka" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.uber.org/atomic" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" pulsarwrapper "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper/pulsar" "github.com/milvus-io/milvus/pkg/util/funcutil" diff --git a/pkg/mq/msgstream/mqwrapper/kafka/kafka_client.go b/pkg/mq/msgstream/mqwrapper/kafka/kafka_client.go index ce31ecfbf4..d8e45bdcbb 100644 --- a/pkg/mq/msgstream/mqwrapper/kafka/kafka_client.go +++ b/pkg/mq/msgstream/mqwrapper/kafka/kafka_client.go @@ -46,7 +46,6 @@ func getBasicConfig(address string) kafka.ConfigMap { func NewKafkaClientInstance(address string) *kafkaClient { config := getBasicConfig(address) return NewKafkaClientInstanceWithConfigMap(config, kafka.ConfigMap{}, kafka.ConfigMap{}) - } func NewKafkaClientInstanceWithConfigMap(config kafka.ConfigMap, extraConsumerConfig kafka.ConfigMap, extraProducerConfig kafka.ConfigMap) *kafkaClient { @@ -93,7 +92,6 @@ func NewKafkaClientInstanceWithConfig(ctx context.Context, config *paramtable.Ka kafkaConfig, specExtraConfig(config.ConsumerExtraConfig.GetValue()), specExtraConfig(config.ProducerExtraConfig.GetValue())), nil - } func cloneKafkaConfig(config kafka.ConfigMap) *kafka.ConfigMap { @@ -152,7 +150,7 @@ func (kc *kafkaClient) newProducerConfig() *kafka.ConfigMap { // we want to ensure tt send out as soon as possible newConf.SetKey("linger.ms", 2) - //special producer config + // special producer config kc.specialExtraConfig(newConf, kc.producerConfig) return newConf @@ -163,9 +161,9 @@ func (kc *kafkaClient) newConsumerConfig(group string, offset mqwrapper.Subscrip newConf.SetKey("group.id", group) newConf.SetKey("enable.auto.commit", false) - //Kafka default will not create topics if consumer's the topics don't exist. - //In order to compatible with other MQ, we need to enable the following configuration, - //meanwhile, some implementation also try to consume a non-exist topic, such as dataCoordTimeTick. + // Kafka default will not create topics if consumer's the topics don't exist. + // In order to compatible with other MQ, we need to enable the following configuration, + // meanwhile, some implementation also try to consume a non-exist topic, such as dataCoordTimeTick. 
newConf.SetKey("allow.auto.create.topics", true) kc.specialExtraConfig(newConf, kc.consumerConfig) diff --git a/pkg/mq/msgstream/mqwrapper/kafka/kafka_client_test.go b/pkg/mq/msgstream/mqwrapper/kafka/kafka_client_test.go index 2d75ae98f4..8b392e3606 100644 --- a/pkg/mq/msgstream/mqwrapper/kafka/kafka_client_test.go +++ b/pkg/mq/msgstream/mqwrapper/kafka/kafka_client_test.go @@ -407,7 +407,8 @@ func createConsumer(t *testing.T, kc *kafkaClient, topic string, groupID string, - initPosition mqwrapper.SubscriptionInitialPosition) mqwrapper.Consumer { + initPosition mqwrapper.SubscriptionInitialPosition, +) mqwrapper.Consumer { consumer, err := kc.Subscribe(mqwrapper.ConsumerOptions{ Topic: topic, SubscriptionName: groupID, diff --git a/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer.go b/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer.go index 18ed40f96b..ca19779230 100644 --- a/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer.go +++ b/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer.go @@ -197,7 +197,8 @@ func (kc *Consumer) internalSeek(offset kafka.Offset, inclusive bool) error { if err := kc.c.Seek(kafka.TopicPartition{ Topic: &kc.topic, Partition: mqwrapper.DefaultPartitionIdx, - Offset: offset}, timeout); err != nil { + Offset: offset, + }, timeout); err != nil { return err } cost = time.Since(start).Milliseconds() diff --git a/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer_test.go b/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer_test.go index 5078c541e3..4fe5bc4bd9 100644 --- a/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer_test.go +++ b/pkg/mq/msgstream/mqwrapper/kafka/kafka_consumer_test.go @@ -113,8 +113,7 @@ func TestKafkaConsumer_ChanWithNoAssign(t *testing.T) { }) } -type mockMsgID struct { -} +type mockMsgID struct{} func (m2 mockMsgID) AtEarliestPosition() bool { return false diff --git a/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer.go b/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer.go index e9e1a2dd01..f2f0ec4e43 100644 --- a/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer.go +++ b/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer.go @@ -7,12 +7,13 @@ import ( "time" "github.com/confluentinc/confluent-kafka-go/kafka" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/timerecord" - "go.uber.org/zap" ) type kafkaProducer struct { @@ -47,7 +48,6 @@ func (kp *kafkaProducer) Send(ctx context.Context, message *mqwrapper.ProducerMe Value: message.Payload, Headers: headers, }, kp.deliveryChan) - if err != nil { metrics.MsgStreamOpCounter.WithLabelValues(metrics.SendMsgLabel, metrics.FailLabel).Inc() return nil, err @@ -78,7 +78,7 @@ func (kp *kafkaProducer) Close() { kp.isClosed = true start := time.Now() - //flush in-flight msg within queue. + // flush in-flight msg within queue. 
i := kp.p.Flush(10000) if i > 0 { log.Warn("There are still un-flushed outstanding events", zap.Int("event_num", i), zap.Any("topic", kp.topic)) diff --git a/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer_test.go b/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer_test.go index 9c80a19d1f..3ddbde0269 100644 --- a/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer_test.go +++ b/pkg/mq/msgstream/mqwrapper/kafka/kafka_producer_test.go @@ -44,7 +44,6 @@ func TestKafkaProducer_SendSuccess(t *testing.T) { func TestKafkaProducer_SendFail(t *testing.T) { kafkaAddress := getKafkaBrokerList() { - deliveryChan := make(chan kafka.Event, 1) rand.Seed(time.Now().UnixNano()) topic := fmt.Sprintf("test-topic-%d", rand.Int()) diff --git a/pkg/mq/msgstream/mqwrapper/nmq/nmq_id_test.go b/pkg/mq/msgstream/mqwrapper/nmq/nmq_id_test.go index 4044031d04..5593499f00 100644 --- a/pkg/mq/msgstream/mqwrapper/nmq/nmq_id_test.go +++ b/pkg/mq/msgstream/mqwrapper/nmq/nmq_id_test.go @@ -79,7 +79,6 @@ func Test_Equal(t *testing.T) { ret, err := rid1.Equal(rid1.Serialize()) assert.NoError(t, err) assert.True(t, ret) - } { diff --git a/pkg/mq/msgstream/mqwrapper/nmq/nmq_message.go b/pkg/mq/msgstream/mqwrapper/nmq/nmq_message.go index cdcb2e281a..833245aa85 100644 --- a/pkg/mq/msgstream/mqwrapper/nmq/nmq_message.go +++ b/pkg/mq/msgstream/mqwrapper/nmq/nmq_message.go @@ -19,9 +19,10 @@ package nmq import ( "log" - "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/nats-io/nats.go" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" ) // Check nmqMessage implements ConsumerMessage diff --git a/pkg/mq/msgstream/mqwrapper/nmq/nmq_producer.go b/pkg/mq/msgstream/mqwrapper/nmq/nmq_producer.go index e6335f8c65..26c627e5aa 100644 --- a/pkg/mq/msgstream/mqwrapper/nmq/nmq_producer.go +++ b/pkg/mq/msgstream/mqwrapper/nmq/nmq_producer.go @@ -19,12 +19,13 @@ package nmq import ( "context" + "github.com/nats-io/nats.go" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/timerecord" - "github.com/nats-io/nats.go" - "go.uber.org/zap" ) var _ mqwrapper.Producer = (*nmqProducer)(nil) diff --git a/pkg/mq/msgstream/mqwrapper/nmq/nmq_server_test.go b/pkg/mq/msgstream/mqwrapper/nmq/nmq_server_test.go index 53a5b1509d..c65e169919 100644 --- a/pkg/mq/msgstream/mqwrapper/nmq/nmq_server_test.go +++ b/pkg/mq/msgstream/mqwrapper/nmq/nmq_server_test.go @@ -22,9 +22,10 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/nats-io/nats-server/v2/server" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/paramtable" ) var natsServerAddress string diff --git a/pkg/mq/msgstream/mqwrapper/producer.go b/pkg/mq/msgstream/mqwrapper/producer.go index 270e1dec51..caf43688d9 100644 --- a/pkg/mq/msgstream/mqwrapper/producer.go +++ b/pkg/mq/msgstream/mqwrapper/producer.go @@ -40,7 +40,7 @@ type ProducerMessage struct { // Producer is the interface that provides operations of producer type Producer interface { // return the topic which producer is publishing to - //Topic() string + // Topic() string // publish a message Send(ctx context.Context, message *ProducerMessage) (MessageID, error) diff --git a/pkg/mq/msgstream/mqwrapper/pulsar/pulsar_client_test.go b/pkg/mq/msgstream/mqwrapper/pulsar/pulsar_client_test.go index 68d5f48764..751532a750 100644 --- a/pkg/mq/msgstream/mqwrapper/pulsar/pulsar_client_test.go +++ 
b/pkg/mq/msgstream/mqwrapper/pulsar/pulsar_client_test.go @@ -135,7 +135,7 @@ func Consume1(ctx context.Context, t *testing.T, pc *pulsarClient, topic string, consumer.Ack(msg) VerifyMessage(t, msg) (*total)++ - //log.Debug("total", zap.Int("val", *total)) + // log.Debug("total", zap.Int("val", *total)) } } c <- msg.ID() @@ -174,7 +174,7 @@ func Consume2(ctx context.Context, t *testing.T, pc *pulsarClient, topic string, consumer.Ack(msg) VerifyMessage(t, msg) (*total)++ - //log.Debug("total", zap.Int("val", *total)) + // log.Debug("total", zap.Int("val", *total)) } } } @@ -201,7 +201,7 @@ func Consume3(ctx context.Context, t *testing.T, pc *pulsarClient, topic string, consumer.Ack(msg) VerifyMessage(t, msg) (*total)++ - //log.Debug("total", zap.Int("val", *total)) + // log.Debug("total", zap.Int("val", *total)) } } } @@ -284,7 +284,7 @@ func Consume21(ctx context.Context, t *testing.T, pc *pulsarClient, topic string v := BytesToInt(msg.Payload()) log.Info("RECV", zap.Any("v", v)) (*total)++ - //log.Debug("total", zap.Int("val", *total)) + // log.Debug("total", zap.Int("val", *total)) } } c <- &pulsarID{messageID: msg.ID()} @@ -324,7 +324,7 @@ func Consume22(ctx context.Context, t *testing.T, pc *pulsarClient, topic string v := BytesToInt(msg.Payload()) log.Info("RECV", zap.Any("v", v)) (*total)++ - //log.Debug("total", zap.Int("val", *total)) + // log.Debug("total", zap.Int("val", *total)) } } } @@ -352,7 +352,7 @@ func Consume23(ctx context.Context, t *testing.T, pc *pulsarClient, topic string v := BytesToInt(msg.Payload()) log.Info("RECV", zap.Any("v", v)) (*total)++ - //log.Debug("total", zap.Int("val", *total)) + // log.Debug("total", zap.Int("val", *total)) } } } diff --git a/pkg/mq/msgstream/msg.go b/pkg/mq/msgstream/msg.go index f05f868407..2db757a0f6 100644 --- a/pkg/mq/msgstream/msg.go +++ b/pkg/mq/msgstream/msg.go @@ -23,10 +23,10 @@ import ( "github.com/cockroachdb/errors" "github.com/golang/protobuf/proto" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/milvus-io/milvus/pkg/util/commonpbutil" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/typeutil" diff --git a/pkg/mq/msgstream/msg_test.go b/pkg/mq/msgstream/msg_test.go index 4acad24596..20e7b4c81c 100644 --- a/pkg/mq/msgstream/msg_test.go +++ b/pkg/mq/msgstream/msg_test.go @@ -20,10 +20,11 @@ import ( "context" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" - "github.com/stretchr/testify/assert" ) func TestBaseMsg(t *testing.T) { diff --git a/pkg/mq/msgstream/msgstream.go b/pkg/mq/msgstream/msgstream.go index d95bd84412..ebe0123b35 100644 --- a/pkg/mq/msgstream/msgstream.go +++ b/pkg/mq/msgstream/msgstream.go @@ -20,7 +20,6 @@ import ( "context" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" - "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/typeutil" ) diff --git a/pkg/mq/msgstream/repack_func.go b/pkg/mq/msgstream/repack_func.go index 54c86cc839..bbc38b64fe 100644 --- a/pkg/mq/msgstream/repack_func.go +++ b/pkg/mq/msgstream/repack_func.go @@ -20,6 +20,7 @@ import ( "fmt" "github.com/cockroachdb/errors" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) diff --git a/pkg/mq/msgstream/stream_bench_test.go 
b/pkg/mq/msgstream/stream_bench_test.go index 7c02ad8cee..823fbf637d 100644 --- a/pkg/mq/msgstream/stream_bench_test.go +++ b/pkg/mq/msgstream/stream_bench_test.go @@ -8,11 +8,12 @@ import ( "sync" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper/nmq" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" ) func BenchmarkProduceAndConsumeNatsMQ(b *testing.B) { diff --git a/pkg/mq/msgstream/stream_test.go b/pkg/mq/msgstream/stream_test.go index d297ea4365..fea2746fbe 100644 --- a/pkg/mq/msgstream/stream_test.go +++ b/pkg/mq/msgstream/stream_test.go @@ -9,11 +9,12 @@ import ( "testing" "time" + "github.com/stretchr/testify/assert" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/mq/msgstream/mqwrapper" "github.com/milvus-io/milvus/pkg/util/funcutil" - "github.com/stretchr/testify/assert" - "go.uber.org/zap" ) func testStreamOperation(t *testing.T, mqClient mqwrapper.Client) { diff --git a/pkg/mq/msgstream/trace.go b/pkg/mq/msgstream/trace.go index 55719ae50e..db1d027615 100644 --- a/pkg/mq/msgstream/trace.go +++ b/pkg/mq/msgstream/trace.go @@ -19,11 +19,12 @@ package msgstream import ( "context" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "go.opentelemetry.io/otel" "go.opentelemetry.io/otel/attribute" "go.opentelemetry.io/otel/propagation" "go.opentelemetry.io/otel/trace" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) // ExtractCtx extracts trace span from msg.properties. diff --git a/pkg/mq/msgstream/unmarshal.go b/pkg/mq/msgstream/unmarshal.go index 89e66e1f79..f06349aebd 100644 --- a/pkg/mq/msgstream/unmarshal.go +++ b/pkg/mq/msgstream/unmarshal.go @@ -18,6 +18,7 @@ package msgstream import ( "github.com/cockroachdb/errors" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) diff --git a/pkg/mq/msgstream/unmarshal_test.go b/pkg/mq/msgstream/unmarshal_test.go index b2413e743e..962102bafe 100644 --- a/pkg/mq/msgstream/unmarshal_test.go +++ b/pkg/mq/msgstream/unmarshal_test.go @@ -20,9 +20,10 @@ import ( "testing" "github.com/golang/protobuf/proto" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/msgpb" - "github.com/stretchr/testify/assert" ) func Test_ProtoUnmarshalDispatcher(t *testing.T) { diff --git a/pkg/tracer/interceptor_suite.go b/pkg/tracer/interceptor_suite.go index 2d94bfc582..15ac8b0fe4 100644 --- a/pkg/tracer/interceptor_suite.go +++ b/pkg/tracer/interceptor_suite.go @@ -21,21 +21,19 @@ import ( "go.opentelemetry.io/otel" ) -var ( - filterFunc = func(info *otelgrpc.InterceptorInfo) bool { - var fullMethod string - if info.UnaryServerInfo != nil { - fullMethod = info.UnaryServerInfo.FullMethod - } else if info.StreamServerInfo != nil { - fullMethod = info.StreamServerInfo.FullMethod - } - if fullMethod == `/milvus.proto.rootcoord.RootCoord/UpdateChannelTimeTick` || - fullMethod == `/milvus.proto.rootcoord.RootCoord/AllocTimestamp` { - return false - } - return true +var filterFunc = func(info *otelgrpc.InterceptorInfo) bool { + var fullMethod string + if info.UnaryServerInfo != nil { + fullMethod = info.UnaryServerInfo.FullMethod + } else if info.StreamServerInfo != nil { + fullMethod = info.StreamServerInfo.FullMethod } -) + if fullMethod == `/milvus.proto.rootcoord.RootCoord/UpdateChannelTimeTick` || + fullMethod == 
`/milvus.proto.rootcoord.RootCoord/AllocTimestamp` { + return false + } + return true +} // GetInterceptorOpts returns the Option of gRPC open-tracing func GetInterceptorOpts() []otelgrpc.Option { diff --git a/pkg/util/cache/hash_test.go b/pkg/util/cache/hash_test.go index df599e1ecc..6f6c4d86a9 100644 --- a/pkg/util/cache/hash_test.go +++ b/pkg/util/cache/hash_test.go @@ -45,7 +45,7 @@ func sumFNVu32(v uint32) uint64 { } func TestSum(t *testing.T) { - var tests = []struct { + tests := []struct { k interface{} h uint64 }{ diff --git a/pkg/util/commonpbutil/commonpbutil.go b/pkg/util/commonpbutil/commonpbutil.go index 42b5055257..9eeb7649e1 100644 --- a/pkg/util/commonpbutil/commonpbutil.go +++ b/pkg/util/commonpbutil/commonpbutil.go @@ -72,7 +72,6 @@ func FillMsgBaseFromClient(sourceID int64, options ...MsgBaseOptions) MsgBaseOpt op(msgBase) } } - } func newMsgBaseDefault() *commonpb.MsgBase { diff --git a/pkg/util/commonpbutil/commonpbutil_test.go b/pkg/util/commonpbutil/commonpbutil_test.go index 4ab0964aa0..9c21b527ab 100644 --- a/pkg/util/commonpbutil/commonpbutil_test.go +++ b/pkg/util/commonpbutil/commonpbutil_test.go @@ -21,8 +21,9 @@ package commonpbutil import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) func TestIsHealthy(t *testing.T) { diff --git a/pkg/util/conc/options.go b/pkg/util/conc/options.go index 4deb9d292b..281ab675b3 100644 --- a/pkg/util/conc/options.go +++ b/pkg/util/conc/options.go @@ -19,9 +19,10 @@ package conc import ( "time" - "github.com/milvus-io/milvus/pkg/log" "github.com/panjf2000/ants/v2" "go.uber.org/zap" + + "github.com/milvus-io/milvus/pkg/log" ) type poolOption struct { diff --git a/pkg/util/conc/pool.go b/pkg/util/conc/pool.go index 600518ca4b..1d19ea02eb 100644 --- a/pkg/util/conc/pool.go +++ b/pkg/util/conc/pool.go @@ -21,8 +21,9 @@ import ( "runtime" "sync" - "github.com/milvus-io/milvus/pkg/util/generic" ants "github.com/panjf2000/ants/v2" + + "github.com/milvus-io/milvus/pkg/util/generic" ) // A goroutine pool diff --git a/pkg/util/errorutil/util.go b/pkg/util/errorutil/util.go index f9967d3ad2..1c0368c27d 100644 --- a/pkg/util/errorutil/util.go +++ b/pkg/util/errorutil/util.go @@ -4,9 +4,9 @@ import ( "fmt" "github.com/cockroachdb/errors" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus/pkg/util/typeutil" ) diff --git a/pkg/util/etcd/etcd_util.go b/pkg/util/etcd/etcd_util.go index 9ee284e139..5fd3885158 100644 --- a/pkg/util/etcd/etcd_util.go +++ b/pkg/util/etcd/etcd_util.go @@ -33,9 +33,7 @@ import ( "github.com/milvus-io/milvus/pkg/log" ) -var ( - maxTxnNum = 128 -) +var maxTxnNum = 128 // GetEtcdClient returns etcd client func GetEtcdClient( @@ -45,7 +43,8 @@ func GetEtcdClient( certFile string, keyFile string, caCertFile string, - minVersion string) (*clientv3.Client, error) { + minVersion string, +) (*clientv3.Client, error) { log.Info("create etcd client", zap.Bool("useEmbedEtcd", useEmbedEtcd), zap.Bool("useSSL", useSSL), diff --git a/pkg/util/funcutil/func.go b/pkg/util/funcutil/func.go index 0341db76d8..ffca8c19a1 100644 --- a/pkg/util/funcutil/func.go +++ b/pkg/util/funcutil/func.go @@ -29,12 +29,13 @@ import ( "time" "github.com/cockroachdb/errors" + "google.golang.org/grpc/codes" + grpcStatus "google.golang.org/grpc/status" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" 
"github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/typeutil" - "google.golang.org/grpc/codes" - grpcStatus "google.golang.org/grpc/status" ) // CheckGrpcReady wait for context timeout, or wait 100ms then send nil to targetCh diff --git a/pkg/util/funcutil/func_test.go b/pkg/util/funcutil/func_test.go index fd35d8a535..cabf80982c 100644 --- a/pkg/util/funcutil/func_test.go +++ b/pkg/util/funcutil/func_test.go @@ -27,11 +27,12 @@ import ( "time" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/stretchr/testify/assert" grpcCodes "google.golang.org/grpc/codes" grpcStatus "google.golang.org/grpc/status" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" ) func Test_CheckGrpcReady(t *testing.T) { @@ -346,7 +347,7 @@ func Test_ReadBinary(t *testing.T) { // float vector bs = []byte{0, 0, 0, 0, 0, 0, 0, 0} - var fs = make([]float32, 2) + fs := make([]float32, 2) assert.NoError(t, ReadBinary(endian, bs, &fs)) assert.ElementsMatch(t, []float32{0, 0}, fs) } diff --git a/pkg/util/funcutil/policy.go b/pkg/util/funcutil/policy.go index 5405f25cc7..0914038cd8 100644 --- a/pkg/util/funcutil/policy.go +++ b/pkg/util/funcutil/policy.go @@ -6,12 +6,13 @@ import ( "github.com/golang/protobuf/descriptor" "github.com/golang/protobuf/proto" + "go.uber.org/zap" + "google.golang.org/protobuf/reflect/protoreflect" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util" - "go.uber.org/zap" - "google.golang.org/protobuf/reflect/protoreflect" ) func GetVersion(m proto.GeneratedMessage) (string, error) { diff --git a/pkg/util/funcutil/policy_test.go b/pkg/util/funcutil/policy_test.go index a0a5ce51b4..03bf498884 100644 --- a/pkg/util/funcutil/policy_test.go +++ b/pkg/util/funcutil/policy_test.go @@ -3,9 +3,10 @@ package funcutil import ( "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/stretchr/testify/assert" ) func Test_GetPrivilegeExtObj(t *testing.T) { @@ -43,7 +44,6 @@ func Test_GetResourceName(t *testing.T) { request = &milvuspb.SelectUserRequest{} assert.Equal(t, "*", GetObjectName(request, 2)) } - } func Test_GetResourceNames(t *testing.T) { diff --git a/pkg/util/funcutil/verify_response.go b/pkg/util/funcutil/verify_response.go index cbb3f6bd24..d23cfd644c 100644 --- a/pkg/util/funcutil/verify_response.go +++ b/pkg/util/funcutil/verify_response.go @@ -2,6 +2,7 @@ package funcutil import ( "github.com/cockroachdb/errors" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) diff --git a/pkg/util/hardware/container_linux.go b/pkg/util/hardware/container_linux.go index 08eb37bfa8..49d5168054 100644 --- a/pkg/util/hardware/container_linux.go +++ b/pkg/util/hardware/container_linux.go @@ -15,7 +15,6 @@ import ( "strings" "github.com/cockroachdb/errors" - "github.com/containerd/cgroups" ) diff --git a/pkg/util/indexparamcheck/base_checker.go b/pkg/util/indexparamcheck/base_checker.go index a416a2990e..a8c27776c7 100644 --- a/pkg/util/indexparamcheck/base_checker.go +++ b/pkg/util/indexparamcheck/base_checker.go @@ -2,11 +2,11 @@ package indexparamcheck import ( "github.com/cockroachdb/errors" 
+ "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) -type baseChecker struct { -} +type baseChecker struct{} func (c baseChecker) CheckTrain(params map[string]string) error { if !CheckIntByRange(params, DIM, DefaultMinDim, DefaultMaxDim) { diff --git a/pkg/util/indexparamcheck/base_checker_test.go b/pkg/util/indexparamcheck/base_checker_test.go index eee11c5af4..a016d4da88 100644 --- a/pkg/util/indexparamcheck/base_checker_test.go +++ b/pkg/util/indexparamcheck/base_checker_test.go @@ -4,10 +4,10 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_baseChecker_CheckTrain(t *testing.T) { diff --git a/pkg/util/indexparamcheck/bin_flat_checker_test.go b/pkg/util/indexparamcheck/bin_flat_checker_test.go index 4fa8814cd4..7c10f2e62b 100644 --- a/pkg/util/indexparamcheck/bin_flat_checker_test.go +++ b/pkg/util/indexparamcheck/bin_flat_checker_test.go @@ -4,10 +4,10 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_binFlatChecker_CheckTrain(t *testing.T) { @@ -76,7 +76,6 @@ func Test_binFlatChecker_CheckTrain(t *testing.T) { } func Test_binFlatChecker_CheckValidDataType(t *testing.T) { - cases := []struct { dType schemapb.DataType errIsNil bool diff --git a/pkg/util/indexparamcheck/bin_ivf_flat_checker_test.go b/pkg/util/indexparamcheck/bin_ivf_flat_checker_test.go index 487e47198c..27ef913c2a 100644 --- a/pkg/util/indexparamcheck/bin_ivf_flat_checker_test.go +++ b/pkg/util/indexparamcheck/bin_ivf_flat_checker_test.go @@ -4,10 +4,10 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_binIVFFlatChecker_CheckTrain(t *testing.T) { @@ -127,7 +127,6 @@ func Test_binIVFFlatChecker_CheckTrain(t *testing.T) { } func Test_binIVFFlatChecker_CheckValidDataType(t *testing.T) { - cases := []struct { dType schemapb.DataType errIsNil bool diff --git a/pkg/util/indexparamcheck/binary_vector_base_checker.go b/pkg/util/indexparamcheck/binary_vector_base_checker.go index 4fa69af204..ccafd4f0a9 100644 --- a/pkg/util/indexparamcheck/binary_vector_base_checker.go +++ b/pkg/util/indexparamcheck/binary_vector_base_checker.go @@ -3,9 +3,8 @@ package indexparamcheck import ( "fmt" - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/common" ) type binaryVectorBaseChecker struct { diff --git a/pkg/util/indexparamcheck/binary_vector_base_checker_test.go b/pkg/util/indexparamcheck/binary_vector_base_checker_test.go index d1b09cd449..fc166fabd9 100644 --- a/pkg/util/indexparamcheck/binary_vector_base_checker_test.go +++ b/pkg/util/indexparamcheck/binary_vector_base_checker_test.go @@ -3,12 +3,12 @@ package indexparamcheck import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) func Test_binaryVectorBaseChecker_CheckValidDataType(t *testing.T) { - cases := []struct { dType schemapb.DataType errIsNil bool diff --git a/pkg/util/indexparamcheck/constraints.go b/pkg/util/indexparamcheck/constraints.go index 
f2e80db3d6..b30d16b86c 100644 --- a/pkg/util/indexparamcheck/constraints.go +++ b/pkg/util/indexparamcheck/constraints.go @@ -42,11 +42,13 @@ const ( var METRICS = []string{metric.L2, metric.IP, metric.COSINE} // const // BinIDMapMetrics is a set of all metric types supported for binary vector. -var BinIDMapMetrics = []string{metric.HAMMING, metric.JACCARD, metric.SUBSTRUCTURE, metric.SUPERSTRUCTURE} // const -var BinIvfMetrics = []string{metric.HAMMING, metric.JACCARD} // const -var HnswMetrics = []string{metric.L2, metric.IP, metric.COSINE, metric.HAMMING, metric.JACCARD} // const -var supportDimPerSubQuantizer = []int{32, 28, 24, 20, 16, 12, 10, 8, 6, 4, 3, 2, 1} // const -var supportSubQuantizer = []int{96, 64, 56, 48, 40, 32, 28, 24, 20, 16, 12, 8, 4, 3, 2, 1} // const +var ( + BinIDMapMetrics = []string{metric.HAMMING, metric.JACCARD, metric.SUBSTRUCTURE, metric.SUPERSTRUCTURE} // const + BinIvfMetrics = []string{metric.HAMMING, metric.JACCARD} // const + HnswMetrics = []string{metric.L2, metric.IP, metric.COSINE, metric.HAMMING, metric.JACCARD} // const + supportDimPerSubQuantizer = []int{32, 28, 24, 20, 16, 12, 10, 8, 6, 4, 3, 2, 1} // const + supportSubQuantizer = []int{96, 64, 56, 48, 40, 32, 28, 24, 20, 16, 12, 8, 4, 3, 2, 1} // const +) const ( FloatVectorDefaultMetricType = metric.IP diff --git a/pkg/util/indexparamcheck/diskann_checker_test.go b/pkg/util/indexparamcheck/diskann_checker_test.go index 11005e1611..411e8f97d8 100644 --- a/pkg/util/indexparamcheck/diskann_checker_test.go +++ b/pkg/util/indexparamcheck/diskann_checker_test.go @@ -4,10 +4,10 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_diskannChecker_CheckTrain(t *testing.T) { @@ -84,7 +84,6 @@ func Test_diskannChecker_CheckTrain(t *testing.T) { } func Test_diskannChecker_CheckValidDataType(t *testing.T) { - cases := []struct { dType schemapb.DataType errIsNil bool diff --git a/pkg/util/indexparamcheck/flat_checker_test.go b/pkg/util/indexparamcheck/flat_checker_test.go index c44a215dae..115fd83931 100644 --- a/pkg/util/indexparamcheck/flat_checker_test.go +++ b/pkg/util/indexparamcheck/flat_checker_test.go @@ -4,13 +4,12 @@ import ( "strconv" "testing" - "github.com/milvus-io/milvus/pkg/util/metric" - "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/metric" ) func Test_flatChecker_CheckTrain(t *testing.T) { - p1 := map[string]string{ DIM: strconv.Itoa(128), Metric: metric.L2, diff --git a/pkg/util/indexparamcheck/float_vector_base_checker.go b/pkg/util/indexparamcheck/float_vector_base_checker.go index 562c20e771..af43b2b523 100644 --- a/pkg/util/indexparamcheck/float_vector_base_checker.go +++ b/pkg/util/indexparamcheck/float_vector_base_checker.go @@ -3,9 +3,8 @@ package indexparamcheck import ( "fmt" - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/common" ) type floatVectorBaseChecker struct { diff --git a/pkg/util/indexparamcheck/float_vector_base_checker_test.go b/pkg/util/indexparamcheck/float_vector_base_checker_test.go index 22ae463e4d..affc4d9d53 100644 --- a/pkg/util/indexparamcheck/float_vector_base_checker_test.go +++ b/pkg/util/indexparamcheck/float_vector_base_checker_test.go @@ -3,12 +3,12 @@ package indexparamcheck import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" 
"github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) func Test_floatVectorBaseChecker_CheckValidDataType(t *testing.T) { - cases := []struct { dType schemapb.DataType errIsNil bool diff --git a/pkg/util/indexparamcheck/hnsw_checker_test.go b/pkg/util/indexparamcheck/hnsw_checker_test.go index d2ea5d9f70..bcb7c482a1 100644 --- a/pkg/util/indexparamcheck/hnsw_checker_test.go +++ b/pkg/util/indexparamcheck/hnsw_checker_test.go @@ -4,14 +4,13 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_hnswChecker_CheckTrain(t *testing.T) { - validParams := map[string]string{ DIM: strconv.Itoa(128), HNSWM: strconv.Itoa(16), @@ -105,7 +104,6 @@ func Test_hnswChecker_CheckTrain(t *testing.T) { } func Test_hnswChecker_CheckValidDataType(t *testing.T) { - cases := []struct { dType schemapb.DataType errIsNil bool diff --git a/pkg/util/indexparamcheck/ivf_base_checker_test.go b/pkg/util/indexparamcheck/ivf_base_checker_test.go index e9ed4c017d..ad0ad42a20 100644 --- a/pkg/util/indexparamcheck/ivf_base_checker_test.go +++ b/pkg/util/indexparamcheck/ivf_base_checker_test.go @@ -4,10 +4,10 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_ivfBaseChecker_CheckTrain(t *testing.T) { diff --git a/pkg/util/indexparamcheck/ivf_pq_checker_test.go b/pkg/util/indexparamcheck/ivf_pq_checker_test.go index 11938473d0..8c44f22c34 100644 --- a/pkg/util/indexparamcheck/ivf_pq_checker_test.go +++ b/pkg/util/indexparamcheck/ivf_pq_checker_test.go @@ -4,10 +4,10 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_ivfPQChecker_CheckTrain(t *testing.T) { @@ -151,7 +151,6 @@ func Test_ivfPQChecker_CheckTrain(t *testing.T) { } func Test_ivfPQChecker_CheckValidDataType(t *testing.T) { - cases := []struct { dType schemapb.DataType errIsNil bool diff --git a/pkg/util/indexparamcheck/ivf_sq_checker_test.go b/pkg/util/indexparamcheck/ivf_sq_checker_test.go index eef0a73251..fa8a5a73c8 100644 --- a/pkg/util/indexparamcheck/ivf_sq_checker_test.go +++ b/pkg/util/indexparamcheck/ivf_sq_checker_test.go @@ -4,10 +4,10 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_ivfSQChecker_CheckTrain(t *testing.T) { diff --git a/pkg/util/indexparamcheck/raft_ivf_pq_checker_test.go b/pkg/util/indexparamcheck/raft_ivf_pq_checker_test.go index 27d6939ed7..f1b7433597 100644 --- a/pkg/util/indexparamcheck/raft_ivf_pq_checker_test.go +++ b/pkg/util/indexparamcheck/raft_ivf_pq_checker_test.go @@ -4,14 +4,13 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/util/metric" - - "github.com/stretchr/testify/assert" ) func Test_raftIVFPQChecker_CheckTrain(t *testing.T) { - validParams := map[string]string{ DIM: strconv.Itoa(128), NLIST: strconv.Itoa(1024), diff --git a/pkg/util/indexparamcheck/scalar_index_checker_test.go 
b/pkg/util/indexparamcheck/scalar_index_checker_test.go index 01a755d700..3289cd00b2 100644 --- a/pkg/util/indexparamcheck/scalar_index_checker_test.go +++ b/pkg/util/indexparamcheck/scalar_index_checker_test.go @@ -3,8 +3,9 @@ package indexparamcheck import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) func TestCheckIndexValid(t *testing.T) { diff --git a/pkg/util/indexparams/disk_index_params.go b/pkg/util/indexparams/disk_index_params.go index d9dee23d6c..e4b8db6780 100644 --- a/pkg/util/indexparams/disk_index_params.go +++ b/pkg/util/indexparams/disk_index_params.go @@ -260,7 +260,6 @@ func SetDiskIndexLoadParams(params *paramtable.ComponentParam, indexParams map[s if err != nil { return err } - } indexParams[SearchCacheBudgetKey] = fmt.Sprintf("%f", diff --git a/pkg/util/indexparams/disk_index_params_test.go b/pkg/util/indexparams/disk_index_params_test.go index 503321733f..5079f8be93 100644 --- a/pkg/util/indexparams/disk_index_params_test.go +++ b/pkg/util/indexparams/disk_index_params_test.go @@ -22,10 +22,11 @@ import ( "strconv" "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/hardware" "github.com/milvus-io/milvus/pkg/util/paramtable" - "github.com/stretchr/testify/assert" ) func TestDiskIndexParams(t *testing.T) { diff --git a/pkg/util/lifetime/safe_chan_test.go b/pkg/util/lifetime/safe_chan_test.go index 98ddce20b0..05dd445134 100644 --- a/pkg/util/lifetime/safe_chan_test.go +++ b/pkg/util/lifetime/safe_chan_test.go @@ -3,8 +3,9 @@ package lifetime import ( "testing" - "github.com/milvus-io/milvus/pkg/util/typeutil" "github.com/stretchr/testify/suite" + + "github.com/milvus-io/milvus/pkg/util/typeutil" ) type SafeChanSuite struct { diff --git a/pkg/util/lock/metric_mutex.go b/pkg/util/lock/metric_mutex.go index 32c0a5117f..4c8d4fb8ef 100644 --- a/pkg/util/lock/metric_mutex.go +++ b/pkg/util/lock/metric_mutex.go @@ -5,10 +5,11 @@ import ( "time" "github.com/cockroachdb/errors" + "go.uber.org/zap" + "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/metrics" "github.com/milvus-io/milvus/pkg/util/paramtable" - "go.uber.org/zap" ) type MetricsLockManager struct { diff --git a/pkg/util/lock/metrics_mutex_test.go b/pkg/util/lock/metrics_mutex_test.go index 3f050b713a..293f109b8c 100644 --- a/pkg/util/lock/metrics_mutex_test.go +++ b/pkg/util/lock/metrics_mutex_test.go @@ -5,8 +5,9 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func TestMetricsLockLock(t *testing.T) { diff --git a/pkg/util/logutil/logutil_test.go b/pkg/util/logutil/logutil_test.go index c177d8b092..3506852327 100644 --- a/pkg/util/logutil/logutil_test.go +++ b/pkg/util/logutil/logutil_test.go @@ -27,5 +27,4 @@ func TestName(t *testing.T) { wrapper.Error("Testing") wrapper.Errorln("Testing") wrapper.Errorf("%s", "Testing") - } diff --git a/pkg/util/merr/errors_test.go b/pkg/util/merr/errors_test.go index 7410977746..40c74ba907 100644 --- a/pkg/util/merr/errors_test.go +++ b/pkg/util/merr/errors_test.go @@ -21,9 +21,9 @@ import ( "testing" "github.com/cockroachdb/errors" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/suite" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/util/paramtable" ) diff 
--git a/pkg/util/merr/utils.go b/pkg/util/merr/utils.go index e0255d6d57..1c53e7cd7e 100644 --- a/pkg/util/merr/utils.go +++ b/pkg/util/merr/utils.go @@ -22,17 +22,16 @@ import ( "strings" "github.com/cockroachdb/errors" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/util/paramtable" ) -var ( - // For compatibility - oldErrCodes = map[int32]commonpb.ErrorCode{ - ErrServiceNotReady.code(): commonpb.ErrorCode_NotReadyServe, - ErrCollectionNotFound.code(): commonpb.ErrorCode_CollectionNotExists, - } -) +// For compatibility +var oldErrCodes = map[int32]commonpb.ErrorCode{ + ErrServiceNotReady.code(): commonpb.ErrorCode_NotReadyServe, + ErrCollectionNotFound.code(): commonpb.ErrorCode_CollectionNotExists, +} // Code returns the error code of the given error, // WARN: DO NOT use this for now diff --git a/pkg/util/metricsinfo/cache.go b/pkg/util/metricsinfo/cache.go index 0dbb817f04..aae12709e4 100644 --- a/pkg/util/metricsinfo/cache.go +++ b/pkg/util/metricsinfo/cache.go @@ -106,7 +106,6 @@ func (manager *MetricsCacheManager) GetSystemInfoMetrics() (*milvuspb.GetMetrics if manager.systemInfoMetricsInvalid || manager.systemInfoMetrics == nil || time.Since(manager.systemInfoMetricsLastUpdatedTime) >= retention { - return nil, errInvalidSystemInfosMetricCache } diff --git a/pkg/util/metricsinfo/cache_test.go b/pkg/util/metricsinfo/cache_test.go index 97694ed25b..dfbddeb4b3 100644 --- a/pkg/util/metricsinfo/cache_test.go +++ b/pkg/util/metricsinfo/cache_test.go @@ -15,8 +15,9 @@ import ( "testing" "time" - "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" ) func Test_NewMetricsCacheManager(t *testing.T) { diff --git a/pkg/util/metricsinfo/metric_type.go b/pkg/util/metricsinfo/metric_type.go index 85e0b183b9..60e0503152 100644 --- a/pkg/util/metricsinfo/metric_type.go +++ b/pkg/util/metricsinfo/metric_type.go @@ -17,7 +17,6 @@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/commonpbutil" ) @@ -52,7 +51,7 @@ func ConstructRequestByMetricType(metricType string) (*milvuspb.GetMetricsReques if err != nil { return nil, fmt.Errorf("failed to construct request by metric type %s: %s", metricType, err.Error()) } - //TODO:: switch metricType to different msgType and return err when metricType is not supported + // TODO:: switch metricType to different msgType and return err when metricType is not supported return &milvuspb.GetMetricsRequest{ Base: commonpbutil.NewMsgBase( commonpbutil.WithMsgType(commonpb.MsgType_SystemInfo), diff --git a/pkg/util/metricsinfo/metric_type_test.go b/pkg/util/metricsinfo/metric_type_test.go index 07b62df436..12414d233c 100644 --- a/pkg/util/metricsinfo/metric_type_test.go +++ b/pkg/util/metricsinfo/metric_type_test.go @@ -58,7 +58,6 @@ func Test_ParseMetricType(t *testing.T) { t.Errorf("ParseMetricType(%s) = %s, but got: %s", test.s, test.want, got) } } - } func Test_ConstructRequestByMetricType(t *testing.T) { diff --git a/pkg/util/metricsinfo/metrics_info.go b/pkg/util/metricsinfo/metrics_info.go index a8f2420a3d..7673e5d0e8 100644 --- a/pkg/util/metricsinfo/metrics_info.go +++ b/pkg/util/metricsinfo/metrics_info.go @@ -16,8 +16,7 @@ import ( ) // ComponentInfos defines the interface of all component infos -type ComponentInfos interface { -} +type ComponentInfos interface{} // 
MarshalComponentInfos returns the json string of ComponentInfos func MarshalComponentInfos(infos ComponentInfos) (string, error) { diff --git a/pkg/util/metricsinfo/topology.go b/pkg/util/metricsinfo/topology.go index a9d8810aef..774cfbcb27 100644 --- a/pkg/util/metricsinfo/topology.go +++ b/pkg/util/metricsinfo/topology.go @@ -27,8 +27,7 @@ func ConstructComponentName(role string, id typeutil.UniqueID) string { } // Topology defines the interface of topology graph between different components -type Topology interface { -} +type Topology interface{} // MarshalTopology returns the json string of Topology func MarshalTopology(topology Topology) (string, error) { diff --git a/pkg/util/parameterutil.go/get_max_len_test.go b/pkg/util/parameterutil.go/get_max_len_test.go index 1b6b40b74b..cf27715fe0 100644 --- a/pkg/util/parameterutil.go/get_max_len_test.go +++ b/pkg/util/parameterutil.go/get_max_len_test.go @@ -3,12 +3,11 @@ package parameterutil import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/common" ) func TestGetMaxLength(t *testing.T) { diff --git a/pkg/util/paramtable/autoindex_param.go b/pkg/util/paramtable/autoindex_param.go index 9fff408ac4..cdc4f5289f 100644 --- a/pkg/util/paramtable/autoindex_param.go +++ b/pkg/util/paramtable/autoindex_param.go @@ -19,10 +19,9 @@ package paramtable import ( "fmt" + "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/config" "github.com/milvus-io/milvus/pkg/util/funcutil" - - "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/util/indexparamcheck" ) diff --git a/pkg/util/paramtable/autoindex_param_test.go b/pkg/util/paramtable/autoindex_param_test.go index 0dcdd99454..1c4f262a47 100644 --- a/pkg/util/paramtable/autoindex_param_test.go +++ b/pkg/util/paramtable/autoindex_param_test.go @@ -21,13 +21,11 @@ import ( "strconv" "testing" - "github.com/milvus-io/milvus/pkg/util/indexparamcheck" - - "github.com/milvus-io/milvus/pkg/config" - "github.com/stretchr/testify/assert" "github.com/milvus-io/milvus/pkg/common" + "github.com/milvus-io/milvus/pkg/config" + "github.com/milvus-io/milvus/pkg/util/indexparamcheck" ) const ( @@ -41,8 +39,8 @@ func TestAutoIndexParams_build(t *testing.T) { CParams.Init(bt) t.Run("test parseBuildParams success", func(t *testing.T) { - //Params := CParams.AutoIndexConfig - //buildParams := make([string]interface) + // Params := CParams.AutoIndexConfig + // buildParams := make([string]interface) var err error map1 := map[string]any{ IndexTypeKey: "HNSW", diff --git a/pkg/util/paramtable/base_table_test.go b/pkg/util/paramtable/base_table_test.go index bcba2b0364..5fe37cb51a 100644 --- a/pkg/util/paramtable/base_table_test.go +++ b/pkg/util/paramtable/base_table_test.go @@ -110,7 +110,7 @@ func TestBaseTable_Get(t *testing.T) { } func TestBaseTable_Pulsar(t *testing.T) { - //test PULSAR ADDRESS + // test PULSAR ADDRESS t.Setenv("PULSAR_ADDRESS", "pulsar://localhost:6650") baseParams.init() diff --git a/pkg/util/paramtable/component_param.go b/pkg/util/paramtable/component_param.go index 4d374601e2..278637fba9 100644 --- a/pkg/util/paramtable/component_param.go +++ b/pkg/util/paramtable/component_param.go @@ -212,7 +212,7 @@ type commonConfig struct { MetricsPort ParamItem `refreshable:"false"` - //lock related params + // lock 
related params EnableLockMetrics ParamItem `refreshable:"false"` LockSlowLogInfoThreshold ParamItem `refreshable:"true"` LockSlowLogWarnThreshold ParamItem `refreshable:"true"` @@ -2601,12 +2601,15 @@ func (p *integrationTestConfig) init(base *BaseTable) { func (params *ComponentParam) Save(key string, value string) error { return params.baseTable.Save(key, value) } + func (params *ComponentParam) Remove(key string) error { return params.baseTable.Remove(key) } + func (params *ComponentParam) Reset(key string) error { return params.baseTable.Reset(key) } + func (params *ComponentParam) GetWithDefault(key string, dft string) string { return params.baseTable.GetWithDefault(key, dft) } diff --git a/pkg/util/paramtable/quota_param_test.go b/pkg/util/paramtable/quota_param_test.go index d8bf1cd30d..8387f83650 100644 --- a/pkg/util/paramtable/quota_param_test.go +++ b/pkg/util/paramtable/quota_param_test.go @@ -212,6 +212,5 @@ func TestQuotaParam(t *testing.T) { // test invalid config params.Save(params.QuotaConfig.DiskQuotaPerCollection.Key, "-1") assert.Equal(t, qc.DiskQuota.GetAsFloat(), qc.DiskQuotaPerCollection.GetAsFloat()) - }) } diff --git a/pkg/util/paramtable/runtime.go b/pkg/util/paramtable/runtime.go index 7831dc3755..55856d3836 100644 --- a/pkg/util/paramtable/runtime.go +++ b/pkg/util/paramtable/runtime.go @@ -24,9 +24,11 @@ const ( runtimeUpdateTimeKey = "runtime.updateTime" ) -var once sync.Once -var params ComponentParam -var hookParams hookConfig +var ( + once sync.Once + params ComponentParam + hookParams hookConfig +) func Init() { once.Do(func() { diff --git a/pkg/util/retry/retry_test.go b/pkg/util/retry/retry_test.go index eeb0200268..d21522482e 100644 --- a/pkg/util/retry/retry_test.go +++ b/pkg/util/retry/retry_test.go @@ -19,8 +19,9 @@ import ( "github.com/cockroachdb/errors" "github.com/lingdor/stackerror" - "github.com/milvus-io/milvus/pkg/util/merr" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus/pkg/util/merr" ) func TestDo(t *testing.T) { diff --git a/pkg/util/tikv/tikv_util.go b/pkg/util/tikv/tikv_util.go index 870cea0bb5..53a1303bcf 100644 --- a/pkg/util/tikv/tikv_util.go +++ b/pkg/util/tikv/tikv_util.go @@ -17,9 +17,10 @@ package tikv import ( - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/tikv/client-go/v2/config" "github.com/tikv/client-go/v2/txnkv" + + "github.com/milvus-io/milvus/pkg/util/paramtable" ) func GetTiKVClient(cfg *paramtable.TiKVConfig) (*txnkv.Client, error) { diff --git a/pkg/util/tsoutil/tso_test.go b/pkg/util/tsoutil/tso_test.go index fac9bedce3..a4c44a73f6 100644 --- a/pkg/util/tsoutil/tso_test.go +++ b/pkg/util/tsoutil/tso_test.go @@ -62,11 +62,11 @@ func TestAddPhysicalDurationOnTs(t *testing.T) { duration := time.Millisecond * (20 * 1000) ts2 := AddPhysicalDurationOnTs(ts1, duration) ts3 := ComposeTSByTime(now.Add(duration), 0) - //diff := CalculateDuration(ts2, ts1) + // diff := CalculateDuration(ts2, ts1) assert.Equal(t, ts3, ts2) ts2 = AddPhysicalDurationOnTs(ts1, -duration) ts3 = ComposeTSByTime(now.Add(-duration), 0) - //diff := CalculateDuration(ts2, ts1) + // diff := CalculateDuration(ts2, ts1) assert.Equal(t, ts3, ts2) } diff --git a/pkg/util/typeutil/conversion_test.go b/pkg/util/typeutil/conversion_test.go index 4f948ab68f..da5a9623fb 100644 --- a/pkg/util/typeutil/conversion_test.go +++ b/pkg/util/typeutil/conversion_test.go @@ -94,5 +94,4 @@ func TestConversion(t *testing.T) { ret1 := SliceRemoveDuplicate(arr) assert.Equal(t, 3, len(ret1)) }) - } diff --git 
a/pkg/util/typeutil/float_util_test.go b/pkg/util/typeutil/float_util_test.go index 6ac94aad96..16f204e333 100644 --- a/pkg/util/typeutil/float_util_test.go +++ b/pkg/util/typeutil/float_util_test.go @@ -24,7 +24,7 @@ import ( ) func Test_VerifyFloat(t *testing.T) { - var value = math.NaN() + value := math.NaN() err := VerifyFloat(value) assert.Error(t, err) diff --git a/pkg/util/typeutil/hash.go b/pkg/util/typeutil/hash.go index 68f9336ae5..331785f305 100644 --- a/pkg/util/typeutil/hash.go +++ b/pkg/util/typeutil/hash.go @@ -93,7 +93,7 @@ func HashPK2Channels(primaryKeys *schemapb.IDs, shardNames []string) []uint32 { hashValues = append(hashValues, hash%numShard) } default: - //TODO:: + // TODO:: } return hashValues @@ -121,7 +121,6 @@ func HashKey2Partitions(keys *schemapb.FieldData, partitionNames []string) ([]ui } default: return nil, errors.New("currently only support DataType Int64 or VarChar as partition key Field") - } default: return nil, errors.New("currently not support vector field as partition keys") diff --git a/pkg/util/typeutil/hash_test.go b/pkg/util/typeutil/hash_test.go index fccbb3ad12..e561af91c9 100644 --- a/pkg/util/typeutil/hash_test.go +++ b/pkg/util/typeutil/hash_test.go @@ -21,13 +21,14 @@ import ( "testing" "unsafe" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" ) func TestUint64(t *testing.T) { var i int64 = -1 - var u = uint64(i) + u := uint64(i) t.Log(i) t.Log(u) } @@ -54,7 +55,7 @@ func TestHash32_Uint64(t *testing.T) { } func TestHash32_String(t *testing.T) { - var u = "ok" + u := "ok" h, err := Hash32String(u) assert.NoError(t, err) @@ -151,7 +152,7 @@ func TestHashPK2Channels(t *testing.T) { } ret := HashPK2Channels(int64IDs, channels) assert.Equal(t, 5, len(ret)) - //same pk hash to same channel + // same pk hash to same channel assert.Equal(t, ret[1], ret[2]) stringIDs := &schemapb.IDs{ diff --git a/pkg/util/typeutil/index_test.go b/pkg/util/typeutil/index_test.go index 856625ded6..b6044aac4c 100644 --- a/pkg/util/typeutil/index_test.go +++ b/pkg/util/typeutil/index_test.go @@ -19,8 +19,9 @@ package typeutil import ( "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/stretchr/testify/assert" + + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" ) func TestCompareIndexParams(t *testing.T) { diff --git a/pkg/util/typeutil/kv_pair_helper_test.go b/pkg/util/typeutil/kv_pair_helper_test.go index 86de0d214d..576aea68e7 100644 --- a/pkg/util/typeutil/kv_pair_helper_test.go +++ b/pkg/util/typeutil/kv_pair_helper_test.go @@ -3,9 +3,10 @@ package typeutil import ( "testing" + "github.com/stretchr/testify/assert" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus/pkg/common" - "github.com/stretchr/testify/assert" ) func TestNewKvPairs(t *testing.T) { diff --git a/pkg/util/typeutil/schema.go b/pkg/util/typeutil/schema.go index 35464db991..6100ae49be 100644 --- a/pkg/util/typeutil/schema.go +++ b/pkg/util/typeutil/schema.go @@ -850,7 +850,7 @@ func AppendIDs(dst *schemapb.IDs, src *schemapb.IDs, idx int) { dst.GetStrId().Data = append(dst.GetStrId().Data, src.GetStrId().Data[idx]) } default: - //TODO + // TODO } } @@ -866,7 +866,7 @@ func GetSizeOfIDs(data *schemapb.IDs) int { case *schemapb.IDs_StrId: result = len(data.GetStrId().GetData()) default: - //TODO:: + // TODO:: } return result @@ -1006,15 +1006,15 @@ func SelectMinPK[T ResultWithID](results []T, cursors []int64, stopForBest bool, for 
i, cursor := range cursors { if int(cursor) >= GetSizeOfIDs(results[i].GetIds()) { if realLimit == Unlimited { - //if there is no limit set and all possible results of one query unit(shard or segment) - //has drained all possible results without any leftover, so it's safe to continue the selection - //under this case + // if there is no limit set and all possible results of one query unit(shard or segment) + // has drained all possible results without any leftover, so it's safe to continue the selection + // under this case continue } if stopForBest && GetSizeOfIDs(results[i].GetIds()) >= int(realLimit) { - //if one query unit(shard or segment) has more than realLimit results, and it has run out of - //all results in this round, then we have to stop select since there may be further the latest result - //in the following result of current query unit + // if one query unit(shard or segment) has more than realLimit results, and it has run out of + // all results in this round, then we have to stop select since there may be further the latest result + // in the following result of current query unit return -1 } continue diff --git a/pkg/util/typeutil/schema_test.go b/pkg/util/typeutil/schema_test.go index c432d7948c..ce0ac41b10 100644 --- a/pkg/util/typeutil/schema_test.go +++ b/pkg/util/typeutil/schema_test.go @@ -21,12 +21,12 @@ import ( "reflect" "testing" - "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" - "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" + "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" ) @@ -210,7 +210,6 @@ func TestSchema(t *testing.T) { } func TestSchema_GetVectorFieldSchema(t *testing.T) { - schemaNormal := &schemapb.CollectionSchema{ Name: "testColl", Description: "", @@ -264,7 +263,6 @@ func TestSchema_GetVectorFieldSchema(t *testing.T) { _, err := GetVectorFieldSchema(schemaInvalid) assert.Error(t, err) }) - } func TestSchema_invalid(t *testing.T) { @@ -669,8 +667,10 @@ func TestAppendFieldData(t *testing.T) { DoubleArray := []float64{11.0, 22.0} BinaryVector := []byte{0x12, 0x34} FloatVector := []float32{1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0} - Float16Vector := []byte{0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, - 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff} + Float16Vector := []byte{ + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, + } ArrayArray := []*schemapb.ScalarField{ { Data: &schemapb.ScalarField_IntData{ @@ -758,8 +758,10 @@ func TestDeleteFieldData(t *testing.T) { JSONArray := [][]byte{[]byte("{\"hello\":0}"), []byte("{\"key\":1}")} BinaryVector := []byte{0x12, 0x34} FloatVector := []float32{1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0} - Float16Vector := []byte{0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, - 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff} + Float16Vector := []byte{ + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, + 
0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x88, 0x99, 0xaa, 0xbb, 0xcc, 0xdd, 0xee, 0xff, + } result1 := make([]*schemapb.FieldData, 9) result2 := make([]*schemapb.FieldData, 9) @@ -1144,8 +1146,10 @@ func TestGetDataAndGetDataSize(t *testing.T) { VarCharArray := []string{"a", "b"} BinaryVector := []byte{0x12, 0x34} FloatVector := []float32{1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 11.0, 22.0, 33.0, 44.0, 55.0, 66.0, 77.0, 88.0} - Float16Vector := []byte{0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, - 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77} + Float16Vector := []byte{ + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77, + } boolData := genFieldData(fieldName, fieldID, schemapb.DataType_Bool, BoolArray, 1) int8Data := genFieldData(fieldName, fieldID, schemapb.DataType_Int8, Int8Array, 1) diff --git a/pkg/util/typeutil/string_util.go b/pkg/util/typeutil/string_util.go index 001aec3783..031b23f2e2 100644 --- a/pkg/util/typeutil/string_util.go +++ b/pkg/util/typeutil/string_util.go @@ -27,7 +27,7 @@ func AddOne(data string) string { if len(data) == 0 { return data } - var datab = []byte(data) + datab := []byte(data) if datab[len(datab)-1] != 255 { datab[len(datab)-1]++ } else { diff --git a/pkg/util/typeutil/time.go b/pkg/util/typeutil/time.go index bad3b97012..565dba034e 100644 --- a/pkg/util/typeutil/time.go +++ b/pkg/util/typeutil/time.go @@ -32,7 +32,7 @@ var ZeroTimestamp = Timestamp(0) // ParseTimestamp returns a timestamp for a given byte slice. func ParseTimestamp(data []byte) (time.Time, error) { - //we use big endian here for compatibility issues + // we use big endian here for compatibility issues nano, err := BigEndianBytesToUint64(data) if err != nil { return ZeroTime, err diff --git a/rules.go b/rules.go index c35790a844..5bc3422c9b 100644 --- a/rules.go +++ b/rules.go @@ -56,7 +56,6 @@ func timeeq(m dsl.Matcher) { // err but no an error func errnoterror(m dsl.Matcher) { - // Would be easier to check for all err identifiers instead, but then how do we get the type from m[] ? m.Match( @@ -146,7 +145,6 @@ func ifreturn(m dsl.Matcher) { m.Match("if !$x { return $*_ }; if $x {$*_ }").Report("odd sequence of if test") m.Match("if $x == $y { return $*_ }; if $x != $y {$*_ }").Report("odd sequence of if test") m.Match("if $x != $y { return $*_ }; if $x == $y {$*_ }").Report("odd sequence of if test") - } func oddifsequence(m dsl.Matcher) { @@ -267,7 +265,6 @@ func floatloop(m dsl.Matcher) { } func urlredacted(m dsl.Matcher) { - m.Match( "log.Println($x, $*_)", "log.Println($*_, $x, $*_)", @@ -292,7 +289,6 @@ func sprinterr(m dsl.Matcher) { ). Where(m["err"].Type.Is("error")). Report("maybe call $err.Error() instead of fmt.Sprint()?") - } // disable this check, because it can not apply to generic type @@ -351,7 +347,6 @@ func nilerr(m dsl.Matcher) { `if err == nil { return $*_, err }`, ). Report(`return nil error instead of nil value`) - } func mailaddress(m dsl.Matcher) { @@ -367,7 +362,6 @@ func mailaddress(m dsl.Matcher) { ). Report("use net/mail Address.String() instead of fmt.Sprintf()"). Suggest("(&mail.Address{Name:$NAME, Address:$EMAIL}).String()") - } func errnetclosed(m dsl.Matcher) { @@ -377,7 +371,6 @@ func errnetclosed(m dsl.Matcher) { Where(m["text"].Text.Matches("\".*closed network connection.*\"")). 
Report(`String matching against error texts is fragile; use net.ErrClosed instead`). Suggest(`errors.Is($err, net.ErrClosed)`) - } func httpheaderadd(m dsl.Matcher) { diff --git a/tests/integration/bulkinsert/bulkinsert_test.go b/tests/integration/bulkinsert/bulkinsert_test.go index ead972a62e..4fd5e2be9a 100644 --- a/tests/integration/bulkinsert/bulkinsert_test.go +++ b/tests/integration/bulkinsert/bulkinsert_test.go @@ -65,7 +65,7 @@ func (s *BulkInsertSuite) TestBulkInsert() { prefix := "TestBulkInsert" dbName := "" collectionName := prefix + funcutil.GenRandomStr() - //floatVecField := floatVecField + // floatVecField := floatVecField dim := 128 schema := integration.ConstructSchema(collectionName, dim, true, @@ -187,7 +187,7 @@ func (s *BulkInsertSuite) TestBulkInsert() { s.WaitForLoad(ctx, collectionName) // search - expr := "" //fmt.Sprintf("%s > 0", int64Field) + expr := "" // fmt.Sprintf("%s > 0", int64Field) nq := 10 topk := 10 roundDecimal := -1 @@ -236,18 +236,18 @@ func GenerateNumpyFile(filePath string, rowCount int, dType schemapb.DataType, t if err != nil { return err } - //data := make([][]float32, rowCount) + // data := make([][]float32, rowCount) var data [][Dim]float32 for i := 0; i < rowCount; i++ { vec := [Dim]float32{} for j := 0; j < dim; j++ { vec[j] = 1.1 } - //v := reflect.Indirect(reflect.ValueOf(vec)) - //log.Info("type", zap.Any("type", v.Kind())) + // v := reflect.Indirect(reflect.ValueOf(vec)) + // log.Info("type", zap.Any("type", v.Kind())) data = append(data, vec) - //v2 := reflect.Indirect(reflect.ValueOf(data)) - //log.Info("type", zap.Any("type", v2.Kind())) + // v2 := reflect.Indirect(reflect.ValueOf(data)) + // log.Info("type", zap.Any("type", v2.Kind())) } err = importutil.CreateNumpyFile(filePath, data) if err != nil { diff --git a/tests/integration/crossclusterrouting/cross_cluster_routing_test.go b/tests/integration/crossclusterrouting/cross_cluster_routing_test.go index e2a22d7a06..08cc1eeb99 100644 --- a/tests/integration/crossclusterrouting/cross_cluster_routing_test.go +++ b/tests/integration/crossclusterrouting/cross_cluster_routing_test.go @@ -30,16 +30,6 @@ import ( "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" - "github.com/milvus-io/milvus/internal/proto/datapb" - "github.com/milvus-io/milvus/internal/proto/indexpb" - "github.com/milvus-io/milvus/internal/proto/proxypb" - "github.com/milvus-io/milvus/internal/proto/querypb" - "github.com/milvus-io/milvus/internal/util/dependency" - "github.com/milvus-io/milvus/pkg/util/commonpbutil" - "github.com/milvus-io/milvus/pkg/util/etcd" - "github.com/milvus-io/milvus/pkg/util/merr" - "github.com/milvus-io/milvus/pkg/util/paramtable" - grpcdatacoord "github.com/milvus-io/milvus/internal/distributed/datacoord" grpcdatacoordclient "github.com/milvus-io/milvus/internal/distributed/datacoord/client" grpcdatanode "github.com/milvus-io/milvus/internal/distributed/datanode" @@ -54,6 +44,15 @@ import ( grpcquerynodeclient "github.com/milvus-io/milvus/internal/distributed/querynode/client" grpcrootcoord "github.com/milvus-io/milvus/internal/distributed/rootcoord" grpcrootcoordclient "github.com/milvus-io/milvus/internal/distributed/rootcoord/client" + "github.com/milvus-io/milvus/internal/proto/datapb" + "github.com/milvus-io/milvus/internal/proto/indexpb" + "github.com/milvus-io/milvus/internal/proto/proxypb" + "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/util/dependency" + 
"github.com/milvus-io/milvus/pkg/util/commonpbutil" + "github.com/milvus-io/milvus/pkg/util/etcd" + "github.com/milvus-io/milvus/pkg/util/merr" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) type CrossClusterRoutingSuite struct { diff --git a/tests/integration/getvector/get_vector_test.go b/tests/integration/getvector/get_vector_test.go index 7e70d8540d..cfb7a2d0ed 100644 --- a/tests/integration/getvector/get_vector_test.go +++ b/tests/integration/getvector/get_vector_test.go @@ -201,7 +201,7 @@ func (s *TestGetVectorSuite) run() { } s.Require().Len(result.GetScores(), nq*topk) s.Require().GreaterOrEqual(len(result.GetFieldsData()), 1) - var vecFieldIndex = -1 + vecFieldIndex := -1 for i, fieldData := range result.GetFieldsData() { if typeutil.IsVectorType(fieldData.GetType()) { vecFieldIndex = i diff --git a/tests/integration/hellomilvus/hello_milvus_test.go b/tests/integration/hellomilvus/hello_milvus_test.go index 49b2c60399..4aec6030c7 100644 --- a/tests/integration/hellomilvus/hello_milvus_test.go +++ b/tests/integration/hellomilvus/hello_milvus_test.go @@ -153,7 +153,6 @@ func (s *HelloMilvusSuite) TestHelloMilvus() { s.Equal(commonpb.ErrorCode_Success, searchResult.GetStatus().GetErrorCode()) log.Info("TestHelloMilvus succeed") - } func TestHelloMilvus(t *testing.T) { diff --git a/tests/integration/insert/insert_test.go b/tests/integration/insert/insert_test.go index 02b4f6cb5c..b469015a2b 100644 --- a/tests/integration/insert/insert_test.go +++ b/tests/integration/insert/insert_test.go @@ -118,7 +118,6 @@ func (s *InsertSuite) TestInsert() { log.Info("TestInsert succeed") log.Info("==================") log.Info("==================") - } func TestInsert(t *testing.T) { diff --git a/tests/integration/jsonexpr/json_expr_test.go b/tests/integration/jsonexpr/json_expr_test.go index f4198240c2..c5b397c08e 100644 --- a/tests/integration/jsonexpr/json_expr_test.go +++ b/tests/integration/jsonexpr/json_expr_test.go @@ -24,18 +24,18 @@ import ( "testing" "time" - "github.com/milvus-io/milvus/pkg/common" - "github.com/milvus-io/milvus/tests/integration" - "github.com/stretchr/testify/suite" - "github.com/golang/protobuf/proto" + "github.com/stretchr/testify/suite" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" + "github.com/milvus-io/milvus/pkg/common" "github.com/milvus-io/milvus/pkg/log" "github.com/milvus-io/milvus/pkg/util/funcutil" "github.com/milvus-io/milvus/pkg/util/metric" - "go.uber.org/zap" + "github.com/milvus-io/milvus/tests/integration" ) type JSONExprSuite struct { diff --git a/tests/integration/meta_watcher.go b/tests/integration/meta_watcher.go index bc47488c67..b4cd2140b0 100644 --- a/tests/integration/meta_watcher.go +++ b/tests/integration/meta_watcher.go @@ -25,13 +25,13 @@ import ( "time" "github.com/golang/protobuf/proto" - "github.com/milvus-io/milvus/internal/util/sessionutil" - "github.com/milvus-io/milvus/pkg/log" clientv3 "go.etcd.io/etcd/client/v3" "go.uber.org/zap" "github.com/milvus-io/milvus/internal/proto/datapb" "github.com/milvus-io/milvus/internal/proto/querypb" + "github.com/milvus-io/milvus/internal/util/sessionutil" + "github.com/milvus-io/milvus/pkg/log" ) // MetaWatcher to observe meta data of milvus cluster @@ -117,7 +117,6 @@ func listReplicas(cli *clientv3.Client, prefix string) ([]*querypb.Replica, erro ctx, cancel := context.WithTimeout(context.Background(), time.Second*3) defer cancel() resp, err := 
cli.Get(ctx, prefix, clientv3.WithPrefix()) - if err != nil { return nil, err } diff --git a/tests/integration/minicluster.go b/tests/integration/minicluster.go index f73d68674e..9a3c5f7ec6 100644 --- a/tests/integration/minicluster.go +++ b/tests/integration/minicluster.go @@ -53,7 +53,7 @@ type Cluster interface { AddRootCoord(types.RootCoordComponent) error AddDataCoord(types.DataCoordComponent) error AddQueryCoord(types.QueryCoordComponent) error - //AddIndexCoord(types.IndexCoordComponent) error + // AddIndexCoord(types.IndexCoordComponent) error AddDataNode(types.DataNodeComponent) error AddQueryNode(types.QueryNodeComponent) error AddIndexNode(types.IndexNodeComponent) error @@ -61,7 +61,7 @@ type Cluster interface { RemoveRootCoord(types.RootCoordComponent) error RemoveDataCoord(types.DataCoordComponent) error RemoveQueryCoord(types.QueryCoordComponent) error - //RemoveIndexCoord(types.IndexCoordComponent) error + // RemoveIndexCoord(types.IndexCoordComponent) error RemoveDataNode(types.DataNodeComponent) error RemoveQueryNode(types.QueryNodeComponent) error RemoveIndexNode(types.IndexNodeComponent) error @@ -76,12 +76,12 @@ type Cluster interface { } type ClusterConfig struct { - //ProxyNum int + // ProxyNum int // todo coord num can be more than 1 if enable Active-Standby - //RootCoordNum int - //DataCoordNum int - //IndexCoordNum int - //QueryCoordNum int + // RootCoordNum int + // DataCoordNum int + // IndexCoordNum int + // QueryCoordNum int QueryNodeNum int DataNodeNum int IndexNodeNum int @@ -256,7 +256,7 @@ func StartMiniCluster(ctx context.Context, opts ...Option) (cluster *MiniCluster cluster.Proxy = proxy } - //cluster.dataCoord.SetIndexCoord(cluster.indexCoord) + // cluster.dataCoord.SetIndexCoord(cluster.indexCoord) cluster.DataCoord.SetRootCoord(cluster.RootCoord) err = cluster.RootCoord.SetDataCoord(cluster.DataCoord) @@ -272,7 +272,7 @@ func StartMiniCluster(ctx context.Context, opts ...Option) (cluster *MiniCluster return } - //err = cluster.queryCoord.SetIndexCoord(cluster.indexCoord) + // err = cluster.queryCoord.SetIndexCoord(cluster.indexCoord) if err != nil { return } @@ -306,7 +306,7 @@ func StartMiniCluster(ctx context.Context, opts ...Option) (cluster *MiniCluster } cluster.Proxy.SetDataCoordClient(cluster.DataCoord) - //cluster.proxy.SetIndexCoordClient(cluster.indexCoord) + // cluster.proxy.SetIndexCoordClient(cluster.indexCoord) cluster.Proxy.SetQueryCoordClient(cluster.QueryCoord) cluster.Proxy.SetRootCoordClient(cluster.RootCoord) @@ -438,7 +438,7 @@ func (cluster *MiniCluster) Stop() error { log.Info("mini cluster rootCoord stopped") cluster.DataCoord.Stop() log.Info("mini cluster dataCoord stopped") - //cluster.indexCoord.Stop() + // cluster.indexCoord.Stop() cluster.QueryCoord.Stop() log.Info("mini cluster queryCoord stopped") cluster.Proxy.Stop() @@ -674,10 +674,10 @@ func (cluster *MiniCluster) AddRootCoord(rootCoord types.RootCoordComponent) err // link rootCoord.SetDataCoord(cluster.DataCoord) rootCoord.SetQueryCoord(cluster.QueryCoord) - //rootCoord.SetIndexCoord(cluster.indexCoord) + // rootCoord.SetIndexCoord(cluster.indexCoord) cluster.DataCoord.SetRootCoord(rootCoord) cluster.QueryCoord.SetRootCoord(rootCoord) - //cluster.indexCoord.SetRootCoord(rootCoord) + // cluster.indexCoord.SetRootCoord(rootCoord) cluster.Proxy.SetRootCoordClient(rootCoord) for _, dataNode := range cluster.DataNodes { err = dataNode.SetRootCoord(rootCoord) @@ -740,7 +740,7 @@ func (cluster *MiniCluster) AddDataCoord(dataCoord types.DataCoordComponent) err } // link - 
//dataCoord.SetIndexCoord(cluster.indexCoord) + // dataCoord.SetIndexCoord(cluster.indexCoord) dataCoord.SetRootCoord(cluster.RootCoord) err = cluster.RootCoord.SetDataCoord(cluster.DataCoord) if err != nil { @@ -818,7 +818,7 @@ func (cluster *MiniCluster) AddQueryCoord(queryCoord types.QueryCoordComponent) // link queryCoord.SetRootCoord(cluster.RootCoord) queryCoord.SetDataCoord(cluster.DataCoord) - //queryCoord.SetIndexCoord(cluster.indexCoord) + // queryCoord.SetIndexCoord(cluster.indexCoord) cluster.RootCoord.SetQueryCoord(queryCoord) cluster.Proxy.SetQueryCoordClient(queryCoord) @@ -1131,8 +1131,8 @@ func (cluster *MiniCluster) UpdateClusterSize(clusterConfig ClusterConfig) error return errors.New("Illegal cluster size config") } // todo concurrent concerns - //cluster.mu.Lock() - //defer cluster.mu.Unlock() + // cluster.mu.Lock() + // defer cluster.mu.Unlock() if clusterConfig.DataNodeNum > len(cluster.DataNodes) { needAdd := clusterConfig.DataNodeNum - len(cluster.DataNodes) for i := 0; i < needAdd; i++ { diff --git a/tests/integration/minicluster_test.go b/tests/integration/minicluster_test.go index 3cd31ecfd6..2df1c232c7 100644 --- a/tests/integration/minicluster_test.go +++ b/tests/integration/minicluster_test.go @@ -32,7 +32,7 @@ type MiniClusterMethodsSuite struct { } func (s *MiniClusterMethodsSuite) TestStartAndStop() { - //Do nothing + // Do nothing } func (s *MiniClusterMethodsSuite) TestRemoveDataNode() { @@ -42,7 +42,7 @@ func (s *MiniClusterMethodsSuite) TestRemoveDataNode() { datanode := datanode.NewDataNode(ctx, c.factory) datanode.SetEtcdClient(c.EtcdCli) - //datanode := c.CreateDefaultDataNode() + // datanode := c.CreateDefaultDataNode() err := c.AddDataNode(datanode) s.NoError(err) @@ -77,7 +77,7 @@ func (s *MiniClusterMethodsSuite) TestRemoveQueryNode() { queryNode := querynodev2.NewQueryNode(ctx, c.factory) queryNode.SetEtcdClient(c.EtcdCli) - //queryNode := c.CreateDefaultQueryNode() + // queryNode := c.CreateDefaultQueryNode() err := c.AddQueryNode(queryNode) s.NoError(err) @@ -103,7 +103,6 @@ func (s *MiniClusterMethodsSuite) TestRemoveQueryNode() { s.Equal(1, c.clusterConfig.QueryNodeNum) s.Equal(1, len(c.QueryNodes)) - } func (s *MiniClusterMethodsSuite) TestRemoveIndexNode() { @@ -113,7 +112,7 @@ func (s *MiniClusterMethodsSuite) TestRemoveIndexNode() { indexNode := indexnode.NewIndexNode(ctx, c.factory) indexNode.SetEtcdClient(c.EtcdCli) - //indexNode := c.CreateDefaultIndexNode() + // indexNode := c.CreateDefaultIndexNode() err := c.AddIndexNode(indexNode) s.NoError(err) @@ -139,11 +138,9 @@ func (s *MiniClusterMethodsSuite) TestRemoveIndexNode() { s.Equal(1, c.clusterConfig.IndexNodeNum) s.Equal(1, len(c.IndexNodes)) - } func (s *MiniClusterMethodsSuite) TestUpdateClusterSize() { - c := s.Cluster err := c.UpdateClusterSize(ClusterConfig{ diff --git a/tests/integration/rangesearch/range_search_test.go b/tests/integration/rangesearch/range_search_test.go index f26cb0c9df..c264ba5bcc 100644 --- a/tests/integration/rangesearch/range_search_test.go +++ b/tests/integration/rangesearch/range_search_test.go @@ -186,7 +186,6 @@ func (s *RangeSearchSuite) TestRangeSearchIP() { log.Info("TestRangeSearchIP succeed") log.Info("=========================") log.Info("=========================") - } func (s *RangeSearchSuite) TestRangeSearchL2() { @@ -335,7 +334,6 @@ func (s *RangeSearchSuite) TestRangeSearchL2() { log.Info("TestRangeSearchL2 succeed") log.Info("=========================") log.Info("=========================") - } func TestRangeSearch(t *testing.T) { 
diff --git a/tests/integration/refreshconfig/refresh_config_test.go b/tests/integration/refreshconfig/refresh_config_test.go index 69aaef5d3d..509f389aff 100644 --- a/tests/integration/refreshconfig/refresh_config_test.go +++ b/tests/integration/refreshconfig/refresh_config_test.go @@ -23,6 +23,9 @@ import ( "time" "github.com/golang/protobuf/proto" + "github.com/stretchr/testify/suite" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/commonpb" "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" @@ -30,8 +33,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/metric" "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/milvus-io/milvus/tests/integration" - "github.com/stretchr/testify/suite" - "go.uber.org/zap" ) type RefreshConfigSuite struct { @@ -65,7 +66,6 @@ func (s *RefreshConfigSuite) TestRefreshPasswordLength() { log.Debug("second create result", zap.Any("state", resp)) return commonpb.ErrorCode_Success == resp.GetErrorCode() }, time.Second*20, time.Millisecond*500) - } func (s *RefreshConfigSuite) TestRefreshDefaultIndexName() { diff --git a/tests/integration/suite.go b/tests/integration/suite.go index d86662192f..f83e30a93d 100644 --- a/tests/integration/suite.go +++ b/tests/integration/suite.go @@ -23,10 +23,11 @@ import ( "strings" "time" - "github.com/milvus-io/milvus/pkg/util/etcd" - "github.com/milvus-io/milvus/pkg/util/paramtable" "github.com/stretchr/testify/suite" "go.etcd.io/etcd/server/v3/embed" + + "github.com/milvus-io/milvus/pkg/util/etcd" + "github.com/milvus-io/milvus/pkg/util/paramtable" ) // EmbedEtcdSuite contains embed setup & teardown related logic diff --git a/tests/integration/upsert/upsert_test.go b/tests/integration/upsert/upsert_test.go index bc3ed2a7a3..9f0293ef8a 100644 --- a/tests/integration/upsert/upsert_test.go +++ b/tests/integration/upsert/upsert_test.go @@ -22,6 +22,9 @@ import ( "testing" "github.com/golang/protobuf/proto" + "github.com/stretchr/testify/suite" + "go.uber.org/zap" + "github.com/milvus-io/milvus-proto/go-api/v2/milvuspb" "github.com/milvus-io/milvus-proto/go-api/v2/schemapb" "github.com/milvus-io/milvus/pkg/common" @@ -30,8 +33,6 @@ import ( "github.com/milvus-io/milvus/pkg/util/merr" "github.com/milvus-io/milvus/pkg/util/metric" "github.com/milvus-io/milvus/tests/integration" - "github.com/stretchr/testify/suite" - "go.uber.org/zap" ) type UpsertSuite struct { @@ -155,7 +156,6 @@ func (s *UpsertSuite) TestUpsert() { log.Info("TestUpsert succeed") log.Info("==================") log.Info("==================") - } func TestUpsert(t *testing.T) {
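The hunks above are mechanical: every import block is rewritten into three sections (standard library, third-party modules, then github.com/milvus-io packages), and the formatting fixes collapse empty struct declarations, regroup var blocks, and normalize comment spacing. As a reference for reviewers, a minimal illustrative Go file follows; it is not part of this patch, the package name and test body are hypothetical, and it assumes the testify and milvus pkg modules are on the module path. It only demonstrates the import grouping the tooling produces.

// Illustrative sketch only — shows the three-section import grouping applied
// throughout this change: standard library, third-party, then
// github.com/milvus-io, each section separated by a blank line.
package example

import (
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/milvus-io/milvus/pkg/util/funcutil"
)

// TestImportGrouping exists only to exercise the imports above.
func TestImportGrouping(t *testing.T) {
	name := "demo_" + funcutil.GenRandomStr()
	assert.NotEmpty(t, name)
}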