Add ut for segmentReplica to 100% (#7589)

See also: #6357
Signed-off-by: yangxuan <xuan.yang@zilliz.com>
pull/7595/merge
XuanYang-cn 2021-09-09 10:14:00 +08:00 committed by GitHub
parent 5906551f41
commit 762060e670
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 429 additions and 12 deletions

View File

@ -0,0 +1,38 @@
// Copyright (C) 2019-2020 Zilliz. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations under the License.
package datanode
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestInsertMsg_TimeTick verifies that insertMsg.TimeTick() reports the
// timestampMax of the message's TimeRange, including the zero value.
//
// Fix: the table field was misspelled "timeTimestanpMax"; renamed to
// timeTimestampMax (local to this test, no external callers affected).
func TestInsertMsg_TimeTick(te *testing.T) {
	tests := []struct {
		timeTimestampMax Timestamp // value stored in TimeRange.timestampMax
		description      string
	}{
		{0, "Zero timestampMax"},
		{1, "Normal timestampMax"},
	}

	for _, test := range tests {
		te.Run(test.description, func(t *testing.T) {
			im := &insertMsg{timeRange: TimeRange{timestampMax: test.timeTimestampMax}}
			// TimeTick is expected to surface exactly the configured timestampMax.
			assert.Equal(t, test.timeTimestampMax, im.TimeTick())
		})
	}
}

View File

@ -22,7 +22,7 @@ import (
)
const (
collectionID0 = UniqueID(0)
collectionID0 = UniqueID(2)
collectionID1 = UniqueID(1)
collectionName0 = "collection_0"
collectionName1 = "collection_1"

View File

@ -520,10 +520,24 @@ func (m *RootCoordFactory) DescribeCollection(ctx context.Context, in *milvuspb.
f := MetaFactory{}
meta := f.CollectionMetaFactory(m.collectionID, m.collectionName)
resp := &milvuspb.DescribeCollectionResponse{
Status: &commonpb.Status{},
CollectionID: m.collectionID,
Schema: meta.Schema,
Status: &commonpb.Status{
ErrorCode: commonpb.ErrorCode_UnexpectedError,
},
}
if m.collectionID == -2 {
resp.Status.Reason = "Status not success"
return resp, nil
}
if m.collectionID == -1 {
resp.Status.ErrorCode = commonpb.ErrorCode_Success
return resp, errors.New(resp.Status.GetReason())
}
resp.CollectionID = m.collectionID
resp.Schema = meta.Schema
resp.Status.ErrorCode = commonpb.ErrorCode_Success
return resp, nil
}

View File

@ -177,7 +177,8 @@ func (replica *SegmentReplica) getCollectionAndPartitionID(segID UniqueID) (coll
return 0, 0, fmt.Errorf("Cannot find segment, id = %v", segID)
}
// addNewSegment adds a *New* and *NotFlushed* new segment
// addNewSegment adds a *New* and *NotFlushed* new segment. Before add, please make sure there's no
// such segment by `hasSegment`
func (replica *SegmentReplica) addNewSegment(segID, collID, partitionID UniqueID, channelName string,
startPos, endPos *internalpb.MsgPosition) error {
@ -185,7 +186,9 @@ func (replica *SegmentReplica) addNewSegment(segID, collID, partitionID UniqueID
defer replica.segMu.Unlock()
if collID != replica.collectionID {
log.Warn("Mismatch collection", zap.Int64("ID", collID))
log.Warn("Mismatch collection",
zap.Int64("input ID", collID),
zap.Int64("expected ID", replica.collectionID))
return fmt.Errorf("Mismatch collection, ID=%d", collID)
}
@ -218,13 +221,16 @@ func (replica *SegmentReplica) addNewSegment(segID, collID, partitionID UniqueID
return nil
}
// addNormalSegment adds a *NotNew* and *NotFlushed* segment
// addNormalSegment adds a *NotNew* and *NotFlushed* segment. Before add, please make sure there's no
// such segment by `hasSegment`
func (replica *SegmentReplica) addNormalSegment(segID, collID, partitionID UniqueID, channelName string, numOfRows int64, cp *segmentCheckPoint) error {
replica.segMu.Lock()
defer replica.segMu.Unlock()
if collID != replica.collectionID {
log.Warn("Mismatch collection", zap.Int64("ID", collID))
log.Warn("Mismatch collection",
zap.Int64("input ID", collID),
zap.Int64("expected ID", replica.collectionID))
return fmt.Errorf("Mismatch collection, ID=%d", collID)
}
@ -409,7 +415,7 @@ func (replica *SegmentReplica) getCollectionSchema(collID UniqueID, ts Timestamp
defer replica.segMu.Unlock()
if !replica.validCollection(collID) {
log.Error("Mismatch collection for the replica",
log.Warn("Mismatch collection for the replica",
zap.Int64("Want", replica.collectionID),
zap.Int64("Actual", collID),
)

View File

@ -40,6 +40,83 @@ func newSegmentReplica(rc types.RootCoord, collID UniqueID) *SegmentReplica {
return replica
}
// TestNewReplica checks that constructing a replica via newReplica
// yields a usable (non-nil) instance.
func TestNewReplica(t *testing.T) {
	mockRootCoord := &RootCoordFactory{}

	got := newReplica(mockRootCoord, 0)
	assert.NotNil(t, got)
}
// TestSegmentReplica_getCollectionAndPartitionID covers segment ID lookups
// against each of the replica's three segment maps (new, normal, flushed)
// plus the miss case on an empty replica.
func TestSegmentReplica_getCollectionAndPartitionID(te *testing.T) {
	tests := []struct {
		segInNew     UniqueID
		segInNormal  UniqueID
		segInFlushed UniqueID
		inCollID     UniqueID
		inParID      UniqueID
		description  string
	}{
		{100, 0, 0, 1, 10, "Segment 100 in NewSegments"},
		{0, 200, 0, 2, 20, "Segment 200 in NormalSegments"},
		{0, 0, 300, 3, 30, "Segment 300 in FlushedSegments"},
		{0, 0, 0, 4, 40, "No Segment in replica"},
	}

	for _, test := range tests {
		te.Run(test.description, func(t *testing.T) {
			// makeSeg builds a segment carrying this case's expected IDs.
			makeSeg := func(id UniqueID) *Segment {
				return &Segment{
					collectionID: test.inCollID,
					partitionID:  test.inParID,
					segmentID:    id,
				}
			}

			switch {
			case test.segInNew != 0:
				// Segments registered as "new" are resolvable.
				sr := &SegmentReplica{
					newSegments: map[UniqueID]*Segment{test.segInNew: makeSeg(test.segInNew)},
				}

				collID, parID, err := sr.getCollectionAndPartitionID(test.segInNew)
				assert.NoError(t, err)
				assert.Equal(t, test.inCollID, collID)
				assert.Equal(t, test.inParID, parID)

			case test.segInNormal != 0:
				// Segments registered as "normal" are resolvable.
				sr := &SegmentReplica{
					normalSegments: map[UniqueID]*Segment{test.segInNormal: makeSeg(test.segInNormal)},
				}

				collID, parID, err := sr.getCollectionAndPartitionID(test.segInNormal)
				assert.NoError(t, err)
				assert.Equal(t, test.inCollID, collID)
				assert.Equal(t, test.inParID, parID)

			case test.segInFlushed != 0:
				// NOTE(review): flushed segments are apparently not searched by
				// getCollectionAndPartitionID, so the lookup is expected to fail —
				// confirm against segment_replica.go.
				sr := &SegmentReplica{
					flushedSegments: map[UniqueID]*Segment{test.segInFlushed: makeSeg(test.segInFlushed)},
				}

				collID, parID, err := sr.getCollectionAndPartitionID(test.segInFlushed)
				assert.Error(t, err)
				assert.Zero(t, collID)
				assert.Zero(t, parID)

			default:
				// An unknown segment ID on an empty replica yields an error
				// and zero-valued IDs.
				collID, parID, err := (&SegmentReplica{}).getCollectionAndPartitionID(1000)
				assert.Error(t, err)
				assert.Zero(t, collID)
				assert.Zero(t, parID)
			}
		})
	}
}
func TestSegmentReplica(t *testing.T) {
rc := &RootCoordFactory{}
collID := UniqueID(1)
@ -117,8 +194,287 @@ func TestSegmentReplica(t *testing.T) {
}
})
}
t.Run("Test inner function segment", func(t *testing.T) {
func TestSegmentReplica_InterfaceMethod(te *testing.T) {
rc := &RootCoordFactory{}
te.Run("Test_addNewSegment", func(to *testing.T) {
tests := []struct {
isValidCase bool
replicaCollID UniqueID
inCollID UniqueID
inSegID UniqueID
instartPos *internalpb.MsgPosition
expectdIsNew bool
expectedIsFlushed bool
description string
}{
{isValidCase: false, replicaCollID: 1, inCollID: 2, inSegID: 300, description: "input CollID 2 mismatch with Replica collID"},
{true, 1, 1, 200, new(internalpb.MsgPosition), true, false, "nill address for startPos"},
{true, 1, 1, 200, &internalpb.MsgPosition{}, true, false, "empty struct for startPos"},
}
for _, test := range tests {
to.Run(test.description, func(t *testing.T) {
sr := newSegmentReplica(rc, test.replicaCollID)
require.False(t, sr.hasSegment(test.inSegID, true))
err := sr.addNewSegment(test.inSegID,
test.inCollID, 1, "", test.instartPos, &internalpb.MsgPosition{})
if test.isValidCase {
assert.NoError(t, err)
assert.True(t, sr.hasSegment(test.inSegID, true))
assert.Equal(t, test.expectdIsNew, sr.newSegments[test.inSegID].isNew.Load().(bool))
assert.Equal(t, test.expectedIsFlushed, sr.newSegments[test.inSegID].isFlushed.Load().(bool))
} else {
assert.Error(t, err)
assert.False(t, sr.hasSegment(test.inSegID, true))
}
})
}
})
te.Run("Test_addNormalSegment", func(to *testing.T) {
tests := []struct {
isValidCase bool
replicaCollID UniqueID
inCollID UniqueID
inSegID UniqueID
expectdIsNew bool
expectedIsFlushed bool
description string
}{
{isValidCase: false, replicaCollID: 1, inCollID: 2, inSegID: 300, description: "input CollID 2 mismatch with Replica collID"},
{true, 1, 1, 200, false, false, "normal case"},
}
for _, test := range tests {
to.Run(test.description, func(t *testing.T) {
sr := newSegmentReplica(rc, test.replicaCollID)
require.False(t, sr.hasSegment(test.inSegID, true))
err := sr.addNormalSegment(test.inSegID, test.inCollID, 1, "", 0, &segmentCheckPoint{})
if test.isValidCase {
assert.NoError(t, err)
assert.True(t, sr.hasSegment(test.inSegID, true))
assert.Equal(t, test.expectdIsNew, sr.normalSegments[test.inSegID].isNew.Load().(bool))
assert.Equal(t, test.expectedIsFlushed, sr.normalSegments[test.inSegID].isFlushed.Load().(bool))
} else {
assert.Error(t, err)
assert.False(t, sr.hasSegment(test.inSegID, true))
}
})
}
})
te.Run("Test_listSegmentsCheckPoints", func(to *testing.T) {
tests := []struct {
newSegID UniqueID
newSegCP *segmentCheckPoint
normalSegID UniqueID
normalSegCP *segmentCheckPoint
flushedSegID UniqueID
flushedSegCP *segmentCheckPoint
description string
}{
{newSegID: 100, newSegCP: new(segmentCheckPoint),
description: "Only contain new Seg 100"},
{normalSegID: 200, normalSegCP: new(segmentCheckPoint),
description: "Only contain normal Seg 200"},
{flushedSegID: 300, flushedSegCP: new(segmentCheckPoint),
description: "Only contain flushed Seg 300"},
{100, new(segmentCheckPoint), 200, new(segmentCheckPoint), 0, new(segmentCheckPoint),
"New seg 100 and normal seg 200"},
{100, new(segmentCheckPoint), 0, new(segmentCheckPoint), 300, new(segmentCheckPoint),
"New seg 100 and flushed seg 300"},
{0, new(segmentCheckPoint), 200, new(segmentCheckPoint), 300, new(segmentCheckPoint),
"Normal seg 200 and flushed seg 300"},
{100, new(segmentCheckPoint), 200, new(segmentCheckPoint), 300, new(segmentCheckPoint),
"New seg 100, normal seg 200 and flushed seg 300"},
}
for _, test := range tests {
to.Run(test.description, func(t *testing.T) {
sr := SegmentReplica{
newSegments: make(map[UniqueID]*Segment),
normalSegments: make(map[UniqueID]*Segment),
flushedSegments: make(map[UniqueID]*Segment),
}
expectdCount := 0
if test.newSegID != 0 {
sr.newSegments[test.newSegID] = &Segment{checkPoint: *test.newSegCP}
expectdCount++
}
if test.normalSegID != 0 {
sr.normalSegments[test.normalSegID] = &Segment{checkPoint: *test.normalSegCP}
expectdCount++
}
if test.flushedSegID != 0 {
sr.flushedSegments[test.flushedSegID] = &Segment{checkPoint: *test.flushedSegCP}
}
scp := sr.listSegmentsCheckPoints()
assert.Equal(t, expectdCount, len(scp))
})
}
})
te.Run("Test_updateSegmentEndPosition", func(to *testing.T) {
tests := []struct {
newSegID UniqueID
normalSegID UniqueID
flushedSegID UniqueID
inSegID UniqueID
description string
}{
{newSegID: 100, inSegID: 100,
description: "input seg 100 in newSegments"},
{newSegID: 100, inSegID: 101,
description: "input seg 101 not in newSegments"},
{normalSegID: 200, inSegID: 200,
description: "input seg 200 in normalSegments"},
{normalSegID: 200, inSegID: 201,
description: "input seg 201 not in normalSegments"},
{flushedSegID: 300, inSegID: 300,
description: "input seg 300 in flushedSegments"},
{flushedSegID: 300, inSegID: 301,
description: "input seg 301 not in flushedSegments"},
}
for _, test := range tests {
to.Run(test.description, func(t *testing.T) {
sr := SegmentReplica{
newSegments: make(map[UniqueID]*Segment),
normalSegments: make(map[UniqueID]*Segment),
flushedSegments: make(map[UniqueID]*Segment),
}
if test.newSegID != 0 {
sr.newSegments[test.newSegID] = &Segment{}
}
if test.normalSegID != 0 {
sr.normalSegments[test.normalSegID] = &Segment{}
}
if test.flushedSegID != 0 {
sr.flushedSegments[test.flushedSegID] = &Segment{}
}
sr.updateSegmentEndPosition(test.inSegID, new(internalpb.MsgPosition))
sr.removeSegment(0)
})
}
})
te.Run("Test_updateStatistics", func(to *testing.T) {
tests := []struct {
isvalidCase bool
newSegID UniqueID
normalSegID UniqueID
flushedSegID UniqueID
inSegID UniqueID
inNumRows int64
description string
}{
{isvalidCase: true, newSegID: 100, inSegID: 100, inNumRows: 100,
description: "input seg 100 in newSegments with numRows 100"},
{isvalidCase: false, newSegID: 100, inSegID: 101, inNumRows: 100,
description: "input seg 101 not in newSegments with numRows 100"},
{isvalidCase: true, normalSegID: 200, inSegID: 200, inNumRows: 200,
description: "input seg 200 in normalSegments with numRows 200"},
{isvalidCase: false, normalSegID: 200, inSegID: 201, inNumRows: 200,
description: "input seg 201 not in normalSegments with numRows 200"},
{isvalidCase: false, flushedSegID: 300, inSegID: 300, inNumRows: 300,
description: "input seg 300 in flushedSegments"},
{isvalidCase: false, flushedSegID: 300, inSegID: 301, inNumRows: 300,
description: "input seg 301 not in flushedSegments"},
}
for _, test := range tests {
to.Run(test.description, func(t *testing.T) {
sr := SegmentReplica{
newSegments: make(map[UniqueID]*Segment),
normalSegments: make(map[UniqueID]*Segment),
flushedSegments: make(map[UniqueID]*Segment),
}
if test.newSegID != 0 {
sr.newSegments[test.newSegID] = &Segment{}
}
if test.normalSegID != 0 {
sr.normalSegments[test.normalSegID] = &Segment{}
}
if test.flushedSegID != 0 {
sr.flushedSegments[test.flushedSegID] = &Segment{}
}
err := sr.updateStatistics(test.inSegID, test.inNumRows)
if test.isvalidCase {
assert.NoError(t, err)
updates, err := sr.getSegmentStatisticsUpdates(test.inSegID)
assert.NoError(t, err)
assert.Equal(t, test.inNumRows, updates.GetNumRows())
assert.Equal(t, test.inSegID, updates.GetSegmentID())
sr.updateSegmentCheckPoint(10000)
} else {
assert.Error(t, err)
updates, err := sr.getSegmentStatisticsUpdates(test.inSegID)
assert.Error(t, err)
assert.Nil(t, updates)
}
})
}
})
te.Run("Test_getCollectionSchema", func(to *testing.T) {
tests := []struct {
isValid bool
replicaCollID UniqueID
inputCollID UniqueID
metaServiceErr bool
description string
}{
{true, 1, 1, false, "Normal case"},
{false, 1, 2, false, "Input collID 2 mismatch with replicaCollID 1"},
{false, 1, 1, true, "RPC call fails"},
}
for _, test := range tests {
to.Run(test.description, func(t *testing.T) {
sr := newSegmentReplica(rc, test.replicaCollID)
if test.metaServiceErr {
rc.setCollectionID(-1)
} else {
rc.setCollectionID(1)
}
s, err := sr.getCollectionSchema(test.inputCollID, Timestamp(0))
if test.isValid {
assert.NoError(t, err)
assert.NotNil(t, s)
} else {
assert.Error(t, err)
assert.Nil(t, s)
}
})
}
})
te.Run("Test inner function segment", func(t *testing.T) {
collID := UniqueID(1)
replica := newSegmentReplica(rc, collID)
assert.False(t, replica.hasSegment(0, true))
assert.False(t, replica.hasSegment(0, false))
@ -168,6 +524,9 @@ func TestSegmentReplica(t *testing.T) {
assert.False(t, seg.isNew.Load().(bool))
assert.False(t, seg.isFlushed.Load().(bool))
err = replica.addNormalSegment(1, 100000, 2, "invalid", int64(0), &segmentCheckPoint{})
assert.Error(t, err)
err = replica.updateStatistics(1, 10)
assert.NoError(t, err)
assert.Equal(t, int64(20), seg.numRows)
@ -203,7 +562,7 @@ func TestSegmentReplica(t *testing.T) {
})
}
func TestSegmentUpdatePKRange(t *testing.T) {
func TestSegmentReplica_UpdatePKRange(t *testing.T) {
seg := &Segment{
pkFilter: bloom.NewWithEstimates(100000, 0.005),
maxPK: math.MinInt64,
@ -226,7 +585,7 @@ func TestSegmentUpdatePKRange(t *testing.T) {
}
}
func TestReplicaUpdatePKRange(t *testing.T) {
func TestReplica_UpdatePKRange(t *testing.T) {
rc := &RootCoordFactory{}
collID := UniqueID(1)
partID := UniqueID(2)