Add NewDataService in main

Signed-off-by: sunby <bingyi.sun@zilliz.com>
sunby 2021-01-30 11:49:48 +08:00 committed by yefu.chen
parent 51eac0eb84
commit 3a67dda06c
4 changed files with 95 additions and 60 deletions


@@ -2,72 +2,25 @@ package main
 import (
     "context"
-    "fmt"
     "log"
     "os"
     "os/signal"
     "syscall"
-    "time"
-    ms "github.com/zilliztech/milvus-distributed/internal/distributed/masterservice"
-    "github.com/zilliztech/milvus-distributed/internal/masterservice"
-    "github.com/zilliztech/milvus-distributed/internal/distributed/dataservice"
-    "github.com/zilliztech/milvus-distributed/internal/proto/internalpb2"
+    "github.com/zilliztech/milvus-distributed/cmd/distributed/components"
 )
 func main() {
     ctx, cancel := context.WithCancel(context.Background())
-    service := dataservice.NewGrpcService(ctx)
-    masterservice.Params.Init()
-    client, err := ms.NewGrpcClient(fmt.Sprintf("%s:%d", masterservice.Params.Address, masterservice.Params.Port), 30*time.Second)
+    svr, err := components.NewDataService(ctx)
     if err != nil {
         panic(err)
     }
-    log.Println("master client create complete")
-    if err = client.Init(); err != nil {
+    if err = svr.Run(); err != nil {
         panic(err)
     }
-    if err = client.Start(); err != nil {
-        panic(err)
-    }
-    service.SetMasterClient(client)
-    ticker := time.NewTicker(500 * time.Millisecond)
-    tctx, tcancel := context.WithTimeout(ctx, 30*time.Second)
-    defer func() {
-        if err = client.Stop(); err != nil {
-            panic(err)
-        }
-        ticker.Stop()
-        tcancel()
-    }()
-    for {
-        var states *internalpb2.ComponentStates
-        select {
-        case <-ticker.C:
-            states, err = client.GetComponentStates()
-            if err != nil {
-                continue
-            }
-        case <-tctx.Done():
-            panic("master timeout")
-        }
-        if states.State.StateCode == internalpb2.StateCode_INITIALIZING || states.State.StateCode == internalpb2.StateCode_HEALTHY {
-            break
-        }
-    }
-    if err = service.Init(); err != nil {
-        panic(err)
-    }
-    if err = service.Start(); err != nil {
-        panic(err)
-    }
     sc := make(chan os.Signal)
     signal.Notify(sc,
         syscall.SIGHUP,
@@ -76,7 +29,7 @@ func main() {
         syscall.SIGQUIT)
     <-sc
     cancel()
-    if err = service.Stop(); err != nil {
+    if err := svr.Stop(); err != nil {
         panic(err)
     }
     log.Println("shut down data service")


@@ -0,0 +1,83 @@
+package components
+
+import (
+    "context"
+    "errors"
+    "fmt"
+    "log"
+    "time"
+
+    ms "github.com/zilliztech/milvus-distributed/internal/distributed/masterservice"
+    "github.com/zilliztech/milvus-distributed/internal/masterservice"
+    "github.com/zilliztech/milvus-distributed/internal/proto/internalpb2"
+    "github.com/zilliztech/milvus-distributed/internal/distributed/dataservice"
+)
+
+type DataService struct {
+    ctx          context.Context
+    server       *dataservice.Service
+    masterClient *ms.GrpcClient
+}
+
+func NewDataService(ctx context.Context) (*DataService, error) {
+    service := dataservice.NewGrpcService(ctx)
+    masterservice.Params.Init()
+    client, err := ms.NewGrpcClient(fmt.Sprintf("%s:%d", masterservice.Params.Address, masterservice.Params.Port), 30*time.Second)
+    if err != nil {
+        return nil, err
+    }
+    log.Println("master client create complete")
+    if err = client.Init(); err != nil {
+        return nil, err
+    }
+    if err = client.Start(); err != nil {
+        return nil, err
+    }
+
+    ticker := time.NewTicker(500 * time.Millisecond)
+    tctx, tcancel := context.WithTimeout(ctx, 30*time.Second)
+    defer func() {
+        ticker.Stop()
+        tcancel()
+    }()
+
+    for {
+        var states *internalpb2.ComponentStates
+        select {
+        case <-ticker.C:
+            states, err = client.GetComponentStates()
+            if err != nil {
+                continue
+            }
+        case <-tctx.Done():
+            return nil, errors.New("master client connect timeout")
+        }
+        if states.State.StateCode == internalpb2.StateCode_INITIALIZING || states.State.StateCode == internalpb2.StateCode_HEALTHY {
+            break
+        }
+    }
+
+    service.SetMasterClient(client)
+
+    return &DataService{
+        ctx:          ctx,
+        server:       service,
+        masterClient: client,
+    }, nil
+}
+
+func (s *DataService) Run() error {
+    if err := s.server.Init(); err != nil {
+        return err
+    }
+    if err := s.server.Start(); err != nil {
+        return err
+    }
+    return nil
+}
+
+func (s *DataService) Stop() error {
+    _ = s.masterClient.Stop()
+    _ = s.server.Stop()
+    return nil
+}
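
NewDataService blocks until the master reports either INITIALIZING or HEALTHY, polling GetComponentStates every 500ms under a 30-second overall timeout. For illustration only, that readiness loop could be factored into a standalone helper along the lines of the sketch below; the stateGetter interface and waitForComponent function are hypothetical and do not exist in this commit.

package components

import (
    "context"
    "errors"
    "time"

    "github.com/zilliztech/milvus-distributed/internal/proto/internalpb2"
)

// stateGetter is a hypothetical narrow interface covering the single
// master-client method the readiness loop above relies on.
type stateGetter interface {
    GetComponentStates() (*internalpb2.ComponentStates, error)
}

// waitForComponent polls GetComponentStates every 500ms until the peer
// reports INITIALIZING or HEALTHY, or until timeout elapses.
// Illustrative only; not part of this commit.
func waitForComponent(ctx context.Context, c stateGetter, timeout time.Duration) error {
    ticker := time.NewTicker(500 * time.Millisecond)
    defer ticker.Stop()
    tctx, cancel := context.WithTimeout(ctx, timeout)
    defer cancel()

    for {
        select {
        case <-ticker.C:
            states, err := c.GetComponentStates()
            if err != nil {
                continue // peer not reachable yet; keep polling
            }
            code := states.State.StateCode
            if code == internalpb2.StateCode_INITIALIZING || code == internalpb2.StateCode_HEALTHY {
                return nil
            }
        case <-tctx.Done():
            return errors.New("master client connect timeout")
        }
    }
}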


@@ -7,8 +7,6 @@ import (
     "net"
     "time"

-    "github.com/zilliztech/milvus-distributed/internal/distributed/masterservice"
-
     "google.golang.org/grpc"

     "github.com/zilliztech/milvus-distributed/internal/dataservice"
@@ -21,10 +19,9 @@ import (
 )

 type Service struct {
-    server       *dataservice.Server
-    ctx          context.Context
-    grpcServer   *grpc.Server
-    masterClient *masterservice.GrpcClient
+    server     *dataservice.Server
+    ctx        context.Context
+    grpcServer *grpc.Server
 }

 func NewGrpcService(ctx context.Context) *Service {
@@ -49,18 +46,17 @@ func (s *Service) Init() error {
    datapb.RegisterDataServiceServer(s.grpcServer, s)
    lis, err := net.Listen("tcp", fmt.Sprintf("%s:%d", dataservice.Params.Address, dataservice.Params.Port))
    if err != nil {
        log.Fatal(err.Error())
        return nil
    }
    c := make(chan struct{})
    go func() {
        if err2 := s.grpcServer.Serve(lis); err2 != nil {
            log.Println(err.Error())
            close(c)
            err = err2
        }
    }()
    timer := time.NewTimer(1 * time.Second)
    defer timer.Stop()
    select {
    case <-timer.C:
        break
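
The Init fragment above starts grpcServer.Serve on a goroutine and then waits on a one-second timer, so a Serve call that fails immediately (for example, a port that is already bound) can still surface an error from Init rather than being lost in the background. Below is a self-contained sketch of that serve-then-wait pattern, under the assumption that this is the intent of the truncated hunk; the package, function, and channel names are illustrative, not the committed code.

package example

import (
    "net"
    "time"

    "google.golang.org/grpc"
)

// serveWithGracePeriod starts srv on addr in the background, but gives
// Serve a short window to fail fast before reporting success.
// Illustrative rewrite of the pattern above; not the committed code.
func serveWithGracePeriod(srv *grpc.Server, addr string) error {
    lis, err := net.Listen("tcp", addr)
    if err != nil {
        return err
    }

    failed := make(chan struct{})
    var serveErr error
    go func() {
        if err := srv.Serve(lis); err != nil {
            serveErr = err
            close(failed) // signal the fast-failure path below
        }
    }()

    // If Serve returns an error within a second, surface it; otherwise
    // assume the listener is healthy and serving.
    timer := time.NewTimer(1 * time.Second)
    defer timer.Stop()
    select {
    case <-failed:
        return serveErr
    case <-timer.C:
        return nil
    }
}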


@@ -414,10 +414,13 @@ func (t *ShowPartitionReqTask) IgnoreTimeStamp() bool {
 }

 func (t *ShowPartitionReqTask) Execute() error {
-    coll, err := t.core.MetaTable.GetCollectionByName(t.Req.CollectionName)
+    coll, err := t.core.MetaTable.GetCollectionByID(t.Req.CollectionID)
     if err != nil {
         return err
     }
+    if coll.Schema.Name != t.Req.CollectionName {
+        return errors.Errorf("collection %s not exist", t.Req.CollectionName)
+    }
     for _, partID := range coll.PartitionIDs {
         partMeta, err := t.core.MetaTable.GetPartitionByID(partID)
         if err != nil {
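
The final hunk changes ShowPartitionReqTask.Execute to look the collection up by the request's CollectionID rather than by name, and then checks that the stored schema name still matches the requested CollectionName, returning a "collection not exist" error on a mismatch before the partition-listing loop (truncated above) iterates coll.PartitionIDs via GetPartitionByID.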