feat(storage/readservice): define engine interface (#15894)
pull/15995/head
commit afd124f19f

@@ -0,0 +1,167 @@
package launcher

import (
	"context"
	"io/ioutil"
	"os"
	"sync"

	"github.com/influxdata/influxdb"
	"github.com/influxdata/influxdb/http"
	"github.com/influxdata/influxdb/kit/prom"
	"github.com/influxdata/influxdb/models"
	"github.com/influxdata/influxdb/storage"
	"github.com/influxdata/influxdb/storage/readservice"
	"github.com/influxdata/influxdb/tsdb"
	"github.com/influxdata/influxdb/tsdb/cursors"
	"github.com/influxdata/influxql"
	"github.com/prometheus/client_golang/prometheus"
	"go.uber.org/zap"
)

var _ Engine = (*storage.Engine)(nil)

// Engine defines the time-series storage engine. It wraps *storage.Engine
// to facilitate testing.
type Engine interface {
	influxdb.DeleteService
	readservice.Viewer
	storage.PointsWriter
	storage.BucketDeleter
	prom.PrometheusCollector

	SeriesCardinality() int64

	WithLogger(log *zap.Logger)
	Open(context.Context) error
	Close() error
}

var _ Engine = (*TemporaryEngine)(nil)
var _ http.Flusher = (*TemporaryEngine)(nil)

// TemporaryEngine creates a time-series storage engine backed
// by a temporary directory that is removed on Close.
type TemporaryEngine struct {
	path    string
	config  storage.Config
	options []storage.Option

	mu     sync.Mutex
	opened bool

	engine *storage.Engine

	logger *zap.Logger
}

// NewTemporaryEngine creates a new engine that places the storage engine files into
// a temporary directory; used for testing.
func NewTemporaryEngine(c storage.Config, options ...storage.Option) *TemporaryEngine {
	return &TemporaryEngine{
		config:  c,
		options: options,
		logger:  zap.NewNop(),
	}
}

// Open creates a temporary directory and opens the engine.
func (t *TemporaryEngine) Open(ctx context.Context) error {
	t.mu.Lock()
	defer t.mu.Unlock()

	if t.opened {
		return nil
	}

	path, err := ioutil.TempDir("", "e2e")
	if err != nil {
		return err
	}

	t.path = path
	t.engine = storage.NewEngine(path, t.config, t.options...)
	t.engine.WithLogger(t.logger)

	if err := t.engine.Open(ctx); err != nil {
		_ = os.RemoveAll(path)
		return err
	}

	t.opened = true
	return nil
}

// Close will remove the directory containing the time-series files.
func (t *TemporaryEngine) Close() error {
	t.mu.Lock()
	defer t.mu.Unlock()

	t.opened = false
	err := t.engine.Close()
	_ = os.RemoveAll(t.path)
	return err
}

// WritePoints stores points into the storage engine.
func (t *TemporaryEngine) WritePoints(ctx context.Context, points []models.Point) error {
	return t.engine.WritePoints(ctx, points)
}

// SeriesCardinality returns the number of series in the engine.
func (t *TemporaryEngine) SeriesCardinality() int64 {
	return t.engine.SeriesCardinality()
}

// DeleteBucketRangePredicate deletes data in a bucket that matches the given time range and predicate.
func (t *TemporaryEngine) DeleteBucketRangePredicate(ctx context.Context, orgID, bucketID influxdb.ID, min, max int64, pred influxdb.Predicate) error {
	return t.engine.DeleteBucketRangePredicate(ctx, orgID, bucketID, min, max, pred)
}

// DeleteBucket deletes a bucket from the time-series data.
func (t *TemporaryEngine) DeleteBucket(ctx context.Context, orgID, bucketID influxdb.ID) error {
	return t.engine.DeleteBucket(ctx, orgID, bucketID)
}

// WithLogger sets the logger on the engine. It must be called before Open.
func (t *TemporaryEngine) WithLogger(log *zap.Logger) {
	t.logger = log.With(zap.String("service", "temporary_engine"))
}

// PrometheusCollectors returns all the prometheus collectors associated with
// the engine and its components.
func (t *TemporaryEngine) PrometheusCollectors() []prometheus.Collector {
	return t.engine.PrometheusCollectors()
}

// CreateCursorIterator calls into the underlying engine's CreateCursorIterator.
func (t *TemporaryEngine) CreateCursorIterator(ctx context.Context) (tsdb.CursorIterator, error) {
	return t.engine.CreateCursorIterator(ctx)
}

// CreateSeriesCursor calls into the underlying engine's CreateSeriesCursor.
func (t *TemporaryEngine) CreateSeriesCursor(ctx context.Context, req storage.SeriesCursorRequest, cond influxql.Expr) (storage.SeriesCursor, error) {
	return t.engine.CreateSeriesCursor(ctx, req, cond)
}

// TagKeys calls into the underlying engine's TagKeys.
func (t *TemporaryEngine) TagKeys(ctx context.Context, orgID, bucketID influxdb.ID, start, end int64, predicate influxql.Expr) (cursors.StringIterator, error) {
	return t.engine.TagKeys(ctx, orgID, bucketID, start, end, predicate)
}

// TagValues calls into the underlying engine's TagValues.
func (t *TemporaryEngine) TagValues(ctx context.Context, orgID, bucketID influxdb.ID, tagKey string, start, end int64, predicate influxql.Expr) (cursors.StringIterator, error) {
	return t.engine.TagValues(ctx, orgID, bucketID, tagKey, start, end, predicate)
}

// Flush will remove the time-series files and re-open the engine.
func (t *TemporaryEngine) Flush(ctx context.Context) {
	if err := t.Close(); err != nil {
		t.logger.Fatal("unable to close engine", zap.Error(err))
	}

	if err := t.Open(ctx); err != nil {
		t.logger.Fatal("unable to open engine", zap.Error(err))
	}
}
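
To make the testing story concrete, here is a minimal sketch (not part of the diff) of how a test could drive a TemporaryEngine through the new Engine surface. The test package name, the launcher import path, storage.NewConfig, and models.ParsePointsString are assumptions drawn from the surrounding codebase, not from this change.

package launcher_test

import (
	"context"
	"testing"

	"github.com/influxdata/influxdb/cmd/influxd/launcher" // assumed import path
	"github.com/influxdata/influxdb/models"
	"github.com/influxdata/influxdb/storage"
)

// Sketch only: write a point into a TemporaryEngine and flush it away again.
func TestTemporaryEngine_WriteAndFlush(t *testing.T) {
	ctx := context.Background()

	engine := launcher.NewTemporaryEngine(storage.NewConfig()) // storage.NewConfig is an assumption
	if err := engine.Open(ctx); err != nil {
		t.Fatal(err)
	}
	defer engine.Close()

	points, err := models.ParsePointsString("m1,t1=v1 v=1.0")
	if err != nil {
		t.Fatal(err)
	}
	if err := engine.WritePoints(ctx, points); err != nil {
		t.Fatal(err)
	}

	// Flush tears down the temporary directory and re-opens an empty engine,
	// which is what the /debug/flush endpoint relies on between e2e tests.
	engine.Flush(ctx)
	if got := engine.SeriesCardinality(); got != 0 {
		t.Fatalf("expected an empty engine after flush, got %d series", got)
	}
}
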
@@ -0,0 +1,15 @@
package launcher

import (
	"context"

	"github.com/influxdata/influxdb/http"
)

type flushers []http.Flusher

func (f flushers) Flush(ctx context.Context) {
	for _, flusher := range []http.Flusher(f) {
		flusher.Flush(ctx)
	}
}
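
For orientation, a small sketch (not part of the change) of how this aggregate type is meant to be used inside the launcher package: every store or engine that should drop its data in testing mode is collected, and a single Flush call fans out to all of them, mirroring what the /debug/flush handler does with the value assembled in run(). The helper name flushAll is hypothetical.

package launcher

import (
	"context"

	"github.com/influxdata/influxdb/http"
)

// flushAll is a hypothetical helper: gather everything that should be wiped
// between end-to-end tests and clear it with one call.
func flushAll(ctx context.Context, kvStore http.Flusher, engine *TemporaryEngine) {
	all := flushers{kvStore, engine} // *TemporaryEngine satisfies http.Flusher via Flush
	all.Flush(ctx)
}
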
@@ -72,6 +72,7 @@ const (
	JaegerTracing = "jaeger"
)

// NewCommand creates the command to run influxdb.
func NewCommand() *cobra.Command {
	l := NewLauncher()
	cmd := &cobra.Command{
@@ -247,13 +248,13 @@ func buildLauncherCommand(l *Launcher, cmd *cobra.Command) {
			Desc:    "vault authentication token",
		},
		{
			DestP:   &l.httpTlsCert,
			DestP:   &l.httpTLSCert,
			Flag:    "tls-cert",
			Default: "",
			Desc:    "TLS certificate for HTTPs",
		},
		{
			DestP:   &l.httpTlsKey,
			DestP:   &l.httpTLSKey,
			Flag:    "tls-key",
			Default: "",
			Desc:    "TLS key for HTTPs",
@@ -294,15 +295,15 @@ type Launcher struct {

	boltClient *bolt.Client
	kvService  *kv.Service
	engine *storage.Engine
	engine Engine
	StorageConfig storage.Config

	queryController *control.Controller

	httpPort   int
	httpServer *nethttp.Server
	httpTlsCert string
	httpTlsKey  string
	httpTLSCert string
	httpTLSKey  string

	natsServer *nats.Server
	natsPort   int
@@ -364,7 +365,7 @@ func (m *Launcher) NatsURL() string {

// Engine returns a reference to the storage engine. It should only be called
// for end-to-end testing purposes.
func (m *Launcher) Engine() *storage.Engine {
func (m *Launcher) Engine() Engine {
	return m.engine
}

@@ -493,20 +494,20 @@ func (m *Launcher) run(ctx context.Context) (err error) {
		SessionLength: time.Duration(m.sessionLength) * time.Minute,
	}

	var flusher http.Flusher
	flushers := flushers{}
	switch m.storeType {
	case BoltStore:
		store := bolt.NewKVStore(m.boltPath)
		store.WithDB(m.boltClient.DB())
		m.kvService = kv.NewService(store, serviceConfig)
		if m.testing {
			flusher = store
			flushers = append(flushers, store)
		}
	case MemoryStore:
		store := inmem.NewKVStore()
		m.kvService = kv.NewService(store, serviceConfig)
		if m.testing {
			flusher = store
			flushers = append(flushers, store)
		}
	default:
		err := fmt.Errorf("unknown store type %s; expected bolt or memory", m.storeType)
@@ -576,56 +577,61 @@ func (m *Launcher) run(ctx context.Context) (err error) {
		return err
	}

	var deleteService platform.DeleteService
	var pointsWriter storage.PointsWriter
	{
	if m.testing {
		// the testing engine will write/read into a temporary directory
		engine := NewTemporaryEngine(m.StorageConfig, storage.WithRetentionEnforcer(bucketSvc))
		flushers = append(flushers, engine)
		m.engine = engine
	} else {
		m.engine = storage.NewEngine(m.enginePath, m.StorageConfig, storage.WithRetentionEnforcer(bucketSvc))
		m.engine.WithLogger(m.logger)

		if err := m.engine.Open(ctx); err != nil {
			m.logger.Error("failed to open engine", zap.Error(err))
			return err
		}
		// The Engine's metrics must be registered after it opens.
		m.reg.MustRegister(m.engine.PrometheusCollectors()...)

		pointsWriter = m.engine
		deleteService = m.engine

		// TODO(cwolff): Figure out a good default per-query memory limit:
		// https://github.com/influxdata/influxdb/issues/13642
		const (
			concurrencyQuota         = 10
			memoryBytesQuotaPerQuery = math.MaxInt64
			QueueSize                = 10
		)

		cc := control.Config{
			ConcurrencyQuota:         concurrencyQuota,
			MemoryBytesQuotaPerQuery: int64(memoryBytesQuotaPerQuery),
			QueueSize:                QueueSize,
			Logger:                   m.logger.With(zap.String("service", "storage-reads")),
		}

		authBucketSvc := authorizer.NewBucketService(bucketSvc)
		authOrgSvc := authorizer.NewOrgService(orgSvc)
		authSecretSvc := authorizer.NewSecretService(secretSvc)
		reader := reads.NewReader(readservice.NewStore(m.engine))
		deps, err := influxdb.NewDependencies(reader, m.engine, authBucketSvc, authOrgSvc, authSecretSvc, cc.MetricLabelKeys)
		if err != nil {
			m.logger.Error("Failed to get query controller dependencies", zap.Error(err))
			return err
		}
		cc.ExecutorDependencies = []flux.Dependency{deps}

		c, err := control.New(cc)
		if err != nil {
			m.logger.Error("Failed to create query controller", zap.Error(err))
			return err
		}
		m.queryController = c
		m.reg.MustRegister(m.queryController.PrometheusCollectors()...)
	}
	m.engine.WithLogger(m.logger)
	if err := m.engine.Open(ctx); err != nil {
		m.logger.Error("failed to open engine", zap.Error(err))
		return err
	}
	// The Engine's metrics must be registered after it opens.
	m.reg.MustRegister(m.engine.PrometheusCollectors()...)

	var (
		deleteService platform.DeleteService = m.engine
		pointsWriter  storage.PointsWriter   = m.engine
	)

	// TODO(cwolff): Figure out a good default per-query memory limit:
	// https://github.com/influxdata/influxdb/issues/13642
	const (
		concurrencyQuota         = 10
		memoryBytesQuotaPerQuery = math.MaxInt64
		QueueSize                = 10
	)

	deps, err := influxdb.NewDependencies(
		reads.NewReader(readservice.NewStore(m.engine)),
		m.engine,
		authorizer.NewBucketService(bucketSvc),
		authorizer.NewOrgService(orgSvc),
		authorizer.NewSecretService(secretSvc),
		nil,
	)
	if err != nil {
		m.logger.Error("Failed to get query controller dependencies", zap.Error(err))
		return err
	}

	m.queryController, err = control.New(control.Config{
		ConcurrencyQuota:         concurrencyQuota,
		MemoryBytesQuotaPerQuery: int64(memoryBytesQuotaPerQuery),
		QueueSize:                QueueSize,
		Logger:                   m.logger.With(zap.String("service", "storage-reads")),
		ExecutorDependencies:     []flux.Dependency{deps},
	})
	if err != nil {
		m.logger.Error("Failed to create query controller", zap.Error(err))
		return err
	}

	m.reg.MustRegister(m.queryController.PrometheusCollectors()...)

	var storageQueryService = readservice.NewProxyQueryService(m.queryController)
	var taskSvc platform.TaskService
@@ -869,7 +875,7 @@ func (m *Launcher) run(ctx context.Context) (err error) {
	m.httpServer.Handler = h
	// If we are in testing mode we allow all data to be flushed and removed.
	if m.testing {
		m.httpServer.Handler = http.DebugFlush(ctx, h, flusher)
		m.httpServer.Handler = http.DebugFlush(ctx, h, flushers)
	}

	ln, err := net.Listen("tcp", m.httpBindAddress)
@@ -882,9 +888,9 @@ func (m *Launcher) run(ctx context.Context) (err error) {
	var cer tls.Certificate
	transport := "http"

	if m.httpTlsCert != "" && m.httpTlsKey != "" {
	if m.httpTLSCert != "" && m.httpTLSKey != "" {
		var err error
		cer, err = tls.LoadX509KeyPair(m.httpTlsCert, m.httpTlsKey)
		cer, err = tls.LoadX509KeyPair(m.httpTLSCert, m.httpTLSKey)

		if err != nil {
			httpLogger.Error("failed to load x509 key pair", zap.Error(err))
@@ -906,7 +912,7 @@ func (m *Launcher) run(ctx context.Context) (err error) {
	logger.Info("Listening", zap.String("transport", transport), zap.String("addr", m.httpBindAddress), zap.Int("port", m.httpPort))

	if cer.Certificate != nil {
		if err := m.httpServer.ServeTLS(ln, m.httpTlsCert, m.httpTlsKey); err != nethttp.ErrServerClosed {
		if err := m.httpServer.ServeTLS(ln, m.httpTLSCert, m.httpTLSKey); err != nethttp.ErrServerClosed {
			logger.Error("failed https service", zap.Error(err))
		}
	} else {
@@ -34,8 +34,8 @@ type indexSeriesCursor struct {
	hasValueExpr bool
}

func newIndexSeriesCursor(ctx context.Context, src *readSource, predicate *datatypes.Predicate, engine *storage.Engine) (*indexSeriesCursor, error) {
	queries, err := engine.CreateCursorIterator(ctx)
func newIndexSeriesCursor(ctx context.Context, src *readSource, predicate *datatypes.Predicate, viewer Viewer) (*indexSeriesCursor, error) {
	queries, err := viewer.CreateCursorIterator(ctx)
	if err != nil {
		return nil, err
	}

@@ -74,7 +74,7 @@ func newIndexSeriesCursor(ctx context.Context, src *readSource, predicate *datat
	scr := storage.SeriesCursorRequest{
		Name: tsdb.EncodeName(platform.ID(src.OrganizationID), platform.ID(src.BucketID)),
	}
	p.sqry, err = engine.CreateSeriesCursor(ctx, scr, opt.Condition)
	p.sqry, err = viewer.CreateSeriesCursor(ctx, scr, opt.Condition)
	if err != nil {
		p.Close()
		return nil, err
@@ -12,16 +12,26 @@ import (
	"github.com/influxdata/influxdb/storage"
	"github.com/influxdata/influxdb/storage/reads"
	"github.com/influxdata/influxdb/storage/reads/datatypes"
	"github.com/influxdata/influxdb/tsdb"
	"github.com/influxdata/influxdb/tsdb/cursors"
	"github.com/influxdata/influxql"
)

type store struct {
	engine *storage.Engine
// Viewer is used by the store to query data from time-series files.
type Viewer interface {
	CreateCursorIterator(ctx context.Context) (tsdb.CursorIterator, error)
	CreateSeriesCursor(ctx context.Context, req storage.SeriesCursorRequest, cond influxql.Expr) (storage.SeriesCursor, error)
	TagKeys(ctx context.Context, orgID, bucketID influxdb.ID, start, end int64, predicate influxql.Expr) (cursors.StringIterator, error)
	TagValues(ctx context.Context, orgID, bucketID influxdb.ID, tagKey string, start, end int64, predicate influxql.Expr) (cursors.StringIterator, error)
}

func NewStore(engine *storage.Engine) reads.Store {
	return &store{engine: engine}
type store struct {
	viewer Viewer
}

// NewStore creates a store used to query time-series data.
func NewStore(viewer Viewer) reads.Store {
	return &store{viewer: viewer}
}

func (s *store) ReadFilter(ctx context.Context, req *datatypes.ReadFilterRequest) (reads.ResultSet, error) {
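
As a rough illustration of what the new Viewer seam buys, here is a sketch (not part of the diff) of a stub that a unit test could hand to NewStore, so the read store can be constructed without touching any TSM files. The test package name, the nil return values, and the newTestStore helper are assumptions.

package readservice_test

import (
	"context"

	"github.com/influxdata/influxdb"
	"github.com/influxdata/influxdb/storage"
	"github.com/influxdata/influxdb/storage/reads"
	"github.com/influxdata/influxdb/storage/readservice"
	"github.com/influxdata/influxdb/tsdb"
	"github.com/influxdata/influxdb/tsdb/cursors"
	"github.com/influxdata/influxql"
)

// stubViewer satisfies readservice.Viewer with canned (empty) results.
type stubViewer struct{}

func (stubViewer) CreateCursorIterator(context.Context) (tsdb.CursorIterator, error) {
	return nil, nil
}

func (stubViewer) CreateSeriesCursor(context.Context, storage.SeriesCursorRequest, influxql.Expr) (storage.SeriesCursor, error) {
	return nil, nil
}

func (stubViewer) TagKeys(context.Context, influxdb.ID, influxdb.ID, int64, int64, influxql.Expr) (cursors.StringIterator, error) {
	return nil, nil
}

func (stubViewer) TagValues(context.Context, influxdb.ID, influxdb.ID, string, int64, int64, influxql.Expr) (cursors.StringIterator, error) {
	return nil, nil
}

var _ readservice.Viewer = stubViewer{}

// newTestStore shows the only wiring a test needs now: a Viewer, not a *storage.Engine.
func newTestStore() reads.Store {
	return readservice.NewStore(stubViewer{})
}
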
@@ -35,7 +45,7 @@ func (s *store) ReadFilter(ctx context.Context, req *datatypes.ReadFilterRequest
	}

	var cur reads.SeriesCursor
	if ic, err := newIndexSeriesCursor(ctx, &source, req.Predicate, s.engine); err != nil {
	if ic, err := newIndexSeriesCursor(ctx, &source, req.Predicate, s.viewer); err != nil {
		return nil, err
	} else if ic == nil {
		return nil, nil

@@ -57,7 +67,7 @@ func (s *store) ReadGroup(ctx context.Context, req *datatypes.ReadGroupRequest)
	}

	newCursor := func() (reads.SeriesCursor, error) {
		cur, err := newIndexSeriesCursor(ctx, &source, req.Predicate, s.engine)
		cur, err := newIndexSeriesCursor(ctx, &source, req.Predicate, s.viewer)
		if cur == nil || err != nil {
			return nil, err
		}

@@ -103,7 +113,7 @@ func (s *store) TagKeys(ctx context.Context, req *datatypes.TagKeysRequest) (cur
	if err != nil {
		return nil, err
	}
	return s.engine.TagKeys(ctx, influxdb.ID(readSource.OrganizationID), influxdb.ID(readSource.BucketID), req.Range.Start, req.Range.End, expr)
	return s.viewer.TagKeys(ctx, influxdb.ID(readSource.OrganizationID), influxdb.ID(readSource.BucketID), req.Range.Start, req.Range.End, expr)
}

func (s *store) TagValues(ctx context.Context, req *datatypes.TagValuesRequest) (cursors.StringIterator, error) {

@@ -146,7 +156,7 @@ func (s *store) TagValues(ctx context.Context, req *datatypes.TagValuesRequest)
	if err != nil {
		return nil, err
	}
	return s.engine.TagValues(ctx, influxdb.ID(readSource.OrganizationID), influxdb.ID(readSource.BucketID), req.TagKey, req.Range.Start, req.Range.End, expr)
	return s.viewer.TagValues(ctx, influxdb.ID(readSource.OrganizationID), influxdb.ID(readSource.BucketID), req.TagKey, req.Range.Start, req.Range.End, expr)
}

// this is easier than fooling around with .proto files.
@@ -11,8 +11,8 @@ describe('Buckets', () => {
      } = body
      cy.wrap(body.org).as('org')
      cy.wrap(bucket).as('bucket')
      cy.fixture('routes').then(({orgs}) => {
        cy.visit(`${orgs}/${id}/load-data/buckets`)
      cy.fixture('routes').then(({orgs, buckets}) => {
        cy.visit(`${orgs}/${id}${buckets}`)
      })
    })
  })

@@ -204,25 +204,22 @@ describe('Buckets', () => {
  describe('Routing directly to the edit overlay', () => {
    it('reroutes to buckets view if bucket does not exist', () => {
      cy.get('@org').then(({id}: Organization) => {
        cy.fixture('routes').then(({orgs}) => {
        cy.fixture('routes').then(({orgs, buckets}) => {
          const idThatDoesntExist = '261234d1a7f932e4'
          cy.visit(`${orgs}/${id}/load-data/buckets/${idThatDoesntExist}/edit`)
          cy.location('pathname').should(
            'be',
            `${orgs}/${id}/load-data/buckets/`
          )
          cy.visit(`${orgs}/${id}${buckets}/${idThatDoesntExist}/edit`)
          cy.location('pathname').should('be', `${orgs}/${id}${buckets}/`)
        })
      })
    })

    it('displays overlay if bucket exists', () => {
      cy.get('@org').then(({id: orgID}: Organization) => {
        cy.fixture('routes').then(({orgs}) => {
        cy.fixture('routes').then(({orgs, buckets}) => {
          cy.get('@bucket').then(({id: bucketID}: Bucket) => {
            cy.visit(`${orgs}/${orgID}/load-data/buckets/${bucketID}/edit`)
            cy.visit(`${orgs}/${orgID}${buckets}/${bucketID}/edit`)
            cy.location('pathname').should(
              'be',
              `${orgs}/${orgID}/load-data/buckets/${bucketID}/edit`
              `${orgs}/${orgID}${buckets}/${bucketID}/edit`
            )
          })
          cy.getByTestID(`overlay`).should('exist')

@@ -230,4 +227,38 @@ describe('Buckets', () => {
      })
    })
  })

  describe('add data', () => {
    it('writing data to buckets', () => {
      // writing a well-formed line is accepted
      cy.getByTestID('add-data--button').click()
      cy.getByTestID('bucket-add-line-protocol').click()
      cy.getByTestID('Enter Manually').click()
      cy.getByTestID('line-protocol--text-area').type('m1,t1=v1 v=1.0')
      cy.getByTestID('next').click()
      cy.getByTestID('wizard-step--text-state success')
      cy.getByTestID('next').click()

      // writing a poorly-formed line errors
      cy.getByTestID('add-data--button').click()
      cy.getByTestID('bucket-add-line-protocol').click()
      cy.getByTestID('Enter Manually').click()
      cy.getByTestID('line-protocol--text-area').type('invalid invalid')
      cy.getByTestID('next').click()
      cy.getByTestID('wizard-step--text-state error')
      cy.getByTestID('next').click()

      // writing a well-formed line with millisecond precision is accepted
      cy.getByTestID('add-data--button').click()
      cy.getByTestID('bucket-add-line-protocol').click()
      cy.getByTestID('Enter Manually').click()
      cy.getByTestID('wizard-step--lp-precision--dropdown').click()
      cy.getByTestID('wizard-step--lp-precision-ms').click()
      const now = Date.now()
      cy.getByTestID('line-protocol--text-area').type(`m2,t2=v2 v=2.0 ${now}`)
      cy.getByTestID('next').click()
      cy.getByTestID('wizard-step--text-state success')
      cy.getByTestID('next').click()
    })
  })
})
@@ -199,7 +199,7 @@ describe('Dashboard', () => {
        .contains('Graph')
        .click()
        .then(() => {
          cy.getByTestID('dropdown-item')
          cy.getByTestID('view-type--table')
            .contains('Table')
            .should('have.length', 1)
            .click()
@@ -1,5 +1,6 @@
import {Doc} from 'codemirror'
import {Organization} from '../../src/types'
import {VIS_TYPES} from '../../src/timeMachine/constants'
import {
  FROM,
  RANGE,

@@ -36,11 +37,9 @@ describe('DataExplorer', () => {

  describe('numeric input using custom bin sizes in Histograms', () => {
    beforeEach(() => {
      cy.getByTestID('page-header--right').within(() => {
        cy.getByTestID('dropdown').click()
        cy.get('#histogram').click()
        cy.getByTestID('cog-cell--button').click()
      })
      cy.getByTestID('view-type--dropdown').click()
      cy.getByTestID(`view-type--histogram`).click()
      cy.getByTestID('cog-cell--button').click()
    })
    it('should put input field in error status and stay in error status when input is invalid or empty', () => {
      cy.get('.view-options').within(() => {

@@ -69,11 +68,9 @@ describe('DataExplorer', () => {

  describe('numeric input validation when changing bin sizes in Heat Maps', () => {
    beforeEach(() => {
      cy.getByTestID('page-header--right').within(() => {
        cy.getByTestID('dropdown').click()
        cy.get('#heatmap').click()
        cy.getByTestID('cog-cell--button').click()
      })
      cy.getByTestID('view-type--dropdown').click()
      cy.getByTestID(`view-type--heatmap`).click()
      cy.getByTestID('cog-cell--button').click()
    })
    it('should put input field in error status and stay in error status when input is invalid or empty', () => {
      cy.get('.view-options').within(() => {

@@ -121,11 +118,9 @@ describe('DataExplorer', () => {

  describe('numeric input validation when changing number of decimal places in Single Stat', () => {
    beforeEach(() => {
      cy.getByTestID('page-header--right').within(() => {
        cy.getByTestID('dropdown').click()
        cy.get('#single-stat').click()
        cy.getByTestID('cog-cell--button').click()
      })
      cy.getByTestID('view-type--dropdown').click()
      cy.getByTestID(`view-type--single-stat`).click()
      cy.getByTestID('cog-cell--button').click()
    })
    it('should put input field in error status and stay in error status when input is invalid or empty', () => {
      cy.get('.view-options').within(() => {
@@ -550,6 +545,56 @@ describe('DataExplorer', () => {
      cy.getByTestID('empty-graph--error').should('exist')
    })
  })

  describe('visualize with 360 lines', () => {
    const numLines = 360

    beforeEach(() => {
      cy.flush()

      cy.signin().then(({body}) => {
        const {
          org: {id},
          bucket,
        } = body
        cy.wrap(body.org).as('org')
        cy.wrap(bucket).as('bucket')

        // POST 360 lines to the server
        cy.writeData(lines(numLines))

        // start at the data explorer
        cy.fixture('routes').then(({orgs, explorer}) => {
          cy.visit(`${orgs}/${id}${explorer}`)
        })
      })
    })

    it('can view time-series data', () => {
      // build the query to return data from beforeEach
      cy.getByTestID(`selector-list m`).click()
      cy.getByTestID('selector-list v').click()
      cy.getByTestID(`selector-list tv1`).click()
      cy.getByTestID('selector-list max').click()

      cy.getByTestID('time-machine-submit-button').click()

      // cycle through all the visualizations of the data
      VIS_TYPES.forEach(({type}) => {
        cy.getByTestID('view-type--dropdown').click()
        cy.getByTestID(`view-type--${type}`).click()
        cy.getByTestID(`vis-graphic--${type}`).should('exist')
        if (type.includes('single-stat')) {
          cy.getByTestID('single-stat--text').should('contain', `${numLines}`)
        }
      })

      // view raw data table
      cy.getByTestID('raw-data--toggle').click()
      cy.getByTestID('raw-data-table').should('exist')
      cy.getByTestID('raw-data--toggle').click()
    })
  })
})

// skipping until feature flag feature is removed for deleteWithPredicate
@@ -597,3 +642,23 @@ describe('DataExplorer', () => {
    })
  })
})

const lines = (numLines = 3) => {
  // each line is 10 seconds before the previous line
  const offset_ms = 10_000
  const now = Date.now()
  const nanos_per_ms = '000000'

  const decendingValues = Array(numLines)
    .fill(0)
    .map((_, i) => i)
    .reverse()

  const incrementingTimes = decendingValues.map(val => {
    return now - offset_ms * val
  })

  return incrementingTimes.map((tm, i) => {
    return `m,tk1=tv1 v=${i + 1} ${tm}${nanos_per_ms}`
  })
}
@@ -5,5 +5,6 @@
  "alerting": "/alerting",
  "checks": "/checks",
  "endpoints": "/endpoints",
  "rules": "/rules"
  "rules": "/rules",
  "buckets": "/load-data/buckets"
}
@@ -48,7 +48,10 @@ export default class BucketAddDataButton extends PureComponent<Props> {
          className="bucket-add-data--option"
          onClick={onAddLineProtocol}
        >
          <div className="bucket-add-data--option-header">
          <div
            className="bucket-add-data--option-header"
            data-testid="bucket-add-line-protocol"
          >
            Line Protocol
          </div>
          <div className="bucket-add-data--option-desc">

@@ -71,6 +74,7 @@ export default class BucketAddDataButton extends PureComponent<Props> {
          <Button
            ref={this.triggerRef}
            text="Add Data"
            testID="add-data--button"
            icon={IconFont.Plus}
            size={ComponentSize.ExtraSmall}
            color={ComponentColor.Secondary}
@@ -36,6 +36,7 @@ class PrecisionDropdown extends PureComponent<Props> {
      <Dropdown
        style={{width: '200px'}}
        className="wizard-step--lp-precision"
        testID="wizard-step--lp-precision--dropdown"
        button={(active, onClick) => (
          <Dropdown.Button active={active} onClick={onClick}>
            {makePrecisionReadable[precision]}

@@ -49,6 +50,7 @@ class PrecisionDropdown extends PureComponent<Props> {
            value={value}
            id={value}
            onClick={setPrecision}
            testID={`wizard-step--lp-precision-${value}`}
            selected={`${value}` === `${precision}`}
          >
            {makePrecisionReadable[value]}
@@ -22,6 +22,7 @@ export default class extends PureComponent<Props> {
        value={tab}
        active={active}
        onClick={this.handleClick}
        testID={tab}
      >
        {tab}
      </Radio.Button>
@@ -46,6 +46,7 @@ export default class extends PureComponent<Props> {
          value={lineProtocolBody}
          placeholder="Write text here"
          onChange={this.handleTextChange}
          testID="line-protocol--text-area"
        />
      )
    case LineProtocolTab.EnterURL:
@@ -28,7 +28,7 @@ export class StatusIndicator extends PureComponent<Props> {
          </div>
        </div>
        <div className="wizard-step--footer">
          <div className={this.footerClass}>
          <div className={this.footerClass} data-testid={this.footerClass}>
            {this.footerText}
            {this.errorMessage}
          </div>
@@ -63,6 +63,7 @@ class SigninForm extends PureComponent<Props, State> {
                  onChange={this.handleUsername}
                  size={ComponentSize.Medium}
                  autoFocus={true}
                  testID="username"
                />
              </Form.Element>
            </Grid.Column>

@@ -74,6 +75,7 @@ class SigninForm extends PureComponent<Props, State> {
                  onChange={this.handlePassword}
                  size={ComponentSize.Medium}
                  type={InputType.Password}
                  testID="password"
                />
              </Form.Element>
            </Grid.Column>
@@ -25,7 +25,11 @@ const SingleStat: SFC<Props> = ({stat, properties}) => {
  const formattedValue = formatStatValue(stat, {decimalPlaces, prefix, suffix})

  return (
    <div className="single-stat" style={{backgroundColor}}>
    <div
      className="single-stat"
      style={{backgroundColor}}
      data-testid="single-stat"
    >
      <div className="single-stat--resizer">
        <svg
          width="100%"

@@ -34,6 +38,7 @@ const SingleStat: SFC<Props> = ({stat, properties}) => {
        >
          <text
            className="single-stat--text"
            data-testid="single-stat--text"
            fontSize="100"
            y="59%"
            x="50%"
@@ -35,6 +35,7 @@ class TimeMachineQueries extends PureComponent<Props> {
            active={isViewingRawData}
            onChange={this.handleToggleIsViewingRawData}
            size={ComponentSize.ExtraSmall}
            testID="raw-data--toggle"
          />
        </div>
      )
@@ -34,7 +34,7 @@ class RawFluxDataTable extends PureComponent<Props, State> {
    const tableHeight = height

    return (
      <div className="raw-flux-data-table">
      <div className="raw-flux-data-table" data-testid="raw-data-table">
        <FancyScrollbar
          style={{
            overflowY: 'hidden',
@@ -34,6 +34,7 @@ class ViewTypeDropdown extends PureComponent<Props> {
      <Dropdown
        style={{width: '215px'}}
        className="view-type-dropdown"
        testID="view-type--dropdown"
        button={(active, onClick) => (
          <Dropdown.Button
            active={active}

@@ -63,6 +64,7 @@ class ViewTypeDropdown extends PureComponent<Props> {
          <Dropdown.Item
            key={`view-type--${g.type}`}
            id={`${g.type}`}
            testID={`view-type--${g.type}`}
            value={g.type}
            onClick={this.handleChange}
            selected={`${g.type}` === this.selectedView}
@@ -0,0 +1,41 @@
import {ViewType} from 'src/types'

interface VisType {
  type: ViewType
  name: string
}

export const VIS_TYPES: VisType[] = [
  {
    type: 'xy',
    name: 'Graph',
  },
  {
    type: 'line-plus-single-stat',
    name: 'Graph + Single Stat',
  },
  {
    type: 'heatmap',
    name: 'Heatmap',
  },
  {
    type: 'histogram',
    name: 'Histogram',
  },
  {
    type: 'single-stat',
    name: 'Single Stat',
  },
  {
    type: 'gauge',
    name: 'Gauge',
  },
  {
    type: 'table',
    name: 'Table',
  },
  {
    type: 'scatter',
    name: 'Scatter',
  },
]
@@ -1,10 +1,11 @@
import React from 'react'

import {ViewType} from 'src/types'
import {VIS_TYPES} from './index'

const GRAPHIC_SVGS = {
  heatmap: (
    <div className="vis-graphic">
    <div className="vis-graphic" data-testid="vis-graphic--heatmap">
      <svg
        width="100%"
        height="100%"

@@ -241,7 +242,7 @@ const GRAPHIC_SVGS = {
    </div>
  ),
  histogram: (
    <div className="vis-graphic">
    <div className="vis-graphic" data-testid="vis-graphic--histogram">
      <svg
        width="100%"
        height="100%"

@@ -470,7 +471,7 @@ const GRAPHIC_SVGS = {
    </div>
  ),
  xy: (
    <div className="vis-graphic">
    <div className="vis-graphic" data-testid="vis-graphic--xy">
      <svg
        width="100%"
        height="100%"

@@ -509,7 +510,7 @@ const GRAPHIC_SVGS = {
    </div>
  ),
  'single-stat': (
    <div className="vis-graphic">
    <div className="vis-graphic" data-testid="vis-graphic--single-stat">
      <svg
        width="100%"
        height="100%"

@@ -548,7 +549,10 @@ const GRAPHIC_SVGS = {
    </div>
  ),
  'line-plus-single-stat': (
    <div className="vis-graphic">
    <div
      className="vis-graphic"
      data-testid="vis-graphic--line-plus-single-stat"
    >
      <svg
        width="100%"
        height="100%"

@@ -597,7 +601,7 @@ const GRAPHIC_SVGS = {
    </div>
  ),
  gauge: (
    <div className="vis-graphic">
    <div className="vis-graphic" data-testid="vis-graphic--gauge">
      <svg
        width="100%"
        height="100%"

@@ -737,7 +741,7 @@ const GRAPHIC_SVGS = {
    </div>
  ),
  table: (
    <div className="vis-graphic">
    <div className="vis-graphic" data-testid="vis-graphic--table">
      <svg
        id="Table"
        x="0px"

@@ -813,7 +817,7 @@ const GRAPHIC_SVGS = {
    </div>
  ),
  scatter: (
    <div className="vis-graphic">
    <div className="vis-graphic" data-testid="vis-graphic--scatter">
      <svg
        width="100%"
        height="100%"

@@ -903,45 +907,12 @@ interface VisGraphic {
  graphic: JSX.Element
}

export const VIS_GRAPHICS: VisGraphic[] = [
  {
    type: 'xy',
    name: 'Graph',
    graphic: GRAPHIC_SVGS.xy,
  },
  {
    type: 'line-plus-single-stat',
    name: 'Graph + Single Stat',
    graphic: GRAPHIC_SVGS['line-plus-single-stat'],
  },
  {
    type: 'heatmap',
    name: 'Heatmap',
    graphic: GRAPHIC_SVGS.heatmap,
  },
  {
    type: 'histogram',
    name: 'Histogram',
    graphic: GRAPHIC_SVGS.histogram,
  },
  {
    type: 'single-stat',
    name: 'Single Stat',
    graphic: GRAPHIC_SVGS['single-stat'],
  },
  {
    type: 'gauge',
    name: 'Gauge',
    graphic: GRAPHIC_SVGS.gauge,
  },
  {
    type: 'table',
    name: 'Table',
    graphic: GRAPHIC_SVGS.table,
  },
  {
    type: 'scatter',
    name: 'Scatter',
    graphic: GRAPHIC_SVGS.scatter,
  },
]
export const VIS_GRAPHICS: VisGraphic[] = VIS_TYPES.map(
  ({type, name}): VisGraphic => {
    return {
      type,
      name,
      graphic: GRAPHIC_SVGS[type],
    }
  }
)