influxdb/tsdb/series_file_test.go

125 lines
3.2 KiB
Go
Raw Normal View History

2017-11-15 23:09:25 +00:00
package tsdb_test
2017-09-14 15:41:58 +00:00
import (
2017-12-27 15:09:36 +00:00
"fmt"
2017-09-14 15:41:58 +00:00
"io/ioutil"
"os"
"testing"
2018-01-02 19:20:03 +00:00
"github.com/influxdata/influxdb/logger"
2017-09-14 15:41:58 +00:00
"github.com/influxdata/influxdb/models"
2017-11-15 23:09:25 +00:00
"github.com/influxdata/influxdb/tsdb"
2017-09-14 15:41:58 +00:00
)
// Ensure series file contains the correct set of series.
func TestSeriesFile_Series(t *testing.T) {
sfile := MustOpenSeriesFile()
defer sfile.Close()
series := []Series{
{Name: []byte("cpu"), Tags: models.NewTags(map[string]string{"region": "east"})},
{Name: []byte("cpu"), Tags: models.NewTags(map[string]string{"region": "west"})},
{Name: []byte("mem"), Tags: models.NewTags(map[string]string{"region": "east"})},
}
for _, s := range series {
if _, err := sfile.CreateSeriesListIfNotExists([][]byte{[]byte(s.Name)}, []models.Tags{s.Tags}); err != nil {
2017-09-14 15:41:58 +00:00
t.Fatal(err)
}
}
// Verify total number of series is correct.
if n := sfile.SeriesCount(); n != 3 {
t.Fatalf("unexpected series count: %d", n)
}
// Verify all series exist.
for i, s := range series {
2017-12-19 17:31:33 +00:00
if seriesID := sfile.SeriesID(s.Name, s.Tags, nil); seriesID == 0 {
2017-09-14 15:41:58 +00:00
t.Fatalf("series does not exist: i=%d", i)
}
}
// Verify non-existent series doesn't exist.
if sfile.HasSeries([]byte("foo"), models.NewTags(map[string]string{"region": "north"}), nil) {
t.Fatal("series should not exist")
}
}
2017-12-27 15:09:36 +00:00
// Ensure series file can be compacted.
func TestSeriesFileCompactor(t *testing.T) {
sfile := MustOpenSeriesFile()
defer sfile.Close()
2018-01-09 19:05:37 +00:00
// Disable automatic compactions.
for _, p := range sfile.Partitions() {
p.CompactThreshold = 0
}
2017-12-27 15:09:36 +00:00
var names [][]byte
var tagsSlice []models.Tags
for i := 0; i < 10000; i++ {
names = append(names, []byte(fmt.Sprintf("m%d", i)))
tagsSlice = append(tagsSlice, models.NewTags(map[string]string{"foo": "bar"}))
}
if _, err := sfile.CreateSeriesListIfNotExists(names, tagsSlice); err != nil {
2017-12-27 15:09:36 +00:00
t.Fatal(err)
}
// Verify total number of series is correct.
if n := sfile.SeriesCount(); n != uint64(len(names)) {
t.Fatalf("unexpected series count: %d", n)
}
2018-01-09 19:05:37 +00:00
// Compact in-place for each partition.
for _, p := range sfile.Partitions() {
compactor := tsdb.NewSeriesPartitionCompactor()
if err := compactor.Compact(p); err != nil {
t.Fatal(err)
}
2017-12-27 15:09:36 +00:00
}
// Verify all series exist.
for i := range names {
2017-12-29 18:57:30 +00:00
if seriesID := sfile.SeriesID(names[i], tagsSlice[i], nil); seriesID == 0 {
2017-12-27 15:09:36 +00:00
t.Fatalf("series does not exist: %s,%s", names[i], tagsSlice[i].String())
}
}
}
2017-09-14 15:41:58 +00:00
// Series represents name/tagset pairs that are used in testing.
type Series struct {
	Name    []byte      // measurement name
	Tags    models.Tags // tag set associated with the name
	Deleted bool        // presumably marks a tombstoned series — not referenced in the tests shown here
}
// SeriesFile is a test wrapper for tsdb.SeriesFile.
// The embedded pointer promotes all tsdb.SeriesFile methods onto the wrapper.
type SeriesFile struct {
	*tsdb.SeriesFile
}
// NewSeriesFile returns a new instance of SeriesFile with a temporary file path.
func NewSeriesFile() *SeriesFile {
2017-12-29 18:57:30 +00:00
dir, err := ioutil.TempDir("", "tsdb-series-file-")
2017-09-14 15:41:58 +00:00
if err != nil {
panic(err)
}
2017-12-29 18:57:30 +00:00
return &SeriesFile{SeriesFile: tsdb.NewSeriesFile(dir)}
2017-09-14 15:41:58 +00:00
}
// MustOpenSeriesFile returns a new, open instance of SeriesFile. Panic on error.
func MustOpenSeriesFile() *SeriesFile {
f := NewSeriesFile()
2018-01-02 19:20:03 +00:00
f.Logger = logger.New(os.Stdout)
2017-09-14 15:41:58 +00:00
if err := f.Open(); err != nil {
panic(err)
}
return f
}
// Close closes the log file and removes it from disk.
func (f *SeriesFile) Close() error {
2017-12-29 18:57:30 +00:00
defer os.RemoveAll(f.Path())
2017-09-14 15:41:58 +00:00
return f.SeriesFile.Close()
}