feat(query/querytest): test framework improvements to help generate output (#51)
updated helper program so users can now start with a .in.csv file, and a .ifql file, and generate output. The program asks the user to verify output before writing. Also refactored the location of test files from query_test to querytest and put helper functions in the new file querytest/execute.go.
parent
c1ff42857c
commit
c5e0477b74
|
@ -1,32 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"io/ioutil"
|
|
||||||
"os"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"golang.org/x/text/unicode/norm"
|
|
||||||
)
|
|
||||||
|
|
||||||
func normalizeString(s string) []byte {
|
|
||||||
result := norm.NFC.String(strings.TrimSpace(s))
|
|
||||||
re := regexp.MustCompile(`\r?\n`)
|
|
||||||
return []byte(re.ReplaceAllString(result, "\r\n"))
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
if len(os.Args) != 2 {
|
|
||||||
fmt.Println("usage: normalize <filename>")
|
|
||||||
return
|
|
||||||
}
|
|
||||||
fname := os.Args[1]
|
|
||||||
data, err := ioutil.ReadFile(fname)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Printf("could not open file %s", fname)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
data = normalizeString(string(data))
|
|
||||||
ioutil.WriteFile(fname, data, 0644)
|
|
||||||
}
|
|
|
@ -1,191 +0,0 @@
|
||||||
package query
|
|
||||||
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"context"
|
|
||||||
"io/ioutil"
|
|
||||||
"log"
|
|
||||||
"math"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
"github.com/influxdata/platform"
|
|
||||||
"github.com/influxdata/platform/query/csv"
|
|
||||||
platformfunctions "github.com/influxdata/platform/query/functions"
|
|
||||||
"github.com/influxdata/platform/query/influxql"
|
|
||||||
|
|
||||||
"fmt"
|
|
||||||
|
|
||||||
"github.com/influxdata/platform/query"
|
|
||||||
"github.com/influxdata/platform/query/control"
|
|
||||||
"github.com/influxdata/platform/query/functions"
|
|
||||||
"github.com/influxdata/platform/query/id"
|
|
||||||
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/andreyvit/diff"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
	// staticResultID is the fixed organization ID used for every test query.
	staticResultID platform.ID
)

func init() {
	// NOTE(review): the error from DecodeFromString is ignored — presumably
	// "1" always decodes successfully; confirm.
	staticResultID.DecodeFromString("1")
	// Finalize registration of the query builtins before any test runs.
	query.FinalizeRegistration()
}
|
|
||||||
|
|
||||||
// wrapController is needed to make *ifql.Controller implement platform.AsyncQueryService.
|
|
||||||
// TODO(nathanielc/adam): copied from ifqlde main.go, in which there's a note to remove this type by a better design
|
|
||||||
type wrapController struct {
|
|
||||||
*control.Controller
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c wrapController) Query(ctx context.Context, orgID platform.ID, query *query.Spec) (query.Query, error) {
|
|
||||||
q, err := c.Controller.Query(ctx, id.ID(orgID), query)
|
|
||||||
return q, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c wrapController) QueryWithCompile(ctx context.Context, orgID platform.ID, query string) (query.Query, error) {
|
|
||||||
q, err := c.Controller.QueryWithCompile(ctx, id.ID(orgID), query)
|
|
||||||
return q, err
|
|
||||||
}
|
|
||||||
|
|
||||||
func Test_QueryEndToEnd(t *testing.T) {
|
|
||||||
config := control.Config{
|
|
||||||
ConcurrencyQuota: 0,
|
|
||||||
MemoryBytesQuota: math.MaxInt64,
|
|
||||||
}
|
|
||||||
|
|
||||||
c := control.New(config)
|
|
||||||
|
|
||||||
qs := query.QueryServiceBridge{
|
|
||||||
AsyncQueryService: wrapController{Controller: c},
|
|
||||||
}
|
|
||||||
|
|
||||||
influxqlTranspiler := influxql.NewTranspiler()
|
|
||||||
|
|
||||||
dir, err := os.Getwd()
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
path := filepath.Join(dir, "test_cases")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
ifqlFiles, err := filepath.Glob(filepath.Join(path, "*.ifql"))
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("error searching for ifql files: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, ifqlFile := range ifqlFiles {
|
|
||||||
ext := filepath.Ext(ifqlFile)
|
|
||||||
prefix := ifqlFile[0 : len(ifqlFile)-len(ext)]
|
|
||||||
|
|
||||||
csvIn := prefix + ".in.csv"
|
|
||||||
|
|
||||||
csvOut, err := getTestData(prefix, ".out.csv")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("error in test case %s: %s", prefix, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
ifqlQuery, err := getTestData(prefix, ".ifql")
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("error in test case %s: %s", prefix, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
ifqlSpec, err := query.Compile(context.Background(), ifqlQuery)
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("error in test case %s: %s", prefix, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
correct, err := QueryTestCheckSpec(t, qs, ifqlSpec, csvIn, csvOut)
|
|
||||||
if !correct {
|
|
||||||
t.Errorf("failed to run ifql query spec for test case %s. error=%s", prefix, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
influxqlQuery, err := getTestData(prefix, ".influxql")
|
|
||||||
if err != nil {
|
|
||||||
t.Logf("skipping influxql for test case %s: %s", prefix, err)
|
|
||||||
} else {
|
|
||||||
if err != nil {
|
|
||||||
t.Fatalf("error in test case %s: %s", prefix, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
influxqlSpec, err := influxqlTranspiler.Transpile(context.Background(), influxqlQuery)
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("failed to obtain transpiled influxql query spec for test case %s. error=%s", prefix, err)
|
|
||||||
}
|
|
||||||
|
|
||||||
correct, err := QueryTestCheckSpec(t, qs, influxqlSpec, csvIn, csvOut)
|
|
||||||
if !correct {
|
|
||||||
t.Errorf("failed to run influxql query spec for test case %s. error=%s", prefix, err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// getTestData reads the file prefix+suffix and returns its contents as a
// string.
func getTestData(prefix, suffix string) (string, error) {
	datafile := prefix + suffix
	csv, err := ioutil.ReadFile(datafile)
	if err != nil {
		// Include the underlying error; the original message discarded it,
		// hiding the actual cause (permissions vs. missing file, etc.).
		return "", fmt.Errorf("failed to open file: %s: %v", datafile, err)
	}
	return string(csv), nil
}
|
|
||||||
|
|
||||||
func ReplaceFromSpec(q *query.Spec, csvSrc string) {
|
|
||||||
for _, op := range q.Operations {
|
|
||||||
if op.Spec.Kind() == functions.FromKind {
|
|
||||||
op.Spec = &platformfunctions.FromCSVOpSpec{
|
|
||||||
File: csvSrc,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func QueryTestCheckSpec(t *testing.T, qs query.QueryServiceBridge, spec *query.Spec, inputFile, want string) (bool, error) {
|
|
||||||
t.Helper()
|
|
||||||
ReplaceFromSpec(spec, inputFile)
|
|
||||||
|
|
||||||
//log.Println("QueryTestCheckSpec", query.Formatted(spec, query.FmtJSON))
|
|
||||||
log.Println("QueryTestCheckSpec")
|
|
||||||
results, err := qs.Query(context.Background(), staticResultID, spec)
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("failed to run query spec error=%s", err)
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
enc := csv.NewResultEncoder(csv.DefaultEncoderConfig())
|
|
||||||
buf := new(bytes.Buffer)
|
|
||||||
// we are only expecting one result, for now
|
|
||||||
for results.More() {
|
|
||||||
res := results.Next()
|
|
||||||
|
|
||||||
err := enc.Encode(buf, res)
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("failed to run query spec error=%s", err)
|
|
||||||
results.Cancel()
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
err = results.Err()
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("failed to run query spec error=%s", err)
|
|
||||||
return false, err
|
|
||||||
}
|
|
||||||
|
|
||||||
got := buf.String()
|
|
||||||
if g, w := strings.TrimSpace(got), strings.TrimSpace(want); g != w {
|
|
||||||
t.Errorf("Result not as expected want(-) got (+):\n%v", diff.LineDiff(w, g))
|
|
||||||
results.Cancel()
|
|
||||||
return false, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
return true, nil
|
|
||||||
|
|
||||||
}
|
|
|
@ -1,2 +0,0 @@
|
||||||
// no executable code in this directory, which contains tests for the query package
|
|
||||||
package query
|
|
|
@ -47,6 +47,11 @@ Compliant to the HTTP spec for CSV data, the output produced by the ifqld proces
|
||||||
- UTF-8 normalized
|
- UTF-8 normalized
|
||||||
- line endings are `\r\n`
|
- line endings are `\r\n`
|
||||||
|
|
||||||
The go program query/query_test/normalize_text/normalize.go can be run on a file to prepare the data in-place. So you
|
The go program query/querytest/prepcsvtests/prepcsvtests.go can be run to prepare tests. A valid test case must have
|
||||||
may create a new file by copy/pasting some text, then prepare the file for testing by:
|
<CASENAME>.ifql and <CASENAME>.in.csv files. The prepcsvtests executable will iterate over all such test cases in
|
||||||
```go build ./query_test/normalize_text/normalize.go && ./normalize query/query_test/test_cases/simple_max_out.csv```
|
the user-supplied directory, run the ifql query on the input and prompt the user to approve saving the result as
|
||||||
|
<CASENAME>.out.csv.
|
||||||
|
```go build ./querytest/prepcsvtests/prepcsvtests.go && ./prepcsvtests query/querytests/test_cases```
|
||||||
|
|
||||||
|
Optionally, you can give a CASENAME to prep the output for a single case:
|
||||||
|
```go build ./querytest/prepcsvtests/prepcsvtests.go && ./prepcsvtests query/querytests/test_cases CASENAME```
|
|
@ -0,0 +1,93 @@
|
||||||
|
package querytest
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"math"
|
||||||
|
|
||||||
|
"github.com/influxdata/platform"
|
||||||
|
"github.com/influxdata/platform/query"
|
||||||
|
"github.com/influxdata/platform/query/control"
|
||||||
|
"github.com/influxdata/platform/query/csv"
|
||||||
|
"github.com/influxdata/platform/query/functions"
|
||||||
|
"github.com/influxdata/platform/query/id"
|
||||||
|
)
|
||||||
|
|
||||||
|
var (
	// staticResultID is the fixed organization ID used for every test query.
	staticResultID platform.ID
)

func init() {
	// NOTE(review): the error from DecodeFromString is ignored — presumably
	// "1" always decodes successfully; confirm.
	staticResultID.DecodeFromString("1")
}
|
||||||
|
|
||||||
|
// wrapController is needed to make *ifql.Controller implement platform.AsyncQueryService.
|
||||||
|
// TODO(nathanielc/adam): copied from ifqlde main.go, in which there's a note to remove this type by a better design
|
||||||
|
type wrapController struct {
|
||||||
|
*control.Controller
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c wrapController) Query(ctx context.Context, orgID platform.ID, query *query.Spec) (query.Query, error) {
|
||||||
|
q, err := c.Controller.Query(ctx, id.ID(orgID), query)
|
||||||
|
return q, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func (c wrapController) QueryWithCompile(ctx context.Context, orgID platform.ID, query string) (query.Query, error) {
|
||||||
|
q, err := c.Controller.QueryWithCompile(ctx, id.ID(orgID), query)
|
||||||
|
return q, err
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetQueryServiceBridge() *query.QueryServiceBridge {
|
||||||
|
config := control.Config{
|
||||||
|
ConcurrencyQuota: 1,
|
||||||
|
MemoryBytesQuota: math.MaxInt64,
|
||||||
|
}
|
||||||
|
|
||||||
|
c := control.New(config)
|
||||||
|
|
||||||
|
return &query.QueryServiceBridge{
|
||||||
|
AsyncQueryService: wrapController{Controller: c},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetQueryEncodedResults(qs *query.QueryServiceBridge, spec *query.Spec, inputFile string) (string, error) {
|
||||||
|
results, err := qs.Query(context.Background(), staticResultID, spec)
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
enc := csv.NewResultEncoder(csv.DefaultEncoderConfig())
|
||||||
|
buf := new(bytes.Buffer)
|
||||||
|
// we are only expecting one result, for now
|
||||||
|
for results.More() {
|
||||||
|
res := results.Next()
|
||||||
|
|
||||||
|
err := enc.Encode(buf, res)
|
||||||
|
if err != nil {
|
||||||
|
results.Cancel()
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
return buf.String(), nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetTestData reads the file prefix+suffix and returns its contents as a
// string.
func GetTestData(prefix, suffix string) (string, error) {
	datafile := prefix + suffix
	csv, err := ioutil.ReadFile(datafile)
	if err != nil {
		// Include the underlying error; the original message discarded it,
		// hiding the actual cause (permissions vs. missing file, etc.).
		return "", fmt.Errorf("failed to open file: %s: %v", datafile, err)
	}
	return string(csv), nil
}
|
||||||
|
|
||||||
|
func ReplaceFromSpec(q *query.Spec, csvSrc string) {
|
||||||
|
for _, op := range q.Operations {
|
||||||
|
if op.Spec.Kind() == functions.FromKind {
|
||||||
|
op.Spec = &functions.FromCSVOpSpec{
|
||||||
|
File: csvSrc,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,96 @@
|
||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bufio"
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/influxdata/platform/query"
|
||||||
|
"github.com/influxdata/platform/query/querytest"
|
||||||
|
|
||||||
|
"golang.org/x/text/unicode/norm"
|
||||||
|
)
|
||||||
|
|
||||||
|
func normalizeString(s string) []byte {
|
||||||
|
result := norm.NFC.String(strings.TrimSpace(s))
|
||||||
|
re := regexp.MustCompile(`\r?\n`)
|
||||||
|
return []byte(re.ReplaceAllString(result, "\r\n"))
|
||||||
|
}
|
||||||
|
|
||||||
|
// printUsage prints the command-line usage for prepcsvtests.
func printUsage() {
	fmt.Printf("usage: prepcsvtests /path/to/testfiles [testname]\n")
}
|
||||||
|
|
||||||
|
func init() {
	// Finalize registration of the query builtins before main runs.
	query.FinalizeRegistration()
}
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
fnames := make([]string, 0)
|
||||||
|
path := ""
|
||||||
|
var err error
|
||||||
|
if len(os.Args) == 3 {
|
||||||
|
path = os.Args[1]
|
||||||
|
fnames = append(fnames, filepath.Join(path, os.Args[2])+".ifql")
|
||||||
|
} else if len(os.Args) == 2 {
|
||||||
|
path = os.Args[1]
|
||||||
|
fnames, err = filepath.Glob(filepath.Join(path, "*.ifql"))
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
printUsage()
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, fname := range fnames {
|
||||||
|
ext := ".ifql"
|
||||||
|
testName := fname[0 : len(fname)-len(ext)]
|
||||||
|
incsv := testName + ".in.csv"
|
||||||
|
indata, err := ioutil.ReadFile(incsv)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("could not open file %s", fname)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("Generating output for test case %s\n", testName)
|
||||||
|
|
||||||
|
indata = normalizeString(string(indata))
|
||||||
|
fmt.Println("Writing input data to file")
|
||||||
|
ioutil.WriteFile(incsv, indata, 0644)
|
||||||
|
|
||||||
|
querytext, err := ioutil.ReadFile(fname)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("error reading query text: %s", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
qs := querytest.GetQueryServiceBridge()
|
||||||
|
qspec, err := query.Compile(context.Background(), string(querytext))
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("error compiling. \n query: \n %s \n err: %s", string(querytext), err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
querytest.ReplaceFromSpec(qspec, incsv)
|
||||||
|
result, err := querytest.GetQueryEncodedResults(qs, qspec, incsv)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Printf("error: %s", err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Printf("CHECK RESULT:\n%s\n____________________________________________________________", result)
|
||||||
|
|
||||||
|
reader := bufio.NewReader(os.Stdin)
|
||||||
|
fmt.Print("Results ok (y/n)?: ")
|
||||||
|
text, _ := reader.ReadString('\n')
|
||||||
|
if text == "y\n" {
|
||||||
|
fmt.Printf("writing output file: %s", testName+".out.csv")
|
||||||
|
ioutil.WriteFile(testName+".out.csv", []byte(result), 0644)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,100 @@
|
||||||
|
package querytest
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/influxdata/platform/query"
|
||||||
|
"github.com/influxdata/platform/query/influxql"
|
||||||
|
|
||||||
|
"github.com/andreyvit/diff"
|
||||||
|
)
|
||||||
|
|
||||||
|
func Test_QueryEndToEnd(t *testing.T) {
|
||||||
|
qs := GetQueryServiceBridge()
|
||||||
|
|
||||||
|
influxqlTranspiler := influxql.NewTranspiler()
|
||||||
|
|
||||||
|
dir, err := os.Getwd()
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
path := filepath.Join(dir, "test_cases")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ifqlFiles, err := filepath.Glob(filepath.Join(path, "*.ifql"))
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("error searching for ifql files: %s", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, ifqlFile := range ifqlFiles {
|
||||||
|
ext := filepath.Ext(ifqlFile)
|
||||||
|
prefix := ifqlFile[0 : len(ifqlFile)-len(ext)]
|
||||||
|
_, caseName := filepath.Split(prefix)
|
||||||
|
csvIn := prefix + ".in.csv"
|
||||||
|
|
||||||
|
csvOut, err := GetTestData(prefix, ".out.csv")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("error in test case %s: %s", prefix, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ifqlQuery, err := GetTestData(prefix, ".ifql")
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("error in test case %s: %s", prefix, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
ifqlSpec, err := query.Compile(context.Background(), ifqlQuery)
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("error in test case %s: %s", prefix, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = QueryTestCheckSpec(t, qs, ifqlSpec, caseName+".ifql", csvIn, csvOut)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("failed to run ifql query spec for test case %s. error=%s", prefix, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
influxqlQuery, err := GetTestData(prefix, ".influxql")
|
||||||
|
if err != nil {
|
||||||
|
t.Logf("skipping influxql for test case %s: %s", prefix, err)
|
||||||
|
} else {
|
||||||
|
if err != nil {
|
||||||
|
t.Fatalf("error in test case %s: %s", prefix, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
influxqlSpec, err := influxqlTranspiler.Transpile(context.Background(), influxqlQuery)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("failed to obtain transpiled influxql query spec for test case %s. error=%s", prefix, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
err = QueryTestCheckSpec(t, qs, influxqlSpec, "influxql::"+caseName, csvIn, csvOut)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("failed to run influxql query spec for test case %s. error=%s", prefix, err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func QueryTestCheckSpec(t *testing.T, qs *query.QueryServiceBridge, spec *query.Spec, caseName, inputFile, want string) error {
|
||||||
|
t.Helper()
|
||||||
|
ReplaceFromSpec(spec, inputFile)
|
||||||
|
|
||||||
|
//log.Println("QueryTestCheckSpec", query.Formatted(spec, query.FmtJSON))
|
||||||
|
log.Println("QueryTestCheckSpec")
|
||||||
|
|
||||||
|
got, err := GetQueryEncodedResults(qs, spec, inputFile)
|
||||||
|
if err != nil {
|
||||||
|
t.Errorf("case %s: failed to run query spec error=%s", caseName, err)
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if g, w := strings.TrimSpace(got), strings.TrimSpace(want); g != w {
|
||||||
|
t.Errorf("case %s: result not as expected want(-) got (+):\n%v", caseName, diff.LineDiff(w, g))
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
|
@ -0,0 +1,3 @@
|
||||||
|
from(db:"testdb")
|
||||||
|
|> range(start: 2018-05-23T13:09:22.885021542Z)
|
||||||
|
|> filter(fn: (r) => r["name"] == "disk0")
|
|
@ -0,0 +1,16 @@
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true,true
|
||||||
|
#default,_result,,,,,,,,,
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host,name
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,15204688,io_time,diskio,host.local,disk0
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,15204894,io_time,diskio,host.local,disk0
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,15205102,io_time,diskio,host.local,disk0
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,15205226,io_time,diskio,host.local,disk0
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,15205499,io_time,diskio,host.local,disk0
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,15205755,io_time,diskio,host.local,disk0
|
||||||
|
,,10,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,648,io_time,diskio,host.local,disk2
|
||||||
|
,,10,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,648,io_time,diskio,host.local,disk2
|
||||||
|
,,10,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,648,io_time,diskio,host.local,disk2
|
||||||
|
,,10,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,648,io_time,diskio,host.local,disk2
|
||||||
|
,,10,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,648,io_time,diskio,host.local,disk2
|
||||||
|
,,10,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,648,io_time,diskio,host.local,disk2
|
|
|
@ -0,0 +1,15 @@
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true,true
|
||||||
|
#default,_result,,,,,,,,,
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host,name
|
||||||
|
,,0,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,15204688,io_time,diskio,host.local,disk0
|
||||||
|
,,0,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,15204894,io_time,diskio,host.local,disk0
|
||||||
|
,,0,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,15205102,io_time,diskio,host.local,disk0
|
||||||
|
,,0,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,15205226,io_time,diskio,host.local,disk0
|
||||||
|
,,0,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,15205499,io_time,diskio,host.local,disk0
|
||||||
|
,,0,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,15205755,io_time,diskio,host.local,disk0
|
||||||
|
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true,true
|
||||||
|
#default,_result,1,,,,,io_time,diskio,host.local,disk2
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host,name
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
from(db:"testdb")
|
||||||
|
|> range(start: 2018-05-23T13:09:22.885021542Z)
|
||||||
|
|> filter(fn: (r) => r._measurement == "swap")
|
|
@ -0,0 +1,34 @@
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,,,,,,,,
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,82.9833984375,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,82.6416015625,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,2.9833984375,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,2.6416015625,used_percent,swap,host1.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,1.83,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,1.7,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,1.74,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,1.63,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,1.91,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,1.84,load1,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,1.98,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,1.97,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,1.97,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,1.96,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,1.98,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,1.97,load15,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,1.95,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,1.92,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,1.92,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,1.89,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,1.94,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,1.93,load5,system,host.local
|
|
|
@ -0,0 +1,31 @@
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,0,,,,,load15,system,host.local
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,,,,,,,,
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,82.9833984375,used_percent,swap,host.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,82.6416015625,used_percent,swap,host.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,2.9833984375,used_percent,swap,host1.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,1,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,2.6416015625,used_percent,swap,host1.local
|
||||||
|
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,2,,,,,load5,system,host.local
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,3,,,,,load1,system,host.local
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
from(db:"testdb")
|
||||||
|
|> range(start: 2018-05-23T13:09:22.885021542Z)
|
||||||
|
|> filter(fn: (r) => r._measurement == "system" AND r._field == "load1")
|
|
@ -0,0 +1,34 @@
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,,,,,,,,
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,82.9833984375,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,82.598876953125,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,82.6416015625,used_percent,swap,host.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,2.9833984375,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,2.598876953125,used_percent,swap,host1.local
|
||||||
|
,,161,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,2.6416015625,used_percent,swap,host1.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,1.83,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,1.7,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,1.74,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,1.63,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,1.91,load1,system,host.local
|
||||||
|
,,162,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,1.84,load1,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,1.98,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,1.97,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,1.97,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,1.96,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,1.98,load15,system,host.local
|
||||||
|
,,163,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,1.97,load15,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,1.95,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,1.92,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,1.92,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,1.89,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,1.94,load5,system,host.local
|
||||||
|
,,164,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,1.93,load5,system,host.local
|
|
|
@ -0,0 +1,25 @@
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,0,,,,,load15,system,host.local
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,1,,,,,used_percent,swap,host.local
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,2,,,,,load5,system,host.local
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
|
||||||
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
|
||||||
|
#partition,false,false,false,false,false,false,true,true,true
|
||||||
|
#default,_result,,,,,,,,
|
||||||
|
,result,table,_start,_stop,_time,_value,_field,_measurement,host
|
||||||
|
,,3,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:26Z,1.83,load1,system,host.local
|
||||||
|
,,3,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:36Z,1.7,load1,system,host.local
|
||||||
|
,,3,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:46Z,1.74,load1,system,host.local
|
||||||
|
,,3,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:53:56Z,1.63,load1,system,host.local
|
||||||
|
,,3,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:06Z,1.91,load1,system,host.local
|
||||||
|
,,3,2018-05-22T19:53:24.421470485Z,2018-05-22T19:54:24.421470485Z,2018-05-22T19:54:16Z,1.84,load1,system,host.local
|
|
|
@ -1,6 +1,6 @@
|
||||||
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,string,string,double
|
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,string,string,double
|
||||||
#partition,false,false,true,true,false,true,true,false
|
#partition,false,false,true,true,false,true,true,false
|
||||||
#default,_result,,,,,,,
|
#default,_result,,,,,,,
|
||||||
,result,table,_start,_stop,_time,_measurement,_field,_value
|
,result,table,_start,_stop,_time,_measurement,_field,_value
|
||||||
,,0,2018-04-17T00:00:00Z,2018-04-17T00:05:00Z,2018-04-17T00:00:00Z,m1,f1,42.0
|
,,0,2018-04-17T00:00:00Z,2018-04-17T00:05:00Z,2018-04-17T00:00:00Z,m1,f1,42.0
|
||||||
,,0,2018-04-17T00:00:00Z,2018-04-17T00:05:00Z,2018-04-17T00:00:01Z,m1,f1,43.0
|
,,0,2018-04-17T00:00:00Z,2018-04-17T00:05:00Z,2018-04-17T00:00:01Z,m1,f1,43.0
|
|
File diff suppressed because it is too large
Load Diff
Loading…
Reference in New Issue