feat: upgrade flux to v0.188.0 (#23911)

* feat: upgrade flux to 0.171.0

  Tests failing, safety commit. First step in https://github.com/influxdata/influxdb/issues/23815

* fix: remove "org" parameter from writeOptSource

  I attempted to implement the "orgOpt" argument in a similar fashion to pull/23917/head (f6669f7512). However, it looks like Flux doesn't accept "org" as a parameter to "load". It responds with:

      Error calling function "load" @113:16-113:30: error calling function "to" @6:19-6:47: unused arguments [org]

  This brings us from 194 passing tests to 570 passing.

* fix: temporarily disable broken flux tests

  These tests expect rows to be stored in a certain order, but nothing specifies the sort order. This has been fixed in a later update to flux (see 3d6f47ded). Temporarily disable these tests until we include a fixed version of the flux tests.

* chore: add tests from a492993012

  This fixes "test-flux.sh" so it runs the tests within the "flux/" directory. This uncovered some other issues with the tests located within "flux/"; these also needed to be updated to match the newer flux API.

* feat: upgrade flux to 0.172.0

  This includes changes made in cbbf4b27da. Since "test.go" in 2.x diverged from 1.x, some modifications were required to make this compatible.

* feat: upgrade flux to 0.173.0
* feat: upgrade flux to v0.174.0

* fix: Update the condition when resetting cursor (#23522)

  Filters that contain `or` may change between cursor resets, so we must remember to update the condition in the read cursor.

  ```flux
  |> filter(fn: (r) => ((r["_field"] == "field1" and r["_value"] == true) or (r["_field"] == "field2" and r["_value"] == false)))
  ```

  Closes https://github.com/influxdata/flux/issues/4804

* feat: upgrade flux to 0.174.1
* feat: upgrade flux to 0.175.0

* chore: remove end-to-end tests

  These were removed in a492993 for 2.x. They prevent "go test ./..." from completing and, as stated in the original commit, should now be handled by the "fluxtest" harness.

* feat: upgrade flux to 0.176.0

  Some tests needed to be disabled within the flux harness. This is a result of enabling "Optimize Aggregate Window" in flux@05a1065f. These tests are not present in 2.x, so I am unsure whether the breakage is resolved in a later commit.

* feat: upgrade flux to 0.177.0
* feat: upgrade flux to 0.178.0

* feat: upgrade flux to v0.179.0

  This removes all invocations of "flux.RegisterOpSpec". According to flux@e39096d5, "flux.RegisterOpSpec" does nothing in the current version of flux and was removed.

* chore: update fluxtest skip list (#23633)

* chore: manually backport 785a465e9a

  This removes the reference to "flux.Spec".

* build(flux): update flux to v0.181.0 (#23682)
* build(flux): update flux to v0.184.2

* chore: skip more Flux acceptance tests

  There is an issue documented for each skip in test-flux.sh.

* feat: upgrade flux to v0.185.0

  This adds "FluxTesting" to the "HTTPD" configuration. The option is hidden and disabled by default. When "FluxTesting" is set, it enables the default testing flags for Flux, which allow the vectorized float tests, and the tests requiring the "removeRedundantSortNodes" and "labelPolymorphism" feature flags, to work. These changes are based off of d8553c002e.

  flux@3d6f47ded is included within this version of Flux, so we can now include the "group_*" tests.

* feat: upgrade flux to 0.186.0
* feat: upgrade flux to 0.187.0
* feat: upgrade flux to 0.188.0

* fix: re-run ./generate.sh with updated protoc
* fix: restrict cores to match CircleCI documentation

Co-authored-by: davidby-influx <dbyrne@influxdata.com>
Co-authored-by: Markus Westerlind <marwes91@gmail.com>
Co-authored-by: Sean Brickley <sean@wabr.io>
Co-authored-by: Jonathan A. Sternberg <jonathan@influxdata.com>
Co-authored-by: Christopher M. Wolff <chris.wolff@influxdata.com>
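The cursor-reset change above is the least obvious item in the batch, so here is a minimal, self-contained Go sketch of the bug class it guards against. All names below (rowCursor, Reset, keep) are illustrative only and do not come from the InfluxDB storage code; the point is simply that a reusable cursor which caches a compiled filter must refresh that filter on every reset, otherwise an `or` predicate that changed between scans keeps being evaluated in its old form.

```go
package main

import "fmt"

type row struct {
	field string
	value bool
}

// cond stands in for a compiled filter predicate; in the real engine this is
// derived from the Flux filter expression, which may contain `or` branches.
type cond func(row) bool

// rowCursor is reused across scans, so Reset must be handed the condition for
// the *next* scan rather than assuming the previous one still applies.
type rowCursor struct {
	rows []row
	keep cond
	pos  int
}

func (c *rowCursor) Reset(rows []row, keep cond) {
	c.rows = rows
	c.pos = 0
	c.keep = keep // the essential step: refresh the condition on every reset
}

func (c *rowCursor) Next() (row, bool) {
	for ; c.pos < len(c.rows); c.pos++ {
		if r := c.rows[c.pos]; c.keep(r) {
			c.pos++
			return r, true
		}
	}
	return row{}, false
}

func main() {
	cur := &rowCursor{}
	cur.Reset(
		[]row{{"field1", true}, {"field2", false}, {"field2", true}},
		func(r row) bool {
			return r.field == "field1" && r.value || r.field == "field2" && !r.value
		},
	)
	for r, ok := cur.Next(); ok; r, ok = cur.Next() {
		fmt.Println(r)
	}
}
```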
parent be9a3d4a07
commit 5976e41d54
@@ -244,7 +244,7 @@ jobs:
   unit_test_race:
     docker:
       - image: quay.io/influxdb/cross-builder:<< pipeline.parameters.cross-container-tag >>
-    resource_class: large
+    resource_class: xlarge
     steps:
       - checkout
       - restore_cache:

@@ -257,7 +257,17 @@ jobs:
             set -x
             mkdir -p junit-race/
             export GORACE="halt_on_error=1"
-            gotestsum --junitfile junit-race/influxdb.junit.xml -- -race ./...
+            # "resource_class: xlarge" creates a Docker container with eight
+            # virtual cpu cores. However, applications like "nproc" return
+            # the host machine's core count (which in this case is 36).
+            # When less cores are available than advertised, the
+            # race-tests fail.
+            #
+            # We'll manually reduce the number of available cores to what
+            # is specified by the CircleCI documentation:
+            # https://circleci.com/product/features/resource-classes/
+            taskset -c 0-7 \
+              gotestsum --junitfile junit-race/influxdb.junit.xml -- -race ./...
          no_output_timeout: 1500s
      - store_test_results:
          path: junit-race/

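For context on the hunk above: the comment explains that the xlarge Docker executor advertises eight vCPUs while the kernel reports the host's 36 cores, and Go sizes its scheduler from the kernel's answer. A tiny sketch (not part of the diff) of what the test binary observes; on Linux, `runtime.NumCPU` is derived from the scheduler affinity mask, so running under `taskset -c 0-7` makes both values reflect the eight pinned cores and match the documented resource class.

```go
package main

import (
	"fmt"
	"runtime"
)

func main() {
	// NumCPU reflects the CPUs this process is allowed to run on. Inside the
	// CircleCI container, without taskset, that is the host's core count
	// (36 in the comment above), not the 8 vCPUs the resource class grants.
	fmt.Println("NumCPU:    ", runtime.NumCPU())

	// GOMAXPROCS defaults to NumCPU, so the -race run would otherwise schedule
	// far more parallelism than the container can service, which is what made
	// the race job flaky.
	fmt.Println("GOMAXPROCS:", runtime.GOMAXPROCS(0))
}
```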
@@ -1,6 +1,6 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.27.1
+// 	protoc-gen-go v1.28.1
 // 	protoc v3.17.3
 // source: tools_binary.proto

@@ -1,6 +1,6 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.27.1
+// 	protoc-gen-go v1.28.1
 // 	protoc v3.17.3
 // source: internal/backup_util.proto

@@ -15,6 +15,7 @@ import (

 	"github.com/influxdata/flux"
 	"github.com/influxdata/flux/dependencies/testing"
+	"github.com/influxdata/flux/execute/executetest"
 	"github.com/influxdata/influxdb"
 	"github.com/influxdata/influxdb/coordinator"
 	influxdb2 "github.com/influxdata/influxdb/flux/stdlib/influxdata/influxdb"

@@ -312,10 +313,16 @@ func (s *Server) appendHTTPDService(c httpd.Config) error {
 	if err != nil {
 		return err
 	}

+	deps := []flux.Dependency{storageDep, testing.FrameworkConfig{}}
+	if s.config.HTTPD.FluxTesting {
+		deps = append(deps, executetest.NewDefaultTestFlagger())
+	}
+
 	srv.Handler.Controller, err = control.New(
 		s.config.FluxController,
 		s.Logger.With(zap.String("service", "flux-controller")),
-		[]flux.Dependency{storageDep, testing.FrameworkConfig{}},
+		deps,
 	)
 	if err != nil {
 		return err

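The reason the hunk above replaces the inline `[]flux.Dependency{...}` literal with a named `deps` slice is that the hidden `FluxTesting` switch needs somewhere to conditionally append the test flagger before the controller is constructed. A stripped-down sketch of that shape; `Dependency`, `Config`, and the string values are placeholders, not the real flux types:

```go
package main

import "fmt"

// Dependency and Config stand in for flux.Dependency and the HTTPD
// configuration; the strings stand in for the storage dependency, the testing
// framework config, and executetest's default test flagger.
type Dependency string

type Config struct{ FluxTesting bool }

func buildDeps(cfg Config) []Dependency {
	// Always-on dependencies are assembled first...
	deps := []Dependency{"storage", "testing-framework"}
	// ...and the default test flagger is appended only when the hidden,
	// disabled-by-default flag is set, which is what enables the vectorized
	// float and feature-flagged acceptance tests described in the commit.
	if cfg.FluxTesting {
		deps = append(deps, "default-test-flagger")
	}
	return deps
}

func main() {
	fmt.Println(buildDeps(Config{FluxTesting: false}))
	fmt.Println(buildDeps(Config{FluxTesting: true}))
}
```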
@@ -149,7 +149,7 @@ func (rule LocalBucketsRule) Name() string {
 }

 func (rule LocalBucketsRule) Pattern() plan.Pattern {
-	return plan.Pat(influxdb.BucketsKind)
+	return plan.MultiSuccessor(influxdb.BucketsKind)
 }

 func (rule LocalBucketsRule) Rewrite(ctx context.Context, node plan.Node) (plan.Node, bool, error) {

@@ -33,9 +33,44 @@ testcase filter {
 ,,0,2018-05-22T19:53:36Z,system,host.local,load1,1.63
 ")

-    got = testing.loadStorage(csv: input)
+    got = csv.from(csv: input)
+        |> testing.load()
         |> range(start: -100y)
         |> filter(fn: (r) => r._measurement == "system" and r._field == "load1")
         |> drop(columns: ["_start", "_stop"])
     testing.diff(want, got)
 }

+input_issue_4804 = "#datatype,string,long,dateTime:RFC3339,string,string,string,boolean
+#group,false,false,false,true,true,true,false
+#default,_result,,,,,,
+,result,table,_time,_measurement,host,_field,_value
+,,0,2018-05-22T19:53:26Z,system,host.local,load1,true
+,,0,2018-05-22T19:53:36Z,system,host.local,load1,false
+,,1,2018-05-22T19:53:26Z,system,host.local,load3,false
+,,2,2018-05-22T19:53:26Z,system,host.local,load4,true
+"
+
+testcase flux_issue_4804 {
+    expect.planner(rules: [
+        "influxdata/influxdb.FromStorageRule": 1,
+        "PushDownRangeRule": 1,
+        "PushDownFilterRule": 1,
+    ])
+
+    want = csv.from(csv: "#datatype,string,long,dateTime:RFC3339,string,string,string,boolean
+#group,false,false,false,true,true,true,false
+#default,_result,,,,,,
+,result,table,_time,_measurement,host,_field,_value
+,,0,2018-05-22T19:53:26Z,system,host.local,load1,true
+,,1,2018-05-22T19:53:26Z,system,host.local,load3,false
+")
+
+    got = csv.from(csv: input_issue_4804)
+        |> testing.load()
+        |> range(start: -100y)
+        |> filter(fn: (r) => ((r["_field"] == "load1" and r["_value"] == true) or (r["_field"] == "load3" and r["_value"] == false)))
+        |> drop(columns: ["_start", "_stop"])
+    testing.diff(want, got)
+}

@ -0,0 +1,267 @@
|
|||
package influxdb_test
|
||||
|
||||
import "csv"
|
||||
import "testing"
|
||||
|
||||
option now = () => 2030-01-01T00:00:00Z
|
||||
|
||||
input = "
|
||||
#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,host,_field,_value
|
||||
,,0,2018-05-22T19:53:26Z,system,host.local,load1,1.83
|
||||
,,0,2018-05-22T19:53:36Z,system,host.local,load1,1.72
|
||||
,,0,2018-05-22T19:53:46Z,system,host.local,load1,1.74
|
||||
,,0,2018-05-22T19:53:56Z,system,host.local,load1,1.63
|
||||
,,0,2018-05-22T19:54:06Z,system,host.local,load1,1.91
|
||||
,,0,2018-05-22T19:54:16Z,system,host.local,load1,1.84
|
||||
|
||||
,,1,2018-05-22T19:53:26Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:53:36Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:46Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:56Z,sys,host.local,load3,1.96
|
||||
,,1,2018-05-22T19:54:06Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:54:16Z,sys,host.local,load3,1.97
|
||||
|
||||
,,2,2018-05-22T19:53:26Z,system,host.local,load5,1.95
|
||||
,,2,2018-05-22T19:53:36Z,system,host.local,load5,1.92
|
||||
,,2,2018-05-22T19:53:46Z,system,host.local,load5,1.92
|
||||
,,2,2018-05-22T19:53:56Z,system,host.local,load5,1.89
|
||||
,,2,2018-05-22T19:54:06Z,system,host.local,load5,1.94
|
||||
,,2,2018-05-22T19:54:16Z,system,host.local,load5,1.93
|
||||
|
||||
,,3,2018-05-22T19:53:26Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:53:36Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:46Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:56Z,var,host.local,load3,91.96
|
||||
,,3,2018-05-22T19:54:06Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:54:16Z,var,host.local,load3,91.97
|
||||
|
||||
,,4,2018-05-22T19:53:26Z,swap,host.global,used_percent,82.98
|
||||
,,4,2018-05-22T19:53:36Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:46Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:56Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:06Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:16Z,swap,host.global,used_percent,82.64
|
||||
|
||||
#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,loc,_field,_value
|
||||
,,0,2018-05-22T19:53:26Z,locale,en,lat,37.09
|
||||
,,0,2018-05-22T19:53:36Z,locale,en,lat,37.10
|
||||
,,0,2018-05-22T19:53:46Z,locale,en,lat,37.08
|
||||
"
|
||||
|
||||
testcase multi_measure {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|
||||
|> filter(fn: (r) => r["_measurement"] == "system" or r["_measurement"] == "sys")
|
||||
|> filter(fn: (r) => r["_field"] == "load1" or r["_field"] == "load3")
|
||||
|> drop(columns: ["_start", "_stop"])
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,host,_field,_value
|
||||
,,0,2018-05-22T19:53:26Z,system,host.local,load1,1.83
|
||||
,,0,2018-05-22T19:53:36Z,system,host.local,load1,1.72
|
||||
,,0,2018-05-22T19:53:46Z,system,host.local,load1,1.74
|
||||
,,0,2018-05-22T19:53:56Z,system,host.local,load1,1.63
|
||||
,,0,2018-05-22T19:54:06Z,system,host.local,load1,1.91
|
||||
,,0,2018-05-22T19:54:16Z,system,host.local,load1,1.84
|
||||
,,1,2018-05-22T19:53:26Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:53:36Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:46Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:56Z,sys,host.local,load3,1.96
|
||||
,,1,2018-05-22T19:54:06Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:54:16Z,sys,host.local,load3,1.97
|
||||
")
|
||||
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
||||
testcase multi_measure_match_all {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|
||||
|> filter(fn: (r) => r["_measurement"] == "system" or r["_measurement"] == "sys" or r["_measurement"] == "var" or r["_measurement"] == "swap")
|
||||
|> filter(fn: (r) => r["_field"] == "load1" or r["_field"] == "load3" or r["_field"] == "load5" or r["_field"] == "used_percent")
|
||||
|> drop(columns: ["_start", "_stop"])
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,host,_field,_value
|
||||
,,0,2018-05-22T19:53:26Z,system,host.local,load1,1.83
|
||||
,,0,2018-05-22T19:53:36Z,system,host.local,load1,1.72
|
||||
,,0,2018-05-22T19:53:46Z,system,host.local,load1,1.74
|
||||
,,0,2018-05-22T19:53:56Z,system,host.local,load1,1.63
|
||||
,,0,2018-05-22T19:54:06Z,system,host.local,load1,1.91
|
||||
,,0,2018-05-22T19:54:16Z,system,host.local,load1,1.84
|
||||
,,1,2018-05-22T19:53:26Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:53:36Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:46Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:56Z,sys,host.local,load3,1.96
|
||||
,,1,2018-05-22T19:54:06Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:54:16Z,sys,host.local,load3,1.97
|
||||
,,2,2018-05-22T19:53:26Z,system,host.local,load5,1.95
|
||||
,,2,2018-05-22T19:53:36Z,system,host.local,load5,1.92
|
||||
,,2,2018-05-22T19:53:46Z,system,host.local,load5,1.92
|
||||
,,2,2018-05-22T19:53:56Z,system,host.local,load5,1.89
|
||||
,,2,2018-05-22T19:54:06Z,system,host.local,load5,1.94
|
||||
,,2,2018-05-22T19:54:16Z,system,host.local,load5,1.93
|
||||
,,3,2018-05-22T19:53:26Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:53:36Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:46Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:56Z,var,host.local,load3,91.96
|
||||
,,3,2018-05-22T19:54:06Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:54:16Z,var,host.local,load3,91.97
|
||||
,,4,2018-05-22T19:53:26Z,swap,host.global,used_percent,82.98
|
||||
,,4,2018-05-22T19:53:36Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:46Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:56Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:06Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:16Z,swap,host.global,used_percent,82.64
|
||||
")
|
||||
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
||||
testcase multi_measure_tag_filter {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|
||||
|> filter(fn: (r) => r["_measurement"] == "system" or r["_measurement"] == "swap")
|
||||
|> filter(fn: (r) => r["_field"] == "load1" or r["_field"] == "load3" or r["_field"] == "used_percent")
|
||||
|> filter(fn: (r) => r["host"] == "host.local" or r["host"] == "host.global")
|
||||
|> drop(columns: ["_start", "_stop"])
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,host,_field,_value
|
||||
,,0,2018-05-22T19:53:26Z,system,host.local,load1,1.83
|
||||
,,0,2018-05-22T19:53:36Z,system,host.local,load1,1.72
|
||||
,,0,2018-05-22T19:53:46Z,system,host.local,load1,1.74
|
||||
,,0,2018-05-22T19:53:56Z,system,host.local,load1,1.63
|
||||
,,0,2018-05-22T19:54:06Z,system,host.local,load1,1.91
|
||||
,,0,2018-05-22T19:54:16Z,system,host.local,load1,1.84
|
||||
,,4,2018-05-22T19:53:26Z,swap,host.global,used_percent,82.98
|
||||
,,4,2018-05-22T19:53:36Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:46Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:56Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:06Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:16Z,swap,host.global,used_percent,82.64
|
||||
")
|
||||
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
||||
testcase multi_measure_complex_or {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|
||||
|> filter(fn: (r) => (r["_measurement"] == "system" or r["_measurement"] == "swap") or (r["_measurement"] != "var" and r["host"] == "host.local"))
|
||||
|> drop(columns: ["_start", "_stop"])
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,host,_field,_value
|
||||
,,0,2018-05-22T19:53:26Z,system,host.local,load1,1.83
|
||||
,,0,2018-05-22T19:53:36Z,system,host.local,load1,1.72
|
||||
,,0,2018-05-22T19:53:46Z,system,host.local,load1,1.74
|
||||
,,0,2018-05-22T19:53:56Z,system,host.local,load1,1.63
|
||||
,,0,2018-05-22T19:54:06Z,system,host.local,load1,1.91
|
||||
,,0,2018-05-22T19:54:16Z,system,host.local,load1,1.84
|
||||
,,2,2018-05-22T19:53:26Z,system,host.local,load5,1.95
|
||||
,,2,2018-05-22T19:53:36Z,system,host.local,load5,1.92
|
||||
,,2,2018-05-22T19:53:46Z,system,host.local,load5,1.92
|
||||
,,2,2018-05-22T19:53:56Z,system,host.local,load5,1.89
|
||||
,,2,2018-05-22T19:54:06Z,system,host.local,load5,1.94
|
||||
,,2,2018-05-22T19:54:16Z,system,host.local,load5,1.93
|
||||
,,4,2018-05-22T19:53:26Z,swap,host.global,used_percent,82.98
|
||||
,,4,2018-05-22T19:53:36Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:46Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:56Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:06Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:16Z,swap,host.global,used_percent,82.64
|
||||
,,1,2018-05-22T19:53:26Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:53:36Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:46Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:56Z,sys,host.local,load3,1.96
|
||||
,,1,2018-05-22T19:54:06Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:54:16Z,sys,host.local,load3,1.97
|
||||
")
|
||||
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
||||
testcase multi_measure_complex_and {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|
||||
|> filter(fn: (r) => r["_measurement"] != "system" or r["_measurement"] == "swap")
|
||||
|> filter(fn: (r) => r["_measurement"] == "swap" or r["_measurement"] == "var")
|
||||
|> drop(columns: ["_start", "_stop"])
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,host,_field,_value
|
||||
,,4,2018-05-22T19:53:26Z,swap,host.global,used_percent,82.98
|
||||
,,4,2018-05-22T19:53:36Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:46Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:53:56Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:06Z,swap,host.global,used_percent,82.59
|
||||
,,4,2018-05-22T19:54:16Z,swap,host.global,used_percent,82.64
|
||||
,,3,2018-05-22T19:53:26Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:53:36Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:46Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:56Z,var,host.local,load3,91.96
|
||||
,,3,2018-05-22T19:54:06Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:54:16Z,var,host.local,load3,91.97
|
||||
")
|
||||
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
||||
testcase multi_measure_negation {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|
||||
|> filter(fn: (r) => r["_measurement"] != "system")
|
||||
|> filter(fn: (r) => r["host"] == "host.local" or not exists r["host"])
|
||||
|> drop(columns: ["_start", "_stop"])
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,host,_field,_value
|
||||
,,1,2018-05-22T19:53:26Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:53:36Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:46Z,sys,host.local,load3,1.97
|
||||
,,1,2018-05-22T19:53:56Z,sys,host.local,load3,1.96
|
||||
,,1,2018-05-22T19:54:06Z,sys,host.local,load3,1.98
|
||||
,,1,2018-05-22T19:54:16Z,sys,host.local,load3,1.97
|
||||
,,3,2018-05-22T19:53:26Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:53:36Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:46Z,var,host.local,load3,91.97
|
||||
,,3,2018-05-22T19:53:56Z,var,host.local,load3,91.96
|
||||
,,3,2018-05-22T19:54:06Z,var,host.local,load3,91.98
|
||||
,,3,2018-05-22T19:54:16Z,var,host.local,load3,91.97
|
||||
|
||||
#datatype,string,long,dateTime:RFC3339,string,string,string,double
|
||||
#group,false,false,false,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_time,_measurement,loc,_field,_value
|
||||
,,0,2018-05-22T19:53:26Z,locale,en,lat,37.09
|
||||
,,0,2018-05-22T19:53:36Z,locale,en,lat,37.10
|
||||
,,0,2018-05-22T19:53:46Z,locale,en,lat,37.08
|
||||
")
|
||||
|
||||
testing.diff(got, want)
|
||||
}
|
|
@@ -46,7 +46,7 @@ func (rule FromStorageRule) Name() string {
 }

 func (rule FromStorageRule) Pattern() plan.Pattern {
-	return plan.Pat(influxdb.FromKind)
+	return plan.MultiSuccessor(influxdb.FromKind)
 }

 func (rule FromStorageRule) Rewrite(ctx context.Context, node plan.Node) (plan.Node, bool, error) {

@@ -73,7 +73,7 @@ func (rule PushDownGroupRule) Name() string {
 }

 func (rule PushDownGroupRule) Pattern() plan.Pattern {
-	return plan.Pat(universe.GroupKind, plan.Pat(ReadRangePhysKind))
+	return plan.MultiSuccessor(universe.GroupKind, plan.SingleSuccessor(ReadRangePhysKind))
 }

 func (rule PushDownGroupRule) Rewrite(ctx context.Context, node plan.Node) (plan.Node, bool, error) {

@@ -112,7 +112,7 @@ func (rule PushDownRangeRule) Name() string {

 // Pattern matches 'from |> range'
 func (rule PushDownRangeRule) Pattern() plan.Pattern {
-	return plan.Pat(universe.RangeKind, plan.Pat(FromKind))
+	return plan.MultiSuccessor(universe.RangeKind, plan.SingleSuccessor(FromKind))
 }

 // Rewrite converts 'from |> range' into 'ReadRange'

@@ -138,7 +138,7 @@ func (PushDownFilterRule) Name() string {
 }

 func (PushDownFilterRule) Pattern() plan.Pattern {
-	return plan.Pat(universe.FilterKind, plan.Pat(ReadRangePhysKind))
+	return plan.MultiSuccessor(universe.FilterKind, plan.SingleSuccessor(ReadRangePhysKind))
 }

 func (PushDownFilterRule) Rewrite(ctx context.Context, pn plan.Node) (plan.Node, bool, error) {

@@ -227,10 +227,10 @@ func (rule PushDownReadTagKeysRule) Name() string {
 }

 func (rule PushDownReadTagKeysRule) Pattern() plan.Pattern {
-	return plan.Pat(universe.DistinctKind,
-		plan.Pat(universe.SchemaMutationKind,
-			plan.Pat(universe.KeysKind,
-				plan.Pat(ReadRangePhysKind))))
+	return plan.MultiSuccessor(universe.DistinctKind,
+		plan.SingleSuccessor(universe.SchemaMutationKind,
+			plan.SingleSuccessor(universe.KeysKind,
+				plan.SingleSuccessor(ReadRangePhysKind))))
 }

 func hasFieldRef(node *datatypes.Node) bool {

@@ -311,10 +311,10 @@ func (rule PushDownReadTagValuesRule) Name() string {
 }

 func (rule PushDownReadTagValuesRule) Pattern() plan.Pattern {
-	return plan.Pat(universe.DistinctKind,
-		plan.Pat(universe.GroupKind,
-			plan.Pat(universe.SchemaMutationKind,
-				plan.Pat(ReadRangePhysKind))))
+	return plan.MultiSuccessor(universe.DistinctKind,
+		plan.SingleSuccessor(universe.GroupKind,
+			plan.SingleSuccessor(universe.SchemaMutationKind,
+				plan.SingleSuccessor(ReadRangePhysKind))))
 }

 func (rule PushDownReadTagValuesRule) Rewrite(ctx context.Context, pn plan.Node) (plan.Node, bool, error) {

@@ -628,7 +628,7 @@ func (SortedPivotRule) Name() string {
 }

 func (SortedPivotRule) Pattern() plan.Pattern {
-	return plan.Pat(universe.PivotKind, plan.Pat(ReadRangePhysKind))
+	return plan.MultiSuccessor(universe.PivotKind, plan.SingleSuccessor(ReadRangePhysKind))
 }

 func (SortedPivotRule) Rewrite(ctx context.Context, pn plan.Node) (plan.Node, bool, error) {

@@ -692,8 +692,8 @@ var windowPushableAggs = []plan.ProcedureKind{
 }

 func (rule PushDownWindowAggregateRule) Pattern() plan.Pattern {
-	return plan.OneOf(windowPushableAggs,
-		plan.Pat(universe.WindowKind, plan.Pat(ReadRangePhysKind)))
+	return plan.MultiSuccessorOneOf(windowPushableAggs,
+		plan.SingleSuccessor(universe.WindowKind, plan.SingleSuccessor(ReadRangePhysKind)))
 }

 func canPushWindowedAggregate(ctx context.Context, fnNode plan.Node) bool {

@@ -783,9 +783,9 @@ func (PushDownWindowAggregateByTimeRule) Name() string {
 }

 func (rule PushDownWindowAggregateByTimeRule) Pattern() plan.Pattern {
-	return plan.Pat(universe.WindowKind,
-		plan.Pat(universe.SchemaMutationKind,
-			plan.Pat(ReadWindowAggregatePhysKind)))
+	return plan.MultiSuccessor(universe.WindowKind,
+		plan.SingleSuccessor(universe.SchemaMutationKind,
+			plan.SingleSuccessor(ReadWindowAggregatePhysKind)))
 }

 func (PushDownWindowAggregateByTimeRule) Rewrite(ctx context.Context, pn plan.Node) (plan.Node, bool, error) {

@@ -843,8 +843,8 @@ func (p PushDownBareAggregateRule) Name() string {
 }

 func (p PushDownBareAggregateRule) Pattern() plan.Pattern {
-	return plan.OneOf(windowPushableAggs,
-		plan.Pat(ReadRangePhysKind))
+	return plan.MultiSuccessorOneOf(windowPushableAggs,
+		plan.SingleSuccessor(ReadRangePhysKind))
 }

 func (p PushDownBareAggregateRule) Rewrite(ctx context.Context, pn plan.Node) (plan.Node, bool, error) {

@@ -874,7 +874,7 @@ func (PushDownGroupAggregateRule) Name() string {
 }

 func (rule PushDownGroupAggregateRule) Pattern() plan.Pattern {
-	return plan.OneOf(
+	return plan.MultiSuccessorOneOf(
 		[]plan.ProcedureKind{
 			universe.CountKind,
 			universe.SumKind,

@@ -883,7 +883,7 @@ func (rule PushDownGroupAggregateRule) Pattern() plan.Pattern {
 			universe.MinKind,
 			universe.MaxKind,
 		},
-		plan.Pat(ReadGroupPhysKind))
+		plan.SingleSuccessor(ReadGroupPhysKind))
 }

 func (PushDownGroupAggregateRule) Rewrite(ctx context.Context, pn plan.Node) (plan.Node, bool, error) {

@ -0,0 +1,135 @@
|
|||
// TODO(whb): These tests should get ported to the flux repo and removed here
|
||||
// when they are included with a flux release that InfluxDB uses to remove the
|
||||
// redundancy.
|
||||
|
||||
package influxdb_test
|
||||
|
||||
import "csv"
|
||||
import "testing"
|
||||
import "testing/expect"
|
||||
|
||||
option now = () => 2030-01-01T00:00:00Z
|
||||
|
||||
input = "
|
||||
#group,false,false,false,false,true,true,true,true,true,true,true
|
||||
#datatype,string,long,dateTime:RFC3339,long,string,string,string,string,string,string,string
|
||||
#default,_result,,,,,,,,,,
|
||||
,result,table,_time,_value,_field,_measurement,device,fstype,host,mode,path
|
||||
,,0,2020-10-21T20:48:30Z,4881964326,inodes_free,disk,disk1s5,apfs,euterpe.local,ro,/
|
||||
,,0,2020-10-21T20:48:40Z,4881964326,inodes_free,disk,disk1s5,apfs,euterpe.local,ro,/
|
||||
,,0,2020-10-21T20:48:50Z,4881964326,inodes_free,disk,disk1s5,apfs,euterpe.local,ro,/
|
||||
,,1,2020-10-21T20:48:30Z,4294963701,inodes_free,disk,disk2s1,hfs,euterpe.local,ro,/Volumes/IntelliJ IDEA CE
|
||||
,,1,2020-10-21T20:48:40Z,4294963701,inodes_free,disk,disk2s1,hfs,euterpe.local,ro,/Volumes/IntelliJ IDEA CE
|
||||
,,1,2020-10-21T20:48:50Z,4294963701,inodes_free,disk,disk2s1,hfs,euterpe.local,ro,/Volumes/IntelliJ IDEA CE
|
||||
,,2,2020-10-21T20:48:30Z,488514,inodes_used,disk,disk1s5,apfs,euterpe.local,ro,/
|
||||
,,2,2020-10-21T20:48:40Z,488514,inodes_used,disk,disk1s5,apfs,euterpe.local,ro,/
|
||||
,,2,2020-10-21T20:48:50Z,488514,inodes_used,disk,disk1s5,apfs,euterpe.local,ro,/
|
||||
,,3,2020-10-21T20:48:30Z,3578,inodes_used,disk,disk2s1,hfs,euterpe.local,ro,/Volumes/IntelliJ IDEA CE
|
||||
,,3,2020-10-21T20:48:40Z,3578,inodes_used,disk,disk2s1,hfs,euterpe.local,ro,/Volumes/IntelliJ IDEA CE
|
||||
,,3,2020-10-21T20:48:50Z,3578,inodes_used,disk,disk2s1,hfs,euterpe.local,ro,/Volumes/IntelliJ IDEA CE
|
||||
|
||||
#group,false,false,false,false,true,true,true,true,true
|
||||
#datatype,string,long,dateTime:RFC3339,double,string,string,string,string,string
|
||||
#default,_result,,,,,,,,
|
||||
,result,table,_time,_value,_field,_measurement,cpu,host,region
|
||||
,,4,2020-10-21T20:48:30Z,69.30000000167638,usage_idle,cpu,cpu0,euterpe.local,south
|
||||
,,4,2020-10-21T20:48:40Z,67.36736736724372,usage_idle,cpu,cpu0,euterpe.local,south
|
||||
,,4,2020-10-21T20:48:50Z,69.23076923005354,usage_idle,cpu,cpu0,euterpe.local,south
|
||||
,,5,2020-10-21T20:48:30Z,96.10000000102445,usage_idle,cpu,cpu1,euterpe.local,south
|
||||
,,5,2020-10-21T20:48:40Z,95.70000000055181,usage_idle,cpu,cpu1,euterpe.local,south
|
||||
,,5,2020-10-21T20:48:50Z,95.89999999860534,usage_idle,cpu,cpu1,euterpe.local,south
|
||||
|
||||
#group,false,false,false,false,true,true,true,true,true
|
||||
#datatype,string,long,dateTime:RFC3339,double,string,string,string,string,string
|
||||
#default,_result,,,,,,,,
|
||||
,result,table,_time,_value,_field,_measurement,cpu,host,region
|
||||
,,6,2020-10-21T20:48:30Z,69.30000000167638,usage_idle,cpu,cpu0,mnemosyne.local,east
|
||||
,,6,2020-10-21T20:48:40Z,67.36736736724372,usage_idle,cpu,cpu0,mnemosyne.local,east
|
||||
,,6,2020-10-21T20:48:50Z,69.23076923005354,usage_idle,cpu,cpu0,mnemosyne.local,east
|
||||
,,7,2020-10-21T20:48:30Z,96.10000000102445,usage_idle,cpu,cpu1,mnemosyne.local,east
|
||||
,,7,2020-10-21T20:48:40Z,95.70000000055181,usage_idle,cpu,cpu1,mnemosyne.local,east
|
||||
,,7,2020-10-21T20:48:50Z,95.89999999860534,usage_idle,cpu,cpu1,mnemosyne.local,east
|
||||
|
||||
#group,false,false,true,true,false,false,true,true,true
|
||||
#datatype,string,long,string,string,dateTime:RFC3339,double,string,string,string
|
||||
#default,_result,,,,,,,,
|
||||
,result,table,_field,_measurement,_time,_value,cpu,host,region
|
||||
,,8,usage_user,cpu,2020-10-21T20:48:30Z,19.30000000007567,cpu0,euterpe.local,north
|
||||
,,8,usage_user,cpu,2020-10-21T20:48:40Z,20.020020020038682,cpu0,euterpe.local,north
|
||||
,,8,usage_user,cpu,2020-10-21T20:48:50Z,18.581418581407107,cpu0,euterpe.local,north
|
||||
,,9,usage_user,cpu,2020-10-21T20:48:30Z,2.3000000000138243,cpu1,euterpe.local,north
|
||||
,,9,usage_user,cpu,2020-10-21T20:48:40Z,2.4000000000536965,cpu1,euterpe.local,north
|
||||
,,9,usage_user,cpu,2020-10-21T20:48:50Z,2.0999999999423746,cpu1,euterpe.local,north
|
||||
"
|
||||
|
||||
testcase tag_values_measurement_or_predicate {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> filter(fn: (r) => r["_measurement"] == "cpu")
|
||||
|> filter(fn: (r) => r["_measurement"] == "someOtherThing" or r["host"] == "euterpe.local")
|
||||
|> keep(columns: ["region"])
|
||||
|> group()
|
||||
|> distinct(column: "region")
|
||||
|> limit(n: 200)
|
||||
|> sort()
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,string
|
||||
#group,false,false,false
|
||||
#default,0,,
|
||||
,result,table,_value
|
||||
,,0,north
|
||||
,,0,south
|
||||
")
|
||||
|
||||
expect.planner(rules: ["PushDownReadTagValuesRule": 1])
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
||||
testcase tag_values_measurement_or_negation {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> filter(fn: (r) => r["_measurement"] != "cpu")
|
||||
|> filter(fn: (r) => r["_measurement"] == "someOtherThing" or r["fstype"] != "apfs")
|
||||
|> keep(columns: ["fstype"])
|
||||
|> group()
|
||||
|> distinct(column: "fstype")
|
||||
|> limit(n: 200)
|
||||
|> sort()
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,string
|
||||
#group,false,false,false
|
||||
#default,0,,
|
||||
,result,table,_value
|
||||
,,0,hfs
|
||||
")
|
||||
|
||||
expect.planner(rules: ["PushDownReadTagValuesRule": 1])
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
||||
testcase tag_values_measurement_or_regex {
|
||||
got = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> filter(fn: (r) => r["_measurement"] =~ /cp.*/)
|
||||
|> filter(fn: (r) => r["_measurement"] == "someOtherThing" or r["host"] !~ /mnemo.*/)
|
||||
|> keep(columns: ["region"])
|
||||
|> group()
|
||||
|> distinct(column: "region")
|
||||
|> limit(n: 200)
|
||||
|> sort()
|
||||
|
||||
want = csv.from(csv: "#datatype,string,long,string
|
||||
#group,false,false,false
|
||||
#default,0,,
|
||||
,result,table,_value
|
||||
,,0,north
|
||||
,,0,south
|
||||
")
|
||||
|
||||
expect.planner(rules: ["PushDownReadTagValuesRule": 1])
|
||||
testing.diff(got, want)
|
||||
}
|
||||
|
|
@@ -40,7 +40,6 @@ type ToOpSpec struct {
 func init() {
 	toSignature := runtime.MustLookupBuiltinType("influxdata/influxdb", influxdb.ToKind)
 	runtime.ReplacePackageValue("influxdata/influxdb", "to", flux.MustValue(flux.FunctionValueWithSideEffect(ToKind, createToOpSpec, toSignature)))
-	flux.RegisterOpSpec(ToKind, func() flux.OperationSpec { return &ToOpSpec{} })
 	plan.RegisterProcedureSpecWithSideEffect(ToKind, newToProcedure, ToKind)
 	execute.RegisterTransformation(ToKind, createToTransformation)
 }

@@ -279,7 +278,7 @@ func (t *ToTransformation) Process(id execute.DatasetID, tbl flux.Table) error {
 	var fn *execute.RowMapPreparedFn
 	if t.fn != nil {
 		var err error
-		if fn, err = t.fn.Prepare(columns); err != nil {
+		if fn, err = t.fn.Prepare(t.Ctx, columns); err != nil {
 			return err
 		}
 	}

@@ -26,27 +26,6 @@ func TestTo_Query(t *testing.T) {
 		{
 			Name: "from with database with range",
 			Raw:  `from(bucket:"mydb") |> to(bucket:"myotherdb/autogen")`,
 			Want: &flux.Spec{
 				Operations: []*flux.Operation{
 					{
 						ID: "from0",
 						Spec: &influxdb.FromOpSpec{
 							Bucket: influxdb.NameOrID{Name: "mydb"},
 						},
 					},
 					{
 						ID: "influx1x/toKind1",
 						Spec: &influxdb.ToOpSpec{
 							Bucket:            "myotherdb/autogen",
 							TimeColumn:        "_time",
 							MeasurementColumn: "_measurement",
 						},
 					},
 				},
 				Edges: []flux.Edge{
 					{Parent: "from0", Child: "influx1x/toKind1"},
 				},
 			},
 		},
 	}
 	for _, tc := range tests {

@@ -24,7 +24,6 @@ type DatabasesOpSpec struct {
 func init() {
 	databasesSignature := runtime.MustLookupBuiltinType("influxdata/influxdb/v1", "databases")
 	runtime.ReplacePackageValue("influxdata/influxdb/v1", "databases", flux.MustValue(flux.FunctionValue(DatabasesKind, createDatabasesOpSpec, databasesSignature)))
-	flux.RegisterOpSpec(DatabasesKind, newDatabasesOp)
 	plan.RegisterProcedureSpec(DatabasesKind, newDatabasesProcedure, DatabasesKind)
 }

@ -49,7 +49,8 @@ testcase bare_count {
|
|||
,,0,23
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> count()
|
||||
|> keep(columns: ["_value"])
|
||||
|
@ -69,7 +70,8 @@ testcase bare_sum {
|
|||
,,0,23938.0
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> sum()
|
||||
|> keep(columns: ["_value"])
|
||||
|
@ -89,7 +91,8 @@ testcase bare_mean {
|
|||
,,0,1040.782608696
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> mean()
|
||||
|> keep(columns: ["_value"])
|
||||
|
@ -109,7 +112,8 @@ testcase bare_min {
|
|||
,,0,2021-01-26T08:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> min()
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
@ -129,7 +133,8 @@ testcase bare_max {
|
|||
,,0,2019-11-21T08:00:00Z,2187,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> max()
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
@ -149,7 +154,8 @@ testcase bare_first {
|
|||
,,0,2019-04-11T07:00:00Z,0,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> first()
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
@ -169,7 +175,8 @@ testcase bare_last {
|
|||
,,0,2021-01-26T08:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> last()
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
@@ -3,6 +3,7 @@ package universe_test
 import "testing"
 import "testing/expect"
 import "planner"
+import "csv"

 option now = () => (2030-01-01T00:00:00Z)

@@ -29,7 +30,8 @@ output = "
 "

 merge_filter_fn = () =>
-    testing.loadStorage(csv: input)
+    csv.from(csv: input)
+        |> testing.load()
         |> range(start: 2018-05-22T19:53:26Z)
         |> filter(fn: (r) => r["_value"] == 1.77)
         |> filter(fn: (r) => r["_field"] == "load4")

@@ -39,5 +41,5 @@ testcase merge_filter {
     // expect.planner(rules: ["MergeFiltersRule": 1])

     result = merge_filter_fn()
-    testing.diff(got: result, want: testing.loadMem(csv: output))
+    testing.diff(got: result, want: csv.from(csv: output))
 }

@ -51,7 +51,8 @@ testcase windowed_by_time_count {
|
|||
,,0,2021-01-01T00:00:00Z,1
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> aggregateWindow(every: 1y, fn: count, timeSrc: "_start", createEmpty: false)
|
||||
|> keep(columns: ["_time", "_value"])
|
||||
|
@ -73,7 +74,8 @@ testcase windowed_by_time_sum {
|
|||
,,0,2021-01-01T00:00:00Z,-1099.00
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> aggregateWindow(every: 1y, fn: sum, timeSrc: "_start", createEmpty: false)
|
||||
|> keep(columns: ["_time", "_value"])
|
||||
|
@ -95,7 +97,8 @@ testcase windowed_by_time_mean {
|
|||
,,0,2021-01-01T00:00:00Z,-1099.00
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> aggregateWindow(every: 1y, fn: mean, timeSrc: "_start", createEmpty: false)
|
||||
|> keep(columns: ["_time", "_value"])
|
||||
|
@ -117,7 +120,8 @@ testcase windowed_by_time_min {
|
|||
,,0,2021-01-01T00:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> aggregateWindow(every: 1y, fn: min, timeSrc: "_start", createEmpty: false)
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
@ -140,7 +144,8 @@ testcase windowed_by_time_max {
|
|||
,,0,2021-01-01T00:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> aggregateWindow(every: 1y, fn: max, timeSrc: "_start", createEmpty: false)
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
@ -163,7 +168,8 @@ testcase windowed_by_time_first {
|
|||
,,0,2021-01-01T00:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> aggregateWindow(every: 1y, fn: first, timeSrc: "_start", createEmpty: false)
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
@ -186,7 +192,8 @@ testcase windowed_by_time_last {
|
|||
,,0,2021-01-01T00:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> aggregateWindow(every: 1y, fn: last, timeSrc: "_start", createEmpty: false)
|
||||
|> keep(columns: ["_time", "_value", "_field", "_measurement"])
|
||||
|
|
|
@ -51,7 +51,8 @@ testcase windowed_count {
|
|||
,,2,2021-01-01T00:00:00Z,1
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> window(every: 1y)
|
||||
|> count()
|
||||
|
@ -74,7 +75,8 @@ testcase windowed_sum {
|
|||
,,2,2021-01-01T00:00:00Z,-1099.00
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> window(every: 1y)
|
||||
|> sum()
|
||||
|
@ -97,7 +99,8 @@ testcase windowed_mean {
|
|||
,,2,2021-01-01T00:00:00Z,-1099.00
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> window(every: 1y)
|
||||
|> mean()
|
||||
|
@ -120,7 +123,8 @@ testcase windowed_min {
|
|||
,,0,2021-01-26T08:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> window(every: 1y)
|
||||
|> min()
|
||||
|
@ -144,7 +148,8 @@ testcase windowed_max {
|
|||
,,0,2021-01-26T08:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> window(every: 1y)
|
||||
|> max()
|
||||
|
@ -168,7 +173,8 @@ testcase windowed_first {
|
|||
,,0,2021-01-26T08:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> window(every: 1y)
|
||||
|> first()
|
||||
|
@ -192,7 +198,8 @@ testcase windowed_last {
|
|||
,,0,2021-01-26T08:00:00Z,-1099,bank,pge_bill
|
||||
",
|
||||
)
|
||||
result = testing.loadStorage(csv: input)
|
||||
result = csv.from(csv: input)
|
||||
|> testing.load()
|
||||
|> range(start: -100y)
|
||||
|> window(every: 1y)
|
||||
|> last()
|
||||
|
|
go.mod
@@ -16,7 +16,7 @@ require (
 	github.com/golang/mock v1.5.0
 	github.com/golang/snappy v0.0.4
 	github.com/google/go-cmp v0.5.7
-	github.com/influxdata/flux v0.170.1
+	github.com/influxdata/flux v0.188.0
 	github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69
 	github.com/influxdata/influxql v1.1.1-0.20211004132434-7e7d61973256
 	github.com/influxdata/pkg-config v0.2.11

@ -25,7 +25,7 @@ require (
|
|||
github.com/jsternberg/zap-logfmt v1.2.0
|
||||
github.com/jwilder/encoding v0.0.0-20170811194829-b4e1701a28ef
|
||||
github.com/klauspost/pgzip v1.0.2-0.20170402124221-0bf5dcad4ada
|
||||
github.com/mattn/go-isatty v0.0.12
|
||||
github.com/mattn/go-isatty v0.0.14
|
||||
github.com/mileusna/useragent v0.0.0-20190129205925-3e331f0949a5
|
||||
github.com/opentracing/opentracing-go v1.2.0
|
||||
github.com/peterh/liner v1.0.1-0.20180619022028-8c1271fcf47f
|
||||
|
@ -37,7 +37,7 @@ require (
|
|||
github.com/retailnext/hllpp v1.0.1-0.20180308014038-101a6d2f8b52
|
||||
github.com/spf13/cast v1.3.0
|
||||
github.com/spf13/cobra v0.0.3
|
||||
github.com/stretchr/testify v1.7.0
|
||||
github.com/stretchr/testify v1.8.0
|
||||
github.com/tinylib/msgp v1.1.0
|
||||
github.com/uber/jaeger-client-go v2.28.0+incompatible
|
||||
github.com/xlab/treeprint v0.0.0-20180616005107-d6fb6747feb6
|
||||
|
@ -45,12 +45,12 @@ require (
|
|||
go.uber.org/zap v1.16.0
|
||||
golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871
|
||||
golang.org/x/sync v0.0.0-20220513210516-0976fa681c29
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e
|
||||
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad
|
||||
golang.org/x/text v0.3.7
|
||||
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba
|
||||
golang.org/x/tools v0.1.9
|
||||
golang.org/x/tools v0.1.10
|
||||
google.golang.org/grpc v1.44.0
|
||||
google.golang.org/protobuf v1.27.1
|
||||
google.golang.org/protobuf v1.28.1
|
||||
)
|
||||
|
||||
require (
|
||||
|
@ -73,7 +73,7 @@ require (
|
|||
github.com/SAP/go-hdb v0.14.1 // indirect
|
||||
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 // indirect
|
||||
github.com/aokoli/goutils v1.0.1 // indirect
|
||||
github.com/apache/arrow/go/v7 v7.0.0 // indirect
|
||||
github.com/apache/arrow/go/v7 v7.0.1 // indirect
|
||||
github.com/aws/aws-sdk-go v1.30.12 // indirect
|
||||
github.com/aws/aws-sdk-go-v2 v1.11.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.0.0 // indirect
|
||||
|
@ -95,18 +95,19 @@ require (
|
|||
github.com/dimchansky/utfbom v1.1.0 // indirect
|
||||
github.com/dustin/go-humanize v1.0.0 // indirect
|
||||
github.com/eclipse/paho.mqtt.golang v1.2.0 // indirect
|
||||
github.com/fatih/color v1.13.0 // indirect
|
||||
github.com/form3tech-oss/jwt-go v3.2.5+incompatible // indirect
|
||||
github.com/gabriel-vasile/mimetype v1.4.0 // indirect
|
||||
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd // indirect
|
||||
github.com/go-sql-driver/mysql v1.5.0 // indirect
|
||||
github.com/goccy/go-json v0.7.10 // indirect
|
||||
github.com/goccy/go-json v0.9.6 // indirect
|
||||
github.com/gofrs/uuid v3.3.0+incompatible // indirect
|
||||
github.com/gogo/protobuf v1.3.2 // indirect
|
||||
github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe // indirect
|
||||
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec // indirect
|
||||
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
|
||||
github.com/golang/protobuf v1.5.2 // indirect
|
||||
github.com/google/flatbuffers v2.0.5+incompatible // indirect
|
||||
github.com/google/flatbuffers v22.9.30-0.20221019131441-5792623df42e+incompatible // indirect
|
||||
github.com/google/uuid v1.3.0 // indirect
|
||||
github.com/googleapis/gax-go/v2 v2.0.5 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway v1.16.0 // indirect
|
||||
|
@ -115,25 +116,28 @@ require (
|
|||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/influxdata/gosnowflake v1.6.9 // indirect
|
||||
github.com/influxdata/influxdb-client-go/v2 v2.3.1-0.20210518120617-5d1fff431040 // indirect
|
||||
github.com/influxdata/influxdb-iox-client-go v1.0.0-beta.1 // indirect
|
||||
github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 // indirect
|
||||
github.com/influxdata/line-protocol/v2 v2.2.1 // indirect
|
||||
github.com/influxdata/tdigest v0.0.2-0.20210216194612-fc98d27c9e8b // indirect
|
||||
github.com/jmespath/go-jmespath v0.4.0 // indirect
|
||||
github.com/jstemmer/go-junit-report v0.9.1 // indirect
|
||||
github.com/klauspost/compress v1.13.6 // indirect
|
||||
github.com/klauspost/compress v1.14.2 // indirect
|
||||
github.com/klauspost/crc32 v0.0.0-20161016154125-cb6bfca970f6 // indirect
|
||||
github.com/lib/pq v1.0.0 // indirect
|
||||
github.com/mattn/go-colorable v0.1.9 // indirect
|
||||
github.com/mattn/go-ieproxy v0.0.1 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.3 // indirect
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae // indirect
|
||||
github.com/philhofer/fwd v1.0.0 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.11 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.12 // indirect
|
||||
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/prometheus/procfs v0.0.11 // indirect
|
||||
github.com/segmentio/kafka-go v0.2.0 // indirect
|
||||
github.com/sergi/go-diff v1.0.0 // indirect
|
||||
github.com/sergi/go-diff v1.1.0 // indirect
|
||||
github.com/sirupsen/logrus v1.8.1 // indirect
|
||||
github.com/spf13/pflag v1.0.5 // indirect
|
||||
github.com/uber-go/tally v3.3.15+incompatible // indirect
|
||||
|
@ -143,16 +147,17 @@ require (
|
|||
github.com/willf/bitset v1.1.9 // indirect
|
||||
go.opencensus.io v0.23.0 // indirect
|
||||
go.uber.org/atomic v1.7.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20211216164055-b2b84827b756 // indirect
|
||||
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 // indirect
|
||||
golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 // indirect
|
||||
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect
|
||||
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd // indirect
|
||||
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c // indirect
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
|
||||
gonum.org/v1/gonum v0.9.3 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f // indirect
|
||||
gonum.org/v1/gonum v0.11.0 // indirect
|
||||
google.golang.org/api v0.47.0 // indirect
|
||||
google.golang.org/appengine v1.6.7 // indirect
|
||||
google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79 // indirect
|
||||
google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350 // indirect
|
||||
gopkg.in/yaml.v2 v2.3.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
)
|
||||
|
|
98
go.sum
98
go.sum
|
@ -133,8 +133,8 @@ github.com/aokoli/goutils v1.0.1/go.mod h1:SijmP0QR8LtwsmDs8Yii5Z/S4trXFGFC2oO5g
|
|||
github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db/go.mod h1:VTxUBvSJ3s3eHAg65PNgrsn5BtqCRPdmyXh6rAfdxN0=
|
||||
github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40 h1:q4dksr6ICHXqG5hm0ZW5IHyeEJXoIJSOZeBLmWPNeIQ=
|
||||
github.com/apache/arrow/go/arrow v0.0.0-20211112161151-bc219186db40/go.mod h1:Q7yQnSMnLvcXlZ8RV+jwz/6y1rQTqbX6C82SndT52Zs=
|
||||
github.com/apache/arrow/go/v7 v7.0.0 h1:3d+Qgwo/r75bNhC6N0MMzZXQhsOyB0TSn6wljfuBNWo=
|
||||
github.com/apache/arrow/go/v7 v7.0.0/go.mod h1:vG2y+fH8mEUcX29tM6hOULGE06/XqEI8sG5fANM6T5w=
|
||||
github.com/apache/arrow/go/v7 v7.0.1 h1:WpCfq+AQxvXaI6/KplHE27MPMFx5av0o5NbPCTAGfy4=
|
||||
github.com/apache/arrow/go/v7 v7.0.1/go.mod h1:JxDpochJbCVxqbX4G8i1jRqMrnTCQdf8pTccAfLD8Es=
|
||||
github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
|
||||
github.com/apache/thrift v0.13.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
|
||||
github.com/apache/thrift v0.15.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
|
||||
|
@ -234,6 +234,7 @@ github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7
|
|||
github.com/coreos/pkg v0.0.0-20160727233714-3ac0863d7acf/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
|
||||
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
|
||||
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4=
|
||||
github.com/dave/jennifer v1.2.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhrIygKg=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
|
@ -277,6 +278,8 @@ github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7
|
|||
github.com/evanphx/json-patch v4.2.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLiYLvXMP4fmwYFNcr97nuDLSk=
|
||||
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
|
||||
github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w=
|
||||
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
|
||||
github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
|
||||
github.com/fogleman/gg v1.3.0/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
|
||||
github.com/form3tech-oss/jwt-go v3.2.2+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
|
||||
|
@ -285,6 +288,10 @@ github.com/form3tech-oss/jwt-go v3.2.5+incompatible/go.mod h1:pbq4aXjuKjdthFRnoD
|
|||
github.com/foxcpp/go-mockdns v0.0.0-20201212160233-ede2f9158d15 h1:nLPjjvpUAODOR6vY/7o0hBIk8iTr19Fvmf8aFx/kC7A=
|
||||
github.com/franela/goblin v0.0.0-20200105215937-c9ffbefa60db/go.mod h1:7dvUGVsVBjqR7JHJk0brhHOZYGmfBYOrK0ZhYMEtBr4=
|
||||
github.com/franela/goreq v0.0.0-20171204163338-bcd34c9993f8/go.mod h1:ZhphrRTfi2rbfLwlschooIH4+wKKDR4Pdxhh+TRoA20=
|
||||
github.com/frankban/quicktest v1.11.0/go.mod h1:K+q6oSqb0W0Ininfk863uOk1lMy69l/P6txr3mVT54s=
|
||||
github.com/frankban/quicktest v1.11.2/go.mod h1:K+q6oSqb0W0Ininfk863uOk1lMy69l/P6txr3mVT54s=
|
||||
github.com/frankban/quicktest v1.13.0 h1:yNZif1OkDfNoDfb9zZa9aXIpejNR4F23Wely0c+Qdqk=
|
||||
github.com/frankban/quicktest v1.13.0/go.mod h1:qLE0fzW0VuyUAJgPU19zByoIr0HtCHN/r/VLSOOIySU=
|
||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||
github.com/gabriel-vasile/mimetype v1.4.0 h1:Cn9dkdYsMIu56tGho+fqzh7XmvY2YyGU0FnbhiOsEro=
|
||||
github.com/gabriel-vasile/mimetype v1.4.0/go.mod h1:fA8fi6KUiG7MgQQ+mEWotXoEOvmxRtOJlERCzSmRvr8=
|
||||
|
@ -404,8 +411,9 @@ github.com/gobuffalo/packd v0.1.0/go.mod h1:M2Juc+hhDXf/PnmBANFCqx4DM3wRbgDvnVWe
|
|||
github.com/gobuffalo/packr/v2 v2.0.9/go.mod h1:emmyGweYTm6Kdper+iywB6YK5YzuKchGtJQZ0Odn4pQ=
|
||||
github.com/gobuffalo/packr/v2 v2.2.0/go.mod h1:CaAwI0GPIAv+5wKLtv8Afwl+Cm78K/I/VCm/3ptBN+0=
|
||||
github.com/gobuffalo/syncx v0.0.0-20190224160051-33c29581e754/go.mod h1:HhnNqWY95UYwwW3uSASeV7vtgYkT2t16hJgV3AEPUpw=
|
||||
github.com/goccy/go-json v0.7.10 h1:ulhbuNe1JqE68nMRXXTJRrUu0uhouf0VevLINxQq4Ec=
|
||||
github.com/goccy/go-json v0.7.10/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||
github.com/goccy/go-json v0.9.6 h1:5/4CtRQdtsX0sal8fdVhTaiMN01Ri8BExZZ8iRmHQ6E=
|
||||
github.com/goccy/go-json v0.9.6/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
|
||||
github.com/gofrs/uuid v3.3.0+incompatible h1:8K4tyRfvU1CYPgJsveYFQMhpFd/wXNM7iK6rR7UHz84=
|
||||
github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
||||
github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s=
|
||||
|
@ -470,8 +478,8 @@ github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4=
|
|||
github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA=
|
||||
github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/flatbuffers v2.0.0+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/flatbuffers v2.0.5+incompatible h1:ANsW0idDAXIY+mNHzIHxWRfabV2x5LUEEIIWcwsYgB8=
|
||||
github.com/google/flatbuffers v2.0.5+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/flatbuffers v22.9.30-0.20221019131441-5792623df42e+incompatible h1:Bqgl5d9t2UlT8pv9Oc/lkkI8yYk0jCwHkZKkHzbxEsc=
|
||||
github.com/google/flatbuffers v22.9.30-0.20221019131441-5792623df42e+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
|
@ -579,8 +587,8 @@ github.com/imdario/mergo v0.3.5/go.mod h1:2EnlNZ0deacrJVfApfmtdGgDfMuh/nq6Ok1EcJ
|
|||
github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM=
|
||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/influxdata/flux v0.65.0/go.mod h1:BwN2XG2lMszOoquQaFdPET8FRQfrXiZsWmcMO9rkaVY=
|
||||
github.com/influxdata/flux v0.170.1 h1:aP0boTO8WZ1yHVU9v01lVVdtLdFbwigeALIQPXTwbc0=
|
||||
github.com/influxdata/flux v0.170.1/go.mod h1:fNtcZ8tqtVDjwWYcPRvCdlY5t3n+NYCc5xunKCmigQA=
|
||||
github.com/influxdata/flux v0.188.0 h1:y9F3SAswnPKkbHWJF/x79IgbwBezlmuqAdXIkzthwIc=
|
||||
github.com/influxdata/flux v0.188.0/go.mod h1:HdQg0JxHSQhJhEProUY/7QRi9eqnM0HP5L1fH3EtS/c=
|
||||
github.com/influxdata/gosnowflake v1.6.9 h1:BhE39Mmh8bC+Rvd4QQsP2gHypfeYIH1wqW1AjGWxxrE=
|
||||
github.com/influxdata/gosnowflake v1.6.9/go.mod h1:9W/BvCXOKx2gJtQ+jdi1Vudev9t9/UDOEHnlJZ/y1nU=
|
||||
github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 h1:WQsmW0fXO4ZE/lFGIE84G6rIV5SJN3P3sjIXAP1a8eU=
|
||||
|
@ -588,6 +596,8 @@ github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69/go.mod h1:
|
|||
github.com/influxdata/influxdb v1.8.0/go.mod h1:SIzcnsjaHRFpmlxpJ4S3NT64qtEKYweNTUMb/vh0OMQ=
|
||||
github.com/influxdata/influxdb-client-go/v2 v2.3.1-0.20210518120617-5d1fff431040 h1:MBLCfcSsUyFPDJp6T7EoHp/Ph3Jkrm4EuUKLD2rUWHg=
|
||||
github.com/influxdata/influxdb-client-go/v2 v2.3.1-0.20210518120617-5d1fff431040/go.mod h1:vLNHdxTJkIf2mSLvGrpj8TCcISApPoXkaxP8g9uRlW8=
|
||||
github.com/influxdata/influxdb-iox-client-go v1.0.0-beta.1 h1:zDmAiE2o3Y/YZinI6CENzgQueJDuibUB9TWOZC5zCq0=
|
||||
github.com/influxdata/influxdb-iox-client-go v1.0.0-beta.1/go.mod h1:Chl4pz0SRqoPmEavex4vZaQlunqXqrtEPWAN54THFfo=
|
||||
github.com/influxdata/influxdb1-client v0.0.0-20191209144304-8bf82d3c094d/go.mod h1:qj24IKcXYK6Iy9ceXlo3Tc+vtHo9lIhSX5JddghvEPo=
|
||||
github.com/influxdata/influxql v1.1.0/go.mod h1:KpVI7okXjK6PRi3Z5B+mtKZli+R1DnZgb3N+tzevNgo=
|
||||
github.com/influxdata/influxql v1.1.1-0.20211004132434-7e7d61973256 h1:8io3jjCJ0j9NFvq3/m/rMrDiEILpsfOqWDPItUt/078=
|
||||
|
@ -595,6 +605,13 @@ github.com/influxdata/influxql v1.1.1-0.20211004132434-7e7d61973256/go.mod h1:gH
|
|||
github.com/influxdata/line-protocol v0.0.0-20180522152040-32c6aa80de5e/go.mod h1:4kt73NQhadE3daL3WhR5EJ/J2ocX0PZzwxQ0gXJ7oFE=
|
||||
github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 h1:W9WBk7wlPfJLvMCdtV4zPulc4uCPrlywQOmbFOhgQNU=
|
||||
github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo=
|
||||
github.com/influxdata/line-protocol-corpus v0.0.0-20210519164801-ca6fa5da0184/go.mod h1:03nmhxzZ7Xk2pdG+lmMd7mHDfeVOYFyhOgwO61qWU98=
|
||||
github.com/influxdata/line-protocol-corpus v0.0.0-20210922080147-aa28ccfb8937 h1:MHJNQ+p99hFATQm6ORoLmpUCF7ovjwEFshs/NHzAbig=
|
||||
github.com/influxdata/line-protocol-corpus v0.0.0-20210922080147-aa28ccfb8937/go.mod h1:BKR9c0uHSmRgM/se9JhFHtTT7JTO67X23MtKMHtZcpo=
|
||||
github.com/influxdata/line-protocol/v2 v2.0.0-20210312151457-c52fdecb625a/go.mod h1:6+9Xt5Sq1rWx+glMgxhcg2c0DUaehK+5TDcPZ76GypY=
|
||||
github.com/influxdata/line-protocol/v2 v2.1.0/go.mod h1:QKw43hdUBg3GTk2iC3iyCxksNj7PX9aUSeYOYE/ceHY=
|
||||
github.com/influxdata/line-protocol/v2 v2.2.1 h1:EAPkqJ9Km4uAxtMRgUubJyqAr6zgWM0dznKMLRauQRE=
|
||||
github.com/influxdata/line-protocol/v2 v2.2.1/go.mod h1:DmB3Cnh+3oxmG6LOBIxce4oaL4CPj3OmMPgvauXh+tM=
|
||||
github.com/influxdata/pkg-config v0.2.11 h1:RDlWAvkTARzPRGChq34x179TYlRndq8OU5Ro80E9g3Q=
|
||||
github.com/influxdata/pkg-config v0.2.11/go.mod h1:EMS7Ll0S4qkzDk53XS3Z72/egBsPInt+BeRxb0WeSwk=
|
||||
github.com/influxdata/promql/v2 v2.12.0/go.mod h1:fxOPu+DY0bqCTCECchSRtWfc+0X19ybifQhZoQNF5D8=
|
||||
|
@ -643,9 +660,11 @@ github.com/klauspost/asmfmt v1.3.1/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j
|
|||
github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.9.5/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
|
||||
github.com/klauspost/compress v1.13.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
|
||||
github.com/klauspost/compress v1.13.6 h1:P76CopJELS0TiO2mebmnzgWaajssP/EszplttgQxcgc=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.14.2 h1:S0OHlFk/Gbon/yauFJ4FfJJF5V0fc5HbBTJazi28pRw=
|
||||
github.com/klauspost/compress v1.14.2/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/cpuid v0.0.0-20170728055534-ae7887de9fa5/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
|
||||
github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg=
|
||||
github.com/klauspost/crc32 v0.0.0-20161016154125-cb6bfca970f6 h1:KAZ1BW2TCmT6PRihDPpocIy1QTtsAsrx6TneU/4+CMg=
|
||||
github.com/klauspost/crc32 v0.0.0-20161016154125-cb6bfca970f6/go.mod h1:+ZoRqAPRLkC4NPOvfYeR5KNOrY6TD+/sAC3HXPZgDYg=
|
||||
github.com/klauspost/pgzip v1.0.2-0.20170402124221-0bf5dcad4ada h1:3L+neHp83cTjegPdCiOxVOJtRIy7/8RldvMTsyPYH10=
|
||||
|
@ -659,8 +678,9 @@ github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI=
|
|||
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
|
||||
github.com/kylelemons/godebug v0.0.0-20160406211939-eadb3ce320cb/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k=
|
||||
github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
|
||||
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
|
||||
|
@ -684,8 +704,9 @@ github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVc
|
|||
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
|
||||
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
|
||||
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-colorable v0.1.9 h1:sqDoxXbdeALODt0DAeJCVp38ps9ZogZEAXjus69YV3U=
|
||||
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
|
||||
github.com/mattn/go-ieproxy v0.0.1 h1:qiyop7gCflfhwCzGyeT0gro3sF9AIg9HU98JORTkqfI=
|
||||
github.com/mattn/go-ieproxy v0.0.1/go.mod h1:pYabZ6IHcRpFh7vIaLfK7rdcWgFEb3SFJ6/gNWuh88E=
|
||||
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
|
@ -694,8 +715,9 @@ github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
|
|||
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
|
||||
github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=
|
||||
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
|
||||
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
|
||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||
github.com/mattn/go-runewidth v0.0.2/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
github.com/mattn/go-runewidth v0.0.3 h1:a+kO+98RDGEfo6asOGMmpodZq4FNtnGP54yps8BzLR4=
|
||||
github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
|
||||
|
@ -742,6 +764,7 @@ github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzE
|
|||
github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
|
||||
github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w=
|
||||
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
|
||||
github.com/oklog/oklog v0.3.2/go.mod h1:FCV+B7mhrz4o+ueLpx+KqkyXRGMWOYEvfiXtdGtbWGs=
|
||||
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA=
|
||||
github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU=
|
||||
|
@ -785,9 +808,9 @@ github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0
|
|||
github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I=
|
||||
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
|
||||
github.com/pierrec/lz4/v4 v4.1.8/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pierrec/lz4/v4 v4.1.9/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pierrec/lz4/v4 v4.1.11 h1:LVs17FAZJFOjgmJXl9Tf13WfLUvZq7/RjfEJrnwZ9OE=
|
||||
github.com/pierrec/lz4/v4 v4.1.11/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pierrec/lz4/v4 v4.1.12 h1:44l88ehTZAUGW4VlO1QC4zkilL99M6Y9MXNwEs0uzP8=
|
||||
github.com/pierrec/lz4/v4 v4.1.12/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
|
||||
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8 h1:KoWmjvw+nsYOo29YJK9vDA65RGE3NrOnUtO7a+RF9HU=
|
||||
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
|
||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||
|
@ -853,8 +876,9 @@ github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg
|
|||
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
|
||||
github.com/segmentio/kafka-go v0.2.0 h1:HtCSf6B4gN/87yc5qTl7WsxPKQIIGXLPPM1bMCPOsoY=
|
||||
github.com/segmentio/kafka-go v0.2.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
|
||||
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
|
||||
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
|
||||
github.com/sergi/go-diff v1.1.0 h1:we8PVUC3FE2uYfodKH/nBHMSetSfHDR6scGdBi+erh0=
|
||||
github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
|
||||
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
|
||||
|
@ -887,16 +911,20 @@ github.com/streadway/amqp v0.0.0-20190827072141-edfb9018d271/go.mod h1:AZpEONHx3
|
|||
github.com/streadway/handy v0.0.0-20190108123426-d5acb3125c2a/go.mod h1:qNTQ5P5JnDBl6z3cMAg/SywNDC5ABu5ApDIw6lUbRmI=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.2.0 h1:Hbg2NidpLE8veEBkEZTL3CvlkUIVzuU9jDplZO54c48=
|
||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
github.com/stretchr/objx v0.4.0 h1:M2gUjqZET1qApGOWNSnZ49BAIMX4F/1plDv3+l31EJ4=
|
||||
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
|
||||
github.com/stretchr/testify v1.2.0/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
|
||||
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
|
||||
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
|
||||
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
|
||||
github.com/tidwall/pretty v1.0.0/go.mod h1:XNkn88O1ChpSDQmQeStsy+sBenx6DDtFZJxhVysOjyk=
|
||||
github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
|
||||
github.com/tinylib/msgp v1.1.0 h1:9fQd+ICuRIu/ue4vxJZu6/LzxN0HwMds2nq/0cFvxHU=
|
||||
|
@ -934,7 +962,8 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
|
|||
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
|
||||
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/zeebo/xxh3 v0.13.0/go.mod h1:AQY73TOrhF3jNsdiM9zZOb8MThrYbZONHj7ryDBaLpg=
|
||||
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
|
||||
github.com/zeebo/xxh3 v1.0.1/go.mod h1:8VHV24/3AZLn3b6Mlp/KuC33LWH687Wq6EnziEB+rsA=
|
||||
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
|
||||
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
|
||||
go.mongodb.org/mongo-driver v1.0.3/go.mod h1:u7ryQJ+DOzQmeO7zB6MHyr8jkEQvC8vH7qLUO4lqsUM=
|
||||
|
@ -1001,6 +1030,7 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh
|
|||
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
|
||||
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
|
||||
golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871 h1:/pEO3GD/ABYAjuakUS6xSEmmlyVS4kxBNkeA9tLJiTI=
|
||||
golang.org/x/crypto v0.0.0-20211117183948-ae814b36b871/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
|
||||
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
|
||||
|
@ -1018,8 +1048,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0
|
|||
golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
|
||||
golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM=
|
||||
golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
|
||||
golang.org/x/exp v0.0.0-20211028214138-64b4c8e87d1a/go.mod h1:a3o/VtDNHN+dCVLEpzjjUHOzR+Ln3DHX056ZPzoZGGA=
|
||||
golang.org/x/exp v0.0.0-20211216164055-b2b84827b756 h1:/5Bs7sWi0i3rOVO5KnM55OwugpsD4bRW1zywKoZjbkI=
|
||||
golang.org/x/exp v0.0.0-20211216164055-b2b84827b756/go.mod h1:b9TAUYHmRtqA6klRHApnXMnj+OyLce4yF5cZCUbk2ps=
|
||||
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
|
@ -1055,9 +1085,10 @@ golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
|||
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
|
||||
golang.org/x/mod v0.5.1-0.20210830214625-1b1db11ec8f4/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||
golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57 h1:LQmS1nU0twXLA96Kt7U9qtHJEbBk3z6Q0V4UXjZkpr4=
|
||||
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
|
||||
golang.org/x/mod v0.6.0-dev.0.20211013180041-c96bc1413d57/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 h1:kQgndtyPBW/JIYERgdxfwMYh3AVStj88WQTlNDi2a+o=
|
||||
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3/go.mod h1:3p9vT2HGsQu2K1YbXdKPJLVgG5VJdoTa1poYQBtP1AY=
|
||||
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
|
@ -1113,6 +1144,7 @@ golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96b
|
|||
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20210505024714-0287a6fb4125/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20211118161319-6a13c67c3ce4/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
|
||||
golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd h1:O7DYs+zxREGLKzKoMQrtrEacpb0ZVXA5rIwylE2Xchk=
|
||||
|
@ -1200,7 +1232,6 @@ golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||
golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200727154430-2d971f7391a4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200828194041-157a740278f4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
|
@ -1225,10 +1256,11 @@ golang.org/x/sys v0.0.0-20210601080250-7ecdf8ef093b/go.mod h1:oPkhp1MJrh7nUepCBc
|
|||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210616045830-e2b7044e8c71/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211025201205-69cdffdb9359/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211117180635-dee7805ff2e1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e h1:fLOSk5Q00efkSvAm+4xcoXD+RRmLmmulPn5I3Y9F2EM=
|
||||
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad h1:ntjMns5wyP/fN65tdBD4g8J5w8n015+iIIs9rtjXkY0=
|
||||
golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 h1:JGgROgKl9N8DuW20oFS5gxc+lE67/N3FcwmBPMe7ArY=
|
||||
|
@ -1328,19 +1360,22 @@ golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4f
|
|||
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
|
||||
golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
|
||||
golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8=
|
||||
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
|
||||
golang.org/x/tools v0.1.8-0.20211029000441-d6a9af8af023/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
|
||||
golang.org/x/tools v0.1.10 h1:QjFRCZxdOhBJ/UNgnBZLbNV13DlbnK0quyivTnXJM20=
|
||||
golang.org/x/tools v0.1.10/go.mod h1:Uh6Zz+xoGYZom868N8YTex3t7RhtHDBrE8Gzo9bV56E=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f h1:GGU+dLjvlC3qDwqYgL6UgRmHXhOOgns0bZu2Ty5mm6U=
|
||||
golang.org/x/xerrors v0.0.0-20220411194840-2f41105eb62f/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
|
||||
gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
|
||||
gonum.org/v1/gonum v0.6.0/go.mod h1:9mxDZsDKxgMAuccQkewq682L+0eCu4dCN2yonUJTCLU=
|
||||
gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
|
||||
gonum.org/v1/gonum v0.9.3 h1:DnoIG+QAMaF5NvxnGe/oKsgKcAc6PcUyl8q0VetfQ8s=
|
||||
gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0=
|
||||
gonum.org/v1/gonum v0.11.0 h1:f1IJhK4Km5tBJmaiJXtk/PkL4cdVX6J+tGiM187uT5E=
|
||||
gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA=
|
||||
gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
|
||||
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
|
||||
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
|
||||
|
@ -1428,8 +1463,9 @@ google.golang.org/genproto v0.0.0-20210429181445-86c259c2b4ab/go.mod h1:P3QM42oQ
|
|||
google.golang.org/genproto v0.0.0-20210513213006-bf773b8c8384/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
|
||||
google.golang.org/genproto v0.0.0-20210517163617-5e0236093d7a/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
|
||||
google.golang.org/genproto v0.0.0-20210601144548-a796c710e9b6/go.mod h1:P3QM42oQyzQSnHPnZ/vqoCdDmzH28fzWByN9asMeM8A=
|
||||
google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79 h1:s1jFTXJryg4a1mew7xv03VZD8N9XjxFhk1o4Js4WvPQ=
|
||||
google.golang.org/genproto v0.0.0-20210630183607-d20f26d13c79/go.mod h1:yiaVoXHpRzHGyxV3o4DktVWY4mSUErTKaeEOq6C3t3U=
|
||||
google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350 h1:YxHp5zqIcAShDEvRr5/0rVESVS+njYF68PSdazrNLJo=
|
||||
google.golang.org/genproto v0.0.0-20220126215142-9970aeb2e350/go.mod h1:5CzLGKJ67TSI2B9POpiiyGha0AjJvZIUgRMt1dSmuhc=
|
||||
google.golang.org/grpc v1.17.0/go.mod h1:6QZJwpn2B+Zp71q/5VxRsJ6NXXVCE5NRUHRo+f3cWCs=
|
||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||
google.golang.org/grpc v1.20.0/go.mod h1:chYK+tFQF0nDUGJgXMSgLCQk3phJEuONr2DCgLDdAQM=
|
||||
|
@ -1460,6 +1496,7 @@ google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQ
|
|||
google.golang.org/grpc v1.37.1/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
|
||||
google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
|
||||
google.golang.org/grpc v1.39.0/go.mod h1:PImNr+rS9TWYb2O4/emRugxiyHZ5JyHW5F+RPnDzfrE=
|
||||
google.golang.org/grpc v1.40.0/go.mod h1:ogyxbiOoUXAkP+4+xa6PZSE9DZgIHtSpzjDTB9KAK34=
|
||||
google.golang.org/grpc v1.41.0/go.mod h1:U3l9uK9J0sini8mHphKoXyaqDA/8VyGnDee1zzIUK6k=
|
||||
google.golang.org/grpc v1.44.0 h1:weqSxi/TMs1SqFRMHCtBgXRs8k3X39QIDEZ0pRcttUg=
|
||||
google.golang.org/grpc v1.44.0/go.mod h1:k+4IHHFw41K8+bbowsex27ge2rCb65oeWqe4jJ590SU=
|
||||
|
@ -1475,12 +1512,14 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
|
|||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
|
||||
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ=
|
||||
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
|
||||
google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
|
||||
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
|
||||
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
|
||||
gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw=
|
||||
|
@ -1503,6 +1542,7 @@ gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
|||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
|
||||
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
@@ -15,7 +15,6 @@ import (
    "github.com/influxdata/flux/ast"
    "github.com/influxdata/flux/cmd/flux/cmd"
    "github.com/influxdata/flux/csv"
    "github.com/influxdata/flux/execute/table"
    "github.com/influxdata/flux/parser"
    fluxClient "github.com/influxdata/influxdb/flux/client"
    "github.com/influxdata/influxdb/tests"
@@ -47,11 +46,11 @@ func (t *testExecutor) Close() error {

// Run executes an e2e test case for every supported index type.
// On failure, logs collected from the server will be printed to stderr.
func (t *testExecutor) Run(pkg *ast.Package) error {
func (t *testExecutor) Run(pkg *ast.Package, fn cmd.TestResultFunc) error {
    var failed bool
    for _, idx := range []string{"inmem", "tsi1"} {
        logOut := &bytes.Buffer{}
        if err := t.run(pkg, idx, logOut); err != nil {
        if err := t.run(pkg, idx, logOut, fn); err != nil {
            failed = true
            _, _ = fmt.Fprintf(os.Stderr, "Failed for index %s:\n%v\n", idx, err)
            _, _ = io.Copy(os.Stderr, logOut)
@@ -66,12 +65,13 @@ func (t *testExecutor) Run(pkg *ast.Package) error {

// run executes an e2e test case against a specific index type.
// Server logs will be written to the specified logOut writer, for reporting.
func (t *testExecutor) run(pkg *ast.Package, index string, logOut io.Writer) error {
func (t *testExecutor) run(pkg *ast.Package, index string, logOut io.Writer, fn cmd.TestResultFunc) error {
    _, _ = fmt.Fprintf(os.Stderr, "Testing %s...\n", index)

    config := tests.NewConfig()
    config.HTTPD.FluxEnabled = true
    config.HTTPD.FluxLogEnabled = true
    config.HTTPD.FluxTesting = true
    config.Data.Index = index

    s := tests.NewServer(config)
@@ -99,10 +99,22 @@ func (t *testExecutor) run(pkg *ast.Package, index string, logOut io.Writer) err

    // During the first execution, we are performing the writes
    // that are in the testcase. We do not care about errors.
    _ = t.executeWithOptions(bucketOpt, t.writeOptAST, pkg, s.URL(), logOut, false)
    _ = t.executeWithOptions(bucketOpt, t.writeOptAST, pkg, s.URL(), logOut,
        func(ctx context.Context, results flux.ResultIterator) error {
            for results.More() {
                res := results.Next()
                if err := res.Tables().Do(func(table flux.Table) error {
                    table.Done()
                    return nil
                }); err != nil {
                    return err
                }
            }
            return nil
        })

    // Execute the read pass.
    return t.executeWithOptions(bucketOpt, t.readOptAST, pkg, s.URL(), logOut, true)
    return t.executeWithOptions(bucketOpt, t.readOptAST, pkg, s.URL(), logOut, fn)
}

// executeWithOptions runs a Flux query against a running server via the HTTP API.
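The closure above pins down the callback shape used throughout this refactor. For reference, the `cmd.TestResultFunc` parameter is assumed to look roughly like the following sketch, inferred from that closure rather than copied from the flux source:

```go
// A sketch of the callback type used above; the real definition is
// cmd.TestResultFunc in github.com/influxdata/flux/cmd/flux/cmd.
package sketch

import (
    "context"

    "github.com/influxdata/flux"
)

// TestResultFunc receives the results of one test query and returns an error
// if they indicate a failure.
type TestResultFunc func(ctx context.Context, results flux.ResultIterator) error
```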
@@ -114,7 +126,7 @@ func (t *testExecutor) executeWithOptions(
    pkg *ast.Package,
    serverUrl string,
    logOut io.Writer,
    checkOutput bool,
    fn cmd.TestResultFunc,
) error {
    options := optionsAST.Copy().(*ast.File)
    options.Body = append([]ast.Statement{bucketOpt}, options.Body...)
@@ -164,31 +176,7 @@
    }
    defer r.Release()

    wasDiff := false
    if checkOutput {
        for r.More() {
            wasDiff = true
            v := r.Next()
            if err := v.Tables().Do(func(tbl flux.Table) error {
                // The data returned here is the result of `testing.diff`, so any result means that
                // a comparison of two tables showed inequality. Capture that inequality as part of the error.
                // XXX: rockstar (08 Dec 2020) - This could use some ergonomic work, as the diff testOutput
                // is not exactly "human readable."
                _, _ = fmt.Fprintln(logOut, table.Stringify(tbl))
                return nil
            }); err != nil {
                return err
            }
        }
    }
    r.Release()
    if err := r.Err(); err != nil {
        return err
    }
    if wasDiff {
        return errors.New("test failed - diff table in output")
    }
    return nil
    return fn(t.ctx, r)
}

// This options definition puts to() in the path of the CSV input. The tests
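With `checkOutput` gone, deciding whether a test passed is now the caller's job via the result callback. A minimal sketch of a callback the harness could pass as `fn`, reproducing the removed diff check under the assumption that `cmd.TestResultFunc` has the signature shown above (`checkDiffResults` is an illustrative name, not part of the change):

```go
package harness

import (
    "context"
    "errors"
    "fmt"
    "io"

    "github.com/influxdata/flux"
    "github.com/influxdata/flux/cmd/flux/cmd"
    "github.com/influxdata/flux/execute/table"
)

// checkDiffResults mirrors the removed checkOutput logic: any table produced
// by testing.diff means the comparison found a difference, so it is logged
// and the test case is reported as failed.
func checkDiffResults(logOut io.Writer) cmd.TestResultFunc {
    return func(ctx context.Context, results flux.ResultIterator) error {
        wasDiff := false
        for results.More() {
            res := results.Next()
            if err := res.Tables().Do(func(tbl flux.Table) error {
                wasDiff = true
                // Stringify the diff table so the failure is visible in the logs.
                _, _ = fmt.Fprintln(logOut, table.Stringify(tbl))
                return nil
            }); err != nil {
                return err
            }
        }
        if err := results.Err(); err != nil {
            return err
        }
        if wasDiff {
            return errors.New("test failed - diff table in output")
        }
        return nil
    }
}
```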
@@ -198,8 +186,8 @@ const writeOptSource = `
import "testing"
import c "csv"

option testing.loadStorage = (csv) => {
    return c.from(csv: csv) |> to(bucket: bucket)
option testing.load = (tables=<-) => {
    return tables |> to(bucket: bucket)
}
`

@@ -210,7 +198,7 @@ const readOptSource = `
import "testing"
import c "csv"

option testing.loadStorage = (csv) => {
option testing.load = (tables=<-) => {
    return from(bucket: bucket)
}
`
@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc-gen-go v1.28.1
// protoc v3.17.3
// source: binary.proto

@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc-gen-go v1.28.1
// protoc v3.17.3
// source: internal/internal.proto
@@ -40,6 +40,7 @@ type Config struct {
    WriteTracing bool `toml:"write-tracing"`
    FluxEnabled bool `toml:"flux-enabled"`
    FluxLogEnabled bool `toml:"flux-log-enabled"`
    FluxTesting bool `toml:"-"`
    PprofEnabled bool `toml:"pprof-enabled"`
    PprofAuthEnabled bool `toml:"pprof-auth-enabled"`
    DebugPprofEnabled bool `toml:"debug-pprof-enabled"`

@@ -72,6 +73,7 @@ func NewConfig() Config {
        Enabled: true,
        FluxEnabled: false,
        FluxLogEnabled: false,
        FluxTesting: false,
        BindAddress: DefaultBindAddress,
        LogEnabled: true,
        PprofEnabled: true,
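The new `FluxTesting` field is tagged `toml:"-"`, so it is never read from or written to a config file and can only be flipped programmatically, as the test harness does above. A small hedged sketch of that behaviour, assuming the `github.com/BurntSushi/toml` encoder; the struct and field selection below are illustrative, not the real httpd config:

```go
package main

import (
    "os"

    "github.com/BurntSushi/toml"
)

// httpdConfig is a trimmed-down stand-in for the HTTPD config; only the
// struct tags matter for this illustration.
type httpdConfig struct {
    FluxEnabled bool `toml:"flux-enabled"`
    FluxTesting bool `toml:"-"` // hidden: never serialized to TOML
}

func main() {
    cfg := httpdConfig{FluxEnabled: true, FluxTesting: true}
    // Only flux-enabled appears in the output; flux-testing has no TOML key.
    _ = toml.NewEncoder(os.Stdout).Encode(cfg)
}
```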
@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc-gen-go v1.28.1
// protoc v3.17.3
// source: internal/meta.proto

@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc-gen-go v1.28.1
// protoc v3.17.3
// source: source.proto
@@ -296,6 +296,8 @@ func (c *floatMultiShardArrayCursor) reset(cur cursors.FloatArrayCursor, itrs cu
    if cond != nil {
        if c.filter == nil {
            c.filter = newFloatFilterArrayCursor(cond)
        } else {
            c.filter.cond = cond
        }
        c.filter.reset(cur)
        cur = c.filter

@@ -1299,6 +1301,8 @@ func (c *integerMultiShardArrayCursor) reset(cur cursors.IntegerArrayCursor, itr
    if cond != nil {
        if c.filter == nil {
            c.filter = newIntegerFilterArrayCursor(cond)
        } else {
            c.filter.cond = cond
        }
        c.filter.reset(cur)
        cur = c.filter

@@ -2302,6 +2306,8 @@ func (c *unsignedMultiShardArrayCursor) reset(cur cursors.UnsignedArrayCursor, i
    if cond != nil {
        if c.filter == nil {
            c.filter = newUnsignedFilterArrayCursor(cond)
        } else {
            c.filter.cond = cond
        }
        c.filter.reset(cur)
        cur = c.filter

@@ -3305,6 +3311,8 @@ func (c *stringMultiShardArrayCursor) reset(cur cursors.StringArrayCursor, itrs
    if cond != nil {
        if c.filter == nil {
            c.filter = newStringFilterArrayCursor(cond)
        } else {
            c.filter.cond = cond
        }
        c.filter.reset(cur)
        cur = c.filter

@@ -3730,6 +3738,8 @@ func (c *booleanMultiShardArrayCursor) reset(cur cursors.BooleanArrayCursor, itr
    if cond != nil {
        if c.filter == nil {
            c.filter = newBooleanFilterArrayCursor(cond)
        } else {
            c.filter.cond = cond
        }
        c.filter.reset(cur)
        cur = c.filter

@@ -242,6 +242,8 @@ func (c *{{.name}}MultiShardArrayCursor) reset(cur cursors.{{.Name}}ArrayCursor,
    if cond != nil {
        if c.filter == nil {
            c.filter = new{{.Name}}FilterArrayCursor(cond)
        } else {
            c.filter.cond = cond
        }
        c.filter.reset(cur)
        cur = c.filter
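The same two added lines appear in every generated cursor type and in the template that generates them: when a filter cursor is reused across a reset, its condition must be replaced as well, otherwise the predicate from the previous cursor keeps filtering the new data. A simplified, self-contained sketch of the failure mode, using toy types and hypothetical names rather than the real cursors:

```go
package main

import "fmt"

// filterCursor is a toy stand-in for the generated *FilterArrayCursor types.
type filterCursor struct {
    cond func(v int) bool
}

// resetStale models the old behaviour: an existing filter keeps its previous
// condition.
func resetStale(c *filterCursor, cond func(int) bool) {
    if c.cond == nil {
        c.cond = cond
    } // else: condition silently left as-is (the bug)
}

// resetFixed models the patched behaviour: the condition is replaced on every
// reset, matching the added `c.filter.cond = cond` lines above.
func resetFixed(c *filterCursor, cond func(int) bool) {
    if c.cond == nil {
        c.cond = cond
    } else {
        c.cond = cond
    }
}

func main() {
    even := func(v int) bool { return v%2 == 0 }
    odd := func(v int) bool { return v%2 == 1 }

    stale := &filterCursor{}
    resetStale(stale, even)
    resetStale(stale, odd) // condition should now be "odd", but stays "even"
    fmt.Println("stale cursor matches 3:", stale.cond(3)) // false: wrong rows filtered

    fixed := &filterCursor{}
    resetFixed(fixed, even)
    resetFixed(fixed, odd)
    fmt.Println("fixed cursor matches 3:", fixed.cond(3)) // true: new predicate applied
}
```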
@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc-gen-go v1.28.1
// protoc v3.17.3
// source: predicate.proto

@@ -1,6 +1,6 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.27.1
// protoc-gen-go v1.28.1
// protoc v3.17.3
// source: storage_common.proto
test-flux.sh
@@ -31,15 +31,72 @@ build_test_harness() {
    "$GO" build -o fluxtest ./internal/cmd/fluxtest-harness-influxdb
}

# Many tests targeting 3rd party databases are not yet supported in CI and should be filtered out.
DB_INTEGRATION_WRITE_TESTS=integration_mqtt_pub,integration_sqlite_write_to,integration_vertica_write_to,integration_mssql_write_to,integration_mysql_write_to,integration_mariadb_write_to,integration_pg_write_to,integration_hdb_write_to
DB_INTEGRATION_READ_TESTS=integration_sqlite_read_from_seed,integration_sqlite_read_from_nonseed,integration_vertica_read_from_seed,integration_vertica_read_from_nonseed,integration_mssql_read_from_seed,integration_mssql_read_from_nonseed,integration_mariadb_read_from_seed,integration_mariadb_read_from_nonseed,integration_mysql_read_from_seed,integration_mysql_read_from_nonseed,integration_pg_read_from_seed,integration_pg_read_from_nonseed,integration_hdb_read_from_seed,integration_hdb_read_from_nonseed
DB_INTEGRATION_INJECTION_TESTS="integration_sqlite_injection,integration_hdb_injection,integration_pg_injection,integration_mysql_injection,integration_mariadb_injection,integration_mssql_injection"
DB_TESTS="${DB_INTEGRATION_WRITE_TESTS},${DB_INTEGRATION_READ_TESTS},${DB_INTEGRATION_INJECTION_TESTS}"
skipped_tests() {
    doc=$(cat <<ENDSKIPS
# Tests skipped because a feature flag must be enabled
# the flag is: removeRedundantSortNodes
remove_sort
remove_sort_more_columns
remove_sort_aggregate
remove_sort_selector
remove_sort_filter_range
remove_sort_aggregate_window
remove_sort_join

# Other skipped tests
align_time
buckets
covariance
cumulative_sum_default
cumulative_sum_noop
cumulative_sum
difference_columns
fill
fill_bool
fill_float
fill_time
fill_int
fill_uint
fill_string
group
group_nulls
histogram_normalize
histogram_quantile_minvalue
histogram_quantile
histogram
key_values_host_name
secrets
set
shapeDataWithFilter
shapeData
shift_negative_duration
unique
window_null

# https://github.com/influxdata/influxdb/issues/23757
# Flux acceptance tests for group |> first (and last)
push_down_group_one_tag_first
push_down_group_all_filter_field_first
push_down_group_one_tag_filter_field_first
push_down_group_one_tag_last
push_down_group_all_filter_field_last
push_down_group_one_tag_filter_field_last

windowed_by_time_count # TODO(bnpfeife) broken by flux@05a1065f, OptimizeAggregateWindow
windowed_by_time_sum # TODO(bnpfeife) broken by flux@05a1065f, OptimizeAggregateWindow
windowed_by_time_mean # TODO(bnpfeife) broken by flux@05a1065f, OptimizeAggregateWindow
ENDSKIPS
)
    echo "$doc" | sed '/^[[:space:]]*$/d' | sed 's/[[:space:]]*#.*$//' | tr '\n' ',' | sed 's/,$//'
}

run_integration_tests() {
    log "Running integration tests..."
    ./fluxtest -v -p flux.zip -p flux/stdlib --skip "$DB_TESTS"
    ./fluxtest \
        -v \
        -p flux.zip \
        -p flux/ \
        --skip "$(skipped_tests)"
}

cleanup() {
@ -10153,375 +10153,6 @@ func TestGroupByEndToEnd(t *testing.T) {
|
|||
assert.Equal(t, `{"results":[{"statement_id":0,"series":[{"name":"m0","columns":["time","scount"],"values":[["2021-05-10T00:00:00Z",10],["2021-05-11T00:00:00Z",5],["2021-05-12T00:00:00Z",3],["2021-05-13T00:00:00Z",7],["2021-05-14T00:00:00Z",4],["2021-05-15T00:00:00Z",null]]}]}]}`, results)
|
||||
}
|
||||
|
||||
func TestFluxBasicEndToEnd(t *testing.T) {
|
||||
config := NewConfig()
|
||||
config.HTTPD.FluxEnabled = true
|
||||
s := OpenServer(config)
|
||||
defer s.Close()
|
||||
|
||||
s.CreateDatabase(t.Name())
|
||||
defer s.DropDatabase(t.Name())
|
||||
u, err := url.Parse(s.URL())
|
||||
assert.NoError(t, err)
|
||||
u.Path = "/api/v2/query"
|
||||
httpClient := &http.Client{}
|
||||
|
||||
{
|
||||
// Query with json body
|
||||
query := fluxClient.QueryRequest{}.WithDefaults()
|
||||
query.Query = `import "influxdata/influxdb/v1" v1.databases()`
|
||||
j, err := json.Marshal(query)
|
||||
assert.NoError(t, err)
|
||||
req, err := http.NewRequest("POST", u.String(), bytes.NewBuffer(j))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
assert.NoError(t, err)
|
||||
resp, err := httpClient.Do(req)
|
||||
assert.NoError(t, err)
|
||||
defer resp.Body.Close()
|
||||
b, err := io.ReadAll(resp.Body)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t,
|
||||
strings.ReplaceAll(`,result,table,organizationID,databaseName,retentionPolicy,retentionPeriod,default,bucketId
|
||||
,_result,0,,TestFluxBasicEndToEnd,autogen,0,true,
|
||||
|
||||
`, "\n", "\r\n"),
|
||||
string(b))
|
||||
}
|
||||
{
|
||||
// Query with json body, with annotations
|
||||
query := fluxClient.QueryRequest{}.WithDefaults()
|
||||
query.Query = `import "influxdata/influxdb/v1" v1.databases()`
|
||||
query.Dialect.Annotations = csv.DefaultDialect().Annotations
|
||||
j, err := json.Marshal(query)
|
||||
assert.NoError(t, err)
|
||||
req, err := http.NewRequest("POST", u.String(), bytes.NewBuffer(j))
|
||||
req.Header.Set("Content-Type", "application/json")
|
||||
assert.NoError(t, err)
|
||||
resp, err := httpClient.Do(req)
|
||||
assert.NoError(t, err)
|
||||
defer resp.Body.Close()
|
||||
b, err := io.ReadAll(resp.Body)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t,
|
||||
strings.ReplaceAll(`#datatype,string,long,string,string,string,long,boolean,string
|
||||
#group,false,false,true,false,false,false,false,false
|
||||
#default,_result,,,,,,,
|
||||
,result,table,organizationID,databaseName,retentionPolicy,retentionPeriod,default,bucketId
|
||||
,,0,,TestFluxBasicEndToEnd,autogen,0,true,
|
||||
|
||||
`, "\n", "\r\n"),
|
||||
string(b))
|
||||
assert.NoError(t, err)
|
||||
}
|
||||
{
|
||||
// Query with raw flux
|
||||
assert.NoError(t, err)
|
||||
req, err := http.NewRequest("POST", u.String(), bytes.NewBuffer([]byte(`import "influxdata/influxdb/v1" v1.databases()`)))
|
||||
req.Header.Set("Content-Type", "application/vnd.flux")
|
||||
assert.NoError(t, err)
|
||||
resp, err := httpClient.Do(req)
|
||||
assert.NoError(t, err)
|
||||
defer resp.Body.Close()
|
||||
b, err := io.ReadAll(resp.Body)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t,
|
||||
strings.ReplaceAll(`,result,table,organizationID,databaseName,retentionPolicy,retentionPeriod,default,bucketId
|
||||
,_result,0,,TestFluxBasicEndToEnd,autogen,0,true,
|
||||
|
||||
`, "\n", "\r\n"),
|
||||
string(b))
|
||||
}
|
||||
{
|
||||
// Make sure runFluxBuiltinTest complains when it finds a diff
|
||||
testFluxTmpl := `package universe_test
|
||||
import "testing"
|
||||
option now = () => (2030-01-01T00:00:00Z)
|
||||
|
||||
inData = "#datatype,string,long,string,string,dateTime:RFC3339,unsignedLong
|
||||
#group,false,false,true,true,false,false
|
||||
#default,_result,,,,,
|
||||
,result,table,_measurement,_field,_time,_value
|
||||
,,0,Sgf,DlXwgrw,2018-12-18T22:11:05Z,70
|
||||
,,0,Sgf,DlXwgrw,2018-12-18T22:11:15Z,50"
|
||||
|
||||
outData = "#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,string,string,unsignedLong
|
||||
#group,false,false,true,true,true,true,false
|
||||
#default,_result,,,,,,
|
||||
,result,table,_start,_stop,_measurement,_field,_value
|
||||
,,0,2018-12-01T00:00:00Z,2030-01-01T00:00:00Z,Sgf,DlXwgrw,%d"
|
||||
t_sum = (table=<-) => (table |> range(start: 2018-12-01T00:00:00Z) |> sum())
|
||||
test _sum = () => ({input: testing.loadStorage(csv: inData), want: testing.loadMem(csv: outData), fn: t_sum})
|
||||
`
|
||||
// This test passes: 70+50=120
|
||||
databasePass := t.Name() + "_pass"
|
||||
s.CreateDatabase(databasePass)
|
||||
defer s.DropDatabase(databasePass)
|
||||
file := mustParse(fmt.Sprintf(testFluxTmpl, 120))
|
||||
bucket := databasePass + "/autogen"
|
||||
runFluxBuiltinTest(t, file, u, bucket, false)
|
||||
err := runFluxBuiltinTest(t, file, u, bucket, true)
|
||||
assert.NoError(t, err)
|
||||
|
||||
// We want to make sure the end to end tests are doing something. We assert that the test runner returns
|
||||
// an error on diffs
|
||||
databaseFail := t.Name() + "_fail"
|
||||
s.CreateDatabase(databaseFail)
|
||||
defer s.DropDatabase(databaseFail)
|
||||
file = mustParse(fmt.Sprintf(testFluxTmpl, 121))
|
||||
bucket = databaseFail + "/autogen"
|
||||
runFluxBuiltinTest(t, file, u, bucket, false)
|
||||
err = runFluxBuiltinTest(t, file, u, bucket, true)
|
||||
assert.EqualError(t, err, "test failed - diff table in output")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFluxRegressionEndToEnd(t *testing.T) {
|
||||
config := NewConfig()
|
||||
config.HTTPD.FluxEnabled = true
|
||||
s := OpenServer(config)
|
||||
defer s.Close()
|
||||
|
||||
s.CreateDatabase(t.Name())
|
||||
defer s.DropDatabase(t.Name())
|
||||
u, err := url.Parse(s.URL())
|
||||
assert.NoError(t, err)
|
||||
u.Path = "/api/v2/query"
|
||||
httpClient := &http.Client{}
|
||||
|
||||
{
|
||||
// buckets query
|
||||
assert.NoError(t, err)
|
||||
req, err := http.NewRequest("POST", u.String(), bytes.NewBuffer([]byte(`buckets()`)))
|
||||
req.Header.Set("Content-Type", "application/vnd.flux")
|
||||
assert.NoError(t, err)
|
||||
resp, err := httpClient.Do(req)
|
||||
assert.NoError(t, err)
|
||||
defer resp.Body.Close()
|
||||
b, err := io.ReadAll(resp.Body)
|
||||
assert.NoError(t, err)
|
||||
assert.Equal(t,
|
||||
strings.ReplaceAll(`,result,table,name,id,organizationID,retentionPolicy,retentionPeriod
|
||||
,_result,0,TestFluxRegressionEndToEnd/autogen,,,autogen,0
|
||||
|
||||
`, "\n", "\r\n"),
|
||||
string(b))
|
||||
}
|
||||
}
|
||||
|
||||
var FluxEndToEndSkipList = map[string]map[string]string{
|
||||
"universe": {
|
||||
// TODO(adam) determine the reason for these test failures.
|
||||
"cov": "Reason TBD",
|
||||
"covariance": "Reason TBD",
|
||||
"cumulative_sum": "Reason TBD",
|
||||
"cumulative_sum_default": "Reason TBD",
|
||||
"cumulative_sum_noop": "Reason TBD",
|
||||
"drop_non_existent": "Reason TBD",
|
||||
"first": "Reason TBD",
|
||||
"highestAverage": "Reason TBD",
|
||||
"highestMax": "Reason TBD",
|
||||
"histogram": "Reason TBD",
|
||||
"histogram_normalize": "Reason TBD",
|
||||
"histogram_quantile": "Reason TBD",
|
||||
"join": "Reason TBD",
|
||||
"join_across_measurements": "Reason TBD",
|
||||
"join_agg": "Reason TBD",
|
||||
"keep_non_existent": "Reason TBD",
|
||||
"key_values": "Reason TBD",
|
||||
"key_values_host_name": "Reason TBD",
|
||||
"last": "Reason TBD",
|
||||
"lowestAverage": "Reason TBD",
|
||||
"max": "Reason TBD",
|
||||
"min": "Reason TBD",
|
||||
"sample": "Reason TBD",
|
||||
"selector_preserve_time": "Reason TBD",
|
||||
"shift": "Reason TBD",
|
||||
"shift_negative_duration": "Reason TBD",
|
||||
"task_per_line": "Reason TBD",
|
||||
"top": "Reason TBD",
|
||||
"union": "Reason TBD",
|
||||
"union_heterogeneous": "Reason TBD",
|
||||
"unique": "Reason TBD",
|
||||
"distinct": "Reason TBD",
|
||||
|
||||
// it appears these occur when writing the input data. `to` may not be null safe.
|
||||
"fill_bool": "failed to read meta data: panic: interface conversion: interface {} is nil, not uint64",
|
||||
"fill_float": "failed to read meta data: panic: interface conversion: interface {} is nil, not uint64",
|
||||
"fill_int": "failed to read meta data: panic: interface conversion: interface {} is nil, not uint64",
|
||||
"fill_string": "failed to read meta data: panic: interface conversion: interface {} is nil, not uint64",
|
||||
"fill_time": "failed to read meta data: panic: interface conversion: interface {} is nil, not uint64",
|
||||
"fill_uint": "failed to read meta data: panic: interface conversion: interface {} is nil, not uint64",
|
||||
"window_null": "failed to read meta data: panic: interface conversion: interface {} is nil, not float64",
|
||||
|
||||
// these may just be missing calls to range() in the tests. easy to fix in a new PR.
|
||||
"group_nulls": "unbounded test",
|
||||
"integral": "unbounded test",
|
||||
"integral_columns": "unbounded test",
|
||||
"map": "unbounded test",
|
||||
"join_missing_on_col": "unbounded test",
|
||||
"join_use_previous": "unbounded test (https://github.com/influxdata/flux/issues/2996)",
|
||||
"join_panic": "unbounded test (https://github.com/influxdata/flux/issues/3465)",
|
||||
"rowfn_with_import": "unbounded test",
|
||||
|
||||
// the following tests have a difference between the CSV-decoded input table, and the storage-retrieved version of that table
|
||||
"columns": "group key mismatch",
|
||||
"set": "column order mismatch",
|
||||
"simple_max": "_stop missing from expected output",
|
||||
"derivative": "time bounds mismatch (engine uses now() instead of bounds on input table)",
|
||||
"difference_columns": "data write/read path loses columns x and y",
|
||||
"keys": "group key mismatch",
|
||||
|
||||
// failed to read meta data errors: the CSV encoding is incomplete probably due to data schema errors. needs more detailed investigation to find root cause of error
|
||||
// "filter_by_regex": "failed to read metadata",
|
||||
// "filter_by_tags": "failed to read metadata",
|
||||
"group": "failed to read metadata",
|
||||
"group_except": "failed to read metadata",
|
||||
"group_ungroup": "failed to read metadata",
|
||||
"pivot_mean": "failed to read metadata",
|
||||
"histogram_quantile_minvalue": "failed to read meta data: no column with label _measurement exists",
|
||||
"increase": "failed to read meta data: table has no _value column",
|
||||
|
||||
"string_max": "error: invalid use of function: *functions.MaxSelector has no implementation for type string (https://github.com/influxdata/platform/issues/224)",
|
||||
"null_as_value": "null not supported as value in influxql (https://github.com/influxdata/platform/issues/353)",
|
||||
"string_interp": "string interpolation not working as expected in flux (https://github.com/influxdata/platform/issues/404)",
|
||||
"to": "to functions are not supported in the testing framework (https://github.com/influxdata/flux/issues/77)",
|
||||
"covariance_missing_column_1": "need to support known errors in new test framework (https://github.com/influxdata/flux/issues/536)",
|
		"covariance_missing_column_2": "need to support known errors in new test framework (https://github.com/influxdata/flux/issues/536)",
		"drop_before_rename": "need to support known errors in new test framework (https://github.com/influxdata/flux/issues/536)",
		"drop_referenced": "need to support known errors in new test framework (https://github.com/influxdata/flux/issues/536)",
		"yield": "yield requires special test case (https://github.com/influxdata/flux/issues/535)",

		"window_group_mean_ungroup": "window trigger optimization modifies sort order of its output tables (https://github.com/influxdata/flux/issues/1067)",

		"median_column": "failing in different ways (https://github.com/influxdata/influxdb/issues/13909)",
		"dynamic_query": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",

		"to_int": "dateTime conversion issue: https://github.com/influxdata/influxdb/issues/14575",
		"to_uint": "dateTime conversion issue: https://github.com/influxdata/influxdb/issues/14575",

		"holt_winters_panic": "Expected output is an empty table which breaks the testing framework (https://github.com/influxdata/influxdb/issues/14749)",
		"map_nulls": "to cannot write null values",
	},
	"array": {
		"from": "test not meant to be consumed by influxdb",
		"from_group": "test not meant to be consumed by influxdb",
	},
	"experimental": {
		"set": "Reason TBD",
		"join": "unbounded test",
		"alignTime": "unbounded test",
	},
	"experimental/geo": {
		"filterRowsNotStrict": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",
		"filterRowsStrict": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",
		"gridFilterLevel": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",
		"gridFilter": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",
		"groupByArea": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",
		"filterRowsPivoted": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",
		"shapeDataWithFilter": "tableFind does not work in e2e tests: https://github.com/influxdata/influxdb/issues/13975",
		"shapeData": "test run before to() is finished: https://github.com/influxdata/influxdb/issues/13975",
	},
	"regexp": {
		"replaceAllString": "Reason TBD",
	},
	"http": {
		"http_endpoint": "need ability to test side effects in e2e tests: (https://github.com/influxdata/flux/issues/1723)",
	},
	"influxdata/influxdb/schema": {
		"show_tag_keys": "failing due to bug in test, unskip this after upgrading from Flux v0.91.0",
	},
	"influxdata/influxdb/monitor": {
		"state_changes_big_any_to_any": "unbounded test",
		"state_changes_big_info_to_ok": "unbounded test",
		"state_changes_big_ok_to_info": "unbounded test",
		"state_changes_any_to_any": "test run before to() is finished: https://github.com/influxdata/influxdb/issues/13975",
		"state_changes_info_to_any": "test run before to() is finished: https://github.com/influxdata/influxdb/issues/13975",
		"state_changes_invalid_any_to_any": "test run before to() is finished: https://github.com/influxdata/influxdb/issues/13975",
		"state_changes": "test run before to() is finished: https://github.com/influxdata/influxdb/issues/13975",
	},
	"influxdata/influxdb/secrets": {
		"secrets": "Cannot inject custom deps into the test framework so the secrets don't look up correctly",
	},
	"internal/promql": {
		"join": "unbounded test",
	},
	"testing/chronograf": {
		"buckets": "unbounded test",
		"aggregate_window_count": "flakey test: https://github.com/influxdata/influxdb/issues/18463",
	},
	"testing/kapacitor": {
		"fill_default": "unknown field type for f1",
	},
	"testing/pandas": {
		"extract_regexp_findStringIndex": "pandas.map does not correctly handle returned arrays (https://github.com/influxdata/flux/issues/1387)",
		"partition_strings_splitN": "pandas.map does not correctly handle returned arrays (https://github.com/influxdata/flux/issues/1387)",
	},
	"testing/promql": {
		"emptyTable": "tests a source",
		"year": "flakey test: https://github.com/influxdata/influxdb/issues/15667",
		"extrapolatedRate_counter_rate": "option \"testing.loadStorage\" reassigned: https://github.com/influxdata/flux/issues/3155",
		"extrapolatedRate_nocounter": "option \"testing.loadStorage\" reassigned: https://github.com/influxdata/flux/issues/3155",
		"extrapolatedRate_norate": "option \"testing.loadStorage\" reassigned: https://github.com/influxdata/flux/issues/3155",
		"linearRegression_nopredict": "option \"testing.loadStorage\" reassigned: https://github.com/influxdata/flux/issues/3155",
		"linearRegression_predict": "option \"testing.loadStorage\" reassigned: https://github.com/influxdata/flux/issues/3155",
	},
	"testing/influxql": {
		"cumulative_sum": "invalid test data requires loadStorage to be overridden. See https://github.com/influxdata/flux/issues/3145",
		"elapsed": "failing since split with Flux upgrade: https://github.com/influxdata/influxdb/issues/19568",
	},
	"contrib/RohanSreerama5/naiveBayesClassifier": {
		"bayes": "error calling tableFind: ",
	},
}

func TestFluxEndToEnd(t *testing.T) {
	runEndToEnd(t, stdlib.FluxTestPackages)
}

func runEndToEnd(t *testing.T, pkgs []*ast.Package) {
	config := NewConfig()
	config.HTTPD.FluxEnabled = true
	s := OpenServer(config)
	defer s.Close()

	for _, pkg := range pkgs {
		test := func(t *testing.T, f func(t *testing.T)) {
			t.Run(pkg.Path, f)
		}
		if pkg.Path == "universe" {
			test = func(t *testing.T, f func(t *testing.T)) {
				f(t)
			}
		}

		test(t, func(t *testing.T) {
			for _, file := range pkg.Files {
				name := strings.TrimSuffix(file.Name, "_test.flux")
				t.Run(name, func(t *testing.T) {
					if reason, ok := FluxEndToEndSkipList[pkg.Path][name]; ok {
						t.Skip(reason)
					}
					// Set up the database & URL.
					// We don't properly support slashes in database names for flux queries.
					databaseName := strings.ReplaceAll(t.Name(), "/", "_")
					s.CreateDatabase(databaseName)
					defer s.DropDatabase(databaseName)
					u, err := url.Parse(s.URL())
					assert.NoError(t, err)
					u.Path = "/api/v2/query"
					bucket := databaseName + "/autogen"

					// Run the end-to-end test. The first time we ignore the results, but as a side
					// effect the data is loaded into the TSDB store. The second test runs with `from`
					// gathering data from TSDB.
					runFluxBuiltinTest(t, file, u, bucket, false)
					err = runFluxBuiltinTest(t, file, u, bucket, true)
				})
			}
		})
	}
}

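`runFluxBuiltinTest` is referenced above but defined outside the lines shown in this hunk. Purely as an illustration of the two-pass mechanism the comment describes (the first pass only seeds the TSDB via `to()`, the second pass reads the data back with `from()` and verifies it), a helper of roughly that shape is sketched below. The package name, the `runFluxBuiltinTestSketch` name, and the raw-Flux POST are assumptions for the sketch, not the PR's actual implementation.

```go
// Sketch only: approximates the role of runFluxBuiltinTest; the real helper
// in this file is not shown in the hunk above and differs in detail.
package tests

import (
	"io"
	"net/http"
	"net/url"
	"strings"
	"testing"

	"github.com/influxdata/flux/ast"
)

func runFluxBuiltinTestSketch(t *testing.T, file *ast.File, u *url.URL, bucket string, checkResult bool) error {
	t.Helper()

	// Render the Flux test file back to source text. How the real helper
	// points the test at `bucket` (e.g. by rewriting to()/from() options in
	// the AST) is outside this sketch.
	src := ast.Format(file)

	// POST raw Flux to the /api/v2/query endpoint of the test server.
	resp, err := http.Post(u.String(), "application/vnd.flux", strings.NewReader(src))
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	if checkResult && resp.StatusCode != http.StatusOK {
		// Only the second (verification) pass treats a failure as a test error.
		t.Errorf("flux end-to-end test failed (%s): %s", resp.Status, body)
	}
	return nil
}
```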
func makeTestPackage(file *ast.File) *ast.Package {
	file = file.Copy().(*ast.File)
	file.Package.Name.Name = "main"
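The hunk is truncated here by the diff's context window, so the rest of `makeTestPackage` is not visible. Given how it is used above, its remainder presumably wraps the rewritten file into a single-file "main" package; the sketch below shows that presumed overall shape, and only its first lines are actually part of the hunk.

```go
// Presumed full shape of makeTestPackage; the return statement is an
// assumption, not lines taken from this PR.
package tests

import "github.com/influxdata/flux/ast"

func makeTestPackage(file *ast.File) *ast.Package {
	file = file.Copy().(*ast.File)
	file.Package.Name.Name = "main"
	return &ast.Package{
		Package: "main",
		Files:   []*ast.File{file},
	}
}
```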
@@ -1,6 +1,6 @@
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.27.1
+// 	protoc-gen-go v1.28.1
 // 	protoc v3.17.3
 // source: internal/fieldsindex.proto