Refactor Platform according to new organization in `flux` (#966)
We reorganized the functions in flux to have the structure: /functions /inputs /transformations /outputs this PR catches up platform to work with the new package layout. As a separate refactoring issue, we should discuss: from(bucket: ) should migrate from flux --> platform to_http and to_kafka should migrate from platform --> fluxpull/10616/head
parent
c6cd482e40
commit
58f4e9fc0b
|
@ -5,8 +5,8 @@ import (
|
|||
"os"
|
||||
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/storage"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/inputs/storage"
|
||||
"github.com/influxdata/flux/repl"
|
||||
"github.com/influxdata/platform"
|
||||
"github.com/influxdata/platform/http"
|
||||
|
@ -66,7 +66,7 @@ func fluxQueryF(cmd *cobra.Command, args []string) {
|
|||
}
|
||||
|
||||
func injectDeps(deps execute.Dependencies, hosts storage.Reader, buckets platform.BucketService, orgs platform.OrganizationService) error {
|
||||
return functions.InjectFromDependencies(deps, storage.Dependencies{
|
||||
return inputs.InjectFromDependencies(deps, storage.Dependencies{
|
||||
Reader: hosts,
|
||||
BucketLookup: query.FromBucketService(buckets),
|
||||
OrganizationLookup: query.FromOrganizationService(orgs),
|
||||
|
|
|
@ -5,6 +5,7 @@ import (
|
|||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"io"
|
||||
"math"
|
||||
"regexp"
|
||||
|
@ -20,8 +21,7 @@ import (
|
|||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/control"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
fstorage "github.com/influxdata/flux/functions/storage"
|
||||
fstorage "github.com/influxdata/flux/functions/inputs/storage"
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/flux/values"
|
||||
"github.com/influxdata/influxdb/logger"
|
||||
|
@ -75,7 +75,7 @@ func NewController(
|
|||
Verbose: false,
|
||||
}
|
||||
|
||||
err := functions.InjectFromDependencies(cc.ExecutorDependencies, fstorage.Dependencies{
|
||||
err := inputs.InjectFromDependencies(cc.ExecutorDependencies, fstorage.Dependencies{
|
||||
Reader: NewReader(s),
|
||||
BucketLookup: bucketLookup,
|
||||
OrganizationLookup: orgLookup,
|
||||
|
|
15
go.mod
15
go.mod
|
@ -29,7 +29,6 @@ require (
|
|||
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd // indirect
|
||||
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493 // indirect
|
||||
github.com/go-ini/ini v1.38.1 // indirect
|
||||
github.com/go-sql-driver/mysql v1.4.0 // indirect
|
||||
github.com/gogo/protobuf v1.1.1
|
||||
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b // indirect
|
||||
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db
|
||||
|
@ -48,9 +47,9 @@ require (
|
|||
github.com/hashicorp/raft v1.0.0 // indirect
|
||||
github.com/imdario/mergo v0.3.5 // indirect
|
||||
github.com/inconshreveable/mousetrap v1.0.0 // indirect
|
||||
github.com/influxdata/flux v0.0.0-20180919192308-1335e4da35f6
|
||||
github.com/influxdata/flux v0.0.0-20181004191346-fe166147b764
|
||||
github.com/influxdata/influxdb v0.0.0-20180904211643-ab81104697f6
|
||||
github.com/influxdata/influxql v0.0.0-20180823200743-a7267bff5327
|
||||
github.com/influxdata/influxql v0.0.0-20180925231337-1cbfca8e56b6
|
||||
github.com/influxdata/line-protocol v0.0.0-20180522152040-32c6aa80de5e
|
||||
github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368
|
||||
github.com/jessevdk/go-flags v1.4.0
|
||||
|
@ -59,7 +58,7 @@ require (
|
|||
github.com/julienschmidt/httprouter v0.0.0-20180222160526-d18983907793
|
||||
github.com/jwilder/encoding v0.0.0-20170811194829-b4e1701a28ef
|
||||
github.com/kevinburke/go-bindata v3.11.0+incompatible
|
||||
github.com/lib/pq v1.0.0 // indirect
|
||||
github.com/kr/pty v1.1.3 // indirect
|
||||
github.com/magiconair/properties v1.7.6 // indirect
|
||||
github.com/masterminds/semver v1.4.2 // indirect
|
||||
github.com/mattn/go-tty v0.0.0-20180907095812-13ff1204f104 // indirect
|
||||
|
@ -83,7 +82,7 @@ require (
|
|||
github.com/prometheus/client_golang v0.0.0-20171201122222-661e31bf844d
|
||||
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910
|
||||
github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e
|
||||
github.com/prometheus/procfs v0.0.0-20180920065004-418d78d0b9a7 // indirect
|
||||
github.com/prometheus/procfs v0.0.0-20181004131639-6bfc2c70c4ee // indirect
|
||||
github.com/satori/go.uuid v1.2.0
|
||||
github.com/segmentio/kafka-go v0.1.0
|
||||
github.com/sirupsen/logrus v1.0.6
|
||||
|
@ -101,13 +100,13 @@ require (
|
|||
github.com/xlab/treeprint v0.0.0-20180616005107-d6fb6747feb6 // indirect
|
||||
go.uber.org/zap v1.9.1
|
||||
golang.org/x/crypto v0.0.0-20180723164146-c126467f60eb
|
||||
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3
|
||||
golang.org/x/net v0.0.0-20181003013248-f5e5bdd77824
|
||||
golang.org/x/oauth2 v0.0.0-20180521191639-dd5f5d8e78ce
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f
|
||||
golang.org/x/sys v0.0.0-20180920110915-d641721ec2de
|
||||
golang.org/x/sys v0.0.0-20181004145325-8469e314837c
|
||||
golang.org/x/text v0.3.0
|
||||
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e // indirect
|
||||
golang.org/x/tools v0.0.0-20181004163742-59602fdee893 // indirect
|
||||
google.golang.org/api v0.0.0-20180723152133-cd7aead8ef37
|
||||
google.golang.org/appengine v1.0.0 // indirect
|
||||
google.golang.org/genproto v0.0.0-20180831171423-11092d34479b // indirect
|
||||
|
|
18
go.sum
18
go.sum
|
@ -123,11 +123,17 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH
|
|||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/influxdata/flux v0.0.0-20180919192308-1335e4da35f6 h1:p2puINFb2Hy6YfMZOd+BHhPFsbWDD/5uapXF9iEJfqE=
|
||||
github.com/influxdata/flux v0.0.0-20180919192308-1335e4da35f6/go.mod h1:INqOzJeMwzRrdqUu9bjMov4brs4ctB/d8Ev8XhPxHYg=
|
||||
github.com/influxdata/flux v0.0.0-20181004191346-fe166147b764 h1:6fcunjtS4wtkcmdtq6LjrbVS6kF3yS3JBinXnOBqoII=
|
||||
github.com/influxdata/flux v0.0.0-20181004191346-fe166147b764/go.mod h1:bbf9n+wqw4MMubE1iwLJUCCM11y6k9G2JhyY5e9w3Us=
|
||||
github.com/influxdata/goreleaser v0.86.2-0.20180917235036-c23174192b3a/go.mod h1:aVuBpDAT5VtjtUxzvBt8HOd0buzvvk7OX3H2iaviixg=
|
||||
github.com/influxdata/ifql v0.0.6 h1:RNAO0casUSl7Yv29KGlfYNIocDnNj04do/tQOxhmIRw=
|
||||
github.com/influxdata/ifql v0.0.6/go.mod h1:CG+TYTUnRoXAPEV2oDzeVd1rQjRuWjBTW1mqRYUsuAE=
|
||||
github.com/influxdata/influxdb v0.0.0-20180904211643-ab81104697f6 h1:4lXKZjCh1+rd5MIauBsJ4FcwrqZFaErnkVWyrxoJpvQ=
|
||||
github.com/influxdata/influxdb v0.0.0-20180904211643-ab81104697f6/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY=
|
||||
github.com/influxdata/influxql v0.0.0-20180823200743-a7267bff5327 h1:2BI2JbxV11hx8W8gtFleWN7nLmU0WBpuj298yaDVYws=
|
||||
github.com/influxdata/influxql v0.0.0-20180823200743-a7267bff5327/go.mod h1:KpVI7okXjK6PRi3Z5B+mtKZli+R1DnZgb3N+tzevNgo=
|
||||
github.com/influxdata/influxql v0.0.0-20180925231337-1cbfca8e56b6 h1:CFx+pP90q/qg3spoiZjf8donE4WpAdjeJfPOcoNqkWo=
|
||||
github.com/influxdata/influxql v0.0.0-20180925231337-1cbfca8e56b6/go.mod h1:KpVI7okXjK6PRi3Z5B+mtKZli+R1DnZgb3N+tzevNgo=
|
||||
github.com/influxdata/line-protocol v0.0.0-20180522152040-32c6aa80de5e h1:/o3vQtpWJhvnIbXley4/jwzzqNeigJK9z+LZcJZ9zfM=
|
||||
github.com/influxdata/line-protocol v0.0.0-20180522152040-32c6aa80de5e/go.mod h1:4kt73NQhadE3daL3WhR5EJ/J2ocX0PZzwxQ0gXJ7oFE=
|
||||
github.com/influxdata/platform v0.0.0-20180912163125-1786402d48c7/go.mod h1:o8AOzOaMzxS6kSO2oCsbj61/kxrwECx+kFNgE8fqFug=
|
||||
|
@ -135,6 +141,10 @@ github.com/influxdata/tdigest v0.0.0-20180711151920-a7d76c6f093a h1:vMqgISSVkIqW
|
|||
github.com/influxdata/tdigest v0.0.0-20180711151920-a7d76c6f093a/go.mod h1:9GkyshztGufsdPQWjH+ifgnIr3xNUL5syI70g2dzU1o=
|
||||
github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368 h1:+TUUmaFa4YD1Q+7bH9o5NCHQGPMqZCYJiNW6lIIS9z4=
|
||||
github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368/go.mod h1:Wbbw6tYNvwa5dlB6304Sd+82Z3f7PmVZHVKU637d4po=
|
||||
github.com/influxdata/yamux v0.0.0-20171107173414-1f58ded512de h1:GqhjCUCDobXxeZrFNdaLTKpyTm25qM9Z8y7ihBTeuJA=
|
||||
github.com/influxdata/yamux v0.0.0-20171107173414-1f58ded512de/go.mod h1:9fPSNWQM1MYwdjNIBSTzkIHxr/yJvac86kKtvsDs+2Y=
|
||||
github.com/influxdata/yarpc v0.0.1 h1:qvQgl5KSfG0Jv2fUaZKjeLJnjsaoqMNtFP5i0zafsCE=
|
||||
github.com/influxdata/yarpc v0.0.1/go.mod h1:y3O0SndVHp7xavEbUpKN/WUj5Ajr+wEWXBRvR7sS3Cw=
|
||||
github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
|
||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8 h1:12VvqtR6Aowv3l/EQUlocDHW2Cp4G9WJVH7uyH8QFJE=
|
||||
|
@ -152,6 +162,7 @@ github.com/kevinburke/go-bindata v3.11.0+incompatible/go.mod h1:/pEEZ72flUW2p0yi
|
|||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.3/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/lib/pq v1.0.0 h1:X5PMW56eZitiTeO7tKzZxFCSpbFZJtkMMooicw2us9A=
|
||||
|
@ -220,6 +231,8 @@ github.com/prometheus/common v0.0.0-20180801064454-c7de2306084e/go.mod h1:daVV7q
|
|||
github.com/prometheus/procfs v0.0.0-20180725123919-05ee40e3a273/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20180920065004-418d78d0b9a7 h1:NgR6WN8nQ4SmFC1sSUHY8SriLuWCZ6cCIQtH4vDZN3c=
|
||||
github.com/prometheus/procfs v0.0.0-20180920065004-418d78d0b9a7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20181004131639-6bfc2c70c4ee h1:rdsBwtAUYX3boir/J0XmwBioYAmEfuKFc48mfi+B688=
|
||||
github.com/prometheus/procfs v0.0.0-20181004131639-6bfc2c70c4ee/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
|
||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/segmentio/kafka-go v0.1.0 h1:IXCHG+sXPNiIR5pC/vTEItZduPKu4cnpr85YgxpxlW0=
|
||||
|
@ -268,6 +281,8 @@ golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73r
|
|||
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3 h1:czFLhve3vsQetD6JOJ8NZZvGQIXlnN3/yXxbT6/awxI=
|
||||
golang.org/x/net v0.0.0-20180911220305-26e67e76b6c3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20181003013248-f5e5bdd77824 h1:MkjFNbaZJyH98M67Q3umtwZ+EdVdrNJLqSwZp5vcv60=
|
||||
golang.org/x/net v0.0.0-20181003013248-f5e5bdd77824/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/oauth2 v0.0.0-20180521191639-dd5f5d8e78ce h1:fGkx3ZAl797ZVpMlShhW+SWvvLXKd/J2O244qOjWnk0=
|
||||
golang.org/x/oauth2 v0.0.0-20180521191639-dd5f5d8e78ce/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA=
|
||||
|
@ -276,6 +291,8 @@ golang.org/x/sys v0.0.0-20180906133057-8cf3aee42992/go.mod h1:STP8DvDyc/dI5b8T5h
|
|||
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20180920110915-d641721ec2de h1:soC2mvPVpAV+Ld2qtpNn1eq25WTn76uIGNV23bofu6Q=
|
||||
golang.org/x/sys v0.0.0-20180920110915-d641721ec2de/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181004145325-8469e314837c h1:SJ7JoQNVl3mC7EWkkONgBWgCno8LcABIJwFMkWBC+EY=
|
||||
golang.org/x/sys v0.0.0-20181004145325-8469e314837c/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2 h1:+DCIGbF/swA92ohVg0//6X2IVY3KZs6p9mix0ziNYJM=
|
||||
|
@ -283,6 +300,7 @@ golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxb
|
|||
golang.org/x/tools v0.0.0-20180904205237-0aa4b8830f48/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e h1:FDhOuMEY4JVRztM/gsbk+IKUQ8kj74bxZrgw87eMMVc=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20181004163742-59602fdee893/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
google.golang.org/api v0.0.0-20180723152133-cd7aead8ef37 h1:j0oA5IZE0CsX25Tkm7sCIlCIDd01A3T2BUrk9T91lL4=
|
||||
google.golang.org/api v0.0.0-20180723152133-cd7aead8ef37/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
|
||||
google.golang.org/appengine v1.0.0 h1:dN4LljjBKVChsv0XCSI+zbyzdqrkEwX5LQFUMRSGqOc=
|
||||
|
|
|
@ -11,7 +11,7 @@ import (
|
|||
"github.com/grpc-ecosystem/grpc-opentracing/go/otgrpc"
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions/storage"
|
||||
"github.com/influxdata/flux/functions/inputs/storage"
|
||||
"github.com/influxdata/flux/values"
|
||||
ostorage "github.com/influxdata/influxdb/services/storage"
|
||||
opentracing "github.com/opentracing/opentracing-go"
|
||||
|
|
|
@ -11,7 +11,7 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/execute/executetest"
|
||||
ffunctions "github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/querytest"
|
||||
"github.com/influxdata/platform/query/functions"
|
||||
)
|
||||
|
@ -25,7 +25,7 @@ func TestToHTTP_NewQuery(t *testing.T) {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &ffunctions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
Bucket: "mybucket",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -10,7 +10,7 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/execute/executetest"
|
||||
ffunctions "github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/querytest"
|
||||
"github.com/influxdata/platform/query/functions"
|
||||
kafka "github.com/segmentio/kafka-go"
|
||||
|
@ -27,7 +27,7 @@ func TestToKafka_NewQuery(t *testing.T) {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &ffunctions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
Bucket: "mybucket",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -67,7 +67,7 @@ func createVarRefCursor(t *transpilerState, ref *influxql.VarRef) (cursor, error
|
|||
}
|
||||
}
|
||||
|
||||
range_ := t.op("range", &functions.RangeOpSpec{
|
||||
range_ := t.op("range", &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: tr.MinTime()},
|
||||
Stop: flux.Time{Absolute: tr.MaxTime()},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -75,7 +75,7 @@ func createVarRefCursor(t *transpilerState, ref *influxql.VarRef) (cursor, error
|
|||
StopCol: execute.DefaultStopColLabel,
|
||||
}, from)
|
||||
|
||||
id := t.op("filter", &functions.FilterOpSpec{
|
||||
id := t.op("filter", &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
||||
|
@ -113,7 +113,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
if !ok {
|
||||
return nil, fmt.Errorf("undefined variable: %s", call.Args[0])
|
||||
}
|
||||
cur.id = t.op("count", &functions.CountOpSpec{
|
||||
cur.id = t.op("count", &transformations.CountOpSpec{
|
||||
AggregateConfig: execute.AggregateConfig{
|
||||
Columns: []string{value},
|
||||
},
|
||||
|
@ -125,7 +125,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
if !ok {
|
||||
return nil, fmt.Errorf("undefined variable: %s", call.Args[0])
|
||||
}
|
||||
cur.id = t.op("min", &functions.MinOpSpec{
|
||||
cur.id = t.op("min", &transformations.MinOpSpec{
|
||||
SelectorConfig: execute.SelectorConfig{
|
||||
Column: value,
|
||||
},
|
||||
|
@ -137,7 +137,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
if !ok {
|
||||
return nil, fmt.Errorf("undefined variable: %s", call.Args[0])
|
||||
}
|
||||
cur.id = t.op("max", &functions.MaxOpSpec{
|
||||
cur.id = t.op("max", &transformations.MaxOpSpec{
|
||||
SelectorConfig: execute.SelectorConfig{
|
||||
Column: value,
|
||||
},
|
||||
|
@ -149,7 +149,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
if !ok {
|
||||
return nil, fmt.Errorf("undefined variable: %s", call.Args[0])
|
||||
}
|
||||
cur.id = t.op("sum", &functions.SumOpSpec{
|
||||
cur.id = t.op("sum", &transformations.SumOpSpec{
|
||||
AggregateConfig: execute.AggregateConfig{
|
||||
Columns: []string{value},
|
||||
},
|
||||
|
@ -161,7 +161,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
if !ok {
|
||||
return nil, fmt.Errorf("undefined variable: %s", call.Args[0])
|
||||
}
|
||||
cur.id = t.op("first", &functions.FirstOpSpec{
|
||||
cur.id = t.op("first", &transformations.FirstOpSpec{
|
||||
SelectorConfig: execute.SelectorConfig{
|
||||
Column: value,
|
||||
},
|
||||
|
@ -173,7 +173,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
if !ok {
|
||||
return nil, fmt.Errorf("undefined variable: %s", call.Args[0])
|
||||
}
|
||||
cur.id = t.op("last", &functions.LastOpSpec{
|
||||
cur.id = t.op("last", &transformations.LastOpSpec{
|
||||
SelectorConfig: execute.SelectorConfig{
|
||||
Column: value,
|
||||
},
|
||||
|
@ -185,7 +185,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
if !ok {
|
||||
return nil, fmt.Errorf("undefined variable: %s", call.Args[0])
|
||||
}
|
||||
cur.id = t.op("mean", &functions.MeanOpSpec{
|
||||
cur.id = t.op("mean", &transformations.MeanOpSpec{
|
||||
AggregateConfig: execute.AggregateConfig{
|
||||
Columns: []string{value},
|
||||
},
|
||||
|
@ -216,7 +216,7 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
return nil, errors.New("argument N must be between 0 and 100")
|
||||
}
|
||||
|
||||
cur.id = t.op("percentile", &functions.PercentileOpSpec{
|
||||
cur.id = t.op("percentile", &transformations.PercentileOpSpec{
|
||||
Percentile: percentile,
|
||||
Compression: 0,
|
||||
Method: "exact_selector",
|
||||
|
@ -233,11 +233,11 @@ func createFunctionCursor(t *transpilerState, call *influxql.Call, in cursor, no
|
|||
// If we have been told to normalize the time, we do it here.
|
||||
if normalize {
|
||||
if influxql.IsSelector(call) {
|
||||
cur.id = t.op("drop", &functions.DropOpSpec{
|
||||
cur.id = t.op("drop", &transformations.DropOpSpec{
|
||||
Cols: []string{execute.DefaultTimeColLabel},
|
||||
}, cur.id)
|
||||
}
|
||||
cur.id = t.op("duplicate", &functions.DuplicateOpSpec{
|
||||
cur.id = t.op("duplicate", &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
}, cur.id)
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
"github.com/pkg/errors"
|
||||
|
@ -206,7 +206,7 @@ func (gr *groupInfo) createCursor(t *transpilerState) (cursor, error) {
|
|||
if err != nil {
|
||||
return nil, errors.Wrap(err, "unable to evaluate condition")
|
||||
}
|
||||
id := t.op("filter", &functions.FilterOpSpec{
|
||||
id := t.op("filter", &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -241,7 +241,7 @@ func (gr *groupInfo) createCursor(t *transpilerState) (cursor, error) {
|
|||
// so they stay in the same table and are joined in the correct order.
|
||||
if interval > 0 {
|
||||
cur = &groupCursor{
|
||||
id: t.op("window", &functions.WindowOpSpec{
|
||||
id: t.op("window", &transformations.WindowOpSpec{
|
||||
Every: flux.Duration(math.MaxInt64),
|
||||
Period: flux.Duration(math.MaxInt64),
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -361,12 +361,12 @@ func (gr *groupInfo) group(t *transpilerState, in cursor) (cursor, error) {
|
|||
// Perform the grouping by the tags we found. There is always a group by because
|
||||
// there is always something to group in influxql.
|
||||
// TODO(jsternberg): A wildcard will skip this step.
|
||||
id := t.op("group", &functions.GroupOpSpec{
|
||||
id := t.op("group", &transformations.GroupOpSpec{
|
||||
By: tags,
|
||||
}, in.ID())
|
||||
|
||||
if windowEvery > 0 {
|
||||
windowOp := &functions.WindowOpSpec{
|
||||
windowOp := &transformations.WindowOpSpec{
|
||||
Every: flux.Duration(windowEvery),
|
||||
Period: flux.Duration(windowEvery),
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
|
|
@ -4,7 +4,7 @@ import (
|
|||
"fmt"
|
||||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
||||
|
@ -43,7 +43,7 @@ func Join(t *transpilerState, cursors []cursor, on []string) cursor {
|
|||
for _, cur := range cursors {
|
||||
parents = append(parents, cur.ID())
|
||||
}
|
||||
id := t.op("join", &functions.JoinOpSpec{
|
||||
id := t.op("join", &transformations.JoinOpSpec{
|
||||
TableNames: tables,
|
||||
On: on,
|
||||
}, parents...)
|
||||
|
|
|
@ -7,7 +7,7 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -67,7 +67,7 @@ func (t *transpilerState) mapFields(in cursor) (cursor, error) {
|
|||
Value: value,
|
||||
})
|
||||
}
|
||||
id := t.op("map", &functions.MapOpSpec{
|
||||
id := t.op("map", &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
|
|
@ -2,6 +2,8 @@ package spectests
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"time"
|
||||
|
@ -9,20 +11,19 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
||||
var aggregateCreateFuncs = []func(config execute.AggregateConfig) flux.OperationSpec{
|
||||
func(config execute.AggregateConfig) flux.OperationSpec {
|
||||
return &functions.CountOpSpec{AggregateConfig: config}
|
||||
return &transformations.CountOpSpec{AggregateConfig: config}
|
||||
},
|
||||
func(config execute.AggregateConfig) flux.OperationSpec {
|
||||
return &functions.MeanOpSpec{AggregateConfig: config}
|
||||
return &transformations.MeanOpSpec{AggregateConfig: config}
|
||||
},
|
||||
func(config execute.AggregateConfig) flux.OperationSpec {
|
||||
return &functions.SumOpSpec{AggregateConfig: config}
|
||||
return &transformations.SumOpSpec{AggregateConfig: config}
|
||||
},
|
||||
}
|
||||
|
||||
|
@ -55,13 +56,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -71,7 +72,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -108,21 +109,21 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
&aggregate,
|
||||
{
|
||||
ID: "duplicate0",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -155,7 +156,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -7,7 +7,8 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -20,13 +21,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -36,7 +37,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -73,7 +74,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter1",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -95,21 +96,21 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
&aggregate,
|
||||
{
|
||||
ID: "duplicate0",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -142,7 +143,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -8,7 +8,8 @@ import (
|
|||
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -21,13 +22,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -37,7 +38,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -74,21 +75,21 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start", "host"},
|
||||
},
|
||||
},
|
||||
&aggregate,
|
||||
{
|
||||
ID: "duplicate0",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -121,7 +122,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -2,6 +2,8 @@ package spectests
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"math"
|
||||
"time"
|
||||
|
||||
|
@ -9,7 +11,7 @@ import (
|
|||
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
)
|
||||
|
||||
|
@ -21,13 +23,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: Now().Add(-10 * time.Minute)},
|
||||
Stop: flux.Time{Absolute: Now()},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -37,7 +39,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -74,13 +76,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "window0",
|
||||
Spec: &functions.WindowOpSpec{
|
||||
Spec: &transformations.WindowOpSpec{
|
||||
Every: flux.Duration(time.Minute),
|
||||
Period: flux.Duration(time.Minute),
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -91,14 +93,14 @@ func init() {
|
|||
&aggregate,
|
||||
{
|
||||
ID: "duplicate0",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "window1",
|
||||
Spec: &functions.WindowOpSpec{
|
||||
Spec: &transformations.WindowOpSpec{
|
||||
Every: flux.Duration(math.MaxInt64),
|
||||
Period: flux.Duration(math.MaxInt64),
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -108,7 +110,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -141,7 +143,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -2,13 +2,16 @@ package spectests
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
|
||||
"math"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
)
|
||||
|
||||
|
@ -20,13 +23,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: Now().Add(-10 * time.Minute)},
|
||||
Stop: flux.Time{Absolute: Now()},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -36,7 +39,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -73,13 +76,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "window0",
|
||||
Spec: &functions.WindowOpSpec{
|
||||
Spec: &transformations.WindowOpSpec{
|
||||
Every: flux.Duration(5 * time.Minute),
|
||||
Period: flux.Duration(5 * time.Minute),
|
||||
Start: flux.Time{Absolute: time.Unix(0, 0).Add(time.Minute * 2)},
|
||||
|
@ -91,14 +94,14 @@ func init() {
|
|||
&aggregate,
|
||||
{
|
||||
ID: "duplicate0",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "window1",
|
||||
Spec: &functions.WindowOpSpec{
|
||||
Spec: &transformations.WindowOpSpec{
|
||||
Every: flux.Duration(math.MaxInt64),
|
||||
Period: flux.Duration(math.MaxInt64),
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -108,7 +111,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -141,7 +144,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/platform/query/influxql"
|
||||
)
|
||||
|
@ -21,13 +21,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -37,7 +37,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -74,13 +74,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "from1",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range1",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -90,7 +90,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter1",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -127,7 +127,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "join0",
|
||||
Spec: &functions.JoinOpSpec{
|
||||
Spec: &transformations.JoinOpSpec{
|
||||
On: []string{"_measurement"},
|
||||
TableNames: map[flux.OperationID]string{
|
||||
"filter0": "t0",
|
||||
|
@ -137,13 +137,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -185,7 +185,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
package spectests
|
||||
|
||||
import (
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -19,13 +21,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -35,7 +37,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -72,13 +74,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "mean0",
|
||||
Spec: &functions.MeanOpSpec{
|
||||
Spec: &transformations.MeanOpSpec{
|
||||
AggregateConfig: execute.AggregateConfig{
|
||||
Columns: []string{execute.DefaultValueColLabel},
|
||||
},
|
||||
|
@ -86,20 +88,20 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "duplicate0",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "from1",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range1",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -109,7 +111,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter1",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -146,13 +148,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group1",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "max0",
|
||||
Spec: &functions.MaxOpSpec{
|
||||
Spec: &transformations.MaxOpSpec{
|
||||
SelectorConfig: execute.SelectorConfig{
|
||||
Column: execute.DefaultValueColLabel,
|
||||
},
|
||||
|
@ -160,20 +162,20 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "drop0",
|
||||
Spec: &functions.DropOpSpec{
|
||||
Spec: &transformations.DropOpSpec{
|
||||
Cols: []string{execute.DefaultTimeColLabel},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "duplicate1",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "join0",
|
||||
Spec: &functions.JoinOpSpec{
|
||||
Spec: &transformations.JoinOpSpec{
|
||||
On: []string{"_time", "_measurement"},
|
||||
TableNames: map[flux.OperationID]string{
|
||||
"duplicate0": "t0",
|
||||
|
@ -183,7 +185,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -225,7 +227,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
package spectests
|
||||
|
||||
import (
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -19,13 +21,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -35,7 +37,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -72,13 +74,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "mean0",
|
||||
Spec: &functions.MeanOpSpec{
|
||||
Spec: &transformations.MeanOpSpec{
|
||||
AggregateConfig: execute.AggregateConfig{
|
||||
Columns: []string{execute.DefaultValueColLabel},
|
||||
},
|
||||
|
@ -86,14 +88,14 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "duplicate0",
|
||||
Spec: &functions.DuplicateOpSpec{
|
||||
Spec: &transformations.DuplicateOpSpec{
|
||||
Col: execute.DefaultStartColLabel,
|
||||
As: execute.DefaultTimeColLabel,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -126,19 +128,19 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "from1",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range1",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -148,7 +150,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter1",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -185,13 +187,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group1",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "max0",
|
||||
Spec: &functions.MaxOpSpec{
|
||||
Spec: &transformations.MaxOpSpec{
|
||||
SelectorConfig: execute.SelectorConfig{
|
||||
Column: execute.DefaultValueColLabel,
|
||||
},
|
||||
|
@ -199,7 +201,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "map1",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -232,7 +234,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield1",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "1",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
package spectests
|
||||
|
||||
import (
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -19,13 +21,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -35,7 +37,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -72,13 +74,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -111,7 +113,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -1,12 +1,14 @@
|
|||
package spectests
|
||||
|
||||
import (
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -19,13 +21,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -35,7 +37,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -72,7 +74,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter1",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -94,13 +96,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -133,7 +135,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@ -1,13 +1,15 @@
|
|||
package spectests
|
||||
|
||||
import (
|
||||
"github.com/influxdata/flux/functions/inputs"
|
||||
"github.com/influxdata/flux/functions/transformations"
|
||||
"regexp"
|
||||
"time"
|
||||
|
||||
"github.com/influxdata/flux"
|
||||
"github.com/influxdata/flux/ast"
|
||||
"github.com/influxdata/flux/execute"
|
||||
"github.com/influxdata/flux/functions"
|
||||
|
||||
"github.com/influxdata/flux/semantic"
|
||||
"github.com/influxdata/influxql"
|
||||
)
|
||||
|
@ -20,13 +22,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -36,7 +38,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -73,7 +75,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter1",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -95,13 +97,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -134,7 +136,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
@ -159,13 +161,13 @@ func init() {
|
|||
Operations: []*flux.Operation{
|
||||
{
|
||||
ID: "from0",
|
||||
Spec: &functions.FromOpSpec{
|
||||
Spec: &inputs.FromOpSpec{
|
||||
BucketID: bucketID,
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "range0",
|
||||
Spec: &functions.RangeOpSpec{
|
||||
Spec: &transformations.RangeOpSpec{
|
||||
Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
|
||||
Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
|
||||
TimeCol: execute.DefaultTimeColLabel,
|
||||
|
@ -175,7 +177,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter0",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -212,7 +214,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "filter1",
|
||||
Spec: &functions.FilterOpSpec{
|
||||
Spec: &transformations.FilterOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{
|
||||
{Key: &semantic.Identifier{Name: "r"}},
|
||||
|
@ -234,13 +236,13 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "group0",
|
||||
Spec: &functions.GroupOpSpec{
|
||||
Spec: &transformations.GroupOpSpec{
|
||||
By: []string{"_measurement", "_start"},
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "map0",
|
||||
Spec: &functions.MapOpSpec{
|
||||
Spec: &transformations.MapOpSpec{
|
||||
Fn: &semantic.FunctionExpression{
|
||||
Params: []*semantic.FunctionParam{{
|
||||
Key: &semantic.Identifier{Name: "r"},
|
||||
|
@ -273,7 +275,7 @@ func init() {
|
|||
},
|
||||
{
|
||||
ID: "yield0",
|
||||
Spec: &functions.YieldOpSpec{
|
||||
Spec: &transformations.YieldOpSpec{
|
||||
Name: "0",
|
||||
},
|
||||
},
|
||||
|
|
|
@@ -1,12 +1,14 @@
 package spectests
 
 import (
+"github.com/influxdata/flux/functions/inputs"
+"github.com/influxdata/flux/functions/transformations"
 "time"
 
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/ast"
 "github.com/influxdata/flux/execute"
-"github.com/influxdata/flux/functions"
 
 "github.com/influxdata/flux/semantic"
 "github.com/influxdata/influxql"
 )

@@ -19,13 +21,13 @@ func init() {
 Operations: []*flux.Operation{
 {
 ID: "from0",
-Spec: &functions.FromOpSpec{
+Spec: &inputs.FromOpSpec{
 BucketID: altBucketID,
 },
 },
 {
 ID: "range0",
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
 Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
 TimeCol: execute.DefaultTimeColLabel,

@@ -35,7 +37,7 @@ func init() {
 },
 {
 ID: "filter0",
-Spec: &functions.FilterOpSpec{
+Spec: &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{
 {Key: &semantic.Identifier{Name: "r"}},

@@ -72,13 +74,13 @@ func init() {
 },
 {
 ID: "group0",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement", "_start"},
 },
 },
 {
 ID: "map0",
-Spec: &functions.MapOpSpec{
+Spec: &transformations.MapOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{{
 Key: &semantic.Identifier{Name: "r"},

@@ -111,7 +113,7 @@ func init() {
 },
 {
 ID: "yield0",
-Spec: &functions.YieldOpSpec{
+Spec: &transformations.YieldOpSpec{
 Name: "0",
 },
 },
@@ -2,6 +2,8 @@ package spectests
 
 import (
 "fmt"
+"github.com/influxdata/flux/functions/inputs"
+"github.com/influxdata/flux/functions/transformations"
 "path/filepath"
 "runtime"
 "time"

@@ -9,23 +11,23 @@ import (
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/ast"
 "github.com/influxdata/flux/execute"
-"github.com/influxdata/flux/functions"
 
 "github.com/influxdata/flux/semantic"
 "github.com/influxdata/influxql"
 )
 
 var selectorCreateFuncs = []func(config execute.SelectorConfig) flux.OperationSpec{
 func(config execute.SelectorConfig) flux.OperationSpec {
-return &functions.FirstOpSpec{SelectorConfig: config}
+return &transformations.FirstOpSpec{SelectorConfig: config}
 },
 func(config execute.SelectorConfig) flux.OperationSpec {
-return &functions.LastOpSpec{SelectorConfig: config}
+return &transformations.LastOpSpec{SelectorConfig: config}
 },
 func(config execute.SelectorConfig) flux.OperationSpec {
-return &functions.MaxOpSpec{SelectorConfig: config}
+return &transformations.MaxOpSpec{SelectorConfig: config}
 },
 func(config execute.SelectorConfig) flux.OperationSpec {
-return &functions.MinOpSpec{SelectorConfig: config}
+return &transformations.MinOpSpec{SelectorConfig: config}
 },
 }

@@ -58,13 +60,13 @@ func init() {
 Operations: []*flux.Operation{
 {
 ID: "from0",
-Spec: &functions.FromOpSpec{
+Spec: &inputs.FromOpSpec{
 BucketID: bucketID,
 },
 },
 {
 ID: "range0",
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{Absolute: time.Unix(0, influxql.MinTime)},
 Stop: flux.Time{Absolute: time.Unix(0, influxql.MaxTime)},
 TimeCol: execute.DefaultTimeColLabel,

@@ -74,7 +76,7 @@ func init() {
 },
 {
 ID: "filter0",
-Spec: &functions.FilterOpSpec{
+Spec: &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{
 {Key: &semantic.Identifier{Name: "r"}},

@@ -111,14 +113,14 @@ func init() {
 },
 {
 ID: "group0",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement", "_start"},
 },
 },
 &selector,
 {
 ID: "map0",
-Spec: &functions.MapOpSpec{
+Spec: &transformations.MapOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{{
 Key: &semantic.Identifier{Name: "r"},

@@ -151,7 +153,7 @@ func init() {
 },
 {
 ID: "yield0",
-Spec: &functions.YieldOpSpec{
+Spec: &transformations.YieldOpSpec{
 Name: "0",
 },
 },
@@ -1,11 +1,13 @@
 package spectests
 
 import (
+"github.com/influxdata/flux/functions/inputs"
+"github.com/influxdata/flux/functions/transformations"
 "time"
 
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/execute"
-"github.com/influxdata/flux/functions"
 )
 
 func init() {

@@ -16,13 +18,13 @@ func init() {
 Operations: []*flux.Operation{
 {
 ID: "from0",
-Spec: &functions.FromOpSpec{
+Spec: &inputs.FromOpSpec{
 BucketID: bucketID,
 },
 },
 {
 ID: "range0",
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{
 Relative: -time.Hour,
 IsRelative: true,

@@ -31,31 +33,31 @@ func init() {
 },
 {
 ID: "keyValues0",
-Spec: &functions.KeyValuesOpSpec{
+Spec: &transformations.KeyValuesOpSpec{
 KeyCols: []string{"host"},
 },
 },
 {
 ID: "group0",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement", "_key"},
 },
 },
 {
 ID: "distinct0",
-Spec: &functions.DistinctOpSpec{
+Spec: &transformations.DistinctOpSpec{
 Column: execute.DefaultValueColLabel,
 },
 },
 {
 ID: "group1",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement"},
 },
 },
 {
 ID: "rename0",
-Spec: &functions.RenameOpSpec{
+Spec: &transformations.RenameOpSpec{
 Cols: map[string]string{
 "_key": "key",
 "_value": "value",

@@ -64,7 +66,7 @@ func init() {
 },
 {
 ID: "yield0",
-Spec: &functions.YieldOpSpec{
+Spec: &transformations.YieldOpSpec{
 Name: "0",
 },
 },
@@ -1,11 +1,13 @@
 package spectests
 
 import (
+"github.com/influxdata/flux/functions/inputs"
+"github.com/influxdata/flux/functions/transformations"
 "time"
 
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/execute"
-"github.com/influxdata/flux/functions"
 )
 
 func init() {

@@ -16,13 +18,13 @@ func init() {
 Operations: []*flux.Operation{
 {
 ID: "from0",
-Spec: &functions.FromOpSpec{
+Spec: &inputs.FromOpSpec{
 BucketID: bucketID,
 },
 },
 {
 ID: "range0",
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{
 Relative: -time.Hour,
 IsRelative: true,

@@ -31,31 +33,31 @@ func init() {
 },
 {
 ID: "keyValues0",
-Spec: &functions.KeyValuesOpSpec{
+Spec: &transformations.KeyValuesOpSpec{
 KeyCols: []string{"host", "region"},
 },
 },
 {
 ID: "group0",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement", "_key"},
 },
 },
 {
 ID: "distinct0",
-Spec: &functions.DistinctOpSpec{
+Spec: &transformations.DistinctOpSpec{
 Column: execute.DefaultValueColLabel,
 },
 },
 {
 ID: "group1",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement"},
 },
 },
 {
 ID: "rename0",
-Spec: &functions.RenameOpSpec{
+Spec: &transformations.RenameOpSpec{
 Cols: map[string]string{
 "_key": "key",
 "_value": "value",

@@ -64,7 +66,7 @@ func init() {
 },
 {
 ID: "yield0",
-Spec: &functions.YieldOpSpec{
+Spec: &transformations.YieldOpSpec{
 Name: "0",
 },
 },
@@ -6,7 +6,8 @@ import (
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/ast"
 "github.com/influxdata/flux/execute"
-"github.com/influxdata/flux/functions"
+"github.com/influxdata/flux/functions/inputs"
+"github.com/influxdata/flux/functions/transformations"
 "github.com/influxdata/flux/semantic"
 )

@@ -18,13 +19,13 @@ func init() {
 Operations: []*flux.Operation{
 {
 ID: "from0",
-Spec: &functions.FromOpSpec{
+Spec: &inputs.FromOpSpec{
 BucketID: bucketID,
 },
 },
 {
 ID: "range0",
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{
 Relative: -time.Hour,
 IsRelative: true,

@@ -33,7 +34,7 @@ func init() {
 },
 {
 ID: "filter0",
-Spec: &functions.FilterOpSpec{
+Spec: &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{
 {Key: &semantic.Identifier{Name: "r"}},

@@ -73,31 +74,31 @@ func init() {
 },
 {
 ID: "keyValues0",
-Spec: &functions.KeyValuesOpSpec{
+Spec: &transformations.KeyValuesOpSpec{
 KeyCols: []string{"host"},
 },
 },
 {
 ID: "group0",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement", "_key"},
 },
 },
 {
 ID: "distinct0",
-Spec: &functions.DistinctOpSpec{
+Spec: &transformations.DistinctOpSpec{
 Column: execute.DefaultValueColLabel,
 },
 },
 {
 ID: "group1",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: []string{"_measurement"},
 },
 },
 {
 ID: "rename0",
-Spec: &functions.RenameOpSpec{
+Spec: &transformations.RenameOpSpec{
 Cols: map[string]string{
 "_key": "key",
 "_value": "value",

@@ -106,7 +107,7 @@ func init() {
 },
 {
 ID: "yield0",
-Spec: &functions.YieldOpSpec{
+Spec: &transformations.YieldOpSpec{
 Name: "0",
 },
 },
@@ -11,7 +11,8 @@ import (
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/ast"
 "github.com/influxdata/flux/execute"
-"github.com/influxdata/flux/functions"
+"github.com/influxdata/flux/functions/transformations"
+"github.com/influxdata/flux/functions/inputs"
 "github.com/influxdata/flux/semantic"
 "github.com/influxdata/influxql"
 "github.com/influxdata/platform"

@@ -82,7 +83,7 @@ func (t *transpilerState) Transpile(ctx context.Context, id int, s influxql.Stat
 if err != nil {
 return err
 }
-t.op("yield", &functions.YieldOpSpec{Name: strconv.Itoa(id)}, op)
+t.op("yield", &transformations.YieldOpSpec{Name: strconv.Itoa(id)}, op)
 return nil
 }

@@ -116,7 +117,7 @@ func (t *transpilerState) transpileShowTagValues(ctx context.Context, stmt *infl
 
 // TODO(jsternberg): Read the range from the condition expression. 1.x doesn't actually do this so it isn't
 // urgent to implement this functionality so we can use the default range.
-op = t.op("range", &functions.RangeOpSpec{
+op = t.op("range", &transformations.RangeOpSpec{
 Start: flux.Time{
 Relative: -time.Hour,
 IsRelative: true,

@@ -153,7 +154,7 @@ func (t *transpilerState) transpileShowTagValues(ctx context.Context, stmt *infl
 Right: expr,
 }
 }
-op = t.op("filter", &functions.FilterOpSpec{
+op = t.op("filter", &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{
 {Key: &semantic.Identifier{Name: "r"}},

@@ -166,7 +167,7 @@ func (t *transpilerState) transpileShowTagValues(ctx context.Context, stmt *infl
 // TODO(jsternberg): Add the condition filter for the where clause.
 
 // Create the key values op spec from the
-var keyValues functions.KeyValuesOpSpec
+var keyValues transformations.KeyValuesOpSpec
 switch expr := stmt.TagKeyExpr.(type) {
 case *influxql.ListLiteral:
 keyValues.KeyCols = expr.Vals

@@ -186,16 +187,16 @@ func (t *transpilerState) transpileShowTagValues(ctx context.Context, stmt *infl
 
 // Group by the measurement and key, find distinct values, then group by the measurement
 // to join all of the different keys together. Finish by renaming the columns. This is static.
-return t.op("rename", &functions.RenameOpSpec{
+return t.op("rename", &transformations.RenameOpSpec{
 Cols: map[string]string{
 "_key": "key",
 "_value": "value",
 },
-}, t.op("group", &functions.GroupOpSpec{
+}, t.op("group", &transformations.GroupOpSpec{
 By: []string{"_measurement"},
-}, t.op("distinct", &functions.DistinctOpSpec{
+}, t.op("distinct", &transformations.DistinctOpSpec{
 Column: execute.DefaultValueColLabel,
-}, t.op("group", &functions.GroupOpSpec{
+}, t.op("group", &transformations.GroupOpSpec{
 By: []string{"_measurement", "_key"},
 }, op)))), nil
 }

@@ -267,7 +268,7 @@ func (t *transpilerState) from(m *influxql.Measurement) (flux.OperationID, error
 return "", err
 }
 
-spec := &functions.FromOpSpec{
+spec := &inputs.FromOpSpec{
 BucketID: mapping.BucketID,
 }
 return t.op("from", spec), nil
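The SHOW TAG VALUES tail that transpileShowTagValues assembles above (group by measurement and key, take distinct values, regroup by measurement, then rename the key/value columns) is easier to see as a flat operation list. The following is a sketch using only the spec types visible in this diff; the operation IDs and package name are illustrative, not taken from the source:

package example

import (
	"github.com/influxdata/flux"
	"github.com/influxdata/flux/execute"
	"github.com/influxdata/flux/functions/transformations"
)

// showTagValuesTail lists the static tail of the transpiled SHOW TAG VALUES
// pipeline in execution order.
func showTagValuesTail() []*flux.Operation {
	return []*flux.Operation{
		{ID: "group0", Spec: &transformations.GroupOpSpec{By: []string{"_measurement", "_key"}}},
		{ID: "distinct0", Spec: &transformations.DistinctOpSpec{Column: execute.DefaultValueColLabel}},
		{ID: "group1", Spec: &transformations.GroupOpSpec{By: []string{"_measurement"}}},
		{ID: "rename0", Spec: &transformations.RenameOpSpec{Cols: map[string]string{"_key": "key", "_value": "value"}}},
	}
}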
@@ -8,7 +8,8 @@ import (
 "github.com/google/go-cmp/cmp/cmpopts"
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/ast"
-"github.com/influxdata/flux/functions"
+"github.com/influxdata/flux/functions/inputs"
+"github.com/influxdata/flux/functions/transformations"
 "github.com/influxdata/flux/semantic"
 "github.com/influxdata/flux/semantic/semantictest"
 )

@@ -363,11 +364,11 @@ func TestBuild(t *testing.T) {
 Operations: []*flux.Operation{
 {
 ID: flux.OperationID("from"),
-Spec: &functions.FromOpSpec{Bucket: "prometheus"},
+Spec: &inputs.FromOpSpec{Bucket: "prometheus"},
 },
 {
 ID: "where",
-Spec: &functions.FilterOpSpec{
+Spec: &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{{Key: &semantic.Identifier{Name: "r"}}},
 Body: &semantic.LogicalExpression{

@@ -416,7 +417,7 @@ func TestBuild(t *testing.T) {
 },
 },
 {
-ID: flux.OperationID("count"), Spec: &functions.CountOpSpec{},
+ID: flux.OperationID("count"), Spec: &transformations.CountOpSpec{},
 },
 },
 Edges: []flux.Edge{

@@ -438,17 +439,17 @@ func TestBuild(t *testing.T) {
 Operations: []*flux.Operation{
 {
 ID: flux.OperationID("from"),
-Spec: &functions.FromOpSpec{Bucket: "prometheus"},
+Spec: &inputs.FromOpSpec{Bucket: "prometheus"},
 },
 {
 ID: flux.OperationID("range"),
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{Relative: -time.Minute * 7},
 },
 },
 {
 ID: "where",
-Spec: &functions.FilterOpSpec{
+Spec: &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{{Key: &semantic.Identifier{Name: "r"}}},
 Body: &semantic.LogicalExpression{

@@ -502,17 +503,17 @@ func TestBuild(t *testing.T) {
 Operations: []*flux.Operation{
 {
 ID: flux.OperationID("from"),
-Spec: &functions.FromOpSpec{Bucket: "prometheus"},
+Spec: &inputs.FromOpSpec{Bucket: "prometheus"},
 },
 {
 ID: flux.OperationID("range"),
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{Relative: -170 * time.Hour},
 },
 },
 {
 ID: "where",
-Spec: &functions.FilterOpSpec{
+Spec: &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{{Key: &semantic.Identifier{Name: "r"}}},
 Body: &semantic.LogicalExpression{

@@ -546,7 +547,7 @@ func TestBuild(t *testing.T) {
 },
 },
 {
-ID: flux.OperationID("sum"), Spec: &functions.SumOpSpec{},
+ID: flux.OperationID("sum"), Spec: &transformations.SumOpSpec{},
 },
 },
 Edges: []flux.Edge{
@@ -8,7 +8,8 @@ import (
 
 "github.com/influxdata/flux"
 "github.com/influxdata/flux/ast"
-"github.com/influxdata/flux/functions"
+"github.com/influxdata/flux/functions/transformations"
+"github.com/influxdata/flux/functions/inputs"
 "github.com/influxdata/flux/semantic"
 )

@@ -145,7 +146,7 @@ func (s *Selector) QuerySpec() (*flux.Spec, error) {
 ops := []*flux.Operation{
 {
 ID: "from", // TODO: Change this to a UUID
-Spec: &functions.FromOpSpec{
+Spec: &inputs.FromOpSpec{
 Bucket: "prometheus",
 },
 },

@@ -192,7 +193,7 @@ func NewRangeOp(rng, offset time.Duration) (*flux.Operation, error) {
 }
 return &flux.Operation{
 ID: "range", // TODO: Change this to a UUID
-Spec: &functions.RangeOpSpec{
+Spec: &transformations.RangeOpSpec{
 Start: flux.Time{
 Relative: -rng - offset,
 },

@@ -255,7 +256,7 @@ func NewWhereOperation(metricName string, labels []*LabelMatcher) (*flux.Operati
 
 return &flux.Operation{
 ID: "where", // TODO: Change this to a UUID
-Spec: &functions.FilterOpSpec{
+Spec: &transformations.FilterOpSpec{
 Fn: &semantic.FunctionExpression{
 Params: []*semantic.FunctionParam{{Key: &semantic.Identifier{Name: "r"}}},
 Body: node,

@@ -308,7 +309,7 @@ func (a *Aggregate) QuerySpec() (*flux.Operation, error) {
 }
 return &flux.Operation{
 ID: "merge",
-Spec: &functions.GroupOpSpec{
+Spec: &transformations.GroupOpSpec{
 By: keys,
 },
 }, nil

@@ -373,37 +374,37 @@ func (o *Operator) QuerySpec() (*flux.Operation, error) {
 case CountKind:
 return &flux.Operation{
 ID: "count",
-Spec: &functions.CountOpSpec{},
+Spec: &transformations.CountOpSpec{},
 }, nil
 //case TopKind:
 // return &flux.Operation{
 // ID: "top",
-// Spec: &functions.TopOpSpec{}, // TODO: Top doesn't have arg yet
+// Spec: &transformations.TopOpSpec{}, // TODO: Top doesn't have arg yet
 // }, nil
 case SumKind:
 return &flux.Operation{
 ID: "sum",
-Spec: &functions.SumOpSpec{},
+Spec: &transformations.SumOpSpec{},
 }, nil
 //case MinKind:
 // return &flux.Operation{
 // ID: "min",
-// Spec: &functions.MinOpSpec{},
+// Spec: &transformations.MinOpSpec{},
 // }, nil
 //case MaxKind:
 // return &flux.Operation{
 // ID: "max",
-// Spec: &functions.MaxOpSpec{},
+// Spec: &transformations.MaxOpSpec{},
 // }, nil
 //case AvgKind:
 // return &flux.Operation{
 // ID: "mean",
-// Spec: &functions.MeanOpSpec{},
+// Spec: &transformations.MeanOpSpec{},
 // }, nil
 //case StdevKind:
 // return &flux.Operation{
 // ID: "stddev",
-// Spec: &functions.StddevOpSpec{},
+// Spec: &transformations.StddevOpSpec{},
 // }, nil
 default:
 return nil, fmt.Errorf("Unknown Op kind %d", o.Kind)
@@ -10,7 +10,7 @@ import (
 "github.com/google/go-cmp/cmp"
 "github.com/google/go-cmp/cmp/cmpopts"
 "github.com/influxdata/flux"
-"github.com/influxdata/flux/functions"
+"github.com/influxdata/flux/functions/transformations"
 "github.com/influxdata/flux/semantic/semantictest"
 "github.com/influxdata/platform"
 "github.com/influxdata/platform/query"

@@ -28,9 +28,9 @@ type BucketAwareQueryTestCase struct {
 var opts = append(
 semantictest.CmpOptions,
 cmp.AllowUnexported(flux.Spec{}),
-cmp.AllowUnexported(functions.JoinOpSpec{}),
+cmp.AllowUnexported(transformations.JoinOpSpec{}),
 cmpopts.IgnoreUnexported(flux.Spec{}),
-cmpopts.IgnoreUnexported(functions.JoinOpSpec{}),
+cmpopts.IgnoreUnexported(transformations.JoinOpSpec{}),
 )
 
 func BucketAwareQueryTestHelper(t *testing.T, tc BucketAwareQueryTestCase) {