Merge pull request #1275 from influxdata/feature/query-plan

feat(http): add plan endpoint to analyze queries

commit 3e54ef9f53
@@ -146,6 +146,7 @@ var apiLinks = map[string]interface{}{
 	"query": map[string]string{
 		"self":        "/api/v2/query",
 		"ast":         "/api/v2/query/ast",
+		"plan":        "/api/v2/query/plan",
 		"spec":        "/api/v2/query/spec",
 		"suggestions": "/api/v2/query/suggestions",
 	},
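Once serialized, the "query" links block advertises the new plan route alongside the existing ones; the JSON a client sees is roughly:

    {"query": {"self": "/api/v2/query", "ast": "/api/v2/query/ast", "plan": "/api/v2/query/plan", "spec": "/api/v2/query/spec", "suggestions": "/api/v2/query/suggestions"}}
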
@@ -15,6 +15,7 @@ import (
 	"github.com/influxdata/flux/complete"
 	"github.com/influxdata/flux/csv"
 	"github.com/influxdata/flux/parser"
+	"github.com/influxdata/flux/plan"
 	"github.com/influxdata/platform"
 	pcontext "github.com/influxdata/platform/context"
 	"github.com/influxdata/platform/kit/errors"
@@ -50,6 +51,7 @@ func NewFluxHandler() *FluxHandler {
 
 	h.HandlerFunc("POST", fluxPath, h.handlePostQuery)
 	h.HandlerFunc("POST", "/api/v2/query/ast", h.postFluxAST)
+	h.HandlerFunc("POST", "/api/v2/query/plan", h.postFluxPlan)
 	h.HandlerFunc("POST", "/api/v2/query/spec", h.postFluxSpec)
 	h.HandlerFunc("GET", "/api/v2/query/suggestions", h.getFluxSuggestions)
 	h.HandlerFunc("GET", "/api/v2/query/suggestions/:name", h.getFluxSuggestion)
@@ -123,7 +125,7 @@ func (h *FluxHandler) postFluxAST(w http.ResponseWriter, r *http.Request) {
 
 	ast, err := parser.NewAST(request.Query)
 	if err != nil {
-		EncodeError(ctx, errors.InvalidDataf("invalid json: %v", err), w)
+		EncodeError(ctx, errors.InvalidDataf("invalid AST: %v", err), w)
 		return
 	}
 
@@ -154,7 +156,7 @@ func (h *FluxHandler) postFluxSpec(w http.ResponseWriter, r *http.Request) {
 
 	spec, err := flux.Compile(ctx, req.Query, h.Now())
 	if err != nil {
-		EncodeError(ctx, errors.InvalidDataf("invalid json: %v", err), w)
+		EncodeError(ctx, errors.InvalidDataf("invalid spec: %v", err), w)
 		return
 	}
 
@@ -168,6 +170,103 @@ func (h *FluxHandler) postFluxSpec(w http.ResponseWriter, r *http.Request) {
 	}
 }
 
+type fluxPlan struct {
+	Roots     []plan.PlanNode         `json:"roots"`
+	Resources flux.ResourceManagement `json:"resources"`
+	Now       time.Time               `json:"now"`
+}
+
+func newFluxPlan(p *plan.PlanSpec) *fluxPlan {
+	res := &fluxPlan{
+		Roots:     []plan.PlanNode{},
+		Resources: p.Resources,
+		Now:       p.Now,
+	}
+
+	for node := range p.Roots {
+		res.Roots = append(res.Roots, node)
+	}
+
+	return res
+}
+
+type postFluxPlanResponse struct {
+	Spec     *flux.Spec `json:"spec"`
+	Logical  *fluxPlan  `json:"logical"`
+	Physical *fluxPlan  `json:"physical"`
+}
+
+type postPlanRequest struct {
+	Query string     `json:"query,omitempty"`
+	Spec  *flux.Spec `json:"spec,omitempty"`
+}
+
+// Valid checks that the plan request has either a query or a spec defined, but not both.
+func (p *postPlanRequest) Valid() error {
+	if p.Query == "" && p.Spec == nil {
+		return errors.MalformedDataf("query or spec required")
+	}
+
+	if p.Query != "" && p.Spec != nil {
+		return errors.MalformedDataf("cannot request both query and spec")
+	}
+	return nil
+}
+
+// postFluxPlan returns a flux plan for the provided flux query or spec.
+func (h *FluxHandler) postFluxPlan(w http.ResponseWriter, r *http.Request) {
+	ctx := r.Context()
+	req, err := decodePostPlanRequest(ctx, r)
+	if err != nil {
+		EncodeError(ctx, err, w)
+		return
+	}
+
+	var spec *flux.Spec = req.Spec
+	if req.Query != "" {
+		spec, err = flux.Compile(ctx, req.Query, h.Now())
+		if err != nil {
+			EncodeError(ctx, errors.InvalidDataf("invalid spec: %v", err), w)
+			return
+		}
+	}
+
+	logical := plan.NewLogicalPlanner()
+	log, err := logical.Plan(spec)
+	if err != nil {
+		EncodeError(ctx, errors.InvalidDataf("invalid logical plan: %v", err), w)
+		return
+	}
+
+	physical := plan.NewPhysicalPlanner()
+	phys, err := physical.Plan(log)
+	if err != nil {
+		EncodeError(ctx, errors.InvalidDataf("invalid physical plan: %v", err), w)
+		return
+	}
+
+	res := postFluxPlanResponse{
+		Logical:  newFluxPlan(log),
+		Physical: newFluxPlan(phys),
+		Spec:     spec,
+	}
+
+	if err := encodeResponse(ctx, w, http.StatusOK, res); err != nil {
+		EncodeError(ctx, err, w)
+		return
+	}
+}
+
+func decodePostPlanRequest(ctx context.Context, r *http.Request) (*postPlanRequest, error) {
+	req := &postPlanRequest{}
+	err := json.NewDecoder(r.Body).Decode(req)
+	if err != nil {
+		return nil, errors.MalformedDataf("invalid json: %v", err)
+	}
+
+	return req, req.Valid()
+}
+
 // fluxParams contain flux function parameters as defined by the semantic graph
 type fluxParams map[string]string
 
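For orientation, a minimal client-side sketch of the new endpoint follows. It is illustrative only, not part of this change; the host, port, and token are placeholder assumptions, and the query text is an arbitrary example.

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// The body matches postPlanRequest: either "query" or "spec", never both.
	body := bytes.NewBufferString(`{"query": "from(bucket:\"telegraf\") |> range(start: -5m)"}`)

	// Host and token below are assumptions for the sake of the example.
	req, err := http.NewRequest("POST", "http://localhost:9999/api/v2/query/plan", body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("Authorization", "Token my-secret-token")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// On success the handler responds with the compiled spec plus the
	// logical and physical plans (postFluxPlanResponse).
	b, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(resp.StatusCode, string(b))
}
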
@@ -11,6 +11,7 @@ import (
 	"testing"
 	"time"
 
+	"github.com/influxdata/flux"
 	"github.com/influxdata/flux/csv"
 	"github.com/influxdata/flux/lang"
 	"github.com/influxdata/platform"
@@ -201,7 +202,7 @@ func TestFluxHandler_postFluxAST(t *testing.T) {
 		{
 			name: "get ast from()",
 			w:    httptest.NewRecorder(),
-			r:    httptest.NewRequest("GET", "/api/v2/query/ast", bytes.NewBufferString(`{"query": "from()"}`)),
+			r:    httptest.NewRequest("POST", "/api/v2/query/ast", bytes.NewBufferString(`{"query": "from()"}`)),
 			want: `{"ast":{"type":"Program","location":{"start":{"line":1,"column":1},"end":{"line":1,"column":7},"source":"from()"},"body":[{"type":"ExpressionStatement","location":{"start":{"line":1,"column":1},"end":{"line":1,"column":7},"source":"from()"},"expression":{"type":"CallExpression","location":{"start":{"line":1,"column":1},"end":{"line":1,"column":7},"source":"from()"},"callee":{"type":"Identifier","location":{"start":{"line":1,"column":1},"end":{"line":1,"column":5},"source":"from"},"name":"from"}}}]}}
 `,
 			status: http.StatusOK,
@@ -209,7 +210,7 @@ func TestFluxHandler_postFluxAST(t *testing.T) {
 		{
 			name: "error from bad json",
 			w:    httptest.NewRecorder(),
-			r:    httptest.NewRequest("GET", "/api/v2/query/ast", bytes.NewBufferString(`error!`)),
+			r:    httptest.NewRequest("POST", "/api/v2/query/ast", bytes.NewBufferString(`error!`)),
 			status: http.StatusBadRequest,
 		},
 	}
@@ -239,7 +240,7 @@ func TestFluxHandler_postFluxSpec(t *testing.T) {
 		{
 			name: "get spec from()",
 			w:    httptest.NewRecorder(),
-			r:    httptest.NewRequest("GET", "/api/v2/query/spec", bytes.NewBufferString(`{"query": "from(bucket: \"telegraf\")"}`)),
+			r:    httptest.NewRequest("POST", "/api/v2/query/spec", bytes.NewBufferString(`{"query": "from(bucket: \"telegraf\")"}`)),
 			now:  func() time.Time { return time.Unix(0, 0).UTC() },
 			want: `{"spec":{"operations":[{"kind":"from","id":"from0","spec":{"bucket":"telegraf"}}],"edges":null,"resources":{"priority":"high","concurrency_quota":0,"memory_bytes_quota":0},"now":"1970-01-01T00:00:00Z"}}
 `,
@@ -248,16 +249,25 @@ func TestFluxHandler_postFluxSpec(t *testing.T) {
 		{
 			name: "error from bad json",
 			w:    httptest.NewRecorder(),
-			r:    httptest.NewRequest("GET", "/api/v2/query/spec", bytes.NewBufferString(`error!`)),
+			r:    httptest.NewRequest("POST", "/api/v2/query/spec", bytes.NewBufferString(`error!`)),
 			status: http.StatusBadRequest,
 		},
 		{
 			name: "error from incomplete spec",
 			w:    httptest.NewRecorder(),
-			r:    httptest.NewRequest("GET", "/api/v2/query/spec", bytes.NewBufferString(`{"query": "from()"}`)),
+			r:    httptest.NewRequest("POST", "/api/v2/query/spec", bytes.NewBufferString(`{"query": "from()"}`)),
 			now:    func() time.Time { return time.Unix(0, 0).UTC() },
 			status: http.StatusUnprocessableEntity,
 		},
+		{
+			name: "get spec with range and last",
+			w:    httptest.NewRecorder(),
+			r:    httptest.NewRequest("POST", "/api/v2/query/spec", bytes.NewBufferString(`{"query": "from(bucket:\"demo-bucket-in-1\") |> range(start:-2s) |> last()"}`)),
+			now:  func() time.Time { return time.Unix(0, 0).UTC() },
+			want: `{"spec":{"operations":[{"kind":"from","id":"from0","spec":{"bucket":"demo-bucket-in-1"}},{"kind":"range","id":"range1","spec":{"start":"-2s","stop":"now","timeCol":"_time","startCol":"_start","stopCol":"_stop"}},{"kind":"last","id":"last2","spec":{"column":""}}],"edges":[{"parent":"from0","child":"range1"},{"parent":"range1","child":"last2"}],"resources":{"priority":"high","concurrency_quota":0,"memory_bytes_quota":0},"now":"1970-01-01T00:00:00Z"}}
+`,
+			status: http.StatusOK,
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
@@ -271,6 +281,57 @@ func TestFluxHandler_postFluxSpec(t *testing.T) {
 
 			if got := tt.w.Code; got != tt.status {
 				t.Errorf("http.postFluxSpec = got %d\nwant %d", got, tt.status)
+				t.Log(tt.w.HeaderMap)
+			}
+		})
+	}
+}
+
+func TestFluxHandler_postFluxPlan(t *testing.T) {
+	tests := []struct {
+		name   string
+		w      *httptest.ResponseRecorder
+		r      *http.Request
+		now    func() time.Time
+		want   string
+		status int
+	}{
+		{
+			name: "get plan from()",
+			w:    httptest.NewRecorder(),
+			r:    httptest.NewRequest("POST", "/api/v2/query/plan", bytes.NewBufferString(`{"query": "from(bucket:\"telegraf\")|> range(start: -5000h)|> filter(fn: (r) => r._measurement == \"mem\" AND r._field == \"used_percent\")|> group(by:[\"host\"])|> mean()"}`)),
+			now:  func() time.Time { return time.Unix(0, 0).UTC() },
+			want: `{"spec":{"operations":[{"kind":"from","id":"from0","spec":{"bucket":"telegraf"}},{"kind":"range","id":"range1","spec":{"start":"-5000h0m0s","stop":"now","timeCol":"_time","startCol":"_start","stopCol":"_stop"}},{"kind":"filter","id":"filter2","spec":{"fn":{"type":"FunctionExpression","block":{"type":"FunctionBlock","parameters":{"type":"FunctionParameters","list":[{"type":"FunctionParameter","key":{"type":"Identifier","name":"r"}}],"pipe":null},"body":{"type":"LogicalExpression","operator":"and","left":{"type":"BinaryExpression","operator":"==","left":{"type":"MemberExpression","object":{"type":"IdentifierExpression","name":"r"},"property":"_measurement"},"right":{"type":"StringLiteral","value":"mem"}},"right":{"type":"BinaryExpression","operator":"==","left":{"type":"MemberExpression","object":{"type":"IdentifierExpression","name":"r"},"property":"_field"},"right":{"type":"StringLiteral","value":"used_percent"}}}}}}},{"kind":"group","id":"group3","spec":{"by":["host"],"except":null,"all":false,"none":false}},{"kind":"mean","id":"mean4","spec":{"columns":["_value"]}}],"edges":[{"parent":"from0","child":"range1"},{"parent":"range1","child":"filter2"},{"parent":"filter2","child":"group3"},{"parent":"group3","child":"mean4"}],"resources":{"priority":"high","concurrency_quota":0,"memory_bytes_quota":0},"now":"1970-01-01T00:00:00Z"},"logical":{"roots":[{"Spec":{"Name":"_result"}}],"resources":{"priority":"high","concurrency_quota":1,"memory_bytes_quota":9223372036854775807},"now":"1970-01-01T00:00:00Z"},"physical":{"roots":[{"Spec":{"Name":"_result"}}],"resources":{"priority":"high","concurrency_quota":1,"memory_bytes_quota":9223372036854775807},"now":"1970-01-01T00:00:00Z"}}
+`,
+			status: http.StatusOK,
+		},
+		{
+			name:   "error from bad json",
+			w:      httptest.NewRecorder(),
+			r:      httptest.NewRequest("POST", "/api/v2/query/plan", bytes.NewBufferString(`error!`)),
+			status: http.StatusBadRequest,
+		},
+		{
+			name:   "error from incomplete plan",
+			w:      httptest.NewRecorder(),
+			r:      httptest.NewRequest("POST", "/api/v2/query/plan", bytes.NewBufferString(`{"query": "from()"}`)),
+			now:    func() time.Time { return time.Unix(0, 0).UTC() },
+			status: http.StatusUnprocessableEntity,
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			h := &FluxHandler{
+				Now: tt.now,
+			}
+			h.postFluxPlan(tt.w, tt.r)
+			if got := tt.w.Body.String(); got != tt.want {
+				t.Errorf("http.postFluxPlan = got %s\nwant %s", got, tt.want)
+			}
+
+			if got := tt.w.Code; got != tt.status {
+				t.Errorf("http.postFluxPlan = got %d\nwant %d", got, tt.status)
+				t.Log(tt.w.HeaderMap)
 			}
 		})
 	}
@@ -281,3 +342,54 @@ var crlfPattern = regexp.MustCompile(`\r?\n`)
 func toCRLF(data string) string {
 	return crlfPattern.ReplaceAllString(data, "\r\n")
 }
+
+func Test_postPlanRequest_Valid(t *testing.T) {
+	type fields struct {
+		Query string
+		Spec  *flux.Spec
+	}
+	tests := []struct {
+		name    string
+		fields  fields
+		wantErr bool
+	}{
+		{
+			name:    "no query nor spec is an error",
+			fields:  fields{},
+			wantErr: true,
+		},
+		{
+			name: "both query and spec is an error",
+			fields: fields{
+				Query: "from()|>last()",
+				Spec:  &flux.Spec{},
+			},
+			wantErr: true,
+		},
+		{
+			name: "request with query is valid",
+			fields: fields{
+				Query: `from(bucket:"telegraf")|> range(start: -5000h)|> filter(fn: (r) => r._measurement == "mem" AND r._field == "used_percent")|> group(by:["host"])|> mean()`,
+			},
+		},
+		{
+			name: "request with spec is valid",
+			fields: fields{
+				Spec: &flux.Spec{},
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			p := &postPlanRequest{
+				Query: tt.fields.Query,
+				Spec:  tt.fields.Spec,
+			}
+			if err := p.Valid(); (err != nil) != tt.wantErr {
+				t.Errorf("postPlanRequest.Valid() error = %v, wantErr %v", err, tt.wantErr)
+			}
+		})
+	}
+}

http/swagger.yml (131 changed lines)

@@ -1466,12 +1466,104 @@ paths:
                $ref: "#/components/schemas/Error"
   /query/ast:
     post:
-      description: not currently documented # TODO(desa): document ast endpoint
+      description: analyzes a flux query and generates an abstract syntax tree.
       tags:
         - Query
+      parameters:
+        - in: header
+          name: Content-Type
+          schema:
+            type: string
+            enum:
+              - application/json
+        - in: header
+          name: Authorization
+          description: the authorization header should be in the format of `Token <key>`
+          schema:
+            type: string
+      requestBody:
+        description: flux query to analyze and generate an abstract syntax tree from.
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/LanguageRequest"
       responses:
         '200':
-          description: Suggestions for next functions in call chain
+          description: Abstract syntax tree of flux query.
+          content:
+            application/json: #TODO(goller): document the AST JSON schema
+        default:
+          description: Any response other than 200 is an internal server error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+  /query/spec:
+    post:
+      description: analyzes a flux query and generates a query specification.
+      tags:
+        - Query
+      parameters:
+        - in: header
+          name: Content-Type
+          schema:
+            type: string
+            enum:
+              - application/json
+        - in: header
+          name: Authorization
+          description: the authorization header should be in the format of `Token <key>`
+          schema:
+            type: string
+      requestBody:
+        description: flux query to analyze and generate a specification from.
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/LanguageRequest"
+      responses:
+        '200':
+          description: Specification of flux query.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/QuerySpecification"
+        default:
+          description: Any response other than 200 is an internal server error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+  /query/plan:
+    post:
+      description: analyzes a flux query or specification and generates a query plan.
+      tags:
+        - Query
+      parameters:
+        - in: header
+          name: Content-Type
+          schema:
+            type: string
+            enum:
+              - application/json
+        - in: header
+          name: Authorization
+          description: the authorization header should be in the format of `Token <key>`
+          schema:
+            type: string
+      requestBody:
+        description: flux query or specification to generate a plan from.
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/PlanRequest"
+      responses:
+        '200':
+          description: Logical and physical plans of the flux query.
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/PlanResponse"
         default:
           description: Any response other than 200 is an internal server error
           content:
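For reference, a request body matching the PlanRequest schema above would look like this (the bucket name and query are illustrative only):

    {"query": "from(bucket: \"telegraf\") |> range(start: -5m) |> last()"}

Alternatively, a pre-compiled specification can be submitted as {"spec": {...}} carrying a QuerySpecification object; supplying both fields in one request is rejected as malformed, mirroring postPlanRequest.Valid in the handler.
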
@@ -3223,6 +3315,39 @@ paths:
                $ref: "#/components/schemas/Error"
 components:
   schemas:
+    LanguageRequest:
+      description: flux query to be analyzed.
+      type: object
+      required:
+        - query
+      properties:
+        query:
+          description: flux query script to be analyzed
+          type: string
+    PlanRequest:
+      description: flux query or specification to be planned. The spec and query fields are mutually exclusive.
+      type: object
+      properties:
+        query:
+          description: flux query script to be analyzed
+          type: string
+        spec:
+          $ref: "#/components/schemas/QuerySpecification"
+    PlanResponse:
+      description: query specification together with the logical and physical plans generated for it.
+      type: object
+      properties:
+        spec:
+          $ref: "#/components/schemas/QuerySpecification"
+          readOnly: true
+        logical:
+          description: logical plan of the query.
+          readOnly: true
+          type: object #TODO(goller): document the logical plan format
+        physical:
+          description: physical plan of the query.
+          readOnly: true
+          type: object #TODO(goller): document the physical plan format
     Query:
       description: query influx with specified return formatting. The spec and query fields are mutually exclusive.
       type: object
@@ -398,7 +398,6 @@ func TestBatcher_Write(t *testing.T) {
 }
 
 func TestBatcher_WriteTimeout(t *testing.T) {
-
 	// mocking the write service here to either return an error
 	// or get back all the bytes from the reader.
 	var got string