2018-09-04 21:08:00 +00:00
|
|
|
package http
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"context"
|
|
|
|
"encoding/json"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
2019-02-21 04:42:29 +00:00
|
|
|
"io/ioutil"
|
2018-09-04 21:08:00 +00:00
|
|
|
"net/http"
|
2018-09-20 17:29:22 +00:00
|
|
|
"net/url"
|
2020-02-12 17:07:29 +00:00
|
|
|
"sort"
|
2018-10-04 18:21:53 +00:00
|
|
|
"time"
|
2018-09-04 21:08:00 +00:00
|
|
|
|
2019-07-29 19:47:55 +00:00
|
|
|
"github.com/NYTimes/gziphandler"
|
2018-09-13 18:21:19 +00:00
|
|
|
"github.com/influxdata/flux"
|
2018-10-04 18:21:53 +00:00
|
|
|
"github.com/influxdata/flux/ast"
|
2018-09-13 18:21:19 +00:00
|
|
|
"github.com/influxdata/flux/csv"
|
2019-03-07 15:32:13 +00:00
|
|
|
"github.com/influxdata/flux/iocounter"
|
2021-08-30 20:42:05 +00:00
|
|
|
"github.com/influxdata/flux/lang"
|
2019-11-25 14:22:19 +00:00
|
|
|
"github.com/influxdata/httprouter"
|
2020-04-03 17:39:20 +00:00
|
|
|
"github.com/influxdata/influxdb/v2"
|
|
|
|
pcontext "github.com/influxdata/influxdb/v2/context"
|
|
|
|
"github.com/influxdata/influxdb/v2/http/metric"
|
|
|
|
"github.com/influxdata/influxdb/v2/kit/check"
|
2020-06-17 13:55:29 +00:00
|
|
|
"github.com/influxdata/influxdb/v2/kit/feature"
|
2021-04-07 18:42:55 +00:00
|
|
|
"github.com/influxdata/influxdb/v2/kit/platform"
|
|
|
|
errors2 "github.com/influxdata/influxdb/v2/kit/platform/errors"
|
2020-04-03 17:39:20 +00:00
|
|
|
"github.com/influxdata/influxdb/v2/kit/tracing"
|
|
|
|
kithttp "github.com/influxdata/influxdb/v2/kit/transport/http"
|
|
|
|
"github.com/influxdata/influxdb/v2/logger"
|
|
|
|
"github.com/influxdata/influxdb/v2/query"
|
2021-04-07 18:42:55 +00:00
|
|
|
"github.com/influxdata/influxdb/v2/query/fluxlang"
|
2020-04-03 17:39:20 +00:00
|
|
|
"github.com/influxdata/influxdb/v2/query/influxql"
|
2019-03-26 03:05:44 +00:00
|
|
|
"github.com/pkg/errors"
|
2019-04-10 21:08:22 +00:00
|
|
|
prom "github.com/prometheus/client_golang/prometheus"
|
2019-03-26 03:05:44 +00:00
|
|
|
"go.uber.org/zap"
|
2018-09-04 21:08:00 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
const (
	// prefixQuery is the route prefix shared by all flux query endpoints.
	prefixQuery = "/api/v2/query"
	// traceIDHeader is the response header used to echo the request's trace ID
	// back to the client (set in handleQuery when tracing info is present).
	traceIDHeader = "Trace-Id"
)
|
|
|
|
|
2019-01-16 16:12:57 +00:00
|
|
|
// FluxBackend is all services and associated parameters required to construct
// the FluxHandler.
type FluxBackend struct {
	errors2.HTTPErrorHandler
	log *zap.Logger
	// FluxLogEnabled turns on the detailed per-query log line emitted after
	// each query (see FluxHandler.logFluxQuery).
	FluxLogEnabled bool
	// QueryEventRecorder records per-query usage metrics (org, endpoint,
	// request/response bytes, status).
	QueryEventRecorder metric.EventRecorder

	AlgoWProxy          FeatureProxyHandler
	OrganizationService influxdb.OrganizationService
	ProxyQueryService   query.ProxyQueryService
	FluxLanguageService fluxlang.FluxLanguageService
	Flagger             feature.Flagger
}
|
|
|
|
|
|
|
|
// NewFluxBackend returns a new instance of FluxBackend.
|
2019-12-04 23:10:23 +00:00
|
|
|
func NewFluxBackend(log *zap.Logger, b *APIBackend) *FluxBackend {
|
2019-01-16 16:12:57 +00:00
|
|
|
return &FluxBackend{
|
2019-06-27 01:33:20 +00:00
|
|
|
HTTPErrorHandler: b.HTTPErrorHandler,
|
2019-12-04 23:10:23 +00:00
|
|
|
log: log,
|
2021-08-30 20:42:05 +00:00
|
|
|
FluxLogEnabled: b.FluxLogEnabled,
|
2019-04-10 21:08:22 +00:00
|
|
|
QueryEventRecorder: b.QueryEventRecorder,
|
2020-05-08 13:03:21 +00:00
|
|
|
AlgoWProxy: b.AlgoWProxy,
|
2020-02-12 17:07:29 +00:00
|
|
|
ProxyQueryService: routingQueryService{
|
|
|
|
InfluxQLService: b.InfluxQLService,
|
|
|
|
DefaultService: b.FluxService,
|
|
|
|
},
|
2019-01-16 16:12:57 +00:00
|
|
|
OrganizationService: b.OrganizationService,
|
2020-03-05 16:32:17 +00:00
|
|
|
FluxLanguageService: b.FluxLanguageService,
|
2020-06-17 13:55:29 +00:00
|
|
|
Flagger: b.Flagger,
|
2019-01-16 16:12:57 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-05-09 17:41:14 +00:00
|
|
|
// HTTPDialect is an encoding dialect that can write metadata to HTTP headers
type HTTPDialect interface {
	// SetHeaders writes the dialect's response metadata (content type and the
	// like — implementation specific) onto w before any body bytes are written.
	SetHeaders(w http.ResponseWriter)
}
|
|
|
|
|
2018-09-04 21:08:00 +00:00
|
|
|
// FluxHandler implements handling flux queries.
type FluxHandler struct {
	*httprouter.Router
	errors2.HTTPErrorHandler
	log *zap.Logger
	// FluxLogEnabled enables the detailed per-query log line emitted by
	// logFluxQuery at the end of handleQuery.
	FluxLogEnabled bool

	// Now returns the current wall-clock time (set to time.Now in
	// NewFluxHandler); a field so it can be substituted in tests.
	Now                 func() time.Time
	OrganizationService influxdb.OrganizationService
	ProxyQueryService   query.ProxyQueryService
	FluxLanguageService fluxlang.FluxLanguageService

	// EventRecorder records per-query usage metrics (see handleQuery's defer).
	EventRecorder metric.EventRecorder

	// Flagger, when non-nil, is used to annotate the request context with
	// feature flags before the query runs.
	Flagger feature.Flagger
}
|
|
|
|
|
2019-12-09 23:54:16 +00:00
|
|
|
// Prefix provides the route prefix.
|
|
|
|
func (*FluxHandler) Prefix() string {
|
|
|
|
return prefixQuery
|
|
|
|
}
|
|
|
|
|
2018-09-26 08:49:19 +00:00
|
|
|
// NewFluxHandler returns a new handler at /api/v2/query for flux queries.
|
2019-12-04 23:10:23 +00:00
|
|
|
func NewFluxHandler(log *zap.Logger, b *FluxBackend) *FluxHandler {
|
2018-09-04 21:08:00 +00:00
|
|
|
h := &FluxHandler{
|
2019-06-27 01:33:20 +00:00
|
|
|
Router: NewRouter(b.HTTPErrorHandler),
|
|
|
|
Now: time.Now,
|
|
|
|
HTTPErrorHandler: b.HTTPErrorHandler,
|
2019-12-04 23:10:23 +00:00
|
|
|
log: log,
|
2021-08-30 20:42:05 +00:00
|
|
|
FluxLogEnabled: b.FluxLogEnabled,
|
2019-01-16 16:12:57 +00:00
|
|
|
|
|
|
|
ProxyQueryService: b.ProxyQueryService,
|
|
|
|
OrganizationService: b.OrganizationService,
|
2019-04-10 21:08:22 +00:00
|
|
|
EventRecorder: b.QueryEventRecorder,
|
2020-03-09 18:30:43 +00:00
|
|
|
FluxLanguageService: b.FluxLanguageService,
|
2020-06-17 13:55:29 +00:00
|
|
|
Flagger: b.Flagger,
|
2018-09-04 21:08:00 +00:00
|
|
|
}
|
|
|
|
|
2019-07-29 19:47:55 +00:00
|
|
|
// query reponses can optionally be gzip encoded
|
|
|
|
qh := gziphandler.GzipHandler(http.HandlerFunc(h.handleQuery))
|
2020-05-08 13:03:21 +00:00
|
|
|
h.Handler("POST", prefixQuery, withFeatureProxy(b.AlgoWProxy, qh))
|
|
|
|
h.Handler("POST", "/api/v2/query/ast", withFeatureProxy(b.AlgoWProxy, http.HandlerFunc(h.postFluxAST)))
|
|
|
|
h.Handler("POST", "/api/v2/query/analyze", withFeatureProxy(b.AlgoWProxy, http.HandlerFunc(h.postQueryAnalyze)))
|
|
|
|
h.Handler("GET", "/api/v2/query/suggestions", withFeatureProxy(b.AlgoWProxy, http.HandlerFunc(h.getFluxSuggestions)))
|
|
|
|
h.Handler("GET", "/api/v2/query/suggestions/:name", withFeatureProxy(b.AlgoWProxy, http.HandlerFunc(h.getFluxSuggestion)))
|
2018-09-04 21:08:00 +00:00
|
|
|
return h
|
|
|
|
}
|
|
|
|
|
2019-01-02 19:36:16 +00:00
|
|
|
// handleQuery is the HTTP handler for POST /api/v2/query. It authorizes the
// request, decodes it into a proxy query request, executes it through the
// ProxyQueryService, and streams the results to the client. Usage metrics
// (org, endpoint, request/response bytes, status) are recorded for every
// request, success or failure.
func (h *FluxHandler) handleQuery(w http.ResponseWriter, r *http.Request) {
	const op = "http/handlePostQuery"
	span, r := tracing.ExtractFromHTTPRequest(r, "FluxHandler")
	defer span.Finish()

	ctx := r.Context()
	log := h.log.With(logger.TraceFields(ctx)...)
	// Echo the trace ID back to the client when tracing info is available.
	if id, _, found := tracing.InfoFromContext(ctx); found {
		w.Header().Set(traceIDHeader, id)
	}

	// TODO(desa): I really don't like how we're recording the usage metrics here
	// Ideally this will be moved when we solve https://github.com/influxdata/influxdb/issues/13403
	var orgID platform.ID
	var requestBytes int
	// Wrap the writer so the deferred recorder can observe the status code
	// and the number of response bytes actually written.
	sw := kithttp.NewStatusResponseWriter(w)
	w = sw
	defer func() {
		h.EventRecorder.Record(ctx, metric.Event{
			OrgID:        orgID,
			Endpoint:     r.URL.Path, // This should be sufficient for the time being as it should only be single endpoint.
			RequestBytes: requestBytes,
			ResponseBytes: sw.ResponseBytes(),
			Status:        sw.Code(),
		})
	}()

	a, err := pcontext.GetAuthorizer(ctx)
	if err != nil {
		err := &errors2.Error{
			Code: errors2.EUnauthorized,
			Msg:  "authorization is invalid or missing in the query request",
			Op:   op,
			Err:  err,
		}
		h.HandleHTTPError(ctx, err, w)
		return
	}

	// ErrAuthorizerNotSupported is deliberately tolerated here; any other
	// decode failure is reported as an invalid request.
	req, n, err := decodeProxyQueryRequest(ctx, r, a, h.OrganizationService)
	if err != nil && err != influxdb.ErrAuthorizerNotSupported {
		err := &errors2.Error{
			Code: errors2.EInvalid,
			Msg:  "failed to decode request body",
			Op:   op,
			Err:  err,
		}
		h.HandleHTTPError(ctx, err, w)
		return
	}
	req.Request.Source = r.Header.Get("User-Agent")
	orgID = req.Request.OrganizationID
	requestBytes = n

	// Transform the context into one with the request's authorization.
	ctx = pcontext.SetAuthorizer(ctx, req.Request.Authorization)
	if h.Flagger != nil {
		ctx, _ = feature.Annotate(ctx, h.Flagger)
	}

	// The dialect must know how to set HTTP response headers (e.g. CSV).
	hd, ok := req.Dialect.(HTTPDialect)
	if !ok {
		err := &errors2.Error{
			Code: errors2.EInvalid,
			Msg:  fmt.Sprintf("unsupported dialect over HTTP: %T", req.Dialect),
			Op:   op,
		}
		h.HandleHTTPError(ctx, err, w)
		return
	}
	hd.SetHeaders(w)

	// Count bytes written so we know whether headers have already gone out.
	cw := iocounter.Writer{Writer: w}
	stats, err := h.ProxyQueryService.Query(ctx, &cw, req)
	if err != nil {
		if cw.Count() == 0 {
			// Only record the error headers IFF nothing has been written to w.
			h.HandleHTTPError(ctx, err, w)
			return
		}
		// Too late to change the status; just trace and log the failure.
		_ = tracing.LogError(span, err)
		log.Info("Error writing response to client",
			zap.String("handler", "flux"),
			zap.Error(err),
		)
	}

	// Detailed logging for flux queries if enabled
	if h.FluxLogEnabled {
		h.logFluxQuery(cw.Count(), stats, req.Request.Compiler, err)
	}
}
|
|
|
|
|
|
|
|
func (h *FluxHandler) logFluxQuery(n int64, stats flux.Statistics, compiler flux.Compiler, err error) {
|
|
|
|
var q string
|
|
|
|
c, ok := compiler.(lang.FluxCompiler)
|
|
|
|
if !ok {
|
|
|
|
q = "unknown"
|
|
|
|
}
|
|
|
|
q = c.Query
|
|
|
|
|
|
|
|
h.log.Info("Executed Flux query",
|
|
|
|
zap.String("compiler_type", string(compiler.CompilerType())),
|
|
|
|
zap.Int64("response_size", n),
|
|
|
|
zap.String("query", q),
|
|
|
|
zap.Error(err),
|
|
|
|
zap.Duration("stat_total_duration", stats.TotalDuration),
|
|
|
|
zap.Duration("stat_compile_duration", stats.CompileDuration),
|
|
|
|
zap.Duration("stat_execute_duration", stats.ExecuteDuration),
|
|
|
|
zap.Int64("stat_max_allocated", stats.MaxAllocated),
|
|
|
|
zap.Int64("stat_total_allocated", stats.TotalAllocated),
|
|
|
|
)
|
2018-09-04 21:08:00 +00:00
|
|
|
}
|
|
|
|
|
2018-10-04 18:21:53 +00:00
|
|
|
// langRequest is the JSON request body for the language tooling endpoints
// (/api/v2/query/ast): a single flux query string.
type langRequest struct {
	Query string `json:"query"`
}
|
|
|
|
|
|
|
|
// postFluxASTResponse is the JSON response body for /api/v2/query/ast: the
// parsed AST package of the submitted query.
type postFluxASTResponse struct {
	AST *ast.Package `json:"ast"`
}
|
|
|
|
|
|
|
|
// postFluxAST returns a flux AST for provided flux string
|
|
|
|
func (h *FluxHandler) postFluxAST(w http.ResponseWriter, r *http.Request) {
|
2019-03-05 00:38:10 +00:00
|
|
|
span, r := tracing.ExtractFromHTTPRequest(r, "FluxHandler")
|
|
|
|
defer span.Finish()
|
|
|
|
|
2018-10-04 18:21:53 +00:00
|
|
|
var request langRequest
|
|
|
|
ctx := r.Context()
|
|
|
|
|
|
|
|
err := json.NewDecoder(r.Body).Decode(&request)
|
|
|
|
if err != nil {
|
2021-03-30 18:10:02 +00:00
|
|
|
h.HandleHTTPError(ctx, &errors2.Error{
|
|
|
|
Code: errors2.EInvalid,
|
2019-01-24 00:15:42 +00:00
|
|
|
Msg: "invalid json",
|
|
|
|
Err: err,
|
|
|
|
}, w)
|
2018-10-04 18:21:53 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-03-09 18:30:43 +00:00
|
|
|
pkg, err := query.Parse(h.FluxLanguageService, request.Query)
|
2020-03-05 16:32:17 +00:00
|
|
|
if err != nil {
|
2021-03-30 18:10:02 +00:00
|
|
|
h.HandleHTTPError(ctx, &errors2.Error{
|
|
|
|
Code: errors2.EInvalid,
|
2019-01-24 00:15:42 +00:00
|
|
|
Msg: "invalid AST",
|
|
|
|
Err: err,
|
|
|
|
}, w)
|
2018-10-04 18:21:53 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
res := postFluxASTResponse{
|
2019-01-04 18:08:07 +00:00
|
|
|
AST: pkg,
|
2018-10-04 18:21:53 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if err := encodeResponse(ctx, w, http.StatusOK, res); err != nil {
|
2019-12-04 23:10:23 +00:00
|
|
|
logEncodingError(h.log, r, err)
|
2018-10-04 18:21:53 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-12-05 18:13:26 +00:00
|
|
|
// postQueryAnalyze parses a query and returns any query errors.
|
|
|
|
func (h *FluxHandler) postQueryAnalyze(w http.ResponseWriter, r *http.Request) {
|
2019-03-05 00:38:10 +00:00
|
|
|
span, r := tracing.ExtractFromHTTPRequest(r, "FluxHandler")
|
|
|
|
defer span.Finish()
|
|
|
|
|
2018-12-05 18:13:26 +00:00
|
|
|
ctx := r.Context()
|
|
|
|
|
|
|
|
var req QueryRequest
|
|
|
|
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
|
2021-03-30 18:10:02 +00:00
|
|
|
h.HandleHTTPError(ctx, &errors2.Error{
|
|
|
|
Code: errors2.EInvalid,
|
2019-01-24 00:15:42 +00:00
|
|
|
Msg: "invalid json",
|
|
|
|
Err: err,
|
|
|
|
}, w)
|
2018-12-05 18:13:26 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
2020-03-09 18:30:43 +00:00
|
|
|
a, err := req.Analyze(h.FluxLanguageService)
|
2018-12-05 18:13:26 +00:00
|
|
|
if err != nil {
|
2019-06-27 01:33:20 +00:00
|
|
|
h.HandleHTTPError(ctx, err, w)
|
2018-12-05 18:13:26 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
if err := encodeResponse(ctx, w, http.StatusOK, a); err != nil {
|
2019-12-04 23:10:23 +00:00
|
|
|
logEncodingError(h.log, r, err)
|
2018-12-05 18:13:26 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-10-04 18:21:53 +00:00
|
|
|
// fluxParams contain flux function parameters as defined by the semantic graph
type fluxParams map[string]string
|
|
|
|
|
|
|
|
// suggestionResponse provides the parameters available for a given Flux function
type suggestionResponse struct {
	Name   string     `json:"name"`
	Params fluxParams `json:"params"`
}
|
|
|
|
|
|
|
|
// suggestionsResponse provides a list of available Flux functions
type suggestionsResponse struct {
	Functions []suggestionResponse `json:"funcs"`
}
|
|
|
|
|
|
|
|
// getFluxSuggestions returns a list of available Flux functions for the Flux Builder
|
|
|
|
func (h *FluxHandler) getFluxSuggestions(w http.ResponseWriter, r *http.Request) {
|
2019-03-05 00:38:10 +00:00
|
|
|
span, r := tracing.ExtractFromHTTPRequest(r, "FluxHandler")
|
|
|
|
defer span.Finish()
|
|
|
|
|
2018-10-04 18:21:53 +00:00
|
|
|
ctx := r.Context()
|
2020-03-09 18:30:43 +00:00
|
|
|
completer := h.FluxLanguageService.Completer()
|
2018-10-04 18:21:53 +00:00
|
|
|
names := completer.FunctionNames()
|
|
|
|
var functions []suggestionResponse
|
|
|
|
for _, name := range names {
|
|
|
|
suggestion, err := completer.FunctionSuggestion(name)
|
|
|
|
if err != nil {
|
2019-06-27 01:33:20 +00:00
|
|
|
h.HandleHTTPError(ctx, err, w)
|
2018-10-04 18:21:53 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
filteredParams := make(fluxParams)
|
|
|
|
for key, value := range suggestion.Params {
|
|
|
|
if key == "table" {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
|
|
|
filteredParams[key] = value
|
|
|
|
}
|
|
|
|
|
|
|
|
functions = append(functions, suggestionResponse{
|
|
|
|
Name: name,
|
|
|
|
Params: filteredParams,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
res := suggestionsResponse{Functions: functions}
|
|
|
|
|
|
|
|
if err := encodeResponse(ctx, w, http.StatusOK, res); err != nil {
|
2019-12-04 23:10:23 +00:00
|
|
|
logEncodingError(h.log, r, err)
|
2018-10-04 18:21:53 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// getFluxSuggestion returns the function parameters for the requested function
|
|
|
|
func (h *FluxHandler) getFluxSuggestion(w http.ResponseWriter, r *http.Request) {
|
2019-03-05 00:38:10 +00:00
|
|
|
span, r := tracing.ExtractFromHTTPRequest(r, "FluxHandler")
|
|
|
|
defer span.Finish()
|
|
|
|
|
2018-10-04 18:21:53 +00:00
|
|
|
ctx := r.Context()
|
|
|
|
name := httprouter.ParamsFromContext(ctx).ByName("name")
|
2020-03-09 18:30:43 +00:00
|
|
|
completer := h.FluxLanguageService.Completer()
|
2018-10-04 18:21:53 +00:00
|
|
|
|
|
|
|
suggestion, err := completer.FunctionSuggestion(name)
|
|
|
|
if err != nil {
|
2019-06-27 01:33:20 +00:00
|
|
|
h.HandleHTTPError(ctx, err, w)
|
2018-10-04 18:21:53 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
|
|
|
|
res := suggestionResponse{Name: name, Params: suggestion.Params}
|
|
|
|
if err := encodeResponse(ctx, w, http.StatusOK, res); err != nil {
|
2019-12-04 23:10:23 +00:00
|
|
|
logEncodingError(h.log, r, err)
|
2018-10-04 18:21:53 +00:00
|
|
|
return
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-09-04 21:08:00 +00:00
|
|
|
// PrometheusCollectors satisifies the prom.PrometheusCollector interface.
|
2019-04-10 21:08:22 +00:00
|
|
|
func (h *FluxHandler) PrometheusCollectors() []prom.Collector {
|
2018-09-04 21:08:00 +00:00
|
|
|
// TODO: gather and return relevant metrics.
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
2018-09-13 15:39:08 +00:00
|
|
|
var _ query.ProxyQueryService = (*FluxService)(nil)
|
|
|
|
|
2018-09-04 21:08:00 +00:00
|
|
|
// FluxService connects to Influx via HTTP using tokens to run queries.
type FluxService struct {
	// Addr is the base address of the influx server to query.
	Addr string
	// Token is the authorization token attached to each outgoing request.
	Token string
	// Name is sent as the User-Agent header when the request itself carries
	// no Source.
	Name string
	// InsecureSkipVerify is passed to NewClient; presumably it disables TLS
	// certificate verification — confirm against NewClient's implementation.
	InsecureSkipVerify bool
}
|
|
|
|
|
|
|
|
// Query runs a flux query against an influx server and sends the results to the io.Writer.
// Will use the token from the context over the token within the service struct.
func (s *FluxService) Query(ctx context.Context, w io.Writer, r *query.ProxyRequest) (flux.Statistics, error) {
	span, ctx := tracing.StartSpanFromContext(ctx)
	defer span.Finish()

	// Build the query endpoint URL: <Addr>/api/v2/query?orgID=<id>.
	u, err := NewURL(s.Addr, prefixQuery)
	if err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}
	params := url.Values{}
	params.Set(OrgID, r.Request.OrganizationID.String())
	u.RawQuery = params.Encode()

	// Convert the proxy request into the JSON body the endpoint expects.
	qreq, err := QueryRequestFromProxyRequest(r)
	if err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}
	var body bytes.Buffer
	if err := json.NewEncoder(&body).Encode(qreq); err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}

	hreq, err := http.NewRequest("POST", u.String(), &body)
	if err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}

	SetToken(s.Token, hreq)

	hreq.Header.Set("Content-Type", "application/json")
	hreq.Header.Set("Accept", "text/csv")
	// Identify the caller: the request's own Source wins over the service Name.
	if r.Request.Source != "" {
		hreq.Header.Add("User-Agent", r.Request.Source)
	} else if s.Name != "" {
		hreq.Header.Add("User-Agent", s.Name)
	}

	// Now that the request is all set, we can apply header mutators.
	if err := r.Request.ApplyOptions(hreq.Header); err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}

	hreq = hreq.WithContext(ctx)
	hc := NewClient(u.Scheme, s.InsecureSkipVerify)
	resp, err := hc.Do(hreq)
	if err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}
	defer resp.Body.Close()

	if err := CheckError(resp); err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}

	// Stream the CSV results straight through to the caller's writer.
	if _, err := io.Copy(w, resp.Body); err != nil {
		return flux.Statistics{}, tracing.LogError(span, err)
	}
	// Statistics are not collected over the HTTP transport; return zero stats.
	return flux.Statistics{}, nil
}
|
2018-09-13 18:21:19 +00:00
|
|
|
|
2019-03-26 03:05:44 +00:00
|
|
|
func (s FluxService) Check(ctx context.Context) check.Response {
|
|
|
|
return QueryHealthCheck(s.Addr, s.InsecureSkipVerify)
|
|
|
|
}
|
|
|
|
|
2018-09-13 18:21:19 +00:00
|
|
|
var _ query.QueryService = (*FluxQueryService)(nil)
|
|
|
|
|
2018-09-26 08:49:19 +00:00
|
|
|
// FluxQueryService implements query.QueryService by making HTTP requests to the /api/v2/query API endpoint.
type FluxQueryService struct {
	// Addr is the base address of the influx server to query.
	Addr string
	// Token is the authorization token attached to each outgoing request.
	Token string
	// Name is sent as the User-Agent header when the request itself carries
	// no Source.
	Name string
	// InsecureSkipVerify is passed to NewClient; presumably it disables TLS
	// certificate verification — confirm against NewClient's implementation.
	InsecureSkipVerify bool
}
|
|
|
|
|
|
|
|
// Query runs a flux query against an influx server and decodes the result
// into a flux.ResultIterator. The caller consumes the iterator, which reads
// from the live HTTP response body.
func (s *FluxQueryService) Query(ctx context.Context, r *query.Request) (flux.ResultIterator, error) {
	span, ctx := tracing.StartSpanFromContext(ctx)
	defer span.Finish()

	// Build the query endpoint URL: <Addr>/api/v2/query?orgID=<id>.
	u, err := NewURL(s.Addr, prefixQuery)
	if err != nil {
		return nil, tracing.LogError(span, err)
	}
	params := url.Values{}
	params.Set(OrgID, r.OrganizationID.String())
	u.RawQuery = params.Encode()

	// Wrap the plain request in a proxy request using the default CSV
	// dialect, since the HTTP endpoint responds in CSV.
	preq := &query.ProxyRequest{
		Request: *r,
		Dialect: csv.DefaultDialect(),
	}
	qreq, err := QueryRequestFromProxyRequest(preq)
	if err != nil {
		return nil, tracing.LogError(span, err)
	}
	var body bytes.Buffer
	if err := json.NewEncoder(&body).Encode(qreq); err != nil {
		return nil, tracing.LogError(span, err)
	}

	hreq, err := http.NewRequest("POST", u.String(), &body)
	if err != nil {
		return nil, tracing.LogError(span, err)
	}

	SetToken(s.Token, hreq)

	hreq.Header.Set("Content-Type", "application/json")
	hreq.Header.Set("Accept", "text/csv")
	// Identify the caller: the request's own Source wins over the service Name.
	if r.Source != "" {
		hreq.Header.Add("User-Agent", r.Source)
	} else if s.Name != "" {
		hreq.Header.Add("User-Agent", s.Name)
	}
	hreq = hreq.WithContext(ctx)

	// Now that the request is all set, we can apply header mutators.
	if err := r.ApplyOptions(hreq.Header); err != nil {
		return nil, tracing.LogError(span, err)
	}

	hc := NewClient(u.Scheme, s.InsecureSkipVerify)
	resp, err := hc.Do(hreq)
	if err != nil {
		return nil, tracing.LogError(span, err)
	}
	// Can't defer resp.Body.Close here because the CSV decoder depends on reading from resp.Body after this function returns.

	if err := CheckError(resp); err != nil {
		return nil, tracing.LogError(span, err)
	}

	decoder := csv.NewMultiResultDecoder(csv.ResultDecoderConfig{})
	itr, err := decoder.Decode(resp.Body)
	if err != nil {
		return nil, tracing.LogError(span, err)
	}

	return itr, nil
}
|
2019-02-21 04:42:29 +00:00
|
|
|
|
2019-03-26 03:05:44 +00:00
|
|
|
func (s FluxQueryService) Check(ctx context.Context) check.Response {
|
|
|
|
return QueryHealthCheck(s.Addr, s.InsecureSkipVerify)
|
|
|
|
}
|
|
|
|
|
2019-12-30 17:06:51 +00:00
|
|
|
// GetQueryResponse runs a flux query with common parameters and returns the response from the query service.
|
2021-06-17 13:18:55 +00:00
|
|
|
func GetQueryResponse(qr *QueryRequest, addr *url.URL, org, token string, headers ...string) (*http.Response, error) {
|
2019-12-30 17:06:51 +00:00
|
|
|
if len(headers)%2 != 0 {
|
|
|
|
return nil, fmt.Errorf("headers must be key value pairs")
|
|
|
|
}
|
2021-06-17 13:18:55 +00:00
|
|
|
u := *addr
|
|
|
|
u.Path = prefixQuery
|
2019-02-21 04:42:29 +00:00
|
|
|
params := url.Values{}
|
2019-08-06 04:20:26 +00:00
|
|
|
params.Set(Org, org)
|
2019-02-21 04:42:29 +00:00
|
|
|
u.RawQuery = params.Encode()
|
|
|
|
|
|
|
|
var body bytes.Buffer
|
|
|
|
if err := json.NewEncoder(&body).Encode(qr); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
req, err := http.NewRequest("POST", u.String(), &body)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
|
|
|
|
SetToken(token, req)
|
|
|
|
|
2019-12-30 17:06:51 +00:00
|
|
|
// Default headers.
|
2019-02-21 04:42:29 +00:00
|
|
|
req.Header.Set("Content-Type", "application/json")
|
|
|
|
req.Header.Set("Accept", "text/csv")
|
2019-12-30 17:06:51 +00:00
|
|
|
// Apply custom headers.
|
|
|
|
for i := 0; i < len(headers); i += 2 {
|
|
|
|
req.Header.Set(headers[i], headers[i+1])
|
|
|
|
}
|
2019-02-21 04:42:29 +00:00
|
|
|
|
|
|
|
insecureSkipVerify := false
|
2019-05-09 17:41:14 +00:00
|
|
|
hc := NewClient(u.Scheme, insecureSkipVerify)
|
2019-12-30 17:06:51 +00:00
|
|
|
return hc.Do(req)
|
|
|
|
}
|
2019-02-21 04:42:29 +00:00
|
|
|
|
2019-12-30 17:06:51 +00:00
|
|
|
// GetQueryResponseBody reads the body of a response from some query service.
|
|
|
|
// It also checks for errors in the response.
|
|
|
|
func GetQueryResponseBody(res *http.Response) ([]byte, error) {
|
2019-02-21 04:42:29 +00:00
|
|
|
if err := CheckError(res); err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
defer res.Body.Close()
|
|
|
|
return ioutil.ReadAll(res.Body)
|
|
|
|
}
|
2019-03-26 03:05:44 +00:00
|
|
|
|
2019-12-30 17:06:51 +00:00
|
|
|
// SimpleQuery runs a flux query with common parameters and returns CSV results.
|
2021-06-17 13:18:55 +00:00
|
|
|
func SimpleQuery(addr *url.URL, flux, org, token string, headers ...string) ([]byte, error) {
|
2020-01-15 15:03:40 +00:00
|
|
|
header := true
|
|
|
|
qr := &QueryRequest{
|
|
|
|
Type: "flux",
|
|
|
|
Query: flux,
|
|
|
|
Dialect: QueryDialect{
|
|
|
|
Header: &header,
|
|
|
|
Delimiter: ",",
|
|
|
|
CommentPrefix: "#",
|
|
|
|
DateTimeFormat: "RFC3339",
|
|
|
|
},
|
|
|
|
}
|
|
|
|
res, err := GetQueryResponse(qr, addr, org, token, headers...)
|
2019-12-30 17:06:51 +00:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return GetQueryResponseBody(res)
|
|
|
|
}
|
|
|
|
|
2019-03-26 03:05:44 +00:00
|
|
|
func QueryHealthCheck(url string, insecureSkipVerify bool) check.Response {
|
2019-05-09 17:41:14 +00:00
|
|
|
u, err := NewURL(url, "/health")
|
2019-03-26 03:05:44 +00:00
|
|
|
if err != nil {
|
|
|
|
return check.Response{
|
|
|
|
Name: "query health",
|
|
|
|
Status: check.StatusFail,
|
|
|
|
Message: errors.Wrap(err, "could not form URL").Error(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-05-09 17:41:14 +00:00
|
|
|
hc := NewClient(u.Scheme, insecureSkipVerify)
|
2019-03-26 03:05:44 +00:00
|
|
|
resp, err := hc.Get(u.String())
|
|
|
|
if err != nil {
|
|
|
|
return check.Response{
|
|
|
|
Name: "query health",
|
|
|
|
Status: check.StatusFail,
|
|
|
|
Message: errors.Wrap(err, "error getting response").Error(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
defer resp.Body.Close()
|
|
|
|
|
|
|
|
if resp.StatusCode/100 != 2 {
|
|
|
|
return check.Response{
|
|
|
|
Name: "query health",
|
|
|
|
Status: check.StatusFail,
|
|
|
|
Message: fmt.Sprintf("http error %v", resp.StatusCode),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
var healthResponse check.Response
|
|
|
|
if err = json.NewDecoder(resp.Body).Decode(&healthResponse); err != nil {
|
|
|
|
return check.Response{
|
|
|
|
Name: "query health",
|
|
|
|
Status: check.StatusFail,
|
|
|
|
Message: errors.Wrap(err, "error decoding JSON response").Error(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return healthResponse
|
|
|
|
}
|
2020-02-12 17:07:29 +00:00
|
|
|
|
|
|
|
// routingQueryService routes queries to specific query services based on their compiler type.
type routingQueryService struct {
	// InfluxQLService handles queries with compiler type of "influxql"
	InfluxQLService query.ProxyQueryService
	// DefaultService handles all other queries
	DefaultService query.ProxyQueryService
}
|
|
|
|
|
|
|
|
func (s routingQueryService) Check(ctx context.Context) check.Response {
|
|
|
|
// Produce combined check response
|
|
|
|
response := check.Response{
|
|
|
|
Name: "internal-routingQueryService",
|
|
|
|
Status: check.StatusPass,
|
|
|
|
}
|
|
|
|
def := s.DefaultService.Check(ctx)
|
|
|
|
influxql := s.InfluxQLService.Check(ctx)
|
|
|
|
if def.Status == check.StatusFail {
|
|
|
|
response.Status = def.Status
|
|
|
|
response.Message = def.Message
|
|
|
|
} else if influxql.Status == check.StatusFail {
|
|
|
|
response.Status = influxql.Status
|
|
|
|
response.Message = influxql.Message
|
|
|
|
}
|
|
|
|
response.Checks = []check.Response{def, influxql}
|
|
|
|
sort.Sort(response.Checks)
|
|
|
|
return response
|
|
|
|
}
|
|
|
|
|
|
|
|
func (s routingQueryService) Query(ctx context.Context, w io.Writer, req *query.ProxyRequest) (flux.Statistics, error) {
|
|
|
|
if req.Request.Compiler.CompilerType() == influxql.CompilerType {
|
|
|
|
return s.InfluxQLService.Query(ctx, w, req)
|
|
|
|
}
|
|
|
|
return s.DefaultService.Query(ctx, w, req)
|
|
|
|
}
|