package http

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"path"

	"github.com/influxdata/httprouter"
	"github.com/influxdata/influxdb/v2"
	pctx "github.com/influxdata/influxdb/v2/context"
	"github.com/influxdata/influxdb/v2/kit/platform"
	"github.com/influxdata/influxdb/v2/kit/platform/errors"
	"go.uber.org/zap"
)

const (
	prefixOrganizations = "/api/v2/orgs"
	prefixBuckets       = "/api/v2/buckets"
)

// ScraperBackend is all services and associated parameters required to construct
// the ScraperHandler.
type ScraperBackend struct {
	errors.HTTPErrorHandler
	log *zap.Logger

	ScraperStorageService      influxdb.ScraperTargetStoreService
	BucketService              influxdb.BucketService
	OrganizationService        influxdb.OrganizationService
	UserService                influxdb.UserService
	UserResourceMappingService influxdb.UserResourceMappingService
	LabelService               influxdb.LabelService
}

// NewScraperBackend returns a new instance of ScraperBackend.
func NewScraperBackend(log *zap.Logger, b *APIBackend) *ScraperBackend {
	return &ScraperBackend{
		HTTPErrorHandler: b.HTTPErrorHandler,
		log:              log,

		ScraperStorageService:      b.ScraperTargetStoreService,
		BucketService:              b.BucketService,
		OrganizationService:        b.OrganizationService,
		UserService:                b.UserService,
		UserResourceMappingService: b.UserResourceMappingService,
		LabelService:               b.LabelService,
	}
}

// ScraperHandler represents an HTTP API handler for scraper targets.
type ScraperHandler struct {
	*httprouter.Router
	errors.HTTPErrorHandler
	log                        *zap.Logger
	UserService                influxdb.UserService
	UserResourceMappingService influxdb.UserResourceMappingService
	LabelService               influxdb.LabelService
	ScraperStorageService      influxdb.ScraperTargetStoreService
	BucketService              influxdb.BucketService
	OrganizationService        influxdb.OrganizationService
}

const (
	prefixTargets          = "/api/v2/scrapers"
	targetsIDMembersPath   = prefixTargets + "/:id/members"
	targetsIDMembersIDPath = prefixTargets + "/:id/members/:userID"
	targetsIDOwnersPath    = prefixTargets + "/:id/owners"
	targetsIDOwnersIDPath  = prefixTargets + "/:id/owners/:userID"
	targetsIDLabelsPath    = prefixTargets + "/:id/labels"
	targetsIDLabelsIDPath  = prefixTargets + "/:id/labels/:lid"
)

// NewScraperHandler returns a new instance of ScraperHandler.
func NewScraperHandler(log *zap.Logger, b *ScraperBackend) *ScraperHandler {
	h := &ScraperHandler{
		Router:                     NewRouter(b.HTTPErrorHandler),
		HTTPErrorHandler:           b.HTTPErrorHandler,
		log:                        log,
		UserService:                b.UserService,
		UserResourceMappingService: b.UserResourceMappingService,
		LabelService:               b.LabelService,
		ScraperStorageService:      b.ScraperStorageService,
		BucketService:              b.BucketService,
		OrganizationService:        b.OrganizationService,
	}
	h.HandlerFunc("POST", prefixTargets, h.handlePostScraperTarget)
	h.HandlerFunc("GET", prefixTargets, h.handleGetScraperTargets)
	h.HandlerFunc("GET", prefixTargets+"/:id", h.handleGetScraperTarget)
	h.HandlerFunc("PATCH", prefixTargets+"/:id", h.handlePatchScraperTarget)
	h.HandlerFunc("DELETE", prefixTargets+"/:id", h.handleDeleteScraperTarget)

	memberBackend := MemberBackend{
		HTTPErrorHandler:           b.HTTPErrorHandler,
		log:                        b.log.With(zap.String("handler", "member")),
		ResourceType:               influxdb.ScraperResourceType,
		UserType:                   influxdb.Member,
		UserResourceMappingService: b.UserResourceMappingService,
		UserService:                b.UserService,
	}
	h.HandlerFunc("POST", targetsIDMembersPath, newPostMemberHandler(memberBackend))
	h.HandlerFunc("GET", targetsIDMembersPath, newGetMembersHandler(memberBackend))
	h.HandlerFunc("DELETE", targetsIDMembersIDPath, newDeleteMemberHandler(memberBackend))

	ownerBackend := MemberBackend{
		HTTPErrorHandler:           b.HTTPErrorHandler,
		log:                        b.log.With(zap.String("handler", "member")),
		ResourceType:               influxdb.ScraperResourceType,
		UserType:                   influxdb.Owner,
		UserResourceMappingService: b.UserResourceMappingService,
		UserService:                b.UserService,
	}
	h.HandlerFunc("POST", targetsIDOwnersPath, newPostMemberHandler(ownerBackend))
	h.HandlerFunc("GET", targetsIDOwnersPath, newGetMembersHandler(ownerBackend))
	h.HandlerFunc("DELETE", targetsIDOwnersIDPath, newDeleteMemberHandler(ownerBackend))

	labelBackend := &LabelBackend{
		HTTPErrorHandler: b.HTTPErrorHandler,
		log:              b.log.With(zap.String("handler", "label")),
		LabelService:     b.LabelService,
		ResourceType:     influxdb.ScraperResourceType,
	}
	h.HandlerFunc("GET", targetsIDLabelsPath, newGetLabelsHandler(labelBackend))
	h.HandlerFunc("POST", targetsIDLabelsPath, newPostLabelHandler(labelBackend))
	h.HandlerFunc("DELETE", targetsIDLabelsIDPath, newDeleteLabelHandler(labelBackend))

	return h
}
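
// A minimal wiring sketch (illustrative; apiBackend is a placeholder for an
// *APIBackend configured elsewhere). The handler serves HTTP via its embedded
// *httprouter.Router:
//
//	h := NewScraperHandler(zap.NewNop(), NewScraperBackend(zap.NewNop(), apiBackend))
//	_ = http.ListenAndServe(":8086", h)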

// handlePostScraperTarget is the HTTP handler for the POST /api/v2/scrapers route.
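//
// An illustrative request sketch; the JSON field names and the "prometheus"
// type are assumptions based on influxdb.ScraperTarget's JSON encoding, and
// the address, token, and IDs are placeholders:
//
//	curl -XPOST http://localhost:8086/api/v2/scrapers \
//	  -H 'Authorization: Token MY_TOKEN' \
//	  -H 'Content-Type: application/json' \
//	  -d '{"name":"my-scraper","type":"prometheus","url":"http://localhost:9090/metrics","orgID":"<org-id>","bucketID":"<bucket-id>"}'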
func (h *ScraperHandler) handlePostScraperTarget(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	req, err := decodeScraperTargetAddRequest(ctx, r)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	auth, err := pctx.GetAuthorizer(ctx)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	if err := h.ScraperStorageService.AddTarget(ctx, req, auth.GetUserID()); err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	h.log.Debug("Scraper created", zap.String("scraper", fmt.Sprint(req)))

	resp, err := h.newTargetResponse(ctx, *req)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	if err := encodeResponse(ctx, w, http.StatusCreated, resp); err != nil {
		logEncodingError(h.log, r, err)
		return
	}
}

// handleDeleteScraperTarget is the HTTP handler for the DELETE /api/v2/scrapers/:id route.
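//
// An illustrative request (address, token, and ID are placeholders):
//
//	curl -XDELETE http://localhost:8086/api/v2/scrapers/0000000000000001 \
//	  -H 'Authorization: Token MY_TOKEN'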
func (h *ScraperHandler) handleDeleteScraperTarget(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	id, err := decodeScraperTargetIDRequest(ctx, r)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	if err := h.ScraperStorageService.RemoveTarget(ctx, *id); err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	h.log.Debug("Scraper deleted", zap.String("scraperTargetID", fmt.Sprint(id)))

	w.WriteHeader(http.StatusNoContent)
}

// handlePatchScraperTarget is the HTTP handler for the PATCH /api/v2/scrapers/:id route.
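//
// Note: decodeScraperTargetUpdateRequest decodes the body into a full
// influxdb.ScraperTarget, so fields omitted from the PATCH body arrive as
// zero values rather than being left unchanged.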
func (h *ScraperHandler) handlePatchScraperTarget(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	update, err := decodeScraperTargetUpdateRequest(ctx, r)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	auth, err := pctx.GetAuthorizer(ctx)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	target, err := h.ScraperStorageService.UpdateTarget(ctx, update, auth.GetUserID())
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	h.log.Debug("Scraper updated", zap.String("scraper", fmt.Sprint(target)))

	resp, err := h.newTargetResponse(ctx, *target)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	if err := encodeResponse(ctx, w, http.StatusOK, resp); err != nil {
		logEncodingError(h.log, r, err)
		return
	}
}
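
// handleGetScraperTarget is the HTTP handler for the GET /api/v2/scrapers/:id route.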
func (h *ScraperHandler) handleGetScraperTarget(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	id, err := decodeScraperTargetIDRequest(ctx, r)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	target, err := h.ScraperStorageService.GetTargetByID(ctx, *id)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	h.log.Debug("Scraper retrieved", zap.String("scraper", fmt.Sprint(target)))

	resp, err := h.newTargetResponse(ctx, *target)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	if err := encodeResponse(ctx, w, http.StatusOK, resp); err != nil {
		logEncodingError(h.log, r, err)
		return
	}
}

type getScraperTargetsRequest struct {
	filter influxdb.ScraperTargetFilter
}
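
// decodeScraperTargetsRequest parses the supported query parameters: repeated
// "id" values (collected as map keys; the boolean value is unused), "name",
// and either "orgID" or "org".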
func decodeScraperTargetsRequest(ctx context.Context, r *http.Request) (*getScraperTargetsRequest, error) {
	qp := r.URL.Query()
	req := &getScraperTargetsRequest{}

	initialID := platform.InvalidID()
	if ids, ok := qp["id"]; ok {
		req.filter.IDs = make(map[platform.ID]bool)
		for _, id := range ids {
			i := initialID
			if err := i.DecodeFromString(id); err != nil {
				return nil, err
			}
			req.filter.IDs[i] = false
		}
	}
	if name := qp.Get("name"); name != "" {
		req.filter.Name = &name
	}
	if orgID := qp.Get("orgID"); orgID != "" {
		id := platform.InvalidID()
		if err := id.DecodeFromString(orgID); err != nil {
			return nil, err
		}
		req.filter.OrgID = &id
	} else if org := qp.Get("org"); org != "" {
		req.filter.Org = &org
	}

	return req, nil
}

// handleGetScraperTargets is the HTTP handler for the GET /api/v2/scrapers route.
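//
// An illustrative request (address and token are placeholders; the query
// parameters are those parsed by decodeScraperTargetsRequest):
//
//	curl 'http://localhost:8086/api/v2/scrapers?org=my-org&name=my-scraper' \
//	  -H 'Authorization: Token MY_TOKEN'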
func (h *ScraperHandler) handleGetScraperTargets(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	req, err := decodeScraperTargetsRequest(ctx, r)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	targets, err := h.ScraperStorageService.ListTargets(ctx, req.filter)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}
	h.log.Debug("Scrapers retrieved", zap.String("scrapers", fmt.Sprint(targets)))

	resp, err := h.newListTargetsResponse(ctx, targets)
	if err != nil {
		h.HandleHTTPError(ctx, err, w)
		return
	}

	if err := encodeResponse(ctx, w, http.StatusOK, resp); err != nil {
		logEncodingError(h.log, r, err)
		return
	}
}
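
// decodeScraperTargetUpdateRequest decodes the request body into a full
// influxdb.ScraperTarget and stamps it with the ":id" route parameter.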
func decodeScraperTargetUpdateRequest(ctx context.Context, r *http.Request) (*influxdb.ScraperTarget, error) {
	update := &influxdb.ScraperTarget{}
	if err := json.NewDecoder(r.Body).Decode(update); err != nil {
		return nil, err
	}
	id, err := decodeScraperTargetIDRequest(ctx, r)
	if err != nil {
		return nil, err
	}
	update.ID = *id
	return update, nil
}

func decodeScraperTargetAddRequest(ctx context.Context, r *http.Request) (*influxdb.ScraperTarget, error) {
	req := &influxdb.ScraperTarget{}
	if err := json.NewDecoder(r.Body).Decode(req); err != nil {
		return nil, err
	}
	return req, nil
}
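
// decodeScraperTargetIDRequest extracts and validates the ":id" route
// parameter from the request context.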
func decodeScraperTargetIDRequest(ctx context.Context, r *http.Request) (*platform.ID, error) {
	params := httprouter.ParamsFromContext(ctx)
	id := params.ByName("id")
	if id == "" {
		return nil, &errors.Error{
			Code: errors.EInvalid,
			Msg:  "url missing id",
		}
	}

	var i platform.ID
	if err := i.DecodeFromString(id); err != nil {
		return nil, err
	}

	return &i, nil
}

// ScraperService connects to Influx via HTTP using tokens to manage scraper targets.
type ScraperService struct {
	Addr               string
	Token              string
	InsecureSkipVerify bool
	// OpPrefix is prepended to the operation name in errors reported for
	// invalid IDs.
	OpPrefix string
}
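
// A minimal client usage sketch (illustrative; the address and token are
// placeholders):
//
//	svc := &ScraperService{Addr: "http://localhost:8086", Token: "MY_TOKEN"}
//	targets, err := svc.ListTargets(context.Background(), influxdb.ScraperTargetFilter{})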

// ListTargets returns a list of all scraper targets.
func (s *ScraperService) ListTargets(ctx context.Context, filter influxdb.ScraperTargetFilter) ([]influxdb.ScraperTarget, error) {
	url, err := NewURL(s.Addr, prefixTargets)
	if err != nil {
		return nil, err
	}

	query := url.Query()
	if filter.IDs != nil {
		for id := range filter.IDs {
			query.Add("id", id.String())
		}
	}
	if filter.Name != nil {
		query.Set("name", *filter.Name)
	}
	if filter.OrgID != nil {
		query.Set("orgID", filter.OrgID.String())
	}
	if filter.Org != nil {
		query.Set("org", *filter.Org)
	}

	req, err := http.NewRequest("GET", url.String(), nil)
	if err != nil {
		return nil, err
	}

	req.URL.RawQuery = query.Encode()
	SetToken(s.Token, req)

	hc := NewClient(url.Scheme, s.InsecureSkipVerify)
	resp, err := hc.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()
	if err := CheckError(resp); err != nil {
		return nil, err
	}

	var targetsResp getTargetsResponse
	if err := json.NewDecoder(resp.Body).Decode(&targetsResp); err != nil {
		return nil, err
	}

	targets := make([]influxdb.ScraperTarget, len(targetsResp.Targets))
	for k, v := range targetsResp.Targets {
		targets[k] = v.ScraperTarget
	}

	return targets, nil
}

// UpdateTarget updates a single scraper target with the given changeset and
// returns the new target state after the update.
func (s *ScraperService) UpdateTarget(ctx context.Context, update *influxdb.ScraperTarget, userID platform.ID) (*influxdb.ScraperTarget, error) {
	if !update.ID.Valid() {
		return nil, &errors.Error{
			Code: errors.EInvalid,
			Op:   s.OpPrefix + influxdb.OpUpdateTarget,
			Msg:  "provided scraper target ID has invalid format",
		}
	}
	url, err := NewURL(s.Addr, targetIDPath(update.ID))
	if err != nil {
		return nil, err
	}

	octets, err := json.Marshal(update)
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("PATCH", url.String(), bytes.NewReader(octets))
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	SetToken(s.Token, req)
	hc := NewClient(url.Scheme, s.InsecureSkipVerify)

	resp, err := hc.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if err := CheckError(resp); err != nil {
		return nil, err
	}
	var targetResp targetResponse
	if err := json.NewDecoder(resp.Body).Decode(&targetResp); err != nil {
		return nil, err
	}

	return &targetResp.ScraperTarget, nil
}

// AddTarget creates a new scraper target and sets target.ID with the new identifier.
func (s *ScraperService) AddTarget(ctx context.Context, target *influxdb.ScraperTarget, userID platform.ID) error {
	url, err := NewURL(s.Addr, prefixTargets)
	if err != nil {
		return err
	}

	if !target.OrgID.Valid() {
		return &errors.Error{
			Code: errors.EInvalid,
			Msg:  "provided organization ID has invalid format",
			Op:   s.OpPrefix + influxdb.OpAddTarget,
		}
	}
	if !target.BucketID.Valid() {
		return &errors.Error{
			Code: errors.EInvalid,
			Msg:  "provided bucket ID has invalid format",
			Op:   s.OpPrefix + influxdb.OpAddTarget,
		}
	}

	octets, err := json.Marshal(target)
	if err != nil {
		return err
	}

	req, err := http.NewRequest("POST", url.String(), bytes.NewReader(octets))
	if err != nil {
		return err
	}

	req.Header.Set("Content-Type", "application/json")
	SetToken(s.Token, req)

	hc := NewClient(url.Scheme, s.InsecureSkipVerify)

	resp, err := hc.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	// TODO(jsternberg): Should this check for a 201 explicitly?
	if err := CheckError(resp); err != nil {
		return err
	}

	targetResp := new(targetResponse)
	if err := json.NewDecoder(resp.Body).Decode(targetResp); err != nil {
		return err
	}
	// Copy the server-assigned fields (including the new ID) back to the
	// caller's target, as documented above.
	*target = targetResp.ScraperTarget

	return nil
}

// RemoveTarget removes a scraper target by ID.
func (s *ScraperService) RemoveTarget(ctx context.Context, id platform.ID) error {
	url, err := NewURL(s.Addr, targetIDPath(id))
	if err != nil {
		return err
	}

	req, err := http.NewRequest("DELETE", url.String(), nil)
	if err != nil {
		return err
	}
	SetToken(s.Token, req)

	hc := NewClient(url.Scheme, s.InsecureSkipVerify)
	resp, err := hc.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	return CheckErrorStatus(http.StatusNoContent, resp)
}

// GetTargetByID returns a single target by ID.
func (s *ScraperService) GetTargetByID(ctx context.Context, id platform.ID) (*influxdb.ScraperTarget, error) {
	url, err := NewURL(s.Addr, targetIDPath(id))
	if err != nil {
		return nil, err
	}

	req, err := http.NewRequest("GET", url.String(), nil)
	if err != nil {
		return nil, err
	}
	SetToken(s.Token, req)

	hc := NewClient(url.Scheme, s.InsecureSkipVerify)
	resp, err := hc.Do(req)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close()

	if err := CheckError(resp); err != nil {
		return nil, err
	}

	var targetResp targetResponse
	if err := json.NewDecoder(resp.Body).Decode(&targetResp); err != nil {
		return nil, err
	}

	return &targetResp.ScraperTarget, nil
}

func targetIDPath(id platform.ID) string {
	return path.Join(prefixTargets, id.String())
}

type getTargetsLinks struct {
	Self string `json:"self"`
}

type getTargetsResponse struct {
	Links   getTargetsLinks  `json:"links"`
	Targets []targetResponse `json:"configurations"`
}

type targetLinks struct {
	Self         string `json:"self"`
	Bucket       string `json:"bucket,omitempty"`
	Organization string `json:"organization,omitempty"`
	Members      string `json:"members"`
	Owners       string `json:"owners"`
}

type targetResponse struct {
	influxdb.ScraperTarget
	Org    string      `json:"org,omitempty"`
	Bucket string      `json:"bucket,omitempty"`
	Links  targetLinks `json:"links"`
}
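
// An illustrative (non-normative) encoding of a targetResponse; the embedded
// influxdb.ScraperTarget fields are flattened alongside "org", "bucket", and
// "links", and the ID values here are placeholders:
//
//	{
//	  "id": "0000000000000001",
//	  "name": "my-scraper",
//	  "org": "my-org",
//	  "bucket": "my-bucket",
//	  "links": {
//	    "self": "/api/v2/scrapers/0000000000000001",
//	    "members": "/api/v2/scrapers/0000000000000001/members",
//	    "owners": "/api/v2/scrapers/0000000000000001/owners"
//	  }
//	}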

func (h *ScraperHandler) newListTargetsResponse(ctx context.Context, targets []influxdb.ScraperTarget) (getTargetsResponse, error) {
	res := getTargetsResponse{
		Links: getTargetsLinks{
			Self: prefixTargets,
		},
		Targets: make([]targetResponse, 0, len(targets)),
	}

	for _, target := range targets {
		resp, err := h.newTargetResponse(ctx, target)
		if err != nil {
			return res, err
		}
		res.Targets = append(res.Targets, resp)
	}

	return res, nil
}
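
// newTargetResponse resolves the target's bucket and organization names for
// the response; if either lookup fails, the corresponding ID is reported as
// invalid rather than surfacing the error.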
func (h *ScraperHandler) newTargetResponse(ctx context.Context, target influxdb.ScraperTarget) (targetResponse, error) {
	res := targetResponse{
		Links: targetLinks{
			Self:    targetIDPath(target.ID),
			Members: fmt.Sprintf("/api/v2/scrapers/%s/members", target.ID),
			Owners:  fmt.Sprintf("/api/v2/scrapers/%s/owners", target.ID),
		},
		ScraperTarget: target,
	}
	bucket, err := h.BucketService.FindBucketByID(ctx, target.BucketID)
	if err == nil {
		res.Bucket = bucket.Name
		res.BucketID = bucket.ID
		res.Links.Bucket = bucketIDPath(bucket.ID)
	} else {
		res.BucketID = platform.InvalidID()
	}

	org, err := h.OrganizationService.FindOrganizationByID(ctx, target.OrgID)
	if err == nil {
		res.Org = org.Name
		res.OrgID = org.ID
		res.Links.Organization = organizationIDPath(org.ID)
	} else {
		res.OrgID = platform.InvalidID()
	}

	return res, nil
}

func organizationIDPath(id platform.ID) string {
	return path.Join(prefixOrganizations, id.String())
}

func bucketIDPath(id platform.ID) string {
	return path.Join(prefixBuckets, id.String())
}