package http

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
	"net/http/httptest"
	"testing"

	"github.com/influxdata/influxdb/v2/kit/platform"
	"github.com/influxdata/influxdb/v2/kit/platform/errors"

	"github.com/influxdata/httprouter"
	"github.com/influxdata/influxdb/v2"
	platcontext "github.com/influxdata/influxdb/v2/context"
	httpMock "github.com/influxdata/influxdb/v2/http/mock"
	kithttp "github.com/influxdata/influxdb/v2/kit/transport/http"
	"github.com/influxdata/influxdb/v2/kv"
	"github.com/influxdata/influxdb/v2/mock"
	"github.com/influxdata/influxdb/v2/tenant"
	platformtesting "github.com/influxdata/influxdb/v2/testing"
	"go.uber.org/zap/zaptest"
)

const (
	targetOneIDString = "0000000000000111"
	targetTwoIDString = "0000000000000222"
)

var (
	targetOneID = platformtesting.MustIDBase16(targetOneIDString)
	targetTwoID = platformtesting.MustIDBase16(targetTwoIDString)
)

// NewMockScraperBackend returns a ScraperBackend with mock services.
func NewMockScraperBackend(t *testing.T) *ScraperBackend {
	return &ScraperBackend{
		log: zaptest.NewLogger(t),

		ScraperStorageService:      &mock.ScraperTargetStoreService{},
		BucketService:              mock.NewBucketService(),
		OrganizationService:        mock.NewOrganizationService(),
		UserService:                mock.NewUserService(),
		UserResourceMappingService: &mock.UserResourceMappingService{},
		LabelService:               mock.NewLabelService(),
	}
}
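
// TestService_handleGetScraperTargets tests listing scraper targets over HTTP,
// including the case where the store is empty.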
func TestService_handleGetScraperTargets(t *testing.T) {
	type fields struct {
		ScraperTargetStoreService influxdb.ScraperTargetStoreService
		OrganizationService       influxdb.OrganizationService
		BucketService             influxdb.BucketService
	}

	type args struct {
		queryParams map[string][]string
	}

	type wants struct {
		statusCode  int
		contentType string
		body        string
	}

	tests := []struct {
		name   string
		fields fields
		args   args
		wants  wants
	}{
		{
			name: "get all scraper targets",
			fields: fields{
				OrganizationService: &mock.OrganizationService{
					FindOrganizationByIDF: func(ctx context.Context, id platform.ID) (*influxdb.Organization, error) {
						return &influxdb.Organization{
							ID:   platformtesting.MustIDBase16("0000000000000211"),
							Name: "org1",
						}, nil
					},
				},
				BucketService: &mock.BucketService{
					FindBucketByIDFn: func(ctx context.Context, id platform.ID) (*influxdb.Bucket, error) {
						return &influxdb.Bucket{
							ID:   platformtesting.MustIDBase16("0000000000000212"),
							Name: "bucket1",
						}, nil
					},
				},
				ScraperTargetStoreService: &mock.ScraperTargetStoreService{
					ListTargetsF: func(ctx context.Context, filter influxdb.ScraperTargetFilter) ([]influxdb.ScraperTarget, error) {
						return []influxdb.ScraperTarget{
							{
								ID:       targetOneID,
								Name:     "target-1",
								Type:     influxdb.PrometheusScraperType,
								URL:      "www.one.url",
								OrgID:    platformtesting.MustIDBase16("0000000000000211"),
								BucketID: platformtesting.MustIDBase16("0000000000000212"),
							},
							{
								ID:       targetTwoID,
								Name:     "target-2",
								Type:     influxdb.PrometheusScraperType,
								URL:      "www.two.url",
								OrgID:    platformtesting.MustIDBase16("0000000000000211"),
								BucketID: platformtesting.MustIDBase16("0000000000000212"),
							},
						}, nil
					},
				},
			},
			args: args{},
			wants: wants{
				statusCode:  http.StatusOK,
				contentType: "application/json; charset=utf-8",
				body: fmt.Sprintf(
					`
					{
					  "links": {
					    "self": "/api/v2/scrapers"
					  },
					  "configurations": [
					    {
					      "id": "%s",
					      "name": "target-1",
					      "bucket": "bucket1",
					      "bucketID": "0000000000000212",
					      "org": "org1",
					      "orgID": "0000000000000211",
					      "type": "prometheus",
					      "url": "www.one.url",
					      "links": {
					        "bucket": "/api/v2/buckets/0000000000000212",
					        "organization": "/api/v2/orgs/0000000000000211",
					        "self": "/api/v2/scrapers/0000000000000111",
					        "members": "/api/v2/scrapers/0000000000000111/members",
					        "owners": "/api/v2/scrapers/0000000000000111/owners"
					      }
					    },
					    {
					      "id": "%s",
					      "name": "target-2",
					      "bucket": "bucket1",
					      "bucketID": "0000000000000212",
					      "orgID": "0000000000000211",
					      "org": "org1",
					      "type": "prometheus",
					      "url": "www.two.url",
					      "links": {
					        "bucket": "/api/v2/buckets/0000000000000212",
					        "organization": "/api/v2/orgs/0000000000000211",
					        "self": "/api/v2/scrapers/0000000000000222",
					        "members": "/api/v2/scrapers/0000000000000222/members",
					        "owners": "/api/v2/scrapers/0000000000000222/owners"
					      }
					    }
					  ]
					}
					`,
					targetOneIDString,
					targetTwoIDString,
				),
			},
		},
		{
			name: "get all scraper targets when there are none",
			fields: fields{
				OrganizationService: &mock.OrganizationService{
					FindOrganizationByIDF: func(ctx context.Context, id platform.ID) (*influxdb.Organization, error) {
						return &influxdb.Organization{
							ID:   platformtesting.MustIDBase16("0000000000000211"),
							Name: "org1",
						}, nil
					},
				},
				BucketService: &mock.BucketService{
					FindBucketByIDFn: func(ctx context.Context, id platform.ID) (*influxdb.Bucket, error) {
						return &influxdb.Bucket{
							ID:   platformtesting.MustIDBase16("0000000000000212"),
							Name: "bucket1",
						}, nil
					},
				},
				ScraperTargetStoreService: &mock.ScraperTargetStoreService{
					ListTargetsF: func(ctx context.Context, filter influxdb.ScraperTargetFilter) ([]influxdb.ScraperTarget, error) {
						return []influxdb.ScraperTarget{}, nil
					},
				},
			},
			args: args{},
			wants: wants{
				statusCode:  http.StatusOK,
				contentType: "application/json; charset=utf-8",
				body: `
				{
				  "links": {
				    "self": "/api/v2/scrapers"
				  },
				  "configurations": []
				}
				`,
			},
		},
	}
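
	// Run each case against a handler built from the case's mocks, then
	// compare status code, content type, and JSON body.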
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scraperBackend := NewMockScraperBackend(t)
			scraperBackend.HTTPErrorHandler = kithttp.ErrorHandler(0)
			scraperBackend.ScraperStorageService = tt.fields.ScraperTargetStoreService
			scraperBackend.OrganizationService = tt.fields.OrganizationService
			scraperBackend.BucketService = tt.fields.BucketService
			h := NewScraperHandler(zaptest.NewLogger(t), scraperBackend)

			r := httptest.NewRequest("GET", "http://any.tld", nil)

			qp := r.URL.Query()
			for k, vs := range tt.args.queryParams {
				for _, v := range vs {
					qp.Add(k, v)
				}
			}
			r.URL.RawQuery = qp.Encode()

			w := httptest.NewRecorder()

			h.handleGetScraperTargets(w, r)

			res := w.Result()
			content := res.Header.Get("Content-Type")
			body, _ := ioutil.ReadAll(res.Body)

			if res.StatusCode != tt.wants.statusCode {
				t.Errorf("%q. handleGetScraperTargets() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode)
			}
			if tt.wants.contentType != "" && content != tt.wants.contentType {
				t.Errorf("%q. handleGetScraperTargets() = %v, want %v", tt.name, content, tt.wants.contentType)
			}
			if tt.wants.body != "" {
				if eq, diff, err := jsonEqual(string(body), tt.wants.body); err != nil {
					t.Errorf("%q. handleGetScraperTargets() error unmarshalling json: %v", tt.name, err)
				} else if !eq {
					t.Errorf("%q. handleGetScraperTargets() = ***%s***", tt.name, diff)
				}
			}
		})
	}
}
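
// TestService_handleGetScraperTarget tests fetching a single scraper target by ID.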
func TestService_handleGetScraperTarget(t *testing.T) {
	type fields struct {
		OrganizationService       influxdb.OrganizationService
		BucketService             influxdb.BucketService
		ScraperTargetStoreService influxdb.ScraperTargetStoreService
	}

	type args struct {
		id string
	}

	type wants struct {
		statusCode  int
		contentType string
		body        string
	}

	tests := []struct {
		name   string
		fields fields
		args   args
		wants  wants
	}{
		{
			name: "get a scraper target by id",
			fields: fields{
				OrganizationService: &mock.OrganizationService{
					FindOrganizationByIDF: func(ctx context.Context, id platform.ID) (*influxdb.Organization, error) {
						return &influxdb.Organization{
							ID:   platformtesting.MustIDBase16("0000000000000211"),
							Name: "org1",
						}, nil
					},
				},
				BucketService: &mock.BucketService{
					FindBucketByIDFn: func(ctx context.Context, id platform.ID) (*influxdb.Bucket, error) {
						return &influxdb.Bucket{
							ID:   platformtesting.MustIDBase16("0000000000000212"),
							Name: "bucket1",
						}, nil
					},
				},
				ScraperTargetStoreService: &mock.ScraperTargetStoreService{
					GetTargetByIDF: func(ctx context.Context, id platform.ID) (*influxdb.ScraperTarget, error) {
						if id == targetOneID {
							return &influxdb.ScraperTarget{
								ID:       targetOneID,
								Name:     "target-1",
								Type:     influxdb.PrometheusScraperType,
								URL:      "www.some.url",
								OrgID:    platformtesting.MustIDBase16("0000000000000211"),
								BucketID: platformtesting.MustIDBase16("0000000000000212"),
							}, nil
						}
						return nil, &errors.Error{
							Code: errors.ENotFound,
							Msg:  "scraper target is not found",
						}
					},
				},
			},
			args: args{
				id: targetOneIDString,
			},
			wants: wants{
				statusCode:  http.StatusOK,
				contentType: "application/json; charset=utf-8",
				body: fmt.Sprintf(
					`
					{
					  "id": "%s",
					  "name": "target-1",
					  "type": "prometheus",
					  "url": "www.some.url",
					  "bucket": "bucket1",
					  "bucketID": "0000000000000212",
					  "orgID": "0000000000000211",
					  "org": "org1",
					  "links": {
					    "bucket": "/api/v2/buckets/0000000000000212",
					    "organization": "/api/v2/orgs/0000000000000211",
					    "self": "/api/v2/scrapers/%s",
					    "members": "/api/v2/scrapers/%s/members",
					    "owners": "/api/v2/scrapers/%s/owners"
					  }
					}
					`,
					targetOneIDString, targetOneIDString, targetOneIDString, targetOneIDString,
				),
			},
		},
	}
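
	// Inject the target ID as a router parameter before invoking the handler.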
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scraperBackend := NewMockScraperBackend(t)
			scraperBackend.HTTPErrorHandler = kithttp.ErrorHandler(0)
			scraperBackend.ScraperStorageService = tt.fields.ScraperTargetStoreService
			scraperBackend.OrganizationService = tt.fields.OrganizationService
			scraperBackend.BucketService = tt.fields.BucketService
			h := NewScraperHandler(zaptest.NewLogger(t), scraperBackend)

			r := httptest.NewRequest("GET", "http://any.tld", nil)

			r = r.WithContext(context.WithValue(
				context.Background(),
				httprouter.ParamsKey,
				httprouter.Params{
					{
						Key:   "id",
						Value: tt.args.id,
					},
				}))

			w := httptest.NewRecorder()

			h.handleGetScraperTarget(w, r)

			res := w.Result()
			content := res.Header.Get("Content-Type")
			body, _ := ioutil.ReadAll(res.Body)

			if res.StatusCode != tt.wants.statusCode {
				t.Errorf("%q. handleGetScraperTarget() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode)
			}
			if tt.wants.contentType != "" && content != tt.wants.contentType {
				t.Errorf("%q. handleGetScraperTarget() = %v, want %v", tt.name, content, tt.wants.contentType)
			}
			if tt.wants.body != "" {
				if eq, diff, err := jsonEqual(string(body), tt.wants.body); err != nil {
					t.Errorf("%q. handleGetScraperTarget() error unmarshalling json: %v", tt.name, err)
				} else if !eq {
					t.Errorf("%q. handleGetScraperTarget() = ***%s***", tt.name, diff)
				}
			}
		})
	}
}
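
// TestService_handleDeleteScraperTarget tests deleting a scraper target by ID,
// including the not-found case.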
func TestService_handleDeleteScraperTarget(t *testing.T) {
	type fields struct {
		Service influxdb.ScraperTargetStoreService
	}

	type args struct {
		id string
	}

	type wants struct {
		statusCode  int
		contentType string
		body        string
	}

	tests := []struct {
		name   string
		fields fields
		args   args
		wants  wants
	}{
		{
			name: "delete a scraper target by id",
			fields: fields{
				Service: &mock.ScraperTargetStoreService{
					RemoveTargetF: func(ctx context.Context, id platform.ID) error {
						if id == targetOneID {
							return nil
						}

						return fmt.Errorf("wrong id")
					},
				},
			},
			args: args{
				id: targetOneIDString,
			},
			wants: wants{
				statusCode: http.StatusNoContent,
			},
		},
		{
			name: "scraper target not found",
			fields: fields{
				Service: &mock.ScraperTargetStoreService{
					RemoveTargetF: func(ctx context.Context, id platform.ID) error {
						return &errors.Error{
							Code: errors.ENotFound,
							Msg:  influxdb.ErrScraperTargetNotFound,
						}
					},
				},
			},
			args: args{
				id: targetTwoIDString,
			},
			wants: wants{
				statusCode: http.StatusNotFound,
			},
		},
	}
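
	// Route each delete through the handler with the ID as a router parameter.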
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scraperBackend := NewMockScraperBackend(t)
			scraperBackend.HTTPErrorHandler = kithttp.ErrorHandler(0)
			scraperBackend.ScraperStorageService = tt.fields.Service
			h := NewScraperHandler(zaptest.NewLogger(t), scraperBackend)

			r := httptest.NewRequest("GET", "http://any.tld", nil)

			r = r.WithContext(context.WithValue(
				context.Background(),
				httprouter.ParamsKey,
				httprouter.Params{
					{
						Key:   "id",
						Value: tt.args.id,
					},
				}))

			w := httptest.NewRecorder()

			h.handleDeleteScraperTarget(w, r)

			res := w.Result()
			content := res.Header.Get("Content-Type")
			body, _ := ioutil.ReadAll(res.Body)

			if res.StatusCode != tt.wants.statusCode {
				t.Errorf("%q. handleDeleteScraperTarget() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode)
			}
			if tt.wants.contentType != "" && content != tt.wants.contentType {
				t.Errorf("%q. handleDeleteScraperTarget() = %v, want %v", tt.name, content, tt.wants.contentType)
			}
			if tt.wants.body != "" {
				if eq, diff, err := jsonEqual(string(body), tt.wants.body); err != nil {
					t.Errorf("%q. handleDeleteScraperTarget() error unmarshalling json: %v", tt.name, err)
				} else if !eq {
					t.Errorf("%q. handleDeleteScraperTarget() = ***%s***", tt.name, diff)
				}
			}
		})
	}
}
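
// TestService_handlePostScraperTarget tests creating a scraper target from a
// JSON request body.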
func TestService_handlePostScraperTarget(t *testing.T) {
	type fields struct {
		OrganizationService       influxdb.OrganizationService
		BucketService             influxdb.BucketService
		ScraperTargetStoreService influxdb.ScraperTargetStoreService
	}

	type args struct {
		target *influxdb.ScraperTarget
	}

	type wants struct {
		statusCode  int
		contentType string
		body        string
	}

	tests := []struct {
		name   string
		fields fields
		args   args
		wants  wants
	}{
		{
			name: "create a new scraper target",
			fields: fields{
				OrganizationService: &mock.OrganizationService{
					FindOrganizationByIDF: func(ctx context.Context, id platform.ID) (*influxdb.Organization, error) {
						return &influxdb.Organization{
							ID:   platformtesting.MustIDBase16("0000000000000211"),
							Name: "org1",
						}, nil
					},
				},
				BucketService: &mock.BucketService{
					FindBucketByIDFn: func(ctx context.Context, id platform.ID) (*influxdb.Bucket, error) {
						return &influxdb.Bucket{
							ID:   platformtesting.MustIDBase16("0000000000000212"),
							Name: "bucket1",
						}, nil
					},
				},
				ScraperTargetStoreService: &mock.ScraperTargetStoreService{
					AddTargetF: func(ctx context.Context, st *influxdb.ScraperTarget, userID platform.ID) error {
						st.ID = targetOneID
						return nil
					},
				},
			},
			args: args{
				target: &influxdb.ScraperTarget{
					Name:     "hello",
					Type:     influxdb.PrometheusScraperType,
					BucketID: platformtesting.MustIDBase16("0000000000000212"),
					OrgID:    platformtesting.MustIDBase16("0000000000000211"),
					URL:      "www.some.url",
				},
			},
			wants: wants{
				statusCode:  http.StatusCreated,
				contentType: "application/json; charset=utf-8",
				body: fmt.Sprintf(
					`
					{
					  "id": "%s",
					  "name": "hello",
					  "type": "prometheus",
					  "url": "www.some.url",
					  "orgID": "0000000000000211",
					  "org": "org1",
					  "bucket": "bucket1",
					  "bucketID": "0000000000000212",
					  "links": {
					    "bucket": "/api/v2/buckets/0000000000000212",
					    "organization": "/api/v2/orgs/0000000000000211",
					    "self": "/api/v2/scrapers/%s",
					    "members": "/api/v2/scrapers/%s/members",
					    "owners": "/api/v2/scrapers/%s/owners"
					  }
					}
					`,
					targetOneIDString, targetOneIDString, targetOneIDString, targetOneIDString,
				),
			},
		},
	}
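
	// Marshal the target into the request body and post it with an empty
	// authorizer on the context.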
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scraperBackend := NewMockScraperBackend(t)
			scraperBackend.HTTPErrorHandler = kithttp.ErrorHandler(0)
			scraperBackend.ScraperStorageService = tt.fields.ScraperTargetStoreService
			scraperBackend.OrganizationService = tt.fields.OrganizationService
			scraperBackend.BucketService = tt.fields.BucketService
			h := NewScraperHandler(zaptest.NewLogger(t), scraperBackend)

			st, err := json.Marshal(tt.args.target)
			if err != nil {
				t.Fatalf("failed to marshal scraper target: %v", err)
			}

			r := httptest.NewRequest("GET", "http://any.tld", bytes.NewReader(st))
			r = r.WithContext(platcontext.SetAuthorizer(r.Context(), &influxdb.Authorization{}))
			w := httptest.NewRecorder()

			h.handlePostScraperTarget(w, r)

			res := w.Result()
			content := res.Header.Get("Content-Type")
			body, _ := ioutil.ReadAll(res.Body)

			if res.StatusCode != tt.wants.statusCode {
				t.Errorf("%q. handlePostScraperTarget() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode)
			}
			if tt.wants.contentType != "" && content != tt.wants.contentType {
				t.Errorf("%q. handlePostScraperTarget() = %v, want %v", tt.name, content, tt.wants.contentType)
			}
			if tt.wants.body != "" {
				if eq, diff, err := jsonEqual(string(body), tt.wants.body); err != nil {
					t.Errorf("%q. handlePostScraperTarget() error unmarshalling json: %v", tt.name, err)
				} else if !eq {
					t.Errorf("%q. handlePostScraperTarget() = ***%s***", tt.name, diff)
				}
			}
		})
	}
}
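
// TestService_handlePatchScraperTarget tests updating a scraper target,
// including the not-found case.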
func TestService_handlePatchScraperTarget(t *testing.T) {
	type fields struct {
		BucketService             influxdb.BucketService
		OrganizationService       influxdb.OrganizationService
		ScraperTargetStoreService influxdb.ScraperTargetStoreService
	}

	type args struct {
		id     string
		update *influxdb.ScraperTarget
	}

	type wants struct {
		statusCode  int
		contentType string
		body        string
	}

	tests := []struct {
		name   string
		fields fields
		args   args
		wants  wants
	}{
		{
			name: "update a scraper target",
			fields: fields{
				OrganizationService: &mock.OrganizationService{
					FindOrganizationByIDF: func(ctx context.Context, id platform.ID) (*influxdb.Organization, error) {
						return &influxdb.Organization{
							ID:   platformtesting.MustIDBase16("0000000000000211"),
							Name: "org1",
						}, nil
					},
				},
				BucketService: &mock.BucketService{
					FindBucketByIDFn: func(ctx context.Context, id platform.ID) (*influxdb.Bucket, error) {
						return &influxdb.Bucket{
							ID:   platformtesting.MustIDBase16("0000000000000212"),
							Name: "bucket1",
						}, nil
					},
				},
				ScraperTargetStoreService: &mock.ScraperTargetStoreService{
					UpdateTargetF: func(ctx context.Context, t *influxdb.ScraperTarget, userID platform.ID) (*influxdb.ScraperTarget, error) {
						if t.ID == targetOneID {
							return t, nil
						}

						return nil, &errors.Error{
							Code: errors.ENotFound,
							Msg:  "scraper target is not found",
						}
					},
				},
			},
			args: args{
				id: targetOneIDString,
				update: &influxdb.ScraperTarget{
					ID:       targetOneID,
					Name:     "name",
					BucketID: platformtesting.MustIDBase16("0000000000000212"),
					Type:     influxdb.PrometheusScraperType,
					URL:      "www.example.url",
					OrgID:    platformtesting.MustIDBase16("0000000000000211"),
				},
			},
			wants: wants{
				statusCode:  http.StatusOK,
				contentType: "application/json; charset=utf-8",
				body: fmt.Sprintf(
					`{
					  "id": "%s",
					  "name": "name",
					  "type": "prometheus",
					  "url": "www.example.url",
					  "org": "org1",
					  "orgID": "0000000000000211",
					  "bucket": "bucket1",
					  "bucketID": "0000000000000212",
					  "links": {
					    "bucket": "/api/v2/buckets/0000000000000212",
					    "organization": "/api/v2/orgs/0000000000000211",
					    "self": "/api/v2/scrapers/%s",
					    "members": "/api/v2/scrapers/%s/members",
					    "owners": "/api/v2/scrapers/%s/owners"
					  }
					}`,
					targetOneIDString, targetOneIDString, targetOneIDString, targetOneIDString,
				),
			},
		},
		{
			name: "scraper target not found",
			fields: fields{
				OrganizationService: &mock.OrganizationService{
					FindOrganizationByIDF: func(ctx context.Context, id platform.ID) (*influxdb.Organization, error) {
						return &influxdb.Organization{
							ID:   platformtesting.MustIDBase16("0000000000000211"),
							Name: "org1",
						}, nil
					},
				},
				BucketService: &mock.BucketService{
					FindBucketByIDFn: func(ctx context.Context, id platform.ID) (*influxdb.Bucket, error) {
						return &influxdb.Bucket{
							ID:   platformtesting.MustIDBase16("0000000000000212"),
							Name: "bucket1",
						}, nil
					},
				},
				ScraperTargetStoreService: &mock.ScraperTargetStoreService{
					UpdateTargetF: func(ctx context.Context, upd *influxdb.ScraperTarget, userID platform.ID) (*influxdb.ScraperTarget, error) {
						return nil, &errors.Error{
							Code: errors.ENotFound,
							Msg:  influxdb.ErrScraperTargetNotFound,
						}
					},
				},
			},
			args: args{
				id: targetOneIDString,
				update: &influxdb.ScraperTarget{
					ID:       targetOneID,
					Name:     "name",
					BucketID: platformtesting.MustIDBase16("0000000000000212"),
					Type:     influxdb.PrometheusScraperType,
					URL:      "www.example.url",
					OrgID:    platformtesting.MustIDBase16("0000000000000211"),
				},
			},
			wants: wants{
				statusCode: http.StatusNotFound,
			},
		},
	}
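
	// Each patch needs a JSON body, a router parameter, and an authorizer on
	// the request context.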
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			scraperBackend := NewMockScraperBackend(t)
			scraperBackend.HTTPErrorHandler = kithttp.ErrorHandler(0)
			scraperBackend.ScraperStorageService = tt.fields.ScraperTargetStoreService
			scraperBackend.OrganizationService = tt.fields.OrganizationService
			scraperBackend.BucketService = tt.fields.BucketService
			h := NewScraperHandler(zaptest.NewLogger(t), scraperBackend)

			var err error
			st := make([]byte, 0)
			if tt.args.update != nil {
				st, err = json.Marshal(*tt.args.update)
				if err != nil {
					t.Fatalf("failed to marshal scraper target: %v", err)
				}
			}

			r := httptest.NewRequest("GET", "http://any.tld", bytes.NewReader(st))

			r = r.WithContext(context.WithValue(
				context.Background(),
				httprouter.ParamsKey,
				httprouter.Params{
					{
						Key:   "id",
						Value: tt.args.id,
					},
				}))
			r = r.WithContext(platcontext.SetAuthorizer(r.Context(), &influxdb.Authorization{}))
			w := httptest.NewRecorder()

			h.handlePatchScraperTarget(w, r)

			res := w.Result()
			content := res.Header.Get("Content-Type")
			body, _ := ioutil.ReadAll(res.Body)

			if res.StatusCode != tt.wants.statusCode {
				t.Errorf("%q. handlePatchScraperTarget() = %v, want %v", tt.name, res.StatusCode, tt.wants.statusCode)
			}
			if tt.wants.contentType != "" && content != tt.wants.contentType {
				t.Errorf("%q. handlePatchScraperTarget() = %v, want %v", tt.name, content, tt.wants.contentType)
			}
			if tt.wants.body != "" {
				if eq, diff, err := jsonEqual(string(body), tt.wants.body); err != nil {
					t.Errorf("%q. handlePatchScraperTarget() error unmarshalling json: %v", tt.name, err)
				} else if !eq {
					t.Errorf("%q. handlePatchScraperTarget() = ***%s***", tt.name, diff)
				}
			}
		})
	}
}
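
// initScraperService wires a kv-backed scraper service to the HTTP handler,
// serves it from a test server, and returns a client that talks to it, for
// use by the shared conformance tests.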
func initScraperService(f platformtesting.TargetFields, t *testing.T) (influxdb.ScraperTargetStoreService, string, func()) {
	t.Helper()

	store := NewTestInmemStore(t)
	tenantStore := tenant.NewStore(store)
	tenantService := tenant.NewService(tenantStore)

	svc := kv.NewService(zaptest.NewLogger(t), store, tenantService)
	svc.IDGenerator = f.IDGenerator

	ctx := context.Background()
	for _, target := range f.Targets {
		if err := svc.PutTarget(ctx, target); err != nil {
			t.Fatalf("failed to populate scraper targets")
		}
	}
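
	// Create each fixture organization under its fixed ID by overriding the
	// store's ID generator.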
	for _, o := range f.Organizations {
		mock.SetIDForFunc(&tenantStore.OrgIDGen, o.ID, func() {
			if err := tenantService.CreateOrganization(ctx, o); err != nil {
				t.Fatalf("failed to populate orgs")
			}
		})
	}
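
	// Serve the scraper handler behind auth middleware and return a client
	// pointed at the test server.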
	scraperBackend := NewMockScraperBackend(t)
	scraperBackend.HTTPErrorHandler = kithttp.ErrorHandler(0)
	scraperBackend.ScraperStorageService = svc
	scraperBackend.OrganizationService = tenantService
	scraperBackend.BucketService = &mock.BucketService{
		FindBucketByIDFn: func(ctx context.Context, id platform.ID) (*influxdb.Bucket, error) {
			return &influxdb.Bucket{
				ID:   id,
				Name: "bucket1",
			}, nil
		},
	}

	handler := NewScraperHandler(zaptest.NewLogger(t), scraperBackend)
	server := httptest.NewServer(httpMock.NewAuthMiddlewareHandler(
		handler,
		&influxdb.Authorization{
			UserID: platformtesting.MustIDBase16("020f755c3c082002"),
			Token:  "tok",
		},
	))
	client := struct {
		influxdb.UserResourceMappingService
		influxdb.OrganizationService
		ScraperService
	}{
		UserResourceMappingService: tenantService,
		OrganizationService:        tenantService,
		ScraperService: ScraperService{
			Token: "tok",
			Addr:  server.URL,
		},
	}
	done := server.Close

	return &client, "", done
}
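
// TestScraperService runs the shared scraper service conformance tests
// against the HTTP transport.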
func TestScraperService(t *testing.T) {
	platformtesting.ScraperService(initScraperService, t)
}