package testing

import (
	"bytes"
	"context"
	"sort"
	"testing"

	"github.com/google/go-cmp/cmp"
	"github.com/influxdata/influxdb/v2"
	"github.com/influxdata/influxdb/v2/mock"
)

const (
	targetOneID   = "020f755c3c082000"
	targetTwoID   = "020f755c3c082001"
	targetThreeID = "020f755c3c082002"
)

var (
	target1 = influxdb.ScraperTarget{
		Name:     "name1",
		Type:     influxdb.PrometheusScraperType,
		OrgID:    idOne,
		BucketID: idOne,
		URL:      "url1",
		ID:       MustIDBase16(targetOneID),
	}
	target2 = influxdb.ScraperTarget{
		Name:     "name2",
		Type:     influxdb.PrometheusScraperType,
		OrgID:    idTwo,
		BucketID: idTwo,
		URL:      "url2",
		ID:       MustIDBase16(targetTwoID),
	}
	target3 = influxdb.ScraperTarget{
		Name:     "name3",
		Type:     influxdb.PrometheusScraperType,
		OrgID:    idOne,
		BucketID: idThree,
		URL:      "url3",
		ID:       MustIDBase16(targetThreeID),
	}
	org1 = influxdb.Organization{
		ID:   idOne,
		Name: "org1",
	}
	org2 = influxdb.Organization{
		ID:   idTwo,
		Name: "org2",
	}
)

// TargetFields includes the IDGenerator, scraper targets, and organizations
// used to populate a ScraperTargetStoreService under test.
type TargetFields struct {
	IDGenerator   influxdb.IDGenerator
	Targets       []*influxdb.ScraperTarget
	Organizations []*influxdb.Organization
}

// targetCmpOptions compares byte slices by value and sorts scraper targets by ID
// so that list comparisons are insensitive to the order a store returns them in.
var targetCmpOptions = cmp.Options{
	cmp.Comparer(func(x, y []byte) bool {
		return bytes.Equal(x, y)
	}),
	cmp.Transformer("Sort", func(in []influxdb.ScraperTarget) []influxdb.ScraperTarget {
		out := append([]influxdb.ScraperTarget(nil), in...) // Copy input to avoid mutating it
		sort.Slice(out, func(i, j int) bool {
			return out[i].ID.String() > out[j].ID.String()
		})
		return out
	}),
}

// ScraperService tests all the service functions.
func ScraperService(
	init func(TargetFields, *testing.T) (influxdb.ScraperTargetStoreService, string, func()), t *testing.T,
) {
	t.Helper()
	tests := []struct {
		name string
		fn   func(init func(TargetFields, *testing.T) (influxdb.ScraperTargetStoreService, string, func()),
			t *testing.T)
	}{
		{
			name: "AddTarget",
			fn:   AddTarget,
		},
		{
			name: "ListTargets",
			fn:   ListTargets,
		},
		{
			name: "GetTargetByID",
			fn:   GetTargetByID,
		},
		{
			name: "RemoveTarget",
			fn:   RemoveTarget,
		},
		{
			name: "UpdateTarget",
			fn:   UpdateTarget,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			tt := tt
			t.Parallel()
			tt.fn(init, t)
		})
	}
}

// AddTarget testing.
func AddTarget(
	init func(TargetFields, *testing.T) (influxdb.ScraperTargetStoreService, string, func()),
	t *testing.T,
) {
	t.Helper()
	type args struct {
		userID influxdb.ID
		target *influxdb.ScraperTarget
	}
	type wants struct {
		err     error
		targets []influxdb.ScraperTarget
	}
	tests := []struct {
		name   string
		fields TargetFields
		args   args
		wants  wants
	}{
		{
			name: "create targets with empty set",
			fields: TargetFields{
				IDGenerator:   mock.NewIDGenerator(targetOneID, t),
				Targets:       []*influxdb.ScraperTarget{},
				Organizations: []*influxdb.Organization{&org1},
			},
			args: args{
				userID: MustIDBase16(threeID),
				target: &influxdb.ScraperTarget{
					Name:     "name1",
					Type:     influxdb.PrometheusScraperType,
					OrgID:    idOne,
					BucketID: idOne,
					URL:      "url1",
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					{
						Name:     "name1",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idOne,
						BucketID: idOne,
						URL:      "url1",
						ID:       MustIDBase16(targetOneID),
					},
				},
			},
		},
		{
			name: "create target with invalid org id",
			fields: TargetFields{
				IDGenerator:   mock.NewIDGenerator(targetTwoID, t),
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						Name:     "name1",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idOne,
						BucketID: idOne,
						URL:      "url1",
						ID:       MustIDBase16(targetOneID),
					},
				},
			},
			args: args{
				target: &influxdb.ScraperTarget{
					ID:       MustIDBase16(targetTwoID),
					Name:     "name2",
					Type:     influxdb.PrometheusScraperType,
					BucketID: idTwo,
					URL:      "url2",
				},
			},
			wants: wants{
				err: &influxdb.Error{
					Code: influxdb.EInvalid,
					Msg:  "provided organization ID has invalid format",
					Op:   influxdb.OpAddTarget,
				},
				targets: []influxdb.ScraperTarget{
					{
						Name:     "name1",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idOne,
						BucketID: idOne,
						URL:      "url1",
						ID:       MustIDBase16(targetOneID),
					},
				},
			},
		},
		{
			name: "create target with invalid bucket id",
			fields: TargetFields{
				IDGenerator:   mock.NewIDGenerator(targetTwoID, t),
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						Name:     "name1",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idOne,
						BucketID: idOne,
						URL:      "url1",
						ID:       MustIDBase16(targetOneID),
					},
				},
			},
			args: args{
				target: &influxdb.ScraperTarget{
					ID:    MustIDBase16(targetTwoID),
					Name:  "name2",
					Type:  influxdb.PrometheusScraperType,
					OrgID: idTwo,
					URL:   "url2",
				},
			},
			wants: wants{
				err: &influxdb.Error{
					Code: influxdb.EInvalid,
					Msg:  "provided bucket ID has invalid format",
					Op:   influxdb.OpAddTarget,
				},
				targets: []influxdb.ScraperTarget{
					{
						Name:     "name1",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idOne,
						BucketID: idOne,
						URL:      "url1",
						ID:       MustIDBase16(targetOneID),
					},
				},
			},
		},
		{
			name: "basic create target",
			fields: TargetFields{
				IDGenerator: mock.NewIDGenerator(targetTwoID, t),
				Targets: []*influxdb.ScraperTarget{
					{
						Name:     "name1",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idOne,
						BucketID: idOne,
						URL:      "url1",
						ID:       MustIDBase16(targetOneID),
					},
				},
				Organizations: []*influxdb.Organization{&org1, &org2},
			},
			args: args{
				userID: MustIDBase16(threeID),
				target: &influxdb.ScraperTarget{
					ID:       MustIDBase16(targetTwoID),
					Name:     "name2",
					Type:     influxdb.PrometheusScraperType,
					OrgID:    idTwo,
					BucketID: idTwo,
					URL:      "url2",
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					{
						Name:     "name1",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idOne,
						BucketID: idOne,
						URL:      "url1",
						ID:       MustIDBase16(targetOneID),
					},
					{
						Name:     "name2",
						Type:     influxdb.PrometheusScraperType,
						OrgID:    idTwo,
						BucketID: idTwo,
						URL:      "url2",
						ID:       MustIDBase16(targetTwoID),
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s, opPrefix, done := init(tt.fields, t)
			defer done()
			ctx := context.Background()
			err := s.AddTarget(ctx, tt.args.target, tt.args.userID)
			diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t)
			defer s.RemoveTarget(ctx, tt.args.target.ID)

			targets, err := s.ListTargets(ctx, influxdb.ScraperTargetFilter{})
			if err != nil {
				t.Fatalf("failed to retrieve scraper targets: %v", err)
			}
			if diff := cmp.Diff(targets, tt.wants.targets, targetCmpOptions...); diff != "" {
				t.Errorf("scraper targets are different -got/+want\ndiff %s", diff)
			}
		})
	}
}

// ListTargets testing
func ListTargets(
	init func(TargetFields, *testing.T) (influxdb.ScraperTargetStoreService, string, func()),
	t *testing.T,
) {
	t.Helper()
	type args struct {
		filter influxdb.ScraperTargetFilter
	}
	type wants struct {
		targets []influxdb.ScraperTarget
		err     error
	}

	tests := []struct {
		name   string
		fields TargetFields
		args   args
		wants  wants
	}{
		{
			name: "get all targets",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{
					&org1,
					&org2,
				},
				Targets: []*influxdb.ScraperTarget{
					&target1,
					&target2,
					&target3,
				},
			},
			args: args{
				filter: influxdb.ScraperTargetFilter{},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					target1,
					target2,
					target3,
				},
			},
		},
		{
			name: "filter by name",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{
					&org1,
					&org2,
				},
				Targets: []*influxdb.ScraperTarget{
					&target1,
					&target2,
					&target3,
				},
			},
			args: args{
				filter: influxdb.ScraperTargetFilter{
					Name: strPtr(target2.Name),
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					target2,
				},
			},
		},
		{
			name: "filter by id",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{
					&org1,
					&org2,
				},
				Targets: []*influxdb.ScraperTarget{
					&target1,
					&target2,
					&target3,
				},
			},
			args: args{
				filter: influxdb.ScraperTargetFilter{
					IDs: map[influxdb.ID]bool{target2.ID: false},
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					target2,
				},
			},
		},
		{
			name: "filter targets by orgID",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{
					&org1,
					&org2,
				},
				Targets: []*influxdb.ScraperTarget{
					&target1,
					&target2,
					&target3,
				},
			},
			args: args{
				filter: influxdb.ScraperTargetFilter{
					OrgID: idPtr(idOne),
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					target1,
					target3,
				},
			},
		},
		{
			name: "filter targets by orgID that does not exist",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{
					&org2,
				},
				Targets: []*influxdb.ScraperTarget{
					&target1,
					&target2,
					&target3,
				},
			},
			args: args{
				filter: influxdb.ScraperTargetFilter{
					OrgID: idPtr(idOne),
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{},
				err: &influxdb.Error{
					Code: influxdb.ENotFound,
					Msg:  "organization not found",
				},
			},
		},
		{
			name: "filter targets by org name",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{
					&org1,
					&org2,
				},
				Targets: []*influxdb.ScraperTarget{
					&target1,
					&target2,
					&target3,
				},
			},
			args: args{
				filter: influxdb.ScraperTargetFilter{
					Org: strPtr("org1"),
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					target1,
					target3,
				},
			},
		},
		{
			name: "filter targets by org name that does not exist",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{
					&org1,
				},
				Targets: []*influxdb.ScraperTarget{
					&target1,
					&target2,
					&target3,
				},
			},
			args: args{
				filter: influxdb.ScraperTargetFilter{
					Org: strPtr("org2"),
				},
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{},
				err: &influxdb.Error{
					Code: influxdb.ENotFound,
					Msg:  `organization name "org2" not found`,
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s, opPrefix, done := init(tt.fields, t)
			defer done()
			ctx := context.Background()
			targets, err := s.ListTargets(ctx, tt.args.filter)
			diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t)

			if diff := cmp.Diff(targets, tt.wants.targets, targetCmpOptions...); diff != "" {
				t.Errorf("targets are different -got/+want\ndiff %s", diff)
			}
		})
	}
}

// GetTargetByID testing
func GetTargetByID(
	init func(TargetFields, *testing.T) (influxdb.ScraperTargetStoreService, string, func()),
	t *testing.T,
) {
	t.Helper()
	type args struct {
		id influxdb.ID
	}
	type wants struct {
		err    error
		target *influxdb.ScraperTarget
	}

	tests := []struct {
		name   string
		fields TargetFields
		args   args
		wants  wants
	}{
		{
			name: "basic find target by id",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						Name:     "target1",
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						Name:     "target2",
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
			args: args{
				id: MustIDBase16(targetTwoID),
			},
			wants: wants{
				target: &influxdb.ScraperTarget{
					ID:       MustIDBase16(targetTwoID),
					Name:     "target2",
					OrgID:    idOne,
					BucketID: idOne,
				},
			},
		},
		{
			name: "find target by id not found",
			fields: TargetFields{
				Targets: []*influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						Name:     "target1",
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						Name:     "target2",
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
			args: args{
				id: MustIDBase16(threeID),
			},
			wants: wants{
				err: &influxdb.Error{
					Code: influxdb.ENotFound,
					Op:   influxdb.OpGetTargetByID,
					Msg:  "scraper target is not found",
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s, opPrefix, done := init(tt.fields, t)
			defer done()
			ctx := context.Background()

			target, err := s.GetTargetByID(ctx, tt.args.id)
			diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t)

			if diff := cmp.Diff(target, tt.wants.target, targetCmpOptions...); diff != "" {
				t.Errorf("target is different -got/+want\ndiff %s", diff)
			}
		})
	}
}

// RemoveTarget testing
func RemoveTarget(init func(TargetFields, *testing.T) (influxdb.ScraperTargetStoreService, string, func()),
	t *testing.T) {
	t.Helper()
	type args struct {
		ID     influxdb.ID
		userID influxdb.ID
	}
	type wants struct {
		err     error
		targets []influxdb.ScraperTarget
	}
	tests := []struct {
		name   string
		fields TargetFields
		args   args
		wants  wants
	}{
		{
			name: "delete targets using existing id",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
			args: args{
				ID:     MustIDBase16(targetOneID),
				userID: MustIDBase16(threeID),
			},
			wants: wants{
				targets: []influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetTwoID),
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
		},
		{
			name: "delete targets using id that does not exist",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
			args: args{
				ID:     MustIDBase16(targetThreeID),
				userID: MustIDBase16(threeID),
			},
			wants: wants{
				err: &influxdb.Error{
					Code: influxdb.ENotFound,
					Op:   influxdb.OpRemoveTarget,
					Msg:  "scraper target is not found",
				},
				targets: []influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s, opPrefix, done := init(tt.fields, t)
			defer done()
			ctx := context.Background()
			err := s.RemoveTarget(ctx, tt.args.ID)
			diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t)

			targets, err := s.ListTargets(ctx, influxdb.ScraperTargetFilter{})
			if err != nil {
				t.Fatalf("failed to retrieve targets: %v", err)
			}
			if diff := cmp.Diff(targets, tt.wants.targets, targetCmpOptions...); diff != "" {
				t.Errorf("targets are different -got/+want\ndiff %s", diff)
			}
		})
	}
}

// UpdateTarget testing
func UpdateTarget(
	init func(TargetFields, *testing.T) (influxdb.ScraperTargetStoreService, string, func()),
	t *testing.T,
) {
	t.Helper()
	type args struct {
		url    string
		userID influxdb.ID
		id     influxdb.ID
	}
	type wants struct {
		err    error
		target *influxdb.ScraperTarget
	}

	tests := []struct {
		name   string
		fields TargetFields
		args   args
		wants  wants
	}{
		{
			name: "update url with blank id",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						URL:      "url1",
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						URL:      "url2",
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
			args: args{
				url: "changed",
			},
			wants: wants{
				err: &influxdb.Error{
					Code: influxdb.EInvalid,
					Op:   influxdb.OpUpdateTarget,
					Msg:  "provided scraper target ID has invalid format",
				},
			},
		},
		{
			name: "update url with id that does not exist",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						URL:      "url1",
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						URL:      "url2",
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
			args: args{
				id:  MustIDBase16(targetThreeID),
				url: "changed",
			},
			wants: wants{
				err: &influxdb.Error{
					Code: influxdb.ENotFound,
					Op:   influxdb.OpUpdateTarget,
					Msg:  "scraper target is not found",
				},
			},
		},
		{
			name: "update url",
			fields: TargetFields{
				Organizations: []*influxdb.Organization{&org1},
				Targets: []*influxdb.ScraperTarget{
					{
						ID:       MustIDBase16(targetOneID),
						URL:      "url1",
						OrgID:    idOne,
						BucketID: idOne,
					},
					{
						ID:       MustIDBase16(targetTwoID),
						URL:      "url2",
						OrgID:    idOne,
						BucketID: idOne,
					},
				},
			},
			args: args{
				id:  MustIDBase16(targetOneID),
				url: "changed",
			},
			wants: wants{
				target: &influxdb.ScraperTarget{
					ID:       MustIDBase16(targetOneID),
					URL:      "changed",
					OrgID:    idOne,
					BucketID: idOne,
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			s, opPrefix, done := init(tt.fields, t)
			defer done()
			ctx := context.Background()

			upd := &influxdb.ScraperTarget{
				ID:  tt.args.id,
				URL: tt.args.url,
			}

			target, err := s.UpdateTarget(ctx, upd, tt.args.userID)
			diffPlatformErrors(tt.name, err, tt.wants.err, opPrefix, t)

			if diff := cmp.Diff(target, tt.wants.target, targetCmpOptions...); diff != "" {
				t.Errorf("scraper target is different -got/+want\ndiff %s", diff)
			}
		})
	}
}