chore: add goimports

pull/20651/head
Sam Arnold 2021-01-29 11:50:57 -05:00
parent 06020b2a49
commit 781fa0e846
34 changed files with 194 additions and 178 deletions
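Note: most of the hunks below are mechanical. The new etc/fmt.sh runs gofmt -w -s followed by goimports -w over every non-generated Go file, so element types that are already implied by an enclosing composite literal are elided and import blocks are regrouped. A minimal, self-contained sketch of the literal simplification (a hypothetical file, not code from this repository):

// literal_demo.go — illustrates the gofmt -s rewrite applied throughout this diff.
package main

import "fmt"

type Field struct {
	Value string
	Type  string
}

func main() {
	// Before: the element type is spelled out for every entry.
	verbose := map[string][]string{
		"host": []string{"myhost", "yourhost"},
	}

	// After gofmt -s: the inner []string (and, below, the Field type) is
	// dropped because the enclosing literal already determines it.
	simplified := map[string][]string{
		"host": {"myhost", "yourhost"},
	}

	fields := []Field{
		{Value: "usage_idle", Type: "field"},
	}

	fmt.Println(verbose, simplified, fields)
}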

View File

@ -128,7 +128,7 @@ ui_client:
#
fmt: $(SOURCES_NO_VENDOR)
gofmt -w -s $^
./etc/fmt.sh
checkfmt:
./etc/checkfmt.sh

View File

@ -6,7 +6,7 @@ import (
"fmt"
"net/http"
"github.com/elazarl/go-bindata-assetfs"
assetfs "github.com/elazarl/go-bindata-assetfs"
)
// DebugAssets serves assets via a specified directory
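This hunk (like the jsternberg/zap-logfmt one later in the diff) shows goimports making the import name explicit when the package name does not match the last element of its import path: the package declared by github.com/elazarl/go-bindata-assetfs is named assetfs. A hypothetical snippet, not part of this change, showing the named import in use:

// named_import_demo.go
package main

import (
	"fmt"

	assetfs "github.com/elazarl/go-bindata-assetfs"
)

func main() {
	// An empty AssetFS value is enough to show that the usable identifier
	// is "assetfs", not the path element "go-bindata-assetfs".
	var fs assetfs.AssetFS
	fmt.Printf("%T\n", &fs)
}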

View File

@ -164,7 +164,7 @@ func TestMetaClient_Users(t *testing.T) {
{
Name: "admin",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -195,7 +195,7 @@ func TestMetaClient_Users(t *testing.T) {
{
Name: "admin",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -297,7 +297,7 @@ func TestMetaClient_User(t *testing.T) {
want: &User{
Name: "admin",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -723,7 +723,7 @@ func TestMetaClient_Roles(t *testing.T) {
{
Name: "admin",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -755,7 +755,7 @@ func TestMetaClient_Roles(t *testing.T) {
{
Name: "admin",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -818,7 +818,7 @@ func TestMetaClient_Role(t *testing.T) {
want: &Role{
Name: "admin",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -897,12 +897,12 @@ func TestMetaClient_UserRoles(t *testing.T) {
name: nil,
},
want: map[string]Roles{
"marty": Roles{
"marty": {
Roles: []Role{
{
Name: "timetravelers",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -911,7 +911,7 @@ func TestMetaClient_UserRoles(t *testing.T) {
{
Name: "mcfly",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -919,12 +919,12 @@ func TestMetaClient_UserRoles(t *testing.T) {
},
},
},
"docbrown": Roles{
"docbrown": {
Roles: []Role{
{
Name: "timetravelers",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},
@ -932,12 +932,12 @@ func TestMetaClient_UserRoles(t *testing.T) {
},
},
},
"george": Roles{
"george": {
Roles: []Role{
{
Name: "mcfly",
Permissions: map[string][]string{
"": []string{
"": {
"ViewAdmin", "ViewChronograf",
},
},

View File

@ -24,19 +24,19 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_idle",
Type: "field",
},
chronograf.Field{
{
Value: "usage_guest_nice",
Type: "field",
},
chronograf.Field{
{
Value: "usage_system",
Type: "field",
},
chronograf.Field{
{
Value: "usage_guest",
Type: "field",
},
@ -55,7 +55,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "mean",
Type: "func",
Args: []chronograf.Field{
@ -65,7 +65,7 @@ func TestConvert(t *testing.T) {
},
},
},
chronograf.Field{
{
Value: "median",
Type: "func",
Args: []chronograf.Field{
@ -75,7 +75,7 @@ func TestConvert(t *testing.T) {
},
},
},
chronograf.Field{
{
Value: "count",
Type: "func",
Args: []chronograf.Field{
@ -85,7 +85,7 @@ func TestConvert(t *testing.T) {
},
},
},
chronograf.Field{
{
Value: "mean",
Type: "func",
Args: []chronograf.Field{
@ -134,12 +134,12 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
},
Tags: map[string][]string{"host": []string{"myhost"}},
Tags: map[string][]string{"host": {"myhost"}},
GroupBy: chronograf.GroupBy{
Time: "",
Tags: []string{},
@ -170,12 +170,12 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
},
Tags: map[string][]string{"host": []string{"myhost"}},
Tags: map[string][]string{"host": {"myhost"}},
GroupBy: chronograf.GroupBy{
Time: "",
Tags: []string{},
@ -195,7 +195,7 @@ func TestConvert(t *testing.T) {
RetentionPolicy: "autogen",
Tags: map[string][]string{},
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
@ -242,7 +242,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
@ -262,12 +262,12 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
},
Tags: map[string][]string{"host": []string{"myhost"}},
Tags: map[string][]string{"host": {"myhost"}},
GroupBy: chronograf.GroupBy{
Time: "",
Tags: []string{},
@ -287,16 +287,16 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
},
Tags: map[string][]string{
"host": []string{
"host": {
"myhost",
},
"cpu": []string{
"cpu": {
"cpu-total",
},
},
@ -331,14 +331,14 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_user",
Type: "field",
},
},
Tags: map[string][]string{
"host": []string{"myhost", "yourhost"},
"these": []string{"those"},
"host": {"myhost", "yourhost"},
"these": {"those"},
},
GroupBy: chronograf.GroupBy{
Time: "",
@ -358,31 +358,31 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_idle",
Type: "field",
},
chronograf.Field{
{
Value: "usage_guest_nice",
Type: "field",
},
chronograf.Field{
{
Value: "usage_system",
Type: "field",
},
chronograf.Field{
{
Value: "usage_guest",
Type: "field",
},
},
Tags: map[string][]string{
"host": []string{
"host": {
"dev-052978d6-us-east-2-meta-0",
"dev-052978d6-us-east-2-data-5",
"dev-052978d6-us-east-2-data-4",
"dev-052978d6-us-east-2-data-3",
},
"cpu": []string{
"cpu": {
"cpu-total",
"cpu0",
},
@ -405,31 +405,31 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "usage_idle",
Type: "field",
},
chronograf.Field{
{
Value: "usage_guest_nice",
Type: "field",
},
chronograf.Field{
{
Value: "usage_system",
Type: "field",
},
chronograf.Field{
{
Value: "usage_guest",
Type: "field",
},
},
Tags: map[string][]string{
"host": []string{
"host": {
"dev-052978d6-us-east-2-meta-0",
"dev-052978d6-us-east-2-data-5",
"dev-052978d6-us-east-2-data-4",
"dev-052978d6-us-east-2-data-3",
},
"cpu": []string{
"cpu": {
"cpu-total",
"cpu0",
},
@ -452,7 +452,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "mean",
Type: "func",
Args: []chronograf.Field{
@ -483,7 +483,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "mean",
Type: "func",
Args: []chronograf.Field{
@ -514,7 +514,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "mean",
Type: "func",
Alias: "mean_usage_idle",
@ -546,7 +546,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "percentile",
Type: "func",
Alias: "mean_usage_idle",
@ -555,7 +555,7 @@ func TestConvert(t *testing.T) {
Value: "usage_idle",
Type: "field",
},
chronograf.Field{
{
Value: "3.14",
Type: "number",
},
@ -580,7 +580,7 @@ func TestConvert(t *testing.T) {
want: chronograf.QueryConfig{
Measurement: "h2o_feet",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "top",
Type: "func",
Args: []chronograf.Field{
@ -588,11 +588,11 @@ func TestConvert(t *testing.T) {
Value: "water_level",
Type: "field",
},
chronograf.Field{
{
Value: "location",
Type: "field",
},
chronograf.Field{
{
Value: "2",
Type: "integer",
},
@ -612,7 +612,7 @@ func TestConvert(t *testing.T) {
want: chronograf.QueryConfig{
Measurement: "h2o_feet",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "count",
Type: "func",
Args: []chronograf.Field{
@ -636,7 +636,7 @@ func TestConvert(t *testing.T) {
want: chronograf.QueryConfig{
Measurement: "h2o_feet",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "count",
Type: "func",
Alias: "count_water",
@ -661,7 +661,7 @@ func TestConvert(t *testing.T) {
want: chronograf.QueryConfig{
Measurement: "h2o_feet",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "count",
Type: "func",
Args: []chronograf.Field{
@ -687,7 +687,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "mean",
Type: "func",
Args: []chronograf.Field{
@ -718,7 +718,7 @@ func TestConvert(t *testing.T) {
Measurement: "cpu",
RetentionPolicy: "autogen",
Fields: []chronograf.Field{
chronograf.Field{
{
Value: "mean",
Type: "func",
Args: []chronograf.Field{

View File

@ -85,7 +85,7 @@ func Test_AuthMux_Logout_DeletesSessionCookie(t *testing.T) {
tsURL, _ := url.Parse(ts.URL)
hc.Jar.SetCookies(tsURL, []*http.Cookie{
&http.Cookie{
{
Name: DefaultCookieName,
Value: "",
},

View File

@ -30,13 +30,13 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "correct axes",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Bounds: []string{"0", "100"},
},
"y": chronograf.Axis{
"y": {
Bounds: []string{"0", "100"},
},
"y2": chronograf.Axis{
"y2": {
Bounds: []string{"0", "100"},
},
},
@ -46,10 +46,10 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "invalid axes present",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"axis of evil": chronograf.Axis{
"axis of evil": {
Bounds: []string{"666", "666"},
},
"axis of awesome": chronograf.Axis{
"axis of awesome": {
Bounds: []string{"1337", "31337"},
},
},
@ -60,7 +60,7 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "linear scale value",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Scale: "linear",
Bounds: []string{"0", "100"},
},
@ -71,7 +71,7 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "log scale value",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Scale: "log",
Bounds: []string{"0", "100"},
},
@ -82,7 +82,7 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "invalid scale value",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Scale: "potatoes",
Bounds: []string{"0", "100"},
},
@ -94,7 +94,7 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "base 10 axis",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Base: "10",
Bounds: []string{"0", "100"},
},
@ -105,7 +105,7 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "base 2 axis",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Base: "2",
Bounds: []string{"0", "100"},
},
@ -116,7 +116,7 @@ func Test_Cells_CorrectAxis(t *testing.T) {
name: "invalid base",
cell: &chronograf.DashboardCell{
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Base: "all your base are belong to us",
Bounds: []string{"0", "100"},
},
@ -191,13 +191,13 @@ func Test_Service_DashboardCells(t *testing.T) {
Queries: []chronograf.DashboardQuery{},
CellColors: []chronograf.CellColor{},
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Bounds: []string{"", ""},
},
"y": chronograf.Axis{
"y": {
Bounds: []string{"", ""},
},
"y2": chronograf.Axis{
"y2": {
Bounds: []string{"", ""},
},
},
@ -714,7 +714,7 @@ func Test_newCellResponses(t *testing.T) {
name: "all fields set",
dID: chronograf.DashboardID(1),
dcells: []chronograf.DashboardCell{
chronograf.DashboardCell{
{
ID: "445f8dc0-4d73-4168-8477-f628690d18a3",
X: 0,
Y: 0,
@ -744,7 +744,7 @@ func Test_newCellResponses(t *testing.T) {
},
},
},
Tags: map[string][]string{"cpu": []string{"ChristohersMBP2.lan"}},
Tags: map[string][]string{"cpu": {"ChristohersMBP2.lan"}},
GroupBy: chronograf.GroupBy{
Time: "2s",
},
@ -759,14 +759,14 @@ func Test_newCellResponses(t *testing.T) {
},
},
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{},
"y": chronograf.Axis{},
"y2": chronograf.Axis{},
"x": {},
"y": {},
"y2": {},
},
Type: "line",
CellColors: []chronograf.CellColor{
chronograf.CellColor{ID: "0", Type: "min", Hex: "#00C9FF", Name: "laser", Value: "0"},
chronograf.CellColor{ID: "1", Type: "max", Hex: "#9394FF", Name: "comet", Value: "100"},
{ID: "0", Type: "min", Hex: "#00C9FF", Name: "laser", Value: "0"},
{ID: "1", Type: "max", Hex: "#9394FF", Name: "comet", Value: "100"},
},
Legend: chronograf.Legend{
Type: "static",
@ -851,7 +851,7 @@ func Test_newCellResponses(t *testing.T) {
name: "nothing set",
dID: chronograf.DashboardID(1),
dcells: []chronograf.DashboardCell{
chronograf.DashboardCell{
{
ID: "445f8dc0-4d73-4168-8477-f628690d18a3",
X: 0,
Y: 0,
@ -869,13 +869,13 @@ func Test_newCellResponses(t *testing.T) {
Name: "Untitled Cell",
Queries: []chronograf.DashboardQuery{},
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Bounds: []string{"", ""},
},
"y": chronograf.Axis{
"y": {
Bounds: []string{"", ""},
},
"y2": chronograf.Axis{
"y2": {
Bounds: []string{"", ""},
},
},

View File

@ -233,10 +233,10 @@ func Test_newDashboardResponse(t *testing.T) {
},
},
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Bounds: []string{"0", "100"},
},
"y": chronograf.Axis{
"y": {
Bounds: []string{"2", "95"},
Label: "foo",
},
@ -259,7 +259,7 @@ func Test_newDashboardResponse(t *testing.T) {
Organization: "0",
Templates: []templateResponse{},
Cells: []dashboardCellResponse{
dashboardCellResponse{
{
Links: dashboardCellLinks{
Self: "/chronograf/v1/dashboards/0/cells/a",
},
@ -291,20 +291,20 @@ func Test_newDashboardResponse(t *testing.T) {
},
CellColors: []chronograf.CellColor{},
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Bounds: []string{"0", "100"},
},
"y": chronograf.Axis{
"y": {
Bounds: []string{"2", "95"},
Label: "foo",
},
"y2": chronograf.Axis{
"y2": {
Bounds: []string{"", ""},
},
},
},
},
dashboardCellResponse{
{
Links: dashboardCellLinks{
Self: "/chronograf/v1/dashboards/0/cells/b",
},
@ -313,13 +313,13 @@ func Test_newDashboardResponse(t *testing.T) {
W: 4,
H: 4,
Axes: map[string]chronograf.Axis{
"x": chronograf.Axis{
"x": {
Bounds: []string{"", ""},
},
"y": chronograf.Axis{
"y": {
Bounds: []string{"", ""},
},
"y2": chronograf.Axis{
"y2": {
Bounds: []string{"", ""},
},
},

View File

@ -292,7 +292,7 @@ func TestService_Me(t *testing.T) {
},
AllF: func(ctx context.Context) ([]chronograf.Organization, error) {
return []chronograf.Organization{
chronograf.Organization{
{
ID: "0",
Name: "The Gnarly Default",
DefaultRole: roles.ViewerRoleName,
@ -372,7 +372,7 @@ func TestService_Me(t *testing.T) {
},
AllF: func(ctx context.Context) ([]chronograf.Organization, error) {
return []chronograf.Organization{
chronograf.Organization{
{
ID: "0",
Name: "The Gnarly Default",
DefaultRole: roles.ViewerRoleName,
@ -452,7 +452,7 @@ func TestService_Me(t *testing.T) {
},
AllF: func(ctx context.Context) ([]chronograf.Organization, error) {
return []chronograf.Organization{
chronograf.Organization{
{
ID: "0",
Name: "The Gnarly Default",
DefaultRole: roles.ViewerRoleName,
@ -522,7 +522,7 @@ func TestService_Me(t *testing.T) {
},
AllF: func(ctx context.Context) ([]chronograf.Organization, error) {
return []chronograf.Organization{
chronograf.Organization{
{
ID: "0",
Name: "The Bad Place",
DefaultRole: roles.ViewerRoleName,

View File

@ -167,11 +167,11 @@ func TestService_Organizations(t *testing.T) {
OrganizationsStore: &mocks.OrganizationsStore{
AllF: func(ctx context.Context) ([]chronograf.Organization, error) {
return []chronograf.Organization{
chronograf.Organization{
{
ID: "1337",
Name: "The Good Place",
},
chronograf.Organization{
{
ID: "100",
Name: "The Bad Place",
},

View File

@ -3,7 +3,6 @@ package upgrade
import (
"context"
"errors"
"github.com/influxdata/influxdb/v2/pkg/testing/assert"
"reflect"
"sort"
"testing"
@ -15,6 +14,7 @@ import (
"github.com/influxdata/influxdb/v2/inmem"
"github.com/influxdata/influxdb/v2/kv/migration"
"github.com/influxdata/influxdb/v2/kv/migration/all"
"github.com/influxdata/influxdb/v2/pkg/testing/assert"
"github.com/influxdata/influxdb/v2/tenant"
authv1 "github.com/influxdata/influxdb/v2/v1/authorization"
"github.com/influxdata/influxdb/v2/v1/services/meta"

View File

@ -3,11 +3,11 @@ package internal
import (
"errors"
"fmt"
"github.com/influxdata/influxdb/v2/task/options"
"os"
"strings"
"time"
"github.com/influxdata/influxdb/v2/task/options"
"github.com/tcnksm/go-input"
)

View File

@ -1,12 +1,15 @@
#!/bin/bash
go install golang.org/x/tools/cmd/goimports
HAS_FMT_ERR=0
# For every Go file in the project, excluding vendor...
for file in $(go list -f '{{$dir := .Dir}}{{range .GoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}' ./...); do
for file in $(go list -f '{{$dir := .Dir}}{{range .GoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}{{range .TestGoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}{{range .IgnoredGoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}{{range .CgoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}' ./... ); do
# ... if file does not contain standard generated code comment (https://golang.org/s/generatedcode)...
if ! grep -Exq '^// Code generated .* DO NOT EDIT\.$' $file; then
FMT_OUT="$(gofmt -l -d -e $file)" # gofmt exits 0 regardless of whether it's formatted.
# ... and if gofmt had any output...
FMT_OUT="$(goimports -l -d $file)"
# ... and if goimports had any output...
if [[ -n "$FMT_OUT" ]]; then
if [ "$HAS_FMT_ERR" -eq "0" ]; then
# Only print this once.
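With this change checkfmt.sh reports files whose formatting or imports would be rewritten by goimports rather than plain gofmt. A rough local equivalent, assuming goimports is on PATH (the script installs it first):

make checkfmt                     # wraps ./etc/checkfmt.sh, as the Makefile hunk above shows
goimports -l -d path/to/file.go   # non-empty output means the file is not formatted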

etc/fmt.sh (new executable file, 12 lines added)
View File

@ -0,0 +1,12 @@
#!/bin/bash
go install golang.org/x/tools/cmd/goimports
# For every Go file in the project, excluding vendor...
for file in $(go list -f '{{$dir := .Dir}}{{range .GoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}{{range .TestGoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}{{range .IgnoredGoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}{{range .CgoFiles}}{{printf "%s/%s\n" $dir .}}{{end}}' ./... ); do
# ... if file does not contain standard generated code comment (https://golang.org/s/generatedcode)...
if ! grep -Exq '^// Code generated .* DO NOT EDIT\.$' $file; then
gofmt -w -s $file
goimports -w $file
fi
done
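Unlike checkfmt.sh, this new script rewrites files in place, first with gofmt -w -s (the source of the composite-literal simplifications above) and then with goimports -w, skipping any file that carries the standard generated-code comment. A manual equivalent for a single file, with a placeholder path:

go install golang.org/x/tools/cmd/goimports
gofmt -w -s path/to/file.go
goimports -w path/to/file.go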

View File

@ -11,7 +11,6 @@ import (
"github.com/influxdata/httprouter"
"github.com/influxdata/influxdb/v2"
// "github.com/influxdata/influxdb/v2/bolt"
"github.com/influxdata/influxdb/v2/kit/tracing"
"go.uber.org/zap"
)

View File

@ -118,7 +118,7 @@ func TestDelete(t *testing.T) {
name: "missing bucket",
args: args{
queryParams: map[string][]string{
"org": []string{"org1"},
"org": {"org1"},
},
body: []byte(`{"start":"2009-01-01T23:00:00Z","stop":"2009-11-10T01:00:00Z"}`),
authorizer: &influxdb.Authorization{UserID: user1ID},
@ -153,8 +153,8 @@ func TestDelete(t *testing.T) {
name: "insufficient permissions delete",
args: args{
queryParams: map[string][]string{
"org": []string{"org1"},
"bucket": []string{"buck1"},
"org": {"org1"},
"bucket": {"buck1"},
},
body: []byte(`{"start":"2009-01-01T23:00:00Z","stop":"2019-11-10T01:00:00Z"}`),
authorizer: &influxdb.Authorization{UserID: user1ID},
@ -189,8 +189,8 @@ func TestDelete(t *testing.T) {
name: "no predicate delete",
args: args{
queryParams: map[string][]string{
"org": []string{"org1"},
"bucket": []string{"buck1"},
"org": {"org1"},
"bucket": {"buck1"},
},
body: []byte(`{"start":"2009-01-01T23:00:00Z","stop":"2019-11-10T01:00:00Z"}`),
authorizer: &influxdb.Authorization{
@ -236,8 +236,8 @@ func TestDelete(t *testing.T) {
name: "unsupported delete",
args: args{
queryParams: map[string][]string{
"org": []string{"org1"},
"bucket": []string{"buck1"},
"org": {"org1"},
"bucket": {"buck1"},
},
body: []byte(`{
"start":"2009-01-01T23:00:00Z",
@ -290,8 +290,8 @@ func TestDelete(t *testing.T) {
name: "complex delete",
args: args{
queryParams: map[string][]string{
"org": []string{"org1"},
"bucket": []string{"buck1"},
"org": {"org1"},
"bucket": {"buck1"},
},
body: []byte(`{
"start":"2009-01-01T23:00:00Z",

View File

@ -18,9 +18,9 @@ import (
"github.com/influxdata/flux/csv"
"github.com/influxdata/flux/lang"
platform "github.com/influxdata/influxdb/v2"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
"github.com/influxdata/influxdb/v2/mock"
"github.com/influxdata/influxdb/v2/query"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
)
var cmpOptions = cmp.Options{

View File

@ -17,10 +17,10 @@ import (
"github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/authorization"
pcontext "github.com/influxdata/influxdb/v2/context"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
kithttp "github.com/influxdata/influxdb/v2/kit/transport/http"
"github.com/influxdata/influxdb/v2/label"
"github.com/influxdata/influxdb/v2/mock"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
"github.com/influxdata/influxdb/v2/tenant"
influxdbtesting "github.com/influxdata/influxdb/v2/testing"
"go.uber.org/zap"

View File

@ -8,8 +8,8 @@ import (
func TestEncodingFormatFromMimeType(t *testing.T) {
tests := []struct {
s string
exp EncodingFormat
s string
exp EncodingFormat
}{
{s: "application/csv", exp: EncodingFormatAppCSV},
{s: "text/csv", exp: EncodingFormatTextCSV},

View File

@ -5,7 +5,7 @@ import (
"io"
"time"
"github.com/jsternberg/zap-logfmt"
zaplogfmt "github.com/jsternberg/zap-logfmt"
isatty "github.com/mattn/go-isatty"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"

View File

@ -9,12 +9,12 @@ import (
influxdb "github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/bolt"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
"github.com/influxdata/influxdb/v2/inmem"
"github.com/influxdata/influxdb/v2/kv"
"github.com/influxdata/influxdb/v2/kv/migration/all"
"github.com/influxdata/influxdb/v2/mock"
endpointservice "github.com/influxdata/influxdb/v2/notification/endpoint/service"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
"github.com/influxdata/influxdb/v2/query/fluxlang"
"github.com/influxdata/influxdb/v2/secret"
"github.com/influxdata/influxdb/v2/tenant"

View File

@ -467,7 +467,7 @@ func TestPlus_Error(t *testing.T) {
func TestPlus_Marshal_Unmarshal_Sparse(t *testing.T) {
h, _ := NewPlus(4)
h.sparse = true
h.tmpSet = map[uint32]struct{}{26: struct{}{}, 40: struct{}{}}
h.tmpSet = map[uint32]struct{}{26: {}, 40: {}}
// Add a bunch of values to the sparse representation.
for i := 0; i < 10; i++ {

View File

@ -68,8 +68,8 @@ func TestMetrics_Metrics(t *testing.T) {
// The label variants for the two caches.
labelVariants := []prometheus.Labels{
prometheus.Labels{"engine_id": "0", "node_id": "0"},
prometheus.Labels{"engine_id": "1", "node_id": "0"},
{"engine_id": "0", "node_id": "0"},
{"engine_id": "1", "node_id": "0"},
}
for i, labels := range labelVariants {

View File

@ -83,7 +83,7 @@ func TestTemplate(t *testing.T) {
Color: "blurple",
associationMapping: associationMapping{
mappings: map[assocMapKey][]assocMapVal{
assocMapKey{
{
resType: influxdb.BucketsResourceType,
name: bucket1.Name(),
}: {{

View File

@ -5,12 +5,12 @@ import (
"context"
"encoding/json"
"fmt"
"github.com/influxdata/influxdb/v2/kit/errors"
"time"
"github.com/influxdata/flux"
"github.com/influxdata/flux/lang"
"github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/kit/errors"
"github.com/influxdata/influxdb/v2/query"
"github.com/influxdata/influxdb/v2/storage"
"go.uber.org/zap"

View File

@ -38,8 +38,8 @@ func Test_NotifyCoordinatorOfCreated(t *testing.T) {
// paginated responses
pageOne: []*influxdb.Task{taskOne},
otherPages: map[influxdb.ID][]*influxdb.Task{
one: []*influxdb.Task{taskTwo, taskThree},
three: []*influxdb.Task{taskFour},
one: {taskTwo, taskThree},
three: {taskFour},
},
}
)

View File

@ -15,8 +15,8 @@ import (
"github.com/influxdata/flux/runtime"
"github.com/influxdata/flux/values"
"github.com/influxdata/influxdb/v2"
"github.com/influxdata/influxdb/v2/query"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
"github.com/influxdata/influxdb/v2/query"
)
type fakeQueryService struct {

View File

@ -7,9 +7,9 @@ import (
"github.com/google/go-cmp/cmp"
platform "github.com/influxdata/influxdb/v2"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
"github.com/influxdata/influxdb/v2/kit/feature"
"github.com/influxdata/influxdb/v2/mock"
_ "github.com/influxdata/influxdb/v2/fluxinit/static"
"github.com/influxdata/influxdb/v2/query/fluxlang"
"github.com/influxdata/influxdb/v2/task/options"
)

View File

@ -227,7 +227,7 @@ func badMetric() *dto.MetricFamily {
Name: proto.String("bad"),
Type: dto.MetricType_COUNTER.Enum(),
Metric: []*dto.Metric{
&dto.Metric{
{
Label: []*dto.LabelPair{pr.L("n1", "v1")},
Counter: &dto.Counter{
Value: proto.Float64(1.0),
@ -243,7 +243,7 @@ func goodMetric() *dto.MetricFamily {
Name: proto.String("good"),
Type: dto.MetricType_COUNTER.Enum(),
Metric: []*dto.Metric{
&dto.Metric{
{
Label: []*dto.LabelPair{pr.L("n1", "v1")},
Counter: &dto.Counter{
Value: proto.Float64(1.0),

View File

@ -15,7 +15,7 @@ func goodMetricWithTime() *dto.MetricFamily {
Name: proto.String("good"),
Type: dto.MetricType_COUNTER.Enum(),
Metric: []*dto.Metric{
&dto.Metric{
{
Label: []*dto.LabelPair{pr.L("n1", "v1")},
Counter: &dto.Counter{
Value: proto.Float64(1.0),

View File

@ -3,6 +3,7 @@ package tenant
import (
"context"
"fmt"
"github.com/influxdata/influxdb/v2"
icontext "github.com/influxdata/influxdb/v2/context"
"github.com/influxdata/influxdb/v2/kv"

View File

@ -10,6 +10,7 @@ import (
_ "github.com/influxdata/pkg-config"
_ "github.com/kevinburke/go-bindata/go-bindata"
_ "github.com/mna/pigeon"
_ "golang.org/x/tools/cmd/goimports"
_ "golang.org/x/tools/cmd/stringer"
_ "honnef.co/go/tools/cmd/staticcheck"
)
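The goimports command is added to the repository's tools file, the usual way a Go module pins build-time tools: the blank imports make go.mod track the commands, while a build tag keeps the file out of every real build (importing a main package would otherwise be an error). A hypothetical standalone version of the pattern, not the repository's actual file:

//go:build tools
// +build tools

package tools

import (
	// The blank import records the tool as a module dependency; running
	// `go install golang.org/x/tools/cmd/goimports` inside the module (as
	// fmt.sh and checkfmt.sh do) then builds the go.mod-pinned version.
	_ "golang.org/x/tools/cmd/goimports"
)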

View File

@ -68,11 +68,11 @@ func TestTSMReader_MMAP_ReadAll(t *testing.T) {
}
var data = map[string][]Value{
"float": []Value{NewValue(1, 1.0)},
"int": []Value{NewValue(1, int64(1))},
"uint": []Value{NewValue(1, ^uint64(0))},
"bool": []Value{NewValue(1, true)},
"string": []Value{NewValue(1, "foo")},
"float": {NewValue(1, 1.0)},
"int": {NewValue(1, int64(1))},
"uint": {NewValue(1, ^uint64(0))},
"bool": {NewValue(1, true)},
"string": {NewValue(1, "foo")},
}
keys := make([]string, 0, len(data))
@ -142,15 +142,15 @@ func TestTSMReader_MMAP_Read(t *testing.T) {
}
var data = map[string][]Value{
"float": []Value{
"float": {
NewValue(1, 1.0)},
"int": []Value{
"int": {
NewValue(1, int64(1))},
"uint": []Value{
"uint": {
NewValue(1, ^uint64(0))},
"bool": []Value{
"bool": {
NewValue(1, true)},
"string": []Value{
"string": {
NewValue(1, "foo")},
}
@ -221,15 +221,15 @@ func TestTSMReader_MMAP_Keys(t *testing.T) {
}
var data = map[string][]Value{
"float": []Value{
"float": {
NewValue(1, 1.0)},
"int": []Value{
"int": {
NewValue(1, int64(1))},
"uint": []Value{
"uint": {
NewValue(1, ^uint64(0))},
"bool": []Value{
"bool": {
NewValue(1, true)},
"string": []Value{
"string": {
NewValue(1, "foo")},
}
@ -1395,11 +1395,11 @@ func TestBlockIterator_Sorted(t *testing.T) {
}
values := map[string][]Value{
"mem": []Value{NewValue(0, int64(1))},
"cycles": []Value{NewValue(0, ^uint64(0))},
"cpu": []Value{NewValue(1, float64(2))},
"disk": []Value{NewValue(1, true)},
"load": []Value{NewValue(1, "string")},
"mem": {NewValue(0, int64(1))},
"cycles": {NewValue(0, ^uint64(0))},
"cpu": {NewValue(1, float64(2))},
"disk": {NewValue(1, true)},
"load": {NewValue(1, "string")},
}
keys := make([]string, 0, len(values))
@ -1560,15 +1560,15 @@ func TestTSMReader_File_ReadAll(t *testing.T) {
}
var data = map[string][]Value{
"float": []Value{
"float": {
NewValue(1, 1.0)},
"int": []Value{
"int": {
NewValue(1, int64(1))},
"uint": []Value{
"uint": {
NewValue(1, ^uint64(0))},
"bool": []Value{
"bool": {
NewValue(1, true)},
"string": []Value{
"string": {
NewValue(1, "foo")},
}
@ -1708,15 +1708,15 @@ func TestTSMReader_File_Read(t *testing.T) {
}
var data = map[string][]Value{
"float": []Value{
"float": {
NewValue(1, 1.0)},
"int": []Value{
"int": {
NewValue(1, int64(1))},
"uint": []Value{
"uint": {
NewValue(1, ^uint64(0))},
"bool": []Value{
"bool": {
NewValue(1, true)},
"string": []Value{
"string": {
NewValue(1, "foo")},
}
@ -1787,15 +1787,15 @@ func TestTSMReader_References(t *testing.T) {
}
var data = map[string][]Value{
"float": []Value{
"float": {
NewValue(1, 1.0)},
"int": []Value{
"int": {
NewValue(1, int64(1))},
"uint": []Value{
"uint": {
NewValue(1, ^uint64(0))},
"bool": []Value{
"bool": {
NewValue(1, true)},
"string": []Value{
"string": {
NewValue(1, "foo")},
}

View File

@ -136,7 +136,7 @@ func (c *TagValueSeriesIDCache) Put(name, key, value []byte, ss *tsdb.SeriesIDSe
// No map for the measurement - first tag key for the measurement.
c.cache[string(name)] = map[string]map[string]*list.Element{
string(key): map[string]*list.Element{string(value): listElement},
string(key): {string(value): listElement},
}
EVICT:

View File

@ -12,27 +12,27 @@ import (
func TestSeriesIDSet_AndNot(t *testing.T) {
examples := [][3][]uint64{
[3][]uint64{
{
{1, 10, 20, 30},
{10, 12, 13, 14, 20},
{1, 30},
},
[3][]uint64{
{
{},
{10},
{},
},
[3][]uint64{
{
{1, 10, 20, 30},
{1, 10, 20, 30},
{},
},
[3][]uint64{
{
{1, 10},
{1, 10, 100},
{},
},
[3][]uint64{
{
{1, 10},
{},
{1, 10},