Merge pull request #1799 from zhulongcheng/paging

feat(http/paging): add paging implementation

commit c33d94f5ec

bucket.go
@@ -75,12 +75,26 @@ type BucketFilter struct {
 	Organization *string
 }
 
-// FindOptions represents options passed to all find methods with multiple results.
-type FindOptions struct {
-	Limit      int
-	Offset     int
-	SortBy     string
-	Descending bool
-}
-
+// QueryParams Converts BucketFilter fields to url query params.
+func (f BucketFilter) QueryParams() map[string][]string {
+	qp := map[string][]string{}
+	if f.ID != nil {
+		qp["id"] = []string{f.ID.String()}
+	}
+
+	if f.Name != nil {
+		qp["name"] = []string{*f.Name}
+	}
+
+	if f.OrganizationID != nil {
+		qp["orgID"] = []string{f.OrganizationID.String()}
+	}
+
+	if f.Organization != nil {
+		qp["org"] = []string{*f.Organization}
+	}
+
+	return qp
+}
+
 // InternalBucketID returns the ID for an organization's specified internal bucket

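As an aside (not part of the diff): a minimal sketch of how the new BucketFilter.QueryParams method can be exercised; the org and bucket names here are placeholders.

package main

import (
	"fmt"

	"github.com/influxdata/platform"
)

func main() {
	org := "my-org"
	name := "telegraf"
	f := platform.BucketFilter{Organization: &org, Name: &name}

	// Only non-nil filter fields become query params,
	// e.g. map[name:[telegraf] org:[my-org]].
	fmt.Println(f.QueryParams())
}
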
@@ -11,7 +11,7 @@ import (
 	"time"
 
 	"github.com/influxdata/platform"
-	errors "github.com/influxdata/platform/kit/errors"
+	"github.com/influxdata/platform/kit/errors"
 	"github.com/julienschmidt/httprouter"
 )
 

@@ -196,7 +196,7 @@ func newBucketResponse(b *platform.Bucket) *bucketResponse {
 }
 
 type bucketsResponse struct {
-	Links   map[string]string     `json:"links"`
+	Links   *platform.PagingLinks `json:"links"`
 	Buckets []*bucketResponse     `json:"buckets"`
 }
 

@@ -206,10 +206,7 @@ func newBucketsResponse(opts platform.FindOptions, f platform.BucketFilter, bs [
 		rs = append(rs, newBucketResponse(b))
 	}
 	return &bucketsResponse{
-		// TODO(desa): update links to include paging and filter information
-		Links: map[string]string{
-			"self": "/api/v2/buckets",
-		},
+		Links:   newPagingLinks(bucketsPath, opts, f, len(bs)),
 		Buckets: rs,
 	}
 }

@@ -358,14 +355,13 @@ func (h *BucketHandler) handleGetBuckets(w http.ResponseWriter, r *http.Request)
 		return
 	}
 
-	opts := platform.FindOptions{}
-	bs, _, err := h.BucketService.FindBuckets(ctx, req.filter, opts)
+	bs, _, err := h.BucketService.FindBuckets(ctx, req.filter, req.opts)
 	if err != nil {
 		EncodeError(ctx, err, w)
 		return
 	}
 
-	if err := encodeResponse(ctx, w, http.StatusOK, newBucketsResponse(opts, req.filter, bs)); err != nil {
+	if err := encodeResponse(ctx, w, http.StatusOK, newBucketsResponse(req.opts, req.filter, bs)); err != nil {
 		EncodeError(ctx, err, w)
 		return
 	}

@@ -373,12 +369,20 @@ func (h *BucketHandler) handleGetBuckets(w http.ResponseWriter, r *http.Request)
 
 type getBucketsRequest struct {
 	filter platform.BucketFilter
+	opts   platform.FindOptions
 }
 
 func decodeGetBucketsRequest(ctx context.Context, r *http.Request) (*getBucketsRequest, error) {
 	qp := r.URL.Query()
 	req := &getBucketsRequest{}
 
+	opts, err := decodeFindOptions(ctx, r)
+	if err != nil {
+		return nil, err
+	}
+
+	req.opts = *opts
+
 	if orgID := qp.Get("orgID"); orgID != "" {
 		id, err := platform.IDFromString(orgID)
 		if err != nil {

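A quick usage sketch, not part of this PR: because decodeGetBucketsRequest and getBucketsRequest are unexported, it would have to live in the same http package; the test name and the example limit/offset values are made up.

package http

import (
	"context"
	"net/http/httptest"
	"testing"
)

func TestDecodeGetBucketsRequest_paging(t *testing.T) {
	// limit and offset now flow from the URL into req.opts via decodeFindOptions.
	r := httptest.NewRequest("GET", "/api/v2/buckets?limit=1&offset=2", nil)

	req, err := decodeGetBucketsRequest(context.Background(), r)
	if err != nil {
		t.Fatal(err)
	}

	if req.opts.Limit != 1 || req.opts.Offset != 2 {
		t.Errorf("opts = %+v, want Limit=1 Offset=2", req.opts)
	}
}
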
@@ -59,14 +59,19 @@ func TestService_handleGetBuckets(t *testing.T) {
 				},
 			},
 		},
-		args: args{},
+		args: args{
+			map[string][]string{
+				"limit": []string{"1"},
+			},
+		},
 		wants: wants{
 			statusCode:  http.StatusOK,
 			contentType: "application/json; charset=utf-8",
 			body: `
 {
   "links": {
-    "self": "/api/v2/buckets"
+    "self": "/api/v2/buckets?descending=false&limit=1&offset=0",
+    "next": "/api/v2/buckets?descending=false&limit=1&offset=1"
   },
   "buckets": [
     {

@@ -105,14 +110,18 @@ func TestService_handleGetBuckets(t *testing.T) {
 				},
 			},
 		},
-		args: args{},
+		args: args{
+			map[string][]string{
+				"limit": []string{"1"},
+			},
+		},
 		wants: wants{
 			statusCode:  http.StatusOK,
 			contentType: "application/json; charset=utf-8",
 			body: `
 {
   "links": {
-    "self": "/api/v2/buckets"
+    "self": "/api/v2/buckets?descending=false&limit=1&offset=0"
   },
   "buckets": []
 }`,

@@ -154,7 +163,6 @@ func TestService_handleGetBuckets(t *testing.T) {
 			if eq, _ := jsonEqual(string(body), tt.wants.body); tt.wants.body != "" && !eq {
 				t.Errorf("%q. handleGetBuckets() = \n***%v***\n,\nwant\n***%v***", tt.name, string(body), tt.wants.body)
 			}
-
 		})
 	}
 }

@@ -0,0 +1,111 @@
package http

import (
	"context"
	"net/http"
	"net/url"
	"strconv"

	"github.com/influxdata/platform/kit/errors"

	"github.com/influxdata/platform"
)

// decodeFindOptions returns a FindOptions decoded from http request.
func decodeFindOptions(ctx context.Context, r *http.Request) (*platform.FindOptions, error) {
	opts := &platform.FindOptions{}
	qp := r.URL.Query()

	if offset := qp.Get("offset"); offset != "" {
		o, err := strconv.Atoi(offset)
		if err != nil {
			return nil, err
		}

		opts.Offset = o
	}

	if limit := qp.Get("limit"); limit != "" {
		l, err := strconv.Atoi(limit)
		if err != nil {
			return nil, err
		}

		if l < 1 || l > platform.MaxPageSize {
			return nil, errors.InvalidDataf("limit must be between 1 and %d", platform.MaxPageSize)
		}

		opts.Limit = l
	} else {
		opts.Limit = platform.DefaultPageSize
	}

	if sortBy := qp.Get("sortBy"); sortBy != "" {
		opts.SortBy = sortBy
	}

	if descending := qp.Get("descending"); descending != "" {
		desc, err := strconv.ParseBool(descending)
		if err != nil {
			return nil, err
		}

		opts.Descending = desc
	}

	return opts, nil
}

// newPagingLinks returns a PagingLinks.
// num is the number of returned results.
func newPagingLinks(basePath string, opts platform.FindOptions, f platform.PagingFilter, num int) *platform.PagingLinks {
	u := url.URL{
		Path: basePath,
	}

	values := url.Values{}
	for k, vs := range f.QueryParams() {
		for _, v := range vs {
			if v != "" {
				values.Add(k, v)
			}
		}
	}

	var self, next, prev string
	for k, vs := range opts.QueryParams() {
		for _, v := range vs {
			if v != "" {
				values.Add(k, v)
			}
		}
	}

	u.RawQuery = values.Encode()
	self = u.String()

	if num >= opts.Limit {
		nextOffset := opts.Offset + opts.Limit
		values.Set("offset", strconv.Itoa(nextOffset))
		u.RawQuery = values.Encode()
		next = u.String()
	}

	if opts.Offset > 0 {
		prevOffset := opts.Offset - opts.Limit
		if prevOffset < 0 {
			prevOffset = 0
		}
		values.Set("offset", strconv.Itoa(prevOffset))
		u.RawQuery = values.Encode()
		prev = u.String()
	}

	links := &platform.PagingLinks{
		Prev: prev,
		Self: self,
		Next: next,
	}

	return links
}

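A small companion sketch, not part of the PR: decodeFindOptions falls back to platform.DefaultPageSize when no limit is given and rejects out-of-range limits. The test name is hypothetical and, like the PR's own tests below, it would have to live in the http package.

package http

import (
	"context"
	"net/http/httptest"
	"testing"

	"github.com/influxdata/platform"
)

func TestDecodeFindOptions_limitBounds(t *testing.T) {
	// No limit in the URL: decodeFindOptions falls back to platform.DefaultPageSize.
	r := httptest.NewRequest("GET", "/api/v2/buckets", nil)
	opts, err := decodeFindOptions(context.Background(), r)
	if err != nil {
		t.Fatal(err)
	}
	if opts.Limit != platform.DefaultPageSize {
		t.Errorf("Limit = %d, want %d", opts.Limit, platform.DefaultPageSize)
	}

	// limit above MaxPageSize (100): rejected as invalid data.
	r = httptest.NewRequest("GET", "/api/v2/buckets?limit=101", nil)
	if _, err := decodeFindOptions(context.Background(), r); err == nil {
		t.Error("expected an error for limit > MaxPageSize")
	}
}
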
@@ -0,0 +1,195 @@
package http

import (
	"context"
	"github.com/influxdata/platform"
	"github.com/influxdata/platform/mock"
	"net/http/httptest"
	"testing"
)

func TestPaging_decodeFindOptions(t *testing.T) {
	type args struct {
		queryParams map[string]string
	}
	type wants struct {
		opts platform.FindOptions
	}

	tests := []struct {
		name  string
		args  args
		wants wants
	}{
		{
			name: "decode FindOptions",
			args: args{
				map[string]string{
					"offset":     "10",
					"limit":      "10",
					"sortBy":     "updateTime",
					"descending": "true",
				},
			},
			wants: wants{
				opts: platform.FindOptions{
					Offset:     10,
					Limit:      10,
					SortBy:     "updateTime",
					Descending: true,
				},
			},
		},
		{
			name: "decode FindOptions with default values",
			args: args{
				map[string]string{
					"limit": "10",
				},
			},
			wants: wants{
				opts: platform.FindOptions{
					Offset:     0,
					Limit:      10,
					SortBy:     "",
					Descending: false,
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			r := httptest.NewRequest("GET", "http://any.url", nil)
			qp := r.URL.Query()
			for k, v := range tt.args.queryParams {
				qp.Set(k, v)
			}
			r.URL.RawQuery = qp.Encode()

			opts, err := decodeFindOptions(context.Background(), r)
			if err != nil {
				t.Errorf("%q failed, err: %s", tt.name, err.Error())
			}

			if opts.Offset != tt.wants.opts.Offset {
				t.Errorf("%q. decodeFindOptions() = %v, want %v", tt.name, opts.Offset, tt.wants.opts.Offset)
			}
			if opts.Limit != tt.wants.opts.Limit {
				t.Errorf("%q. decodeFindOptions() = %v, want %v", tt.name, opts.Limit, tt.wants.opts.Limit)
			}
			if opts.SortBy != tt.wants.opts.SortBy {
				t.Errorf("%q. decodeFindOptions() = %v, want %v", tt.name, opts.SortBy, tt.wants.opts.SortBy)
			}
			if opts.Descending != tt.wants.opts.Descending {
				t.Errorf("%q. decodeFindOptions() = %v, want %v", tt.name, opts.Descending, tt.wants.opts.Descending)
			}
		})
	}
}

func TestPaging_newPagingLinks(t *testing.T) {
	type args struct {
		basePath string
		num      int
		opts     platform.FindOptions
		filter   mock.PagingFilter
	}
	type wants struct {
		links platform.PagingLinks
	}

	tests := []struct {
		name  string
		args  args
		wants wants
	}{
		{
			name: "new PagingLinks",
			args: args{
				basePath: "/api/v2/buckets",
				num:      50,
				opts: platform.FindOptions{
					Offset:     10,
					Limit:      10,
					Descending: true,
				},
				filter: mock.PagingFilter{
					Name: "name",
					Type: []string{"type1", "type2"},
				},
			},
			wants: wants{
				links: platform.PagingLinks{
					Prev: "/api/v2/buckets?descending=true&limit=10&name=name&offset=0&type=type1&type=type2",
					Self: "/api/v2/buckets?descending=true&limit=10&name=name&offset=10&type=type1&type=type2",
					Next: "/api/v2/buckets?descending=true&limit=10&name=name&offset=20&type=type1&type=type2",
				},
			},
		},
		{
			name: "new PagingLinks with empty prev link",
			args: args{
				basePath: "/api/v2/buckets",
				num:      50,
				opts: platform.FindOptions{
					Offset:     0,
					Limit:      10,
					Descending: true,
				},
				filter: mock.PagingFilter{
					Name: "name",
					Type: []string{"type1", "type2"},
				},
			},
			wants: wants{
				links: platform.PagingLinks{
					Prev: "",
					Self: "/api/v2/buckets?descending=true&limit=10&name=name&offset=0&type=type1&type=type2",
					Next: "/api/v2/buckets?descending=true&limit=10&name=name&offset=10&type=type1&type=type2",
				},
			},
		},
		{
			name: "new PagingLinks with empty next link",
			args: args{
				basePath: "/api/v2/buckets",
				num:      5,
				opts: platform.FindOptions{
					Offset:     10,
					Limit:      10,
					Descending: true,
				},
				filter: mock.PagingFilter{
					Name: "name",
					Type: []string{"type1", "type2"},
				},
			},
			wants: wants{
				links: platform.PagingLinks{
					Prev: "/api/v2/buckets?descending=true&limit=10&name=name&offset=0&type=type1&type=type2",
					Self: "/api/v2/buckets?descending=true&limit=10&name=name&offset=10&type=type1&type=type2",
					Next: "",
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			links := newPagingLinks(tt.args.basePath, tt.args.opts, tt.args.filter, tt.args.num)

			if links.Prev != tt.wants.links.Prev {
				t.Errorf("%q. newPagingLinks() = %v, want %v", tt.name, links.Prev, tt.wants.links.Prev)
			}

			if links.Self != tt.wants.links.Self {
				t.Errorf("%q. newPagingLinks() = %v, want %v", tt.name, links.Self, tt.wants.links.Self)
			}

			if links.Next != tt.wants.links.Next {
				t.Errorf("%q. newPagingLinks() = %v, want %v", tt.name, links.Next, tt.wants.links.Next)
			}
		})
	}
}

@@ -2401,6 +2401,8 @@ paths:
         - Buckets
       summary: List all buckets
       parameters:
+        - $ref: "#/components/parameters/Offset"
+        - $ref: "#/components/parameters/Limit"
         - in: query
           name: org
           description: specifies the organization name of the resource

@@ -3990,6 +3992,36 @@ paths:
           schema:
             $ref: "#/components/schemas/Error"
 components:
+  parameters:
+    Offset:
+      in: query
+      name: offset
+      required: false
+      schema:
+        type: integer
+        minimum: 0
+    Limit:
+      in: query
+      name: limit
+      required: false
+      schema:
+        type: integer
+        minimum: 1
+        maximum: 100
+        default: 20
+    Descending:
+      in: query
+      name: descending
+      required: false
+      schema:
+        type: boolean
+        default: false
+    SortBy:
+      in: query
+      name: sortBy
+      required: false
+      schema:
+        type: string
   schemas:
     LanguageRequest:
       description: flux query to be analyzed.

@@ -4282,14 +4314,10 @@ components:
       items:
         $ref: "#/components/schemas/Bucket"
     Link:
-      type: object
-      readOnly: true
-      description: URI of resource.
-      properties:
-        href:
-          type: string
-          format: uri
-      required: [href]
+      type: string
+      readOnly: true
+      format: uri
+      description: URI of resource.
     Links:
       type: object
       properties:

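The Offset/Limit/Descending/SortBy parameters above are the same query params decodeFindOptions reads, and the response carries the links built by newPagingLinks. A rough client-side sketch of walking all pages by following the next link; the host, the pagedBuckets type, and the field subset are illustrative, not part of the platform client.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// pagedBuckets mirrors only the fields this example needs from the
// /api/v2/buckets response body.
type pagedBuckets struct {
	Links struct {
		Self string `json:"self"`
		Next string `json:"next"`
	} `json:"links"`
	Buckets []json.RawMessage `json:"buckets"`
}

func main() {
	// Placeholder for wherever the platform HTTP API is served.
	const host = "http://localhost:9999"

	// Follow the "next" link until it is gone; newPagingLinks only sets it
	// when a page is full, and PagingLinks omits it (omitempty) otherwise.
	next := "/api/v2/buckets?limit=20"
	for next != "" {
		resp, err := http.Get(host + next)
		if err != nil {
			panic(err)
		}

		var page pagedBuckets
		if err := json.NewDecoder(resp.Body).Decode(&page); err != nil {
			panic(err)
		}
		resp.Body.Close()

		fmt.Printf("got %d buckets from %s\n", len(page.Buckets), page.Links.Self)
		next = page.Links.Next
	}
}
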
@@ -0,0 +1,13 @@
package mock

type PagingFilter struct {
	Name string
	Type []string
}

func (f PagingFilter) QueryParams() map[string][]string {
	qp := map[string][]string{}
	qp["name"] = []string{f.Name}
	qp["type"] = f.Type
	return qp
}

@@ -0,0 +1,43 @@
package platform

import (
	"strconv"
)

const (
	DefaultPageSize = 20
	MaxPageSize     = 100
)

// PagingFilter represents a filter containing url query params.
type PagingFilter interface {
	// QueryParams returns a map containing url query params.
	QueryParams() map[string][]string
}

// PagingLinks represents paging links.
type PagingLinks struct {
	Prev string `json:"prev,omitempty"`
	Self string `json:"self"`
	Next string `json:"next,omitempty"`
}

// FindOptions represents options passed to all find methods with multiple results.
type FindOptions struct {
	Limit      int
	Offset     int
	SortBy     string
	Descending bool
}

// QueryParams returns a map containing url query params.
func (f FindOptions) QueryParams() map[string][]string {
	qp := map[string][]string{
		"limit":      []string{strconv.Itoa(f.Limit)},
		"offset":     []string{strconv.Itoa(f.Offset)},
		"sortBy":     []string{f.SortBy},
		"descending": []string{strconv.FormatBool(f.Descending)},
	}

	return qp
}

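One detail worth calling out: FindOptions.QueryParams always emits all four keys, newPagingLinks drops empty values, and url.Values.Encode sorts keys alphabetically, which is why the links in the handler tests read "?descending=false&limit=1&offset=0". A standalone sketch (outside the repo) that reproduces that encoding:

package main

import (
	"fmt"
	"net/url"

	"github.com/influxdata/platform"
)

func main() {
	opts := platform.FindOptions{Limit: 1, Offset: 0}

	// Mirror what newPagingLinks does: add every non-empty value,
	// then let url.Values.Encode sort the keys.
	values := url.Values{}
	for k, vs := range opts.QueryParams() {
		for _, v := range vs {
			if v != "" {
				values.Add(k, v)
			}
		}
	}

	fmt.Println(values.Encode()) // descending=false&limit=1&offset=0
}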