Remove explorations from backend server

pull/841/head
Chris Goller 2017-02-01 15:05:06 -06:00
parent 68ec7dfd26
commit 1522dc6e4e
18 changed files with 103 additions and 987 deletions

View File

@ -59,7 +59,6 @@ Chronograf's graphing tool that allows you to dig in and create personalized vis
* Generate [InfluxQL](https://docs.influxdata.com/influxdb/latest/query_language/) statements with the query builder
* Generate and edit [InfluxQL](https://docs.influxdata.com/influxdb/latest/query_language/) statements with the raw query editor
* Create visualizations and view query results in tabular format
* Manage visualizations with exploration sessions
### Dashboards

View File

@ -15,18 +15,16 @@ type Client struct {
Now func() time.Time
LayoutIDs chronograf.ID
ExplorationStore *ExplorationStore
SourcesStore *SourcesStore
ServersStore *ServersStore
LayoutStore *LayoutStore
UsersStore *UsersStore
AlertsStore *AlertsStore
DashboardsStore *DashboardsStore
SourcesStore *SourcesStore
ServersStore *ServersStore
LayoutStore *LayoutStore
UsersStore *UsersStore
AlertsStore *AlertsStore
DashboardsStore *DashboardsStore
}
func NewClient() *Client {
c := &Client{Now: time.Now}
c.ExplorationStore = &ExplorationStore{client: c}
c.SourcesStore = &SourcesStore{client: c}
c.ServersStore = &ServersStore{client: c}
c.AlertsStore = &AlertsStore{client: c}
@ -49,10 +47,6 @@ func (c *Client) Open() error {
c.db = db
if err := c.db.Update(func(tx *bolt.Tx) error {
// Always create explorations bucket.
if _, err := tx.CreateBucketIfNotExists(ExplorationBucket); err != nil {
return err
}
// Always create Sources bucket.
if _, err := tx.CreateBucketIfNotExists(SourcesBucket); err != nil {
return err

View File

@ -1,128 +0,0 @@
package bolt
import (
"context"
"github.com/boltdb/bolt"
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/bolt/internal"
)
// Ensure ExplorationStore implements chronograf.ExplorationStore.
var _ chronograf.ExplorationStore = &ExplorationStore{}
var ExplorationBucket = []byte("Explorations")
type ExplorationStore struct {
client *Client
}
// Search the ExplorationStore for all explorations owned by userID.
func (s *ExplorationStore) Query(ctx context.Context, uid chronograf.UserID) ([]*chronograf.Exploration, error) {
var explorations []*chronograf.Exploration
if err := s.client.db.View(func(tx *bolt.Tx) error {
if err := tx.Bucket(ExplorationBucket).ForEach(func(k, v []byte) error {
var e chronograf.Exploration
if err := internal.UnmarshalExploration(v, &e); err != nil {
return err
} else if e.UserID != uid {
return nil
}
explorations = append(explorations, &e)
return nil
}); err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
return explorations, nil
}
// Create a new Exploration in the ExplorationStore.
func (s *ExplorationStore) Add(ctx context.Context, e *chronograf.Exploration) (*chronograf.Exploration, error) {
if err := s.client.db.Update(func(tx *bolt.Tx) error {
b := tx.Bucket(ExplorationBucket)
seq, err := b.NextSequence()
if err != nil {
return err
}
e.ID = chronograf.ExplorationID(seq)
e.CreatedAt = s.client.Now().UTC()
e.UpdatedAt = e.CreatedAt
if v, err := internal.MarshalExploration(e); err != nil {
return err
} else if err := b.Put(itob(int(e.ID)), v); err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
return e, nil
}
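// Note: the bucket key for each exploration is its auto-assigned bolt
// sequence number, encoded with itob(int(e.ID)); Get, Delete, and Update
// below look records up by the same itob-encoded key.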
// Delete the exploration from the ExplorationStore
func (s *ExplorationStore) Delete(ctx context.Context, e *chronograf.Exploration) error {
if err := s.client.db.Update(func(tx *bolt.Tx) error {
if err := tx.Bucket(ExplorationBucket).Delete(itob(int(e.ID))); err != nil {
return err
}
return nil
}); err != nil {
return err
}
return nil
}
// Retrieve an exploration if the id exists.
func (s *ExplorationStore) Get(ctx context.Context, id chronograf.ExplorationID) (*chronograf.Exploration, error) {
var e chronograf.Exploration
if err := s.client.db.View(func(tx *bolt.Tx) error {
if v := tx.Bucket(ExplorationBucket).Get(itob(int(id))); v == nil {
return chronograf.ErrExplorationNotFound
} else if err := internal.UnmarshalExploration(v, &e); err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
return &e, nil
}
// Update an exploration; will also update the `UpdatedAt` time.
func (s *ExplorationStore) Update(ctx context.Context, e *chronograf.Exploration) error {
if err := s.client.db.Update(func(tx *bolt.Tx) error {
// Retrieve an existing exploration with the same exploration ID.
var ee chronograf.Exploration
b := tx.Bucket(ExplorationBucket)
if v := b.Get(itob(int(e.ID))); v == nil {
return chronograf.ErrExplorationNotFound
} else if err := internal.UnmarshalExploration(v, &ee); err != nil {
return err
}
ee.Name = e.Name
ee.UserID = e.UserID
ee.Data = e.Data
ee.UpdatedAt = s.client.Now().UTC()
if v, err := internal.MarshalExploration(&ee); err != nil {
return err
} else if err := b.Put(itob(int(ee.ID)), v); err != nil {
return err
}
return nil
}); err != nil {
return err
}
return nil
}

View File

@ -1,142 +0,0 @@
package bolt_test
import (
"context"
"testing"
"github.com/influxdata/chronograf"
)
// Ensure an ExplorationStore can store, retrieve, update, and delete explorations.
func TestExplorationStore_CRUD(t *testing.T) {
c, err := NewTestClient()
if err != nil {
t.Fatal(err)
}
if err := c.Open(); err != nil {
t.Fatal(err)
}
defer c.Close()
s := c.ExplorationStore
explorations := []*chronograf.Exploration{
&chronograf.Exploration{
Name: "Ferdinand Magellan",
UserID: 2,
Data: "{\"panels\":{\"123\":{\"id\":\"123\",\"queryIds\":[\"456\"]}},\"queryConfigs\":{\"456\":{\"id\":\"456\",\"database\":null,\"measurement\":null,\"retentionPolicy\":null,\"fields\":[],\"tags\":{},\"groupBy\":{\"time\":null,\"tags\":[]},\"areTagsAccepted\":true,\"rawText\":null}}}",
},
&chronograf.Exploration{
Name: "Marco Polo",
UserID: 3,
Data: "{\"panels\":{\"123\":{\"id\":\"123\",\"queryIds\":[\"456\"]}},\"queryConfigs\":{\"456\":{\"id\":\"456\",\"database\":null,\"measurement\":null,\"retentionPolicy\":null,\"fields\":[],\"tags\":{},\"groupBy\":{\"time\":null,\"tags\":[]},\"areTagsAccepted\":true,\"rawText\":null}}}",
},
&chronograf.Exploration{
Name: "Leif Ericson",
UserID: 3,
Data: "{\"panels\":{\"123\":{\"id\":\"123\",\"queryIds\":[\"456\"]}},\"queryConfigs\":{\"456\":{\"id\":\"456\",\"database\":null,\"measurement\":null,\"retentionPolicy\":null,\"fields\":[],\"tags\":{},\"groupBy\":{\"time\":null,\"tags\":[]},\"areTagsAccepted\":true,\"rawText\":null}}}",
},
}
ctx := context.Background()
// Add new explorations.
for i := range explorations {
if _, err := s.Add(ctx, explorations[i]); err != nil {
t.Fatal(err)
}
}
// Confirm first exploration in the store is the same as the original.
if e, err := s.Get(ctx, explorations[0].ID); err != nil {
t.Fatal(err)
} else if e.ID != explorations[0].ID {
t.Fatalf("exploration ID error: got %v, expected %v", e.ID, explorations[1].ID)
} else if e.Name != explorations[0].Name {
t.Fatalf("exploration Name error: got %v, expected %v", e.Name, explorations[1].Name)
} else if e.UserID != explorations[0].UserID {
t.Fatalf("exploration UserID error: got %v, expected %v", e.UserID, explorations[1].UserID)
} else if e.Data != explorations[0].Data {
t.Fatalf("exploration Data error: got %v, expected %v", e.Data, explorations[1].Data)
}
// Update explorations.
explorations[1].Name = "Francis Drake"
explorations[2].UserID = 4
if err := s.Update(ctx, explorations[1]); err != nil {
t.Fatal(err)
} else if err := s.Update(ctx, explorations[2]); err != nil {
t.Fatal(err)
}
// Confirm explorations are updated.
if e, err := s.Get(ctx, explorations[1].ID); err != nil {
t.Fatal(err)
} else if e.Name != "Francis Drake" {
t.Fatalf("exploration 1 update error: got %v, expected %v", e.Name, "Francis Drake")
}
if e, err := s.Get(ctx, explorations[2].ID); err != nil {
t.Fatal(err)
} else if e.UserID != 4 {
t.Fatalf("exploration 2 update error: got %v, expected %v", e.UserID, 4)
}
// Delete an exploration.
if err := s.Delete(ctx, explorations[2]); err != nil {
t.Fatal(err)
}
// Confirm exploration has been deleted.
if e, err := s.Get(ctx, explorations[2].ID); err != chronograf.ErrExplorationNotFound {
t.Fatalf("exploration delete error: got %v, expected %v", e, chronograf.ErrExplorationNotFound)
}
}
// Ensure Explorations can be queried by UserID.
func TestExplorationStore_Query(t *testing.T) {
c, err := NewTestClient()
if err != nil {
t.Fatal(err)
}
if err := c.Open(); err != nil {
t.Fatal(err)
}
defer c.Close()
s := c.ExplorationStore
explorations := []*chronograf.Exploration{
&chronograf.Exploration{
Name: "Ferdinand Magellan",
UserID: 2,
Data: "{\"panels\":{\"123\":{\"id\":\"123\",\"queryIds\":[\"456\"]}},\"queryConfigs\":{\"456\":{\"id\":\"456\",\"database\":null,\"measurement\":null,\"retentionPolicy\":null,\"fields\":[],\"tags\":{},\"groupBy\":{\"time\":null,\"tags\":[]},\"areTagsAccepted\":true,\"rawText\":null}}}",
},
&chronograf.Exploration{
Name: "Marco Polo",
UserID: 3,
Data: "{\"panels\":{\"123\":{\"id\":\"123\",\"queryIds\":[\"456\"]}},\"queryConfigs\":{\"456\":{\"id\":\"456\",\"database\":null,\"measurement\":null,\"retentionPolicy\":null,\"fields\":[],\"tags\":{},\"groupBy\":{\"time\":null,\"tags\":[]},\"areTagsAccepted\":true,\"rawText\":null}}}",
},
&chronograf.Exploration{
Name: "Leif Ericson",
UserID: 3,
Data: "{\"panels\":{\"123\":{\"id\":\"123\",\"queryIds\":[\"456\"]}},\"queryConfigs\":{\"456\":{\"id\":\"456\",\"database\":null,\"measurement\":null,\"retentionPolicy\":null,\"fields\":[],\"tags\":{},\"groupBy\":{\"time\":null,\"tags\":[]},\"areTagsAccepted\":true,\"rawText\":null}}}",
},
}
ctx := context.Background()
// Add new explorations.
for i := range explorations {
if _, err := s.Add(ctx, explorations[i]); err != nil {
t.Fatal(err)
}
}
// Query for explorations.
if e, err := s.Query(ctx, 3); err != nil {
t.Fatal(err)
} else if len(e) != 2 {
t.Fatalf("exploration query length error: got %v, expected %v", len(explorations), len(e))
} else if e[0].Name != explorations[1].Name {
t.Fatalf("exploration query error: got %v, expected %v", explorations[0].Name, "Marco Polo")
} else if e[1].Name != explorations[2].Name {
t.Fatalf("exploration query error: got %v, expected %v", explorations[1].Name, "Leif Ericson")
}
}

View File

@ -2,7 +2,6 @@ package internal
import (
"encoding/json"
"time"
"github.com/gogo/protobuf/proto"
"github.com/influxdata/chronograf"
@ -10,37 +9,6 @@ import (
//go:generate protoc --gogo_out=. internal.proto
// MarshalExploration encodes an exploration to binary protobuf format.
func MarshalExploration(e *chronograf.Exploration) ([]byte, error) {
return proto.Marshal(&Exploration{
ID: int64(e.ID),
Name: e.Name,
UserID: int64(e.UserID),
Data: e.Data,
CreatedAt: e.CreatedAt.UnixNano(),
UpdatedAt: e.UpdatedAt.UnixNano(),
Default: e.Default,
})
}
// UnmarshalExploration decodes an exploration from binary protobuf data.
func UnmarshalExploration(data []byte, e *chronograf.Exploration) error {
var pb Exploration
if err := proto.Unmarshal(data, &pb); err != nil {
return err
}
e.ID = chronograf.ExplorationID(pb.ID)
e.Name = pb.Name
e.UserID = chronograf.UserID(pb.UserID)
e.Data = pb.Data
e.CreatedAt = time.Unix(0, pb.CreatedAt).UTC()
e.UpdatedAt = time.Unix(0, pb.UpdatedAt).UTC()
e.Default = pb.Default
return nil
}
// MarshalSource encodes a source to binary protobuf format.
func MarshalSource(s chronograf.Source) ([]byte, error) {
return proto.Marshal(&Source{

View File

@ -9,7 +9,6 @@ It is generated from these files:
internal.proto
It has these top-level messages:
Exploration
Source
Dashboard
DashboardCell
@ -38,21 +37,6 @@ var _ = math.Inf
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
type Exploration struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
UserID int64 `protobuf:"varint,3,opt,name=UserID,proto3" json:"UserID,omitempty"`
Data string `protobuf:"bytes,4,opt,name=Data,proto3" json:"Data,omitempty"`
CreatedAt int64 `protobuf:"varint,5,opt,name=CreatedAt,proto3" json:"CreatedAt,omitempty"`
UpdatedAt int64 `protobuf:"varint,6,opt,name=UpdatedAt,proto3" json:"UpdatedAt,omitempty"`
Default bool `protobuf:"varint,7,opt,name=Default,proto3" json:"Default,omitempty"`
}
func (m *Exploration) Reset() { *m = Exploration{} }
func (m *Exploration) String() string { return proto.CompactTextString(m) }
func (*Exploration) ProtoMessage() {}
func (*Exploration) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{0} }
type Source struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
Name string `protobuf:"bytes,2,opt,name=Name,proto3" json:"Name,omitempty"`
@ -68,7 +52,7 @@ type Source struct {
func (m *Source) Reset() { *m = Source{} }
func (m *Source) String() string { return proto.CompactTextString(m) }
func (*Source) ProtoMessage() {}
func (*Source) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{1} }
func (*Source) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{0} }
type Dashboard struct {
ID int64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -79,7 +63,7 @@ type Dashboard struct {
func (m *Dashboard) Reset() { *m = Dashboard{} }
func (m *Dashboard) String() string { return proto.CompactTextString(m) }
func (*Dashboard) ProtoMessage() {}
func (*Dashboard) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{2} }
func (*Dashboard) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{1} }
func (m *Dashboard) GetCells() []*DashboardCell {
if m != nil {
@ -101,7 +85,7 @@ type DashboardCell struct {
func (m *DashboardCell) Reset() { *m = DashboardCell{} }
func (m *DashboardCell) String() string { return proto.CompactTextString(m) }
func (*DashboardCell) ProtoMessage() {}
func (*DashboardCell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{3} }
func (*DashboardCell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{2} }
func (m *DashboardCell) GetQueries() []*Query {
if m != nil {
@ -122,7 +106,7 @@ type Server struct {
func (m *Server) Reset() { *m = Server{} }
func (m *Server) String() string { return proto.CompactTextString(m) }
func (*Server) ProtoMessage() {}
func (*Server) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{4} }
func (*Server) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{3} }
type Layout struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -135,7 +119,7 @@ type Layout struct {
func (m *Layout) Reset() { *m = Layout{} }
func (m *Layout) String() string { return proto.CompactTextString(m) }
func (*Layout) ProtoMessage() {}
func (*Layout) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{5} }
func (*Layout) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{4} }
func (m *Layout) GetCells() []*Cell {
if m != nil {
@ -160,7 +144,7 @@ type Cell struct {
func (m *Cell) Reset() { *m = Cell{} }
func (m *Cell) String() string { return proto.CompactTextString(m) }
func (*Cell) ProtoMessage() {}
func (*Cell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{6} }
func (*Cell) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{5} }
func (m *Cell) GetQueries() []*Query {
if m != nil {
@ -182,7 +166,7 @@ type Query struct {
func (m *Query) Reset() { *m = Query{} }
func (m *Query) String() string { return proto.CompactTextString(m) }
func (*Query) ProtoMessage() {}
func (*Query) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{7} }
func (*Query) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{6} }
func (m *Query) GetRange() *Range {
if m != nil {
@ -199,7 +183,7 @@ type Range struct {
func (m *Range) Reset() { *m = Range{} }
func (m *Range) String() string { return proto.CompactTextString(m) }
func (*Range) ProtoMessage() {}
func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{8} }
func (*Range) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{7} }
type AlertRule struct {
ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -211,7 +195,7 @@ type AlertRule struct {
func (m *AlertRule) Reset() { *m = AlertRule{} }
func (m *AlertRule) String() string { return proto.CompactTextString(m) }
func (*AlertRule) ProtoMessage() {}
func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{9} }
func (*AlertRule) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{8} }
type User struct {
ID uint64 `protobuf:"varint,1,opt,name=ID,proto3" json:"ID,omitempty"`
@ -221,10 +205,9 @@ type User struct {
func (m *User) Reset() { *m = User{} }
func (m *User) String() string { return proto.CompactTextString(m) }
func (*User) ProtoMessage() {}
func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{10} }
func (*User) Descriptor() ([]byte, []int) { return fileDescriptorInternal, []int{9} }
func init() {
proto.RegisterType((*Exploration)(nil), "internal.Exploration")
proto.RegisterType((*Source)(nil), "internal.Source")
proto.RegisterType((*Dashboard)(nil), "internal.Dashboard")
proto.RegisterType((*DashboardCell)(nil), "internal.DashboardCell")
@ -240,50 +223,46 @@ func init() {
func init() { proto.RegisterFile("internal.proto", fileDescriptorInternal) }
var fileDescriptorInternal = []byte{
// 712 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xbc, 0x55, 0xd1, 0x6e, 0xd3, 0x4a,
0x10, 0xd5, 0xc6, 0x76, 0x12, 0x4f, 0x7b, 0x7b, 0xaf, 0x56, 0xd5, 0xc5, 0x42, 0x3c, 0x44, 0x16,
0x48, 0x41, 0x82, 0x3e, 0xd0, 0x2f, 0x48, 0xe3, 0x0a, 0x05, 0x4a, 0x29, 0x9b, 0x06, 0x9e, 0x40,
0xda, 0x26, 0x9b, 0xc6, 0xc2, 0xb1, 0xcd, 0xda, 0x26, 0xf5, 0x3f, 0xf0, 0x05, 0x3c, 0xf0, 0x11,
0xf0, 0x29, 0xfc, 0x08, 0x9f, 0x80, 0x66, 0xbc, 0x76, 0x5c, 0x51, 0x50, 0x9f, 0x78, 0x9b, 0x33,
0x33, 0x9d, 0x3d, 0x73, 0xce, 0xb8, 0x81, 0xbd, 0x30, 0xce, 0x95, 0x8e, 0x65, 0x74, 0x90, 0xea,
0x24, 0x4f, 0x78, 0xbf, 0xc6, 0xfe, 0x37, 0x06, 0x3b, 0xc7, 0x57, 0x69, 0x94, 0x68, 0x99, 0x87,
0x49, 0xcc, 0xf7, 0xa0, 0x33, 0x09, 0x3c, 0x36, 0x60, 0x43, 0x4b, 0x74, 0x26, 0x01, 0xe7, 0x60,
0x9f, 0xca, 0xb5, 0xf2, 0x3a, 0x03, 0x36, 0x74, 0x05, 0xc5, 0xfc, 0x7f, 0xe8, 0xce, 0x32, 0xa5,
0x27, 0x81, 0x67, 0x51, 0x9f, 0x41, 0xd8, 0x1b, 0xc8, 0x5c, 0x7a, 0x76, 0xd5, 0x8b, 0x31, 0xbf,
0x07, 0xee, 0x58, 0x2b, 0x99, 0xab, 0xc5, 0x28, 0xf7, 0x1c, 0x6a, 0xdf, 0x26, 0xb0, 0x3a, 0x4b,
0x17, 0xa6, 0xda, 0xad, 0xaa, 0x4d, 0x82, 0x7b, 0xd0, 0x0b, 0xd4, 0x52, 0x16, 0x51, 0xee, 0xf5,
0x06, 0x6c, 0xd8, 0x17, 0x35, 0xf4, 0x7f, 0x30, 0xe8, 0x4e, 0x93, 0x42, 0xcf, 0xd5, 0xad, 0x08,
0x73, 0xb0, 0xcf, 0xcb, 0x54, 0x11, 0x5d, 0x57, 0x50, 0xcc, 0xef, 0x42, 0x1f, 0x69, 0xc7, 0xd8,
0x5b, 0x11, 0x6e, 0x30, 0xd6, 0xce, 0x64, 0x96, 0x6d, 0x12, 0xbd, 0x20, 0xce, 0xae, 0x68, 0x30,
0xff, 0x0f, 0xac, 0x99, 0x38, 0x21, 0xb2, 0xae, 0xc0, 0xf0, 0xf7, 0x34, 0x71, 0xce, 0xb9, 0x8a,
0xd4, 0xa5, 0x96, 0x4b, 0xaf, 0x5f, 0xcd, 0xa9, 0x31, 0x3f, 0x00, 0x3e, 0x89, 0x33, 0x35, 0x2f,
0xb4, 0x9a, 0xbe, 0x0f, 0xd3, 0xd7, 0x4a, 0x87, 0xcb, 0xd2, 0x73, 0x69, 0xc0, 0x0d, 0x15, 0xff,
0x1d, 0xb8, 0x81, 0xcc, 0x56, 0x17, 0x89, 0xd4, 0x8b, 0x5b, 0x2d, 0xfd, 0x18, 0x9c, 0xb9, 0x8a,
0xa2, 0xcc, 0xb3, 0x06, 0xd6, 0x70, 0xe7, 0xc9, 0x9d, 0x83, 0xe6, 0x06, 0x9a, 0x39, 0x63, 0x15,
0x45, 0xa2, 0xea, 0xf2, 0x3f, 0x33, 0xf8, 0xe7, 0x5a, 0x81, 0xef, 0x02, 0xbb, 0xa2, 0x37, 0x1c,
0xc1, 0xae, 0x10, 0x95, 0x34, 0xdf, 0x11, 0xac, 0x44, 0xb4, 0x21, 0x39, 0x1d, 0xc1, 0x36, 0x88,
0x56, 0x24, 0xa2, 0x23, 0xd8, 0x8a, 0x3f, 0x84, 0xde, 0x87, 0x42, 0xe9, 0x50, 0x65, 0x9e, 0x43,
0x4f, 0xff, 0xbb, 0x7d, 0xfa, 0x55, 0xa1, 0x74, 0x29, 0xea, 0x3a, 0xf2, 0x26, 0x03, 0x2a, 0x35,
0x29, 0xc6, 0x5c, 0x8e, 0x66, 0xf5, 0xaa, 0x1c, 0xc6, 0xfe, 0x27, 0xf4, 0x5b, 0xe9, 0x8f, 0x4a,
0xdf, 0x6a, 0xf5, 0xb6, 0xb7, 0xd6, 0x1f, 0xbc, 0xb5, 0x6f, 0xf6, 0xd6, 0xd9, 0x7a, 0xbb, 0x0f,
0xce, 0x54, 0xcf, 0x27, 0x81, 0x39, 0xce, 0x0a, 0xf8, 0x5f, 0x18, 0x74, 0x4f, 0x64, 0x99, 0x14,
0x79, 0x8b, 0x8e, 0x4b, 0x74, 0x06, 0xb0, 0x33, 0x4a, 0xd3, 0x28, 0x9c, 0xd3, 0xe7, 0x64, 0x58,
0xb5, 0x53, 0xd8, 0xf1, 0x42, 0xc9, 0xac, 0xd0, 0x6a, 0xad, 0xe2, 0xdc, 0xf0, 0x6b, 0xa7, 0xf8,
0x7d, 0x70, 0xc6, 0xe4, 0x9c, 0x4d, 0xf2, 0xed, 0x6d, 0xe5, 0xab, 0x0c, 0xa3, 0x22, 0x2e, 0x32,
0x2a, 0xf2, 0x64, 0x19, 0x25, 0x1b, 0x62, 0xdc, 0x17, 0x0d, 0xf6, 0xbf, 0x33, 0xb0, 0xff, 0x96,
0x87, 0xbb, 0xc0, 0x42, 0x63, 0x20, 0x0b, 0x1b, 0x47, 0x7b, 0x2d, 0x47, 0x3d, 0xe8, 0x95, 0x5a,
0xc6, 0x97, 0x2a, 0xf3, 0xfa, 0x03, 0x6b, 0x68, 0x89, 0x1a, 0x52, 0x25, 0x92, 0x17, 0x2a, 0xca,
0x3c, 0x77, 0x60, 0x0d, 0x5d, 0x51, 0xc3, 0xe6, 0x0a, 0xa0, 0x75, 0x05, 0x5f, 0x19, 0x38, 0xf4,
0x38, 0xfe, 0xdd, 0x38, 0x59, 0xaf, 0x65, 0xbc, 0x30, 0xd2, 0xd7, 0x10, 0xfd, 0x08, 0x8e, 0x8c,
0xec, 0x9d, 0xe0, 0x08, 0xb1, 0x38, 0x33, 0x22, 0x77, 0xc4, 0x19, 0xaa, 0xf6, 0x54, 0x27, 0x45,
0x7a, 0x54, 0x56, 0xf2, 0xba, 0xa2, 0xc1, 0xf8, 0x7f, 0xed, 0xcd, 0x4a, 0x69, 0xb3, 0xb3, 0x2b,
0x0c, 0xc2, 0x23, 0x38, 0x41, 0x56, 0x66, 0xcb, 0x0a, 0xf0, 0x07, 0xe0, 0x08, 0xdc, 0x82, 0x56,
0xbd, 0x26, 0x10, 0xa5, 0x45, 0x55, 0xf5, 0x0f, 0x4d, 0x1b, 0x4e, 0x99, 0xa5, 0xa9, 0xd2, 0xe6,
0x76, 0x2b, 0x40, 0xb3, 0x93, 0x8d, 0xd2, 0x44, 0xd9, 0x12, 0x15, 0xf0, 0xdf, 0x82, 0x3b, 0x8a,
0x94, 0xce, 0x45, 0x11, 0xa9, 0x5f, 0x4e, 0x8c, 0x83, 0xfd, 0x6c, 0xfa, 0xf2, 0xb4, 0xbe, 0x78,
0x8c, 0xb7, 0x77, 0x6a, 0xb5, 0xee, 0x14, 0x17, 0x7a, 0x2e, 0x53, 0x39, 0x09, 0xc8, 0x58, 0x4b,
0x18, 0xe4, 0x3f, 0x02, 0x1b, 0xbf, 0x87, 0xd6, 0x64, 0x9b, 0x26, 0xef, 0x83, 0x73, 0xbc, 0x96,
0x61, 0x64, 0x46, 0x57, 0xe0, 0xa2, 0x4b, 0xbf, 0x19, 0x87, 0x3f, 0x03, 0x00, 0x00, 0xff, 0xff,
0x6d, 0xf2, 0xe7, 0x54, 0x45, 0x06, 0x00, 0x00,
// 652 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0xbc, 0x94, 0x51, 0x6e, 0xd3, 0x4c,
0x10, 0xc7, 0xb5, 0xb1, 0x9d, 0xc4, 0xd3, 0x7e, 0xfd, 0x3e, 0xad, 0xaa, 0x8f, 0x15, 0x4f, 0x91,
0x05, 0x52, 0x90, 0xa0, 0x0f, 0xf4, 0x04, 0x6d, 0x8d, 0x50, 0xa0, 0x94, 0xb2, 0x69, 0xe1, 0x09,
0xa4, 0x6d, 0x3a, 0x69, 0x2c, 0x36, 0xb1, 0x59, 0xdb, 0xa4, 0xbe, 0x03, 0x27, 0xe0, 0x81, 0x43,
0x70, 0x15, 0x2e, 0xc2, 0x11, 0xd0, 0xec, 0xda, 0x8e, 0x2b, 0x2a, 0xd4, 0x27, 0xde, 0xe6, 0x37,
0xe3, 0xce, 0xce, 0xfc, 0xff, 0xd3, 0xc0, 0x4e, 0xb2, 0x2a, 0xd0, 0xac, 0x94, 0xde, 0xcb, 0x4c,
0x5a, 0xa4, 0x7c, 0xd8, 0x70, 0xf4, 0x93, 0x41, 0x7f, 0x9a, 0x96, 0x66, 0x86, 0x7c, 0x07, 0x7a,
0x93, 0x58, 0xb0, 0x11, 0x1b, 0x7b, 0xb2, 0x37, 0x89, 0x39, 0x07, 0xff, 0x44, 0x2d, 0x51, 0xf4,
0x46, 0x6c, 0x1c, 0x4a, 0x1b, 0x53, 0xee, 0xac, 0xca, 0x50, 0x78, 0x2e, 0x47, 0x31, 0xbf, 0x0f,
0xc3, 0xf3, 0x9c, 0xba, 0x2d, 0x51, 0xf8, 0x36, 0xdf, 0x32, 0xd5, 0x4e, 0x55, 0x9e, 0xaf, 0x53,
0x73, 0x29, 0x02, 0x57, 0x6b, 0x98, 0xff, 0x07, 0xde, 0xb9, 0x3c, 0x16, 0x7d, 0x9b, 0xa6, 0x90,
0x0b, 0x18, 0xc4, 0x38, 0x57, 0xa5, 0x2e, 0xc4, 0x60, 0xc4, 0xc6, 0x43, 0xd9, 0x20, 0xf5, 0x39,
0x43, 0x8d, 0x57, 0x46, 0xcd, 0xc5, 0xd0, 0xf5, 0x69, 0x98, 0xef, 0x01, 0x9f, 0xac, 0x72, 0x9c,
0x95, 0x06, 0xa7, 0x1f, 0x93, 0xec, 0x2d, 0x9a, 0x64, 0x5e, 0x89, 0xd0, 0x36, 0xb8, 0xa5, 0x12,
0x7d, 0x80, 0x30, 0x56, 0xf9, 0xe2, 0x22, 0x55, 0xe6, 0xf2, 0x4e, 0x4b, 0x3f, 0x81, 0x60, 0x86,
0x5a, 0xe7, 0xc2, 0x1b, 0x79, 0xe3, 0xad, 0xa7, 0xf7, 0xf6, 0x5a, 0x35, 0xdb, 0x3e, 0x47, 0xa8,
0xb5, 0x74, 0x5f, 0x45, 0x5f, 0x19, 0xfc, 0x73, 0xa3, 0xc0, 0xb7, 0x81, 0x5d, 0xdb, 0x37, 0x02,
0xc9, 0xae, 0x89, 0x2a, 0xdb, 0x3f, 0x90, 0xac, 0x22, 0x5a, 0x5b, 0x39, 0x03, 0xc9, 0xd6, 0x44,
0x0b, 0x2b, 0x62, 0x20, 0xd9, 0x82, 0x3f, 0x82, 0xc1, 0xa7, 0x12, 0x4d, 0x82, 0xb9, 0x08, 0xec,
0xd3, 0xff, 0x6e, 0x9e, 0x7e, 0x53, 0xa2, 0xa9, 0x64, 0x53, 0xa7, 0xb9, 0xad, 0x01, 0x4e, 0x4d,
0x1b, 0x53, 0xae, 0x20, 0xb3, 0x06, 0x2e, 0x47, 0x71, 0xf4, 0x85, 0xfc, 0x46, 0xf3, 0x19, 0xcd,
0x9d, 0x56, 0xef, 0x7a, 0xeb, 0xfd, 0xc1, 0x5b, 0xff, 0x76, 0x6f, 0x83, 0x8d, 0xb7, 0xbb, 0x10,
0x4c, 0xcd, 0x6c, 0x12, 0xdb, 0x09, 0x3d, 0xe9, 0x20, 0xfa, 0xc6, 0xa0, 0x7f, 0xac, 0xaa, 0xb4,
0x2c, 0x3a, 0xe3, 0x84, 0x76, 0x9c, 0x11, 0x6c, 0x1d, 0x64, 0x99, 0x4e, 0x66, 0xaa, 0x48, 0xd2,
0x55, 0x3d, 0x55, 0x37, 0x45, 0x5f, 0xbc, 0x42, 0x95, 0x97, 0x06, 0x97, 0xb8, 0x2a, 0xea, 0xf9,
0xba, 0x29, 0xfe, 0x00, 0x82, 0x23, 0xeb, 0x9c, 0x6f, 0xe5, 0xdb, 0xd9, 0xc8, 0xe7, 0x0c, 0xb3,
0x45, 0x5a, 0xe4, 0xa0, 0x2c, 0xd2, 0xb9, 0x4e, 0xd7, 0x76, 0xe2, 0xa1, 0x6c, 0x39, 0xfa, 0xc1,
0xc0, 0xff, 0x5b, 0x1e, 0x6e, 0x03, 0x4b, 0x6a, 0x03, 0x59, 0xd2, 0x3a, 0x3a, 0xe8, 0x38, 0x2a,
0x60, 0x50, 0x19, 0xb5, 0xba, 0xc2, 0x5c, 0x0c, 0x47, 0xde, 0xd8, 0x93, 0x0d, 0xda, 0x8a, 0x56,
0x17, 0xa8, 0x73, 0x11, 0x8e, 0xbc, 0x71, 0x28, 0x1b, 0x6c, 0xaf, 0x00, 0x3a, 0x57, 0xf0, 0x9d,
0x41, 0x60, 0x1f, 0xa7, 0xbf, 0x3b, 0x4a, 0x97, 0x4b, 0xb5, 0xba, 0xac, 0xa5, 0x6f, 0x90, 0xfc,
0x88, 0x0f, 0x6b, 0xd9, 0x7b, 0xf1, 0x21, 0xb1, 0x3c, 0xad, 0x45, 0xee, 0xc9, 0x53, 0x52, 0xed,
0xb9, 0x49, 0xcb, 0xec, 0xb0, 0x72, 0xf2, 0x86, 0xb2, 0x65, 0xfe, 0x3f, 0xf4, 0xdf, 0x2d, 0xd0,
0xd4, 0x3b, 0x87, 0xb2, 0x26, 0x3a, 0x82, 0x63, 0x9a, 0xaa, 0xde, 0xd2, 0x01, 0x7f, 0x08, 0x81,
0xa4, 0x2d, 0xec, 0xaa, 0x37, 0x04, 0xb2, 0x69, 0xe9, 0xaa, 0xd1, 0x7e, 0xfd, 0x19, 0x75, 0x39,
0xcf, 0x32, 0x34, 0xf5, 0xed, 0x3a, 0xb0, 0xbd, 0xd3, 0x35, 0x1a, 0x3b, 0xb2, 0x27, 0x1d, 0x44,
0xef, 0x21, 0x3c, 0xd0, 0x68, 0x0a, 0x59, 0x6a, 0xfc, 0xed, 0xc4, 0x38, 0xf8, 0x2f, 0xa6, 0xaf,
0x4f, 0x9a, 0x8b, 0xa7, 0x78, 0x73, 0xa7, 0x5e, 0xe7, 0x4e, 0x69, 0xa1, 0x97, 0x2a, 0x53, 0x93,
0xd8, 0x1a, 0xeb, 0xc9, 0x9a, 0xa2, 0xc7, 0xe0, 0xd3, 0xff, 0x43, 0xa7, 0xb3, 0x6f, 0x3b, 0xef,
0x42, 0xf0, 0x6c, 0xa9, 0x12, 0x5d, 0xb7, 0x76, 0x70, 0xd1, 0xb7, 0xbf, 0xbe, 0xfb, 0xbf, 0x02,
0x00, 0x00, 0xff, 0xff, 0xbd, 0xa5, 0x15, 0x7a, 0x8f, 0x05, 0x00, 0x00,
}

View File

@ -1,16 +1,6 @@
syntax = "proto3";
package internal;
message Exploration {
int64 ID = 1; // ExplorationID is a unique ID for an Exploration.
string Name = 2; // User provided name of the Exploration.
int64 UserID = 3; // UserID is the owner of this Exploration.
string Data = 4; // Opaque blob of JSON data.
int64 CreatedAt = 5; // Time the exploration was first created.
int64 UpdatedAt = 6; // Latest time the exploration was updated.
bool Default = 7; // Flags an exploration as the default.
}
message Source {
int64 ID = 1; // ID is the unique ID of the source
string Name = 2; // Name is the user-defined name for the source
@ -18,7 +8,7 @@ message Source {
string Username = 4; // Username is the username to connect to the source
string Password = 5;
string URL = 6; // URL are the connections to the source
bool Default = 7; // Flags an exploration as the default.
bool Default = 7; // Flags a source as the default.
string Telegraf = 8; // Telegraf is the db telegraf is written to. By default it is "telegraf"
bool InsecureSkipVerify = 9; // InsecureSkipVerify accepts any certificate from the influx server
}
@ -34,18 +24,18 @@ message DashboardCell {
int32 y = 2; // Y-coordinate of Cell in the Dashboard
int32 w = 3; // Width of Cell in the Dashboard
int32 h = 4; // Height of Cell in the Dashboard
repeated Query queries = 5; // Time-series data queries for Dashboard
repeated Query queries = 5; // Time-series data queries for Dashboard
string name = 6; // User-facing name for this Dashboard
string type = 7; // Dashboard visualization type
}
message Server {
int64 ID = 1; // ID is the unique ID of the server
string Name = 2; // Name is the user-defined name for the server
string Username = 3; // Username is the username to connect to the server
string Password = 4;
string URL = 5; // URL is the path to the server
int64 SrcID = 6; // SrcID is the ID of the data source
int64 ID = 1; // ID is the unique ID of the server
string Name = 2; // Name is the user-defined name for the server
string Username = 3; // Username is the username to connect to the server
string Password = 4;
string URL = 5; // URL is the path to the server
int64 SrcID = 6; // SrcID is the ID of the data source
}
message Layout {

View File

@ -3,33 +3,11 @@ package internal_test
import (
"reflect"
"testing"
"time"
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/bolt/internal"
)
// Ensure an exploration can be marshaled and unmarshaled.
func TestMarshalExploration(t *testing.T) {
v := chronograf.Exploration{
ID: 12,
Name: "Some Exploration",
UserID: 34,
Data: "{\"data\":\"something\"}",
CreatedAt: time.Now().UTC(),
UpdatedAt: time.Now().UTC(),
}
var vv chronograf.Exploration
if buf, err := internal.MarshalExploration(&v); err != nil {
t.Fatal(err)
} else if err := internal.UnmarshalExploration(buf, &vv); err != nil {
t.Fatal(err)
} else if !reflect.DeepEqual(v, vv) {
t.Fatalf("exploration protobuf copy error: got %#v, expected %#v", vv, v)
}
}
func TestMarshalSource(t *testing.T) {
v := chronograf.Source{
ID: 12,

View File

@ -8,16 +8,15 @@ import (
// General errors.
const (
ErrUpstreamTimeout = Error("request to backend timed out")
ErrExplorationNotFound = Error("exploration not found")
ErrSourceNotFound = Error("source not found")
ErrServerNotFound = Error("server not found")
ErrLayoutNotFound = Error("layout not found")
ErrDashboardNotFound = Error("dashboard not found")
ErrUserNotFound = Error("user not found")
ErrLayoutInvalid = Error("layout is invalid")
ErrAlertNotFound = Error("alert not found")
ErrAuthentication = Error("user not authenticated")
ErrUpstreamTimeout = Error("request to backend timed out")
ErrSourceNotFound = Error("source not found")
ErrServerNotFound = Error("server not found")
ErrLayoutNotFound = Error("layout not found")
ErrDashboardNotFound = Error("dashboard not found")
ErrUserNotFound = Error("user not found")
ErrLayoutInvalid = Error("layout is invalid")
ErrAlertNotFound = Error("alert not found")
ErrAuthentication = Error("user not authenticated")
)
// Error is a domain error encountered while processing chronograf requests
@ -238,13 +237,13 @@ type Dashboard struct {
// DashboardCell holds visual and query information for a cell
type DashboardCell struct {
X int32 `json:"x"`
Y int32 `json:"y"`
W int32 `json:"w"`
H int32 `json:"h"`
Name string `json:"name"`
X int32 `json:"x"`
Y int32 `json:"y"`
W int32 `json:"w"`
H int32 `json:"h"`
Name string `json:"name"`
Queries []Query `json:"queries"`
Type string `json:"type"`
Type string `json:"type"`
}
// DashboardsStore is the storage and retrieval of dashboards
@ -261,34 +260,6 @@ type DashboardsStore interface {
Update(context.Context, Dashboard) error
}
// ExplorationID is a unique ID for an Exploration.
type ExplorationID int
// Exploration is a serialization of front-end Data Explorer.
type Exploration struct {
ID ExplorationID
Name string // User provided name of the Exploration.
UserID UserID // UserID is the owner of this Exploration.
Data string // Opaque blob of JSON data.
CreatedAt time.Time // Time the exploration was first created.
UpdatedAt time.Time // Latest time the exploration was updated.
Default bool // Flags an exploration as the default.
}
// ExplorationStore stores front-end serializations of data explorer sessions.
type ExplorationStore interface {
// Search the ExplorationStore for each Exploration owned by `UserID`.
Query(ctx context.Context, userID UserID) ([]*Exploration, error)
// Create a new Exploration in the ExplorationStore.
Add(context.Context, *Exploration) (*Exploration, error)
// Delete the Exploration from the ExplorationStore.
Delete(context.Context, *Exploration) error
// Retrieve an Exploration if `ID` exists.
Get(ctx context.Context, ID ExplorationID) (*Exploration, error)
// Update the Exploration; will also update the `UpdatedAt` time.
Update(context.Context, *Exploration) error
}
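// saveAndRename is an illustrative sketch only, not part of the original
// file: a minimal flow against the ExplorationStore interface removed by
// this commit, usable with any concrete store such as the bolt-backed one.
func saveAndRename(ctx context.Context, s ExplorationStore, userID UserID) error {
	// Persist a new exploration; the store assigns ID, CreatedAt, and UpdatedAt.
	e, err := s.Add(ctx, &Exploration{Name: "scratch", UserID: userID, Data: "{}"})
	if err != nil {
		return err
	}
	// Rename it; Update also refreshes UpdatedAt.
	e.Name = "renamed scratch"
	if err := s.Update(ctx, e); err != nil {
		return err
	}
	// Remove it again.
	return s.Delete(ctx, e)
}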
// Cell is a rectangle and multiple time series queries to visualize.
type Cell struct {
X int32 `json:"x"`

View File

@ -44,7 +44,7 @@ Paste an existing [InfluxQL](https://docs.influxdata.com/influxdb/latest/query_l
![Raw Editor](https://github.com/influxdata/chronograf/blob/master/docs/images/raw-editor-gs.gif)
### Other Features
View query results in tabular format (1), easily alter the query's time range with the time range selector (2), and save your graphs in individual exploration sessions (3):
View query results in tabular format (1) and easily alter the query's time range with the time range selector (2):
![Data Exploration Extras](https://github.com/influxdata/chronograf/blob/master/docs/images/data-exploration-extras-gs.png)

View File

@ -3,7 +3,7 @@ The dashboard API will support collections of resizable InfluxQL visualizations.
### TL; DR
Here are the objects we are thinking about; dashboards contain layouts which
contain explorations.
contain queries.
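As a rough Go sketch of that nesting (illustrative only: `DashboardCell` and `Query` come from this commit's `chronograf.go`, while the `Name` and `Cells` fields on `Dashboard` are assumed from its generated protobuf accessors), a dashboard carries cells that act as its layout, and each cell carries its own queries:
dash := chronograf.Dashboard{
	Name: "server overview", // assumed field on Dashboard
	Cells: []chronograf.DashboardCell{
		{
			X: 0, Y: 0, W: 4, H: 4, // grid position and size
			Name: "cpu",
			Type: "line",
			// Each cell holds the queries that feed its visualization.
			Queries: []chronograf.Query{{ /* InfluxQL statement for this cell */ }},
		},
	},
}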
#### Dashboard

View File

@ -1,220 +0,0 @@
package server
import (
"encoding/json"
"fmt"
"net/http"
"time"
"github.com/influxdata/chronograf"
)
type link struct {
Href string `json:"href"`
Rel string `json:"rel"`
}
type exploration struct {
Name string `json:"name"` // Exploration name given by user.
Data interface{} `json:"data"` // Serialization of the exploration config.
CreatedAt time.Time `json:"created_at"` // Time exploration was created
UpdatedAt time.Time `json:"updated_at"` // Latest time the exploration was updated.
Link link `json:"link"` // Self link
}
func newExploration(e *chronograf.Exploration) exploration {
rel := "self"
href := fmt.Sprintf("%s/%d/explorations/%d", "/chronograf/v1/users", e.UserID, e.ID)
return exploration{
Name: e.Name,
Data: e.Data,
CreatedAt: e.CreatedAt,
UpdatedAt: e.UpdatedAt,
Link: link{
Rel: rel,
Href: href,
},
}
}
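// Illustrative only: given the JSON tags above, a response built by
// newExploration for a hypothetical user 2 and exploration 7 marshals to
// roughly
//
//	{"name":"...","data":{...},"created_at":"...","updated_at":"...",
//	 "link":{"href":"/chronograf/v1/users/2/explorations/7","rel":"self"}}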
type explorations struct {
Explorations []exploration `json:"explorations"`
}
// Explorations returns all explorations scoped by user id.
func (h *Service) Explorations(w http.ResponseWriter, r *http.Request) {
id, err := paramID("id", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
ctx := r.Context()
mrExs, err := h.ExplorationStore.Query(ctx, chronograf.UserID(id))
if err != nil {
unknownErrorWithMessage(w, err, h.Logger)
return
}
exs := make([]exploration, len(mrExs))
for i, e := range mrExs {
exs[i] = newExploration(e)
}
res := explorations{
Explorations: exs,
}
encodeJSON(w, http.StatusOK, res, h.Logger)
}
// ExplorationsID retrieves exploration ID scoped under user.
func (h *Service) ExplorationsID(w http.ResponseWriter, r *http.Request) {
eID, err := paramID("eid", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
uID, err := paramID("id", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
ctx := r.Context()
e, err := h.ExplorationStore.Get(ctx, chronograf.ExplorationID(eID))
if err != nil || e.UserID != chronograf.UserID(uID) {
notFound(w, eID, h.Logger)
return
}
res := newExploration(e)
encodeJSON(w, http.StatusOK, res, h.Logger)
}
type patchExplorationRequest struct {
Data interface{} `json:"data,omitempty"` // Serialized configuration
Name *string `json:"name,omitempty"` // Exploration name given by user.
}
// UpdateExploration incrementally updates exploration
func (h *Service) UpdateExploration(w http.ResponseWriter, r *http.Request) {
id, err := paramID("eid", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
uID, err := paramID("id", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
ctx := r.Context()
e, err := h.ExplorationStore.Get(ctx, chronograf.ExplorationID(id))
if err != nil || e.UserID != chronograf.UserID(uID) {
notFound(w, id, h.Logger)
return
}
var req patchExplorationRequest
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
invalidJSON(w, h.Logger)
return
}
if req.Data != nil {
var ok bool
if e.Data, ok = req.Data.(string); !ok {
err := fmt.Errorf("Error: Exploration data is not a string")
invalidData(w, err, h.Logger)
return
}
}
if req.Name != nil {
e.Name = *req.Name
}
if err := h.ExplorationStore.Update(ctx, e); err != nil {
msg := "Error: Failed to update Exploration"
Error(w, http.StatusInternalServerError, msg, h.Logger)
return
}
res := newExploration(e)
encodeJSON(w, http.StatusOK, res, h.Logger)
}
type postExplorationRequest struct {
Data interface{} `json:"data"` // Serialization of config.
Name string `json:"name,omitempty"` // Exploration name given by user.
}
// NewExploration adds valid exploration scoped by user id.
func (h *Service) NewExploration(w http.ResponseWriter, r *http.Request) {
uID, err := paramID("id", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
// TODO: Check if the user exists.
var req postExplorationRequest
if err = json.NewDecoder(r.Body).Decode(&req); err != nil {
invalidJSON(w, h.Logger)
return
}
data := ""
if req.Data != nil {
data, _ = req.Data.(string)
}
e := &chronograf.Exploration{
Name: req.Name,
UserID: chronograf.UserID(uID),
Data: data,
}
ctx := r.Context()
e, err = h.ExplorationStore.Add(ctx, e)
if err != nil {
msg := fmt.Errorf("Error: Failed to save Exploration")
unknownErrorWithMessage(w, msg, h.Logger)
return
}
res := newExploration(e)
w.Header().Add("Location", res.Link.Href)
encodeJSON(w, http.StatusCreated, res, h.Logger)
}
// RemoveExploration deletes exploration from store.
func (h *Service) RemoveExploration(w http.ResponseWriter, r *http.Request) {
eID, err := paramID("eid", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
uID, err := paramID("id", r)
if err != nil {
Error(w, http.StatusUnprocessableEntity, err.Error(), h.Logger)
return
}
ctx := r.Context()
e, err := h.ExplorationStore.Get(ctx, chronograf.ExplorationID(eID))
if err != nil || e.UserID != chronograf.UserID(uID) {
notFound(w, eID, h.Logger)
return
}
if err := h.ExplorationStore.Delete(ctx, &chronograf.Exploration{ID: chronograf.ExplorationID(eID)}); err != nil {
unknownErrorWithMessage(w, err, h.Logger)
return
}
w.WriteHeader(http.StatusNoContent)
}

View File

@ -9,6 +9,11 @@ import (
"github.com/influxdata/chronograf"
)
type link struct {
Href string `json:"href"`
Rel string `json:"rel"`
}
type layoutResponse struct {
chronograf.Layout
Link link `json:"link"`

View File

@ -106,14 +106,6 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
router.PATCH("/chronograf/v1/users/:id", service.UpdateUser)
router.DELETE("/chronograf/v1/users/:id", service.RemoveUser)
// Explorations
router.GET("/chronograf/v1/users/:id/explorations", service.Explorations)
router.POST("/chronograf/v1/users/:id/explorations", service.NewExploration)
router.GET("/chronograf/v1/users/:id/explorations/:eid", service.ExplorationsID)
router.PATCH("/chronograf/v1/users/:id/explorations/:eid", service.UpdateExploration)
router.DELETE("/chronograf/v1/users/:id/explorations/:eid", service.RemoveExploration)
// Dashboards
router.GET("/chronograf/v1/dashboards", service.Dashboards)
router.POST("/chronograf/v1/dashboards", service.NewDashboard)

View File

@ -147,10 +147,9 @@ func openService(boltPath, cannedPath string, logger chronograf.Logger, useAuth
}
return Service{
ExplorationStore: db.ExplorationStore,
SourcesStore: db.SourcesStore,
ServersStore: db.ServersStore,
UsersStore: db.UsersStore,
SourcesStore: db.SourcesStore,
ServersStore: db.ServersStore,
UsersStore: db.UsersStore,
TimeSeries: &influx.Client{
Logger: logger,
},

View File

@ -4,16 +4,15 @@ import "github.com/influxdata/chronograf"
// Service handles REST calls to the persistence
type Service struct {
ExplorationStore chronograf.ExplorationStore
SourcesStore chronograf.SourcesStore
ServersStore chronograf.ServersStore
LayoutStore chronograf.LayoutStore
AlertRulesStore chronograf.AlertRulesStore
UsersStore chronograf.UsersStore
DashboardsStore chronograf.DashboardsStore
TimeSeries chronograf.TimeSeries
Logger chronograf.Logger
UseAuth bool
SourcesStore chronograf.SourcesStore
ServersStore chronograf.ServersStore
LayoutStore chronograf.LayoutStore
AlertRulesStore chronograf.AlertRulesStore
UsersStore chronograf.UsersStore
DashboardsStore chronograf.DashboardsStore
TimeSeries chronograf.TimeSeries
Logger chronograf.Logger
UseAuth bool
}
// ErrorMessage is the error response format for all service errors

View File

@ -425,233 +425,6 @@
}
}
},
"/users/{user_id}/explorations": {
"get": {
"tags": [
"users",
"explorations"
],
"summary": "Returns all explorations for specified user",
"parameters": [
{
"name": "user_id",
"in": "path",
"type": "string",
"description": "All Data Explorations returned only for this user.",
"required": true
}
],
"responses": {
"200": {
"description": "Data Explorations saved sessions for user are returned.",
"schema": {
"$ref": "#/definitions/Explorations"
}
},
"404": {
"description": "User does not exist.",
"schema": {
"$ref": "#/definitions/Error"
}
},
"default": {
"description": "Unexpected internal service error",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
},
"post": {
"tags": [
"users",
"explorations"
],
"summary": "Create new named exploration for this user",
"parameters": [
{
"name": "user_id",
"in": "path",
"type": "string",
"description": "ID of user to associate this exploration with.",
"required": true
},
{
"name": "exploration",
"in": "body",
"description": "Exploration session to save",
"schema": {
"$ref": "#/definitions/Exploration"
}
}
],
"responses": {
"201": {
"description": "Successfully created new Exploration session",
"headers": {
"Location": {
"type": "string",
"format": "url",
"description": "Location of the newly created exploration resource."
}
},
"schema": {
"$ref": "#/definitions/Exploration"
}
},
"404": {
"description": "User does not exist.",
"schema": {
"$ref": "#/definitions/Error"
}
},
"default": {
"description": "A processing or an unexpected error.",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
}
},
"/users/{user_id}/explorations/{exploration_id}": {
"get": {
"tags": [
"users",
"explorations"
],
"parameters": [
{
"name": "user_id",
"in": "path",
"type": "string",
"description": "ID of user to associate this exploration with.",
"required": true
},
{
"name": "exploration_id",
"in": "path",
"type": "string",
"description": "ID of the specific exploration.",
"required": true
}
],
"summary": "Returns the specified data exploration session",
"description": "A data exploration session specifies query information.\n",
"responses": {
"200": {
"description": "Information relating to the exploration",
"schema": {
"$ref": "#/definitions/Exploration"
}
},
"404": {
"description": "User or exploration does not exist.",
"schema": {
"$ref": "#/definitions/Error"
}
},
"default": {
"description": "Unexpected internal service error",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
},
"patch": {
"tags": [
"users",
"explorations"
],
"summary": "Update exploration configuration",
"parameters": [
{
"name": "user_id",
"in": "path",
"type": "string",
"description": "ID of user",
"required": true
},
{
"name": "exploration_id",
"in": "path",
"type": "string",
"description": "ID of the specific exploration.",
"required": true
},
{
"name": "exploration",
"in": "body",
"description": "Update the exploration information to this.",
"required": true,
"schema": {
"$ref": "#/definitions/Exploration"
}
}
],
"responses": {
"200": {
"description": "Exploration's configuration was changed",
"schema": {
"$ref": "#/definitions/Exploration"
}
},
"404": {
"description": "Data source id, user, or exploration does not exist.",
"schema": {
"$ref": "#/definitions/Error"
}
},
"default": {
"description": "A processing or an unexpected error.",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
},
"delete": {
"tags": [
"users",
"explorations"
],
"parameters": [
{
"name": "user_id",
"in": "path",
"type": "string",
"description": "ID of user to associate this exploration with.",
"required": true
},
{
"name": "exploration_id",
"in": "path",
"type": "string",
"description": "ID of the specific exploration.",
"required": true
}
],
"summary": "This specific exporer session will be removed.",
"responses": {
"204": {
"description": "Exploration session has been removed"
},
"404": {
"description": "Data source id, user, or exploration does not exist.",
"schema": {
"$ref": "#/definitions/Error"
}
},
"default": {
"description": "Unexpected internal service error",
"schema": {
"$ref": "#/definitions/Error"
}
}
}
}
},
"/sources/{id}/kapacitors": {
"get": {
"tags": [
@ -2160,45 +1933,6 @@
}
}
},
"Explorations": {
"type": "object",
"required": [
"explorations"
],
"properties": {
"explorations": {
"type": "array",
"items": {
"$ref": "#/definitions/Exploration"
}
}
}
},
"Exploration": {
"type": "object",
"properties": {
"created_at": {
"type": "string",
"format": "date-time"
},
"updated_at": {
"type": "string",
"format": "date-time",
"description": "Latest time the exploration was updated."
},
"name": {
"type": "string",
"description": "Exploration name given by user."
},
"data": {
"type": "object",
"description": "Serialization of the exploration query configuration."
},
"link": {
"$ref": "#/definitions/Link"
}
}
},
"Users": {
"type": "object",
"properties": {
@ -2563,4 +2297,4 @@
}
}
}
}
}

View File

@ -11,8 +11,7 @@ import (
)
type userLinks struct {
Self string `json:"self"` // Self link mapping to this resource
Explorations string `json:"explorations"` // URL for explorations endpoint
Self string `json:"self"` // Self link mapping to this resource
}
type userResponse struct {
@ -25,8 +24,7 @@ func newUserResponse(usr *chronograf.User) userResponse {
return userResponse{
User: usr,
Links: userLinks{
Self: fmt.Sprintf("%s/%d", base, usr.ID),
Explorations: fmt.Sprintf("%s/%d/explorations", base, usr.ID),
Self: fmt.Sprintf("%s/%d", base, usr.ID),
},
}
}