Merge branch 'master' into flux-staging
commit 26e7f641b7

13  CHANGELOG.md
@@ -1,4 +1,13 @@
-## v2.0.0-alpha.4 [unreleased]
+## v2.0.0-alpha.5 [unreleased]
+
+### Features
+1. [12096](https://github.com/influxdata/influxdb/pull/12096): Add labels to cloned tasks
+
+### Bug Fixes
+
+### UI Improvements
+
+## v2.0.0-alpha.4 [2019-02-21]
 
 ### Features
 1. [11954](https://github.com/influxdata/influxdb/pull/11954): Add the ability to run a task manually from tasks page
@@ -6,6 +15,8 @@
 1. [12009](https://github.com/influxdata/influxdb/pull/12009): Display the version information on the login page
 1. [12011](https://github.com/influxdata/influxdb/pull/12011): Add the ability to update a Variable's name and query.
 1. [12026](https://github.com/influxdata/influxdb/pull/12026): Add labels to cloned dashboard
+1. [12018](https://github.com/influxdata/influxdb/pull/12057): Add ability to filter resources by label name
+1. [11973](https://github.com/influxdata/influxdb/pull/11973): Add ability to create or add labels to a resource from labels editor
 
 ### Bug Fixes
 1. [11997](https://github.com/influxdata/influxdb/pull/11997): Update the bucket retention policy to update the time in seconds

@@ -71,7 +71,11 @@ func setupF(cmd *cobra.Command, args []string) error {
 	if err != nil {
 		return fmt.Errorf("failed to setup instance: %v", err)
 	}
-	writeTokenToPath(result.Auth.Token, defaultTokenPath())
+	err = writeTokenToPath(result.Auth.Token, defaultTokenPath())
+	if err != nil {
+		return fmt.Errorf("failed to write token to path %q: %v", defaultTokenPath(), err)
+	}
 
 	fmt.Println(promptWithColor("Your token has been stored in "+defaultTokenPath()+".", colorCyan))
 
 	w := internal.NewTabWriter(os.Stdout)

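The hunk above starts checking the error returned by writeTokenToPath. The helper itself is not shown in this diff; the sketch below is a hypothetical implementation inferred only from the call site (the name and argument order come from the diff, the body is an assumption, not the repository's actual code).

package main

import (
	"io/ioutil"
	"os"
	"path/filepath"
)

// writeTokenToPath is a hypothetical stand-in for the helper used above: it
// persists the onboarding token so later CLI invocations can reuse it.
func writeTokenToPath(token, path string) error {
	// Make sure the parent directory (for example ~/.influxdbv2) exists.
	if err := os.MkdirAll(filepath.Dir(path), 0700); err != nil {
		return err
	}
	// 0600 because the token is a credential; keep it owner-readable only.
	return ioutil.WriteFile(path, []byte(token), 0600)
}
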
@@ -442,7 +442,7 @@ func (m *Launcher) run(ctx context.Context) (err error) {
 		store taskbackend.Store
 		err   error
 	)
-	store, err = taskbolt.New(m.boltClient.DB(), "tasks")
+	store, err = taskbolt.New(m.boltClient.DB(), "tasks", taskbolt.NoCatchUp)
 	if err != nil {
 		m.logger.Error("failed opening task bolt", zap.Error(err))
 		return err
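taskbolt.New now receives an extra taskbolt.NoCatchUp argument. The diff does not show how that option is declared, so the sketch below only illustrates the common variadic functional-option pattern such a constructor usually follows; every name except NoCatchUp and New is an assumption rather than the real taskbolt API.

package taskboltsketch

import bolt "go.etcd.io/bbolt"

// Store stands in for the bolt-backed task store.
type Store struct {
	db      *bolt.DB
	bucket  string
	catchUp bool
}

// Option mutates a Store while it is being constructed.
type Option func(*Store)

// NoCatchUp is a hypothetical option: it asks the store not to schedule runs
// that were missed while the process was down.
func NoCatchUp(s *Store) { s.catchUp = false }

// New mirrors the call shape used in the diff: required arguments first,
// then any number of options applied in order.
func New(db *bolt.DB, bucket string, opts ...Option) (*Store, error) {
	s := &Store{db: db, bucket: bucket, catchUp: true}
	for _, opt := range opts {
		opt(s)
	}
	return s, nil
}
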
@@ -535,7 +535,8 @@ func (m *Launcher) run(ctx context.Context) (err error) {
 		VariableService:           variableSvc,
 		PasswordsService:          passwdsSvc,
 		OnboardingService:         onboardingSvc,
 		ProxyQueryService:         storageQueryService,
 		InfluxQLService:           nil, // No InfluxQL support
 		FluxService:               storageQueryService,
 		TaskService:               taskSvc,
 		TelegrafService:           telegrafSvc,
 		ScraperTargetStoreService: scraperTargetSvc,

@@ -1,6 +1,7 @@
 package gather
 
 import (
+	"context"
 	"encoding/json"
 
 	"github.com/influxdata/influxdb/tsdb"
@@ -25,7 +26,7 @@ func (s PointWriter) Record(collected MetricsCollection) error {
 	if err != nil {
 		return err
 	}
-	return s.Writer.WritePoints(ps)
+	return s.Writer.WritePoints(context.TODO(), ps)
 }
 
 // Recorder record the metrics of a time based.

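Record still hands the writer a context.TODO() after this change. A small sketch of how a caller that already owns a context could bound the write instead; the interface shape matches the hunk, while the 10-second timeout is purely illustrative.

package gathersketch

import (
	"context"
	"time"

	"github.com/influxdata/influxdb/models"
)

// PointsWriter matches the context-aware signature the diff migrates to.
type PointsWriter interface {
	WritePoints(ctx context.Context, points []models.Point) error
}

// recordWithDeadline threads a real context through the write so it is
// cancelled when the caller's context is cancelled or the deadline passes.
func recordWithDeadline(ctx context.Context, w PointsWriter, ps []models.Point) error {
	ctx, cancel := context.WithTimeout(ctx, 10*time.Second)
	defer cancel()
	return w.WritePoints(ctx, ps)
}
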
@@ -61,7 +61,8 @@ type APIBackend struct {
 	VariableService           influxdb.VariableService
 	PasswordsService          influxdb.PasswordsService
 	OnboardingService         influxdb.OnboardingService
 	ProxyQueryService         query.ProxyQueryService
 	InfluxQLService           query.ProxyQueryService
 	FluxService               query.ProxyQueryService
 	TaskService               influxdb.TaskService
 	TelegrafService           influxdb.TelegrafConfigStore
 	ScraperTargetStoreService influxdb.ScraperTargetStoreService

@@ -829,7 +829,7 @@ func newOrganizationLogResponse(id influxdb.ID, es []*influxdb.OperationLogEntry
 	}
 	return &operationLogResponse{
 		Links: map[string]string{
-			"self": fmt.Sprintf("/api/v2/organizations/%s/log", id),
+			"self": fmt.Sprintf("/api/v2/orgs/%s/log", id),
 		},
 		Log: log,
 	}

@@ -42,7 +42,7 @@ func NewFluxBackend(b *APIBackend) *FluxBackend {
 	return &FluxBackend{
 		Logger: b.Logger.With(zap.String("handler", "query")),
 
-		ProxyQueryService:   b.ProxyQueryService,
+		ProxyQueryService:   b.FluxService,
 		OrganizationService: b.OrganizationService,
 	}
 }

@@ -336,11 +336,11 @@ paths:
               $ref: "#/components/schemas/LabelMapping"
       responses:
         '200':
-          description: a list of all labels for a telegraf config
+          description: "the label added to the telegraf config"
          content:
            application/json:
              schema:
-                $ref: "#/components/schemas/LabelsResponse"
+                $ref: "#/components/schemas/LabelResponse"
        default:
          description: unexpected error
          content:
@@ -2136,7 +2136,7 @@ paths:
               $ref: "#/components/schemas/LabelMapping"
       responses:
         '200':
-          description: a list of all labels for a dashboard
+          description: the label added to the dashboard
          content:
            application/json:
              schema:
@@ -6228,7 +6228,7 @@ components:
      properties:
        x:
          $ref: '#/components/schemas/Axis'
-        y:
+        "y": # Quoted to prevent YAML parser from interpreting y as shorthand for true.
          $ref: '#/components/schemas/Axis'
        y2:
          $ref: '#/components/schemas/Axis'
@@ -6460,7 +6460,7 @@ components:
        x:
          type: integer
          format: int32
-        y:
+        "y": # Quoted to prevent YAML parser from interpreting y as shorthand for true.
          type: integer
          format: int32
        w:
@@ -6508,7 +6508,7 @@ components:
        x:
          type: integer
          format: int32
-        y:
+        "y": # Quoted to prevent YAML parser from interpreting y as shorthand for true.
          type: integer
          format: int32
        w:

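The swagger edits quote the y key because, under YAML 1.1 resolution rules, a bare y can be read as the boolean true. The small Go program below makes the difference observable; gopkg.in/yaml.v2 is used only for illustration, and whether a particular parser applies the single-letter boolean rule is left hedged in the comments.

package main

import (
	"fmt"

	"gopkg.in/yaml.v2"
)

func main() {
	doc := []byte("y: 1\n")

	var m map[interface{}]interface{}
	if err := yaml.Unmarshal(doc, &m); err != nil {
		panic(err)
	}
	for k := range m {
		// With a YAML 1.1 resolver the key may arrive as bool(true);
		// quoting it in the source document ("y":) forces a string key.
		fmt.Printf("key %v has type %T\n", k, k)
	}
}
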
@@ -19,6 +19,7 @@ import (
 	pcontext "github.com/influxdata/influxdb/context"
 	"github.com/influxdata/influxdb/query"
 	"github.com/influxdata/influxdb/task/backend"
 	"github.com/influxdata/influxdb/task/options"
 	"github.com/julienschmidt/httprouter"
 	"go.uber.org/zap"
 )
@@ -369,44 +370,98 @@ func decodeGetTasksRequest(ctx context.Context, r *http.Request) (*getTasksReque
 	return req, nil
 }
 
-func (h *TaskHandler) createTaskAuthorizationIfNotExists(ctx context.Context, a platform.Authorizer, t *platform.TaskCreate) error {
+// createBootstrapTaskAuthorizationIfNotExists checks if the task create request hasn't specified a token, and if the request came from a session,
+// and if both of those are true, it creates an authorization and returns it.
+//
+// Note that the created authorization will have permissions required for the task,
+// but it won't have permissions to read the task, as we don't have the task ID yet.
+//
+// This method may return a nil error and a nil authorization, if there wasn't a need to create an authorization.
+func (h *TaskHandler) createBootstrapTaskAuthorizationIfNotExists(ctx context.Context, a platform.Authorizer, t *platform.TaskCreate) (*platform.Authorization, error) {
 	if t.Token != "" {
-		return nil
+		return nil, nil
 	}
 
 	s, ok := a.(*platform.Session)
 	if !ok {
 		// If an authorization was used continue
-		return nil
+		return nil, nil
 	}
 
 	spec, err := flux.Compile(ctx, t.Flux, time.Now())
 	if err != nil {
-		return err
+		return nil, err
 	}
 
 	preAuthorizer := query.NewPreAuthorizer(h.BucketService)
 	ps, err := preAuthorizer.RequiredPermissions(ctx, spec)
 	if err != nil {
-		return err
+		return nil, err
 	}
 
 	if err := authorizer.VerifyPermissions(ctx, ps); err != nil {
-		return err
+		return nil, err
 	}
 
+	opts, err := options.FromScript(t.Flux)
+	if err != nil {
+		return nil, err
+	}
+
 	auth := &platform.Authorization{
 		OrgID:       t.OrganizationID,
 		UserID:      s.UserID,
 		Permissions: ps,
+		Description: fmt.Sprintf("bootstrap authorization for task %q", opts.Name),
 	}
 
 	if err := h.AuthorizationService.CreateAuthorization(ctx, auth); err != nil {
-		return err
+		return nil, err
 	}
 
 	t.Token = auth.Token
 
-	return nil
+	return auth, nil
 }
+
+func (h *TaskHandler) finalizeBootstrappedTaskAuthorization(ctx context.Context, bootstrap *platform.Authorization, task *platform.Task) error {
+	// If we created a bootstrapped authorization for a task,
+	// we need to replace it with a new authorization that allows read access on the task.
+	// Unfortunately for this case, updating authorizations is not allowed.
+	readTaskPerm, err := platform.NewPermissionAtID(task.ID, platform.ReadAction, platform.TasksResourceType, bootstrap.OrgID)
+	if err != nil {
+		// We should never fail to create a new permission like this.
+		return err
+	}
+	authzWithTask := &platform.Authorization{
+		UserID:      bootstrap.UserID,
+		OrgID:       bootstrap.OrgID,
+		Permissions: append([]platform.Permission{*readTaskPerm}, bootstrap.Permissions...),
+		Description: fmt.Sprintf("auto-generated authorization for task %q", task.Name),
+	}
+
+	if err := h.AuthorizationService.CreateAuthorization(ctx, authzWithTask); err != nil {
+		h.logger.Warn("Failed to finalize bootstrap authorization", zap.String("taskID", task.ID.String()))
+		// The task exists with an authorization that can't read the task.
+		return err
+	}
+
+	// Assign the new authorization...
+	u, err := h.TaskService.UpdateTask(ctx, task.ID, platform.TaskUpdate{Token: authzWithTask.Token})
+	if err != nil {
+		h.logger.Warn("Failed to assign finalized authorization", zap.String("authorizationID", bootstrap.ID.String()), zap.String("taskID", task.ID.String()))
+		// The task exists with an authorization that can't read the task,
+		// and we've created a new authorization for the task but not assigned it.
+		return err
+	}
+	*task = *u
+
+	// .. and delete the old one.
+	if err := h.AuthorizationService.DeleteAuthorization(ctx, bootstrap.ID); err != nil {
+		// Since this is the last thing we're doing, just log it if we fail to delete for some reason.
+		h.logger.Warn("Failed to delete bootstrap authorization", zap.String("authorizationID", bootstrap.ID.String()), zap.String("taskID", task.ID.String()))
+	}
+
+	return nil
+}

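Together with the handlePostTask changes that follow, the two functions above form a three-step flow. The sketch below condenses that sequence for readability; it is written as if it lived in the same http package (so it leans on that package's imports) and it restates what the diff does rather than adding behavior.

// Bootstrap flow for a task created from a session:
//  1. createBootstrapTaskAuthorizationIfNotExists mints an authorization with the
//     permissions the Flux script needs and copies its token into the create request.
//  2. TaskService.CreateTask creates the task using that bootstrap token.
//  3. finalizeBootstrappedTaskAuthorization mints a second authorization that can also
//     read the new task ID, points the task at it, then deletes the bootstrap one.
func (h *TaskHandler) createTaskFromSession(ctx context.Context, a platform.Authorizer, tc platform.TaskCreate) (*platform.Task, error) {
	bootstrap, err := h.createBootstrapTaskAuthorizationIfNotExists(ctx, a, &tc)
	if err != nil {
		return nil, err
	}

	task, err := h.TaskService.CreateTask(ctx, tc)
	if err != nil {
		return nil, err
	}

	if bootstrap != nil {
		if err := h.finalizeBootstrappedTaskAuthorization(ctx, bootstrap, task); err != nil {
			return nil, err
		}
	}
	return task, nil
}
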
@@ -435,6 +490,15 @@ func (h *TaskHandler) handlePostTask(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	if err := h.populateTaskCreateOrg(ctx, &req.TaskCreate); err != nil {
+		err = &platform.Error{
+			Err: err,
+			Msg: "could not identify organization",
+		}
+		EncodeError(ctx, err, w)
+		return
+	}
+
 	if !req.TaskCreate.OrganizationID.Valid() {
 		err := &platform.Error{
 			Code: platform.EInvalid,
@@ -444,7 +508,8 @@ func (h *TaskHandler) handlePostTask(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	if err := h.createTaskAuthorizationIfNotExists(ctx, auth, &req.TaskCreate); err != nil {
+	bootstrapAuthz, err := h.createBootstrapTaskAuthorizationIfNotExists(ctx, auth, &req.TaskCreate)
+	if err != nil {
 		EncodeError(ctx, err, w)
 		return
 	}
@@ -462,6 +527,20 @@ func (h *TaskHandler) handlePostTask(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	if bootstrapAuthz != nil {
+		// There was a bootstrapped authorization for this task.
+		// Now we need to apply the final authorization for the task.
+		if err := h.finalizeBootstrappedTaskAuthorization(ctx, bootstrapAuthz, task); err != nil {
+			err = &platform.Error{
+				Err:  err,
+				Msg:  fmt.Sprintf("successfully created task with ID %s, but failed to finalize bootstrap token for task", task.ID.String()),
+				Code: platform.EInternal,
+			}
+			EncodeError(ctx, err, w)
+			return
+		}
+	}
+
 	if err := encodeResponse(ctx, w, http.StatusCreated, newTaskResponse(*task, []*platform.Label{})); err != nil {
 		logEncodingError(h.logger, r, err)
 		return
@@ -690,6 +769,29 @@ func (h *TaskHandler) handleGetLogs(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	auth, err := pcontext.GetAuthorizer(ctx)
+	if err != nil {
+		err = &platform.Error{
+			Err:  err,
+			Code: platform.EUnauthorized,
+			Msg:  "failed to get authorizer",
+		}
+		EncodeError(ctx, err, w)
+		return
+	}
+
+	if k := auth.Kind(); k != platform.AuthorizationKind {
+		// Get the authorization for the task, if allowed.
+		authz, err := h.getAuthorizationForTask(ctx, req.filter.Task)
+		if err != nil {
+			EncodeError(ctx, err, w)
+			return
+		}
+
+		// We were able to access the authorizer for the task, so reassign that on the context for the rest of this call.
+		ctx = pcontext.SetAuthorizer(ctx, authz)
+	}
+
 	logs, _, err := h.TaskService.FindLogs(ctx, req.filter)
 	if err != nil {
 		err := &platform.Error{
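The same authorizer check and swap is repeated below in handleGetRuns, handleGetRun, and handleRetryRun. A possible helper that would factor it out is sketched here; it is not part of this diff and is written as if it belonged to the same package.

// ensureTaskAuthorizer returns a context whose authorizer is the task's own
// authorization whenever the incoming authorizer is not already an
// authorization (for example, a session). Errors are shaped for EncodeError.
func (h *TaskHandler) ensureTaskAuthorizer(ctx context.Context, taskID platform.ID) (context.Context, error) {
	auth, err := pcontext.GetAuthorizer(ctx)
	if err != nil {
		return nil, &platform.Error{
			Err:  err,
			Code: platform.EUnauthorized,
			Msg:  "failed to get authorizer",
		}
	}

	if auth.Kind() == platform.AuthorizationKind {
		return ctx, nil
	}

	authz, perr := h.getAuthorizationForTask(ctx, taskID)
	if perr != nil {
		return nil, perr
	}
	return pcontext.SetAuthorizer(ctx, authz), nil
}
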
@@ -755,6 +857,29 @@ func (h *TaskHandler) handleGetRuns(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	auth, err := pcontext.GetAuthorizer(ctx)
+	if err != nil {
+		err = &platform.Error{
+			Err:  err,
+			Code: platform.EUnauthorized,
+			Msg:  "failed to get authorizer",
+		}
+		EncodeError(ctx, err, w)
+		return
+	}
+
+	if k := auth.Kind(); k != platform.AuthorizationKind {
+		// Get the authorization for the task, if allowed.
+		authz, err := h.getAuthorizationForTask(ctx, req.filter.Task)
+		if err != nil {
+			EncodeError(ctx, err, w)
+			return
+		}
+
+		// We were able to access the authorizer for the task, so reassign that on the context for the rest of this call.
+		ctx = pcontext.SetAuthorizer(ctx, authz)
+	}
+
 	runs, _, err := h.TaskService.FindRuns(ctx, req.filter)
 	if err != nil {
 		err := &platform.Error{
@@ -939,6 +1064,29 @@ func (h *TaskHandler) handleGetRun(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	auth, err := pcontext.GetAuthorizer(ctx)
+	if err != nil {
+		err = &platform.Error{
+			Err:  err,
+			Code: platform.EUnauthorized,
+			Msg:  "failed to get authorizer",
+		}
+		EncodeError(ctx, err, w)
+		return
+	}
+
+	if k := auth.Kind(); k != platform.AuthorizationKind {
+		// Get the authorization for the task, if allowed.
+		authz, err := h.getAuthorizationForTask(ctx, req.TaskID)
+		if err != nil {
+			EncodeError(ctx, err, w)
+			return
+		}
+
+		// We were able to access the authorizer for the task, so reassign that on the context for the rest of this call.
+		ctx = pcontext.SetAuthorizer(ctx, authz)
+	}
+
 	run, err := h.TaskService.FindRunByID(ctx, req.TaskID, req.RunID)
 	if err != nil {
 		err := &platform.Error{
@@ -1073,6 +1221,29 @@ func (h *TaskHandler) handleRetryRun(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
+	auth, err := pcontext.GetAuthorizer(ctx)
+	if err != nil {
+		err = &platform.Error{
+			Err:  err,
+			Code: platform.EUnauthorized,
+			Msg:  "failed to get authorizer",
+		}
+		EncodeError(ctx, err, w)
+		return
+	}
+
+	if k := auth.Kind(); k != platform.AuthorizationKind {
+		// Get the authorization for the task, if allowed.
+		authz, err := h.getAuthorizationForTask(ctx, req.TaskID)
+		if err != nil {
+			EncodeError(ctx, err, w)
+			return
+		}
+
+		// We were able to access the authorizer for the task, so reassign that on the context for the rest of this call.
+		ctx = pcontext.SetAuthorizer(ctx, authz)
+	}
+
 	run, err := h.TaskService.RetryRun(ctx, req.TaskID, req.RunID)
 	if err != nil {
 		err := &platform.Error{
@@ -1126,6 +1297,60 @@ func decodeRetryRunRequest(ctx context.Context, r *http.Request) (*retryRunReque
 	}, nil
 }
 
+func (h *TaskHandler) populateTaskCreateOrg(ctx context.Context, tc *platform.TaskCreate) error {
+	if tc.OrganizationID.Valid() && tc.Organization != "" {
+		return nil
+	}
+
+	if !tc.OrganizationID.Valid() && tc.Organization == "" {
+		return errors.New("missing orgID and organization name")
+	}
+
+	if tc.OrganizationID.Valid() {
+		o, err := h.OrganizationService.FindOrganizationByID(ctx, tc.OrganizationID)
+		if err != nil {
+			return err
+		}
+		tc.Organization = o.Name
+	} else {
+		o, err := h.OrganizationService.FindOrganization(ctx, platform.OrganizationFilter{Name: &tc.Organization})
+		if err != nil {
+			return err
+		}
+		tc.OrganizationID = o.ID
+	}
+	return nil
+}
+
+// getAuthorizationForTask looks up the authorization associated with taskID,
+// ensuring that the authorizer on ctx is allowed to view the task and the authorization.
+//
+// This method returns a *platform.Error, suitable for directly passing to EncodeError.
+func (h *TaskHandler) getAuthorizationForTask(ctx context.Context, taskID platform.ID) (*platform.Authorization, *platform.Error) {
+	// First look up the task, if we're allowed.
+	// This assumes h.TaskService validates access.
+	t, err := h.TaskService.FindTaskByID(ctx, taskID)
+	if err != nil {
+		return nil, &platform.Error{
+			Err:  err,
+			Code: platform.EUnauthorized,
+			Msg:  "task ID unknown or unauthorized",
+		}
+	}
+
+	// Explicitly check against an authorized authorization service.
+	authz, err := authorizer.NewAuthorizationService(h.AuthorizationService).FindAuthorizationByID(ctx, t.AuthorizationID)
+	if err != nil {
+		return nil, &platform.Error{
+			Err:  err,
+			Code: platform.EUnauthorized,
+			Msg:  "unable to access task authorization",
+		}
+	}
+
+	return authz, nil
+}
+
 // TaskService connects to Influx via HTTP using tokens to manage tasks.
 type TaskService struct {
 	Addr string

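populateTaskCreateOrg means a task can now be created with only an organization name. A rough client-side sketch follows; the endpoint path matches the tests in this diff, but the JSON field names ("org", "flux") are assumptions drawn from the Go struct fields rather than a verified wire contract, and the token is a placeholder.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	body, _ := json.Marshal(map[string]string{
		"org":  "my-org", // resolved to an orgID server-side by populateTaskCreateOrg
		"flux": `option task = {name:"x", every:1m} from(bucket:"b") |> range(start:-1m)`,
	})

	req, err := http.NewRequest("POST", "http://localhost:9999/api/v2/tasks", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "Token my-token")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
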
@@ -377,7 +377,7 @@ func TestTaskHandler_handleGetRun(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			r := httptest.NewRequest("GET", "http://any.url", nil)
 			r = r.WithContext(context.WithValue(
-				context.TODO(),
+				context.Background(),
 				httprouter.ParamsKey,
 				httprouter.Params{
 					{
@@ -389,6 +389,7 @@ func TestTaskHandler_handleGetRun(t *testing.T) {
 					Value: tt.args.runID.String(),
 				},
 			}))
+			r = r.WithContext(pcontext.SetAuthorizer(r.Context(), &platform.Authorization{Permissions: platform.OperPermissions()}))
 			w := httptest.NewRecorder()
 			taskBackend := NewMockTaskBackend(t)
 			taskBackend.TaskService = tt.fields.taskService
@@ -490,7 +491,7 @@ func TestTaskHandler_handleGetRuns(t *testing.T) {
 		t.Run(tt.name, func(t *testing.T) {
 			r := httptest.NewRequest("GET", "http://any.url", nil)
 			r = r.WithContext(context.WithValue(
-				context.TODO(),
+				context.Background(),
 				httprouter.ParamsKey,
 				httprouter.Params{
 					{
@@ -498,6 +499,7 @@ func TestTaskHandler_handleGetRuns(t *testing.T) {
 					Value: tt.args.taskID.String(),
 				},
 			}))
+			r = r.WithContext(pcontext.SetAuthorizer(r.Context(), &platform.Authorization{Permissions: platform.OperPermissions()}))
 			w := httptest.NewRecorder()
 			taskBackend := NewMockTaskBackend(t)
 			taskBackend.TaskService = tt.fields.taskService
@@ -538,6 +540,9 @@ func TestTaskHandler_NotFoundStatus(t *testing.T) {
 		t.Fatal(err)
 	}
 
+	// Create a session to associate with the contexts, so authorization checks pass.
+	authz := &platform.Authorization{Permissions: platform.OperPermissions()}
+
 	const taskID, runID = platform.ID(0xCCCCCC), platform.ID(0xAAAAAA)
 
 	var (
@@ -763,7 +768,9 @@ func TestTaskHandler_NotFoundStatus(t *testing.T) {
 			okPath := fmt.Sprintf(tc.pathFmt, tc.okPathArgs...)
 			t.Run("matching ID: "+tc.method+" "+okPath, func(t *testing.T) {
 				w := httptest.NewRecorder()
-				r := httptest.NewRequest(tc.method, "http://task.example/api/v2"+okPath, strings.NewReader(tc.body))
+				r := httptest.NewRequest(tc.method, "http://task.example/api/v2"+okPath, strings.NewReader(tc.body)).WithContext(
+					pcontext.SetAuthorizer(context.Background(), authz),
+				)
 
 				h.ServeHTTP(w, r)
 
@@ -782,7 +789,9 @@ func TestTaskHandler_NotFoundStatus(t *testing.T) {
 			path := fmt.Sprintf(tc.pathFmt, nfa...)
 			t.Run(tc.method+" "+path, func(t *testing.T) {
 				w := httptest.NewRecorder()
-				r := httptest.NewRequest(tc.method, "http://task.example/api/v2"+path, strings.NewReader(tc.body))
+				r := httptest.NewRequest(tc.method, "http://task.example/api/v2"+path, strings.NewReader(tc.body)).WithContext(
+					pcontext.SetAuthorizer(context.Background(), authz),
+				)
 
 				h.ServeHTTP(w, r)
 
@@ -899,28 +908,9 @@ func TestService_handlePostTaskLabel(t *testing.T) {
 	}
 }
 
-func TestTaskHandler_CreateTaskFromSession(t *testing.T) {
-	var createdTasks []platform.TaskCreate
-	ts := &mock.TaskService{
-		CreateTaskFn: func(_ context.Context, tc platform.TaskCreate) (*platform.Task, error) {
-			createdTasks = append(createdTasks, tc)
-			// Task with fake IDs so it can be serialized.
-			return &platform.Task{ID: 9, OrganizationID: 99, AuthorizationID: 999}, nil
-		},
-	}
-
+func TestTaskHandler_Sessions(t *testing.T) {
+	// Common setup to get a working base for using tasks.
 	i := inmem.NewService()
-	h := NewTaskHandler(&TaskBackend{
-		Logger: zaptest.NewLogger(t),
-
-		TaskService:                ts,
-		AuthorizationService:       i,
-		OrganizationService:        i,
-		UserResourceMappingService: i,
-		LabelService:               i,
-		UserService:                i,
-		BucketService:              i,
-	})
-
 	ctx := context.Background()
 
@@ -954,45 +944,528 @@
|
|||
t.Fatal(err)
|
||||
}
|
||||
|
||||
// Create a session for use in authorizing context.
|
||||
s := &platform.Session{
|
||||
sessionAllPermsCtx := pcontext.SetAuthorizer(context.Background(), &platform.Session{
|
||||
UserID: u.ID,
|
||||
Permissions: platform.OperPermissions(),
|
||||
ExpiresAt: time.Now().Add(24 * time.Hour),
|
||||
}
|
||||
|
||||
b, err := json.Marshal(platform.TaskCreate{
|
||||
Flux: `option task = {name:"x", every:1m} from(bucket:"b-src") |> range(start:-1m) |> to(bucket:"b-dst", org:"o")`,
|
||||
OrganizationID: o.ID,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
sessionNoPermsCtx := pcontext.SetAuthorizer(context.Background(), &platform.Session{
|
||||
UserID: u.ID,
|
||||
ExpiresAt: time.Now().Add(24 * time.Hour),
|
||||
})
|
||||
|
||||
newHandler := func(t *testing.T, ts *mock.TaskService) *TaskHandler {
|
||||
return NewTaskHandler(&TaskBackend{
|
||||
Logger: zaptest.NewLogger(t),
|
||||
|
||||
TaskService: ts,
|
||||
AuthorizationService: i,
|
||||
OrganizationService: i,
|
||||
UserResourceMappingService: i,
|
||||
LabelService: i,
|
||||
UserService: i,
|
||||
BucketService: i,
|
||||
})
|
||||
}
|
||||
|
||||
sessionCtx := pcontext.SetAuthorizer(context.Background(), s)
|
||||
url := fmt.Sprintf("http://localhost:9999/api/v2/tasks")
|
||||
r := httptest.NewRequest("POST", url, bytes.NewReader(b)).WithContext(sessionCtx)
|
||||
t.Run("creating a task from a session", func(t *testing.T) {
|
||||
taskID := platform.ID(9)
|
||||
var createdTasks []platform.TaskCreate
|
||||
ts := &mock.TaskService{
|
||||
CreateTaskFn: func(_ context.Context, tc platform.TaskCreate) (*platform.Task, error) {
|
||||
createdTasks = append(createdTasks, tc)
|
||||
// Task with fake IDs so it can be serialized.
|
||||
return &platform.Task{ID: taskID, OrganizationID: 99, AuthorizationID: 999, Name: "x"}, nil
|
||||
},
|
||||
// Needed due to task authorization bootstrapping.
|
||||
UpdateTaskFn: func(ctx context.Context, id platform.ID, tu platform.TaskUpdate) (*platform.Task, error) {
|
||||
authz, err := i.FindAuthorizationByToken(ctx, tu.Token)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
w := httptest.NewRecorder()
|
||||
return &platform.Task{ID: taskID, OrganizationID: 99, AuthorizationID: authz.ID, Name: "x"}, nil
|
||||
},
|
||||
}
|
||||
|
||||
h.handlePostTask(w, r)
|
||||
h := newHandler(t, ts)
|
||||
url := "http://localhost:9999/api/v2/tasks"
|
||||
|
||||
res := w.Result()
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusCreated {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status created, got %v", res.StatusCode)
|
||||
}
|
||||
b, err := json.Marshal(platform.TaskCreate{
|
||||
Flux: `option task = {name:"x", every:1m} from(bucket:"b-src") |> range(start:-1m) |> to(bucket:"b-dst", org:"o")`,
|
||||
OrganizationID: o.ID,
|
||||
})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if len(createdTasks) != 1 {
|
||||
t.Fatalf("didn't create task; got %#v", createdTasks)
|
||||
}
|
||||
r := httptest.NewRequest("POST", url, bytes.NewReader(b)).WithContext(sessionAllPermsCtx)
|
||||
w := httptest.NewRecorder()
|
||||
|
||||
// The task should have been created with a valid token.
|
||||
if _, err := i.FindAuthorizationByToken(ctx, createdTasks[0].Token); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
h.handlePostTask(w, r)
|
||||
|
||||
res := w.Result()
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusCreated {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status created, got %v", res.StatusCode)
|
||||
}
|
||||
|
||||
if len(createdTasks) != 1 {
|
||||
t.Fatalf("didn't create task; got %#v", createdTasks)
|
||||
}
|
||||
|
||||
// The task should have been created with a valid token.
|
||||
var createdTask platform.Task
|
||||
if err := json.Unmarshal([]byte(body), &createdTask); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
authz, err := i.FindAuthorizationByID(ctx, createdTask.AuthorizationID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if authz.OrgID != o.ID {
|
||||
t.Fatalf("expected authorization to have org ID %v, got %v", o.ID, authz.OrgID)
|
||||
}
|
||||
if authz.UserID != u.ID {
|
||||
t.Fatalf("expected authorization to have user ID %v, got %v", u.ID, authz.UserID)
|
||||
}
|
||||
|
||||
const expDesc = `auto-generated authorization for task "x"`
|
||||
if authz.Description != expDesc {
|
||||
t.Fatalf("expected authorization to be created with description %q, got %q", expDesc, authz.Description)
|
||||
}
|
||||
|
||||
// The authorization should be allowed to read and write the target buckets,
|
||||
// and it should be allowed to read its task.
|
||||
if !authz.Allowed(platform.Permission{
|
||||
Action: platform.ReadAction,
|
||||
Resource: platform.Resource{
|
||||
Type: platform.BucketsResourceType,
|
||||
OrgID: &o.ID,
|
||||
ID: &bSrc.ID,
|
||||
},
|
||||
}) {
|
||||
t.Logf("WARNING: permissions on `from` buckets not yet accessible: update test after https://github.com/influxdata/flux/issues/114 is fixed.")
|
||||
}
|
||||
|
||||
if !authz.Allowed(platform.Permission{
|
||||
Action: platform.WriteAction,
|
||||
Resource: platform.Resource{
|
||||
Type: platform.BucketsResourceType,
|
||||
OrgID: &o.ID,
|
||||
ID: &bDst.ID,
|
||||
},
|
||||
}) {
|
||||
t.Fatalf("expected authorization to be allowed write access to destination bucket, but it wasn't allowed")
|
||||
}
|
||||
|
||||
if !authz.Allowed(platform.Permission{
|
||||
Action: platform.ReadAction,
|
||||
Resource: platform.Resource{
|
||||
Type: platform.TasksResourceType,
|
||||
OrgID: &o.ID,
|
||||
ID: &taskID,
|
||||
},
|
||||
}) {
|
||||
t.Fatalf("expected authorization to be allowed to read its task, but it wasn't allowed")
|
||||
}
|
||||
|
||||
// Session without permissions should not be allowed to create task.
|
||||
r = httptest.NewRequest("POST", url, bytes.NewReader(b)).WithContext(sessionNoPermsCtx)
|
||||
w = httptest.NewRecorder()
|
||||
|
||||
h.handlePostTask(w, r)
|
||||
|
||||
res = w.Result()
|
||||
body, err = ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusUnauthorized && res.StatusCode != http.StatusForbidden {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status unauthorized or forbidden, got %v", res.StatusCode)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("get runs for a task", func(t *testing.T) {
|
||||
// Unique authorization to associate with our fake task.
|
||||
taskAuth := &platform.Authorization{OrgID: o.ID, UserID: u.ID}
|
||||
if err := i.CreateAuthorization(ctx, taskAuth); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
const taskID = platform.ID(12345)
|
||||
const runID = platform.ID(9876)
|
||||
|
||||
var findRunsCtx context.Context
|
||||
ts := &mock.TaskService{
|
||||
FindRunsFn: func(ctx context.Context, f platform.RunFilter) ([]*platform.Run, int, error) {
|
||||
findRunsCtx = ctx
|
||||
if f.Task != taskID {
|
||||
t.Fatalf("expected task ID %v, got %v", taskID, f.Task)
|
||||
}
|
||||
|
||||
return []*platform.Run{
|
||||
{ID: runID, TaskID: taskID},
|
||||
}, 1, nil
|
||||
},
|
||||
|
||||
FindTaskByIDFn: func(ctx context.Context, id platform.ID) (*platform.Task, error) {
|
||||
if id != taskID {
|
||||
return nil, backend.ErrTaskNotFound
|
||||
}
|
||||
|
||||
return &platform.Task{
|
||||
ID: taskID,
|
||||
OrganizationID: o.ID,
|
||||
AuthorizationID: taskAuth.ID,
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
h := newHandler(t, ts)
|
||||
url := fmt.Sprintf("http://localhost:9999/api/v2/tasks/%s/runs", taskID)
|
||||
valCtx := context.WithValue(sessionAllPermsCtx, httprouter.ParamsKey, httprouter.Params{{Key: "id", Value: taskID.String()}})
|
||||
r := httptest.NewRequest("GET", url, nil).WithContext(valCtx)
|
||||
w := httptest.NewRecorder()
|
||||
h.handleGetRuns(w, r)
|
||||
|
||||
res := w.Result()
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status OK, got %v", res.StatusCode)
|
||||
}
|
||||
|
||||
// The context passed to TaskService.FindRuns must be a valid authorization (not a session).
|
||||
authr, err := pcontext.GetAuthorizer(findRunsCtx)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if authr.Kind() != platform.AuthorizationKind {
|
||||
t.Fatalf("expected context's authorizer to be of kind %q, got %q", platform.AuthorizationKind, authr.Kind())
|
||||
}
|
||||
if authr.Identifier() != taskAuth.ID {
|
||||
t.Fatalf("expected context's authorizer ID to be %v, got %v", taskAuth.ID, authr.Identifier())
|
||||
}
|
||||
|
||||
// Other user without permissions on the task or authorization should be disallowed.
|
||||
otherUser := &platform.User{Name: "other-" + t.Name()}
|
||||
if err := i.CreateUser(ctx, otherUser); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
valCtx = pcontext.SetAuthorizer(valCtx, &platform.Session{
|
||||
UserID: otherUser.ID,
|
||||
ExpiresAt: time.Now().Add(24 * time.Hour),
|
||||
})
|
||||
|
||||
r = httptest.NewRequest("GET", url, nil).WithContext(valCtx)
|
||||
w = httptest.NewRecorder()
|
||||
h.handleGetRuns(w, r)
|
||||
|
||||
res = w.Result()
|
||||
body, err = ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusUnauthorized {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status unauthorized, got %v", res.StatusCode)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("get single run for a task", func(t *testing.T) {
|
||||
// Unique authorization to associate with our fake task.
|
||||
taskAuth := &platform.Authorization{OrgID: o.ID, UserID: u.ID}
|
||||
if err := i.CreateAuthorization(ctx, taskAuth); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
const taskID = platform.ID(12345)
|
||||
const runID = platform.ID(9876)
|
||||
|
||||
var findRunByIDCtx context.Context
|
||||
ts := &mock.TaskService{
|
||||
FindRunByIDFn: func(ctx context.Context, tid, rid platform.ID) (*platform.Run, error) {
|
||||
findRunByIDCtx = ctx
|
||||
if tid != taskID {
|
||||
t.Fatalf("expected task ID %v, got %v", taskID, tid)
|
||||
}
|
||||
if rid != runID {
|
||||
t.Fatalf("expected run ID %v, got %v", runID, rid)
|
||||
}
|
||||
|
||||
return &platform.Run{ID: runID, TaskID: taskID}, nil
|
||||
},
|
||||
|
||||
FindTaskByIDFn: func(ctx context.Context, id platform.ID) (*platform.Task, error) {
|
||||
if id != taskID {
|
||||
return nil, backend.ErrTaskNotFound
|
||||
}
|
||||
|
||||
return &platform.Task{
|
||||
ID: taskID,
|
||||
OrganizationID: o.ID,
|
||||
AuthorizationID: taskAuth.ID,
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
h := newHandler(t, ts)
|
||||
url := fmt.Sprintf("http://localhost:9999/api/v2/tasks/%s/runs/%s", taskID, runID)
|
||||
valCtx := context.WithValue(sessionAllPermsCtx, httprouter.ParamsKey, httprouter.Params{
|
||||
{Key: "id", Value: taskID.String()},
|
||||
{Key: "rid", Value: runID.String()},
|
||||
})
|
||||
r := httptest.NewRequest("GET", url, nil).WithContext(valCtx)
|
||||
w := httptest.NewRecorder()
|
||||
h.handleGetRun(w, r)
|
||||
|
||||
res := w.Result()
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status OK, got %v", res.StatusCode)
|
||||
}
|
||||
|
||||
// The context passed to TaskService.FindRunByID must be a valid authorization (not a session).
|
||||
authr, err := pcontext.GetAuthorizer(findRunByIDCtx)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if authr.Kind() != platform.AuthorizationKind {
|
||||
t.Fatalf("expected context's authorizer to be of kind %q, got %q", platform.AuthorizationKind, authr.Kind())
|
||||
}
|
||||
if authr.Identifier() != taskAuth.ID {
|
||||
t.Fatalf("expected context's authorizer ID to be %v, got %v", taskAuth.ID, authr.Identifier())
|
||||
}
|
||||
|
||||
// Other user without permissions on the task or authorization should be disallowed.
|
||||
otherUser := &platform.User{Name: "other-" + t.Name()}
|
||||
if err := i.CreateUser(ctx, otherUser); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
valCtx = pcontext.SetAuthorizer(valCtx, &platform.Session{
|
||||
UserID: otherUser.ID,
|
||||
ExpiresAt: time.Now().Add(24 * time.Hour),
|
||||
})
|
||||
|
||||
r = httptest.NewRequest("GET", url, nil).WithContext(valCtx)
|
||||
w = httptest.NewRecorder()
|
||||
h.handleGetRuns(w, r)
|
||||
|
||||
res = w.Result()
|
||||
body, err = ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusUnauthorized {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status unauthorized, got %v", res.StatusCode)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("get logs for a run", func(t *testing.T) {
|
||||
// Unique authorization to associate with our fake task.
|
||||
taskAuth := &platform.Authorization{OrgID: o.ID, UserID: u.ID}
|
||||
if err := i.CreateAuthorization(ctx, taskAuth); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
const taskID = platform.ID(12345)
|
||||
const runID = platform.ID(9876)
|
||||
|
||||
var findLogsCtx context.Context
|
||||
ts := &mock.TaskService{
|
||||
FindLogsFn: func(ctx context.Context, f platform.LogFilter) ([]*platform.Log, int, error) {
|
||||
findLogsCtx = ctx
|
||||
if f.Task != taskID {
|
||||
t.Fatalf("expected task ID %v, got %v", taskID, f.Task)
|
||||
}
|
||||
if *f.Run != runID {
|
||||
t.Fatalf("expected run ID %v, got %v", runID, *f.Run)
|
||||
}
|
||||
|
||||
line := platform.Log("a log line")
|
||||
return []*platform.Log{&line}, 1, nil
|
||||
},
|
||||
|
||||
FindTaskByIDFn: func(ctx context.Context, id platform.ID) (*platform.Task, error) {
|
||||
if id != taskID {
|
||||
return nil, backend.ErrTaskNotFound
|
||||
}
|
||||
|
||||
return &platform.Task{
|
||||
ID: taskID,
|
||||
OrganizationID: o.ID,
|
||||
AuthorizationID: taskAuth.ID,
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
h := newHandler(t, ts)
|
||||
url := fmt.Sprintf("http://localhost:9999/api/v2/tasks/%s/runs/%s/logs", taskID, runID)
|
||||
valCtx := context.WithValue(sessionAllPermsCtx, httprouter.ParamsKey, httprouter.Params{
|
||||
{Key: "id", Value: taskID.String()},
|
||||
{Key: "rid", Value: runID.String()},
|
||||
})
|
||||
r := httptest.NewRequest("GET", url, nil).WithContext(valCtx)
|
||||
w := httptest.NewRecorder()
|
||||
h.handleGetLogs(w, r)
|
||||
|
||||
res := w.Result()
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status OK, got %v", res.StatusCode)
|
||||
}
|
||||
|
||||
// The context passed to TaskService.FindLogs must be a valid authorization (not a session).
|
||||
authr, err := pcontext.GetAuthorizer(findLogsCtx)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if authr.Kind() != platform.AuthorizationKind {
|
||||
t.Fatalf("expected context's authorizer to be of kind %q, got %q", platform.AuthorizationKind, authr.Kind())
|
||||
}
|
||||
if authr.Identifier() != taskAuth.ID {
|
||||
t.Fatalf("expected context's authorizer ID to be %v, got %v", taskAuth.ID, authr.Identifier())
|
||||
}
|
||||
|
||||
// Other user without permissions on the task or authorization should be disallowed.
|
||||
otherUser := &platform.User{Name: "other-" + t.Name()}
|
||||
if err := i.CreateUser(ctx, otherUser); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
valCtx = pcontext.SetAuthorizer(valCtx, &platform.Session{
|
||||
UserID: otherUser.ID,
|
||||
ExpiresAt: time.Now().Add(24 * time.Hour),
|
||||
})
|
||||
|
||||
r = httptest.NewRequest("GET", url, nil).WithContext(valCtx)
|
||||
w = httptest.NewRecorder()
|
||||
h.handleGetRuns(w, r)
|
||||
|
||||
res = w.Result()
|
||||
body, err = ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusUnauthorized {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status unauthorized, got %v", res.StatusCode)
|
||||
}
|
||||
})
|
||||
|
||||
t.Run("retry a run", func(t *testing.T) {
|
||||
// Unique authorization to associate with our fake task.
|
||||
taskAuth := &platform.Authorization{OrgID: o.ID, UserID: u.ID}
|
||||
if err := i.CreateAuthorization(ctx, taskAuth); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
const taskID = platform.ID(12345)
|
||||
const runID = platform.ID(9876)
|
||||
|
||||
var retryRunCtx context.Context
|
||||
ts := &mock.TaskService{
|
||||
RetryRunFn: func(ctx context.Context, tid, rid platform.ID) (*platform.Run, error) {
|
||||
retryRunCtx = ctx
|
||||
if tid != taskID {
|
||||
t.Fatalf("expected task ID %v, got %v", taskID, tid)
|
||||
}
|
||||
if rid != runID {
|
||||
t.Fatalf("expected run ID %v, got %v", runID, rid)
|
||||
}
|
||||
|
||||
return &platform.Run{ID: 10 * runID, TaskID: taskID}, nil
|
||||
},
|
||||
|
||||
FindTaskByIDFn: func(ctx context.Context, id platform.ID) (*platform.Task, error) {
|
||||
if id != taskID {
|
||||
return nil, backend.ErrTaskNotFound
|
||||
}
|
||||
|
||||
return &platform.Task{
|
||||
ID: taskID,
|
||||
OrganizationID: o.ID,
|
||||
AuthorizationID: taskAuth.ID,
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
h := newHandler(t, ts)
|
||||
url := fmt.Sprintf("http://localhost:9999/api/v2/tasks/%s/runs/%s/retry", taskID, runID)
|
||||
valCtx := context.WithValue(sessionAllPermsCtx, httprouter.ParamsKey, httprouter.Params{
|
||||
{Key: "id", Value: taskID.String()},
|
||||
{Key: "rid", Value: runID.String()},
|
||||
})
|
||||
r := httptest.NewRequest("POST", url, nil).WithContext(valCtx)
|
||||
w := httptest.NewRecorder()
|
||||
h.handleRetryRun(w, r)
|
||||
|
||||
res := w.Result()
|
||||
body, err := ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusOK {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status OK, got %v", res.StatusCode)
|
||||
}
|
||||
|
||||
// The context passed to TaskService.RetryRun must be a valid authorization (not a session).
|
||||
authr, err := pcontext.GetAuthorizer(retryRunCtx)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if authr.Kind() != platform.AuthorizationKind {
|
||||
t.Fatalf("expected context's authorizer to be of kind %q, got %q", platform.AuthorizationKind, authr.Kind())
|
||||
}
|
||||
if authr.Identifier() != taskAuth.ID {
|
||||
t.Fatalf("expected context's authorizer ID to be %v, got %v", taskAuth.ID, authr.Identifier())
|
||||
}
|
||||
|
||||
// Other user without permissions on the task or authorization should be disallowed.
|
||||
otherUser := &platform.User{Name: "other-" + t.Name()}
|
||||
if err := i.CreateUser(ctx, otherUser); err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
valCtx = pcontext.SetAuthorizer(valCtx, &platform.Session{
|
||||
UserID: otherUser.ID,
|
||||
ExpiresAt: time.Now().Add(24 * time.Hour),
|
||||
})
|
||||
|
||||
r = httptest.NewRequest("POST", url, nil).WithContext(valCtx)
|
||||
w = httptest.NewRecorder()
|
||||
h.handleGetRuns(w, r)
|
||||
|
||||
res = w.Result()
|
||||
body, err = ioutil.ReadAll(res.Body)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if res.StatusCode != http.StatusUnauthorized {
|
||||
t.Logf("response body: %s", body)
|
||||
t.Fatalf("expected status unauthorized, got %v", res.StatusCode)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
|
|
@@ -218,7 +218,7 @@ func (h *WriteHandler) handleWrite(w http.ResponseWriter, r *http.Request) {
 		return
 	}
 
-	if err := h.PointsWriter.WritePoints(exploded); err != nil {
+	if err := h.PointsWriter.WritePoints(ctx, exploded); err != nil {
 		logger.Error("Error writing points", zap.Error(err))
 		EncodeError(ctx, &platform.Error{
 			Code: platform.EInternal,

@@ -699,7 +699,7 @@ func (s *Service) GetBucketOperationLog(ctx context.Context, id influxdb.ID, opt
 		})
 	})
 
-	if err != nil {
+	if err != nil && err != errKeyValueLogBoundsNotFound {
 		return nil, 0, err
 	}
 
@@ -8,6 +8,7 @@ import (
|
|||
|
||||
influxdb "github.com/influxdata/influxdb"
|
||||
icontext "github.com/influxdata/influxdb/context"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@@ -301,7 +302,15 @@ func (s *Service) CreateDashboard(ctx context.Context, d *influxdb.Dashboard) er
|
|||
// TODO(desa): don't populate this here. use the first/last methods of the oplog to get meta fields.
|
||||
d.Meta.CreatedAt = s.time()
|
||||
|
||||
return s.putDashboardWithMeta(ctx, tx, d)
|
||||
if err := s.putDashboardWithMeta(ctx, tx, d); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := s.addDashboardOwner(ctx, tx, d.ID); err != nil {
|
||||
s.Logger.Info("failed to make user owner of organization", zap.Error(err))
|
||||
}
|
||||
|
||||
return nil
|
||||
})
|
||||
if err != nil {
|
||||
return &influxdb.Error{
|
||||
|
@@ -311,6 +320,12 @@ func (s *Service) CreateDashboard(ctx context.Context, d *influxdb.Dashboard) er
|
|||
return nil
|
||||
}
|
||||
|
||||
// addDashboardOwner attempts to create a user resource mapping for the user on the
|
||||
// authorizer found on context. If no authorizer is found on context if returns an error.
|
||||
func (s *Service) addDashboardOwner(ctx context.Context, tx Tx, orgID influxdb.ID) error {
|
||||
return s.addResourceOwner(ctx, tx, influxdb.DashboardsResourceType, orgID)
|
||||
}
|
||||
|
||||
func (s *Service) createCellView(ctx context.Context, tx Tx, dashID, cellID influxdb.ID, view *influxdb.View) error {
|
||||
if view == nil {
|
||||
// If not view exists create the view
|
||||
|
@@ -926,7 +941,7 @@ func (s *Service) GetDashboardOperationLog(ctx context.Context, id influxdb.ID,
|
|||
})
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
if err != nil && err != errKeyValueLogBoundsNotFound {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
|
|
21  kv/org.go

@@ -8,6 +8,7 @@ import (
 
 	influxdb "github.com/influxdata/influxdb"
+	icontext "github.com/influxdata/influxdb/context"
 	"go.uber.org/zap"
 )
 
 var (
@@ -213,10 +214,26 @@ func (s *Service) FindOrganizations(ctx context.Context, filter influxdb.Organiz
 // CreateOrganization creates an influxdb organization and sets o.ID.
 func (s *Service) CreateOrganization(ctx context.Context, o *influxdb.Organization) error {
 	return s.kv.Update(func(tx Tx) error {
-		return s.createOrganization(ctx, tx, o)
+		if err := s.createOrganization(ctx, tx, o); err != nil {
+			return err
+		}
+
+		// Attempt to add user as owner of organization, if that is not possible allow the
+		// organization to be created anyways.
+		if err := s.addOrgOwner(ctx, tx, o.ID); err != nil {
+			s.Logger.Info("failed to make user owner of organization", zap.Error(err))
+		}
+
+		return nil
 	})
 }
 
+// addOrgOwner attempts to create a user resource mapping for the user on the
+// authorizer found on context. If no authorizer is found on context it returns an error.
+func (s *Service) addOrgOwner(ctx context.Context, tx Tx, orgID influxdb.ID) error {
+	return s.addResourceOwner(ctx, tx, influxdb.OrgsResourceType, orgID)
+}
+
 func (s *Service) createOrganization(ctx context.Context, tx Tx, o *influxdb.Organization) error {
 	if err := s.uniqueOrganizationName(ctx, tx, o); err != nil {
 		return err
@@ -476,7 +493,7 @@ func (s *Service) GetOrganizationOperationLog(ctx context.Context, id influxdb.I
 		})
 	})
 
-	if err != nil {
+	if err != nil && err != errKeyValueLogBoundsNotFound {
 		return nil, 0, err
 	}
 
28  kv/urm.go

@@ -6,6 +6,7 @@ import (
 	"fmt"
 
 	"github.com/influxdata/influxdb"
+	icontext "github.com/influxdata/influxdb/context"
 )
 
 var (
@@ -358,3 +359,30 @@ func (s *Service) deleteOrgDependentMappings(ctx context.Context, tx Tx, m *infl
 
 	return nil
 }
+
+func (s *Service) addResourceOwner(ctx context.Context, tx Tx, rt influxdb.ResourceType, id influxdb.ID) error {
+	a, err := icontext.GetAuthorizer(ctx)
+	if err != nil {
+		return &influxdb.Error{
+			Code: influxdb.EInternal,
+			Msg:  fmt.Sprintf("could not find authorizer on context when adding user to resource type %s", rt),
+		}
+	}
+
+	urm := &influxdb.UserResourceMapping{
+		ResourceID:   id,
+		ResourceType: rt,
+		UserID:       a.GetUserID(),
+		UserType:     influxdb.Owner,
+	}
+
+	if err := s.createUserResourceMapping(ctx, tx, urm); err != nil {
+		return &influxdb.Error{
+			Code: influxdb.EInternal,
+			Msg:  "could not create user resource mapping",
+			Err:  err,
+		}
+	}
+
+	return nil
+}

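addResourceOwner reads the authorizer from the request context, so CreateOrganization (and CreateDashboard) only record an owner when the caller has set one. A sketch of that wiring from the caller's side, written as if in a package that imports the influxdb root package and its context helpers; service construction is elided.

func createOrgAsUser(ctx context.Context, svc influxdb.OrganizationService, auth *influxdb.Authorization) error {
	// Put the caller's authorization on the context so the kv service can
	// resolve a user ID and record that user as the organization's owner.
	ctx = icontext.SetAuthorizer(ctx, auth)

	org := &influxdb.Organization{Name: "example-org"}
	if err := svc.CreateOrganization(ctx, org); err != nil {
		return err
	}
	// Per the diff, the owner mapping is best effort: if it cannot be created,
	// the organization still exists and the failure is only logged.
	return nil
}
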
@@ -451,7 +451,7 @@ func (s *Service) GetUserOperationLog(ctx context.Context, id influxdb.ID, opts
 	log := []*influxdb.OperationLogEntry{}
 
 	err := s.kv.View(func(tx Tx) error {
-		key, err := encodeBucketOperationLogKey(id)
+		key, err := encodeUserOperationLogKey(id)
 		if err != nil {
 			return err
 		}
@@ -469,7 +469,7 @@ func (s *Service) GetUserOperationLog(ctx context.Context, id influxdb.ID, opts
 		})
 	})
 
-	if err != nil {
+	if err != nil && err != errKeyValueLogBoundsNotFound {
 		return nil, 0, err
 	}
 
@@ -1,6 +1,7 @@
 package mock
 
 import (
+	"context"
 	"sync"
 
 	"github.com/influxdata/influxdb/models"
@@ -21,7 +22,7 @@ func (p *PointsWriter) ForceError(err error) {
 }
 
 // WritePoints writes points to the PointsWriter that will be exposed in the Values.
-func (p *PointsWriter) WritePoints(points []models.Point) error {
+func (p *PointsWriter) WritePoints(ctx context.Context, points []models.Point) error {
 	p.mu.Lock()
 	p.Points = append(p.Points, points...)
 	err := p.Err

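With the context parameter added, tests that use the mock writer pass a context explicitly. A minimal sketch follows; it assumes the mock package lives at github.com/influxdata/influxdb/mock and that ForceError stores the error WritePoints then returns, as the hunk suggests.

package mocksketch

import (
	"context"
	"errors"
	"testing"

	"github.com/influxdata/influxdb/mock"
)

func TestPointsWriterForcedError(t *testing.T) {
	w := &mock.PointsWriter{}

	// A write with no forced error should succeed and simply record the points.
	if err := w.WritePoints(context.Background(), nil); err != nil {
		t.Fatalf("unexpected error: %v", err)
	}

	forced := errors.New("forced failure")
	w.ForceError(forced)
	if err := w.WritePoints(context.Background(), nil); err != forced {
		t.Fatalf("expected forced error, got %v", err)
	}
}
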
@@ -29,45 +29,56 @@ func newBucketServiceWithOneBucket(bucket platform.Bucket) platform.BucketServic
|
|||
}
|
||||
|
||||
func TestPreAuthorizer_PreAuthorize(t *testing.T) {
|
||||
// TODO(adam) add this test back when BucketsAccessed is restored for the from function
|
||||
// https://github.com/influxdata/flux/issues/114
|
||||
t.Skip("https://github.com/influxdata/flux/issues/114")
|
||||
ctx := context.Background()
|
||||
now := time.Now().UTC()
|
||||
|
||||
q := `from(bucket:"my_bucket") |> range(start:-2h) |> yield()`
|
||||
spec, err := flux.Compile(ctx, q, now)
|
||||
if err != nil {
|
||||
t.Fatalf("Error compiling query: %v", err)
|
||||
}
|
||||
|
||||
// Try to pre-authorize with bucket service with no buckets
|
||||
// and no authorization
|
||||
// fresh pre-authorizer
|
||||
auth := &platform.Authorization{Status: platform.Active}
|
||||
emptyBucketService := mock.NewBucketService()
|
||||
preAuthorizer := query.NewPreAuthorizer(emptyBucketService)
|
||||
|
||||
// Try to pre-authorize invalid bucketID
|
||||
q := `from(bucketID:"invalid") |> range(start:-2h) |> yield()`
|
||||
spec, err := flux.Compile(ctx, q, now)
|
||||
if err != nil {
|
||||
t.Fatalf("Error compiling query: %v", err)
|
||||
}
|
||||
err = preAuthorizer.PreAuthorize(ctx, spec, auth)
|
||||
if diagnostic := cmp.Diff("Bucket service returned nil bucket", err.Error()); diagnostic != "" {
|
||||
if diagnostic := cmp.Diff("bucket service returned nil bucket", err.Error()); diagnostic != "" {
|
||||
t.Errorf("Authorize message mismatch: -want/+got:\n%v", diagnostic)
|
||||
}
|
||||
|
||||
// Try to pre-authorize a valid from with bucket service with no buckets
|
||||
// and no authorization
|
||||
q = `from(bucket:"my_bucket") |> range(start:-2h) |> yield()`
|
||||
spec, err = flux.Compile(ctx, q, now)
|
||||
if err != nil {
|
||||
t.Fatalf("Error compiling query: %v", err)
|
||||
}
|
||||
err = preAuthorizer.PreAuthorize(ctx, spec, auth)
|
||||
if diagnostic := cmp.Diff("bucket service returned nil bucket", err.Error()); diagnostic != "" {
|
||||
t.Errorf("Authorize message mismatch: -want/+got:\n%v", diagnostic)
|
||||
}
|
||||
|
||||
// Try to authorize with a bucket service that knows about one bucket
|
||||
// (still no authorization)
|
||||
id, _ := platform.IDFromString("deadbeefdeadbeef")
|
||||
bucketID, err := platform.IDFromString("deadbeefdeadbeef")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
orgID := platform.ID(1)
|
||||
bucketService := newBucketServiceWithOneBucket(platform.Bucket{
|
||||
Name: "my_bucket",
|
||||
ID: *id,
|
||||
Name: "my_bucket",
|
||||
ID: *bucketID,
|
||||
OrganizationID: orgID,
|
||||
})
|
||||
|
||||
preAuthorizer = query.NewPreAuthorizer(bucketService)
|
||||
err = preAuthorizer.PreAuthorize(ctx, spec, auth)
|
||||
if diagnostic := cmp.Diff(`No read permission for bucket: "my_bucket"`, err.Error()); diagnostic != "" {
|
||||
if diagnostic := cmp.Diff(`no read permission for bucket: "my_bucket"`, err.Error()); diagnostic != "" {
|
||||
t.Errorf("Authorize message mismatch: -want/+got:\n%v", diagnostic)
|
||||
}
|
||||
|
||||
orgID := platform.ID(1)
|
||||
p, err := platform.NewPermissionAtID(*id, platform.ReadAction, platform.BucketsResourceType, orgID)
|
||||
p, err := platform.NewPermissionAtID(*bucketID, platform.ReadAction, platform.BucketsResourceType, orgID)
|
||||
if err != nil {
|
||||
t.Fatalf("Error creating read bucket permission query: %v", err)
|
||||
}
|
||||
|
@@ -113,14 +124,16 @@ func TestPreAuthorizer_RequiredPermissions(t *testing.T) {
|
|||
t.Fatal(err)
|
||||
}
|
||||
|
||||
pRead, err := platform.NewPermissionAtID(bFrom.ID, platform.ReadAction, platform.BucketsResourceType, o.ID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
pWrite, err := platform.NewPermissionAtID(bTo.ID, platform.WriteAction, platform.BucketsResourceType, o.ID)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
t.Log("WARNING: this test does not validate permissions on the 'from' bucket. Please update after https://github.com/influxdata/flux/issues/114.")
|
||||
|
||||
exp := []platform.Permission{*pWrite}
|
||||
exp := []platform.Permission{*pRead, *pWrite}
|
||||
if diff := cmp.Diff(exp, perms); diff != "" {
|
||||
t.Fatalf("unexpected permissions: %s", diff)
|
||||
}
|
||||
|
|
|
@@ -79,6 +79,28 @@ func (s *FromOpSpec) Kind() flux.OperationKind {
|
|||
return FromKind
|
||||
}
|
||||
|
||||
// BucketsAccessed makes FromOpSpec a query.BucketAwareOperationSpec
|
||||
func (s *FromOpSpec) BucketsAccessed() (readBuckets, writeBuckets []platform.BucketFilter) {
|
||||
bf := platform.BucketFilter{}
|
||||
if s.Bucket != "" {
|
||||
bf.Name = &s.Bucket
|
||||
}
|
||||
|
||||
if len(s.BucketID) > 0 {
|
||||
if id, err := platform.IDFromString(s.BucketID); err != nil {
|
||||
invalidID := platform.InvalidID()
|
||||
bf.ID = &invalidID
|
||||
} else {
|
||||
bf.ID = id
|
||||
}
|
||||
}
|
||||
|
||||
if bf.ID != nil || bf.Name != nil {
|
||||
readBuckets = append(readBuckets, bf)
|
||||
}
|
||||
return readBuckets, writeBuckets
|
||||
}
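BucketsAccessed is what lets the PreAuthorizer compute required permissions without executing the query: each operation in a compiled spec declares the buckets it will read or write. A hedged sketch of the aggregation step, written as if inside the influxdb query package; Spec.Walk and the Operation.Spec field are assumptions about the flux API of this era, and error handling is trimmed.

// bucketsAccessed collects the bucket filters declared by every operation in a
// compiled spec. Operations that are not bucket-aware are simply skipped.
func bucketsAccessed(spec *flux.Spec) (read, write []platform.BucketFilter, err error) {
	err = spec.Walk(func(op *flux.Operation) error {
		if ba, ok := op.Spec.(BucketAwareOperationSpec); ok {
			r, w := ba.BucketsAccessed()
			read = append(read, r...)
			write = append(write, w...)
		}
		return nil
	})
	return read, write, err
}
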
|
||||
|
||||
type FromProcedureSpec struct {
|
||||
Bucket string
|
||||
BucketID string
|
||||
|
@@ -623,9 +645,6 @@ func (FromKeysRule) Rewrite(keysNode plan.PlanNode) (plan.PlanNode, bool, error)
|
|||
return keysNode, true, nil
|
||||
}
|
||||
|
||||
// TODO(adam): implement a BucketsAccessed that doesn't depend on flux.
|
||||
// https://github.com/influxdata/flux/issues/114
|
||||
|
||||
func createFromSource(prSpec plan.ProcedureSpec, dsid execute.DatasetID, a execute.Administration) (execute.Source, error) {
|
||||
spec := prSpec.(*PhysicalFromProcedureSpec)
|
||||
var w execute.Window
|
||||
|
|
|
@@ -1,6 +1,7 @@
|
|||
package influxdb_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
|
@@ -122,24 +123,32 @@ func TestFromOperation_Marshaling(t *testing.T) {
|
|||
}
|
||||
|
||||
func TestFromOpSpec_BucketsAccessed(t *testing.T) {
|
||||
// TODO(adam) add this test back when BucketsAccessed is restored for the from function
|
||||
// https://github.com/influxdata/flux/issues/114
|
||||
t.Skip("https://github.com/influxdata/flux/issues/114")
|
||||
bucketName := "my_bucket"
|
||||
bucketID, _ := platform.IDFromString("deadbeef")
|
||||
bucketIDString := "aaaabbbbccccdddd"
|
||||
bucketID, err := platform.IDFromString(bucketIDString)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
invalidID := platform.InvalidID()
|
||||
tests := []pquerytest.BucketAwareQueryTestCase{
|
||||
{
|
||||
Name: "From with bucket",
|
||||
Raw: `from(bucket:"my_bucket")`,
|
||||
Raw: fmt.Sprintf(`from(bucket:"%s")`, bucketName),
|
||||
WantReadBuckets: &[]platform.BucketFilter{{Name: &bucketName}},
|
||||
WantWriteBuckets: &[]platform.BucketFilter{},
|
||||
},
|
||||
{
|
||||
Name: "From with bucketID",
|
||||
Raw: `from(bucketID:"deadbeef")`,
|
||||
Raw: fmt.Sprintf(`from(bucketID:"%s")`, bucketID),
|
||||
WantReadBuckets: &[]platform.BucketFilter{{ID: bucketID}},
|
||||
WantWriteBuckets: &[]platform.BucketFilter{},
|
||||
},
|
||||
{
|
||||
Name: "From invalid bucketID",
|
||||
Raw: `from(bucketID:"invalid")`,
|
||||
WantReadBuckets: &[]platform.BucketFilter{{ID: &invalidID}},
|
||||
WantWriteBuckets: &[]platform.BucketFilter{},
|
||||
},
|
||||
}
|
||||
for _, tc := range tests {
|
||||
tc := tc
|
||||
|
|
|

@ -172,7 +172,13 @@ func (ToOpSpec) Kind() flux.OperationKind {

// BucketsAccessed returns the buckets accessed by the spec.
func (o *ToOpSpec) BucketsAccessed() (readBuckets, writeBuckets []platform.BucketFilter) {
bf := platform.BucketFilter{Name: &o.Bucket, Organization: &o.Org}
bf := platform.BucketFilter{}
if o.Bucket != "" {
bf.Name = &o.Bucket
}
if o.Org != "" {
bf.Organization = &o.Org
}
if o.OrgID != "" {
id, err := platform.IDFromString(o.OrgID)
if err == nil {
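
The to() spec only describes its target bucket by name, organization, or organization ID; that filter still has to be resolved to a concrete bucket before a write permission can be checked. A rough sketch of that resolution step, assuming the BucketFilter and Bucket types from the platform package; the find callback and resolveBucketID helper are illustrative, not the lookup used in this commit:

package example

import (
    "context"
    "errors"

    platform "github.com/influxdata/influxdb"
)

// resolveBucketID turns a BucketFilter into a concrete bucket ID.
// find is a caller-supplied lookup (for example backed by a bucket service);
// its exact shape here is an assumption for illustration.
func resolveBucketID(ctx context.Context, bf platform.BucketFilter, find func(context.Context, platform.BucketFilter) (*platform.Bucket, error)) (platform.ID, error) {
    if bf.ID != nil {
        return *bf.ID, nil // already concrete
    }
    if bf.Name == nil {
        return 0, errors.New("bucket filter has neither ID nor name")
    }
    b, err := find(ctx, bf)
    if err != nil {
        return 0, err
    }
    return b.ID, nil
}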

@ -583,7 +589,7 @@ func writeTable(t *ToTransformation, tbl flux.Table) error {
}
}
points, err = tsdb.ExplodePoints(*orgID, *bucketID, points)
return d.PointsWriter.WritePoints(points)
return d.PointsWriter.WritePoints(context.TODO(), points)
})
}

@ -2,6 +2,7 @@ package influxdb_test

import (
"context"
"fmt"
"testing"

"github.com/google/go-cmp/cmp"

@ -82,24 +83,25 @@ func TestTo_Query(t *testing.T) {
}

func TestToOpSpec_BucketsAccessed(t *testing.T) {
// TODO(adam) add this test back when BucketsAccessed is restored for the from function
// https://github.com/influxdata/flux/issues/114
t.Skip("https://github.com/influxdata/flux/issues/114")
bucketName := "my_bucket"
orgName := "my_org"
id := platform.ID(1)
orgIDString := "aaaabbbbccccdddd"
orgID, err := platform.IDFromString(orgIDString)
if err != nil {
t.Fatal(err)
}
tests := []querytest.BucketAwareQueryTestCase{
{
Name: "from() with bucket and to with org and bucket",
Raw: `from(bucket:"my_bucket") |> to(bucket:"my_bucket", org:"my_org")`,
Raw: fmt.Sprintf(`from(bucket:"%s") |> to(bucket:"%s", org:"%s")`, bucketName, bucketName, orgName),
WantReadBuckets: &[]platform.BucketFilter{{Name: &bucketName}},
WantWriteBuckets: &[]platform.BucketFilter{{Name: &bucketName, Organization: &orgName}},
},
{
Name: "from() with bucket and to with orgID and bucket",
Raw: `from(bucket:"my_bucket") |> to(bucket:"my_bucket", orgID:"0000000000000001")`,
Raw: fmt.Sprintf(`from(bucket:"%s") |> to(bucket:"%s", orgID:"%s")`, bucketName, bucketName, orgIDString),
WantReadBuckets: &[]platform.BucketFilter{{Name: &bucketName}},
WantWriteBuckets: &[]platform.BucketFilter{{Name: &bucketName, OrganizationID: &id}},
WantWriteBuckets: &[]platform.BucketFilter{{Name: &bucketName, OrganizationID: orgID}},
},
}

@ -355,7 +355,7 @@ func (e *Engine) CreateCursorIterator(ctx context.Context) (tsdb.CursorIterator,
// The Engine expects all points to have been correctly validated by the caller.
// WritePoints will however determine if there are any field type conflicts, and
// return an appropriate error in that case.
func (e *Engine) WritePoints(points []models.Point) error {
func (e *Engine) WritePoints(ctx context.Context, points []models.Point) error {
collection, j := tsdb.NewSeriesCollection(points), 0
for iter := collection.Iterator(); iter.Next(); {
tags := iter.Tags()
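
Threading a context.Context through WritePoints lets callers bound or cancel storage writes instead of passing context.TODO() everywhere. A small illustrative call site (not from the commit), written against a local interface that matches the new signature:

package example

import (
    "context"
    "time"

    "github.com/influxdata/influxdb/models"
)

// pointsWriter matches the updated WritePoints signature; the storage Engine
// above satisfies it.
type pointsWriter interface {
    WritePoints(ctx context.Context, points []models.Point) error
}

// writeWithDeadline bounds a write with a timeout so a stuck write cannot
// block the caller forever.
func writeWithDeadline(w pointsWriter, points []models.Point) error {
    ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
    defer cancel()
    return w.WritePoints(ctx, points)
}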

@ -1,6 +1,7 @@
package storage_test

import (
"context"
"fmt"
"io/ioutil"
"math"

@ -359,7 +360,7 @@ func (e *Engine) Write1xPoints(pts []models.Point) error {
if err != nil {
return err
}
return e.Engine.WritePoints(points)
return e.Engine.WritePoints(context.TODO(), points)
}

// Write1xPointsWithOrgBucket writes 1.x points with the provided org and bucket id strings.

@ -378,7 +379,7 @@ func (e *Engine) Write1xPointsWithOrgBucket(pts []models.Point, org, bucket stri
if err != nil {
return err
}
return e.Engine.WritePoints(points)
return e.Engine.WritePoints(context.TODO(), points)
}

// Close closes the engine and removes all temporary data.

@ -1,10 +1,12 @@
package storage

import (
"context"

"github.com/influxdata/influxdb/models"
)

// PointsWriter describes the ability to write points into a storage engine.
type PointsWriter interface {
WritePoints([]models.Point) error
WritePoints(context.Context, []models.Point) error
}
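
Every implementer and consumer of storage.PointsWriter has to pick up the extra context argument. As an illustration of what a test double now looks like (not part of the commit), a minimal in-memory writer that honors cancellation:

package example

import (
    "context"
    "sync"

    "github.com/influxdata/influxdb/models"
)

// memWriter is a toy PointsWriter for tests: it checks for cancellation and
// otherwise appends everything to an in-memory slice.
type memWriter struct {
    mu     sync.Mutex
    points []models.Point
}

func (w *memWriter) WritePoints(ctx context.Context, pts []models.Point) error {
    if err := ctx.Err(); err != nil {
        return err // caller gave up; do no work
    }
    w.mu.Lock()
    defer w.mu.Unlock()
    w.points = append(w.points, pts...)
    return nil
}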

@ -18,6 +18,7 @@ import (
"context"
"errors"
"fmt"
"math"
"time"

bolt "github.com/coreos/bbolt"

@ -46,6 +47,8 @@ type Store struct {
db *bolt.DB
bucket []byte
idGen platform.IDGenerator

minLatestCompleted int64
}

const basePath = "/tasks/v1/"

@ -59,8 +62,14 @@ var (
runIDs = []byte(basePath + "run_ids")
)

// Option is an optional configuration for the store.
type Option func(*Store)

// NoCatchUp allows you to skip any task that was supposed to run during downtime.
func NoCatchUp(st *Store) { st.minLatestCompleted = time.Now().Unix() }

// New gives us a new Store based on "github.com/coreos/bbolt"
func New(db *bolt.DB, rootBucket string) (*Store, error) {
func New(db *bolt.DB, rootBucket string, opts ...Option) (*Store, error) {
if db.IsReadOnly() {
return nil, ErrDBReadOnly
}

@ -87,7 +96,11 @@ func New(db *bolt.DB, rootBucket string) (*Store, error) {
if err != nil {
return nil, err
}
return &Store{db: db, bucket: bucket, idGen: snowflake.NewDefaultIDGenerator()}, nil
st := &Store{db: db, bucket: bucket, idGen: snowflake.NewDefaultIDGenerator(), minLatestCompleted: math.MinInt64}
for _, opt := range opts {
opt(st)
}
return st, nil
}

// CreateTask creates a task in the boltdb task store.
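
The variadic Option parameter keeps New backward compatible: with no options the store leaves minLatestCompleted at math.MinInt64 and behaves as before, while NoCatchUp raises it to the current time so runs missed during downtime are skipped. A hypothetical extra option in the same functional-option style (not in this commit) would be defined inside the package like this:

// CatchUpSince is a hypothetical Option shown only to illustrate the pattern:
// skip any run that was scheduled before the given time.
func CatchUpSince(t time.Time) Option {
    return func(st *Store) { st.minLatestCompleted = t.Unix() }
}

// A caller could then open the store with, for example:
//   st, err := New(db, "tasks", CatchUpSince(time.Now().Add(-time.Hour)))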

@ -368,6 +381,12 @@ func (s *Store) ListTasks(ctx context.Context, params backend.TaskSearchParams)
if err := stm.Unmarshal(b.Bucket(taskMetaPath).Get(encodedID)); err != nil {
return err
}

if stm.LatestCompleted < s.minLatestCompleted {
stm.LatestCompleted = s.minLatestCompleted
stm.AlignLatestCompleted()
}

tasks[i].Meta = stm
}
}

@ -434,6 +453,11 @@ func (s *Store) FindTaskMetaByID(ctx context.Context, id platform.ID) (*backend.
return nil, err
}

if stm.LatestCompleted < s.minLatestCompleted {
stm.LatestCompleted = s.minLatestCompleted
stm.AlignLatestCompleted()
}

return &stm, nil
}

@ -472,6 +496,11 @@ func (s *Store) FindTaskByIDWithMeta(ctx context.Context, id platform.ID) (*back
return nil, nil, err
}

if stm.LatestCompleted < s.minLatestCompleted {
stm.LatestCompleted = s.minLatestCompleted
stm.AlignLatestCompleted()
}

return &backend.StoreTask{
ID: id,
Org: orgID,

@ -539,6 +568,11 @@ func (s *Store) CreateNextRun(ctx context.Context, taskID platform.ID, now int64
return err
}

if stm.LatestCompleted < s.minLatestCompleted {
stm.LatestCompleted = s.minLatestCompleted
stm.AlignLatestCompleted()
}

rc, err = stm.CreateNextRun(now, func() (platform.ID, error) {
return s.idGen.ID(), nil
})

@ -1,11 +1,14 @@
package bolt_test

import (
"context"
"io/ioutil"
"os"
"testing"
"time"

bolt "github.com/coreos/bbolt"
"github.com/influxdata/influxdb"
_ "github.com/influxdata/influxdb/query/builtin"
"github.com/influxdata/influxdb/task/backend"
boltstore "github.com/influxdata/influxdb/task/backend/bolt"

@ -49,3 +52,96 @@ func TestBoltStore(t *testing.T) {
},
)(t)
}

func TestSkip(t *testing.T) {
f, err := ioutil.TempFile("", "influx_bolt_task_store_test")
if err != nil {
t.Fatalf("failed to create tempfile for test db %v\n", err)
}
defer f.Close()
defer os.Remove(f.Name())

db, err := bolt.Open(f.Name(), os.ModeTemporary, nil)
if err != nil {
t.Fatalf("failed to open bolt db for test db %v\n", err)
}
s, err := boltstore.New(db, "testbucket")
if err != nil {
t.Fatalf("failed to create new bolt store %v\n", err)
}

schedAfter := time.Now().Add(-time.Minute)
tskID, err := s.CreateTask(context.Background(), backend.CreateTaskRequest{
Org: influxdb.ID(1),
AuthorizationID: influxdb.ID(2),
Script: `option task = {name:"x", every:1s} from(bucket:"b-src") |> range(start:-1m) |> to(bucket:"b-dst", org:"o")`,
ScheduleAfter: schedAfter.Unix(),
Status: backend.TaskActive,
})
if err != nil {
t.Fatalf("failed to create new task %v\n", err)
}

rc, err := s.CreateNextRun(context.Background(), tskID, schedAfter.Add(10*time.Second).Unix())
if err != nil {
t.Fatalf("failed to create new run %v\n", err)
}

if err := s.FinishRun(context.Background(), tskID, rc.Created.RunID); err != nil {
t.Fatalf("failed to finish run %v\n", err)
}

meta, err := s.FindTaskMetaByID(context.Background(), tskID)
if err != nil {
t.Fatalf("failed to pull meta %v\n", err)
}
if meta.LatestCompleted <= schedAfter.Unix() {
t.Fatal("failed to update latestCompleted")
}

latestCompleted := meta.LatestCompleted

s.Close()

db, err = bolt.Open(f.Name(), os.ModeTemporary, nil)
if err != nil {
t.Fatalf("failed to open bolt db for test db %v\n", err)
}
s, err = boltstore.New(db, "testbucket", boltstore.NoCatchUp)
if err != nil {
t.Fatalf("failed to create new bolt store %v\n", err)
}
defer s.Close()

meta, err = s.FindTaskMetaByID(context.Background(), tskID)
if err != nil {
t.Fatalf("failed to pull meta %v\n", err)
}

if meta.LatestCompleted == latestCompleted {
t.Fatal("failed to overwrite latest completed on new meta pull")
}
latestCompleted = meta.LatestCompleted

rc, err = s.CreateNextRun(context.Background(), tskID, time.Now().Add(10*time.Second).Unix())
if err != nil {
t.Fatalf("failed to create new run %v\n", err)
}

if err := s.FinishRun(context.Background(), tskID, rc.Created.RunID); err != nil {
t.Fatalf("failed to finish run %v\n", err)
}

tasks, err := s.ListTasks(context.Background(), backend.TaskSearchParams{})
if err != nil {
t.Fatalf("failed to pull meta %v\n", err)
}

if len(tasks) != 1 {
t.Fatal("task not found")
}

if tasks[0].Meta.LatestCompleted == latestCompleted {
t.Fatal("failed to run after an override")
}
}

@ -3,6 +3,7 @@ package backend
import (
"errors"
"math"
"strings"
"time"

platform "github.com/influxdata/influxdb"

@ -28,18 +29,32 @@ func NewStoreTaskMeta(req CreateTaskRequest, o options.Options) StoreTaskMeta {
stm.Status = string(DefaultTaskStatus)
}

if o.Every != 0 {
t := time.Unix(stm.LatestCompleted, 0).Truncate(o.Every).Unix()
stm.AlignLatestCompleted()

return stm
}

// AlignLatestCompleted aligns the latest completed time to the minute/hour/day boundary
func (stm *StoreTaskMeta) AlignLatestCompleted() {

if strings.HasPrefix(stm.EffectiveCron, "@every ") {
everyString := strings.TrimPrefix(stm.EffectiveCron, "@every ")
every, err := time.ParseDuration(everyString)
if err != nil {
// We cannot align an invalid time
return
}

t := time.Unix(stm.LatestCompleted, 0).Truncate(every).Unix()
if t == stm.LatestCompleted {
// For example, every 1m truncates to exactly on the minute.
// But the input request is schedule after, not "on or after".
// Add one interval.
t += int64(o.Every / time.Second)
t += int64(every / time.Second)
}
stm.LatestCompleted = t
}

return stm
}

// FinishRun removes the run matching runID from m's CurrentlyRunning slice,
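
The truncate-then-advance logic is easiest to see on concrete timestamps. An illustrative sketch (not from the commit) with an @every 1m schedule: a LatestCompleted that sits exactly on the minute gets pushed forward one full interval, while one that is 30 seconds past the minute is rounded down to it:

package example

import (
    "fmt"
    "time"
)

// alignExample reproduces the same arithmetic as AlignLatestCompleted on two
// concrete inputs, purely for illustration.
func alignExample() {
    every := time.Minute
    on := time.Date(2019, 2, 21, 12, 0, 0, 0, time.UTC).Unix()   // exactly on the minute
    off := time.Date(2019, 2, 21, 12, 0, 30, 0, time.UTC).Unix() // 30s past the minute

    align := func(latest int64) int64 {
        t := time.Unix(latest, 0).Truncate(every).Unix()
        if t == latest {
            // "schedule after", not "on or after": push to the next interval
            t += int64(every / time.Second)
        }
        return t
    }

    fmt.Println(align(on) - on)   // 60: advanced one full interval
    fmt.Println(off - align(off)) // 30: rounded down to the minute boundary
}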

@ -25,7 +25,7 @@ const (
// Copy of storage.PointsWriter interface.
// Duplicating it here to avoid having tasks/backend depend directly on storage.
type PointsWriter interface {
WritePoints(points []models.Point) error
WritePoints(ctx context.Context, points []models.Point) error
}

// PointLogWriter writes task and run logs as time-series points.

@ -61,7 +61,7 @@ func (p *PointLogWriter) UpdateRunState(ctx context.Context, rlb RunLogBase, whe
return err
}

return p.pointsWriter.WritePoints(exploded)
return p.pointsWriter.WritePoints(ctx, exploded)
}

func (p *PointLogWriter) AddRunLog(ctx context.Context, rlb RunLogBase, when time.Time, log string) error {

@ -83,5 +83,5 @@ func (p *PointLogWriter) AddRunLog(ctx context.Context, rlb RunLogBase, when tim
return err
}

return p.pointsWriter.WritePoints(exploded)
return p.pointsWriter.WritePoints(ctx, exploded)
}
@ -1,19 +1,21 @@
|
|||
import {Bucket} from '@influxdata/influx'
|
||||
|
||||
describe('Buckets', () => {
|
||||
let orgID: string = ''
|
||||
let bucketName: string = ''
|
||||
beforeEach(() => {
|
||||
cy.flush()
|
||||
|
||||
cy.setupUser().then(({body}) => {
|
||||
const {org, bucket} = body
|
||||
orgID = org.id
|
||||
bucketName = bucket.name
|
||||
const {
|
||||
org: {id},
|
||||
bucket,
|
||||
} = body
|
||||
cy.wrap(bucket).as('bucket')
|
||||
|
||||
cy.signin(orgID)
|
||||
})
|
||||
cy.signin(id)
|
||||
|
||||
cy.fixture('routes').then(({orgs}) => {
|
||||
cy.visit(`${orgs}/${orgID}/buckets_tab`)
|
||||
cy.fixture('routes').then(({orgs}) => {
|
||||
cy.visit(`${orgs}/${id}/buckets_tab`)
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
|
@ -38,7 +40,9 @@ describe('Buckets', () => {
|
|||
it('can update a buckets name and retention rules', () => {
|
||||
const newName = 'newdefbuck'
|
||||
|
||||
cy.contains(bucketName).click()
|
||||
cy.get<Bucket>('@bucket').then(({name}) => {
|
||||
cy.contains(name).click()
|
||||
})
|
||||
|
||||
cy.getByDataTest('retention-intervals').click()
|
||||
|
|
@ -1,11 +1,15 @@
|
|||
import {Organization} from '@influxdata/influx'
|
||||
|
||||
describe('Dashboards', () => {
|
||||
let orgID: string = ''
|
||||
beforeEach(() => {
|
||||
cy.flush()
|
||||
|
||||
cy.setupUser().then(({body}) => {
|
||||
orgID = body.org.id
|
||||
cy.signin(orgID)
|
||||
cy.wrap(body.org).as('org')
|
||||
})
|
||||
|
||||
cy.get<Organization>('@org').then(org => {
|
||||
cy.signin(org.id)
|
||||
})
|
||||
|
||||
cy.fixture('routes').then(({dashboards}) => {
|
||||
|
@ -40,8 +44,10 @@ describe('Dashboards', () => {
|
|||
})
|
||||
|
||||
it('can delete a dashboard', () => {
|
||||
cy.createDashboard(orgID)
|
||||
cy.createDashboard(orgID)
|
||||
cy.get<Organization>('@org').then(({id}) => {
|
||||
cy.createDashboard(id)
|
||||
cy.createDashboard(id)
|
||||
})
|
||||
|
||||
cy.get('.index-list--row').then(rows => {
|
||||
const numDashboards = rows.length
|
||||
|
@ -61,8 +67,10 @@ describe('Dashboards', () => {
|
|||
})
|
||||
|
||||
it('can edit a dashboards name', () => {
|
||||
cy.createDashboard(orgID).then(({body}) => {
|
||||
cy.visit(`/dashboards/${body.id}`)
|
||||
cy.get<Organization>('@org').then(({id}) => {
|
||||
cy.createDashboard(id).then(({body}) => {
|
||||
cy.visit(`/dashboards/${body.id}`)
|
||||
})
|
||||
})
|
||||
|
||||
const newName = 'new 🅱️ashboard'
|
|
@ -0,0 +1,72 @@
|
|||
import {Organization} from '@influxdata/influx'
|
||||
|
||||
describe('Tasks', () => {
|
||||
beforeEach(() => {
|
||||
cy.flush()
|
||||
|
||||
cy.setupUser().then(({body}) => {
|
||||
cy.signin(body.org.id)
|
||||
cy.wrap(body.org).as('org')
|
||||
})
|
||||
|
||||
cy.visit('/tasks')
|
||||
})
|
||||
|
||||
it('can create a task', () => {
|
||||
const taskName = '🦄ask'
|
||||
cy.get('.empty-state').within(() => {
|
||||
cy.contains('Create').click()
|
||||
})
|
||||
|
||||
cy.getByInputName('name').type(taskName)
|
||||
cy.getByInputName('interval').type('1d')
|
||||
cy.getByInputName('offset').type('20m')
|
||||
|
||||
cy.getByDataTest('flux-editor').within(() => {
|
||||
cy.get('textarea').type(
|
||||
`from(bucket: "defbuck")
|
||||
|> range(start: -2m)`,
|
||||
{force: true}
|
||||
)
|
||||
})
|
||||
|
||||
cy.contains('Save').click()
|
||||
|
||||
cy.getByDataTest('task-row')
|
||||
.should('have.length', 1)
|
||||
.and('contain', taskName)
|
||||
})
|
||||
|
||||
it('can delete a task', () => {
|
||||
cy.get<Organization>('@org').then(({id}) => {
|
||||
cy.createTask(id)
|
||||
cy.createTask(id)
|
||||
})
|
||||
|
||||
cy.getByDataTest('task-row').should('have.length', 2)
|
||||
|
||||
cy.getByDataTest('confirmation-button')
|
||||
.first()
|
||||
.click({force: true})
|
||||
|
||||
cy.getByDataTest('task-row').should('have.length', 1)
|
||||
})
|
||||
|
||||
it('fails to create a task without a valid script', () => {
|
||||
cy.get('.empty-state').within(() => {
|
||||
cy.contains('Create').click()
|
||||
})
|
||||
|
||||
cy.getByInputName('name').type('🦄ask')
|
||||
cy.getByInputName('interval').type('1d')
|
||||
cy.getByInputName('offset').type('20m')
|
||||
|
||||
cy.getByDataTest('flux-editor').within(() => {
|
||||
cy.get('textarea').type('{}', {force: true})
|
||||
})
|
||||
|
||||
cy.contains('Save').click()
|
||||
|
||||
cy.getByDataTest('notification-error').should('exist')
|
||||
})
|
||||
})
|
|
@ -1,28 +0,0 @@
|
|||
// currently getting unauthorized errors for task creation
|
||||
describe.skip('Tasks', () => {
|
||||
beforeEach(() => {
|
||||
cy.flush()
|
||||
|
||||
cy.setupUser().then(({body}) => {
|
||||
cy.signin(body.org.id)
|
||||
})
|
||||
|
||||
cy.visit('/tasks')
|
||||
})
|
||||
|
||||
it('can create a task', () => {
|
||||
cy.get('.empty-state').within(() => {
|
||||
cy.contains('Create').click()
|
||||
})
|
||||
|
||||
cy.getByInputName('name').type('🅱️ask')
|
||||
cy.getByInputName('interval').type('1d')
|
||||
cy.getByInputName('offset').type('20m')
|
||||
|
||||
cy.getByDataTest('flux-editor').within(() => {
|
||||
cy.get('textarea').type('{}', {force: true})
|
||||
})
|
||||
|
||||
cy.contains('Save').click()
|
||||
})
|
||||
})
|
|
@ -0,0 +1,31 @@
|
|||
describe('Variables', () => {
|
||||
beforeEach(() => {
|
||||
cy.flush()
|
||||
|
||||
cy.setupUser().then(({body}) => {
|
||||
cy.signin(body.org.id)
|
||||
|
||||
cy.wrap(body.org).as('org')
|
||||
cy.visit(`organizations/${body.org.id}/variables_tab`)
|
||||
})
|
||||
})
|
||||
|
||||
it('can create a variable', () => {
|
||||
cy.get('.empty-state').within(() => {
|
||||
cy.contains('Create').click()
|
||||
})
|
||||
|
||||
cy.getByInputName('name').type('Little Variable')
|
||||
cy.getByDataTest('flux-editor').within(() => {
|
||||
cy.get('textarea').type('filter(fn: (r) => r._field == "cpu")', {
|
||||
force: true,
|
||||
})
|
||||
})
|
||||
|
||||
cy.get('form')
|
||||
.contains('Create')
|
||||
.click()
|
||||
|
||||
cy.getByDataTest('variable-row').should('have.length', 1)
|
||||
})
|
||||
})
|
|
@ -9,6 +9,7 @@ import {
|
|||
getByDataTest,
|
||||
getByInputName,
|
||||
getByTitle,
|
||||
createTask,
|
||||
} from './support/commands'
|
||||
|
||||
declare global {
|
||||
|
@ -17,6 +18,7 @@ declare global {
|
|||
signin: typeof signin
|
||||
setupUser: typeof setupUser
|
||||
createSource: typeof createSource
|
||||
createTask: typeof createTask
|
||||
createDashboard: typeof createDashboard
|
||||
createOrg: typeof createOrg
|
||||
flush: typeof flush
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
export const signin = (orgID: string): Cypress.Chainable<Response> => {
|
||||
export const signin = (orgID?: string): Cypress.Chainable<Response> => {
|
||||
return cy.fixture('user').then(user => {
|
||||
cy.request({
|
||||
method: 'POST',
|
||||
|
@ -10,9 +10,8 @@ export const signin = (orgID: string): Cypress.Chainable<Response> => {
|
|||
})
|
||||
}
|
||||
|
||||
// createDashboard relies on an org fixture to be set
|
||||
export const createDashboard = (
|
||||
orgID: string
|
||||
orgID?: string
|
||||
): Cypress.Chainable<Cypress.Response> => {
|
||||
return cy.request({
|
||||
method: 'POST',
|
||||
|
@ -44,8 +43,29 @@ export const createBucket = (): Cypress.Chainable<Cypress.Response> => {
|
|||
})
|
||||
}
|
||||
|
||||
export const createTask = (
|
||||
orgID?: string
|
||||
): Cypress.Chainable<Cypress.Response> => {
|
||||
const flux = `option task = {
|
||||
name: "🦄ask",
|
||||
every: 1d,
|
||||
offset: 20m
|
||||
}
|
||||
from(bucket: "defbuck")
|
||||
|> range(start: -2m)`
|
||||
|
||||
return cy.request({
|
||||
method: 'POST',
|
||||
url: '/api/v2/tasks',
|
||||
body: {
|
||||
flux,
|
||||
orgID,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
export const createSource = (
|
||||
orgID: string
|
||||
orgID?: string
|
||||
): Cypress.Chainable<Cypress.Response> => {
|
||||
return cy.request({
|
||||
method: 'POST',
|
||||
|
@ -115,3 +135,6 @@ Cypress.Commands.add('createSource', createSource)
|
|||
|
||||
// general
|
||||
Cypress.Commands.add('flush', flush)
|
||||
|
||||
// tasks
|
||||
Cypress.Commands.add('createTask', createTask)
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
"strict": true,
|
||||
"baseUrl": "../node_modules",
|
||||
"target": "es5",
|
||||
"lib": ["es5", "dom"],
|
||||
"lib": ["es2017", "dom"],
|
||||
"types": ["cypress", "mocha", "node"]
|
||||
},
|
||||
"include": ["**/*.ts"]
|
||||
|
|
|
@ -985,9 +985,9 @@
|
|||
}
|
||||
},
|
||||
"@influxdata/influx": {
|
||||
"version": "0.2.15",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influx/-/influx-0.2.15.tgz",
|
||||
"integrity": "sha512-4s3yLEYdiauq0eydi35GrxTOs55ghpRiBiNFKuH5kTGOrXj9y9OSxJfMLyE+Dy4s4FD/Z+UpeBM2Uy3dRdzerg==",
|
||||
"version": "0.2.18",
|
||||
"resolved": "https://registry.npmjs.org/@influxdata/influx/-/influx-0.2.18.tgz",
|
||||
"integrity": "sha512-GMkSinELOnOJMuupd/7H4CwOEWqvVTj3863tgH/b7HBRDSZyi/FqYUPazEYSoMXPQA65oPhcqnbgYAUtC1foWw==",
|
||||
"requires": {
|
||||
"axios": "^0.18.0"
|
||||
}
|
||||
|
@ -6085,7 +6085,8 @@
|
|||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz",
|
||||
"integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"aproba": {
|
||||
"version": "1.2.0",
|
||||
|
@ -6109,13 +6110,15 @@
|
|||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
|
||||
"integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"brace-expansion": {
|
||||
"version": "1.1.11",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
|
||||
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"balanced-match": "^1.0.0",
|
||||
"concat-map": "0.0.1"
|
||||
|
@ -6132,19 +6135,22 @@
|
|||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz",
|
||||
"integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"concat-map": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
|
||||
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"console-control-strings": {
|
||||
"version": "1.1.0",
|
||||
"resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz",
|
||||
"integrity": "sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"core-util-is": {
|
||||
"version": "1.0.2",
|
||||
|
@ -6275,7 +6281,8 @@
|
|||
"version": "2.0.3",
|
||||
"resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz",
|
||||
"integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"ini": {
|
||||
"version": "1.3.5",
|
||||
|
@ -6289,6 +6296,7 @@
|
|||
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz",
|
||||
"integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"number-is-nan": "^1.0.0"
|
||||
}
|
||||
|
@ -6305,6 +6313,7 @@
|
|||
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
|
||||
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"brace-expansion": "^1.1.7"
|
||||
}
|
||||
|
@ -6313,13 +6322,15 @@
|
|||
"version": "0.0.8",
|
||||
"resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz",
|
||||
"integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"minipass": {
|
||||
"version": "2.2.4",
|
||||
"resolved": "https://registry.npmjs.org/minipass/-/minipass-2.2.4.tgz",
|
||||
"integrity": "sha512-hzXIWWet/BzWhYs2b+u7dRHlruXhwdgvlTMDKC6Cb1U7ps6Ac6yQlR39xsbjWJE377YTCtKwIXIpJ5oP+j5y8g==",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"safe-buffer": "^5.1.1",
|
||||
"yallist": "^3.0.0"
|
||||
|
@ -6340,6 +6351,7 @@
|
|||
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz",
|
||||
"integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"minimist": "0.0.8"
|
||||
}
|
||||
|
@ -6428,7 +6440,8 @@
|
|||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz",
|
||||
"integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"object-assign": {
|
||||
"version": "4.1.1",
|
||||
|
@ -6442,6 +6455,7 @@
|
|||
"resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
|
||||
"integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"wrappy": "1"
|
||||
}
|
||||
|
@ -6537,7 +6551,8 @@
|
|||
"version": "5.1.1",
|
||||
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.1.tgz",
|
||||
"integrity": "sha512-kKvNJn6Mm93gAczWVJg7wH+wGYWNrDHdWvpUmHyEsgCtIwwo3bqPtV4tR5tuPaUhTOo/kvhVwd8XwwOllGYkbg==",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"safer-buffer": {
|
||||
"version": "2.1.2",
|
||||
|
@ -6579,6 +6594,7 @@
|
|||
"resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz",
|
||||
"integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"code-point-at": "^1.0.0",
|
||||
"is-fullwidth-code-point": "^1.0.0",
|
||||
|
@ -6600,6 +6616,7 @@
|
|||
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz",
|
||||
"integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=",
|
||||
"dev": true,
|
||||
"optional": true,
|
||||
"requires": {
|
||||
"ansi-regex": "^2.0.0"
|
||||
}
|
||||
|
@ -6648,13 +6665,15 @@
|
|||
"version": "1.0.2",
|
||||
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
||||
"integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
},
|
||||
"yallist": {
|
||||
"version": "3.0.2",
|
||||
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.2.tgz",
|
||||
"integrity": "sha1-hFK0u36Dx8GI2AQcGoN8dz1ti7k=",
|
||||
"dev": true
|
||||
"dev": true,
|
||||
"optional": true
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
|
@ -32,15 +32,18 @@
|
|||
"tsc": "tsc -p ./tsconfig.json --noEmit --pretty --skipLibCheck",
|
||||
"tsc:watch": "tsc -p ./tsconfig.json --noEmit --pretty -w",
|
||||
"tsc:cypress": "tsc -p ./cypress/tsconfig.json --noEmit --pretty --skipLibCheck",
|
||||
"cypress:open": "cypress open"
|
||||
"e2e": "cypress open"
|
||||
},
|
||||
"jest": {
|
||||
"setupTestFrameworkScriptFile": "./jestSetup.ts",
|
||||
"setupFilesAfterEnv": [
|
||||
"./jestSetup.ts"
|
||||
],
|
||||
"displayName": "test",
|
||||
"testURL": "http://localhost",
|
||||
"testPathIgnorePatterns": [
|
||||
"build",
|
||||
"<rootDir>/node_modules/(?!(jest-test))"
|
||||
"<rootDir>/node_modules/(?!(jest-test))",
|
||||
"cypress"
|
||||
],
|
||||
"setupFiles": [
|
||||
"<rootDir>/testSetup.ts"
|
||||
|
@ -134,7 +137,7 @@
|
|||
},
|
||||
"dependencies": {
|
||||
"@influxdata/clockface": "0.0.5",
|
||||
"@influxdata/influx": "0.2.15",
|
||||
"@influxdata/influx": "0.2.18",
|
||||
"@influxdata/react-custom-scrollbars": "4.3.8",
|
||||
"axios": "^0.18.0",
|
||||
"babel-polyfill": "^6.26.0",
|
||||
|
|
|
@ -33,6 +33,7 @@ interface Props {
|
|||
titleText?: string
|
||||
tabIndex?: number
|
||||
className?: string
|
||||
testID?: string
|
||||
}
|
||||
|
||||
interface State {
|
||||
|
@ -45,6 +46,7 @@ class ConfirmationButton extends Component<Props, State> {
|
|||
size: ComponentSize.Small,
|
||||
shape: ButtonShape.Default,
|
||||
status: ComponentStatus.Default,
|
||||
testID: 'confirmation-button',
|
||||
}
|
||||
|
||||
public ref: RefObject<HTMLButtonElement> = React.createRef()
|
||||
|
@ -67,6 +69,7 @@ class ConfirmationButton extends Component<Props, State> {
|
|||
status,
|
||||
confirmText,
|
||||
icon,
|
||||
testID,
|
||||
} = this.props
|
||||
const {isTooltipVisible} = this.state
|
||||
|
||||
|
@ -89,6 +92,7 @@ class ConfirmationButton extends Component<Props, State> {
|
|||
<div className={this.tooltipClassName}>
|
||||
<div
|
||||
data-test="confirmation-button--click-target"
|
||||
data-testid={testID}
|
||||
className="confirmation-button--tooltip-body"
|
||||
onClick={this.handleTooltipClick}
|
||||
>
|
||||
|
|
|
@ -7,6 +7,7 @@ exports[`ConfirmationButton interaction shows the tooltip when clicked 1`] = `
|
|||
shape="none"
|
||||
size="sm"
|
||||
status="default"
|
||||
testID="confirmation-button"
|
||||
text="I am a dangerous button!"
|
||||
>
|
||||
<ClickOutside
|
||||
|
@ -44,6 +45,7 @@ exports[`ConfirmationButton interaction shows the tooltip when clicked 1`] = `
|
|||
<div
|
||||
className="confirmation-button--tooltip-body"
|
||||
data-test="confirmation-button--click-target"
|
||||
data-testid="confirmation-button"
|
||||
onClick={[Function]}
|
||||
>
|
||||
Click me if you dare
|
||||
|
|
|
@ -26,30 +26,34 @@
|
|||
}
|
||||
|
||||
.label-selector--menu {
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
width: 100%;
|
||||
min-height: 50px;
|
||||
padding: $ix-marg-b;
|
||||
padding: $ix-marg-b - ($ix-border / 2);
|
||||
}
|
||||
|
||||
.label-selector--menu-item {
|
||||
margin: 1px;
|
||||
display: inline-flex;
|
||||
align-items: flex-start;
|
||||
margin: $ix-border / 2;
|
||||
}
|
||||
|
||||
.label-selector--empty {
|
||||
width: 100%;
|
||||
font-size: 13px;
|
||||
font-weight: 500;
|
||||
user-select: none;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
}
|
||||
|
||||
.label-selector--empty {
|
||||
padding: $ix-marg-a $ix-marg-b;
|
||||
color: $g9-mountain;
|
||||
font-style: italic;
|
||||
min-height: 30px;
|
||||
line-height: 30px;
|
||||
|
||||
&:first-child {
|
||||
margin-bottom: $ix-marg-b - ($ix-border / 2);
|
||||
}
|
||||
}
|
||||
|
||||
.label-selector--selection {
|
||||
|
|
|
@ -74,15 +74,15 @@ class LabelSelector extends Component<Props, State> {
|
|||
|
||||
public render() {
|
||||
return (
|
||||
<ClickOutside onClickOutside={this.handleStopSuggesting}>
|
||||
<div className="label-selector">
|
||||
<div className="label-selector--selection">
|
||||
{this.selectedLabels}
|
||||
{this.clearSelectedButton}
|
||||
</div>
|
||||
{this.input}
|
||||
<div className="label-selector">
|
||||
<div className="label-selector--selection">
|
||||
{this.selectedLabels}
|
||||
{this.clearSelectedButton}
|
||||
</div>
|
||||
</ClickOutside>
|
||||
<ClickOutside onClickOutside={this.handleStopSuggesting}>
|
||||
{this.input}
|
||||
</ClickOutside>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -169,8 +169,9 @@ class LabelSelector extends Component<Props, State> {
|
|||
|
||||
private handleStartSuggesting = () => {
|
||||
const {availableLabels} = this
|
||||
const {isSuggesting} = this.state
|
||||
|
||||
if (_.isEmpty(availableLabels)) {
|
||||
if (_.isEmpty(availableLabels) && !isSuggesting) {
|
||||
return this.setState({
|
||||
isSuggesting: true,
|
||||
highlightedID: null,
|
||||
|
|
|

@ -30,8 +30,9 @@ class LabelSelectorMenu extends Component<Props> {
<div className="label-selector--menu-container">
<FancyScrollbar autoHide={false} autoHeight={true} maxHeight={250}>
<div className="label-selector--menu">
{this.resourceLabelForm}
{this.menuItems}
{this.emptyText}
{this.resourceLabelForm}
</div>
</FancyScrollbar>
</div>

@ -60,18 +61,22 @@ class LabelSelectorMenu extends Component<Props> {
/>
))
}

return <div className="label-selector--empty">{this.emptyText}</div>
}

private get emptyText(): string {
const {allLabelsUsed} = this.props
private get emptyText(): JSX.Element {
const {allLabelsUsed, filterValue} = this.props

if (allLabelsUsed) {
return 'You have somehow managed to add all the labels, wow!'
if (!filterValue) {
return null
}

return 'No labels match your query'
let text = `No labels match "${filterValue}". Want to create a new label?`

if (allLabelsUsed) {
text = 'You have somehow managed to add all the labels, wow!'
}

return <div className="label-selector--empty">{text}</div>
}

private get resourceLabelForm(): JSX.Element {
@ -26,9 +26,9 @@ class LabelSelectorMenuItem extends Component<Props> {
|
|||
<span
|
||||
className="label-selector--menu-item"
|
||||
onMouseOver={this.handleMouseOver}
|
||||
onClick={this.handleClick}
|
||||
>
|
||||
<Label
|
||||
onClick={this.handleClick}
|
||||
name={name}
|
||||
description={description}
|
||||
id={id}
|
||||
|
|
|
@ -9,9 +9,12 @@
|
|||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
margin-bottom: $ix-marg-b;
|
||||
|
||||
.label-colors--swatch {
|
||||
margin-right: $ix-marg-c;
|
||||
margin-right: $ix-marg-b;
|
||||
}
|
||||
|
||||
> span.button-icon {
|
||||
margin-right: 0;
|
||||
}
|
||||
}
|
|
@ -4,10 +4,14 @@ import _ from 'lodash'
|
|||
// Utils
|
||||
import {randomPresetColor} from 'src/configuration/utils/labels'
|
||||
import {IconFont} from 'src/clockface'
|
||||
import {validateHexCode} from 'src/configuration/utils/labels'
|
||||
|
||||
// Styles
|
||||
import 'src/configuration/components/RandomLabelColor.scss'
|
||||
|
||||
// Constants
|
||||
import {INPUT_ERROR_COLOR} from 'src/configuration/constants/LabelColors'
|
||||
|
||||
interface Props {
|
||||
colorHex: string
|
||||
onClick: (newRandomHex: string) => void
|
||||
|
@ -15,16 +19,16 @@ interface Props {
|
|||
|
||||
export default class RandomLabelColorButton extends Component<Props> {
|
||||
public render() {
|
||||
const {colorHex} = this.props
|
||||
return (
|
||||
<button
|
||||
className="button button-sm button-default random-color--button "
|
||||
onClick={this.handleClick}
|
||||
title="Randomize label color"
|
||||
>
|
||||
<div
|
||||
className="label-colors--swatch"
|
||||
style={{
|
||||
backgroundColor: colorHex,
|
||||
backgroundColor: this.colorHex,
|
||||
}}
|
||||
/>
|
||||
<span className={`button-icon icon ${IconFont.Refresh}`} />
|
||||
|
@ -32,6 +36,16 @@ export default class RandomLabelColorButton extends Component<Props> {
|
|||
)
|
||||
}
|
||||
|
||||
private get colorHex(): string {
|
||||
const {colorHex} = this.props
|
||||
|
||||
if (validateHexCode(colorHex)) {
|
||||
return INPUT_ERROR_COLOR
|
||||
}
|
||||
|
||||
return colorHex
|
||||
}
|
||||
|
||||
private handleClick = () => {
|
||||
this.props.onClick(randomPresetColor())
|
||||
}
|
||||
|
|
|
@ -58,6 +58,150 @@ export const PRESET_LABEL_COLORS: LabelColor[] = [
|
|||
name: 'Neutrino',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-void',
|
||||
colorHex: '#311F94',
|
||||
name: 'Void',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-amethyst',
|
||||
colorHex: '#513CC6',
|
||||
name: 'Amethyst',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-star',
|
||||
colorHex: '#7A65F2',
|
||||
name: 'Star',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-comet',
|
||||
colorHex: '#9394FF',
|
||||
name: 'Comet',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-potassium',
|
||||
colorHex: '#B1B6FF',
|
||||
name: 'Potassium',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-moonstone',
|
||||
colorHex: '#C9D0FF',
|
||||
name: 'Moonstone',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-emerald',
|
||||
colorHex: '#108174',
|
||||
name: 'Emerald',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-viridian',
|
||||
colorHex: '#32B08C',
|
||||
name: 'Viridian',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-rainforest',
|
||||
colorHex: '#4ED8A0',
|
||||
name: 'Rainforest',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-honeydew',
|
||||
colorHex: '#7CE490',
|
||||
name: 'Honeydew',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-krypton',
|
||||
colorHex: '#A5F3B4',
|
||||
name: 'Krypton',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-wasabi',
|
||||
colorHex: '#C6FFD0',
|
||||
name: 'Wasabi',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-ruby',
|
||||
colorHex: '#BF3D5E',
|
||||
name: 'Ruby',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-fire',
|
||||
colorHex: '#DC4E58',
|
||||
name: 'Fire',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-curacao',
|
||||
colorHex: '#F95F53',
|
||||
name: 'Curacao',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-dreamsicle',
|
||||
colorHex: '#FF8564',
|
||||
name: 'Dreamsicle',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-tungsten',
|
||||
colorHex: '#FFB6A0',
|
||||
name: 'Tungsten',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-marmelade',
|
||||
colorHex: '#FFDCCF',
|
||||
name: 'Marmelade',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-topaz',
|
||||
colorHex: '#E85B1C',
|
||||
name: 'Topaz',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-tiger',
|
||||
colorHex: '#F48D38',
|
||||
name: 'Tiger',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-pineapple',
|
||||
colorHex: '#FFB94A',
|
||||
name: 'Pineapple',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-thunder',
|
||||
colorHex: '#FFD255',
|
||||
name: 'Thunder',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-sulfur',
|
||||
colorHex: '#FFE480',
|
||||
name: 'Sulfur',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
{
|
||||
id: 'label-preset-daisy',
|
||||
colorHex: '#FFF6B8',
|
||||
name: 'Daisy',
|
||||
type: LabelColorType.Preset,
|
||||
},
|
||||
]
|
||||
|
||||
export const INPUT_ERROR_COLOR = '#0F0E15'
|
||||
|
|
|
@ -1,44 +1,39 @@
|
|||
// Libraries
|
||||
import React from 'react'
|
||||
import {shallow} from 'enzyme'
|
||||
|
||||
// Components
|
||||
import DataListening from 'src/dataLoaders/components/verifyStep/DataListening'
|
||||
import ConnectionInformation from 'src/dataLoaders/components/verifyStep/ConnectionInformation'
|
||||
import {Button} from '@influxdata/clockface'
|
||||
|
||||
const setup = (override = {}) => {
|
||||
const props = {
|
||||
bucket: 'defbuck',
|
||||
stepIndex: 4,
|
||||
...override,
|
||||
// Utils
|
||||
import {renderWithRedux} from 'src/mockState'
|
||||
import {fireEvent} from 'react-testing-library'
|
||||
|
||||
const setInitialState = state => {
|
||||
return {
|
||||
...state,
|
||||
orgs: [
|
||||
{
|
||||
id: 'foo',
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const wrapper = shallow(<DataListening {...props} />)
|
||||
|
||||
return {wrapper}
|
||||
}
|
||||
|
||||
describe('Onboarding.Components.DataListening', () => {
|
||||
it('renders', () => {
|
||||
const {wrapper} = setup()
|
||||
const button = wrapper.find(Button)
|
||||
|
||||
expect(wrapper.exists()).toBe(true)
|
||||
expect(button.exists()).toBe(true)
|
||||
})
|
||||
|
||||
describe('if button is clicked', () => {
|
||||
it('displays connection information', () => {
|
||||
const {wrapper} = setup()
|
||||
const {getByTitle, getByText} = renderWithRedux(
|
||||
<DataListening bucket="bucket" />,
|
||||
setInitialState
|
||||
)
|
||||
|
||||
const button = wrapper.find(Button)
|
||||
button.simulate('click')
|
||||
const button = getByTitle('Listen for Data')
|
||||
|
||||
const connectionInfo = wrapper.find(ConnectionInformation)
|
||||
fireEvent.click(button)
|
||||
|
||||
expect(wrapper.exists()).toBe(true)
|
||||
expect(connectionInfo.exists()).toBe(true)
|
||||
const message = getByText('Awaiting Connection...')
|
||||
|
||||
expect(message).toBeDefined()
|
||||
})
|
||||
})
|
||||
})
|
||||
|
|
|
@ -1,9 +1,11 @@
|
|||
// Libraries
|
||||
import React, {PureComponent} from 'react'
|
||||
import {connect} from 'react-redux'
|
||||
import _ from 'lodash'
|
||||
|
||||
// Apis
|
||||
import {executeQuery} from 'src/shared/apis/v2/query'
|
||||
import {getActiveOrg} from 'src/organizations/selectors'
|
||||
|
||||
// Components
|
||||
import {ErrorHandling} from 'src/shared/decorators/errors'
|
||||
|
@ -18,12 +20,19 @@ import ConnectionInformation, {
|
|||
} from 'src/dataLoaders/components/verifyStep/ConnectionInformation'
|
||||
|
||||
// Types
|
||||
import {AppState, Organization} from 'src/types/v2'
|
||||
import {InfluxLanguage} from 'src/types/v2/dashboards'
|
||||
|
||||
export interface Props {
|
||||
interface OwnProps {
|
||||
bucket: string
|
||||
}
|
||||
|
||||
interface StateProps {
|
||||
activeOrg: Organization
|
||||
}
|
||||
|
||||
type Props = OwnProps & StateProps
|
||||
|
||||
interface State {
|
||||
loading: LoadingState
|
||||
timePassedInSeconds: number
|
||||
|
@ -112,7 +121,7 @@ class DataListening extends PureComponent<Props, State> {
|
|||
}
|
||||
|
||||
private checkForData = async (): Promise<void> => {
|
||||
const {bucket} = this.props
|
||||
const {bucket, activeOrg} = this.props
|
||||
const {secondsLeft} = this.state
|
||||
const script = `from(bucket: "${bucket}")
|
||||
|> range(start: -1m)`
|
||||
|
@ -123,6 +132,7 @@ class DataListening extends PureComponent<Props, State> {
|
|||
try {
|
||||
const response = await executeQuery(
|
||||
'/api/v2/query',
|
||||
activeOrg.id,
|
||||
script,
|
||||
InfluxLanguage.Flux
|
||||
).promise
|
||||
|
@ -165,4 +175,11 @@ class DataListening extends PureComponent<Props, State> {
|
|||
}
|
||||
}
|
||||
|
||||
export default DataListening
|
||||
const mstp = (state: AppState) => ({
|
||||
activeOrg: getActiveOrg(state),
|
||||
})
|
||||
|
||||
export default connect<StateProps, {}, OwnProps>(
|
||||
mstp,
|
||||
null
|
||||
)(DataListening)
|
||||
|
|
|
@ -38,7 +38,6 @@ class FetchAuthToken extends PureComponent<Props, State> {
|
|||
const authToken = await client.authorizations.getAuthorizationToken(
|
||||
username
|
||||
)
|
||||
|
||||
this.setState({authToken, loading: RemoteDataState.Done})
|
||||
}
|
||||
|
||||
|
|
|
@ -1,6 +1,11 @@
|
|||
import React, {useRef, useLayoutEffect, SFC} from 'react'
|
||||
|
||||
import {PlotEnv, TICK_PADDING_RIGHT, TICK_PADDING_TOP} from 'src/minard'
|
||||
import {
|
||||
PlotEnv,
|
||||
TICK_PADDING_RIGHT,
|
||||
TICK_PADDING_TOP,
|
||||
PLOT_PADDING,
|
||||
} from 'src/minard'
|
||||
import {clearCanvas} from 'src/minard/utils/clearCanvas'
|
||||
|
||||
interface Props {
|
||||
|
@ -20,9 +25,13 @@ export const drawAxes = (
|
|||
const {
|
||||
width,
|
||||
height,
|
||||
innerWidth,
|
||||
innerHeight,
|
||||
margins,
|
||||
xTicks,
|
||||
yTicks,
|
||||
xAxisLabel,
|
||||
yAxisLabel,
|
||||
baseLayer: {
|
||||
scales: {x: xScale, y: yScale},
|
||||
},
|
||||
|
@ -77,6 +86,31 @@ export const drawAxes = (
|
|||
|
||||
context.fillText(String(yTick), margins.left - TICK_PADDING_RIGHT, y)
|
||||
}
|
||||
|
||||
// Draw the x axis label
|
||||
if (xAxisLabel) {
|
||||
context.textAlign = 'center'
|
||||
context.textBaseline = 'bottom'
|
||||
context.fillText(
|
||||
xAxisLabel,
|
||||
margins.left + innerWidth / 2,
|
||||
height - PLOT_PADDING
|
||||
)
|
||||
}
|
||||
|
||||
// Draw the y axis label
|
||||
if (yAxisLabel) {
|
||||
const x = PLOT_PADDING
|
||||
const y = margins.top + innerHeight / 2
|
||||
|
||||
context.save()
|
||||
context.translate(x, y)
|
||||
context.rotate(-Math.PI / 2)
|
||||
context.textAlign = 'center'
|
||||
context.textBaseline = 'top'
|
||||
context.fillText(yAxisLabel, 0, 0)
|
||||
context.restore()
|
||||
}
|
||||
}
|
||||
|
||||
export const Axes: SFC<Props> = props => {
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import React, {SFC} from 'react'
|
||||
|
||||
import {PlotEnv} from 'src/minard'
|
||||
import {PlotEnv, HistogramLayer} from 'src/minard'
|
||||
import {bin} from 'src/minard/utils/bin'
|
||||
import HistogramBars from 'src/minard/components/HistogramBars'
|
||||
import HistogramTooltip from 'src/minard/components/HistogramTooltip'
|
||||
|
@ -47,7 +47,7 @@ export const Histogram: SFC<Props> = ({
|
|||
const layer = useLayer(
|
||||
env,
|
||||
() => {
|
||||
const [table, aesthetics] = bin(
|
||||
const [table, mappings] = bin(
|
||||
baseTable,
|
||||
x,
|
||||
xDomain,
|
||||
|
@ -56,10 +56,10 @@ export const Histogram: SFC<Props> = ({
|
|||
position
|
||||
)
|
||||
|
||||
return {table, aesthetics, colors, scales: {}}
|
||||
return {type: 'histogram', table, mappings, colors}
|
||||
},
|
||||
[baseTable, xDomain, x, fill, position, binCount, colors]
|
||||
)
|
||||
) as HistogramLayer
|
||||
|
||||
if (!layer) {
|
||||
return null
|
||||
|
@ -75,12 +75,10 @@ export const Histogram: SFC<Props> = ({
|
|||
},
|
||||
} = env
|
||||
|
||||
const {aesthetics, table} = layer
|
||||
const {table} = layer
|
||||
|
||||
const hoveredRowIndices = findHoveredRowIndices(
|
||||
table.columns[aesthetics.xMin],
|
||||
table.columns[aesthetics.xMax],
|
||||
table.columns[aesthetics.yMax],
|
||||
table,
|
||||
hoverX,
|
||||
hoverY,
|
||||
xScale,
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import React, {useRef, useLayoutEffect, SFC} from 'react'
|
||||
|
||||
import {Scale, HistogramPosition, Layer} from 'src/minard'
|
||||
import {Scale, HistogramPosition, HistogramLayer} from 'src/minard'
|
||||
import {clearCanvas} from 'src/minard/utils/clearCanvas'
|
||||
import {getBarFill} from 'src/minard/utils/getBarFill'
|
||||
|
||||
|
@ -11,7 +11,7 @@ const BAR_PADDING = 1.5
|
|||
interface Props {
|
||||
width: number
|
||||
height: number
|
||||
layer: Layer
|
||||
layer: HistogramLayer
|
||||
xScale: Scale<number, number>
|
||||
yScale: Scale<number, number>
|
||||
position: HistogramPosition
|
||||
|
@ -24,11 +24,11 @@ const drawBars = (
|
|||
): void => {
|
||||
clearCanvas(canvas, width, height)
|
||||
|
||||
const {table, aesthetics} = layer
|
||||
const xMinCol = table.columns[aesthetics.xMin]
|
||||
const xMaxCol = table.columns[aesthetics.xMax]
|
||||
const yMinCol = table.columns[aesthetics.yMin]
|
||||
const yMaxCol = table.columns[aesthetics.yMax]
|
||||
const {table} = layer
|
||||
const xMinCol = table.columns.xMin.data
|
||||
const xMaxCol = table.columns.xMax.data
|
||||
const yMinCol = table.columns.yMin.data
|
||||
const yMaxCol = table.columns.yMax.data
|
||||
|
||||
const context = canvas.getContext('2d')
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import React, {useRef, SFC} from 'react'
|
||||
|
||||
import {HistogramTooltipProps, Layer} from 'src/minard'
|
||||
import {HistogramTooltipProps, HistogramLayer} from 'src/minard'
|
||||
import {useLayoutStyle} from 'src/minard/utils/useLayoutStyle'
|
||||
import {useMousePos} from 'src/minard/utils/useMousePos'
|
||||
import {getHistogramTooltipProps} from 'src/minard/utils/getHistogramTooltipProps'
|
||||
|
@ -12,7 +12,7 @@ interface Props {
|
|||
hoverX: number
|
||||
hoverY: number
|
||||
tooltip?: (props: HistogramTooltipProps) => JSX.Element
|
||||
layer: Layer
|
||||
layer: HistogramLayer
|
||||
hoveredRowIndices: number[] | null
|
||||
}
|
||||
|
||||
|
|
|
@ -9,6 +9,8 @@ import {
|
|||
setTable,
|
||||
setControlledXDomain,
|
||||
setControlledYDomain,
|
||||
setXAxisLabel,
|
||||
setYAxisLabel,
|
||||
} from 'src/minard/utils/plotEnvActions'
|
||||
import {plotEnvReducer, INITIAL_PLOT_ENV} from 'src/minard/utils/plotEnvReducer'
|
||||
|
||||
|
@ -29,6 +31,8 @@ export interface Props {
|
|||
axesStroke?: string
|
||||
tickFont?: string
|
||||
tickFill?: string
|
||||
xAxisLabel?: string
|
||||
yAxisLabel?: string
|
||||
|
||||
// The x domain of the plot can be explicitly set. If this prop is passed,
|
||||
// then the component is operating in a "controlled" mode, where it always
|
||||
|
@ -53,6 +57,8 @@ export const Plot: SFC<Props> = ({
|
|||
axesStroke = '#31313d',
|
||||
tickFont = 'bold 10px Roboto',
|
||||
tickFill = '#8e91a1',
|
||||
xAxisLabel = '',
|
||||
yAxisLabel = '',
|
||||
xDomain = null,
|
||||
yDomain = null,
|
||||
}) => {
|
||||
|
@ -62,12 +68,16 @@ export const Plot: SFC<Props> = ({
|
|||
height,
|
||||
xDomain,
|
||||
yDomain,
|
||||
xAxisLabel,
|
||||
yAxisLabel,
|
||||
baseLayer: {...INITIAL_PLOT_ENV.baseLayer, table},
|
||||
})
|
||||
|
||||
useMountedEffect(() => dispatch(setTable(table)), [table])
|
||||
useMountedEffect(() => dispatch(setControlledXDomain(xDomain)), [xDomain])
|
||||
useMountedEffect(() => dispatch(setControlledYDomain(yDomain)), [yDomain])
|
||||
useMountedEffect(() => dispatch(setXAxisLabel(xAxisLabel)), [xAxisLabel])
|
||||
useMountedEffect(() => dispatch(setYAxisLabel(yAxisLabel)), [yAxisLabel])
|
||||
useMountedEffect(() => dispatch(setDimensions(width, height)), [
|
||||
width,
|
||||
height,
|
||||
|
|
|
@ -9,6 +9,8 @@ export const TICK_PADDING_TOP = 5
|
|||
export const TICK_CHAR_WIDTH = 7
|
||||
export const TICK_CHAR_HEIGHT = 10
|
||||
|
||||
export const AXIS_LABEL_PADDING_BOTTOM = 15
|
||||
|
||||
export {Plot} from 'src/minard/components/Plot'
|
||||
|
||||
export {
|
||||
|
@ -17,35 +19,120 @@ export {
|
|||
TooltipProps as HistogramTooltipProps,
|
||||
} from 'src/minard/components/Histogram'
|
||||
|
||||
export {isNumeric} from 'src/minard/utils/isNumeric'
|
||||
|
||||
export type ColumnType = 'int' | 'uint' | 'float' | 'string' | 'time' | 'bool'
|
||||
|
||||
export type NumericColumnType = 'int' | 'uint' | 'float' | 'time'
|
||||
|
||||
export interface FloatColumn {
|
||||
data: number[]
|
||||
type: 'float'
|
||||
}
|
||||
|
||||
export interface IntColumn {
|
||||
data: number[]
|
||||
type: 'int'
|
||||
}
|
||||
|
||||
export interface UIntColumn {
|
||||
data: number[]
|
||||
type: 'uint'
|
||||
}
|
||||
|
||||
export interface TimeColumn {
|
||||
data: number[]
|
||||
type: 'time'
|
||||
}
|
||||
|
||||
export interface StringColumn {
|
||||
data: string[]
|
||||
type: 'string'
|
||||
}
|
||||
|
||||
export interface BoolColumn {
|
||||
data: boolean[]
|
||||
type: 'bool'
|
||||
}
|
||||
|
||||
export type NumericTableColumn =
|
||||
| FloatColumn
|
||||
| IntColumn
|
||||
| UIntColumn
|
||||
| TimeColumn
|
||||
|
||||
export type TableColumn =
|
||||
| FloatColumn
|
||||
| IntColumn
|
||||
| UIntColumn
|
||||
| TimeColumn
|
||||
| StringColumn
|
||||
| BoolColumn
|
||||
|
||||
export interface Table {
|
||||
length: number
|
||||
columns: {
|
||||
[columnName: string]: TableColumn
|
||||
}
|
||||
}
|
||||
|
||||
export type LayerType = 'base' | 'histogram'
|
||||
|
||||
export interface Scale<D = number, R = number> {
|
||||
(x: D): R
|
||||
invert?: (y: R) => D
|
||||
}
|
||||
|
||||
export interface AestheticDataMappings {
|
||||
x?: string
|
||||
fill?: string[]
|
||||
xMin?: string
|
||||
xMax?: string
|
||||
yMin?: string
|
||||
yMax?: string
|
||||
export interface BaseLayerMappings {}
|
||||
|
||||
export interface BaseLayerScales {
|
||||
x: Scale<number, number>
|
||||
y: Scale<number, number>
|
||||
}
|
||||
|
||||
export interface AestheticScaleMappings {
|
||||
x?: Scale<number, number>
|
||||
y?: Scale<number, number>
|
||||
fill?: Scale<string, string>
|
||||
export interface BaseLayer {
|
||||
type: 'base'
|
||||
table: Table
|
||||
scales: BaseLayerScales
|
||||
mappings: {}
|
||||
xDomain: [number, number]
|
||||
yDomain: [number, number]
|
||||
}
|
||||
|
||||
export interface Layer {
|
||||
table?: Table
|
||||
aesthetics: AestheticDataMappings
|
||||
scales: AestheticScaleMappings
|
||||
colors?: string[]
|
||||
xDomain?: [number, number]
|
||||
yDomain?: [number, number]
|
||||
export interface HistogramTable extends Table {
|
||||
columns: {
|
||||
xMin: NumericTableColumn
|
||||
xMax: NumericTableColumn
|
||||
yMin: IntColumn
|
||||
yMax: IntColumn
|
||||
[fillColumn: string]: TableColumn
|
||||
}
|
||||
length: number
|
||||
}
|
||||
|
||||
export interface HistogramMappings {
|
||||
xMin: 'xMin'
|
||||
xMax: 'xMax'
|
||||
yMin: 'yMin'
|
||||
yMax: 'yMax'
|
||||
fill: string[]
|
||||
}
|
||||
|
||||
export interface HistogramScales {
|
||||
// x and y scale are from the `BaseLayer`
|
||||
fill: Scale<string, string>
|
||||
}
|
||||
|
||||
export interface HistogramLayer {
|
||||
type: 'histogram'
|
||||
table: HistogramTable
|
||||
mappings: HistogramMappings
|
||||
scales: HistogramScales
|
||||
colors: string[]
|
||||
}
|
||||
|
||||
export type Layer = BaseLayer | HistogramLayer
|
||||
|
||||
export interface Margins {
|
||||
top: number
|
||||
right: number
|
||||
|
@ -61,6 +148,8 @@ export interface PlotEnv {
|
|||
margins: Margins
|
||||
xTicks: number[]
|
||||
yTicks: number[]
|
||||
xAxisLabel: string
|
||||
yAxisLabel: string
|
||||
|
||||
// If the domains have been explicitly passed in to the `Plot` component,
|
||||
// they will be stored here. Scales and child layers use the `xDomain` and
|
||||
|
@ -69,103 +158,9 @@ export interface PlotEnv {
|
|||
xDomain: [number, number]
|
||||
yDomain: [number, number]
|
||||
|
||||
baseLayer: Layer
|
||||
baseLayer: BaseLayer
|
||||
layers: {[layerKey: string]: Layer}
|
||||
hoverX: number
|
||||
hoverY: number
|
||||
dispatch: (action: PlotAction) => void
|
||||
}
|
||||
|
||||
export enum ColumnType {
|
||||
Numeric = 'numeric',
|
||||
Categorical = 'categorical',
|
||||
Temporal = 'temporal',
|
||||
Boolean = 'bool',
|
||||
}
|
||||
|
||||
export interface Table {
|
||||
columns: {[columnName: string]: any[]}
|
||||
columnTypes: {[columnName: string]: ColumnType}
|
||||
}
|
||||
|
||||
// export enum InterpolationKind {
|
||||
// Linear = 'linear',
|
||||
// MonotoneX = 'monotoneX',
|
||||
// MonotoneY = 'monotoneY',
|
||||
// Cubic = 'cubic',
|
||||
// Step = 'step',
|
||||
// StepBefore = 'stepBefore',
|
||||
// StepAfter = 'stepAfter',
|
||||
// }
|
||||
|
||||
// export interface LineProps {
|
||||
// x?: string
|
||||
// y?: string
|
||||
// stroke?: string
|
||||
// strokeWidth?: string
|
||||
// interpolate?: InterpolationKind
|
||||
// }
|
||||
|
||||
// export enum AreaPositionKind {
|
||||
// Stack = 'stack',
|
||||
// Overlay = 'overlay',
|
||||
// }
|
||||
|
||||
// export interface AreaProps {
|
||||
// x?: string
|
||||
// y?: string
|
||||
// position?: AreaPositionKind
|
||||
// }
|
||||
|
||||
// export enum ShapeKind {
|
||||
// Point = 'point',
|
||||
// // Spade, Heart, Club, Triangle, Hexagon, etc.
|
||||
// }
|
||||
|
||||
// export interface PointProps {
|
||||
// x?: string
|
||||
// y?: string
|
||||
// fill?: string
|
||||
// shape?: ShapeKind
|
||||
// radius?: number
|
||||
// alpha?: number
|
||||
// }
|
||||
|
||||
// export interface ContinuousBarProps {
|
||||
// x0?: string
|
||||
// x1?: string
|
||||
// y?: string
|
||||
// fill?: string
|
||||
// }
|
||||
|
||||
// export enum DiscreteBarPositionKind {
|
||||
// Stack = 'stack',
|
||||
// Dodge = 'dodge',
|
||||
// }
|
||||
|
||||
// export interface DiscreteBarProps {
|
||||
// x?: string
|
||||
// y?: string
|
||||
// fill?: string
|
||||
// position?: DiscreteBarPositionKind
|
||||
// }
|
||||
|
||||
// export interface StepLineProps {
|
||||
// x0?: string
|
||||
// x1?: string
|
||||
// y?: string
|
||||
// }
|
||||
|
||||
// export interface StepAreaProps {
|
||||
// x0?: string
|
||||
// x1?: string
|
||||
// y?: string
|
||||
// position?: AreaPositionKind
|
||||
// }
|
||||
|
||||
// export interface Bin2DProps {
|
||||
// x?: string
|
||||
// y?: string
|
||||
// binWidth?: number
|
||||
// binHeight?: number
|
||||
// }
|
||||
|
|
|
@ -1,39 +1,44 @@
|
|||
import {HistogramPosition, ColumnType} from 'src/minard'
|
||||
import {HistogramPosition, Table} from 'src/minard'
|
||||
import {bin} from 'src/minard/utils/bin'
|
||||
|
||||
const TABLE = {
|
||||
const TABLE: Table = {
|
||||
columns: {
|
||||
_value: [70, 56, 60, 100, 76, 0, 63, 48, 79, 67],
|
||||
_field: [
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
],
|
||||
cpu: [
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu1',
|
||||
'cpu1',
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu1',
|
||||
'cpu1',
|
||||
],
|
||||
},
|
||||
columnTypes: {
|
||||
_value: ColumnType.Numeric,
|
||||
_field: ColumnType.Categorical,
|
||||
cpu: ColumnType.Categorical,
|
||||
_value: {
|
||||
data: [70, 56, 60, 100, 76, 0, 63, 48, 79, 67],
|
||||
type: 'int',
|
||||
},
|
||||
_field: {
|
||||
data: [
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
],
|
||||
type: 'string',
|
||||
},
|
||||
cpu: {
|
||||
data: [
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu1',
|
||||
'cpu1',
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu0',
|
||||
'cpu1',
|
||||
'cpu1',
|
||||
],
|
||||
type: 'string',
|
||||
},
|
||||
},
|
||||
length: 10,
|
||||
}
|
||||
|
||||
describe('bin', () => {
|
||||
|
@ -41,20 +46,15 @@ describe('bin', () => {
|
|||
const actual = bin(TABLE, '_value', null, [], 5, HistogramPosition.Stacked)
|
||||
const expected = [
|
||||
{
|
||||
columnTypes: {
|
||||
xMax: 'numeric',
|
||||
xMin: 'numeric',
|
||||
yMax: 'numeric',
|
||||
yMin: 'numeric',
|
||||
},
|
||||
columns: {
|
||||
xMax: [20, 40, 60, 80, 100],
|
||||
xMin: [0, 20, 40, 60, 80],
|
||||
yMax: [1, 0, 2, 6, 1],
|
||||
yMin: [0, 0, 0, 0, 0],
|
||||
xMin: {data: [0, 20, 40, 60, 80], type: 'int'},
|
||||
xMax: {data: [20, 40, 60, 80, 100], type: 'int'},
|
||||
yMin: {data: [0, 0, 0, 0, 0], type: 'int'},
|
||||
yMax: {data: [1, 0, 2, 6, 1], type: 'int'},
|
||||
},
|
||||
length: 5,
|
||||
},
|
||||
{fill: [], xMax: 'xMax', xMin: 'xMin', yMax: 'yMax', yMin: 'yMin'},
|
||||
{xMin: 'xMin', xMax: 'xMax', yMin: 'yMin', yMax: 'yMax', fill: []},
|
||||
]
|
||||
|
||||
expect(actual).toEqual(expected)
|
||||
|
@ -71,22 +71,25 @@ describe('bin', () => {
|
|||
)[0].columns
|
||||
|
||||
const expected = {
|
||||
_field: [
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
],
|
||||
xMax: [20, 40, 60, 80, 100, 20, 40, 60, 80, 100],
|
||||
xMin: [0, 20, 40, 60, 80, 0, 20, 40, 60, 80],
|
||||
yMax: [0, 0, 1, 3, 1, 1, 0, 2, 6, 1],
|
||||
yMin: [0, 0, 0, 0, 0, 0, 0, 1, 3, 1],
|
||||
xMin: {data: [0, 20, 40, 60, 80, 0, 20, 40, 60, 80], type: 'int'},
|
||||
xMax: {data: [20, 40, 60, 80, 100, 20, 40, 60, 80, 100], type: 'int'},
|
||||
yMin: {data: [0, 0, 0, 0, 0, 0, 0, 1, 3, 1], type: 'int'},
|
||||
yMax: {data: [0, 0, 1, 3, 1, 1, 0, 2, 6, 1], type: 'int'},
|
||||
_field: {
|
||||
data: [
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
],
|
||||
type: 'string',
|
||||
},
|
||||
}
|
||||
|
||||
expect(actual).toEqual(expected)
|
||||
|
@ -103,22 +106,25 @@ describe('bin', () => {
|
|||
)[0].columns
|
||||
|
||||
const expected = {
|
||||
_field: [
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
],
|
||||
xMax: [20, 40, 60, 80, 100, 20, 40, 60, 80, 100],
|
||||
xMin: [0, 20, 40, 60, 80, 0, 20, 40, 60, 80],
|
||||
yMax: [0, 0, 1, 3, 1, 1, 0, 1, 3, 0],
|
||||
yMin: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
|
||||
xMin: {data: [0, 20, 40, 60, 80, 0, 20, 40, 60, 80], type: 'int'},
|
||||
xMax: {data: [20, 40, 60, 80, 100, 20, 40, 60, 80, 100], type: 'int'},
|
||||
yMin: {data: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], type: 'int'},
|
||||
yMax: {data: [0, 0, 1, 3, 1, 1, 0, 1, 3, 0], type: 'int'},
|
||||
_field: {
|
||||
data: [
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_guest',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
'usage_idle',
|
||||
],
|
||||
type: 'string',
|
||||
},
|
||||
}
|
||||
|
||||
expect(actual).toEqual(expected)
|
||||
|
@ -135,10 +141,16 @@ describe('bin', () => {
|
|||
)[0].columns
|
||||
|
||||
const expected = {
|
||||
xMax: [-160, -120, -80, -40, 0, 40, 80, 120, 160, 200],
|
||||
xMin: [-200, -160, -120, -80, -40, 0, 40, 80, 120, 160],
|
||||
yMax: [0, 0, 0, 0, 0, 1, 8, 1, 0, 0],
|
||||
yMin: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
|
||||
xMin: {
|
||||
data: [-200, -160, -120, -80, -40, 0, 40, 80, 120, 160],
|
||||
type: 'int',
|
||||
},
|
||||
xMax: {
|
||||
data: [-160, -120, -80, -40, 0, 40, 80, 120, 160, 200],
|
||||
type: 'int',
|
||||
},
|
||||
yMin: {data: [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], type: 'int'},
|
||||
yMax: {data: [0, 0, 0, 0, 0, 1, 8, 1, 0, 0], type: 'int'},
|
||||
}
|
||||
|
||||
expect(actual).toEqual(expected)
|
||||
|
@ -155,10 +167,10 @@ describe('bin', () => {
|
|||
)[0].columns
|
||||
|
||||
const expected = {
|
||||
xMax: [60, 70, 80],
|
||||
xMin: [50, 60, 70],
|
||||
yMax: [1, 3, 3],
|
||||
yMin: [0, 0, 0],
|
||||
xMin: {data: [50, 60, 70], type: 'int'},
|
||||
xMax: {data: [60, 70, 80], type: 'int'},
|
||||
yMin: {data: [0, 0, 0], type: 'int'},
|
||||
yMax: {data: [1, 3, 3], type: 'int'},
|
||||
}
|
||||
|
||||
expect(actual).toEqual(expected)
|
||||
|
|
|
@@ -1,6 +1,13 @@
import {extent, range, thresholdSturges} from 'd3-array'

import {Table, HistogramPosition, ColumnType} from 'src/minard'
import {
  Table,
  HistogramTable,
  HistogramMappings,
  HistogramPosition,
  NumericColumnType,
  isNumeric,
} from 'src/minard'
import {assert} from 'src/minard/utils/assert'
import {getGroupKey} from 'src/minard/utils/getGroupKey'

@@ -37,15 +44,14 @@ export const bin = (
  groupColNames: string[] = [],
  binCount: number,
  position: HistogramPosition
) => {
  const xCol = table.columns[xColName]
  const xColType = table.columnTypes[xColName]
): [HistogramTable, HistogramMappings] => {
  const col = table.columns[xColName]

  assert(`could not find column "${xColName}"`, !!xCol)
  assert(
    `unsupported value column type "${xColType}"`,
    xColType === ColumnType.Numeric || xColType === ColumnType.Temporal
  )
  assert(`could not find column "${xColName}"`, !!col)
  assert(`unsupported value column type "${col.type}"`, isNumeric(col.type))

  const xCol = col.data as number[]
  const xColType = col.type as NumericColumnType

  if (!binCount) {
    binCount = thresholdSturges(xCol)

@@ -64,7 +70,14 @@ export const bin = (
  for (let i = 0; i < xCol.length; i++) {
    const x = xCol[i]

    if (x < xDomain[0] || x > xDomain[1]) {
    const shouldSkipPoint =
      x === undefined ||
      x === null ||
      isNaN(x) ||
      x < xDomain[0] ||
      x > xDomain[1]

    if (shouldSkipPoint) {
      continue
    }
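As a usage reference, the updated test earlier in this diff drives bin as in the example below. The parameter comments are my reading of the signature from the hunks above and from that test call, so treat them as annotations rather than documentation.

import {HistogramPosition} from 'src/minard'
import {bin} from 'src/minard/utils/bin'

// TABLE is a Table whose columns are {data, type} objects, as in the test fixture above.
const [statTable, mappings] = bin(
  TABLE,
  '_value',                  // column to bin
  null,                      // explicit x domain override (null derives it from the data extent)
  ['cpu'],                   // group ("fill") columns
  5,                         // bin count; a falsy value falls back to thresholdSturges
  HistogramPosition.Stacked
)
// statTable.columns.{xMin,xMax,yMin,yMax}.data hold the binned values, and mappings
// comes back as {xMin: 'xMin', xMax: 'xMax', yMin: 'yMin', yMax: 'yMax', fill: ['cpu']}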
|
||||
|
||||
|
@ -91,24 +104,37 @@ export const bin = (
|
|||
}
|
||||
|
||||
// Next, build up a tabular representation of each of these bins by group
|
||||
const groupKeys = Object.keys(groupsByGroupKey)
|
||||
const statTable = {
|
||||
columns: {xMin: [], xMax: [], yMin: [], yMax: []},
|
||||
columnTypes: {
|
||||
xMin: xColType,
|
||||
xMax: xColType,
|
||||
yMin: ColumnType.Numeric,
|
||||
yMax: ColumnType.Numeric,
|
||||
columns: {
|
||||
xMin: {
|
||||
data: [],
|
||||
type: xColType,
|
||||
},
|
||||
xMax: {
|
||||
data: [],
|
||||
type: xColType,
|
||||
},
|
||||
yMin: {
|
||||
data: [],
|
||||
type: 'int',
|
||||
},
|
||||
yMax: {
|
||||
data: [],
|
||||
type: 'int',
|
||||
},
|
||||
},
|
||||
length: binCount * groupKeys.length,
|
||||
}
|
||||
|
||||
// Include original columns used to group data in the resulting table
|
||||
for (const name of groupColNames) {
|
||||
statTable.columns[name] = []
|
||||
statTable.columnTypes[name] = table.columnTypes[name]
|
||||
statTable.columns[name] = {
|
||||
data: [],
|
||||
type: table.columns[name].type,
|
||||
}
|
||||
}
|
||||
|
||||
const groupKeys = Object.keys(groupsByGroupKey)
|
||||
|
||||
for (let i = 0; i < groupKeys.length; i++) {
|
||||
const groupKey = groupKeys[i]
|
||||
|
||||
|
@ -121,18 +147,18 @@ export const bin = (
|
|||
.reduce((sum, k) => sum + (bin.values[k] || 0), 0)
|
||||
}
|
||||
|
||||
statTable.columns.xMin.push(bin.min)
|
||||
statTable.columns.xMax.push(bin.max)
|
||||
statTable.columns.yMin.push(yMin)
|
||||
statTable.columns.yMax.push(yMin + (bin.values[groupKey] || 0))
|
||||
statTable.columns.xMin.data.push(bin.min)
|
||||
statTable.columns.xMax.data.push(bin.max)
|
||||
statTable.columns.yMin.data.push(yMin)
|
||||
statTable.columns.yMax.data.push(yMin + (bin.values[groupKey] || 0))
|
||||
|
||||
for (const [k, v] of Object.entries(groupsByGroupKey[groupKey])) {
|
||||
statTable.columns[k].push(v)
|
||||
statTable.columns[k].data.push(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const mappings: any = {
|
||||
const mappings: HistogramMappings = {
|
||||
xMin: 'xMin',
|
||||
xMax: 'xMax',
|
||||
yMin: 'yMin',
|
||||
|
@ -140,7 +166,7 @@ export const bin = (
|
|||
fill: groupColNames,
|
||||
}
|
||||
|
||||
return [statTable, mappings]
|
||||
return [statTable as HistogramTable, mappings]
|
||||
}
|
||||
|
||||
const createBins = (
|
||||
|
@ -186,7 +212,7 @@ const getGroup = (table: Table, groupColNames: string[], i: number) => {
|
|||
const result = {}
|
||||
|
||||
for (const key of groupColNames) {
|
||||
result[key] = table.columns[key][i]
|
||||
result[key] = table.columns[key].data[i]
|
||||
}
|
||||
|
||||
return result
|
||||
|
@@ -1,10 +1,10 @@
import {Scale} from 'src/minard'
import {range} from 'd3-array'

import {HistogramTable} from 'src/minard'

export const findHoveredRowIndices = (
  xMinCol: number[],
  xMaxCol: number[],
  yMaxCol: number[],
  table: HistogramTable,
  hoverX: number,
  hoverY: number,
  xScale: Scale,

@@ -14,6 +14,9 @@ export const findHoveredRowIndices = (
    return null
  }

  const xMinCol = table.columns.xMin.data
  const xMaxCol = table.columns.xMax.data
  const yMaxCol = table.columns.yMax.data
  const dataX = xScale.invert(hoverX)
  const dataY = yScale.invert(hoverY)
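The rest of findHoveredRowIndices falls outside these hunks. Purely as a sketch of what the d3 range import is presumably used for (an assumption, not the actual body), the matching step likely looks something like:

  // Sketch only: report every bin whose x range contains the hovered x and whose
  // bar rises at least to the hovered y.
  const hoveredRowIndices = range(0, table.length).filter(
    i => xMinCol[i] <= dataX && dataX < xMaxCol[i] && dataY <= yMaxCol[i]
  )

  return hoveredRowIndices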
@@ -1,4 +1,4 @@
import {Layer} from 'src/minard'
import {HistogramLayer} from 'src/minard'
import {getGroupKey} from 'src/minard/utils/getGroupKey'

// Given a histogram `Layer` and the index of row in its table, this function

@@ -13,11 +13,11 @@ import {getGroupKey} from 'src/minard/utils/getGroupKey'
// key") that the scale uses as a domain
// 3. Lookup the scale and get the color via this representation
export const getBarFill = (
  {scales, aesthetics, table}: Layer,
  {scales, mappings, table}: HistogramLayer,
  i: number
): string => {
  const fillScale = scales.fill
  const values = aesthetics.fill.map(colKey => table.columns[colKey][i])
  const values = mappings.fill.map(colKey => table.columns[colKey].data[i])
  const groupKey = getGroupKey(values)
  const fill = fillScale(groupKey)
|
|
@ -1,21 +1,22 @@
|
|||
import {HistogramTooltipProps, Layer} from 'src/minard'
|
||||
import {HistogramTooltipProps, HistogramLayer} from 'src/minard'
|
||||
import {getBarFill} from 'src/minard/utils/getBarFill'
|
||||
|
||||
export const getHistogramTooltipProps = (
|
||||
layer: Layer,
|
||||
layer: HistogramLayer,
|
||||
rowIndices: number[]
|
||||
): HistogramTooltipProps => {
|
||||
const {table, aesthetics} = layer
|
||||
const xMinCol = table.columns[aesthetics.xMin]
|
||||
const xMaxCol = table.columns[aesthetics.xMax]
|
||||
const yMinCol = table.columns[aesthetics.yMin]
|
||||
const yMaxCol = table.columns[aesthetics.yMax]
|
||||
const {table, mappings} = layer
|
||||
|
||||
const xMinCol = table.columns.xMin.data
|
||||
const xMaxCol = table.columns.xMax.data
|
||||
const yMinCol = table.columns.yMin.data
|
||||
const yMaxCol = table.columns.yMax.data
|
||||
|
||||
const counts = rowIndices.map(i => {
|
||||
const grouping = aesthetics.fill.reduce(
|
||||
const grouping = mappings.fill.reduce(
|
||||
(acc, colName) => ({
|
||||
...acc,
|
||||
[colName]: table.columns[colName][i],
|
||||
[colName]: table.columns[colName].data[i],
|
||||
}),
|
||||
{}
|
||||
)
|
||||
|
|
|
@@ -0,0 +1,6 @@
import {ColumnType} from 'src/minard'

const NUMERIC_TYPES = new Set(['uint', 'int', 'float', 'time'])

export const isNumeric = (columnType: ColumnType): boolean =>
  NUMERIC_TYPES.has(columnType)
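A couple of illustrative calls, following directly from the set above:

import {isNumeric} from 'src/minard'

isNumeric('float')  // true — measurement values can be binned
isNumeric('time')   // true — timestamps count as numeric here
isNumeric('string') // false — bin() rejects categorical value columns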
|
|
@ -8,18 +8,20 @@ export type PlotAction =
|
|||
| ResetAction
|
||||
| SetControlledXDomainAction
|
||||
| SetControlledYDomainAction
|
||||
| SetXAxisLabelAction
|
||||
| SetYAxisLabelAction
|
||||
|
||||
interface RegisterLayerAction {
|
||||
type: 'REGISTER_LAYER'
|
||||
payload: {
|
||||
layerKey: string
|
||||
layer: Layer
|
||||
layer: Partial<Layer>
|
||||
}
|
||||
}
|
||||
|
||||
export const registerLayer = (
|
||||
layerKey: string,
|
||||
layer: Layer
|
||||
layer: Partial<Layer>
|
||||
): RegisterLayerAction => ({
|
||||
type: 'REGISTER_LAYER',
|
||||
payload: {layerKey, layer},
|
||||
|
@ -91,3 +93,23 @@ export const setControlledYDomain = (
|
|||
type: 'SET_CONTROLLED_Y_DOMAIN',
|
||||
payload: {yDomain},
|
||||
})
|
||||
|
||||
interface SetXAxisLabelAction {
|
||||
type: 'SET_X_AXIS_LABEL'
|
||||
payload: {xAxisLabel: string}
|
||||
}
|
||||
|
||||
export const setXAxisLabel = (xAxisLabel: string): SetXAxisLabelAction => ({
|
||||
type: 'SET_X_AXIS_LABEL',
|
||||
payload: {xAxisLabel},
|
||||
})
|
||||
|
||||
interface SetYAxisLabelAction {
|
||||
type: 'SET_Y_AXIS_LABEL'
|
||||
payload: {yAxisLabel: string}
|
||||
}
|
||||
|
||||
export const setYAxisLabel = (yAxisLabel: string): SetYAxisLabelAction => ({
|
||||
type: 'SET_Y_AXIS_LABEL',
|
||||
payload: {yAxisLabel},
|
||||
})
|
||||
|
|
|
@ -6,16 +6,21 @@ import chroma from 'chroma-js'
|
|||
import {
|
||||
PlotEnv,
|
||||
Layer,
|
||||
HistogramLayer,
|
||||
Scale,
|
||||
PLOT_PADDING,
|
||||
TICK_CHAR_WIDTH,
|
||||
TICK_CHAR_HEIGHT,
|
||||
TICK_PADDING_RIGHT,
|
||||
TICK_PADDING_TOP,
|
||||
AXIS_LABEL_PADDING_BOTTOM,
|
||||
} from 'src/minard'
|
||||
import {PlotAction} from 'src/minard/utils/plotEnvActions'
|
||||
import {getGroupKey} from 'src/minard/utils/getGroupKey'
|
||||
|
||||
const DEFAULT_X_DOMAIN: [number, number] = [0, 1]
|
||||
const DEFAULT_Y_DOMAIN: [number, number] = [0, 1]
|
||||
|
||||
export const INITIAL_PLOT_ENV: PlotEnv = {
|
||||
width: 0,
|
||||
height: 0,
|
||||
|
@ -29,12 +34,20 @@ export const INITIAL_PLOT_ENV: PlotEnv = {
|
|||
},
|
||||
xTicks: [],
|
||||
yTicks: [],
|
||||
xAxisLabel: '',
|
||||
yAxisLabel: '',
|
||||
xDomain: null,
|
||||
yDomain: null,
|
||||
baseLayer: {
|
||||
table: {columns: {}, columnTypes: {}},
|
||||
aesthetics: {},
|
||||
scales: {},
|
||||
type: 'base',
|
||||
table: {columns: {}, length: 0},
|
||||
xDomain: DEFAULT_X_DOMAIN,
|
||||
yDomain: DEFAULT_Y_DOMAIN,
|
||||
mappings: {},
|
||||
scales: {
|
||||
x: null,
|
||||
y: null,
|
||||
},
|
||||
},
|
||||
layers: {},
|
||||
hoverX: null,
|
||||
|
@ -42,16 +55,13 @@ export const INITIAL_PLOT_ENV: PlotEnv = {
|
|||
dispatch: () => {},
|
||||
}
|
||||
|
||||
const DEFAULT_X_DOMAIN: [number, number] = [0, 1]
|
||||
const DEFAULT_Y_DOMAIN: [number, number] = [0, 1]
|
||||
|
||||
export const plotEnvReducer = (state: PlotEnv, action: PlotAction): PlotEnv =>
|
||||
produce(state, draftState => {
|
||||
switch (action.type) {
|
||||
case 'REGISTER_LAYER': {
|
||||
const {layerKey, layer} = action.payload
|
||||
|
||||
draftState.layers[layerKey] = layer
|
||||
draftState.layers[layerKey] = {...layer, scales: {}} as Layer
|
||||
|
||||
setXDomain(draftState)
|
||||
setYDomain(draftState)
|
||||
|
@ -112,28 +122,49 @@ export const plotEnvReducer = (state: PlotEnv, action: PlotAction): PlotEnv =>
|
|||
|
||||
return
|
||||
}
|
||||
|
||||
case 'SET_X_AXIS_LABEL': {
|
||||
const {xAxisLabel} = action.payload
|
||||
|
||||
draftState.xAxisLabel = xAxisLabel
|
||||
|
||||
setLayout(draftState)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
case 'SET_Y_AXIS_LABEL': {
|
||||
const {yAxisLabel} = action.payload
|
||||
|
||||
draftState.yAxisLabel = yAxisLabel
|
||||
|
||||
setLayout(draftState)
|
||||
|
||||
return
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
/*
|
||||
Find all columns in the current in all layers that are mapped to the supplied
|
||||
aesthetics
|
||||
aesthetic mappings
|
||||
*/
|
||||
const getColumnsForAesthetics = (
|
||||
state: PlotEnv,
|
||||
aesthetics: string[]
|
||||
mappings: string[]
|
||||
): any[][] => {
|
||||
const {baseLayer, layers} = state
|
||||
|
||||
const cols = []
|
||||
|
||||
for (const layer of Object.values(layers)) {
|
||||
for (const aes of aesthetics) {
|
||||
if (layer.aesthetics[aes]) {
|
||||
const colName = layer.aesthetics[aes]
|
||||
for (const mapping of mappings) {
|
||||
const colName = layer.mappings[mapping]
|
||||
|
||||
if (colName) {
|
||||
const col = layer.table
|
||||
? layer.table.columns[colName]
|
||||
: baseLayer.table.columns[colName]
|
||||
? layer.table.columns[colName].data
|
||||
: baseLayer.table.columns[colName].data
|
||||
|
||||
cols.push(col)
|
||||
}
|
||||
|
@ -222,11 +253,20 @@ const setLayout = (draftState: PlotEnv): void => {
|
|||
const yTickWidth =
|
||||
Math.max(...draftState.yTicks.map(t => String(t).length)) * TICK_CHAR_WIDTH
|
||||
|
||||
const xAxisLabelHeight = draftState.xAxisLabel
|
||||
? TICK_CHAR_HEIGHT + AXIS_LABEL_PADDING_BOTTOM
|
||||
: 0
|
||||
|
||||
const yAxisLabelHeight = draftState.yAxisLabel
|
||||
? TICK_CHAR_HEIGHT + AXIS_LABEL_PADDING_BOTTOM
|
||||
: 0
|
||||
|
||||
const margins = {
|
||||
top: PLOT_PADDING,
|
||||
right: PLOT_PADDING,
|
||||
bottom: TICK_CHAR_HEIGHT + TICK_PADDING_TOP + PLOT_PADDING,
|
||||
left: yTickWidth + TICK_PADDING_RIGHT + PLOT_PADDING,
|
||||
bottom:
|
||||
TICK_CHAR_HEIGHT + TICK_PADDING_TOP + PLOT_PADDING + xAxisLabelHeight,
|
||||
left: yTickWidth + TICK_PADDING_RIGHT + PLOT_PADDING + yAxisLabelHeight,
|
||||
}
|
||||
|
||||
const innerWidth = width - margins.left - margins.right
|
||||
|
@ -272,18 +312,17 @@ const getColorScale = (
|
|||
of data (for now). So the domain of the scale is a set of "group keys" which
|
||||
represent all possible groupings of data in the layer.
|
||||
*/
|
||||
const getFillDomain = ({table, aesthetics}: Layer): string[] => {
|
||||
const fillColKeys = aesthetics.fill
|
||||
const getFillDomain = ({table, mappings}: HistogramLayer): string[] => {
|
||||
const fillColKeys = mappings.fill
|
||||
|
||||
if (!fillColKeys.length) {
|
||||
return []
|
||||
}
|
||||
|
||||
const fillDomain = new Set()
|
||||
const n = Object.values(table.columns)[0].length
|
||||
|
||||
for (let i = 0; i < n; i++) {
|
||||
fillDomain.add(getGroupKey(fillColKeys.map(k => table.columns[k][i])))
|
||||
for (let i = 0; i < table.length; i++) {
|
||||
fillDomain.add(getGroupKey(fillColKeys.map(k => table.columns[k].data[i])))
|
||||
}
|
||||
|
||||
return [...fillDomain].sort()
|
||||
|
@ -297,11 +336,8 @@ const setFillScales = (draftState: PlotEnv) => {
|
|||
const layers = Object.values(draftState.layers)
|
||||
|
||||
layers
|
||||
.filter(
|
||||
// Pick out the layers that actually need a fill scale
|
||||
layer => layer.aesthetics.fill && layer.colors && layer.colors.length
|
||||
)
|
||||
.forEach(layer => {
|
||||
.filter(layer => layer.type === 'histogram')
|
||||
.forEach((layer: HistogramLayer) => {
|
||||
layer.scales.fill = getColorScale(getFillDomain(layer), layer.colors)
|
||||
})
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ import {registerLayer, unregisterLayer} from 'src/minard/utils/plotEnvActions'
|
|||
*/
|
||||
export const useLayer = (
|
||||
env: PlotEnv,
|
||||
layerFactory: () => Layer,
|
||||
layerFactory: () => Partial<Layer>,
|
||||
inputs?: DependencyList
|
||||
) => {
|
||||
const {current: layerKey} = useRef(uuid.v4())
|
||||
|
|
|
@ -22,12 +22,17 @@ export const telegrafsAPI = {
|
|||
telegrafsTelegrafIDPut,
|
||||
}
|
||||
|
||||
const getAuthorizationToken = jest.fn(() => Promise.resolve('im_an_auth_token'))
|
||||
|
||||
export const client = {
|
||||
telegrafConfigs: {
|
||||
getAll: telegrafsGet,
|
||||
getAllByOrg: telegrafsGet,
|
||||
create: telegrafsPost,
|
||||
},
|
||||
authorizations: {
|
||||
getAuthorizationToken,
|
||||
},
|
||||
}
|
||||
|
||||
export const setupAPI = {
|
||||
|
|
|
@ -28,7 +28,8 @@ export default class CollectorList extends PureComponent<Props> {
|
|||
<IndexList>
|
||||
<IndexList.Header>
|
||||
<IndexList.HeaderCell columnName="Name" width="50%" />
|
||||
<IndexList.HeaderCell columnName="Bucket" width="50%" />
|
||||
<IndexList.HeaderCell columnName="Bucket" width="25%" />
|
||||
<IndexList.HeaderCell columnName="" width="25%" />
|
||||
</IndexList.Header>
|
||||
<IndexList.Body columnCount={3} emptyState={emptyState}>
|
||||
{this.collectorsList}
|
||||
|
|
|
@ -47,6 +47,7 @@ export default class CollectorRow extends PureComponent<Props> {
|
|||
onUpdate={this.handleUpdateName}
|
||||
name={collector.name}
|
||||
noNameString={DEFAULT_COLLECTOR_NAME}
|
||||
onEditName={this.handleOpenConfig}
|
||||
/>
|
||||
<EditableDescription
|
||||
description={collector.description}
|
||||
|
@ -61,13 +62,7 @@ export default class CollectorRow extends PureComponent<Props> {
|
|||
<Button
|
||||
size={ComponentSize.ExtraSmall}
|
||||
color={ComponentColor.Secondary}
|
||||
text={'View'}
|
||||
onClick={this.handleOpenConfig}
|
||||
/>
|
||||
<Button
|
||||
size={ComponentSize.ExtraSmall}
|
||||
color={ComponentColor.Secondary}
|
||||
text={'Setup Details'}
|
||||
text={'Setup Instructions'}
|
||||
onClick={this.handleOpenInstructions}
|
||||
/>
|
||||
<ConfirmationButton
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
// Libraries
|
||||
import React, {PureComponent, ChangeEvent} from 'react'
|
||||
import _ from 'lodash'
|
||||
|
||||
// Components
|
||||
import {
|
||||
|
@ -13,6 +14,7 @@ import {
|
|||
OverlayBody,
|
||||
OverlayHeading,
|
||||
OverlayContainer,
|
||||
OverlayFooter,
|
||||
Input,
|
||||
} from 'src/clockface'
|
||||
|
||||
|
@ -47,13 +49,13 @@ export default class CreateOrgOverlay extends PureComponent<Props, State> {
|
|||
const {org, nameInputStatus, errorMessage} = this.state
|
||||
|
||||
return (
|
||||
<OverlayContainer>
|
||||
<OverlayContainer maxWidth={500}>
|
||||
<OverlayHeading
|
||||
title="Create Organization"
|
||||
onDismiss={this.props.onCloseModal}
|
||||
/>
|
||||
<OverlayBody>
|
||||
<Form onSubmit={this.handleCreateOrg}>
|
||||
<Form onSubmit={this.handleCreateOrg}>
|
||||
<OverlayBody>
|
||||
<Form.Element label="Name" errorMessage={errorMessage}>
|
||||
<Input
|
||||
placeholder="Give your organization a name"
|
||||
|
@ -64,24 +66,31 @@ export default class CreateOrgOverlay extends PureComponent<Props, State> {
|
|||
status={nameInputStatus}
|
||||
/>
|
||||
</Form.Element>
|
||||
<Form.Footer>
|
||||
<Button
|
||||
text="Cancel"
|
||||
color={ComponentColor.Danger}
|
||||
onClick={onCloseModal}
|
||||
/>
|
||||
<Button
|
||||
text="Create"
|
||||
type={ButtonType.Submit}
|
||||
color={ComponentColor.Primary}
|
||||
/>
|
||||
</Form.Footer>
|
||||
</Form>
|
||||
</OverlayBody>
|
||||
</OverlayBody>
|
||||
<OverlayFooter>
|
||||
<Button text="Cancel" onClick={onCloseModal} />
|
||||
<Button
|
||||
text="Create"
|
||||
type={ButtonType.Submit}
|
||||
color={ComponentColor.Primary}
|
||||
status={this.submitButtonStatus}
|
||||
/>
|
||||
</OverlayFooter>
|
||||
</Form>
|
||||
</OverlayContainer>
|
||||
)
|
||||
}
|
||||
|
||||
private get submitButtonStatus(): ComponentStatus {
|
||||
const {org} = this.state
|
||||
|
||||
if (org.name) {
|
||||
return ComponentStatus.Default
|
||||
}
|
||||
|
||||
return ComponentStatus.Disabled
|
||||
}
|
||||
|
||||
private handleCreateOrg = async () => {
|
||||
const {org} = this.state
|
||||
const {onCreateOrg, onCloseModal} = this.props
|
||||
|
@ -98,7 +107,7 @@ export default class CreateOrgOverlay extends PureComponent<Props, State> {
|
|||
return this.setState({
|
||||
org,
|
||||
nameInputStatus: ComponentStatus.Error,
|
||||
errorMessage: `Organization ${key} cannot be empty`,
|
||||
errorMessage: this.randomErrorMessage(key),
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -108,4 +117,17 @@ export default class CreateOrgOverlay extends PureComponent<Props, State> {
|
|||
errorMessage: '',
|
||||
})
|
||||
}
|
||||
|
||||
private randomErrorMessage = (key: string): string => {
|
||||
const messages = [
|
||||
`Imagine that! An organization without a ${key}`,
|
||||
`An organization needs a ${key}`,
|
||||
`You're not getting far without a ${key}`,
|
||||
`The organization formerly known as...`,
|
||||
`Pick a ${key}, any ${key}`,
|
||||
`Any ${key} will do`,
|
||||
]
|
||||
|
||||
return _.sample(messages)
|
||||
}
|
||||
}
|
||||
|
|
|
@ -59,7 +59,7 @@ export default class CreateOrgOverlay extends PureComponent<Props, State> {
|
|||
onDismiss={this.props.onCloseModal}
|
||||
/>
|
||||
|
||||
<Form>
|
||||
<Form onSubmit={this.handleSubmit}>
|
||||
<OverlayBody>
|
||||
<div className="overlay-flux-editor--spacing">
|
||||
<Form.Element label="Name">
|
||||
|
@ -94,7 +94,6 @@ export default class CreateOrgOverlay extends PureComponent<Props, State> {
|
|||
<Button
|
||||
text="Create"
|
||||
type={ButtonType.Submit}
|
||||
onClick={this.handleSubmit}
|
||||
color={ComponentColor.Primary}
|
||||
/>
|
||||
</OverlayFooter>
|
||||
|
|
|
@ -1,15 +1,13 @@
|
|||
// Libraries
|
||||
import React, {PureComponent, ChangeEvent} from 'react'
|
||||
import React, {PureComponent} from 'react'
|
||||
import {connect} from 'react-redux'
|
||||
import {InjectedRouter} from 'react-router'
|
||||
import _ from 'lodash'
|
||||
|
||||
// Components
|
||||
import {Input, IconFont} from 'src/clockface'
|
||||
import FilterList from 'src/shared/components/Filter'
|
||||
import TasksHeader from 'src/tasks/components/TasksHeader'
|
||||
import TasksList from 'src/tasks/components/TasksList'
|
||||
import {Page} from 'src/pageLayout'
|
||||
import {ErrorHandling} from 'src/shared/decorators/errors'
|
||||
import ImportOverlay from 'src/shared/components/ImportOverlay'
|
||||
|
||||
|
@ -95,47 +93,37 @@ class OrgTasksPage extends PureComponent<Props, State> {
|
|||
|
||||
return (
|
||||
<>
|
||||
<Page titleTag="Tasks">
|
||||
<Input
|
||||
icon={IconFont.Search}
|
||||
placeholder="Filter tasks..."
|
||||
widthPixels={290}
|
||||
value={searchTerm}
|
||||
onChange={this.handleFilterChange}
|
||||
onBlur={this.handleFilterBlur}
|
||||
/>
|
||||
<TasksHeader
|
||||
onCreateTask={this.handleCreateTask}
|
||||
setSearchTerm={setSearchTerm}
|
||||
setShowInactive={this.handleToggle}
|
||||
showInactive={showInactive}
|
||||
toggleOverlay={this.handleToggleImportOverlay}
|
||||
showOrgDropdown={false}
|
||||
showFilter={false}
|
||||
/>
|
||||
<FilterList<Task>
|
||||
searchTerm={searchTerm}
|
||||
searchKeys={['name', 'labels[].name']}
|
||||
list={this.filteredTasks}
|
||||
>
|
||||
{ts => (
|
||||
<TasksList
|
||||
searchTerm={searchTerm}
|
||||
tasks={ts}
|
||||
totalCount={this.totalTaskCount}
|
||||
onActivate={this.handleActivate}
|
||||
onDelete={this.handleDelete}
|
||||
onCreate={this.handleCreateTask}
|
||||
onClone={this.handleClone}
|
||||
onSelect={this.handleSelectTask}
|
||||
onAddTaskLabels={onAddTaskLabels}
|
||||
onRemoveTaskLabels={onRemoveTaskLabels}
|
||||
onUpdate={this.handleUpdateTask}
|
||||
onRunTask={onRunTask}
|
||||
/>
|
||||
)}
|
||||
</FilterList>
|
||||
</Page>
|
||||
<TasksHeader
|
||||
onCreateTask={this.handleCreateTask}
|
||||
setSearchTerm={setSearchTerm}
|
||||
setShowInactive={this.handleToggle}
|
||||
showInactive={showInactive}
|
||||
toggleOverlay={this.handleToggleImportOverlay}
|
||||
showOrgDropdown={false}
|
||||
isFullPage={false}
|
||||
/>
|
||||
<FilterList<Task>
|
||||
searchTerm={searchTerm}
|
||||
searchKeys={['name', 'labels[].name']}
|
||||
list={this.filteredTasks}
|
||||
>
|
||||
{ts => (
|
||||
<TasksList
|
||||
searchTerm={searchTerm}
|
||||
tasks={ts}
|
||||
totalCount={this.totalTaskCount}
|
||||
onActivate={this.handleActivate}
|
||||
onDelete={this.handleDelete}
|
||||
onCreate={this.handleCreateTask}
|
||||
onClone={this.handleClone}
|
||||
onSelect={this.handleSelectTask}
|
||||
onAddTaskLabels={onAddTaskLabels}
|
||||
onRemoveTaskLabels={onRemoveTaskLabels}
|
||||
onUpdate={this.handleUpdateTask}
|
||||
onRunTask={onRunTask}
|
||||
/>
|
||||
)}
|
||||
</FilterList>
|
||||
{this.importOverlay}
|
||||
</>
|
||||
)
|
||||
|
@ -218,14 +206,6 @@ class OrgTasksPage extends PureComponent<Props, State> {
|
|||
private handleValidateTask = (): boolean => {
|
||||
return true
|
||||
}
|
||||
|
||||
private handleFilterBlur = (e: ChangeEvent<HTMLInputElement>): void => {
|
||||
this.props.setSearchTerm(e.target.value)
|
||||
}
|
||||
|
||||
private handleFilterChange = (e: ChangeEvent<HTMLInputElement>): void => {
|
||||
this.props.setSearchTerm(e.target.value)
|
||||
}
|
||||
}
|
||||
|
||||
const mstp = ({
|
||||
|
|
|
@ -9,7 +9,7 @@ import OverlayContainer from 'src/clockface/components/overlays/OverlayContainer
|
|||
import OverlayTechnology from 'src/clockface/components/overlays/OverlayTechnology'
|
||||
import OverlayHeading from 'src/clockface/components/overlays/OverlayHeading'
|
||||
import TelegrafConfig from 'src/organizations/components/TelegrafConfig'
|
||||
import {ComponentSize, ComponentColor, Button} from '@influxdata/clockface'
|
||||
import {ComponentColor, Button} from '@influxdata/clockface'
|
||||
import {OverlayFooter} from 'src/clockface'
|
||||
|
||||
// Utils
|
||||
|
@ -66,7 +66,6 @@ export class TelegrafConfigOverlay extends PureComponent<Props> {
|
|||
</OverlayBody>
|
||||
<OverlayFooter>
|
||||
<Button
|
||||
size={ComponentSize.ExtraSmall}
|
||||
color={ComponentColor.Secondary}
|
||||
text={'Download Config'}
|
||||
onClick={this.handleDownloadConfig}
|
||||
|
|
|
@ -25,7 +25,7 @@ export default class VariableRow extends PureComponent<Props> {
|
|||
const {variable, onDeleteVariable} = this.props
|
||||
|
||||
return (
|
||||
<IndexList.Row>
|
||||
<IndexList.Row testID="variable-row">
|
||||
<IndexList.Cell alignment={Alignment.Left}>
|
||||
<EditableName
|
||||
onUpdate={this.handleUpdateVariableName}
|
||||
|
|
|
@ -49,12 +49,13 @@ export default class Variables extends PureComponent<Props, State> {
|
|||
|
||||
if (_.isEmpty(searchTerm)) {
|
||||
return (
|
||||
<EmptyState size={ComponentSize.Medium}>
|
||||
<EmptyState size={ComponentSize.Large}>
|
||||
<EmptyState.Text
|
||||
text={`${orgName} does not own any Variables , why not create one?`}
|
||||
highlightWords={['Buckets']}
|
||||
highlightWords={['Variables']}
|
||||
/>
|
||||
<Button
|
||||
size={ComponentSize.Medium}
|
||||
text="Create Variable"
|
||||
icon={IconFont.Plus}
|
||||
color={ComponentColor.Primary}
|
||||
|
@ -65,7 +66,7 @@ export default class Variables extends PureComponent<Props, State> {
|
|||
}
|
||||
|
||||
return (
|
||||
<EmptyState size={ComponentSize.Medium}>
|
||||
<EmptyState size={ComponentSize.Large}>
|
||||
<EmptyState.Text text="No Variables match your query" />
|
||||
</EmptyState>
|
||||
)
|
||||
|
|
|
@ -96,6 +96,7 @@ Object {
|
|||
<div
|
||||
class="confirmation-button--tooltip-body"
|
||||
data-test="confirmation-button--click-target"
|
||||
data-testid="confirmation-button"
|
||||
>
|
||||
Confirm
|
||||
</div>
|
||||
|
@ -228,6 +229,7 @@ Object {
|
|||
<div
|
||||
class="confirmation-button--tooltip-body"
|
||||
data-test="confirmation-button--click-target"
|
||||
data-testid="confirmation-button"
|
||||
>
|
||||
Confirm
|
||||
</div>
|
||||
|
@ -417,6 +419,7 @@ Object {
|
|||
<div
|
||||
class="confirmation-button--tooltip-body"
|
||||
data-test="confirmation-button--click-target"
|
||||
data-testid="confirmation-button"
|
||||
>
|
||||
Confirm
|
||||
</div>
|
||||
|
@ -549,6 +552,7 @@ Object {
|
|||
<div
|
||||
class="confirmation-button--tooltip-body"
|
||||
data-test="confirmation-button--click-target"
|
||||
data-testid="confirmation-button"
|
||||
>
|
||||
Confirm
|
||||
</div>
|
||||
|
|
|
@ -12,7 +12,12 @@ exports[`CollectorList rendering renders 1`] = `
|
|||
<IndexListHeaderCell
|
||||
alignment="left"
|
||||
columnName="Bucket"
|
||||
width="50%"
|
||||
width="25%"
|
||||
/>
|
||||
<IndexListHeaderCell
|
||||
alignment="left"
|
||||
columnName=""
|
||||
width="25%"
|
||||
/>
|
||||
</IndexListHeader>
|
||||
<IndexListBody
|
||||
|
|
|
@ -60,7 +60,7 @@ class OrgBucketsIndex extends Component<Props> {
|
|||
<Page.Contents fullWidth={false} scrollable={true}>
|
||||
<div className="col-xs-12">
|
||||
<Tabs>
|
||||
<OrganizationNavigation tab={'bucket_tab'} orgID={org.id} />
|
||||
<OrganizationNavigation tab={'buckets_tab'} orgID={org.id} />
|
||||
<Tabs.TabContents>
|
||||
<TabbedPageSection
|
||||
id="org-view-tab--buckets"
|
||||
|
|
|
@@ -0,0 +1,3 @@
import {AppState, Organization} from 'src/types/v2'

export const getActiveOrg = (state: AppState): Organization => state.orgs[0]

@@ -20,6 +20,7 @@ interface XHRError extends Error {

export const executeQuery = (
  url: string,
  orgID: string,
  query: string,
  language: InfluxLanguage = InfluxLanguage.Flux
): WrappedCancelablePromise<ExecuteFluxQueryResult> => {

@@ -127,7 +128,7 @@ export const executeQuery = (
  const dialect = {annotations: ['group', 'datatype', 'default']}
  const body = JSON.stringify({query, dialect, type: language})

  xhr.open('POST', url)
  xhr.open('POST', `${url}?orgID=${encodeURIComponent(orgID)}`)
  xhr.setRequestHeader('Content-Type', 'application/json')
  xhr.send(body)
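For illustration, a call under the new signature could look like the sketch below. The URL, org ID, and query text are placeholder values, not values taken from this change; only the parameter order and the resulting ?orgID query string come from the hunks above, and the language argument is omitted since it defaults to Flux.

import {executeQuery} from 'src/shared/apis/v2/query'

// Placeholder values for illustration only.
const {promise, cancel} = executeQuery(
  '/api/v2/query',
  '036ef15cfae3b000', // orgID — now appended to the POST URL as ?orgID=...
  'from(bucket: "telegraf") |> range(start: -5m)'
)
// promise resolves to an ExecuteFluxQueryResult; cancel() tears the request down.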
|
||||
|
||||
|
|
|
@ -5,8 +5,8 @@ import {AutoSizer} from 'react-virtualized'
|
|||
import {
|
||||
Plot as MinardPlot,
|
||||
Histogram as MinardHistogram,
|
||||
ColumnType,
|
||||
Table,
|
||||
isNumeric,
|
||||
} from 'src/minard'
|
||||
|
||||
// Components
|
||||
|
@ -52,18 +52,18 @@ type Props = OwnProps & DispatchProps
|
|||
*/
|
||||
const resolveMappings = (
|
||||
table: Table,
|
||||
preferredXColumn: string,
|
||||
preferredFillColumns: string[] = []
|
||||
preferredXColumnName: string,
|
||||
preferredFillColumnNames: string[] = []
|
||||
): {x: string; fill: string[]} => {
|
||||
let x: string = preferredXColumn
|
||||
let x: string = preferredXColumnName
|
||||
|
||||
if (!table.columns[x] || table.columnTypes[x] !== ColumnType.Numeric) {
|
||||
x = Object.entries(table.columnTypes)
|
||||
.filter(([__, type]) => type === ColumnType.Numeric)
|
||||
if (!table.columns[x] || !isNumeric(table.columns[x].type)) {
|
||||
x = Object.entries(table.columns)
|
||||
.filter(([__, {type}]) => isNumeric(type))
|
||||
.map(([name]) => name)[0]
|
||||
}
|
||||
|
||||
let fill = preferredFillColumns || []
|
||||
let fill = preferredFillColumnNames || []
|
||||
|
||||
fill = fill.filter(name => table.columns[name])
|
||||
|
||||
|
@ -79,6 +79,7 @@ const Histogram: SFC<Props> = ({
|
|||
binCount,
|
||||
position,
|
||||
colors,
|
||||
xAxisLabel,
|
||||
xDomain: defaultXDomain,
|
||||
},
|
||||
}) => {
|
||||
|
@ -99,27 +100,34 @@ const Histogram: SFC<Props> = ({
|
|||
|
||||
return (
|
||||
<AutoSizer>
|
||||
{({width, height}) => (
|
||||
<MinardPlot
|
||||
table={table}
|
||||
width={width}
|
||||
height={height}
|
||||
xDomain={xDomain}
|
||||
onSetXDomain={setXDomain}
|
||||
>
|
||||
{env => (
|
||||
<MinardHistogram
|
||||
env={env}
|
||||
x={mappings.x}
|
||||
fill={fill}
|
||||
binCount={binCount}
|
||||
position={position}
|
||||
tooltip={HistogramTooltip}
|
||||
colors={colorHexes}
|
||||
/>
|
||||
)}
|
||||
</MinardPlot>
|
||||
)}
|
||||
{({width, height}) => {
|
||||
if (width === 0 || height === 0) {
|
||||
return null
|
||||
}
|
||||
|
||||
return (
|
||||
<MinardPlot
|
||||
table={table}
|
||||
width={width}
|
||||
height={height}
|
||||
xAxisLabel={xAxisLabel}
|
||||
xDomain={xDomain}
|
||||
onSetXDomain={setXDomain}
|
||||
>
|
||||
{env => (
|
||||
<MinardHistogram
|
||||
env={env}
|
||||
x={mappings.x}
|
||||
fill={fill}
|
||||
binCount={binCount}
|
||||
position={position}
|
||||
tooltip={HistogramTooltip}
|
||||
colors={colorHexes}
|
||||
/>
|
||||
)}
|
||||
</MinardPlot>
|
||||
)
|
||||
}}
|
||||
</AutoSizer>
|
||||
)
|
||||
}
|
||||
|
|
|
@ -5,9 +5,12 @@
|
|||
*/
|
||||
|
||||
.resource-label--form {
|
||||
margin-bottom: $ix-marg-b;
|
||||
width: 100%;
|
||||
|
||||
.resource-label--create-button {
|
||||
margin-top: $ix-marg-c + 2px;
|
||||
}
|
||||
}
|
||||
.form--element {
|
||||
margin: 0;
|
||||
}
|
||||
.component-spacer--horizontal {
|
||||
align-items: flex-start;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -7,22 +7,15 @@ import {
|
|||
Button,
|
||||
ComponentColor,
|
||||
ButtonType,
|
||||
Columns,
|
||||
ComponentStatus,
|
||||
} from '@influxdata/clockface'
|
||||
import {
|
||||
Grid,
|
||||
Form,
|
||||
Input,
|
||||
InputType,
|
||||
ComponentSpacer,
|
||||
Alignment,
|
||||
} from 'src/clockface'
|
||||
import {Form, Input, InputType, ComponentSpacer, Alignment} from 'src/clockface'
|
||||
import RandomLabelColorButton from 'src/configuration/components/RandomLabelColor'
|
||||
import {Label, LabelProperties} from 'src/types/v2/labels'
|
||||
|
||||
// Constants
|
||||
import {HEX_CODE_CHAR_LENGTH} from 'src/configuration/constants/LabelColors'
|
||||
const MAX_CREATE_BUTTON_LENGTH = 24
|
||||
|
||||
// Utils
|
||||
import {
|
||||
|
@ -72,46 +65,34 @@ export default class ResourceLabelForm extends PureComponent<Props, State> {
|
|||
public render() {
|
||||
const {isValid} = this.state
|
||||
|
||||
// TODO: Add className prop to ComponentSpacer so we don't need this wrapper div
|
||||
|
||||
return (
|
||||
<div className="resource-label--form">
|
||||
<Grid.Row>
|
||||
<Grid.Column widthXS={Columns.Five}>
|
||||
<Form.Element label="Color">
|
||||
<ComponentSpacer stretchToFitWidth={true} align={Alignment.Left}>
|
||||
<RandomLabelColorButton
|
||||
colorHex={this.colorHex}
|
||||
onClick={this.handleColorChange}
|
||||
/>
|
||||
{this.customColorInput}
|
||||
</ComponentSpacer>
|
||||
</Form.Element>
|
||||
</Grid.Column>
|
||||
<Grid.Column widthXS={Columns.Five}>
|
||||
<Form.Element label="Description">
|
||||
<Input
|
||||
type={InputType.Text}
|
||||
placeholder="Add an optional description"
|
||||
name="description"
|
||||
value={this.description}
|
||||
onChange={this.handleInputChange}
|
||||
/>
|
||||
</Form.Element>
|
||||
</Grid.Column>
|
||||
<Grid.Column widthXS={Columns.Two}>
|
||||
<Form.Element label="">
|
||||
<Button
|
||||
customClass="resource-label--create-button"
|
||||
text="Create Label"
|
||||
color={ComponentColor.Success}
|
||||
type={ButtonType.Submit}
|
||||
status={
|
||||
isValid ? ComponentStatus.Default : ComponentStatus.Disabled
|
||||
}
|
||||
onClick={this.handleSubmit}
|
||||
/>
|
||||
</Form.Element>
|
||||
</Grid.Column>
|
||||
</Grid.Row>
|
||||
<ComponentSpacer align={Alignment.Left}>
|
||||
<RandomLabelColorButton
|
||||
colorHex={this.colorHex}
|
||||
onClick={this.handleColorChange}
|
||||
/>
|
||||
{this.customColorInput}
|
||||
<Input
|
||||
type={InputType.Text}
|
||||
placeholder="Add an optional description"
|
||||
name="description"
|
||||
value={this.description}
|
||||
onChange={this.handleInputChange}
|
||||
/>
|
||||
<Button
|
||||
customClass="resource-label--create-button"
|
||||
text={this.createButtonLabel}
|
||||
color={ComponentColor.Success}
|
||||
type={ButtonType.Submit}
|
||||
status={
|
||||
isValid ? ComponentStatus.Default : ComponentStatus.Disabled
|
||||
}
|
||||
onClick={this.handleSubmit}
|
||||
/>
|
||||
</ComponentSpacer>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
@ -157,6 +138,18 @@ export default class ResourceLabelForm extends PureComponent<Props, State> {
|
|||
})
|
||||
}
|
||||
|
||||
private get createButtonLabel(): string {
|
||||
const {labelName} = this.props
|
||||
|
||||
let label = `Create "${labelName}"`
|
||||
|
||||
if (labelName.length > MAX_CREATE_BUTTON_LENGTH) {
|
||||
label = `Create "${labelName.slice(0, MAX_CREATE_BUTTON_LENGTH)}..."`
|
||||
}
|
||||
|
||||
return label
|
||||
}
|
||||
|
||||
private get customColorInput(): JSX.Element {
|
||||
const {colorHex} = this
|
||||
|
||||
|
|
|
@ -10,18 +10,20 @@ import {executeQuery, ExecuteFluxQueryResult} from 'src/shared/apis/v2/query'
|
|||
import {parseResponse} from 'src/shared/parsing/flux/response'
|
||||
import {getSources, getActiveSource} from 'src/sources/selectors'
|
||||
import {renderQuery} from 'src/shared/utils/renderQuery'
|
||||
import {getActiveOrg} from 'src/organizations/selectors'
|
||||
|
||||
// Types
|
||||
import {RemoteDataState, FluxTable} from 'src/types'
|
||||
import {DashboardQuery} from 'src/types/v2/dashboards'
|
||||
import {AppState, Source} from 'src/types/v2'
|
||||
import {AppState, Source, Organization} from 'src/types/v2'
|
||||
import {WrappedCancelablePromise, CancellationError} from 'src/types/promises'
|
||||
|
||||
type URLQuery = DashboardQuery & {url: string}
|
||||
|
||||
const executeRenderedQuery = (
|
||||
{text, type, url}: URLQuery,
|
||||
variables: {[key: string]: string}
|
||||
variables: {[key: string]: string},
|
||||
orgID: string
|
||||
): WrappedCancelablePromise<ExecuteFluxQueryResult> => {
|
||||
let isCancelled = false
|
||||
let cancelExecution
|
||||
|
@ -39,7 +41,7 @@ const executeRenderedQuery = (
|
|||
return Promise.reject(new CancellationError())
|
||||
}
|
||||
|
||||
const pendingResult = executeQuery(url, renderedQuery, type)
|
||||
const pendingResult = executeQuery(url, orgID, renderedQuery, type)
|
||||
|
||||
cancelExecution = pendingResult.cancel
|
||||
|
||||
|
@ -61,6 +63,7 @@ export interface QueriesState {
|
|||
interface StateProps {
|
||||
dynamicSourceURL: string
|
||||
sources: Source[]
|
||||
activeOrg: Organization
|
||||
}
|
||||
|
||||
interface OwnProps {
|
||||
|
@ -141,7 +144,7 @@ class TimeSeries extends Component<Props, State> {
|
|||
}
|
||||
|
||||
private reload = async () => {
|
||||
const {inView, variables} = this.props
|
||||
const {inView, variables, activeOrg} = this.props
|
||||
const queries = this.queries
|
||||
|
||||
if (!inView) {
|
||||
|
@ -167,7 +170,9 @@ class TimeSeries extends Component<Props, State> {
|
|||
this.pendingResults.forEach(({cancel}) => cancel())
|
||||
|
||||
// Issue new queries
|
||||
this.pendingResults = queries.map(q => executeRenderedQuery(q, variables))
|
||||
this.pendingResults = queries.map(q =>
|
||||
executeRenderedQuery(q, variables, activeOrg.id)
|
||||
)
|
||||
|
||||
// Wait for new queries to complete
|
||||
const results = await Promise.all(this.pendingResults.map(r => r.promise))
|
||||
|
@ -218,8 +223,9 @@ class TimeSeries extends Component<Props, State> {
|
|||
const mstp = (state: AppState) => {
|
||||
const sources = getSources(state)
|
||||
const dynamicSourceURL = getActiveSource(state).links.query
|
||||
const activeOrg = getActiveOrg(state)
|
||||
|
||||
return {sources, dynamicSourceURL}
|
||||
return {sources, dynamicSourceURL, activeOrg}
|
||||
}
|
||||
|
||||
export default connect<StateProps, {}, OwnProps>(
|
||||
|
|
|
@ -4,9 +4,8 @@ import ReactDatePicker from 'react-datepicker'
|
|||
|
||||
// Styles
|
||||
import 'react-datepicker/dist/react-datepicker.css'
|
||||
import {Input} from 'src/clockface'
|
||||
import {ComponentSize} from '@influxdata/clockface'
|
||||
import FormLabel from 'src/clockface/components/form_layout/FormLabel'
|
||||
import {Input, Form, Grid} from 'src/clockface'
|
||||
import {ComponentSize, Columns} from '@influxdata/clockface'
|
||||
|
||||
interface Props {
|
||||
label: string
|
||||
|
@ -22,37 +21,42 @@ class DatePicker extends PureComponent<Props> {
|
|||
const date = new Date(dateTime)
|
||||
|
||||
return (
|
||||
<FormLabel label={label}>
|
||||
<div className="range-picker--date-picker">
|
||||
<ReactDatePicker
|
||||
selected={date}
|
||||
onChange={this.handleSelectDate}
|
||||
startOpen={true}
|
||||
dateFormat="yyyy-MM-dd HH:mm"
|
||||
showTimeSelect={true}
|
||||
timeFormat="HH:mm"
|
||||
shouldCloseOnSelect={false}
|
||||
disabledKeyboardNavigation={true}
|
||||
customInput={this.customInput}
|
||||
popperContainer={this.popperContainer}
|
||||
popperClassName="range-picker--popper"
|
||||
calendarClassName="range-picker--calendar"
|
||||
dayClassName={this.dayClassName}
|
||||
timeIntervals={60}
|
||||
fixedHeight={true}
|
||||
/>
|
||||
</div>
|
||||
</FormLabel>
|
||||
<div className="range-picker--date-picker">
|
||||
<Grid.Row>
|
||||
<Grid.Column widthXS={Columns.Twelve}>
|
||||
<Form.Element label={label}>
|
||||
<ReactDatePicker
|
||||
selected={date}
|
||||
onChange={this.handleSelectDate}
|
||||
startOpen={true}
|
||||
dateFormat="yyyy-MM-dd HH:mm"
|
||||
showTimeSelect={true}
|
||||
timeFormat="HH:mm"
|
||||
shouldCloseOnSelect={false}
|
||||
disabledKeyboardNavigation={true}
|
||||
customInput={this.customInput}
|
||||
popperContainer={this.popperContainer}
|
||||
popperClassName="range-picker--popper"
|
||||
calendarClassName="range-picker--calendar"
|
||||
dayClassName={this.dayClassName}
|
||||
timeIntervals={60}
|
||||
fixedHeight={true}
|
||||
/>
|
||||
</Form.Element>
|
||||
</Grid.Column>
|
||||
</Grid.Row>
|
||||
</div>
|
||||
)
|
||||
}
|
||||
|
||||
private get customInput() {
|
||||
const {label} = this.props
|
||||
|
||||
return (
|
||||
<Input
|
||||
widthPixels={314}
|
||||
size={ComponentSize.Medium}
|
||||
customClass="range-picker--input react-datepicker-ignore-onclickoutside"
|
||||
titleText="Start"
|
||||
titleText={label}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
|
|
@ -10,10 +10,10 @@
|
|||
text-align: center;
|
||||
background-color: $g1-raven;
|
||||
border: $ix-border solid $c-pool;
|
||||
padding: $ix-marg-b;
|
||||
padding: 0 $ix-marg-b;
|
||||
border-radius: $ix-radius;
|
||||
z-index: 9999;
|
||||
height: 410px;
|
||||
height: 416px;
|
||||
|
||||
.react-datepicker {
|
||||
font-family: $ix-text-font;
|
||||
|
@ -25,9 +25,16 @@
|
|||
display: flex;
|
||||
flex-direction: row;
|
||||
align-items: center;
|
||||
margin: $ix-marg-b 0;
|
||||
|
||||
margin-top: $ix-marg-b;
|
||||
|
||||
.range-picker--date-picker {
|
||||
margin: $ix-marg-a;
|
||||
|
||||
.react-datepicker-wrapper,
|
||||
.react-datepicker__input-container {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.range-picker--popper-container {
|
||||
position: relative;
|
||||
}
|
||||
|
@ -44,6 +51,12 @@
|
|||
display: inline-flex;
|
||||
flex-direction: row;
|
||||
|
||||
.react-datepicker__month-container {
|
||||
background-color: $g3-castle;
|
||||
border-radius: 0 0 $ix-radius $ix-radius;
|
||||
width: 260px;
|
||||
}
|
||||
|
||||
.react-datepicker__navigation {
|
||||
outline: none;
|
||||
cursor: pointer;
|
||||
|
@ -58,8 +71,7 @@
|
|||
}
|
||||
|
||||
.range-picker--day {
|
||||
color: $c-void;
|
||||
font-weight: 400;
|
||||
color: $g7-graphite;
|
||||
|
||||
&:hover {
|
||||
background-color: $c-laser;
|
||||
|
@ -68,7 +80,7 @@
|
|||
}
|
||||
|
||||
.range-picker--day-in-month {
|
||||
color: $c-star;
|
||||
color: $g14-chromium;
|
||||
|
||||
&:hover {
|
||||
background-color: $c-laser;
|
||||
|
@ -86,6 +98,7 @@
|
|||
}
|
||||
|
||||
.react-datepicker__header {
|
||||
|
||||
border-radius: 0;
|
||||
padding: 0;
|
||||
border: none;
|
||||
|
@ -93,6 +106,7 @@
|
|||
|
||||
.react-datepicker__day-name {
|
||||
color: $c-rainforest;
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.react-datepicker__current-month {
|
||||
|
@ -138,8 +152,8 @@
|
|||
|
||||
.react-datepicker__time-box {
|
||||
width: 100%;
|
||||
background-color: $g2-kevlar;
|
||||
color: $g18-cloud;
|
||||
background-color: $g3-castle;
|
||||
color: $g14-chromium;
|
||||
|
||||
.react-datepicker__time-list {
|
||||
font-size: $ix-text-base;
|
||||
|
|
|
@ -26,7 +26,7 @@ interface State {
|
|||
topPosition?: number
|
||||
}
|
||||
|
||||
const PICKER_HEIGHT = 410
|
||||
const PICKER_HEIGHT = 416
|
||||
const HORIZONTAL_PADDING = 2
|
||||
const VERTICAL_PADDING = 15
|
||||
|
||||
|
|
|
@ -20,31 +20,42 @@ describe('toMinardTable', () => {
|
|||
const tables = parseResponse(CSV)
|
||||
const actual = toMinardTable(tables)
|
||||
const expected = {
|
||||
schemaConflicts: [],
|
||||
table: {
|
||||
columnTypes: {
|
||||
_field: 'categorical',
|
||||
_measurement: 'categorical',
|
||||
_start: 'temporal',
|
||||
_stop: 'temporal',
|
||||
_time: 'temporal',
|
||||
_value: 'numeric',
|
||||
cpu: 'categorical',
|
||||
host: 'categorical',
|
||||
result: 'categorical',
|
||||
},
|
||||
columns: {
|
||||
_field: ['usage_guest', 'usage_guest', 'usage_guest', 'usage_guest'],
|
||||
_measurement: ['cpu', 'cpu', 'cpu', 'cpu'],
|
||||
_start: [1549064312524, 1549064312524, 1549064312524, 1549064312524],
|
||||
_stop: [1549064342524, 1549064342524, 1549064342524, 1549064342524],
|
||||
_time: [1549064313000, 1549064323000, 1549064313000, 1549064323000],
|
||||
_value: [10, 20, 30, 40],
|
||||
cpu: ['cpu-total', 'cpu-total', 'cpu0', 'cpu0'],
|
||||
host: ['oox4k.local', 'oox4k.local', 'oox4k.local', 'oox4k.local'],
|
||||
result: ['_result', '_result', '_result', '_result'],
|
||||
result: {
|
||||
data: ['_result', '_result', '_result', '_result'],
|
||||
type: 'string',
|
||||
},
|
||||
_start: {
|
||||
data: [1549064312524, 1549064312524, 1549064312524, 1549064312524],
|
||||
type: 'time',
|
||||
},
|
||||
_stop: {
|
||||
data: [1549064342524, 1549064342524, 1549064342524, 1549064342524],
|
||||
type: 'time',
|
||||
},
|
||||
_time: {
|
||||
data: [1549064313000, 1549064323000, 1549064313000, 1549064323000],
|
||||
type: 'time',
|
||||
},
|
||||
_value: {data: [10, 20, 30, 40], type: 'float'},
|
||||
_field: {
|
||||
data: ['usage_guest', 'usage_guest', 'usage_guest', 'usage_guest'],
|
||||
type: 'string',
|
||||
},
|
||||
_measurement: {data: ['cpu', 'cpu', 'cpu', 'cpu'], type: 'string'},
|
||||
cpu: {
|
||||
data: ['cpu-total', 'cpu-total', 'cpu0', 'cpu0'],
|
||||
type: 'string',
|
||||
},
|
||||
host: {
|
||||
data: ['oox4k.local', 'oox4k.local', 'oox4k.local', 'oox4k.local'],
|
||||
type: 'string',
|
||||
},
|
||||
},
|
||||
length: 4,
|
||||
},
|
||||
schemaConflicts: [],
|
||||
}
|
||||
|
||||
expect(actual).toEqual(expected)
|
||||
|
@ -68,31 +79,42 @@ describe('toMinardTable', () => {
|
|||
const tables = parseResponse(CSV)
|
||||
const actual = toMinardTable(tables)
|
||||
const expected = {
|
||||
schemaConflicts: ['_value'],
|
||||
table: {
|
||||
columnTypes: {
|
||||
_field: 'categorical',
|
||||
_measurement: 'categorical',
|
||||
_start: 'temporal',
|
||||
_stop: 'temporal',
|
||||
_time: 'temporal',
|
||||
_value: 'numeric',
|
||||
cpu: 'categorical',
|
||||
host: 'categorical',
|
||||
result: 'categorical',
|
||||
},
|
||||
columns: {
|
||||
_field: ['usage_guest', 'usage_guest', 'usage_guest', 'usage_guest'],
|
||||
_measurement: ['cpu', 'cpu', 'cpu', 'cpu'],
|
||||
_start: [1549064312524, 1549064312524, 1549064312524, 1549064312524],
|
||||
_stop: [1549064342524, 1549064342524, 1549064342524, 1549064342524],
|
||||
_time: [1549064313000, 1549064323000, 1549064313000, 1549064323000],
|
||||
_value: [10, 20, undefined, undefined],
|
||||
cpu: ['cpu-total', 'cpu-total', 'cpu0', 'cpu0'],
|
||||
host: ['oox4k.local', 'oox4k.local', 'oox4k.local', 'oox4k.local'],
|
||||
result: ['_result', '_result', '_result', '_result'],
|
||||
result: {
|
||||
data: ['_result', '_result', '_result', '_result'],
|
||||
type: 'string',
|
||||
},
|
||||
_start: {
|
||||
data: [1549064312524, 1549064312524, 1549064312524, 1549064312524],
|
||||
type: 'time',
|
||||
},
|
||||
_stop: {
|
||||
data: [1549064342524, 1549064342524, 1549064342524, 1549064342524],
|
||||
type: 'time',
|
||||
},
|
||||
_time: {
|
||||
data: [1549064313000, 1549064323000, 1549064313000, 1549064323000],
|
||||
type: 'time',
|
||||
},
|
||||
_value: {data: [10, 20, undefined, undefined], type: 'float'},
|
||||
_field: {
|
||||
data: ['usage_guest', 'usage_guest', 'usage_guest', 'usage_guest'],
|
||||
type: 'string',
|
||||
},
|
||||
_measurement: {data: ['cpu', 'cpu', 'cpu', 'cpu'], type: 'string'},
|
||||
cpu: {
|
||||
data: ['cpu-total', 'cpu-total', 'cpu0', 'cpu0'],
|
||||
type: 'string',
|
||||
},
|
||||
host: {
|
||||
data: ['oox4k.local', 'oox4k.local', 'oox4k.local', 'oox4k.local'],
|
||||
type: 'string',
|
||||
},
|
||||
},
|
||||
length: 4,
|
||||
},
|
||||
schemaConflicts: ['_value'],
|
||||
}
|
||||
|
||||
expect(actual).toEqual(expected)
|
||||
|
|
|
@@ -1,5 +1,5 @@
import {FluxTable} from 'src/types'
import {Table, ColumnType} from 'src/minard'
import {Table, ColumnType, isNumeric} from 'src/minard'

export const GROUP_KEY_COL_NAME = 'group_key'

@@ -53,8 +53,7 @@ export interface ToMinardTableResult {

*/
export const toMinardTable = (tables: FluxTable[]): ToMinardTableResult => {
const columns = {}
const columnTypes = {}
const outColumns = {}
const schemaConflicts = []

let k = 0
@@ -68,34 +67,37 @@ export const toMinardTable = (tables: FluxTable[]): ToMinardTableResult => {
}

for (let j = 0; j < header.length; j++) {
const column = header[j]
const columnName = header[j]

if (column === '' || column === 'table') {
if (columnName === '' || columnName === 'table') {
// Ignore these columns
continue
}

const columnType = toMinardColumnType(table.dataTypes[column])
const columnType = toMinardColumnType(table.dataTypes[columnName])

let columnConflictsSchema = false

if (columnTypes[column] && columnTypes[column] !== columnType) {
schemaConflicts.push(column)
if (
outColumns[columnName] &&
outColumns[columnName].type !== columnType
) {
schemaConflicts.push(columnName)
columnConflictsSchema = true
} else if (!columnTypes[column]) {
columns[column] = []
columnTypes[column] = columnType
} else if (!outColumns[columnName]) {
outColumns[columnName] = {data: [], type: columnType}
}

for (let i = 1; i < table.data.length; i++) {
let value

if (column === 'result') {
if (columnName === 'result') {
value = table.result
} else if (!columnConflictsSchema) {
value = parseValue(table.data[i][j].trim(), columnType)
}

columns[column][k + i - 1] = value
outColumns[columnName].data[k + i - 1] = value
}
}

@@ -103,7 +105,7 @@ export const toMinardTable = (tables: FluxTable[]): ToMinardTableResult => {
}

const result: ToMinardTableResult = {
table: {columns, columnTypes},
table: {columns: outColumns, length: k},
schemaConflicts,
}

@@ -111,12 +113,12 @@ export const toMinardTable = (tables: FluxTable[]): ToMinardTableResult => {
}

const TO_MINARD_COLUMN_TYPE = {
boolean: ColumnType.Boolean,
unsignedLong: ColumnType.Numeric,
long: ColumnType.Numeric,
double: ColumnType.Numeric,
string: ColumnType.Categorical,
'dateTime:RFC3339': ColumnType.Temporal,
boolean: 'bool',
unsignedLong: 'uint',
long: 'int',
double: 'float',
string: 'string',
'dateTime:RFC3339': 'time',
}

const toMinardColumnType = (fluxDataType: string): ColumnType => {
@@ -138,24 +140,24 @@ const parseValue = (value: string, columnType: ColumnType): any => {
return NaN
}

if (columnType === ColumnType.Boolean && value === 'true') {
if (columnType === 'bool' && value === 'true') {
return true
}

if (columnType === ColumnType.Boolean && value === 'false') {
if (columnType === 'bool' && value === 'false') {
return false
}

if (columnType === ColumnType.Categorical) {
if (columnType === 'string') {
return value
}

if (columnType === ColumnType.Numeric) {
return Number(value)
if (columnType === 'time') {
return Date.parse(value)
}

if (columnType === ColumnType.Temporal) {
return Date.parse(value)
if (isNumeric(columnType)) {
return Number(value)
}

return null
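In short, toMinardTable now keys its output by column name and stores each column as a {data, type} pair, while parseValue switches on the plain string types ('bool', 'string', 'time') and defers the numeric kinds to isNumeric. A minimal sketch of consuming that result; the import path and the fluxTables variable are assumptions, not part of the diff:

// Sketch: reading the reworked ToMinardTableResult.
import {toMinardTable} from 'src/shared/utils/toMinardTable' // assumed path
import {FluxTable} from 'src/types'

const summarize = (fluxTables: FluxTable[]) => {
  const {table, schemaConflicts} = toMinardTable(fluxTables)

  if (schemaConflicts.length) {
    // Columns whose type differed between input tables are reported, not merged
    console.warn(`conflicting columns: ${schemaConflicts.join(', ')}`)
  }

  const value = table.columns['_value'] // hypothetical column name

  if (value && value.type === 'float') {
    const total = value.data.reduce((sum: number, v) => sum + (Number(v) || 0), 0)
    console.log(`rows: ${table.length}, sum of _value: ${total}`)
  }
}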
@@ -130,6 +130,7 @@ const NEW_VIEW_CREATORS = {
shape: ViewShape.ChronografV2,
xColumn: '_value',
xDomain: null,
xAxisLabel: '',
fillColumns: null,
position: HistogramPosition.Stacked,
binCount: 30,
@@ -249,9 +249,7 @@ export const deleteTask = (task: Task) => async dispatch => {

export const cloneTask = (task: Task, _) => async dispatch => {
try {
// const allTaskNames = tasks.map(t => t.name)
// const clonedName = incrementCloneName(allTaskNames, task.name)
await client.tasks.create(task.orgID, task.flux)
await client.tasks.clone(task.id)

dispatch(notify(taskCloneSuccess(task.name)))
dispatch(populateTasks())
@@ -384,7 +382,7 @@ export const saveNewScript = (
org = orgs[0]
}

await client.tasks.create(getDeep<string>(org, 'id', ''), scriptWithOptions)
await client.tasks.create(org.name, scriptWithOptions)

dispatch(setNewScript(''))
dispatch(clearTask())
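The clone action now defers to the API's clone endpoint instead of re-creating the task from its Flux source, and saveNewScript passes the organization name rather than an ID. A minimal sketch of the new clone flow in isolation; the client import path and the error handling are assumptions:

// Sketch: the essential call behind the updated cloneTask thunk.
import {client} from 'src/utils/api' // assumed path for the API client

export const cloneTaskByID = async (taskID: string): Promise<void> => {
  try {
    // One call replaces the old create-from-flux approach shown above
    await client.tasks.clone(taskID)
  } catch (e) {
    console.error(`failed to clone task ${taskID}`, e)
    throw e
  }
}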
@@ -44,7 +44,7 @@ export class TaskRow extends PureComponent<Props & WithRouterProps> {
const {task, onDelete} = this.props

return (
<IndexList.Row disabled={!this.isTaskActive}>
<IndexList.Row disabled={!this.isTaskActive} testID="task-row">
<IndexList.Cell>
<ComponentSpacer
stackChildren={Stack.Columns}
@@ -4,6 +4,7 @@ import {Page} from 'src/pageLayout'

// Components
import {SlideToggle, ComponentSize} from '@influxdata/clockface'
import {Tabs, ComponentSpacer, Alignment, Stack} from 'src/clockface'
import SearchWidget from 'src/shared/components/search_widget/SearchWidget'
import TaskOrgDropdown from 'src/tasks/components/TasksOrgDropdown'
import AddResourceDropdown from 'src/shared/components/AddResourceDropdown'
@@ -17,16 +18,16 @@ interface Props {
showInactive: boolean
toggleOverlay: () => void
showOrgDropdown?: boolean
showFilter?: boolean
isFullPage?: boolean
}

export default class TasksHeader extends PureComponent<Props> {
public static defaultProps: {
showOrgDropdown: boolean
showFilter: boolean
isFullPage: boolean
} = {
showOrgDropdown: true,
showFilter: true,
isFullPage: true,
}

public render() {
@@ -35,29 +36,51 @@ export default class TasksHeader extends PureComponent<Props> {
setShowInactive,
showInactive,
toggleOverlay,
isFullPage,
} = this.props

if (isFullPage) {
return (
<Page.Header fullWidth={false}>
<Page.Header.Left>
<Page.Title title={this.pageTitle} />
</Page.Header.Left>
<Page.Header.Right>
<SlideToggle.Label text="Show Inactive" />
<SlideToggle
active={showInactive}
size={ComponentSize.ExtraSmall}
onChange={setShowInactive}
/>
{this.filterSearch}
{this.orgDropDown}
<AddResourceDropdown
onSelectNew={onCreateTask}
onSelectImport={toggleOverlay}
resourceName="Task"
/>
</Page.Header.Right>
</Page.Header>
)
}

return (
<Page.Header fullWidth={false}>
<Page.Header.Left>
<Page.Title title={this.pageTitle} />
</Page.Header.Left>
<Page.Header.Right>
<Tabs.TabContentsHeader>
{this.filterSearch}
<ComponentSpacer align={Alignment.Right} stackChildren={Stack.Columns}>
<SlideToggle.Label text="Show Inactive" />
<SlideToggle
active={showInactive}
size={ComponentSize.ExtraSmall}
onChange={setShowInactive}
/>
{this.filterSearch}
{this.orgDropDown}
<AddResourceDropdown
onSelectNew={onCreateTask}
onSelectImport={toggleOverlay}
resourceName="Task"
/>
</Page.Header.Right>
</Page.Header>
</ComponentSpacer>
</Tabs.TabContentsHeader>
)
}

@@ -71,17 +94,14 @@ export default class TasksHeader extends PureComponent<Props> {
}

private get filterSearch(): JSX.Element {
const {setSearchTerm, showFilter} = this.props
const {setSearchTerm} = this.props

if (showFilter) {
return (
<SearchWidget
placeholderText="Filter tasks by name..."
onSearch={setSearchTerm}
/>
)
}
return <></>
return (
<SearchWidget
placeholderText="Filter tasks by name..."
onSearch={setSearchTerm}
/>
)
}

private get orgDropDown(): JSX.Element {
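The header now takes a single isFullPage flag in place of showOrgDropdown and showFilter, branching between the full Page.Header layout and the embedded Tabs.TabContentsHeader layout. A minimal usage sketch; the handler props are placeholders and the full Props list is not visible in this hunk:

// Sketch: rendering both variants of the updated header.
import React from 'react'
import TasksHeader from 'src/tasks/components/TasksHeader' // assumed path

const TasksHeaderSketch = (props: any) => (
  <>
    {/* Full tasks page: defaults to isFullPage={true} (Page.Header layout) */}
    <TasksHeader
      onCreateTask={props.onCreateTask}
      setSearchTerm={props.setSearchTerm}
      setShowInactive={props.setShowInactive}
      showInactive={props.showInactive}
      toggleOverlay={props.toggleOverlay}
    />
    {/* Embedded (e.g. inside an org view): compact Tabs.TabContentsHeader layout */}
    <TasksHeader
      isFullPage={false}
      onCreateTask={props.onCreateTask}
      setSearchTerm={props.setSearchTerm}
      setShowInactive={props.setShowInactive}
      showInactive={props.showInactive}
      toggleOverlay={props.toggleOverlay}
    />
  </>
)

export default TasksHeaderSketch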
@@ -3,7 +3,7 @@
exports[`Tasks.Components.TaskRow renders 1`] = `
<IndexListRow
disabled={false}
testID="table-row"
testID="task-row"
>
<IndexListRowCell
alignment="left"
@@ -152,6 +152,7 @@ from(bucket: \\"inbucket\\")
shape="none"
size="xs"
status="default"
testID="confirmation-button"
text="Delete"
/>
</Component>
@@ -15,7 +15,7 @@ import {
} from 'src/types/v2/dashboards'
import {TimeMachineTab} from 'src/types/v2/timeMachine'
import {Color} from 'src/types/colors'
import {Table, HistogramPosition, ColumnType} from 'src/minard'
import {Table, HistogramPosition, isNumeric} from 'src/minard'

export type Action =
| QueryBuilderAction
@@ -61,6 +61,7 @@ export type Action =
| SetHistogramPositionAction
| TableLoadedAction
| SetXDomainAction
| SetXAxisLabelAction

interface SetActiveTimeMachineAction {
type: 'SET_ACTIVE_TIME_MACHINE'
@@ -511,8 +512,8 @@ interface TableLoadedAction {
}

export const tableLoaded = (table: Table): TableLoadedAction => {
const availableXColumns = Object.entries(table.columnTypes)
.filter(([__, type]) => type === ColumnType.Numeric)
const availableXColumns = Object.entries(table.columns)
.filter(([__, {type}]) => isNumeric(type) && type !== 'time')
.map(([name]) => name)

const invalidGroupColumns = new Set(['_value', '_start', '_stop', '_time'])
@@ -539,3 +540,13 @@ export const setXDomain = (xDomain: [number, number]): SetXDomainAction => ({
type: 'SET_VIEW_X_DOMAIN',
payload: {xDomain},
})

interface SetXAxisLabelAction {
type: 'SET_X_AXIS_LABEL'
payload: {xAxisLabel: string}
}

export const setXAxisLabel = (xAxisLabel: string): SetXAxisLabelAction => ({
type: 'SET_X_AXIS_LABEL',
payload: {xAxisLabel},
})
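tableLoaded now derives the selectable x columns from table.columns via isNumeric (excluding 'time'), and a SET_X_AXIS_LABEL action joins the union. A minimal sketch of a reducer consuming the new action; the state slice shown here is assumed, only the action type and creator come from the diff:

// Sketch: handling SET_X_AXIS_LABEL somewhere in the view reducer.
import {Action} from 'src/timeMachine/actions' // assumed path

const xAxisLabel = (state: string = '', action: Action): string => {
  switch (action.type) {
    case 'SET_X_AXIS_LABEL':
      return action.payload.xAxisLabel
    default:
      return state
  }
}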
@@ -2,6 +2,7 @@
import {queryBuilderFetcher} from 'src/timeMachine/apis/QueryBuilderFetcher'

// Utils
import {getActiveOrg} from 'src/organizations/selectors'
import {
getActiveQuerySource,
getActiveQuery,
@@ -203,11 +204,13 @@ export const loadBuckets = () => async (
dispatch: Dispatch<Action>,
getState: GetState
) => {
const queryURL = getActiveQuerySource(getState()).links.query
const orgID = getActiveOrg(getState()).id

dispatch(setBuilderBucketsStatus(RemoteDataState.Loading))

try {
const queryURL = getActiveQuerySource(getState()).links.query
const buckets = await queryBuilderFetcher.findBuckets(queryURL)
const buckets = await queryBuilderFetcher.findBuckets(queryURL, orgID)
const selectedBucket = getActiveQuery(getState()).builderConfig.buckets[0]

dispatch(setBuilderBuckets(buckets))
@@ -247,6 +250,7 @@ export const loadTagSelector = (index: number) => async (

const tagPredicates = tags.slice(0, index)
const queryURL = getActiveQuerySource(getState()).links.query
const orgID = getActiveOrg(getState()).id

dispatch(setBuilderTagKeysStatus(index, RemoteDataState.Loading))

@@ -257,6 +261,7 @@ export const loadTagSelector = (index: number) => async (
const keys = await queryBuilderFetcher.findKeys(
index,
queryURL,
orgID,
buckets[0],
tagPredicates,
searchTerm
@@ -299,6 +304,7 @@ const loadTagSelectorValues = (index: number) => async (
const {buckets, tags} = getActiveQuery(getState()).builderConfig
const tagPredicates = tags.slice(0, index)
const queryURL = getActiveQuerySource(getState()).links.query
const orgID = getActiveOrg(getState()).id

dispatch(setBuilderTagValuesStatus(index, RemoteDataState.Loading))

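Each builder thunk now also reads the active organization's ID and threads it through to queryBuilderFetcher. The selector those calls rely on is roughly the following; this is a sketch of the assumed shape, and the real implementation in src/organizations/selectors may differ:

// Sketch: the minimal selector shape these thunks depend on.
interface SketchAppState {
  orgs: Array<{id: string}>
}

export const getActiveOrgSketch = (state: SketchAppState) => state.orgs[0]

// e.g. with the mock state added to setInitialState further down,
// getActiveOrgSketch(state).id === 'foo'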
@@ -19,7 +19,7 @@ class QueryBuilderFetcher {
private findValuesCache: {[key: string]: string[]} = {}
private findBucketsCache: {[key: string]: string[]} = {}

public async findBuckets(url: string): Promise<string[]> {
public async findBuckets(url: string, orgID: string): Promise<string[]> {
this.cancelFindBuckets()

const cacheKey = JSON.stringify([...arguments])
@@ -29,7 +29,7 @@ class QueryBuilderFetcher {
return Promise.resolve(cachedResult)
}

const pendingResult = findBuckets(url)
const pendingResult = findBuckets(url, orgID)

pendingResult.promise.then(result => {
this.findBucketsCache[cacheKey] = result
@@ -47,6 +47,7 @@ class QueryBuilderFetcher {
public async findKeys(
index: number,
url: string,
orgID: string,
bucket: string,
tagsSelections: BuilderConfig['tags'],
searchTerm: string = ''
@@ -60,7 +61,13 @@ class QueryBuilderFetcher {
return Promise.resolve(cachedResult)
}

const pendingResult = findKeys(url, bucket, tagsSelections, searchTerm)
const pendingResult = findKeys(
url,
orgID,
bucket,
tagsSelections,
searchTerm
)

this.findKeysQueries[index] = pendingResult

@@ -80,6 +87,7 @@ class QueryBuilderFetcher {
public async findValues(
index: number,
url: string,
orgID: string,
bucket: string,
tagsSelections: BuilderConfig['tags'],
key: string,
@@ -96,6 +104,7 @@ class QueryBuilderFetcher {

const pendingResult = findValues(
url,
orgID,
bucket,
tagsSelections,
key,
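Because the fetcher builds its cache keys from the full argument list, adding orgID also partitions cached results per organization. A small sketch of that effect; cacheKeyFor simply mirrors the JSON.stringify([...arguments]) pattern used above:

// Sketch: how the cache key changes once orgID is part of the call.
const cacheKeyFor = (...args: unknown[]): string => JSON.stringify(args)

const keyA = cacheKeyFor('/api/v2/query', 'org-a') // '["/api/v2/query","org-a"]'
const keyB = cacheKeyFor('/api/v2/query', 'org-b') // different key, so a separate cache entry

console.log(keyA !== keyB) // true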
@@ -14,12 +14,12 @@ export const LIMIT = 200

type CancelableQuery = WrappedCancelablePromise<string[]>

export function findBuckets(url: string): CancelableQuery {
export function findBuckets(url: string, orgID: string): CancelableQuery {
const query = `buckets()
|> sort(columns: ["name"])
|> limit(n: ${LIMIT})`

const {promise, cancel} = executeQuery(url, query, InfluxLanguage.Flux)
const {promise, cancel} = executeQuery(url, orgID, query, InfluxLanguage.Flux)

return {
promise: promise.then(resp => extractCol(resp, 'name')),
@@ -29,6 +29,7 @@ export function findBuckets(url: string): CancelableQuery {

export function findKeys(
url: string,
orgID: string,
bucket: string,
tagsSelections: BuilderConfig['tags'],
searchTerm: string = ''
@@ -49,7 +50,7 @@ v1.tagKeys(bucket: "${bucket}", predicate: ${tagFilters}, start: -${SEARCH_DURAT
|> sort()
|> limit(n: ${LIMIT})`

const {promise, cancel} = executeQuery(url, query, InfluxLanguage.Flux)
const {promise, cancel} = executeQuery(url, orgID, query, InfluxLanguage.Flux)

return {
promise: promise.then(resp => extractCol(resp, '_value')),
@@ -59,6 +60,7 @@ v1.tagKeys(bucket: "${bucket}", predicate: ${tagFilters}, start: -${SEARCH_DURAT

export function findValues(
url: string,
orgID: string,
bucket: string,
tagsSelections: BuilderConfig['tags'],
key: string,
@@ -73,7 +75,7 @@ v1.tagValues(bucket: "${bucket}", tag: "${key}", predicate: ${tagFilters}, start
|> limit(n: ${LIMIT})
|> sort()`

const {promise, cancel} = executeQuery(url, query, InfluxLanguage.Flux)
const {promise, cancel} = executeQuery(url, orgID, query, InfluxLanguage.Flux)

return {
promise: promise.then(resp => extractCol(resp, '_value')),
|
|||
[source.id]: source,
|
||||
},
|
||||
},
|
||||
orgs: [
|
||||
{
|
||||
id: 'foo',
|
||||
},
|
||||
],
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@@ -0,0 +1,8 @@
@import "src/style/modules";

.search-bar {
padding: $ix-marg-b;
flex-shrink: 0;
border-bottom: $ix-border solid $g4-onyx;
background-color: $g3-castle;
}
Some files were not shown because too many files have changed in this diff.