build(flux): update flux to v0.173.0 (#23505)

* build(flux): update flux to v0.173.0

* fix(fluxfmt): update tests for newline in Flux fmt

Flux fmt now explicitly adds a newline to the end of a file; the tests are
updated accordingly.
flux-staging/1f7d0bd2
Nathaniel Cook 2022-06-30 11:39:07 -06:00 committed by GitHub
parent e7cf52298f
commit 07bab31fe6
15 changed files with 82 additions and 44 deletions
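Every test hunk below applies the same pattern: the expected Flux strings gain a trailing newline, because the formatter now terminates its output with one. A minimal, self-contained Go sketch of that expectation follows; formatFlux is a hypothetical stand-in for the formatter, not an API from this repository or from the flux module.

// Hypothetical sketch only: formatFlux mimics the relevant behaviour of
// Flux fmt after this upgrade (formatted source ends with one newline).
package fluxfmt_test

import (
	"strings"
	"testing"
)

func formatFlux(src string) string {
	// Normalize to exactly one trailing newline, as the formatter now does.
	return strings.TrimRight(src, "\n") + "\n"
}

func TestFormattedFluxEndsWithNewline(t *testing.T) {
	got := formatFlux(`from(bucket: "b") |> range(start: -1h)`)
	want := "from(bucket: \"b\") |> range(start: -1h)\n" // expected strings now carry the trailing \n

	if got != want {
		t.Fatalf("want %q, got %q", want, got)
	}
}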

@@ -317,7 +317,7 @@ func CreateCheck(
 Organization: "theorg",
 OwnerID: MustIDBase16("020f755c3c082001"),
 Status: "active",
-Flux: "import \"influxdata/influxdb/monitor\"\nimport \"experimental\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\") |> range(start: -1h) |> filter(fn: (r) => r._field == \"usage_user\")\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {_check_id: \"020f755c3c082000\", _check_name: \"name1\", _type: \"deadman\", tags: {k1: \"v1\", k2: \"v2\"}}\ncrit = (r) => r[\"dead\"]\nmessageFn = (r) => \"msg1\"\n\ndata\n |> v1[\"fieldsAsCols\"]()\n |> monitor[\"deadman\"](t: experimental[\"subDuration\"](from: now(), d: 21s))\n |> monitor[\"check\"](data: check, messageFn: messageFn, crit: crit)",
+Flux: "import \"influxdata/influxdb/monitor\"\nimport \"experimental\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\") |> range(start: -1h) |> filter(fn: (r) => r._field == \"usage_user\")\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {_check_id: \"020f755c3c082000\", _check_name: \"name1\", _type: \"deadman\", tags: {k1: \"v1\", k2: \"v2\"}}\ncrit = (r) => r[\"dead\"]\nmessageFn = (r) => \"msg1\"\n\ndata\n |> v1[\"fieldsAsCols\"]()\n |> monitor[\"deadman\"](t: experimental[\"subDuration\"](from: now(), d: 21s))\n |> monitor[\"check\"](data: check, messageFn: messageFn, crit: crit)\n",
 Every: "1m",
 },
 },
@@ -468,7 +468,8 @@ data
 ok: ok,
 warn: warn,
 info: info,
-)`,
+)
+`,
 },
 },
 },
@@ -605,7 +606,7 @@ data
 OwnerID: MustIDBase16("020f755c3c082001"),
 Status: "active",
 Every: "1m",
-Flux: "import \"influxdata/influxdb/monitor\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\") |> range(start: -1m) |> filter(fn: (r) => r._field == \"usage_user\")\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {_check_id: \"020f755c3c082001\", _check_name: \"name1\", _type: \"threshold\", tags: {k11: \"v11\", k22: \"v22\"}}\nmessageFn = (r) => \"msg2\"\n\ndata |> v1[\"fieldsAsCols\"]() |> monitor[\"check\"](data: check, messageFn: messageFn)",
+Flux: "import \"influxdata/influxdb/monitor\"\nimport \"influxdata/influxdb/v1\"\n\ndata = from(bucket: \"telegraf\") |> range(start: -1m) |> filter(fn: (r) => r._field == \"usage_user\")\n\noption task = {name: \"name1\", every: 1m}\n\ncheck = {_check_id: \"020f755c3c082001\", _check_name: \"name1\", _type: \"threshold\", tags: {k11: \"v11\", k22: \"v22\"}}\nmessageFn = (r) => \"msg2\"\n\ndata |> v1[\"fieldsAsCols\"]() |> monitor[\"check\"](data: check, messageFn: messageFn)\n",
 },
 },
 checks: []influxdb.Check{

@@ -3805,6 +3805,7 @@ spec:
 |> filter(fn: (r) => r._value > params.minVal)
 |> aggregateWindow(every: v.windowPeriod, fn: max)
 |> yield(name: params.name)
 params:
 - key: bucket
 default: "bar"
@@ -3849,7 +3850,8 @@ from(bucket: params.bucket)
 |> filter(fn: (r) => r.floater == params.floatVal)
 |> filter(fn: (r) => r._value > params.minVal)
 |> aggregateWindow(every: v.windowPeriod, fn: max)
-|> yield(name: params.name)`
+|> yield(name: params.name)
+`
 assert.Equal(t, expectedQuery, props.Queries[0].Text)
 assert.Equal(t, "advanced", props.Queries[0].EditMode)
@@ -4066,6 +4068,7 @@ spec:
 |> filter(fn: (r) => r._value > params.minVal)
 |> aggregateWindow(every: 1m, fn: max)
 |> yield(name: params.name)
 params:
 - key: bucket
 default: "bar"
@@ -4098,7 +4101,8 @@ from(bucket: params.bucket)
 |> filter(fn: (r) => r.floater == params.floatVal)
 |> filter(fn: (r) => r._value > params.minVal)
 |> aggregateWindow(every: 1m, fn: max)
-|> yield(name: params.name)`
+|> yield(name: params.name)
+`
 assert.Equal(t, expectedQuery, actual.Query)
 }

go.mod
@@ -29,7 +29,7 @@ require (
 github.com/google/go-jsonnet v0.17.0
 github.com/hashicorp/vault/api v1.0.2
 github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe
-github.com/influxdata/flux v0.172.0
+github.com/influxdata/flux v0.173.0
 github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69
 github.com/influxdata/influx-cli/v2 v2.2.1-0.20220318222112-88ba3464cd07
 github.com/influxdata/influxql v1.1.1-0.20211004132434-7e7d61973256

go.sum
@@ -497,8 +497,8 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH
 github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
 github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe h1:7j4SdN/BvQwN6WoUq7mv0kg5U9NhnFBxPGMafYRKym0=
 github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe/go.mod h1:XabtPPW2qsCg0tl+kjaPU+cFS+CjQXEXbT1VJvHT4og=
-github.com/influxdata/flux v0.172.0 h1:aDLGOsQAVGlABAOHRpq9Nuog9Gv3bDN2hcnhW50kWWk=
-github.com/influxdata/flux v0.172.0/go.mod h1:fNtcZ8tqtVDjwWYcPRvCdlY5t3n+NYCc5xunKCmigQA=
+github.com/influxdata/flux v0.173.0 h1:b0kiACQbNO52oonCNiH5tbwOnUkbG0ON98sC2J6zwfc=
+github.com/influxdata/flux v0.173.0/go.mod h1:fNtcZ8tqtVDjwWYcPRvCdlY5t3n+NYCc5xunKCmigQA=
 github.com/influxdata/gosnowflake v1.6.9 h1:BhE39Mmh8bC+Rvd4QQsP2gHypfeYIH1wqW1AjGWxxrE=
 github.com/influxdata/gosnowflake v1.6.9/go.mod h1:9W/BvCXOKx2gJtQ+jdi1Vudev9t9/UDOEHnlJZ/y1nU=
 github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 h1:WQsmW0fXO4ZE/lFGIE84G6rIV5SJN3P3sjIXAP1a8eU=

@@ -418,7 +418,7 @@ func TestService_handleGetCheckQuery(t *testing.T) {
 wants: wants{
 statusCode: http.StatusOK,
 contentType: "application/json; charset=utf-8",
-body: "{\"flux\":\"import \\\"influxdata/influxdb/monitor\\\"\\nimport \\\"influxdata/influxdb/v1\\\"\\n\\ndata =\\n from(bucket: \\\"foo\\\")\\n |\\u003e range(start: -1h)\\n |\\u003e filter(fn: (r) =\\u003e r._field == \\\"usage_idle\\\")\\n |\\u003e aggregateWindow(every: 1h, fn: mean, createEmpty: false)\\n\\noption task = {name: \\\"hello\\\", every: 1h}\\n\\ncheck = {_check_id: \\\"020f755c3c082000\\\", _check_name: \\\"hello\\\", _type: \\\"threshold\\\", tags: {aaa: \\\"vaaa\\\", bbb: \\\"vbbb\\\"}}\\nok = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003e 10.0\\ninfo = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0\\nwarn = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0\\ncrit = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0\\nmessageFn = (r) =\\u003e \\\"whoa! {check.yeah}\\\"\\n\\ndata\\n |\\u003e v1[\\\"fieldsAsCols\\\"]()\\n |\\u003e monitor[\\\"check\\\"](\\n data: check,\\n messageFn: messageFn,\\n ok: ok,\\n info: info,\\n warn: warn,\\n crit: crit,\\n )\"}\n",
+body: "{\"flux\":\"import \\\"influxdata/influxdb/monitor\\\"\\nimport \\\"influxdata/influxdb/v1\\\"\\n\\ndata =\\n from(bucket: \\\"foo\\\")\\n |\\u003e range(start: -1h)\\n |\\u003e filter(fn: (r) =\\u003e r._field == \\\"usage_idle\\\")\\n |\\u003e aggregateWindow(every: 1h, fn: mean, createEmpty: false)\\n\\noption task = {name: \\\"hello\\\", every: 1h}\\n\\ncheck = {_check_id: \\\"020f755c3c082000\\\", _check_name: \\\"hello\\\", _type: \\\"threshold\\\", tags: {aaa: \\\"vaaa\\\", bbb: \\\"vbbb\\\"}}\\nok = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003e 10.0\\ninfo = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0\\nwarn = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0\\ncrit = (r) =\\u003e r[\\\"usage_idle\\\"] \\u003c 40.0 and r[\\\"usage_idle\\\"] \\u003e 10.0\\nmessageFn = (r) =\\u003e \\\"whoa! {check.yeah}\\\"\\n\\ndata\\n |\\u003e v1[\\\"fieldsAsCols\\\"]()\\n |\\u003e monitor[\\\"check\\\"](\\n data: check,\\n messageFn: messageFn,\\n ok: ok,\\n info: info,\\n warn: warn,\\n crit: crit,\\n )\\n\"}\n",
 },
 },
 }

@@ -75,7 +75,8 @@ messageFn = (r) => "whoa! {r[\"dead\"]}"
 data
 |> v1["fieldsAsCols"]()
 |> monitor["deadman"](t: experimental["subDuration"](from: now(), d: 60s))
-|> monitor["check"](data: check, messageFn: messageFn, info: info)`,
+|> monitor["check"](data: check, messageFn: messageFn, info: info)
+`,
 },
 },
 {
@@ -129,7 +130,8 @@ messageFn = (r) => "whoa! {r[\"dead\"]}"
 data
 |> v1["fieldsAsCols"]()
 |> monitor["deadman"](t: experimental["subDuration"](from: now(), d: 60s))
-|> monitor["check"](data: check, messageFn: messageFn, info: info)`,
+|> monitor["check"](data: check, messageFn: messageFn, info: info)
+`,
 },
 },
 {
@@ -183,7 +185,8 @@ messageFn = (r) => "whoa! {r[\"dead\"]}"
 data
 |> v1["fieldsAsCols"]()
 |> monitor["deadman"](t: experimental["subDuration"](from: now(), d: 60s))
-|> monitor["check"](data: check, messageFn: messageFn, info: info)`,
+|> monitor["check"](data: check, messageFn: messageFn, info: info)
+`,
 },
 },
 }

@@ -104,7 +104,8 @@ data
 info: info,
 warn: warn,
 crit: crit,
-)`,
+)
+`,
 },
 },
 {
@@ -184,7 +185,8 @@ data
 info: info,
 warn: warn,
 crit: crit,
-)`,
+)
+`,
 },
 },
 {
@@ -264,7 +266,8 @@ data
 info: info,
 warn: warn,
 crit: crit,
-)`,
+)
+`,
 },
 },
 {
@@ -344,7 +347,8 @@ data
 info: info,
 warn: warn,
 crit: crit,
-)`,
+)
+`,
 },
 },
 }

@@ -44,7 +44,8 @@ all_statuses
 return {headers: headers, data: json["encode"](v: body)}
 },
 ),
-)`
+)
+`
 s := &rule.HTTP{
 Base: rule.Base{
@@ -120,7 +121,8 @@ all_statuses
 return {headers: headers, data: json["encode"](v: body)}
 },
 ),
-)`
+)
+`
 s := &rule.HTTP{
 Base: rule.Base{
 ID: 1,
@@ -198,7 +200,8 @@ all_statuses
 return {headers: headers, data: json["encode"](v: body)}
 },
 ),
-)`
+)
+`
 s := &rule.HTTP{
 Base: rule.Base{
@@ -274,7 +277,8 @@ all_statuses
 return {headers: headers, data: json["encode"](v: body)}
 },
 ),
-)`
+)
+`
 s := &rule.HTTP{
 Base: rule.Base{

@@ -97,7 +97,8 @@ all_statuses
 timestamp: time(v: r["_source_timestamp"]),
 }),
 ),
-)`,
+)
+`,
 },
 {
 name: "notify on info to crit",
@@ -180,7 +181,8 @@ all_statuses
 timestamp: time(v: r["_source_timestamp"]),
 }),
 ),
-)`,
+)
+`,
 },
 {
 name: "notify on crit or ok to warn",
@@ -270,7 +272,8 @@ all_statuses
 timestamp: time(v: r["_source_timestamp"]),
 }),
 ),
-)`,
+)
+`,
 },
 }

@@ -332,7 +332,8 @@ all_statuses
 "good",
 }),
 ),
-)`,
+)
+`,
 Every: "1h",
 },
 },

@@ -75,7 +75,8 @@ all_statuses
 "good",
 }),
 ),
-)`,
+)
+`,
 rule: &rule.Slack{
 Channel: "bar",
 MessageTemplate: "blah",
@@ -157,7 +158,8 @@ all_statuses
 "good",
 }),
 ),
-)`,
+)
+`,
 rule: &rule.Slack{
 Channel: "bar",
 MessageTemplate: "blah",
@@ -244,7 +246,8 @@ all_statuses
 "good",
 }),
 ),
-)`,
+)
+`,
 rule: &rule.Slack{
 Channel: "bar",
 MessageTemplate: "blah",
@@ -333,7 +336,8 @@ all_statuses
 "good",
 }),
 ),
-)`,
+)
+`,
 rule: &rule.Slack{
 Channel: "bar",
 MessageTemplate: "blah",

@@ -133,7 +133,8 @@ all_statuses
 silent: if r["_level"] == "crit" then true else if r["_level"] == "warn" then true else false,
 }),
 ),
-)`,
+)
+`,
 },
 {
 name: "with DisableWebPagePreview and ParseMode",
@@ -208,7 +209,8 @@ all_statuses
 silent: if r["_level"] == "crit" then true else if r["_level"] == "warn" then true else false,
 }),
 ),
-)`,
+)
+`,
 },
 }

@@ -2613,7 +2613,8 @@ from(bucket: params.bucket)
 |> filter(fn: (r) => r.floater == params.floatVal)
 |> filter(fn: (r) => r._value > params.minVal)
 |> aggregateWindow(every: v.windowPeriod, fn: max)
-|> yield(name: params.name)`
+|> yield(name: params.name)
+`
 q := props.Queries[0]
 assert.Equal(t, queryText, q.Text)
@@ -3610,7 +3611,8 @@ from(bucket: params.bucket)
 |> filter(fn: (r) => r.floater == params.floatVal)
 |> filter(fn: (r) => r._value > params.minVal)
 |> aggregateWindow(every: v.windowPeriod, fn: max)
-|> yield(name: params.name)`
+|> yield(name: params.name)
+`
 assert.Equal(t, queryText, actual.Query)
@@ -3730,7 +3732,8 @@ from(bucket: "rucket_1")
 |> filter(fn: (r) => r._measurement == "cpu")
 |> filter(fn: (r) => r._field == "usage_idle")
 |> aggregateWindow(every: 1m, fn: mean)
-|> yield(name: "mean")`
+|> yield(name: "mean")
+`
 assert.Equal(t, queryText, actual[0].Query)
@@ -3759,7 +3762,8 @@ from(bucket: "rucket_1")
 |> filter(fn: (r) => r._measurement == params.this)
 |> filter(fn: (r) => r._field == "usage_idle")
 |> aggregateWindow(every: 1m, fn: mean)
-|> yield(name: "mean")`
+|> yield(name: "mean")
+`
 assert.Equal(t, queryText, actual[0].Query)

@@ -419,7 +419,7 @@ func testTaskCRUD(t *testing.T, sys *System) {
 // Update task: just update an option.
 newStatus = string(taskmodel.TaskActive)
-newFlux = "option task = {name: \"task-changed #98\", cron: \"* * * * *\", offset: 5s, concurrency: 100}\n\n// This comment should persist.\nfrom(bucket: \"b\")\n |> to(bucket: \"two\", orgID: \"000000000000000\")"
+newFlux = "option task = {name: \"task-changed #98\", cron: \"* * * * *\", offset: 5s, concurrency: 100}\n\n// This comment should persist.\nfrom(bucket: \"b\")\n |> to(bucket: \"two\", orgID: \"000000000000000\")\n"
 f, err = sys.TaskService.UpdateTask(authorizedCtx, origID, taskmodel.TaskUpdate{Options: options.Options{Name: "task-changed #98"}})
 if err != nil {
 t.Fatal(err)
@@ -434,7 +434,7 @@ func testTaskCRUD(t *testing.T, sys *System) {
 // Update task: switch to every.
 newStatus = string(taskmodel.TaskActive)
-newFlux = "option task = {name: \"task-changed #98\", every: 30s, offset: 5s, concurrency: 100}\n\n// This comment should persist.\nfrom(bucket: \"b\")\n |> to(bucket: \"two\", orgID: \"000000000000000\")"
+newFlux = "option task = {name: \"task-changed #98\", every: 30s, offset: 5s, concurrency: 100}\n\n// This comment should persist.\nfrom(bucket: \"b\")\n |> to(bucket: \"two\", orgID: \"000000000000000\")\n"
 f, err = sys.TaskService.UpdateTask(authorizedCtx, origID, taskmodel.TaskUpdate{Options: options.Options{Every: *(options.MustParseDuration("30s"))}})
 if err != nil {
 t.Fatal(err)
@@ -670,7 +670,8 @@ func testTaskOptionsUpdateFull(t *testing.T, sys *System) {
 script := `option task = {name: "task-Options-Update", cron: "* * * * *", concurrency: 100, offset: 10s}
 from(bucket: "b")
-|> to(bucket: "two", orgID: "000000000000000")`
+|> to(bucket: "two", orgID: "000000000000000")
+`
 cr := creds(t, sys)
@@ -688,7 +689,8 @@ from(bucket: "b")
 expectedFlux := `option task = {name: "task-Options-Update", every: 10s, concurrency: 100}
 from(bucket: "b")
-|> to(bucket: "two", orgID: "000000000000000")`
+|> to(bucket: "two", orgID: "000000000000000")
+`
 f, err := sys.TaskService.UpdateTask(authorizedCtx, task.ID, taskmodel.TaskUpdate{Options: options.Options{Offset: &options.Duration{}, Every: *(options.MustParseDuration("10s"))}})
 if err != nil {
 t.Fatal(err)
@@ -706,7 +708,8 @@ from(bucket: "b")
 expectedFlux := `option task = {name: "task-Options-Update", every: 10s, concurrency: 100, offset: 10s}
 from(bucket: "b")
-|> to(bucket: "two", orgID: "000000000000000")`
+|> to(bucket: "two", orgID: "000000000000000")
+`
 f, err := sys.TaskService.UpdateTask(authorizedCtx, task.ID, taskmodel.TaskUpdate{Options: options.Options{Offset: options.MustParseDuration("10s")}})
 if err != nil {
 t.Fatal(err)
@@ -723,7 +726,8 @@ from(bucket: "b")
 withoutOffset := `option task = {name: "task-Options-Update", every: 10s, concurrency: 100}
 from(bucket: "b")
-|> to(bucket: "two", orgID: "000000000000000")`
+|> to(bucket: "two", orgID: "000000000000000")
+`
 fNoOffset, err := sys.TaskService.UpdateTask(authorizedCtx, task.ID, taskmodel.TaskUpdate{Flux: &withoutOffset})
 if err != nil {
 t.Fatal(err)
@@ -1825,13 +1829,15 @@ const (
 // This comment should persist.
 from(bucket: "b")
-|> to(bucket: "two", orgID: "000000000000000")`
+|> to(bucket: "two", orgID: "000000000000000")
+`
 scriptDifferentName = `option task = {name: "task-changed #%d", cron: "* * * * *", offset: 5s, concurrency: 100}
 // This comment should persist.
 from(bucket: "b")
-|> to(bucket: "two", orgID: "000000000000000")`
+|> to(bucket: "two", orgID: "000000000000000")
+`
 )
 func testTaskType(t *testing.T, sys *System) {

@@ -67,7 +67,8 @@ func TestOptionsEditWithAST(t *testing.T) {
 t.Run("fmt string", func(t *testing.T) {
 expected := `option task = {every: 10s, name: "foo"}
-from(bucket: "x") |> range(start: -1h)`
+from(bucket: "x") |> range(start: -1h)
+`
 if *tu.Flux != expected {
 t.Errorf("got the wrong task back, expected %s,\n got %s\n diff: %s", expected, *tu.Flux, cmp.Diff(expected, *tu.Flux))
 }
@@ -135,7 +136,8 @@ from(bucket: "x") |> range(start: -1h)`
 tu.Options.Offset = &options.Duration{}
 expscript := `option task = {cron: "* * * * *", name: "foo"}
-from(bucket: "x") |> range(start: -1h)`
+from(bucket: "x") |> range(start: -1h)
+`
 if err := tu.UpdateFlux(fluxlang.DefaultService, `option task = {cron: "* * * * *", name: "foo", offset: 10s} from(bucket:"x") |> range(start:-1h)`); err != nil {
 t.Fatal(err)
 }