fix(ui): improve detection of relevant tokens to display
When a user saves a query from the Data Explorer as a task, we display an overlay that allows them to select the token to use for the task. Since not all tokens have access to the buckets read from and written to in the query, we filter the list of tokens that are selectable in the overlay.

This commit:

- Improves the detection of which buckets are read from in the query by analyzing the query with an AST-based approach, rather than string heuristics
- Removes the assumption that only a single bucket is read from in the query

Closes #14226
parent 96d0a4f88b
commit 89d6f9ea3e
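At a glance, the pieces introduced below compose like this when building the token list for the overlay. This is an illustrative sketch, not code from the diff; the `declare const` inputs stand in for values the save-as-task form already has:

    import {intersectionBy} from 'lodash'
    import {Authorization} from '@influxdata/influx'

    import {filterIrrelevantAuths} from 'src/authorizations/utils/permissions'
    import {getReadBuckets} from 'src/shared/utils/getReadBuckets'

    // Assumed inputs (the form already has these)
    declare const tokens: Authorization[] // all tokens in the org
    declare const script: string // the active Flux query text
    declare const toBucketName: string // destination bucket from the task options

    // Buckets the query reads from, found by walking the parsed Flux AST
    const readBucketNames = getReadBuckets(script)

    // Keep only tokens that can read every source bucket AND write the destination
    const readAuths = filterIrrelevantAuths(tokens, 'read', readBucketNames)
    const writeAuths = filterIrrelevantAuths(tokens, 'write', [toBucketName])
    const selectableTokens = intersectionBy(readAuths, writeAuths, 'id')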
src/authorizations/utils/permissions.test.ts (new file)
@@ -0,0 +1,122 @@
import {Authorization} from '@influxdata/influx'

import {filterIrrelevantAuths} from './permissions'

describe('filterIrrelevantAuths', () => {
  test('can find relevant tokens for a bucket', () => {
    const allAccessToken = {
      id: '03f4f29fd0011002',
      token: 'a',
      status: 'active',
      description: 'all access token',
      orgID: '03f4f29fd0011000',
      org: "Chris' Org",
      userID: '03f4f29fbd811000',
      user: 'chris',
      permissions: [
        {
          action: 'read',
          resource: {
            type: 'buckets',
          },
        },
        {
          action: 'write',
          resource: {
            type: 'buckets',
          },
        },
        {
          action: 'read',
          resource: {
            type: 'dashboards',
          },
        },
      ],
    } as Authorization

    const readAAndBBucketsToken = {
      id: '043f77872cad3000',
      token: 'b',
      status: 'active',
      description: 'scoped read token (to A and B)',
      orgID: '03f4f29fd0011000',
      org: "Chris' Org",
      userID: '03f4f29fbd811000',
      user: 'chris',
      permissions: [
        {
          action: 'read',
          resource: {
            type: 'buckets',
            id: '03f8d8d34d7e5000',
            orgID: '03f4f29fd0011000',
            name: 'A',
            org: "Chris' Org",
          },
        },
        {
          action: 'read',
          resource: {
            type: 'buckets',
            id: '043e71cf1922d000',
            orgID: '03f4f29fd0011000',
            name: 'B',
            org: "Chris' Org",
          },
        },
      ],
    } as Authorization

    const writeCBucketToken = {
      id: '043f7799e2ad3000',
      token: 'c',
      status: 'active',
      description: 'scoped write token (to C)',
      orgID: '03f4f29fd0011000',
      org: "Chris' Org",
      userID: '03f4f29fbd811000',
      user: 'chris',
      permissions: [
        {
          action: 'write',
          resource: {
            type: 'buckets',
            id: '03f4f29fd0011001',
            orgID: '03f4f29fd0011000',
            name: 'C',
            org: "Chris' Org",
          },
        },
      ],
    } as Authorization

    const tokens = [allAccessToken, readAAndBBucketsToken, writeCBucketToken]

    expect(filterIrrelevantAuths(tokens, 'read', ['A', 'B', 'C'])).toEqual([
      allAccessToken,
    ])

    expect(filterIrrelevantAuths(tokens, 'read', ['B', 'C'])).toEqual([
      allAccessToken,
    ])

    expect(filterIrrelevantAuths(tokens, 'read', ['A', 'B'])).toEqual([
      allAccessToken,
      readAAndBBucketsToken,
    ])

    expect(filterIrrelevantAuths(tokens, 'read', ['C'])).toEqual([
      allAccessToken,
    ])

    expect(filterIrrelevantAuths(tokens, 'write', ['C'])).toEqual([
      allAccessToken,
      writeCBucketToken,
    ])

    expect(filterIrrelevantAuths(tokens, 'write', ['A'])).toEqual([
      allAccessToken,
    ])
  })
})
src/authorizations/utils/permissions.ts
@@ -160,18 +160,21 @@ export enum BucketTab {
 
 /*
 Given a list of authorizations, return only those that allow performing the
-supplied `action` to the supplied `bucketName`.
+supplied `action` to all of the supplied `bucketNames`.
 */
 export const filterIrrelevantAuths = (
   auths: Authorization[],
   action: 'read' | 'write',
-  bucketName: string
-): Authorization[] =>
-  auths.filter(auth =>
-    auth.permissions.some(
-      permission =>
-        permission.action === action &&
-        permission.resource.type === 'buckets' &&
-        (!permission.resource.name || permission.resource.name === bucketName)
-    )
-  )
+  bucketNames: string[]
+): Authorization[] => {
+  return auths.filter(auth =>
+    bucketNames.every(bucketName =>
+      auth.permissions.some(
+        permission =>
+          permission.action === action &&
+          permission.resource.type === 'buckets' &&
+          (!permission.resource.name || permission.resource.name === bucketName)
+      )
+    )
+  )
+}
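Note the semantics of the rewritten filter: a token now qualifies only if every requested bucket is covered, and a buckets permission with no `resource.name` acts as an org-wide wildcard, which is why the all-access token passes every case in the test above. Illustrating with the tokens defined in that test:

    // Scoped token: can read buckets A and B, but not C
    filterIrrelevantAuths([readAAndBBucketsToken], 'read', ['A', 'B'])
    // => [readAAndBBucketsToken]

    filterIrrelevantAuths([readAAndBBucketsToken], 'read', ['A', 'C'])
    // => []

    // The all-access token's read permission has no resource.name,
    // so it matches any bucket name
    filterIrrelevantAuths([allAccessToken], 'read', ['A', 'C'])
    // => [allAccessToken]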
SaveAsTaskForm.tsx
@@ -19,6 +19,7 @@ import {getAuthorizations} from 'src/authorizations/actions'
 
 // Utils
 import {filterIrrelevantAuths} from 'src/authorizations/utils/permissions'
+import {getReadBuckets} from 'src/shared/utils/getReadBuckets'
 import {getActiveTimeMachine, getActiveQuery} from 'src/timeMachine/selectors'
 import {getTimeRangeVars} from 'src/variables/utils/getTimeRangeVars'
 import {getWindowVars} from 'src/variables/utils/getWindowVars'
@@ -121,26 +122,26 @@ class SaveAsTaskForm extends PureComponent<Props & WithRouterProps> {
       taskOptions: {toBucketName},
     } = this.props
 
-    const readAuths = filterIrrelevantAuths(tokens, 'read', this.readBucketName)
-    const writeAuths = filterIrrelevantAuths(tokens, 'write', toBucketName)
-    const relevantAuthorizations = intersectionBy(readAuths, writeAuths, 'id')
+    const readAuths = filterIrrelevantAuths(
+      tokens,
+      'read',
+      this.readBucketNames
+    )
+
+    const writeAuths = filterIrrelevantAuths(tokens, 'write', [toBucketName])
+    const relevantAuths = intersectionBy(readAuths, writeAuths, 'id')
 
-    return relevantAuthorizations
+    return relevantAuths
   }
 
-  private get readBucketName() {
+  private get readBucketNames(): string[] {
     const {activeQuery} = this.props
 
     if (activeQuery.editMode === 'builder') {
-      return activeQuery.builderConfig.buckets[0] || ''
+      return activeQuery.builderConfig.buckets
     }
 
-    const text = activeQuery.text
-    const splitBucket = text.split('bucket:')
-    const splitQuotes = splitBucket[1].split('"')
-    const readBucketName = splitQuotes[1]
-
-    return readBucketName
+    return getReadBuckets(this.activeScript)
   }
 
   private get isFormValid(): boolean {
src/shared/utils/ast.ts (new file)
@@ -0,0 +1,32 @@
// Libraries
import {isObject, isArray} from 'lodash'

// Types
import {Node} from 'src/types'

/*
Find all nodes in a tree matching the `predicate` function. Each node in the
tree is an object, which may contain objects or arrays of objects as children
under any key.
*/
export const findNodes = (
  node: any,
  predicate: (node: Node) => boolean,
  acc: any[] = []
) => {
  if (predicate(node)) {
    acc.push(node)
  }

  for (const value of Object.values(node)) {
    if (isObject(value)) {
      findNodes(value, predicate, acc)
    } else if (isArray(value)) {
      for (const innerValue of value) {
        findNodes(innerValue, predicate, acc)
      }
    }
  }

  return acc
}
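For example, given the module above, `findNodes` can pull every `CallExpression` out of a hypothetical, hand-written mini AST:

    // A tiny tree in the shape findNodes expects: objects with nested
    // objects and arrays of objects under arbitrary keys
    const tree = {
      type: 'File',
      body: [
        {type: 'CallExpression', callee: {type: 'Identifier', name: 'from'}},
        {
          type: 'ExpressionStatement',
          expression: {
            type: 'CallExpression',
            callee: {type: 'Identifier', name: 'to'},
          },
        },
      ],
    }

    findNodes(tree, n => n.type === 'CallExpression')
    // => [the `from` call, the `to` call], in document order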
getMinDurationFromAST.ts
@@ -1,5 +1,8 @@
 // Libraries
-import {get, isObject, isArray} from 'lodash'
+import {get} from 'lodash'
+
+// Utils
+import {findNodes} from 'src/shared/utils/ast'
 
 // Types
 import {
@@ -12,7 +15,7 @@ import {
   ObjectExpression,
   DateTimeLiteral,
   DurationLiteral,
-} from 'src/types/ast'
+} from 'src/types'
 
 export function getMinDurationFromAST(ast: Package): number {
   // We can't take the minimum of durations of each range individually, since
@@ -41,7 +44,7 @@ export function getMinDurationFromAST(ast: Package): number {
 }
 
 function allRangeTimes(ast: any): Array<[number, number]> {
-  return findNodes(isRangeNode, ast).map(node => rangeTimes(ast, node))
+  return findNodes(ast, isRangeNode).map(node => rangeTimes(ast, node))
 }
 
 /*
@@ -138,7 +141,7 @@ function lookupVariable(ast: any, name: string): Expression {
     )
   }
 
-  const declarator = findNodes(isDeclarator, ast)
+  const declarator = findNodes(ast, isDeclarator)
 
   if (!declarator.length) {
     throw new Error(`unable to lookup variable "${name}"`)
@@ -160,30 +163,3 @@ function isRangeNode(node: Node) {
     get(node, 'callee.name') === 'range'
   )
 }
-
-/*
-Find all nodes in a tree matching the `predicate` function. Each node in the
-tree is an object, which may contain objects or arrays of objects as children
-under any key.
-*/
-function findNodes(
-  predicate: (node: Node) => boolean,
-  node: any,
-  acc: any[] = []
-) {
-  if (predicate(node)) {
-    acc.push(node)
-  }
-
-  for (const value of Object.values(node)) {
-    if (isObject(value)) {
-      findNodes(predicate, value, acc)
-    } else if (isArray(value)) {
-      for (const innerValue of value) {
-        findNodes(predicate, innerValue, acc)
-      }
-    }
-  }
-
-  return acc
-}
src/shared/utils/getReadBuckets.test.ts (new file)
@@ -0,0 +1,29 @@
import {getReadBuckets} from './getReadBuckets'

// These tests are skipped until we can use WASM modules in Jest
describe.skip('getReadBuckets', () => {
  test('handles an empty script', () => {
    expect(getReadBuckets('')).toEqual([])
  })

  test('can find buckets read from in a Flux query', () => {
    const script = `
from(bucket:"foo") |> limit(limit:100, offset:10)

  |> filter(fn: (r) => r.foo and r.bar or r.buz) from

(bucket:

    "bar"
)

  |> foo()
  |> fromSnozzles(bucket: "moo")
  |> moo()

  |> to( bucket: "baz" )
`.trim()

    expect(getReadBuckets(script)).toEqual(['foo', 'bar'])
  })
})
src/shared/utils/getReadBuckets.ts (new file)
@@ -0,0 +1,54 @@
// Libraries
import {parse} from '@influxdata/flux-parser'
import {get, flatMap} from 'lodash'

// Utils
import {findNodes} from 'src/shared/utils/ast'

// Types
import {
  File,
  CallExpression,
  Property,
  StringLiteral,
  Identifier,
} from 'src/types'

/*
Given a Flux script, return a list of names of buckets that are read from in
the script.

For now, this means detecting each time something like

    from(bucket: "foo")

appears in the script.
*/
export const getReadBuckets = (text: string): string[] => {
  try {
    const ast: File = parse(text)

    // Find every `from(bucket: "foo")` call in the script
    const fromCalls: CallExpression[] = findNodes(
      ast,
      n => n.type === 'CallExpression' && get(n, 'callee.name') === 'from'
    )

    // Extract the `bucket: "foo"` part from each call
    const bucketProperties: Property[] = flatMap(fromCalls, call =>
      findNodes(
        call,
        n => n.type === 'Property' && (n.key as Identifier).name === 'bucket'
      )
    )

    // Extract the `foo` from each object property
    const bucketNames = bucketProperties.map(
      prop => (prop.value as StringLiteral).value
    )

    return bucketNames
  } catch (e) {
    console.error('Failed to find buckets read in flux script', e)

    // Fall back to an empty list so callers always receive a string[]
    return []
  }
}
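In practice the call is a one-liner; the scripts here are illustrative:

    getReadBuckets('from(bucket: "telegraf") |> range(start: -1h)')
    // => ['telegraf']

    getReadBuckets('this is not valid flux')
    // => [] (the parse error is logged rather than thrown)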