feat(flow-query-builder): replaced dummy data with parsed query results for flow query builder (#19450)

pull/19478/head
Ariel Salem 2020-08-31 15:38:24 -07:00 committed by GitHub
parent c187ff268f
commit 73d78f62c1
9 changed files with 58 additions and 231 deletions

View File

@@ -135,7 +135,7 @@
"@influxdata/clockface": "2.3.4",
"@influxdata/flux": "^0.5.1",
"@influxdata/flux-lsp-browser": "^0.5.11",
"@influxdata/giraffe": "0.27.0",
"@influxdata/giraffe": "0.29.0",
"@influxdata/influx": "0.5.5",
"@influxdata/influxdb-templates": "0.9.0",
"@influxdata/react-custom-scrollbars": "4.3.8",

View File

@@ -79,6 +79,16 @@ export const Submit: FC = () => {
requirements,
})
+ } else if (pipe.type === 'data') {
+ const {bucketName} = pipe
+ const text = `from(bucket: "${bucketName}")|>range(start: v.timeRangeStart, stop: v.timeRangeStop)`
+ stages.push({
+ text,
+ instances: [pipeID],
+ requirements: {},
+ })
} else if (pipe.type === 'queryBuilder') {
const {
aggregateFunction,
bucketName,
@@ -116,7 +126,7 @@
})
}
- if (aggregateFunction.flux && aggregateFunction.name) {
+ if (aggregateFunction?.name) {
text += ` |> aggregateWindow(every: v.windowPeriod, fn: ${aggregateFunction.name}, createEmpty: false)
|> yield(name: "${aggregateFunction.name}")`
}
@@ -178,7 +188,7 @@
const hasQueries = notebook.data.all
.map(p => p.type)
- .filter(p => p === 'query' || p === 'data').length
+ .filter(p => p === 'query' || p === 'data' || p === 'queryBuilder').length
return (
<SubmitQueryButton

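Note (not part of the diff): a minimal sketch of the Flux text these branches assemble. The helper names `buildDataText` and `appendAggregate` and the bucket name are illustrative only; the template strings mirror the new 'data' branch above and the aggregateWindow suffix guarded by the new optional chaining.

```typescript
// Illustrative only: mirrors the stage text built for a 'data' pipe and the
// aggregate suffix appended when aggregateFunction?.name is set.
interface AggregateFunction {
  name?: string
}

const buildDataText = (bucketName: string): string =>
  `from(bucket: "${bucketName}")|>range(start: v.timeRangeStart, stop: v.timeRangeStop)`

const appendAggregate = (text: string, fn?: AggregateFunction): string =>
  fn?.name
    ? `${text} |> aggregateWindow(every: v.windowPeriod, fn: ${fn.name}, createEmpty: false)
  |> yield(name: "${fn.name}")`
    : text

// e.g. appendAggregate(buildDataText('telegraf'), {name: 'mean'}) produces the
// same shape of query the Submit component pushes into its stages.
```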
View File

@@ -1,171 +0,0 @@
import {Schema} from 'src/types'
export const results: Schema = {
cpu: {
fields: [
'usage_guest',
'usage_guest_nice',
'usage_idle',
'usage_iowait',
'usage_irq',
'usage_nice',
'usage_softirq',
'usage_steal',
'usage_system',
'usage_user',
],
tags: {
host: ['MBP15.lan1'],
cpu: [
'cpu-total',
'cpu0',
'cpu1',
'cpu10',
'cpu11',
'cpu2',
'cpu3',
'cpu4',
'cpu5',
'cpu6',
'cpu7',
'cpu8',
'cpu9',
],
},
},
disk: {
fields: [
'free',
'inodes_free',
'inodes_total',
'inodes_used',
'total',
'used',
'used_percent',
],
tags: {
fstype: ['apfs'],
device: ['disk1s1', 'disk1s4', 'disk1s5', 'disk2s1'],
},
},
diskio: {
fields: [
'io_time',
'iops_in_progress',
'read_bytes',
'read_time',
'reads',
'weighted_io_time',
'write_bytes',
'write_time',
'writes',
],
tags: {
name: ['disk0', 'disk2'],
host: ['ip-192-168-1-206.ec2.internal'],
},
},
mem: {
fields: [
'active',
'available',
'available_percent',
'buffered',
'cached',
'commit_limit',
'committed_as',
'dirty',
'free',
'high_free',
'high_total',
'huge_page_size',
'huge_pages_free',
'huge_pages_total',
'inactive',
'low_free',
'low_total',
'mapped',
'page_tables',
'shared',
'slab',
'swap_cached',
'swap_free',
'swap_total',
'total',
'used',
'used_percent',
'vmalloc_chunk',
'vmalloc_total',
'vmalloc_used',
'wired',
'write_back',
'write_back_tmp',
],
tags: {
host: ['ip-192-168-1-206.ec2.internal'],
},
},
net: {
fields: [
'bytes_recv',
'bytes_sent',
'drop_in',
'drop_out',
'err_in',
'err_out',
'packets_recv',
'packets_sent',
],
tags: {
host: ['ip-192-168-1-206.ec2.internal'],
interface: [
'awd10',
'en0',
'en1',
'en2',
'en3',
'en4',
'en5',
'llw0',
'p2p0',
'utun0',
'utun1',
'utun2',
],
},
},
processes: {
fields: [
'blocked',
'idle',
'running',
'sleeping',
'stopped',
'total',
'unknown',
],
tags: {
host: ['ip-192-168-1-206.ec2.internal'],
},
},
swap: {
fields: ['free', 'in', 'out', 'total', 'used', 'used_percent'],
tags: {
host: ['ip-192-168-1-206.ec2.internal'],
},
},
system: {
fields: [
'load1',
'load15',
'load5',
'n_cpus',
'n_users',
'uptime',
'uptime_format',
],
tags: {
host: ['ip-192-168-1-206.ec2.internal'],
},
},
}

View File

@@ -1,15 +1,15 @@
// Libraries
import {Dispatch} from 'react'
- // import {fromFlux as parse} from '@influxdata/giraffe'
+ import {fromFlux as parse} from '@influxdata/giraffe'
// API
- // import {runQuery} from 'src/shared/apis/query'
+ import {runQuery} from 'src/shared/apis/query'
// Types
import {AppState, GetState, RemoteDataState, Schema} from 'src/types'
// Utils
- // import {getOrg} from 'src/organizations/selectors'
+ import {getOrg} from 'src/organizations/selectors'
import {getSchemaByBucketName} from 'src/shared/selectors/schemaSelectors'
// Actions
@@ -24,42 +24,36 @@ import {notify, Action as NotifyAction} from 'src/shared/actions/notifications'
import {getBucketsFailed} from 'src/shared/copy/notifications'
import {TEN_MINUTES} from 'src/shared/reducers/schema'
- // DUMMY DATA TO DELETE
- import {results} from 'src/notebooks/pipes/Data/dummyData'
type Action = SchemaAction | NotifyAction
- // TODO(ariel): make this work with the query & the time range
- export const fetchSchemaForBucket = async (): Promise<Schema> => {
- // export const fetchSchemaForBucket = async (
- // bucketName: string,
- // orgID: string
- // ): Promise<Schema> => {
- // const text = `import "influxdata/influxdb/v1"
- // from(bucket: "${bucketName}")
- // |> range(start: -1h)
- // |> first()
- // |> v1.fieldsAsCols()`
+ export const fetchSchemaForBucket = async (
+ bucketName: string,
+ orgID: string
+ ): Promise<Schema> => {
+ /*
+ -4d here is an arbitrary time range that fulfills the need to overfetch a bucket's meta data
+ rather than underfetching the data. At the time of writing this comment, a timerange is
+ prerequisite for querying a bucket's metadata and is therefore required here.
- // const res = await runQuery(orgID, text)
- // .promise.then(raw => {
- // if (raw.type !== 'SUCCESS') {
- // throw new Error(raw.message)
- // }
+ If overfetching provides too much overhead / comes at a performance cost down the line,
+ we should reduce the range / come up with an alternative to allow for querying a bucket's metadata
+ without having to provide a range
+ */
+ const text = `from(bucket: "${bucketName}")
+ |> range(start: -4d)
+ |> first()`
- // return raw
- // })
- // .then(raw => {
- // return {
- // source: text,
- // raw: raw.csv,
- // parsed: parse(raw.csv),
- // error: null,
- // }
- // })
+ const res = await runQuery(orgID, text)
+ .promise.then(raw => {
+ if (raw.type !== 'SUCCESS') {
+ throw new Error(raw.message)
+ }
- const result: Schema = await new Promise(resolve => resolve(results))
- return result
+ return raw
+ })
+ .then(raw => parse(raw.csv).schema)
+ return res
}
const getUnexpiredSchema = (
@@ -99,9 +93,8 @@ export const getAndSetBucketSchema = (bucketName: string) => async (
} else {
dispatch(setSchema(RemoteDataState.Loading, bucketName, {}))
}
- // const orgID = getOrg(state).id
- const schema = await fetchSchemaForBucket()
- // const schema = await fetchSchemaForBucket(bucketName, orgID)
+ const orgID = getOrg(state).id
+ const schema = await fetchSchemaForBucket(bucketName, orgID)
dispatch(setSchema(RemoteDataState.Done, bucketName, schema))
} catch (error) {
console.error(error)

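A minimal sketch of the new parse step, assuming (as the code above does) that giraffe 0.29.0's `fromFlux` result exposes a `schema` field. The annotated CSV below is made up to resemble a single-row response to the `from |> range(start: -4d) |> first()` query; it is not real output.

```typescript
import {fromFlux} from '@influxdata/giraffe'

// Made-up annotated CSV standing in for runQuery's raw.csv.
const csv = `#group,false,false,true,true,false,false,true,true,true
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,double,string,string,string
#default,_result,,,,,,,,
,result,table,_start,_stop,_time,_value,_field,_measurement,host
,,0,2020-08-27T00:00:00Z,2020-08-31T00:00:00Z,2020-08-28T00:00:00Z,1.23,usage_idle,cpu,MBP15.lan1`

const parsed = fromFlux(csv)
// parsed.table holds the parsed rows; parsed.schema is the per-measurement
// summary (fields and tag values) that fetchSchemaForBucket now resolves
// with, replacing the hand-written results from the deleted dummyData file.
```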
View File

@@ -31,7 +31,7 @@ import {
} from 'src/types'
interface Props {
- giraffeResult: FromFluxResult
+ giraffeResult: Omit<FromFluxResult, 'schema'>
files?: string[]
properties: QueryViewProperties | CheckViewProperties
timeZone: TimeZone

View File

@@ -1,5 +1,5 @@
import fromFlux from 'src/shared/utils/fromFlux'
- import {newTable, Table} from '@influxdata/giraffe'
+ import {newTable, Schema, Table} from '@influxdata/giraffe'
/*\
@@ -12,6 +12,8 @@ export interface FromFluxResult {
// The single parsed `Table`
table: Table
+ schema: Schema
// The union of unique group keys from all input Flux tables
fluxGroupKeyUnion: string[]
}
@@ -32,5 +34,6 @@ export default function fromFluxLegacy(csv: string): FromFluxResult {
newTable(parsedFlux.table.length)
),
fluxGroupKeyUnion: parsedFlux.fluxGroupKeyUnion,
+ schema: {},
}
}

View File

@@ -166,7 +166,9 @@ export const extent = (xs: number[]): [number, number] | null => {
return [low, high]
}
- export const checkResultsLength = (giraffeResult: FromFluxResult): boolean => {
+ export const checkResultsLength = (
+ giraffeResult: Omit<FromFluxResult, 'schema'>
+ ): boolean => {
return get(giraffeResult, 'table.length', 0) > 0
}

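For reference, a self-contained sketch (local stand-in types, not the real imports) of the assignability that `Omit<FromFluxResult, 'schema'>` relies on in the view props above and in `checkResultsLength`:

```typescript
// Stand-in for the real FromFluxResult; only the field names matter here.
interface FromFluxResult {
  table: unknown
  fluxGroupKeyUnion: string[]
  schema: Record<string, unknown>
}

// The narrowed shape used by the view props and checkResultsLength.
type GiraffeResultProp = Omit<FromFluxResult, 'schema'>

declare const parsed: FromFluxResult // full parser output, schema included

// Omitting 'schema' from the target only drops the requirement, so callers
// that pass a full result keep compiling, and the legacy parser's result
// (whose schema is just an empty object) is accepted the same way.
const prop: GiraffeResultProp = parsed
```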
View File

@@ -1,15 +1,5 @@
- export interface Tag {
- [tagName: string]: string[]
- }
+ import {Schema as GenSchema, Tag as GenTag} from '@influxdata/giraffe'
- export interface SchemaValues {
- fields: string[]
- tags: Tag
- type?: string
- }
+ export interface Tag extends GenTag {}
export type Measurement = string
- export interface Schema {
- [measurement: string]: SchemaValues
- }
+ export interface Schema extends GenSchema {}

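To illustrate the aliases, a sketch assuming giraffe's `Schema`/`Tag` keep the measurement → fields/tags shape of the local interfaces they replace, and that both are still re-exported from `src/types`. The values are trimmed from the deleted dummyData file, shape only:

```typescript
import {Schema, Tag} from 'src/types'

// Shape only; entries copied from the deleted dummy data, not real output.
const hostTag: Tag = {host: ['MBP15.lan1']}

const exampleSchema: Schema = {
  cpu: {
    fields: ['usage_idle', 'usage_user', 'usage_system'],
    tags: {...hostTag, cpu: ['cpu-total', 'cpu0', 'cpu1']},
  },
}
```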
View File

@@ -762,10 +762,10 @@
resolved "https://registry.yarnpkg.com/@influxdata/flux/-/flux-0.5.1.tgz#e39e7a7af9163fc9494422c8fed77f3ae1b68f56"
integrity sha512-GHlkXBhSdJ2m56JzDkbnKPAqLj3/lexPooacu14AWTO4f2sDGLmzM7r0AxgdtU1M2x7EXNBwgGOI5EOAdN6mkw==
"@influxdata/giraffe@0.27.0":
version "0.27.0"
resolved "https://registry.yarnpkg.com/@influxdata/giraffe/-/giraffe-0.27.0.tgz#5cefd8f2003bc123f57d10b8f654311336230868"
integrity sha512-b7InFh45dt+qXeWsfqgDvLI5e3Ex0aCX9IDOSy/glxgTrtxgV5gF1Lp+/iqdMoLRu/DIslliXnT4SMKYBBU0Hg==
"@influxdata/giraffe@0.29.0":
version "0.29.0"
resolved "https://registry.yarnpkg.com/@influxdata/giraffe/-/giraffe-0.29.0.tgz#0a6a3216b910ae3a1d834d682003f3894d7bc5d6"
integrity sha512-2NBrYUlCf3spqec+HevFtzp+Ohp5WywOY2R0mIxpfMVMPXrRzl1EHRbZHCInh7N57mglPW8U1RGchmACr29RLA==
"@influxdata/influx@0.5.5":
version "0.5.5"