fix: reapply new flux result parser (#17757)

now it's behind a flag
pull/17836/head
Alex Boatwright 2020-04-22 09:49:22 -07:00 committed by GitHub
parent 960ec964f6
commit aa940a9739
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
12 changed files with 126 additions and 131 deletions

View File

@ -1,21 +1,24 @@
// Libraries
import React, {PureComponent} from 'react'
import {withRouter, WithRouterProps} from 'react-router'
import _ from 'lodash'
import {connect} from 'react-redux'
// Components
import {Button, ComponentColor, ComponentSize} from '@influxdata/clockface'
import {ErrorHandling} from 'src/shared/decorators/errors'
// Types
import {Organization} from 'src/types'
import {AppState} from 'src/types'
interface OwnProps {
orgs: Organization[]
onExit: () => void
}
type Props = OwnProps & WithRouterProps
interface StateProps {
orgID: string | null
}
type Props = OwnProps & StateProps & WithRouterProps
@ErrorHandling
class CompletionAdvancedButton extends PureComponent<Props> {
@ -32,14 +35,21 @@ class CompletionAdvancedButton extends PureComponent<Props> {
}
private handleAdvanced = (): void => {
const {router, orgs, onExit} = this.props
const id = _.get(orgs, '0.id', null)
if (id) {
router.push(`/orgs/${id}/load-data/buckets`)
const {router, orgID, onExit} = this.props
if (orgID) {
router.push(`/orgs/${orgID}/load-data/buckets`)
} else {
onExit()
}
}
}
export default withRouter<OwnProps>(CompletionAdvancedButton)
// Pull the onboarding org ID out of global state for the advanced button
const mstp = (state: AppState): StateProps => ({
  orgID: state.onboarding.orgID,
})

export default connect<StateProps, {}>(mstp)(
  withRouter<OwnProps>(CompletionAdvancedButton)
)

View File

@ -20,7 +20,6 @@ import {ossMetricsTemplate} from 'src/templates/constants/defaultTemplates'
import {getDashboards} from 'src/organizations/apis'
import {client} from 'src/utils/api'
import {createDashboardFromTemplate as createDashboardFromTemplateAJAX} from 'src/templates/api'
import * as api from 'src/client'
// Types
import {
@ -31,7 +30,7 @@ import {
Grid,
DapperScrollbars,
} from '@influxdata/clockface'
import {Dashboard, Organization} from 'src/types'
import {Dashboard} from 'src/types'
import {ScraperTargetRequest} from '@influxdata/influx'
import {OnboardingStepProps} from 'src/onboarding/containers/OnboardingWizard'
import {QUICKSTART_SCRAPER_TARGET_URL} from 'src/dataLoaders/constants/pluginConfigs'
@ -41,15 +40,6 @@ interface Props extends OnboardingStepProps {
bucketID: string
}
const getOrganizations = async () => {
const resp = await api.getOrgs({})
if (resp.status !== 200) {
throw new Error(resp.data.message)
}
return resp.data.orgs
}
@ErrorHandling
class CompletionStep extends PureComponent<Props> {
public componentDidMount() {
@ -104,16 +94,7 @@ class CompletionStep extends PureComponent<Props> {
widthSM={Columns.Four}
>
<div className="wizard-completion--option">
<ResourceFetcher<Organization[]>
fetcher={getOrganizations}
>
{orgs => (
<CompletionAdvancedButton
onExit={onExit}
orgs={orgs}
/>
)}
</ResourceFetcher>
<CompletionAdvancedButton onExit={onExit} />
<dt>Whoa looks like youre an expert!</dt>
<dd>
This allows you to set up Telegraf, scrapers, and much

View File

@ -9,7 +9,7 @@ import {runQuery} from 'src/shared/apis/query'
import {getWindowVars} from 'src/variables/utils/getWindowVars'
import {buildVarsOption} from 'src/variables/utils/buildVarsOption'
import {getVariables, asAssignment} from 'src/variables/selectors'
import {checkQueryResult} from 'src/shared/utils/checkQueryResult'
import fromFlux from 'src/shared/utils/fromFlux'
import {getOrg} from 'src/organizations/selectors'
// Actions
@ -248,7 +248,10 @@ export const executePreviewQuery = (query: string) => async (
dispatch(notify(resultTooLarge(result.bytesRead)))
}
checkQueryResult(result.csv)
// TODO: this is just here for validation. since we are already eating
// the cost of parsing the results, we should store the output instead
// of the raw input
fromFlux(result.csv)
const files = [result.csv]
dispatch(setFiles(files))

View File

@ -3,7 +3,12 @@ import React, {Component, RefObject, CSSProperties} from 'react'
import {isEqual} from 'lodash'
import {connect} from 'react-redux'
import {withRouter, WithRouterProps} from 'react-router'
import {fromFlux, FromFluxResult} from '@influxdata/giraffe'
import {
default as fromFlux,
FromFluxResult,
} from 'src/shared/utils/fromFlux.legacy'
import {fromFlux as fromFluxGiraffe} from '@influxdata/giraffe'
import {isFlagEnabled} from 'src/shared/utils/featureFlag'
// API
import {
@ -14,7 +19,6 @@ import {
import {runStatusesQuery} from 'src/alerting/utils/statusEvents'
// Utils
import {checkQueryResult} from 'src/shared/utils/checkQueryResult'
import {getWindowVars} from 'src/variables/utils/getWindowVars'
import {buildVarsOption} from 'src/variables/utils/buildVarsOption'
import 'intersection-observer'
@ -227,12 +231,17 @@ class TimeSeries extends Component<Props & WithRouterProps, State> {
if (result.didTruncate) {
notify(resultTooLarge(result.bytesRead))
}
checkQueryResult(result.csv)
}
const files = (results as RunQuerySuccessResult[]).map(r => r.csv)
const giraffeResult = fromFlux(files.join('\n\n'))
let giraffeResult
if (isFlagEnabled('fluxParser')) {
giraffeResult = fromFlux(files.join('\n\n'))
} else {
giraffeResult = fromFluxGiraffe(files.join('\n\n'))
}
this.pendingReload = false
this.setState({

View File

@ -1,28 +0,0 @@
import {checkQueryResult} from 'src/shared/utils/checkQueryResult'
describe('checkQueryResult', () => {
  test('throws an error when the response has an error table', () => {
    // Flux error tables report an `error` column whose first row holds
    // the message that should surface to the user
    const errorCSV = `#group,true,true
#datatype,string,string
#default,,
,error,reference
,"function references unknown column ""_value""",`

    const check = () => checkQueryResult(errorCSV)

    expect(check).toThrow('function references unknown column')
  })

  test('does not throw an error when the response is valid', () => {
    // A normal result table — no `error` column in the header row
    const validCSV = `#group,false,false,true,true,false,false,true,true,true
#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
#default,_result,,,,,,,,
,result,table,_start,_stop,_time,_value,_measurement,host,_field
,,0,2019-03-21T18:54:14.113478Z,2019-03-21T19:54:14.113478Z,2019-03-21T18:54:21Z,4780101632,mem,oox4k.local,active
,,0,2019-03-21T18:54:14.113478Z,2019-03-21T19:54:14.113478Z,2019-03-21T18:54:31Z,5095436288,mem,oox4k.local,active`

    const check = () => checkQueryResult(validCSV)

    expect(check).not.toThrow()
  })
})

View File

@ -1,56 +0,0 @@
/*
Given Flux query response as a CSV, check if the CSV contains an error table
as the first result. If it does, throw the error message contained within
that table.
For example, given the following response:
#datatype,string,long
,error,reference
,Failed to parse query,897
we want to throw an error with the message "Failed to parse query".
See https://github.com/influxdata/flux/blob/master/docs/SPEC.md#errors.
*/
/*
  Inspect the head of a Flux CSV response and throw if it starts with an
  error table (per the Flux SPEC, an error result is a table with an
  `error` column whose first data row carries the message).
*/
export const checkQueryResult = (file: string = ''): void => {
  // Only the first few lines can contain the error table, so avoid
  // scanning a potentially enormous response body
  const head = file.slice(0, findNthIndex(file, '\n', 6))
  const dataLines = head.split('\n').filter(line => !line.startsWith('#'))

  const [headerLine, firstRow] = dataLines

  if (!headerLine || !headerLine.includes('error') || !firstRow) {
    return
  }

  const headers = headerLine.split(',').map(cell => cell.trim())
  const cells = firstRow.split(',').map(cell => cell.trim())
  const errorIndex = headers.indexOf('error')

  if (errorIndex === -1 || !cells[errorIndex]) {
    return
  }

  // Strip the quotes surrounding the error message, if present
  throw new Error(cells[errorIndex].replace(/^"/, '').replace(/"$/, ''))
}
// Returns the index of the nth occurrence of character `c` in `s`, or
// `undefined` when `c` occurs fewer than n times. The `undefined` fallback
// is deliberate: `String.prototype.slice` treats an `undefined` end index
// as "to the end of the string", which is what `checkQueryResult` relies
// on when a response has fewer than n lines.
const findNthIndex = (s: string, c: string, n: number): number | undefined => {
  let count = 0

  for (let i = 0; i < s.length; i++) {
    // Strict equality — `==` invites accidental coercion
    if (s[i] === c) {
      count += 1
    }

    // NOTE: checked outside the match branch to preserve the n === 0
    // behavior (returns 0 immediately for a non-empty string)
    if (count === n) {
      return i
    }
  }

  // Explicit return so the function passes `noImplicitReturns`
  return undefined
}

View File

@ -9,6 +9,7 @@ export const OSS_FLAGS = {
matchingNotificationRules: false,
regionBasedLoginPage: false,
demodata: false,
fluxParser: false,
}
export const CLOUD_FLAGS = {
@ -21,6 +22,7 @@ export const CLOUD_FLAGS = {
matchingNotificationRules: false,
regionBasedLoginPage: false,
demodata: false,
fluxParser: false,
}
export const isFlagEnabled = (flagName: string, equals?: string | boolean) => {

View File

@ -0,0 +1,36 @@
import fromFlux from 'src/shared/utils/fromFlux'
import {newTable, Table} from '@influxdata/giraffe'
/*\
This module translates interfaces between giraffe and influxdb
as we migrate the flux parsing over from the vis library, closer
to the api response layer
\*/
export interface FromFluxResult {
// The single parsed `Table`
table: Table
// The union of unique group keys from all input Flux tables
fluxGroupKeyUnion: string[]
}
// Parse a Flux CSV response with the in-repo parser, then repackage the
// result behind giraffe's `FromFluxResult` interface so existing callers
// keep working during the migration.
export default function fromFluxLegacy(csv: string): FromFluxResult {
  const parsedFlux = fromFlux(csv)

  // Rebuild the parsed columns into a giraffe Table; `addColumn` returns
  // a new Table, so reassign on every iteration
  let table = newTable(parsedFlux.table.length)

  for (const [key, column] of Object.entries(parsedFlux.table.columns)) {
    table = table.addColumn(
      key,
      column.type,
      column.data as string[],
      column.name
    )
  }

  return {
    table,
    fluxGroupKeyUnion: parsedFlux.fluxGroupKeyUnion,
  }
}

View File

@ -1,4 +1,5 @@
import fromFlux from './fromFlux'
import fromFluxLegacy from './fromFlux.legacy'
describe('fromFlux', () => {
test('can parse a Flux CSV with mismatched schemas', () => {
@ -209,4 +210,24 @@ there",5
expect(() => fromFlux('\n\n\n\nyolo\ntrash\n\n\n\n')).not.toThrow()
expect(fromFlux('\n\n\n\nyolo\ntrash\n\n\n\n').table.length).toEqual(0)
})
test('ignores all the wrong whitespace', () => {
// Fixture deliberately scatters spaces around annotation/header cells
// and adds leading/trailing blank lines to exercise the trimming paths
const CSV = `
#group,false, false , false
#datatype,string,long, long
#default, _result,,
,result ,table, _value
,,0,28
`
// Both the new parser and the legacy adapter should trim cell whitespace
// and produce the same single-row `_value` column
const flux = fromFlux(CSV)
const fluxLegacy = fromFluxLegacy(CSV)
expect(flux.table.columns['_value'].data.length).toEqual(1)
expect(flux.table.columns['_value'].data).toEqual([28])
expect(flux.fluxGroupKeyUnion).toEqual([])
expect((fluxLegacy.table as any).columns['_value'].data).toEqual([28])
})
})

View File

@ -199,6 +199,11 @@ export default function fromFlux(csv: string): ParsedFlux {
)
).data
// trim whitespace from the beginning
while (parsed.length && parsed[0].length === 1) {
parsed.shift()
}
// only happens on malformed input
if (!parsed.length) {
continue
@ -224,17 +229,23 @@ export default function fromFlux(csv: string): ParsedFlux {
throw new Error(`[${errorReferenceCode}] ${errorMessage}`)
}
// trim whitespace from the end
while (parsed[parsed.length - 1].length < parsed[headerLocation].length) {
parsed.pop()
}
for (
currentLineInHeader = 0;
currentLineInHeader < headerLocation;
currentLineInHeader++
) {
annotations[parsed[currentLineInHeader][0]] = parsed[
annotations[parsed[currentLineInHeader][0].trim()] = parsed[
currentLineInHeader
].reduce((annotationObject, currentColumn, currentColumnIndex) => {
annotationObject[
parsed[headerLocation][currentColumnIndex]
] = currentColumn
const key = parsed[headerLocation][currentColumnIndex].trim()
if (key) {
annotationObject[key] = currentColumn.trim()
}
return annotationObject
}, {})
}
@ -244,7 +255,7 @@ export default function fromFlux(csv: string): ParsedFlux {
currentColumnIndex < parsed[headerLocation].length;
currentColumnIndex++
) {
columnName = parsed[headerLocation][currentColumnIndex]
columnName = parsed[headerLocation][currentColumnIndex].trim()
columnType = annotations['#datatype'][columnName]
columnKey = `${columnName} (${TO_COLUMN_TYPE[columnType]})`

View File

@ -1,4 +1,4 @@
import {fromFlux} from '@influxdata/giraffe'
import fromFlux from 'src/shared/utils/fromFlux.legacy'
import {latestValues} from 'src/shared/utils/latestValues'
@ -118,7 +118,7 @@ describe('latestValues', () => {
,,0,2018-12-10T18:29:48Z,3,5.0
,,0,2018-12-10T18:40:18Z,4,6.0
#group,false,false,false,false
#group,false,false,false,false,false
#datatype,string,long,dateTime:RFC3339,long,double
#default,1,,,,
,result,table,_time,_value,foo

View File

@ -19,7 +19,7 @@ import {rateLimitReached, resultTooLarge} from 'src/shared/copy/notifications'
// Utils
import {getActiveTimeMachine, getActiveQuery} from 'src/timeMachine/selectors'
import {checkQueryResult} from 'src/shared/utils/checkQueryResult'
import fromFlux from 'src/shared/utils/fromFlux'
import {getAllVariables, asAssignment} from 'src/variables/selectors'
import {buildVarsOption} from 'src/variables/utils/buildVarsOption'
import {findNodes} from 'src/shared/utils/ast'
@ -163,7 +163,10 @@ export const executeQueries = () => async (dispatch, getState: GetState) => {
dispatch(notify(resultTooLarge(result.bytesRead)))
}
checkQueryResult(result.csv)
// TODO: this is just here for validation. since we are already eating
// the cost of parsing the results, we should store the output instead
// of the raw input
fromFlux(result.csv)
}
const files = (results as RunQuerySuccessResult[]).map(r => r.csv)
@ -228,7 +231,10 @@ export const executeCheckQuery = () => async (dispatch, getState: GetState) => {
dispatch(notify(resultTooLarge(result.bytesRead)))
}
checkQueryResult(result.csv)
// TODO: this is just here for validation. since we are already eating
// the cost of parsing the results, we should store the output instead
// of the raw input
fromFlux(result.csv)
const file = result.csv