Merge pull request #15034 from influxdata/fix-ui-newline-parsing

fix(ui): handle parsing Flux strings containing newlines
pull/15044/head
Michael Desa 2019-09-06 19:43:12 -04:00 committed by GitHub
commit 78e1a989fd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
5 changed files with 124 additions and 10 deletions

View File

@@ -160,7 +160,7 @@
"dependencies": {
"@influxdata/clockface": "0.0.28",
"@influxdata/flux-parser": "^0.3.0",
"@influxdata/giraffe": "0.16.2",
"@influxdata/giraffe": "0.16.3",
"@influxdata/influx": "0.5.5",
"@influxdata/influxdb-templates": "0.7.0",
"@influxdata/react-custom-scrollbars": "4.3.8",

View File

@@ -19,14 +19,34 @@ export const parseResponseError = (response: string): FluxTable[] => {
]
}
/*
A Flux CSV response can contain multiple CSV files each joined by a newline.
This function splits up a CSV response into these individual CSV files.
See https://github.com/influxdata/flux/blob/master/docs/SPEC.md#multiple-tables.
*/
export const parseChunks = (response: string): string[] => {
const trimmedResponse = response.trim()
if (_.isEmpty(trimmedResponse)) {
if (trimmedResponse === '') {
return []
}
const chunks = trimmedResponse.split(/\n\s*\n/)
// Split the response into separate chunks whenever we encounter:
//
// 1. A newline
// 2. Followed by any amount of whitespace
// 3. Followed by a newline
// 4. Followed by a `#` character
//
// The last condition is [necessary][0] for handling CSV responses with
// values containing newlines.
//
// [0]: https://github.com/influxdata/influxdb/issues/15017
const chunks = trimmedResponse
.split(/\n\s*\n#/)
.map((s, i) => (i === 0 ? s : `#${s}`))
return chunks
}

View File

@@ -0,0 +1,97 @@
// Tests for parseFiles, which converts raw Flux CSV responses into a 2D
// array of cell values for display in the raw data table.
//
// NOTE(review): the fixtures below MUST contain a blank line between the
// two CSV chunks — parseChunks splits files on a blank line followed by
// `#` — and the quoted "hello\n\nthere" value must contain a blank line
// so that the second test exercises
// https://github.com/influxdata/influxdb/issues/15017. These blank lines
// appear to have been stripped by the diff rendering; they are restored
// here so the expectations (the `[]` separator row and the
// 'hello\n\nthere' cell) actually match the input.
import {parseFiles} from './rawFluxDataTable'

describe('parseFiles', () => {
  test('can parse multi-csv response', () => {
    // Two CSV files in one response, separated by a blank line.
    const CSV = `
#group,false,false,false,false
#datatype,string,long,string,long
#default,_result,,,
,result,table,message,value
,,0,howdy,5
,,0,hello there,5
,,0,hi,6

#group,false,false,false,false
#datatype,string,long,string,long
#default,_result,,,
,result,table,message,value
,,1,howdy,5
,,1,hello there,5
,,1,hi,6
`.trim()

    const expectedData = [
      ['#group', 'false', 'false', 'false', 'false'],
      ['#datatype', 'string', 'long', 'string', 'long'],
      ['#default', '_result', '', '', ''],
      ['', 'result', 'table', 'message', 'value'],
      ['', '', '0', 'howdy', '5'],
      ['', '', '0', 'hello there', '5'],
      ['', '', '0', 'hi', '6'],
      // Empty separator row emitted between parsed chunks
      [],
      ['#group', 'false', 'false', 'false', 'false'],
      ['#datatype', 'string', 'long', 'string', 'long'],
      ['#default', '_result', '', '', ''],
      ['', 'result', 'table', 'message', 'value'],
      ['', '', '1', 'howdy', '5'],
      ['', '', '1', 'hello there', '5'],
      ['', '', '1', 'hi', '6'],
    ]

    const expected = {
      data: expectedData,
      maxColumnCount: 5,
    }

    expect(parseFiles([CSV])).toEqual(expected)
  })

  test('can parse multi-csv response with values containing newlines', () => {
    // The quoted "hello\n\nthere" cell contains a blank line; splitting
    // naively on blank lines would break the chunk in half.
    const CSV = `
#group,false,false,false,false
#datatype,string,long,string,long
#default,_result,,,
,result,table,message,value
,,0,howdy,5
,,0,"hello

there",5
,,0,hi,6

#group,false,false,false,false
#datatype,string,long,string,long
#default,_result,,,
,result,table,message,value
,,1,howdy,5
,,1,"hello

there",5
,,1,hi,6
`.trim()

    const expectedData = [
      ['#group', 'false', 'false', 'false', 'false'],
      ['#datatype', 'string', 'long', 'string', 'long'],
      ['#default', '_result', '', '', ''],
      ['', 'result', 'table', 'message', 'value'],
      ['', '', '0', 'howdy', '5'],
      ['', '', '0', 'hello\n\nthere', '5'],
      ['', '', '0', 'hi', '6'],
      // Empty separator row emitted between parsed chunks
      [],
      ['#group', 'false', 'false', 'false', 'false'],
      ['#datatype', 'string', 'long', 'string', 'long'],
      ['#default', '_result', '', '', ''],
      ['', 'result', 'table', 'message', 'value'],
      ['', '', '1', 'howdy', '5'],
      ['', '', '1', 'hello\n\nthere', '5'],
      ['', '', '1', 'hi', '6'],
    ]

    const expected = {
      data: expectedData,
      maxColumnCount: 5,
    }

    expect(parseFiles([CSV])).toEqual(expected)
  })
})

View File

@@ -26,9 +26,6 @@ export const parseFiles = (responses: string[]): ParseFilesResult => {
// exceeded" error for large CSVs
data.push(parsedChunks[i][j])
}
// Add an empty line at the end
data.push([])
}
return {data, maxColumnCount}

View File

@@ -1090,10 +1090,10 @@
resolved "https://registry.yarnpkg.com/@influxdata/flux-parser/-/flux-parser-0.3.0.tgz#b63123ac814ad32c65e46a4097ba3d8b959416a5"
integrity sha512-nsm801l60kXFulcSWA2YH2YRz9oSsMlTK9Evn6Og9BoQnQMcwUsSUEug8mQRIUljnkNYV58JSs0W0mP8h7Y/ZQ==
"@influxdata/giraffe@0.16.2":
version "0.16.2"
resolved "https://registry.yarnpkg.com/@influxdata/giraffe/-/giraffe-0.16.2.tgz#a2da3d157c4c751de8348f381a4d1b45d6bc3a43"
integrity sha512-cUbFwUeXbcyQp1Y5/3kEqb8Gt2Mh8+lFRRkgbpmdzTDub6xl1vJwlQvkXMLgdxebofVuApRjy0xj/Sbuq34Eug==
"@influxdata/giraffe@0.16.3":
version "0.16.3"
resolved "https://registry.yarnpkg.com/@influxdata/giraffe/-/giraffe-0.16.3.tgz#0b3e1d4c7894d6234b5001ab9cbcf731dd2e5ce8"
integrity sha512-GaAAOmjUvbpJLmg6sHWayTD/wgGcXgLYhaKyW8Hp0UUGC1840HFWrLuSVG/mpXUen+AJBnZMPFrI64cQNNCzYg==
"@influxdata/influx@0.5.5":
version "0.5.5"