Refactor Flux visualization parsing
- Rewrite `getTablesByTime` as `spreadTables`
  - Return a single, wide, time-indexed table, which is a more useful format for consumers
  - Also return structured metadata about series, in preparation for improvements to the hover legend
  - Simplified and more efficient code
- Remove unused `parseValues` utility
- Move `results.test.ts` test cases to `response.test.ts`
- Rewrite `fluxToSingleStat` as `lastValue`, with a simplified call signature
- Fix bug where gauge visualizations would alternate which series they displayed a value for when refreshing, dependent on the order of the series in each refreshed response
- Simplify `fluxTablesToDygraph` implementation
- Memoize `lastValue` calls in the gauge and single stat visualizations, to avoid expensive parsing on each rerender if possible
parent 53308dae3b, commit 0633ad0c67
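For orientation, a rough consumer-side sketch of the new parsing helpers introduced in this change (not part of the diff itself). The import paths and the `spreadTables`/`lastValue` shapes come from the files added below; the `summarize` function name, the `csv` argument, and the logging are hypothetical stand-ins.

import {parseResponse} from 'src/shared/parsing/flux/response'
import {spreadTables} from 'src/shared/parsing/flux/spreadTables'
import {lastValue} from 'src/shared/parsing/flux/lastValue'

// Hypothetical consumer: `csv` is a raw Flux response body
const summarize = (csv: string) => {
  const fluxTables = parseResponse(csv)

  // One wide, time-indexed table plus structured metadata about each series
  const {table, seriesDescriptions} = spreadTables(fluxTables)

  for (const {key, metaColumns} of seriesDescriptions) {
    // metaColumns holds the group key columns plus the result name, which is
    // the data a hover legend would label each series with
    console.log(key, metaColumns)
  }

  // Series keys are sorted before one is picked, so the reported value no
  // longer alternates between series across refreshes
  const latest = lastValue(fluxTables)

  return {table, latest}
}

The determinism comes from `lastValue` sorting its series keys before choosing one, which is what fixes the alternating gauge value described in the commit message.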
@@ -27,7 +27,6 @@ class DygraphTransformation extends PureComponent<
     this.state = {
       labels: [],
       dygraphsData: [],
-      nonNumericColumns: [],
     }
   }

@@ -1,12 +1,13 @@
 // Libraries
 import React, {PureComponent} from 'react'
+import memoizeOne from 'memoize-one'
 import _ from 'lodash'
 
 // Components
 import Gauge from 'src/shared/components/Gauge'
 
 // Parsing
-import getLastValues from 'src/shared/parsing/flux/fluxToSingleStat'
+import {lastValue} from 'src/shared/parsing/flux/lastValue'
 
 // Types
 import {FluxTable} from 'src/types'

@@ -21,9 +22,14 @@ interface Props {
 
 @ErrorHandling
 class GaugeChart extends PureComponent<Props> {
+  private lastValue = memoizeOne(lastValue)
+
   public render() {
+    const {tables} = this.props
     const {colors, prefix, suffix, decimalPlaces} = this.props.properties
 
+    const lastValue = this.lastValue(tables) || 0
+
     return (
       <div className="single-stat">
         <Gauge

@@ -32,20 +38,12 @@ class GaugeChart extends PureComponent<Props> {
           colors={colors}
           prefix={prefix}
           suffix={suffix}
-          gaugePosition={this.lastValueForGauge}
+          gaugePosition={lastValue}
           decimalPlaces={decimalPlaces}
         />
       </div>
     )
   }
-
-  private get lastValueForGauge(): number {
-    const {tables} = this.props
-    const {values} = getLastValues(tables)
-    const lastValue = _.get(values, 0, 0)
-
-    return Number(lastValue)
-  }
 }
 
 export default GaugeChart

@@ -1,12 +1,13 @@
 // Libraries
 import React, {PureComponent} from 'react'
+import memoizeOne from 'memoize-one'
 import _ from 'lodash'
 
 // Components
 import EmptyGraphMessage from 'src/shared/components/EmptyGraphMessage'
 
 // Parsing
-import getLastValues from 'src/shared/parsing/flux/fluxToSingleStat'
+import {lastValue} from 'src/shared/parsing/flux/lastValue'
 
 // Types
 import {FluxTable} from 'src/types'

@@ -20,8 +21,11 @@ interface Props {
 }
 
 export default class SingleStatTransform extends PureComponent<Props> {
+  private lastValue = memoizeOne(lastValue)
+
   public render() {
-    const lastValue = +this.lastValue
+    const {tables} = this.props
+    const lastValue = this.lastValue(tables)
 
     if (!_.isFinite(lastValue)) {
       return <EmptyGraphMessage message={NON_NUMERIC_ERROR} />

@@ -29,17 +33,4 @@ export default class SingleStatTransform extends PureComponent<Props> {
 
     return this.props.children(lastValue)
   }
-
-  private get lastValue(): number {
-    const {tables} = this.props
-    const {series, values} = getLastValues(tables)
-    const firstAlphabeticalSeriesName = _.sortBy(series)[0]
-
-    const firstAlphabeticalIndex = _.indexOf(
-      series,
-      firstAlphabeticalSeriesName
-    )
-
-    return values[firstAlphabeticalIndex]
-  }
 }

@@ -168,7 +168,7 @@ class Dygraph extends Component<Props, State> {
             <Crosshair dygraph={this.dygraph} />
           </div>
         )}
-        {this.nestedGraph && React.cloneElement(this.nestedGraph)}
+        {this.nestedGraph}
        <div
          id={`graph-ref-${viewID}`}
          className="dygraph-child-container"

@@ -188,11 +188,9 @@ class Dygraph extends Component<Props, State> {
   private get nestedGraph(): JSX.Element {
     const {children} = this.props
 
-    if (children) {
-      if (children[0]) {
-        return children[0]
-      }
-
+    if (children && children[0]) {
+      return children[0]
+    } else if (children) {
       return children as JSX.Element
     }
 

@@ -0,0 +1,48 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`spreadTables it spreads multiple series into a single table 1`] = `
+Object {
+  "seriesDescriptions": Array [
+    Object {
+      "key": "_value[result=max][_field=active][_measurement=mem][host=oox4k.local]",
+      "metaColumns": Object {
+        "_field": "active",
+        "_measurement": "mem",
+        "host": "oox4k.local",
+        "result": "max",
+      },
+      "valueColumnIndex": 6,
+      "valueColumnName": "_value",
+    },
+    Object {
+      "key": "_value[result=min][_field=active][_measurement=mem][host=oox4k.local]",
+      "metaColumns": Object {
+        "_field": "active",
+        "_measurement": "mem",
+        "host": "oox4k.local",
+        "result": "min",
+      },
+      "valueColumnIndex": 6,
+      "valueColumnName": "_value",
+    },
+  ],
+  "table": Object {
+    "2018-12-10T18:29:48Z": Object {
+      "_value[result=min][_field=active][_measurement=mem][host=oox4k.local]": 4589981696,
+    },
+    "2018-12-10T18:29:58Z": Object {
+      "_value[result=max][_field=active][_measurement=mem][host=oox4k.local]": 4906213376,
+    },
+    "2018-12-10T18:40:18Z": Object {
+      "_value[result=min][_field=active][_measurement=mem][host=oox4k.local]": 4318040064,
+    },
+    "2018-12-10T18:54:08Z": Object {
+      "_value[result=max][_field=active][_measurement=mem][host=oox4k.local]": 5860683776,
+    },
+    "2018-12-10T19:11:58Z": Object {
+      "_value[result=max][_field=active][_measurement=mem][host=oox4k.local]": 5115428864,
+      "_value[result=min][_field=active][_measurement=mem][host=oox4k.local]": 4131692544,
+    },
+  },
+}
+`;

@@ -23,10 +23,10 @@ describe('fluxTablesToDygraph', () => {
     const fluxTables = parseResponse(MISMATCHED)
     const actual = fluxTablesToDygraph(fluxTables)
     const expected = [
-      [new Date('2018-06-04T17:12:25Z'), 1, null],
-      [new Date('2018-06-04T17:12:35Z'), 2, null],
-      [new Date('2018-06-05T17:12:25Z'), null, 10],
-      [new Date('2018-06-05T17:12:35Z'), null, 11],
+      [new Date('2018-06-04T17:12:25Z'), 1, undefined],
+      [new Date('2018-06-04T17:12:35Z'), 2, undefined],
+      [new Date('2018-06-05T17:12:25Z'), undefined, 10],
+      [new Date('2018-06-05T17:12:35Z'), undefined, 11],
     ]
 
     expect(actual.dygraphsData).toEqual(expected)

@@ -39,16 +39,15 @@ describe('fluxTablesToDygraph', () => {
       labels: [
         'time',
         'mean_usage_idle[result=0][_measurement=cpu]',
-        'mean_usage_user[result=0][_measurement=cpu]',
         'mean_usage_idle[result=0][_measurement=mem]',
+        'mean_usage_user[result=0][_measurement=cpu]',
         'mean_usage_user[result=0][_measurement=mem]',
       ],
       dygraphsData: [
-        [new Date('2018-09-10T16:54:37Z'), 85, 10, 8, 1],
-        [new Date('2018-09-10T16:54:38Z'), 87, 7, 9, 2],
-        [new Date('2018-09-10T16:54:39Z'), 89, 5, 10, 3],
+        [new Date('2018-09-10T16:54:37Z'), 85, 8, 10, 1],
+        [new Date('2018-09-10T16:54:38Z'), 87, 9, 7, 2],
+        [new Date('2018-09-10T16:54:39Z'), 89, 10, 5, 3],
       ],
-      nonNumericColumns: [],
     }
 
     expect(actual).toEqual(expected)

@@ -67,7 +66,6 @@ describe('fluxTablesToDygraph', () => {
         [new Date('2018-09-10T16:54:37Z'), 85, 8],
         [new Date('2018-09-10T16:54:39Z'), 89, 10],
       ],
-      nonNumericColumns: ['my_fun_col'],
     }
 
     expect(actual).toEqual(expected)

@@ -94,17 +92,16 @@ describe('fluxTablesToDygraph', () => {
     const actual = fluxTablesToDygraph(fluxTables)
     const expected = {
       dygraphsData: [
-        [new Date('2018-12-10T18:29:48.000Z'), null, 4589981696],
-        [new Date('2018-12-10T18:29:58.000Z'), 4906213376, null],
-        [new Date('2018-12-10T18:40:18.000Z'), null, 4318040064],
-        [new Date('2018-12-10T18:54:08.000Z'), 5860683776, null],
+        [new Date('2018-12-10T18:29:48.000Z'), undefined, 4589981696],
+        [new Date('2018-12-10T18:29:58.000Z'), 4906213376, undefined],
+        [new Date('2018-12-10T18:40:18.000Z'), undefined, 4318040064],
+        [new Date('2018-12-10T18:54:08.000Z'), 5860683776, undefined],
       ],
       labels: [
         'time',
         '_value[result=0][_field=active][_measurement=mem][host=oox4k.local]',
         '_value[result=1][_field=active][_measurement=mem][host=oox4k.local]',
       ],
-      nonNumericColumns: [],
     }
 
     expect(actual).toEqual(expected)

@@ -2,7 +2,7 @@
 import _ from 'lodash'
 
 // Utils
-import {parseTablesByTime} from 'src/shared/parsing/flux/parseTablesByTime'
+import {spreadTables} from 'src/shared/parsing/flux/spreadTables'
 
 // Types
 import {FluxTable} from 'src/types'

@@ -11,46 +11,22 @@ import {DygraphValue} from 'src/external/dygraph'
 export interface FluxTablesToDygraphResult {
   labels: string[]
   dygraphsData: DygraphValue[][]
-  nonNumericColumns: string[]
 }
 
 export const fluxTablesToDygraph = (
   tables: FluxTable[]
 ): FluxTablesToDygraphResult => {
-  const {tablesByTime, nonNumericColumns, allColumnNames} = parseTablesByTime(
-    tables
-  )
+  const {table, seriesDescriptions} = spreadTables(tables)
+  const labels = seriesDescriptions.map(d => d.key)
 
-  const dygraphValuesByTime: {[k: string]: DygraphValue[]} = {}
-  const DATE_INDEX = 0
-  const DATE_INDEX_OFFSET = 1
+  labels.sort()
 
-  for (const table of tablesByTime) {
-    for (const time of Object.keys(table)) {
-      dygraphValuesByTime[time] = Array(
-        allColumnNames.length + DATE_INDEX_OFFSET
-      ).fill(null)
-    }
-  }
+  const dygraphsData = Object.keys(table).map(time => [
+    new Date(time),
+    ...labels.map(label => table[time][label]),
+  ])
 
-  for (const table of tablesByTime) {
-    for (const [date, values] of Object.entries(table)) {
-      dygraphValuesByTime[date][DATE_INDEX] = new Date(date)
+  dygraphsData.sort((a, b) => (a[0] as any) - (b[0] as any))
 
-      for (const [seriesName, value] of Object.entries(values)) {
-        const i = allColumnNames.indexOf(seriesName) + DATE_INDEX_OFFSET
-        dygraphValuesByTime[date][i] = Number(value)
-      }
-    }
-  }
-
-  const dygraphsData = _.sortBy(Object.values(dygraphValuesByTime), ([date]) =>
-    Date.parse(date as string)
-  )
-
-  return {
-    labels: ['time', ...allColumnNames],
-    dygraphsData,
-    nonNumericColumns: _.uniq(nonNumericColumns),
-  }
+  return {dygraphsData, labels: ['time', ...labels]}
 }

@@ -1,28 +0,0 @@
-import _ from 'lodash'
-import {FluxTable} from 'src/types'
-import {parseTablesByTime} from 'src/shared/parsing/flux/parseTablesByTime'
-
-export interface LastValues {
-  values: number[]
-  series: string[]
-}
-
-export default (tables: FluxTable[]): LastValues => {
-  const {tablesByTime} = parseTablesByTime(tables)
-
-  const lastValues = _.reduce(
-    tablesByTime,
-    (acc, table) => {
-      const lastTime = _.last(Object.keys(table))
-      const values = table[lastTime]
-      _.forEach(values, (value, series) => {
-        acc.series.push(series)
-        acc.values.push(value)
-      })
-      return acc
-    },
-    {values: [], series: []}
-  )
-
-  return lastValues
-}

@@ -0,0 +1,44 @@
+import {lastValue} from 'src/shared/parsing/flux/lastValue'
+import {parseResponse} from 'src/shared/parsing/flux/response'
+
+describe('lastValue', () => {
+  test('the last value returned does not depend on the ordering of series', () => {
+    const respA = `#group,false,false,false,false,false,false,true,true,true
+#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
+#default,0,,,,,,,,
+,result,table,_start,_stop,_time,_value,_field,_measurement,host
+,,0,2018-12-10T18:21:52.748859Z,2018-12-10T18:30:00Z,2018-12-10T18:29:58Z,1,active,mem,oox4k.local
+,,0,2018-12-10T18:30:00Z,2018-12-10T19:00:00Z,2018-12-10T18:54:08Z,2,active,mem,oox4k.local
+
+#group,false,false,false,false,false,false,true,true,true
+#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
+#default,1,,,,,,,,
+,result,table,_start,_stop,_time,_value,_field,_measurement,host
+,,0,2018-12-10T18:21:52.748859Z,2018-12-10T18:30:00Z,2018-12-10T18:29:48Z,3,active,mem,oox4k.local
+,,0,2018-12-10T18:30:00Z,2018-12-10T19:00:00Z,2018-12-10T18:40:18Z,4,active,mem,oox4k.local
+
+`
+
+    const respB = `#group,false,false,false,false,false,false,true,true,true
+#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
+#default,1,,,,,,,,
+,result,table,_start,_stop,_time,_value,_field,_measurement,host
+,,0,2018-12-10T18:21:52.748859Z,2018-12-10T18:30:00Z,2018-12-10T18:29:48Z,3,active,mem,oox4k.local
+,,0,2018-12-10T18:30:00Z,2018-12-10T19:00:00Z,2018-12-10T18:40:18Z,4,active,mem,oox4k.local
+
+#group,false,false,false,false,false,false,true,true,true
+#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
+#default,0,,,,,,,,
+,result,table,_start,_stop,_time,_value,_field,_measurement,host
+,,0,2018-12-10T18:21:52.748859Z,2018-12-10T18:30:00Z,2018-12-10T18:29:58Z,1,active,mem,oox4k.local
+,,0,2018-12-10T18:30:00Z,2018-12-10T19:00:00Z,2018-12-10T18:54:08Z,2,active,mem,oox4k.local
+
+`
+
+    const lastValueA = lastValue(parseResponse(respA))
+    const lastValueB = lastValue(parseResponse(respB))
+
+    expect(lastValueA).toEqual(2)
+    expect(lastValueB).toEqual(2)
+  })
+})

@@ -0,0 +1,21 @@
+import {spreadTables} from 'src/shared/parsing/flux/spreadTables'
+
+import {FluxTable} from 'src/types'
+
+export const lastValue = (tables: FluxTable[]): number => {
+  if (tables.every(table => !table.data.length)) {
+    return null
+  }
+
+  const {table, seriesDescriptions} = spreadTables(tables)
+  const seriesKeys = seriesDescriptions.map(d => d.key)
+  const times = Object.keys(table)
+
+  times.sort()
+  seriesKeys.sort()
+
+  const lastTime = times[times.length - 1]
+  const firstSeriesKey = seriesKeys[0]
+
+  return table[lastTime][firstSeriesKey]
+}

@@ -1,105 +0,0 @@
-import {FluxTable} from 'src/types'
-
-const getTimeIndex = header => {
-  let timeIndex = header.indexOf('_time')
-
-  if (timeIndex >= 0) {
-    return timeIndex
-  }
-
-  timeIndex = header.indexOf('_start')
-  if (timeIndex >= 0) {
-    return timeIndex
-  }
-
-  timeIndex = header.indexOf('_end')
-  if (timeIndex >= 0) {
-    return timeIndex
-  }
-
-  return -1
-}
-
-const COLUMN_BLACKLIST = new Set([
-  '_time',
-  'result',
-  'table',
-  '_start',
-  '_stop',
-  '',
-])
-
-const NUMERIC_DATATYPES = ['double', 'long', 'int', 'float']
-
-interface TableByTime {
-  [time: string]: {[columnName: string]: string}
-}
-
-interface ParseTablesByTimeResult {
-  tablesByTime: TableByTime[]
-  allColumnNames: string[]
-  nonNumericColumns: string[]
-}
-
-export const parseTablesByTime = (
-  tables: FluxTable[]
-): ParseTablesByTimeResult => {
-  const allColumnNames = []
-  const nonNumericColumns = []
-
-  const tablesByTime = tables.map(table => {
-    const header = table.data[0] as string[]
-    const columnNames: {[k: number]: string} = {}
-
-    for (let i = 0; i < header.length; i++) {
-      const columnName = header[i]
-      const dataType = table.dataTypes[columnName]
-
-      if (COLUMN_BLACKLIST.has(columnName)) {
-        continue
-      }
-
-      if (table.groupKey[columnName]) {
-        continue
-      }
-
-      if (!NUMERIC_DATATYPES.includes(dataType)) {
-        nonNumericColumns.push(columnName)
-        continue
-      }
-
-      const uniqueColumnName = Object.entries(table.groupKey).reduce(
-        (acc, [k, v]) => acc + `[${k}=${v}]`,
-        `${columnName}[result=${table.result}]`
-      )
-
-      columnNames[i] = uniqueColumnName
-      allColumnNames.push(uniqueColumnName)
-    }
-
-    const timeIndex = getTimeIndex(header)
-
-    if (timeIndex < 0) {
-      throw new Error('Could not find time index in FluxTable')
-    }
-
-    const result = {}
-
-    for (let i = 1; i < table.data.length; i++) {
-      const row = table.data[i]
-      const time = row[timeIndex].toString()
-
-      result[time] = Object.entries(columnNames).reduce(
-        (acc, [valueIndex, columnName]) => ({
-          ...acc,
-          [columnName]: row[valueIndex],
-        }),
-        {}
-      )
-    }
-
-    return result
-  })
-
-  return {nonNumericColumns, tablesByTime, allColumnNames}
-}

@@ -1,8 +1,21 @@
 import {parseResponse} from 'src/shared/parsing/flux/response'
+import {
+  RESPONSE_NO_METADATA,
+  RESPONSE_METADATA,
+  MULTI_SCHEMA_RESPONSE,
+  EXPECTED_COLUMNS,
+  TRUNCATED_RESPONSE,
+} from 'src/shared/parsing/flux/constants'
 
 describe('parseResponse', () => {
-  test('uses the result name from the result column if present', () => {
-    const resp = `#group,false,false,false,false,false,false,true,true,true
+  test('parseResponse into the right number of tables', () => {
+    const result = parseResponse(MULTI_SCHEMA_RESPONSE)
+    expect(result).toHaveLength(4)
+  })
+
+  describe('result name', () => {
+    test('uses the result name from the result column if present', () => {
+      const resp = `#group,false,false,false,false,false,false,true,true,true
 #datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
 #default,,,,,,,,,
 ,result,table,_start,_stop,_time,_value,_field,_measurement,host

@@ -12,13 +25,13 @@ describe('parseResponse', () => {
 
 `
 
-    const actual = parseResponse(resp)
+      const actual = parseResponse(resp)
 
-    expect(actual[0].result).toBe('max')
-  })
+      expect(actual[0].result).toBe('max')
+    })
 
-  test('uses the result name from the default annotation if result columns are empty', () => {
-    const resp = `#group,false,false,false,false,false,false,true,true,true
+    test('uses the result name from the default annotation if result columns are empty', () => {
+      const resp = `#group,false,false,false,false,false,false,true,true,true
 #datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
 #default,max,,,,,,,,
 ,result,table,_start,_stop,_time,_value,_field,_measurement,host

@@ -36,9 +49,45 @@ describe('parseResponse', () => {
 
 
 `
-    const actual = parseResponse(resp)
+      const actual = parseResponse(resp)
 
-    expect(actual[0].result).toBe('max')
-    expect(actual[1].result).toBe('min')
-  })
-})
+      expect(actual).toHaveLength(2)
+      expect(actual[0].result).toBe('max')
+      expect(actual[1].result).toBe('min')
+    })
+  })
+
+  describe('headers', () => {
+    test('throws when no metadata is present', () => {
+      expect(() => {
+        parseResponse(RESPONSE_NO_METADATA)
+      }).toThrow()
+    })
+
+    test('can parse headers when metadata is present', () => {
+      const actual = parseResponse(RESPONSE_METADATA)[0].data[0]
+      expect(actual).toEqual(EXPECTED_COLUMNS)
+    })
+  })
+
+  describe('group key', () => {
+    test('parses the group key properly', () => {
+      const actual = parseResponse(MULTI_SCHEMA_RESPONSE)[0].groupKey
+      const expected = {
+        _field: 'usage_guest',
+        _measurement: 'cpu',
+        cpu: 'cpu-total',
+        host: 'WattsInfluxDB',
+      }
+      expect(actual).toEqual(expected)
+    })
+  })
+
+  describe('partial responses', () => {
+    test('should discard tables without any non-annotation rows', () => {
+      const actual = parseResponse(TRUNCATED_RESPONSE)
+
+      expect(actual).toHaveLength(2)
+    })
+  })
+})

|
@ -1,49 +0,0 @@
|
|||
import {parseResponse} from 'src/shared/parsing/flux/response'
|
||||
import {
|
||||
RESPONSE_NO_METADATA,
|
||||
RESPONSE_METADATA,
|
||||
MULTI_SCHEMA_RESPONSE,
|
||||
EXPECTED_COLUMNS,
|
||||
TRUNCATED_RESPONSE,
|
||||
} from 'src/shared/parsing/flux/constants'
|
||||
|
||||
describe('Flux results parser', () => {
|
||||
it('parseResponse into the right number of tables', () => {
|
||||
const result = parseResponse(MULTI_SCHEMA_RESPONSE)
|
||||
expect(result).toHaveLength(4)
|
||||
})
|
||||
|
||||
describe('headers', () => {
|
||||
it('throws when no metadata is present', () => {
|
||||
expect(() => {
|
||||
parseResponse(RESPONSE_NO_METADATA)
|
||||
}).toThrow()
|
||||
})
|
||||
|
||||
it('can parse headers when metadata is present', () => {
|
||||
const actual = parseResponse(RESPONSE_METADATA)[0].data[0]
|
||||
expect(actual).toEqual(EXPECTED_COLUMNS)
|
||||
})
|
||||
})
|
||||
|
||||
describe('group key', () => {
|
||||
it('parses the group key properly', () => {
|
||||
const actual = parseResponse(MULTI_SCHEMA_RESPONSE)[0].groupKey
|
||||
const expected = {
|
||||
_field: 'usage_guest',
|
||||
_measurement: 'cpu',
|
||||
cpu: 'cpu-total',
|
||||
host: 'WattsInfluxDB',
|
||||
}
|
||||
expect(actual).toEqual(expected)
|
||||
})
|
||||
})
|
||||
|
||||
describe('partial responses', () => {
|
||||
it('should discard tables without any non-annotation rows', () => {
|
||||
const actual = parseResponse(TRUNCATED_RESPONSE)
|
||||
|
||||
expect(actual).toHaveLength(2)
|
||||
})
|
||||
})
|
||||
})
|
|
@@ -0,0 +1,28 @@
+import {parseResponse} from 'src/shared/parsing/flux/response'
+import {spreadTables} from 'src/shared/parsing/flux/spreadTables'
+
+describe('spreadTables', () => {
+  test('it spreads multiple series into a single table', () => {
+    const resp = `#group,false,false,false,false,false,false,true,true,true
+#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
+#default,max,,,,,,,,
+,result,table,_start,_stop,_time,_value,_field,_measurement,host
+,,0,2018-12-10T18:21:52.748859Z,2018-12-10T18:30:00Z,2018-12-10T18:29:58Z,4906213376,active,mem,oox4k.local
+,,0,2018-12-10T18:30:00Z,2018-12-10T19:00:00Z,2018-12-10T18:54:08Z,5860683776,active,mem,oox4k.local
+,,0,2018-12-10T19:00:00Z,2018-12-10T19:21:52.748859Z,2018-12-10T19:11:58Z,5115428864,active,mem,oox4k.local
+
+#group,false,false,false,false,false,false,true,true,true
+#datatype,string,long,dateTime:RFC3339,dateTime:RFC3339,dateTime:RFC3339,long,string,string,string
+#default,min,,,,,,,,
+,result,table,_start,_stop,_time,_value,_field,_measurement,host
+,,0,2018-12-10T18:21:52.748859Z,2018-12-10T18:30:00Z,2018-12-10T18:29:48Z,4589981696,active,mem,oox4k.local
+,,0,2018-12-10T18:30:00Z,2018-12-10T19:00:00Z,2018-12-10T18:40:18Z,4318040064,active,mem,oox4k.local
+,,0,2018-12-10T19:00:00Z,2018-12-10T19:21:52.748859Z,2018-12-10T19:11:58Z,4131692544,active,mem,oox4k.local
+
+`
+
+    const result = spreadTables(parseResponse(resp))
+
+    expect(result).toMatchSnapshot()
+  })
+})

@@ -0,0 +1,133 @@
+import {FluxTable} from 'src/types'
+
+interface SeriesDescription {
+  // A key identifying a unique (column, table, result) triple for a particular
+  // Flux response—i.e. a single time series
+  key: string
+  // The name of the column that this series is extracted from (typically this
+  // is `_value`, but could be any column)
+  valueColumnName: string
+  // The index of the column that this series was extracted from
+  valueColumnIndex: number
+  // The names and values of columns in the group key, plus the result name.
+  // This provides the data for a user-recognizable label of the time series
+  metaColumns: {
+    [columnName: string]: string
+  }
+}
+
+interface SpreadTablesResult {
+  seriesDescriptions: SeriesDescription[]
+  table: {
+    [time: string]: {[seriesKey: string]: number}
+  }
+}
+
+// Given a collection of `FluxTable`s parsed from a single Flux response,
+// `spreadTables` will place each unique series found within the response into
+// a single table, indexed by time. This data munging operation is often
+// referred to as a “spread”, “cast”, “pivot”, or “unfold”.
+export const spreadTables = (tables: FluxTable[]): SpreadTablesResult => {
+  const result: SpreadTablesResult = {
+    table: {},
+    seriesDescriptions: [],
+  }
+
+  for (const table of tables) {
+    const header = table.data[0]
+
+    if (!header) {
+      continue
+    }
+
+    const seriesDescriptions = getSeriesDescriptions(table)
+    const timeIndex = getTimeIndex(header)
+
+    for (let i = 1; i < table.data.length; i++) {
+      const row = table.data[i]
+      const time = row[timeIndex]
+
+      for (const {key, valueColumnIndex} of seriesDescriptions) {
+        if (!result.table[time]) {
+          result.table[time] = {}
+        }
+
+        result.table[time][key] = Number(row[valueColumnIndex])
+      }
+    }
+
+    result.seriesDescriptions.push(...seriesDescriptions)
+  }
+
+  return result
+}
+
+const EXCLUDED_SERIES_COLUMNS = new Set([
+  '_time',
+  'result',
+  'table',
+  '_start',
+  '_stop',
+  '',
+])
+
+const NUMERIC_DATATYPES = new Set(['double', 'long', 'int', 'float'])
+
+const getSeriesDescriptions = (table: FluxTable): SeriesDescription[] => {
+  const seriesDescriptions = []
+  const header = table.data[0]
+
+  for (let i = 0; i < header.length; i++) {
+    const columnName = header[i]
+    const dataType = table.dataTypes[columnName]
+
+    if (EXCLUDED_SERIES_COLUMNS.has(columnName)) {
+      continue
+    }
+
+    if (table.groupKey[columnName]) {
+      continue
+    }
+
+    if (!NUMERIC_DATATYPES.has(dataType)) {
+      continue
+    }
+
+    const key = Object.entries(table.groupKey).reduce(
+      (acc, [k, v]) => acc + `[${k}=${v}]`,
+      `${columnName}[result=${table.result}]`
+    )
+
+    seriesDescriptions.push({
+      key,
+      valueColumnName: columnName,
+      valueColumnIndex: i,
+      metaColumns: {
+        ...table.groupKey,
+        result: table.result,
+      },
+    })
+  }
+
+  return seriesDescriptions
+}
+
+const getTimeIndex = header => {
+  let timeIndex = header.indexOf('_time')
+
+  if (timeIndex >= 0) {
+    return timeIndex
+  }
+
+  timeIndex = header.indexOf('_start')
+  if (timeIndex >= 0) {
+    return timeIndex
+  }
+
+  timeIndex = header.indexOf('_end')
+  if (timeIndex >= 0) {
+    return timeIndex
+  }
+
+  return -1
+}

@@ -1,25 +0,0 @@
-import parseValuesColumn from 'src/shared/parsing/flux/values'
-import {TAGS_RESPONSE} from 'src/shared/parsing/flux/constants'
-
-describe('tagKeys parser', () => {
-  it('returns no measurements for an empty results response', () => {
-    expect(parseValuesColumn('')).toEqual([])
-  })
-
-  it('returns the approriate tagKeys', () => {
-    const actual = parseValuesColumn(TAGS_RESPONSE)
-    const expected = [
-      '_field',
-      '_measurement',
-      'cpu',
-      'device',
-      'fstype',
-      'host',
-      'mode',
-      'name',
-      'path',
-    ]
-
-    expect(actual).toEqual(expected)
-  })
-})

@@ -1,28 +0,0 @@
-import _ from 'lodash'
-
-import {FluxTable} from 'src/types'
-import {parseResponse} from 'src/shared/parsing/flux/response'
-
-const parseValuesColumn = (resp: string): string[] => {
-  const results = parseResponse(resp)
-
-  if (results.length === 0) {
-    return []
-  }
-
-  const tags = results.reduce<string[]>((acc, result: FluxTable) => {
-    const colIndex = result.data[0].findIndex(header => header === '_value')
-
-    if (colIndex === -1) {
-      return [...acc]
-    }
-
-    const resultTags = result.data.slice(1).map(row => row[colIndex])
-
-    return [...acc, ...resultTags]
-  }, [])
-
-  return _.sortBy(tags, t => t.toLocaleLowerCase())
-}
-
-export default parseValuesColumn