fix(logs/fetch): add execute queries (#1137)

* Update executeQueryAsync to fetch

* Update actions to use executeQueryAsync

* Update flux transform to respect field ordering

* Fix fluxResponse tests

* Add column ordering test case
Delmer 2018-10-22 16:06:02 -04:00 committed by GitHub
parent f4b3e10837
commit 7acd6599a8
5 changed files with 105 additions and 24 deletions


@@ -16,7 +16,7 @@ import {
 } from 'src/dashboards/apis/v2/view'
 import {getSource} from 'src/sources/apis/v2'
 import {getBuckets} from 'src/shared/apis/v2/buckets'
-import {getTimeSeries} from 'src/flux/apis'
+import {executeQueryAsync} from 'src/logs/api/v2'
 
 // Data
 import {logViewData as defaultLogView} from 'src/logs/data/logViewData'
@@ -447,17 +447,14 @@
   const {
     links: {query: queryLink},
   } = currentSource
 
-  const response = await getTimeSeries(queryLink, query)
+  const response = await executeQueryAsync(queryLink, query)
 
-  if (
-    _.isEmpty(response.tables) ||
-    getDeep(response, 'tables.0.name', null) === 'Error'
-  ) {
+  if (response.status !== SearchStatus.Loaded) {
     return
   }
   const columnNames: string[] = tableQueryConfig.fields.map(f => f.alias)
   const logSeries: TableData = transformFluxLogsResponse(
-    response,
+    response.tables,
     columnNames
   )
 
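For context, a minimal sketch (not part of this commit) of the call pattern the action now follows: run the query, bail out unless the status is Loaded, then hand the raw tables and the desired column list to the transform. The helper name fetchLogTable is invented for illustration; import paths follow the modules touched in this commit.

import {executeQueryAsync, QueryResponse} from 'src/logs/api/v2'
import {transformFluxLogsResponse, TableData} from 'src/logs/utils'
import {SearchStatus} from 'src/types/logs'

// Hypothetical helper, not in the diff: fetches a Flux query result and
// returns table data only when the response loaded cleanly.
export const fetchLogTable = async (
  queryLink: string,
  query: string,
  columnNames: string[]
): Promise<TableData | null> => {
  const response: QueryResponse = await executeQueryAsync(queryLink, query)

  // The status is computed inside executeQueryAsync, so callers no longer
  // inspect table names for the 'Error' sentinel themselves.
  if (response.status !== SearchStatus.Loaded) {
    return null
  }

  return transformFluxLogsResponse(response.tables, columnNames)
}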


@@ -1,2 +1,57 @@
-// TODO: Replace this with calls to getTimeSeries
-export const executeQueryAsync = async (): Promise<void> => {}
+// Utils
+import AJAX from 'src/utils/ajax'
+import {parseResponse} from 'src/shared/parsing/flux/response'
+
+// Types
+import {InfluxLanguages} from 'src/types/v2/dashboards'
+import {FluxTable} from 'src/types'
+import {SearchStatus} from 'src/types/logs'
+
+export interface QueryResponse {
+  tables: FluxTable[]
+  status: SearchStatus
+}
+
+export const executeQueryAsync = async (
+  link: string,
+  query: string
+): Promise<QueryResponse> => {
+  try {
+    const dialect = {
+      header: true,
+      annotations: ['datatype', 'group', 'default'],
+      delimiter: ',',
+    }
+
+    const {data} = await AJAX({
+      method: 'POST',
+      url: link,
+      data: {
+        type: InfluxLanguages.Flux,
+        query,
+        dialect,
+      },
+    })
+
+    const tables = parseResponse(data)
+    const status = responseStatus(tables)
+
+    return {tables, status}
+  } catch (error) {
+    console.error(error)
+    return {
+      tables: [],
+      status: SearchStatus.SourceError,
+    }
+  }
+}
+
+const responseStatus = (tables: FluxTable[]): SearchStatus => {
+  if (tables.length === 0) {
+    return SearchStatus.NoResults
+  } else if (tables[0].name === 'Error') {
+    return SearchStatus.SourceError
+  } else {
+    return SearchStatus.Loaded
+  }
+}
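The responseStatus helper replaces the ad-hoc emptiness and 'Error'-name check that fetchTailAsync performed inline (first hunk above). As a rough illustration of the mapping it encodes, using invented table stubs in which only the name field matters:

import {FluxTable} from 'src/types'
import {SearchStatus} from 'src/types/logs'

// Invented stubs for illustration; real FluxTable values come from parseResponse.
const errorTable = {name: 'Error'} as FluxTable
const resultTable = {name: '_result'} as FluxTable

// Table shapes paired with the QueryResponse.status they should yield:
const expectedStatuses: Array<[FluxTable[], SearchStatus]> = [
  [[], SearchStatus.NoResults], // empty parse result
  [[errorTable], SearchStatus.SourceError], // server reported a query error
  [[resultTable], SearchStatus.Loaded], // ordinary result tables
]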


@@ -1,6 +1,7 @@
-import {GetTimeSeriesResult} from 'src/flux/apis/index'
+import {QueryResponse} from 'src/logs/api/v2'
+import {SearchStatus} from 'src/types/logs'
 
-export const fluxResponse: GetTimeSeriesResult = {
+export const fluxResponse: QueryResponse = {
   tables: [
     {
       id: 'd7cc1e08-4b17-4309-885b-6798402bdae2',
@@ -94,5 +95,5 @@ export const fluxResponse: GetTimeSeriesResult = {
       },
     },
   ],
-  didTruncate: false,
+  status: SearchStatus.Loaded,
 }


@@ -2,6 +2,8 @@ import {transformFluxLogsResponse} from 'src/logs/utils'
 import {fluxResponse} from 'src/logs/utils/fixtures/fluxResponse'
 
 describe('Logs.transformFluxLogsResponse', () => {
+  const {tables: fluxResponseTables} = fluxResponse
+
   it('can transform a Flux server response to a TableData shape', () => {
     const columnNamesToExtract = [
       'appname',
@@ -13,7 +15,10 @@ describe('Logs.transformFluxLogsResponse', () => {
       'timestamp',
     ]
 
-    const actual = transformFluxLogsResponse(fluxResponse, columnNamesToExtract)
+    const actual = transformFluxLogsResponse(
+      fluxResponseTables,
+      columnNamesToExtract
+    )
     const expected = {
       columns: [
         'appname',
@@ -86,7 +91,10 @@ describe('Logs.transformFluxLogsResponse', () => {
       'timestamp',
     ]
 
-    const actual = transformFluxLogsResponse(fluxResponse, columnNamesToExtract)
+    const actual = transformFluxLogsResponse(
+      fluxResponseTables,
+      columnNamesToExtract
+    )
     const expected = {
       columns: [
         'facility',
@@ -142,4 +150,25 @@
 
     expect(actual).toEqual(expected)
   })
+
+  it('can extract in the specified column ordering', () => {
+    const columnNamesToExtract = ['host', 'facility']
+
+    const actual = transformFluxLogsResponse(
+      fluxResponseTables,
+      columnNamesToExtract
+    )
+    const expected = {
+      columns: ['host', 'facility'],
+      values: [
+        ['user.local', 'NTP subsystem'],
+        ['user.local', 'cron'],
+        ['user.local', 'cron'],
+        ['user.local', 'lpr'],
+        ['user.local', 'lpr'],
+      ],
+    }
+
+    expect(actual).toEqual(expected)
+  })
 })


@@ -1,12 +1,12 @@
 import moment from 'moment'
 import _ from 'lodash'
 
-import {GetTimeSeriesResult} from 'src/flux/apis/index'
-
 import {DEFAULT_TIME_FORMAT} from 'src/logs/constants'
-import {TimeSeriesValue} from 'src/types/series'
 import {getDeep} from 'src/utils/wrappers'
 
+import {FluxTable} from 'src/types'
+import {TimeSeriesValue} from 'src/types/series'
+
 export interface TableData {
   columns: string[]
   values: TimeSeriesValue[][]
@@ -17,11 +17,9 @@ export const formatTime = (time: number): string => {
 }
 
 export const transformFluxLogsResponse = (
-  response: GetTimeSeriesResult,
+  tables: FluxTable[],
   columnNames: string[]
 ): TableData => {
-  const {tables} = response
-
   const values: TimeSeriesValue[][] = []
   const columns: string[] = []
   const indicesToKeep = []
@@ -29,10 +27,11 @@ export const transformFluxLogsResponse = (
   const rows = getDeep<TimeSeriesValue[][]>(tables, '0.data', [])
   const columnNamesRow = getDeep<string[]>(tables, '0.data.0', [])
 
-  for (let i = 0; i < columnNamesRow.length; i++) {
-    if (columnNames.includes(columnNamesRow[i])) {
-      indicesToKeep.push(i)
-      columns.push(columnNamesRow[i])
+  for (let i = 0; i < columnNames.length; i++) {
+    const columnIndex = columnNamesRow.indexOf(columnNames[i])
+    if (columnIndex !== -1) {
+      indicesToKeep.push(columnIndex)
+      columns.push(columnNames[i])
     }
   }
 
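The rewritten loop walks the requested columnNames rather than the response's header row, which is what makes the output columns follow the caller's ordering (exercised by the new test case above). A standalone sketch of that selection step on a toy header row; pickInRequestedOrder is an illustrative helper, not part of the module:

// Mirrors the column-selection loop above, on plain arrays.
const pickInRequestedOrder = (
  headerRow: string[],
  requested: string[]
): {columns: string[]; indicesToKeep: number[]} => {
  const columns: string[] = []
  const indicesToKeep: number[] = []

  for (const name of requested) {
    const columnIndex = headerRow.indexOf(name)
    if (columnIndex !== -1) {
      indicesToKeep.push(columnIndex)
      columns.push(name)
    }
  }

  return {columns, indicesToKeep}
}

// Header order is facility-first, but the requested order wins:
pickInRequestedOrder(['facility', 'host', 'severity'], ['host', 'facility'])
// => {columns: ['host', 'facility'], indicesToKeep: [1, 0]}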