Remove src/influxql folder

pull/13410/head
Deniz Kusefoglu 2019-04-12 16:51:20 -07:00
parent a446f413a3
commit 4b9305a473
3 changed files with 0 additions and 544 deletions

@@ -1,129 +0,0 @@
import _ from 'lodash'

// Renders a leaf expression (reference, literal, or wildcard) as InfluxQL text
// TODO make recursive
const exprStr = ({expr, val, type}) => {
  if (expr === 'reference') {
    if (val === 'time') {
      return val
    }
    return `"${val}"`
  } else if (expr === 'literal' || expr === '"string"') {
    if (type === 'regex') {
      return `${val}` // TODO add slashes `/${val}/`
    } else if (type === 'list') {
      throw new Error() // TODO list
    } else if (type === 'string') {
      return `'${val}'`
    } else {
      // types: boolean, number, integer, duration, time
      return val
    }
  } else if (expr === 'wildcard') {
    return val
  }
}

// Walks an expression tree, rendering binary operators and function calls,
// and falls through to exprStr for leaf nodes
const recurse = root => {
  const {expr} = root

  if (expr === 'binary') {
    const {op, lhs, rhs} = root
    return `${recurse(lhs)} ${op} ${recurse(rhs)}`
  } else if (expr === 'call') {
    const {name, args} = root
    if (!args) {
      return `${name}()`
    }
    return `${name}(${args.map(recurse).join(', ')})`
  }

  return exprStr(root)
}

// Serializes a query AST into an InfluxQL string, clause by clause
export const toString = ast => {
  const {fields, sources, condition, groupBy, orderbys, limits} = ast
  const strs = ['SELECT']

  // SELECT
  const flds = []
  for (const field of fields) {
    const {column, alias} = field
    const result = recurse(column)
    flds.push(alias ? `${result} AS "${alias}"` : result)
  }
  strs.push(flds.join(', '))

  // FROM
  if (sources.length) {
    strs.push('FROM')
    const srcs = []
    for (const source of sources) {
      // TODO subquery (type)
      const {database, retentionPolicy, name} = source
      srcs.push(`"${_.compact([database, retentionPolicy, name]).join('"."')}"`)
    }
    strs.push(srcs.join(', '))
  }

  // WHERE
  if (condition) {
    strs.push('WHERE')
    const result = recurse(condition)
    strs.push(result)
  }

  // GROUP BY
  if (groupBy) {
    strs.push('GROUP BY')
    const dimensions = []
    const {time, tags, fill} = groupBy

    if (time) {
      const {interval, offset} = time
      // _.compact([interval, offset]).join(', ')
      dimensions.push(`time(${_.compact([interval, offset]).join(', ')})`)
    }

    if (tags) {
      strs.push(dimensions.concat(tags).join(','))
    } else {
      strs.push(dimensions.join(','))
    }

    if (fill) {
      strs.push(`fill(${fill})`)
    }
  }

  // ORDER BY
  if (orderbys && orderbys.length) {
    strs.push('ORDER BY')
    strs.push(
      orderbys
        .map(({name, order}) => {
          return `${name} ${order === 'descending' ? 'DESC' : 'ASC'}`
        })
        .join(',')
    )
  }

  // LIMIT
  if (limits) {
    const {limit, offset, slimit, soffset} = limits

    if (limit) {
      strs.push(`LIMIT ${limit}`)
    }
    if (offset) {
      strs.push(`OFFSET ${offset}`)
    }
    if (slimit) {
      strs.push(`SLIMIT ${slimit}`)
    }
    if (soffset) {
      strs.push(`SOFFSET ${soffset}`)
    }
  }

  return strs.join(' ')
}
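For reference, a minimal sketch of how the removed helper rendered a nested expression tree; the import path and the sampleAst literal below are illustrative, not taken from the original sources. The nested binary/call nodes exercise the recurse path, while the leaf nodes fall through to exprStr.

// Illustrative only: exercises recurse/exprStr on a nested AST (import path assumed)
import {toString} from 'src/influxql/ast'

const sampleAst = {
  fields: [{column: {expr: 'reference', val: 'usage_idle'}}],
  sources: [{database: 'telegraf', retentionPolicy: 'autogen', name: 'cpu'}],
  condition: {
    expr: 'binary',
    op: '>',
    lhs: {expr: 'reference', val: 'time'},
    rhs: {
      expr: 'binary',
      op: '-',
      lhs: {expr: 'call', name: 'now'},
      rhs: {expr: 'literal', val: '1h', type: 'duration'},
    },
  },
}

// Prints: SELECT "usage_idle" FROM "telegraf"."autogen"."cpu" WHERE time > now() - 1h
console.log(toString(sampleAst))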

@@ -1,406 +0,0 @@
import InfluxQL from 'src/influxql'

describe('influxql astToString', () => {
  it('simple query', () => {
    const ast = InfluxQL({
      fields: [
        {
          column: {
            expr: 'binary',
            op: '+',
            lhs: {
              expr: 'literal',
              val: '1',
              type: 'integer',
            },
            rhs: {
              expr: 'reference',
              val: 'A',
            },
          },
        },
      ],
      sources: [
        {
          database: '',
          retentionPolicy: '',
          name: 'howdy',
          type: 'measurement',
        },
      ],
    })

    const expected = `SELECT 1 + "A" FROM "howdy"`
    const actual = ast.toString()

    // console.log(actual)
    expect(actual).toBe(expected)
  })
  it('simple query w/ multiple sources', () => {
    const ast = InfluxQL({
      fields: [
        {
          column: {
            expr: 'binary',
            op: '+',
            lhs: {
              expr: 'literal',
              val: '1',
              type: 'integer',
            },
            rhs: {
              expr: 'reference',
              val: 'A',
            },
          },
        },
      ],
      sources: [
        {
          database: '',
          retentionPolicy: '',
          name: 'howdy',
          type: 'measurement',
        },
        {
          database: 'telegraf',
          retentionPolicy: 'autogen',
          name: 'doody',
          type: 'measurement',
        },
      ],
    })

    const expected = `SELECT 1 + "A" FROM "howdy", "telegraf"."autogen"."doody"`
    const actual = ast.toString()

    // console.log('actual ', actual)
    // console.log('expected', expected)
    expect(actual).toBe(expected)
  })
  it('query with AS', () => {
    const ast = InfluxQL({
      fields: [
        {
          alias: 'B',
          column: {
            expr: 'binary',
            op: '+',
            lhs: {
              expr: 'literal',
              val: '1',
              type: 'integer',
            },
            rhs: {
              expr: 'reference',
              val: 'A',
            },
          },
        },
      ],
      sources: [
        {
          database: '',
          retentionPolicy: '',
          name: 'howdy',
          type: 'measurement',
        },
      ],
    })

    const expected = `SELECT 1 + "A" AS "B" FROM "howdy"`
    const actual = ast.toString()

    // console.log(actual)
    expect(actual).toBe(expected)
  })
  it('query with 2x func', () => {
    const ast = InfluxQL({
      fields: [
        {
          column: {
            expr: 'binary',
            op: '/',
            lhs: {
              expr: 'call',
              name: 'derivative',
              args: [
                {
                  expr: 'reference',
                  val: 'field1',
                },
                {
                  expr: 'literal',
                  val: '1h',
                  type: 'duration',
                },
              ],
            },
            rhs: {
              expr: 'call',
              name: 'derivative',
              args: [
                {
                  expr: 'reference',
                  val: 'field2',
                },
                {
                  expr: 'literal',
                  val: '1h',
                  type: 'duration',
                },
              ],
            },
          },
        },
      ],
      sources: [
        {
          database: '',
          retentionPolicy: '',
          name: 'myseries',
          type: 'measurement',
        },
      ],
    })

    const expected = `SELECT derivative("field1", 1h) / derivative("field2", 1h) FROM "myseries"`
    const actual = ast.toString()

    expect(actual).toBe(expected)
  })
  it('query with where and groupby', () => {
    const ast = InfluxQL({
      condition: {
        expr: 'binary',
        op: 'AND',
        lhs: {
          expr: 'binary',
          op: 'AND',
          lhs: {
            expr: 'binary',
            op: '=~',
            lhs: {
              expr: 'reference',
              val: 'cluster_id',
            },
            rhs: {
              expr: 'literal',
              val: '/^23/',
              type: 'regex',
            },
          },
          rhs: {
            expr: 'binary',
            op: '=',
            lhs: {
              expr: 'reference',
              val: 'host',
            },
            rhs: {
              expr: 'literal',
              val: 'prod-2ccccc04-us-east-1-data-3',
              type: 'string',
            },
          },
        },
        rhs: {
          expr: 'binary',
          op: '\u003e',
          lhs: {
            expr: 'reference',
            val: 'time',
          },
          rhs: {
            expr: 'binary',
            op: '-',
            lhs: {
              expr: 'call',
              name: 'now',
            },
            rhs: {
              expr: 'literal',
              val: '15m',
              type: 'duration',
            },
          },
        },
      },
      fields: [
        {
          alias: 'max_cpus',
          column: {
            expr: 'call',
            name: 'max',
            args: [
              {
                expr: 'reference',
                val: 'n_cpus',
              },
            ],
          },
        },
        {
          column: {
            expr: 'call',
            name: 'non_negative_derivative',
            args: [
              {
                expr: 'call',
                name: 'median',
                args: [
                  {
                    expr: 'reference',
                    val: 'n_users',
                  },
                ],
              },
              {
                expr: 'literal',
                val: '5m',
                type: 'duration',
              },
            ],
          },
        },
      ],
      groupBy: {
        time: {
          interval: '15m',
          offset: '10s',
        },
        tags: ['host', 'tag_x'],
        fill: '10',
      },
      sources: [
        {
          database: '',
          retentionPolicy: '',
          name: 'system',
          type: 'measurement',
        },
      ],
    })

    const expected =
      'SELECT max("n_cpus") AS "max_cpus", non_negative_derivative(median("n_users"), 5m) FROM "system" WHERE "cluster_id" =~ /^23/ AND "host" = \'prod-2ccccc04-us-east-1-data-3\' AND time > now() - 15m GROUP BY time(15m, 10s),host,tag_x fill(10)'
    const actual = ast.toString()

    // console.log('actual ', actual)
    // console.log('expected', expected)
    expect(actual).toBe(expected)
  })
  it('query with orderby and limit', () => {
    const ast = InfluxQL({
      condition: {
        expr: 'binary',
        op: 'AND',
        lhs: {
          expr: 'binary',
          op: '=',
          lhs: {
            expr: 'reference',
            val: 'host',
          },
          rhs: {
            expr: 'literal',
            val: 'hosta.influxdb.org',
            type: 'string',
          },
        },
        rhs: {
          expr: 'binary',
          op: '\u003e',
          lhs: {
            expr: 'reference',
            val: 'time',
          },
          rhs: {
            expr: 'literal',
            val: '2017-02-07T01:43:02.245407693Z',
            type: 'string',
          },
        },
      },
      fields: [
        {
          column: {
            expr: 'call',
            name: 'mean',
            args: [
              {
                expr: 'reference',
                val: 'field1',
              },
            ],
          },
        },
        {
          column: {
            expr: 'call',
            name: 'sum',
            args: [
              {
                expr: 'reference',
                val: 'field2',
              },
            ],
          },
        },
        {
          alias: 'field_x',
          column: {
            expr: 'call',
            name: 'count',
            args: [
              {
                expr: 'reference',
                val: 'field3',
              },
            ],
          },
        },
      ],
      groupBy: {
        time: {
          interval: '10h',
        },
      },
      limits: {
        limit: 20,
        offset: 10,
      },
      orderbys: [
        {
          name: 'time',
          order: 'descending',
        },
      ],
      sources: [
        {
          database: '',
          retentionPolicy: '',
          name: 'myseries',
          type: 'measurement',
        },
      ],
    })

    const expected = `SELECT mean("field1"), sum("field2"), count("field3") AS "field_x" FROM "myseries" WHERE "host" = 'hosta.influxdb.org' AND time > '2017-02-07T01:43:02.245407693Z' GROUP BY time(10h) ORDER BY time DESC LIMIT 20 OFFSET 10`
    const actual = ast.toString()

    // console.log('actual ', actual)
    // console.log('expected', expected)
    expect(actual).toBe(expected)
  })
})
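One branch of ast.js that the suite above never exercises is SLIMIT/SOFFSET. A minimal sketch of what that path would have produced, assuming the same InfluxQL wrapper; the AST literal here is hypothetical, not from the original tests.

import InfluxQL from 'src/influxql'

// Hypothetical AST exercising the untested SLIMIT/SOFFSET branch in ast.js
const ast = InfluxQL({
  fields: [{column: {expr: 'wildcard', val: '*'}}],
  sources: [{database: '', retentionPolicy: '', name: 'cpu', type: 'measurement'}],
  limits: {limit: 5, slimit: 2, soffset: 1},
})

// Prints: SELECT * FROM "cpu" LIMIT 5 SLIMIT 2 SOFFSET 1
console.log(ast.toString())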

@@ -1,9 +0,0 @@
import {toString} from './ast'

// Thin factory exposing toString for a given query AST
const InfluxQL = ast => {
  return {
    toString: () => toString(ast),
  }
}

export default InfluxQL
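As the tests above show, callers built a query object with InfluxQL(ast) and stringified it; the wrapper simply defers to toString(ast). A minimal sketch of that equivalence; the AST literal and the ast import path are illustrative, not from the original code.

import InfluxQL from 'src/influxql'
import {toString} from 'src/influxql/ast'

// Illustrative AST: a single aggregate field over one measurement
const ast = {
  fields: [{column: {expr: 'call', name: 'mean', args: [{expr: 'reference', val: 'value'}]}}],
  sources: [{database: '', retentionPolicy: '', name: 'cpu', type: 'measurement'}],
}

// Both print: SELECT mean("value") FROM "cpu"
console.log(InfluxQL(ast).toString())
console.log(toString(ast))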