diff --git a/ui/.eslintrc b/ui/.eslintrc
index a74b895c3..7d3571e8c 100644
--- a/ui/.eslintrc
+++ b/ui/.eslintrc
@@ -179,7 +179,7 @@
'one-var': 0,
'operator-assignment': [2, 'always'],
'padded-blocks': [2, 'never'],
- 'quote-props': [2, 'as-needed', {keywords: true, numbers: false }],
+ 'quote-props': [2, 'as-needed', {keywords: false, numbers: false }],
'require-jsdoc': 0,
'semi-spacing': [2, {before: false, after: true}],
'semi': [2, 'never'],
@@ -234,6 +234,5 @@
'react/require-extension': 0,
'react/self-closing-comp': 0, // TODO: we can re-enable this if some brave soul wants to update the code (mostly spans acting as icons)
'react/sort-comp': 0, // TODO: 2
- 'react/jsx-wrap-multilines': ['error', {'declaration': false, 'assignment': false}],
},
}
diff --git a/ui/spec/admin/reducers/adminSpec.js b/ui/spec/admin/reducers/adminSpec.js
index ac234be1e..06b5c584b 100644
--- a/ui/spec/admin/reducers/adminSpec.js
+++ b/ui/spec/admin/reducers/adminSpec.js
@@ -144,11 +144,7 @@ describe('Admin.Reducers', () => {
it('can add a database', () => {
const actual = reducer(state, addDatabase())
- const expected = [
- {...NEW_DEFAULT_DATABASE, isEditing: true},
- db1,
- db2,
- ]
+ const expected = [{...NEW_DEFAULT_DATABASE, isEditing: true}, db1, db2]
expect(actual.databases).to.deep.equal(expected)
})
@@ -170,10 +166,7 @@ describe('Admin.Reducers', () => {
it('can add a database delete code', () => {
const actual = reducer(state, addDatabaseDeleteCode(db1))
- const expected = [
- {...db1, deleteCode: ''},
- db2,
- ]
+ const expected = [{...db1, deleteCode: ''}, db2]
expect(actual.databases).to.deep.equal(expected)
})
@@ -181,10 +174,7 @@ describe('Admin.Reducers', () => {
it('can remove the delete code', () => {
const actual = reducer(state, removeDatabaseDeleteCode(db2))
delete db2.deleteCode
- const expected = [
- db1,
- db2,
- ]
+ const expected = [db1, db2]
expect(actual.databases).to.deep.equal(expected)
})
@@ -195,18 +185,14 @@ describe('Admin.Reducers', () => {
it('can add a retention policy', () => {
const actual = reducer(state, addRetentionPolicy(db1))
- const expected = [
- {...db1, retentionPolicies: [NEW_EMPTY_RP, rp1]},
- ]
+ const expected = [{...db1, retentionPolicies: [NEW_EMPTY_RP, rp1]}]
expect(actual.databases).to.deep.equal(expected)
})
it('can remove a retention policy', () => {
const actual = reducer(state, removeRetentionPolicy(db1, rp1))
- const expected = [
- {...db1, retentionPolicies: []},
- ]
+ const expected = [{...db1, retentionPolicies: []}]
expect(actual.databases).to.deep.equal(expected)
})
@@ -214,9 +200,7 @@ describe('Admin.Reducers', () => {
it('can edit a retention policy', () => {
const updates = {name: 'rpOne', duration: '100y', replication: '42'}
const actual = reducer(state, editRetentionPolicy(db1, rp1, updates))
- const expected = [
- {...db1, retentionPolicies: [{...rp1, ...updates}]},
- ]
+ const expected = [{...db1, retentionPolicies: [{...rp1, ...updates}]}]
expect(actual.databases).to.deep.equal(expected)
})
@@ -224,17 +208,12 @@ describe('Admin.Reducers', () => {
it('it can add a user', () => {
state = {
- users: [
- u1,
- ],
+ users: [u1],
}
const actual = reducer(state, addUser())
const expected = {
- users: [
- {...NEW_DEFAULT_USER, isEditing: true},
- u1,
- ],
+ users: [{...NEW_DEFAULT_USER, isEditing: true}, u1],
}
expect(actual.users).to.deep.equal(expected.users)
@@ -268,17 +247,12 @@ describe('Admin.Reducers', () => {
it('it can add a role', () => {
state = {
- roles: [
- r1,
- ],
+ roles: [r1],
}
const actual = reducer(state, addRole())
const expected = {
- roles: [
- {...NEW_DEFAULT_ROLE, isEditing: true},
- r1,
- ],
+ roles: [{...NEW_DEFAULT_ROLE, isEditing: true}, r1],
}
expect(actual.roles).to.deep.equal(expected.roles)
@@ -321,9 +295,7 @@ describe('Admin.Reducers', () => {
it('it can delete a role', () => {
state = {
- roles: [
- r1,
- ],
+ roles: [r1],
}
const actual = reducer(state, deleteRole(r1))
@@ -336,9 +308,7 @@ describe('Admin.Reducers', () => {
it('it can delete a user', () => {
state = {
- users: [
- u1,
- ],
+ users: [u1],
}
const actual = reducer(state, deleteUser(u1))
@@ -358,10 +328,7 @@ describe('Admin.Reducers', () => {
const actual = reducer(state, filterRoles(text))
const expected = {
- roles: [
- {...r1, hidden: false},
- {...r2, hidden: true},
- ],
+ roles: [{...r1, hidden: false}, {...r2, hidden: true}],
}
expect(actual.roles).to.deep.equal(expected.roles)
@@ -376,10 +343,7 @@ describe('Admin.Reducers', () => {
const actual = reducer(state, filterUsers(text))
const expected = {
- users: [
- {...u1, hidden: true},
- {...u2, hidden: false},
- ],
+ users: [{...u1, hidden: true}, {...u2, hidden: false}],
}
expect(actual.users).to.deep.equal(expected.users)
diff --git a/ui/spec/data_explorer/utils/influxql/selectSpec.js b/ui/spec/data_explorer/utils/influxql/selectSpec.js
index 67e96bd47..f828db90b 100644
--- a/ui/spec/data_explorer/utils/influxql/selectSpec.js
+++ b/ui/spec/data_explorer/utils/influxql/selectSpec.js
@@ -10,61 +10,103 @@ describe('buildInfluxQLQuery', () => {
describe('when information is missing', () => {
it('returns a null select statement', () => {
expect(buildInfluxQLQuery({}, mergeConfig())).to.equal(null)
- expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(null) // no measurement
- expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1', measurement: 'm1'}))).to.equal(null) // no fields
+ expect(buildInfluxQLQuery({}, mergeConfig({database: 'db1'}))).to.equal(
+ null
+ ) // no measurement
+ expect(
+ buildInfluxQLQuery(
+ {},
+ mergeConfig({database: 'db1', measurement: 'm1'})
+ )
+ ).to.equal(null) // no fields
})
})
describe('with a database, measurement, field, and NO retention policy', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', measurement: 'm1', fields: [{field: 'f1', func: null}]})
+ config = mergeConfig({
+ database: 'db1',
+ measurement: 'm1',
+ fields: [{field: 'f1', func: null}],
+ })
})
it('builds the right query', () => {
- expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1".."m1"')
+ expect(buildInfluxQLQuery({}, config)).to.equal(
+ 'SELECT "f1" FROM "db1".."m1"'
+ )
})
})
describe('with a database, measurement, retention policy, and field', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: 'f1', func: null}]})
+ config = mergeConfig({
+ database: 'db1',
+ measurement: 'm1',
+ retentionPolicy: 'rp1',
+ fields: [{field: 'f1', func: null}],
+ })
timeBounds = {lower: 'now() - 1hr'}
})
it('builds the right query', () => {
- expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1"')
+ expect(buildInfluxQLQuery({}, config)).to.equal(
+ 'SELECT "f1" FROM "db1"."rp1"."m1"'
+ )
})
it('builds the right query with a time range', () => {
- expect(buildInfluxQLQuery(timeBounds, config)).to.equal('SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr')
+ expect(buildInfluxQLQuery(timeBounds, config)).to.equal(
+ 'SELECT "f1" FROM "db1"."rp1"."m1" WHERE time > now() - 1hr'
+ )
})
})
describe('when the field is *', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', measurement: 'm1', retentionPolicy: 'rp1', fields: [{field: '*', func: null}]})
+ config = mergeConfig({
+ database: 'db1',
+ measurement: 'm1',
+ retentionPolicy: 'rp1',
+ fields: [{field: '*', func: null}],
+ })
})
it('does not quote the star', () => {
- expect(buildInfluxQLQuery({}, config)).to.equal('SELECT * FROM "db1"."rp1"."m1"')
+ expect(buildInfluxQLQuery({}, config)).to.equal(
+ 'SELECT * FROM "db1"."rp1"."m1"'
+ )
})
})
describe('with a measurement and one field, an aggregate, and a GROUP BY time()', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: []}})
+ config = mergeConfig({
+ database: 'db1',
+ measurement: 'm0',
+ retentionPolicy: 'rp1',
+ fields: [{field: 'value', funcs: ['min']}],
+ groupBy: {time: '10m', tags: []},
+ })
timeBounds = {lower: 'now() - 12h'}
})
it('builds the right query', () => {
- const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)'
+ const expected =
+ 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m)'
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
describe('with a measurement and one field, an aggregate, and a GROUP BY tags', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', measurement: 'm0', retentionPolicy: 'rp1', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: null, tags: ['t1', 't2']}})
+ config = mergeConfig({
+ database: 'db1',
+ measurement: 'm0',
+ retentionPolicy: 'rp1',
+ fields: [{field: 'value', funcs: ['min']}],
+ groupBy: {time: null, tags: ['t1', 't2']},
+ })
timeBounds = {lower: 'now() - 12h'}
})
@@ -76,36 +118,59 @@ describe('buildInfluxQLQuery', () => {
describe('with a measurement, one field, and an upper / lower absolute time range', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: []}]})
- timeBounds = {lower: "'2015-07-23T15:52:24.447Z'", upper: "'2015-07-24T15:52:24.447Z'"}
+ config = mergeConfig({
+ database: 'db1',
+ retentionPolicy: 'rp1',
+ measurement: 'm0',
+ fields: [{field: 'value', funcs: []}],
+ })
+ timeBounds = {
+ lower: "'2015-07-23T15:52:24.447Z'",
+ upper: "'2015-07-24T15:52:24.447Z'",
+ }
})
it('builds the right query', () => {
- const expected = 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\''
+ const expected =
+ 'SELECT "value" FROM "db1"."rp1"."m0" WHERE time > \'2015-07-23T15:52:24.447Z\' AND time < \'2015-07-24T15:52:24.447Z\''
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
describe('with a measurement and one field, an aggregate, and a GROUP BY time(), and tags', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'value', funcs: ['min']}], groupBy: {time: '10m', tags: ['t1', 't2']}})
+ config = mergeConfig({
+ database: 'db1',
+ retentionPolicy: 'rp1',
+ measurement: 'm0',
+ fields: [{field: 'value', funcs: ['min']}],
+ groupBy: {time: '10m', tags: ['t1', 't2']},
+ })
timeBounds = {lower: 'now() - 12h'}
})
it('builds the right query', () => {
- const expected = 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"'
+ const expected =
+ 'SELECT min("value") AS "min_value" FROM "db1"."rp1"."m0" WHERE time > now() - 12h GROUP BY time(10m), "t1", "t2"'
expect(buildInfluxQLQuery(timeBounds, config)).to.equal(expected)
})
})
describe('with a measurement and two fields', () => {
beforeEach(() => {
- config = mergeConfig({database: 'db1', retentionPolicy: 'rp1', measurement: 'm0', fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}]})
+ config = mergeConfig({
+ database: 'db1',
+ retentionPolicy: 'rp1',
+ measurement: 'm0',
+ fields: [{field: 'f0', funcs: []}, {field: 'f1', funcs: []}],
+ })
timeBounds = {upper: "'2015-02-24T00:00:00Z'"}
})
it('builds the right query', () => {
- expect(buildInfluxQLQuery({}, config)).to.equal('SELECT "f0", "f1" FROM "db1"."rp1"."m0"')
+ expect(buildInfluxQLQuery({}, config)).to.equal(
+ 'SELECT "f0", "f1" FROM "db1"."rp1"."m0"'
+ )
})
it('builds the right query with a time range', () => {
@@ -121,14 +186,8 @@ describe('buildInfluxQLQuery', () => {
retentionPolicy: 'rp1',
fields: [{field: 'f0', funcs: []}],
tags: {
- k1: [
- 'v1',
- 'v3',
- 'v4',
- ],
- k2: [
- 'v2',
- ],
+ k1: ['v1', 'v3', 'v4'],
+ k2: ['v2'],
},
})
timeBounds = {lower: 'now() - 6h'}
diff --git a/ui/spec/influxql/astToStringSpec.js b/ui/spec/influxql/astToStringSpec.js
index 6625c6a25..c7d32cd73 100644
--- a/ui/spec/influxql/astToStringSpec.js
+++ b/ui/spec/influxql/astToStringSpec.js
@@ -3,31 +3,31 @@ import InfluxQL from 'src/influxql'
describe('influxql astToString', () => {
it('simple query', () => {
const ast = InfluxQL({
- "fields": [
+ fields: [
{
- "column": {
- "expr": "binary",
- "op": "+",
- "lhs": {
- "expr": "literal",
- "val": "1",
- "type": "integer"
+ column: {
+ expr: 'binary',
+ op: '+',
+ lhs: {
+ expr: 'literal',
+ val: '1',
+ type: 'integer',
},
- "rhs": {
- "expr": "reference",
- "val": "A"
- }
- }
- }
+ rhs: {
+ expr: 'reference',
+ val: 'A',
+ },
+ },
+ },
],
- "sources": [
+ sources: [
{
- "database": "",
- "retentionPolicy": "",
- "name": "howdy",
- "type": "measurement"
- }
- ]
+ database: '',
+ retentionPolicy: '',
+ name: 'howdy',
+ type: 'measurement',
+ },
+ ],
})
const expected = `SELECT 1 + "A" FROM "howdy"`
@@ -40,37 +40,37 @@ describe('influxql astToString', () => {
it('simple query w/ multiple sources', () => {
const ast = InfluxQL({
- "fields": [
+ fields: [
{
- "column": {
- "expr": "binary",
- "op": "+",
- "lhs": {
- "expr": "literal",
- "val": "1",
- "type": "integer"
+ column: {
+ expr: 'binary',
+ op: '+',
+ lhs: {
+ expr: 'literal',
+ val: '1',
+ type: 'integer',
},
- "rhs": {
- "expr": "reference",
- "val": "A"
- }
- }
- }
+ rhs: {
+ expr: 'reference',
+ val: 'A',
+ },
+ },
+ },
],
- "sources": [
+ sources: [
{
- "database": "",
- "retentionPolicy": "",
- "name": "howdy",
- "type": "measurement"
+ database: '',
+ retentionPolicy: '',
+ name: 'howdy',
+ type: 'measurement',
},
{
- "database": "telegraf",
- "retentionPolicy": "autogen",
- "name": "doody",
- "type": "measurement"
- }
- ]
+ database: 'telegraf',
+ retentionPolicy: 'autogen',
+ name: 'doody',
+ type: 'measurement',
+ },
+ ],
})
const expected = `SELECT 1 + "A" FROM "howdy", "telegraf"."autogen"."doody"`
@@ -84,32 +84,32 @@ describe('influxql astToString', () => {
it('query with AS', () => {
const ast = InfluxQL({
- "fields": [
+ fields: [
{
- "alias": "B",
- "column": {
- "expr": "binary",
- "op": "+",
- "lhs": {
- "expr": "literal",
- "val": "1",
- "type": "integer"
+ alias: 'B',
+ column: {
+ expr: 'binary',
+ op: '+',
+ lhs: {
+ expr: 'literal',
+ val: '1',
+ type: 'integer',
},
- "rhs": {
- "expr": "reference",
- "val": "A"
- }
- }
- }
+ rhs: {
+ expr: 'reference',
+ val: 'A',
+ },
+ },
+ },
],
- "sources": [
+ sources: [
{
- "database": "",
- "retentionPolicy": "",
- "name": "howdy",
- "type": "measurement"
- }
- ]
+ database: '',
+ retentionPolicy: '',
+ name: 'howdy',
+ type: 'measurement',
+ },
+ ],
})
const expected = `SELECT 1 + "A" AS "B" FROM "howdy"`
@@ -122,52 +122,52 @@ describe('influxql astToString', () => {
it('query with 2x func', () => {
const ast = InfluxQL({
- "fields": [
+ fields: [
{
- "column": {
- "expr": "binary",
- "op": "/",
- "lhs": {
- "expr": "call",
- "name": "derivative",
- "args": [
+ column: {
+ expr: 'binary',
+ op: '/',
+ lhs: {
+ expr: 'call',
+ name: 'derivative',
+ args: [
{
- "expr": "reference",
- "val": "field1"
+ expr: 'reference',
+ val: 'field1',
},
{
- "expr": "literal",
- "val": "1h",
- "type": "duration"
- }
- ]
+ expr: 'literal',
+ val: '1h',
+ type: 'duration',
+ },
+ ],
},
- "rhs": {
- "expr": "call",
- "name": "derivative",
- "args": [
+ rhs: {
+ expr: 'call',
+ name: 'derivative',
+ args: [
{
- "expr": "reference",
- "val": "field2"
+ expr: 'reference',
+ val: 'field2',
},
{
- "expr": "literal",
- "val": "1h",
- "type": "duration"
- }
- ]
- }
- }
- }
+ expr: 'literal',
+ val: '1h',
+ type: 'duration',
+ },
+ ],
+ },
+ },
+ },
],
- "sources": [
+ sources: [
{
- "database": "",
- "retentionPolicy": "",
- "name": "myseries",
- "type": "measurement"
- }
- ]
+ database: '',
+ retentionPolicy: '',
+ name: 'myseries',
+ type: 'measurement',
+ },
+ ],
})
const expected = `SELECT derivative("field1", 1h) / derivative("field2", 1h) FROM "myseries"`
@@ -181,121 +181,119 @@ describe('influxql astToString', () => {
it('query with where and groupby', () => {
const ast = InfluxQL({
- "condition": {
- "expr": "binary",
- "op": "AND",
- "lhs": {
- "expr": "binary",
- "op": "AND",
- "lhs": {
- "expr": "binary",
- "op": "=~",
- "lhs": {
- "expr": "reference",
- "val": "cluster_id"
+ condition: {
+ expr: 'binary',
+ op: 'AND',
+ lhs: {
+ expr: 'binary',
+ op: 'AND',
+ lhs: {
+ expr: 'binary',
+ op: '=~',
+ lhs: {
+ expr: 'reference',
+ val: 'cluster_id',
+ },
+ rhs: {
+ expr: 'literal',
+ val: '/^23/',
+ type: 'regex',
},
- "rhs": {
- "expr": "literal",
- "val": "/^23/",
- "type": "regex"
- }
},
- "rhs": {
- "expr": "binary",
- "op": "=",
- "lhs": {
- "expr": "reference",
- "val": "host"
+ rhs: {
+ expr: 'binary',
+ op: '=',
+ lhs: {
+ expr: 'reference',
+ val: 'host',
},
- "rhs": {
- "expr": "literal",
- "val": "prod-2ccccc04-us-east-1-data-3",
- "type": "string"
- }
- }
+ rhs: {
+ expr: 'literal',
+ val: 'prod-2ccccc04-us-east-1-data-3',
+ type: 'string',
+ },
+ },
},
- "rhs": {
- "expr": "binary",
- "op": "\u003e",
- "lhs": {
- "expr": "reference",
- "val": "time"
+ rhs: {
+ expr: 'binary',
+ op: '\u003e',
+ lhs: {
+ expr: 'reference',
+ val: 'time',
},
- "rhs": {
- "expr": "binary",
- "op": "-",
- "lhs": {
- "expr": "call",
- "name": "now"
+ rhs: {
+ expr: 'binary',
+ op: '-',
+ lhs: {
+ expr: 'call',
+ name: 'now',
},
- "rhs": {
- "expr": "literal",
- "val": "15m",
- "type": "duration"
- }
- }
- }
+ rhs: {
+ expr: 'literal',
+ val: '15m',
+ type: 'duration',
+ },
+ },
+ },
},
- "fields": [
+ fields: [
{
- "alias": "max_cpus",
- "column": {
- "expr": "call",
- "name": "max",
- "args": [
+ alias: 'max_cpus',
+ column: {
+ expr: 'call',
+ name: 'max',
+ args: [
{
- "expr": "reference",
- "val": "n_cpus"
- }
- ]
- }
+ expr: 'reference',
+ val: 'n_cpus',
+ },
+ ],
+ },
},
{
- "column": {
- "expr": "call",
- "name": "non_negative_derivative",
- "args": [
+ column: {
+ expr: 'call',
+ name: 'non_negative_derivative',
+ args: [
{
- "expr": "call",
- "name": "median",
- "args": [
+ expr: 'call',
+ name: 'median',
+ args: [
{
- "expr": "reference",
- "val": "n_users"
- }
- ]
+ expr: 'reference',
+ val: 'n_users',
+ },
+ ],
},
{
- "expr": "literal",
- "val": "5m",
- "type": "duration"
- }
- ]
- }
- }
- ],
- "groupBy": {
- "time": {
- "interval": "15m",
- "offset": "10s"
+ expr: 'literal',
+ val: '5m',
+ type: 'duration',
+ },
+ ],
+ },
},
- "tags": [
- "host",
- "tag_x"
- ],
- "fill": "10"
+ ],
+ groupBy: {
+ time: {
+ interval: '15m',
+ offset: '10s',
+ },
+ tags: ['host', 'tag_x'],
+ fill: '10',
},
- "sources": [
+ sources: [
{
- "database": "",
- "retentionPolicy": "",
- "name": "system",
- "type": "measurement"
- }
- ]
+ database: '',
+ retentionPolicy: '',
+ name: 'system',
+ type: 'measurement',
+ },
+ ],
})
- const expected = "SELECT max(\"n_cpus\") AS \"max_cpus\", non_negative_derivative(median(\"n_users\"), 5m) FROM \"system\" WHERE \"cluster_id\" =~ /^23/ AND \"host\" = 'prod-2ccccc04-us-east-1-data-3' AND time > now() - 15m GROUP BY time(15m, 10s),host,tag_x fill(10)"
+ const expected =
+ 'SELECT max("n_cpus") AS "max_cpus", non_negative_derivative(median("n_users"), 5m) FROM "system" WHERE "cluster_id" =~ /^23/ AND "host" = \'prod-2ccccc04-us-east-1-data-3\' AND time > now() - 15m GROUP BY time(15m, 10s),host,tag_x fill(10)'
const actual = ast.toString()
// console.log('actual ', actual)
@@ -306,98 +304,98 @@ describe('influxql astToString', () => {
it('query with orderby and limit', () => {
const ast = InfluxQL({
- "condition": {
- "expr": "binary",
- "op": "AND",
- "lhs": {
- "expr": "binary",
- "op": "=",
- "lhs": {
- "expr": "reference",
- "val": "host"
+ condition: {
+ expr: 'binary',
+ op: 'AND',
+ lhs: {
+ expr: 'binary',
+ op: '=',
+ lhs: {
+ expr: 'reference',
+ val: 'host',
},
- "rhs": {
- "expr": "literal",
- "val": "hosta.influxdb.org",
- "type": "string"
- }
- },
- "rhs": {
- "expr": "binary",
- "op": "\u003e",
- "lhs": {
- "expr": "reference",
- "val": "time"
+ rhs: {
+ expr: 'literal',
+ val: 'hosta.influxdb.org',
+ type: 'string',
},
- "rhs": {
- "expr": "literal",
- "val": "2017-02-07T01:43:02.245407693Z",
- "type": "string"
- }
- }
+ },
+ rhs: {
+ expr: 'binary',
+ op: '\u003e',
+ lhs: {
+ expr: 'reference',
+ val: 'time',
+ },
+ rhs: {
+ expr: 'literal',
+ val: '2017-02-07T01:43:02.245407693Z',
+ type: 'string',
+ },
+ },
},
- "fields": [
+ fields: [
{
- "column": {
- "expr": "call",
- "name": "mean",
- "args": [
+ column: {
+ expr: 'call',
+ name: 'mean',
+ args: [
{
- "expr": "reference",
- "val": "field1"
- }
- ]
- }
+ expr: 'reference',
+ val: 'field1',
+ },
+ ],
+ },
},
{
- "column": {
- "expr": "call",
- "name": "sum",
- "args": [
+ column: {
+ expr: 'call',
+ name: 'sum',
+ args: [
{
- "expr": "reference",
- "val": "field2"
- }
- ]
- }
+ expr: 'reference',
+ val: 'field2',
+ },
+ ],
+ },
},
{
- "alias": "field_x",
- "column": {
- "expr": "call",
- "name": "count",
- "args": [
+ alias: 'field_x',
+ column: {
+ expr: 'call',
+ name: 'count',
+ args: [
{
- "expr": "reference",
- "val": "field3"
- }
- ]
- }
- }
+ expr: 'reference',
+ val: 'field3',
+ },
+ ],
+ },
+ },
],
- "groupBy": {
- "time": {
- "interval": "10h"
- }
+ groupBy: {
+ time: {
+ interval: '10h',
+ },
},
- "limits": {
- "limit": 20,
- "offset": 10
+ limits: {
+ limit: 20,
+ offset: 10,
},
- "orderbys": [
+ orderbys: [
{
- "name": "time",
- "order": "descending"
- }
+ name: 'time',
+ order: 'descending',
+ },
],
- "sources": [
+ sources: [
{
- "database": "",
- "retentionPolicy": "",
- "name": "myseries",
- "type": "measurement"
- }
- ]
+ database: '',
+ retentionPolicy: '',
+ name: 'myseries',
+ type: 'measurement',
+ },
+ ],
})
const expected = `SELECT mean("field1"), sum("field2"), count("field3") AS "field_x" FROM "myseries" WHERE "host" = 'hosta.influxdb.org' AND time > '2017-02-07T01:43:02.245407693Z' GROUP BY time(10h) ORDER BY time DESC LIMIT 20 OFFSET 10`
diff --git a/ui/spec/shared/parsing/diskBytesSpec.js b/ui/spec/shared/parsing/diskBytesSpec.js
index 39f3f5daa..ee8024d9e 100644
--- a/ui/spec/shared/parsing/diskBytesSpec.js
+++ b/ui/spec/shared/parsing/diskBytesSpec.js
@@ -1,11 +1,52 @@
-import {diskBytesFromShard, diskBytesFromShardForDatabase} from 'shared/parsing/diskBytes'
+import {
+ diskBytesFromShard,
+ diskBytesFromShardForDatabase,
+} from 'shared/parsing/diskBytes'
describe('diskBytesFromShard', () => {
it('sums all the disk bytes in multiple series', () => {
- const response = {results: [
- {series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "_internal", engine: "tsm1", hostname: "WattsInfluxDB", id: "1", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [[1464811503000000000, 100]]}]},
- {series: [{name: "shard", tags: {clusterID: "6272208615254493595", database: "telegraf", engine: "tsm1", hostname: "WattsInfluxDB", id: "2", nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/telegraf/default/2", retentionPolicy: "default"}, columns: ["time", "last"], values: [[1464811503000000000, 200]]}]},
- ]}
+ const response = {
+ results: [
+ {
+ series: [
+ {
+ name: 'shard',
+ tags: {
+ clusterID: '6272208615254493595',
+ database: '_internal',
+ engine: 'tsm1',
+ hostname: 'WattsInfluxDB',
+ id: '1',
+ nodeID: 'localhost:8088',
+ path: '/Users/watts/.influxdb/data/_internal/monitor/1',
+ retentionPolicy: 'monitor',
+ },
+ columns: ['time', 'last'],
+ values: [[1464811503000000000, 100]],
+ },
+ ],
+ },
+ {
+ series: [
+ {
+ name: 'shard',
+ tags: {
+ clusterID: '6272208615254493595',
+ database: 'telegraf',
+ engine: 'tsm1',
+ hostname: 'WattsInfluxDB',
+ id: '2',
+ nodeID: 'localhost:8088',
+ path: '/Users/watts/.influxdb/data/telegraf/default/2',
+ retentionPolicy: 'default',
+ },
+ columns: ['time', 'last'],
+ values: [[1464811503000000000, 200]],
+ },
+ ],
+ },
+ ],
+ }
const result = diskBytesFromShard(response)
const expectedTotal = 300
@@ -24,7 +65,7 @@ describe('diskBytesFromShard', () => {
})
it('exposes the server error', () => {
- const response = {results: [{error: "internal server error?"}]}
+ const response = {results: [{error: 'internal server error?'}]}
const result = diskBytesFromShard(response)
@@ -35,17 +76,65 @@ describe('diskBytesFromShard', () => {
describe('diskBytesFromShardForDatabase', () => {
it('return parses data as expected', () => {
- const response = {results: [{series: [
- {name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
- {name: "shard", tags: {nodeID: "localhost:8088", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
- {name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/1", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 100]]},
- {name: "shard", tags: {nodeID: "localhost:8188", path: "/Users/watts/.influxdb/data/_internal/monitor/3", retentionPolicy: "monitor"}, columns: ["time", "last"], values: [["2016-06-02T01:06:13Z", 200]]},
- ]}]}
+ const response = {
+ results: [
+ {
+ series: [
+ {
+ name: 'shard',
+ tags: {
+ nodeID: 'localhost:8088',
+ path: '/Users/watts/.influxdb/data/_internal/monitor/1',
+ retentionPolicy: 'monitor',
+ },
+ columns: ['time', 'last'],
+ values: [['2016-06-02T01:06:13Z', 100]],
+ },
+ {
+ name: 'shard',
+ tags: {
+ nodeID: 'localhost:8088',
+ path: '/Users/watts/.influxdb/data/_internal/monitor/3',
+ retentionPolicy: 'monitor',
+ },
+ columns: ['time', 'last'],
+ values: [['2016-06-02T01:06:13Z', 200]],
+ },
+ {
+ name: 'shard',
+ tags: {
+ nodeID: 'localhost:8188',
+ path: '/Users/watts/.influxdb/data/_internal/monitor/1',
+ retentionPolicy: 'monitor',
+ },
+ columns: ['time', 'last'],
+ values: [['2016-06-02T01:06:13Z', 100]],
+ },
+ {
+ name: 'shard',
+ tags: {
+ nodeID: 'localhost:8188',
+ path: '/Users/watts/.influxdb/data/_internal/monitor/3',
+ retentionPolicy: 'monitor',
+ },
+ columns: ['time', 'last'],
+ values: [['2016-06-02T01:06:13Z', 200]],
+ },
+ ],
+ },
+ ],
+ }
const result = diskBytesFromShardForDatabase(response)
const expected = {
- 1: [{nodeID: 'localhost:8088', diskUsage: 100}, {nodeID: 'localhost:8188', diskUsage: 100}],
- 3: [{nodeID: 'localhost:8088', diskUsage: 200}, {nodeID: 'localhost:8188', diskUsage: 200}],
+ 1: [
+ {nodeID: 'localhost:8088', diskUsage: 100},
+ {nodeID: 'localhost:8188', diskUsage: 100},
+ ],
+ 3: [
+ {nodeID: 'localhost:8088', diskUsage: 200},
+ {nodeID: 'localhost:8188', diskUsage: 200},
+ ],
}
expect(result.shardData).to.deep.equal(expected)
@@ -61,7 +150,7 @@ describe('diskBytesFromShardForDatabase', () => {
})
it('exposes the server error', () => {
- const response = {results: [{error: "internal server error?"}]}
+ const response = {results: [{error: 'internal server error?'}]}
const result = diskBytesFromShardForDatabase(response)
diff --git a/ui/spec/shared/parsing/getRangeForDygraphSpec.js b/ui/spec/shared/parsing/getRangeForDygraphSpec.js
index fd89e1c4c..d467a7c79 100644
--- a/ui/spec/shared/parsing/getRangeForDygraphSpec.js
+++ b/ui/spec/shared/parsing/getRangeForDygraphSpec.js
@@ -43,14 +43,20 @@ describe('getRangeForDygraphSpec', () => {
const timeSeries = [[date, max], [date, mid], [date, min]]
it('can pad positive values', () => {
- const [actualMin, actualMax] = getRange(timeSeries, undefined, {...kapacitor, value: 20})
+ const [actualMin, actualMax] = getRange(timeSeries, undefined, {
+ ...kapacitor,
+ value: 20,
+ })
expect(actualMin).to.equal(min)
expect(actualMax).to.be.above(max)
})
it('can pad negative values', () => {
- const [actualMin, actualMax] = getRange(timeSeries, undefined, {...kapacitor, value: -10})
+ const [actualMin, actualMax] = getRange(timeSeries, undefined, {
+ ...kapacitor,
+ value: -10,
+ })
expect(actualMin).to.be.below(min)
expect(actualMax).to.equal(max)
@@ -60,7 +66,10 @@ describe('getRangeForDygraphSpec', () => {
it('subtracts from a positive value', () => {
const value = 2
const opAndValue = {operator: 'less than', value}
- const [actualMin, actualMax] = getRange(timeSeries, undefined, {...kapacitor, ...opAndValue})
+ const [actualMin, actualMax] = getRange(timeSeries, undefined, {
+ ...kapacitor,
+ ...opAndValue,
+ })
expect(actualMin).to.be.lessThan(value)
expect(actualMax).to.equal(max)
@@ -72,14 +81,20 @@ describe('getRangeForDygraphSpec', () => {
const timeSeries = [[date, max], [date, min], [date, mid]]
it('can pad positive values', () => {
- const [actualMin, actualMax] = getRange(timeSeries, undefined, {...kapacitor, rangeValue: 20})
+ const [actualMin, actualMax] = getRange(timeSeries, undefined, {
+ ...kapacitor,
+ rangeValue: 20,
+ })
expect(actualMin).to.equal(min)
expect(actualMax).to.be.above(max)
})
it('can pad negative values', () => {
- const [actualMin, actualMax] = getRange(timeSeries, undefined, {...kapacitor, rangeValue: -10})
+ const [actualMin, actualMax] = getRange(timeSeries, undefined, {
+ ...kapacitor,
+ rangeValue: -10,
+ })
expect(actualMin).to.be.below(min)
expect(actualMax).to.equal(max)
diff --git a/ui/spec/shared/parsing/parseAlertaSpec.js b/ui/spec/shared/parsing/parseAlertaSpec.js
index 8e289cdfe..9fe25b5ef 100644
--- a/ui/spec/shared/parsing/parseAlertaSpec.js
+++ b/ui/spec/shared/parsing/parseAlertaSpec.js
@@ -15,36 +15,24 @@ it('can parse an alerta tick script', () => {
const expectedObj = [
{
- name: "resource",
- args: [
- "Hostname or service",
- ],
+ name: 'resource',
+ args: ['Hostname or service'],
},
{
- name: "event",
- args: [
- "Something went wrong",
- ],
+ name: 'event',
+ args: ['Something went wrong'],
},
{
- name: "environment",
- args: [
- "Development",
- ],
+ name: 'environment',
+ args: ['Development'],
},
{
- name: "group",
- args: [
- "Dev. Servers",
- ],
+ name: 'group',
+ args: ['Dev. Servers'],
},
{
- name: "services",
- args: [
- "a",
- "b",
- "c",
- ],
+ name: 'services',
+ args: ['a', 'b', 'c'],
},
]
diff --git a/ui/spec/shared/parsing/showDatabasesSpec.js b/ui/spec/shared/parsing/showDatabasesSpec.js
index f2eaf8078..d44c855b4 100644
--- a/ui/spec/shared/parsing/showDatabasesSpec.js
+++ b/ui/spec/shared/parsing/showDatabasesSpec.js
@@ -2,7 +2,11 @@ import showDatabases from 'shared/parsing/showDatabases'
describe('showDatabases', () => {
it('exposes all the database properties', () => {
- const response = {results: [{series: [{columns: ["name"], values: [["mydb1"], ["mydb2"]]}]}]}
+ const response = {
+ results: [
+ {series: [{columns: ['name'], values: [['mydb1'], ['mydb2']]}]},
+ ],
+ }
const result = showDatabases(response)
@@ -13,7 +17,7 @@ describe('showDatabases', () => {
})
it('returns an empty array when there are no databases', () => {
- const response = {results: [{series: [{columns: ["name"]}]}]}
+ const response = {results: [{series: [{columns: ['name']}]}]}
const result = showDatabases(response)
@@ -22,7 +26,7 @@ describe('showDatabases', () => {
})
it('exposes the server error', () => {
- const response = {results: [{error: "internal server error?"}]}
+ const response = {results: [{error: 'internal server error?'}]}
const result = showDatabases(response)
diff --git a/ui/spec/shared/parsing/showFieldKeysSpec.js b/ui/spec/shared/parsing/showFieldKeysSpec.js
index 581ab9dd2..439942a9d 100644
--- a/ui/spec/shared/parsing/showFieldKeysSpec.js
+++ b/ui/spec/shared/parsing/showFieldKeysSpec.js
@@ -2,7 +2,15 @@ import parseShowFieldKeys from 'shared/parsing/showFieldKeys'
describe('parseShowFieldKeys', () => {
it('parses a single result', () => {
- const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}]}
+ const response = {
+ results: [
+ {
+ series: [
+ {name: 'm1', columns: ['fieldKey'], values: [['f1'], ['f2']]},
+ ],
+ },
+ ],
+ }
const result = parseShowFieldKeys(response)
expect(result.errors).to.eql([])
@@ -12,7 +20,20 @@ describe('parseShowFieldKeys', () => {
})
it('parses multiple results', () => {
- const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {series: [{name: "m2", columns: ["fieldKey"], values: [["f3"], ["f4"]]}]}]}
+ const response = {
+ results: [
+ {
+ series: [
+ {name: 'm1', columns: ['fieldKey'], values: [['f1'], ['f2']]},
+ ],
+ },
+ {
+ series: [
+ {name: 'm2', columns: ['fieldKey'], values: [['f3'], ['f4']]},
+ ],
+ },
+ ],
+ }
const result = parseShowFieldKeys(response)
expect(result.errors).to.eql([])
expect(result.fieldSets).to.eql({
@@ -22,14 +43,31 @@ describe('parseShowFieldKeys', () => {
})
it('parses multiple errors', () => {
- const response = {results: [{error: "measurement not found: m1"}, {error: "measurement not found: m2"}]}
+ const response = {
+ results: [
+ {error: 'measurement not found: m1'},
+ {error: 'measurement not found: m2'},
+ ],
+ }
const result = parseShowFieldKeys(response)
- expect(result.errors).to.eql(['measurement not found: m1', 'measurement not found: m2'])
+ expect(result.errors).to.eql([
+ 'measurement not found: m1',
+ 'measurement not found: m2',
+ ])
expect(result.fieldSets).to.eql({})
})
it('parses a mix of results and errors', () => {
- const response = {results: [{series: [{name: "m1", columns: ["fieldKey"], values: [["f1"], ["f2"]]}]}, {error: "measurement not found: m2"}]}
+ const response = {
+ results: [
+ {
+ series: [
+ {name: 'm1', columns: ['fieldKey'], values: [['f1'], ['f2']]},
+ ],
+ },
+ {error: 'measurement not found: m2'},
+ ],
+ }
const result = parseShowFieldKeys(response)
expect(result.errors).to.eql(['measurement not found: m2'])
expect(result.fieldSets).to.eql({
diff --git a/ui/spec/shared/parsing/showQueriesSpec.js b/ui/spec/shared/parsing/showQueriesSpec.js
index 44522df8e..e0b5c7122 100644
--- a/ui/spec/shared/parsing/showQueriesSpec.js
+++ b/ui/spec/shared/parsing/showQueriesSpec.js
@@ -2,7 +2,21 @@ import showQueriesParser from 'shared/parsing/showQueries'
describe('showQueriesParser', () => {
it('exposes all currently running queries', () => {
- const response = {results: [{series: [{columns: ["qid", "query", "database", "duration"], values: [[1, "SHOW QUERIES", "db1", "1s"], [2, "SELECT foo FROM bar", "db1", "2s"]]}]}]}
+ const response = {
+ results: [
+ {
+ series: [
+ {
+ columns: ['qid', 'query', 'database', 'duration'],
+ values: [
+ [1, 'SHOW QUERIES', 'db1', '1s'],
+ [2, 'SELECT foo FROM bar', 'db1', '2s'],
+ ],
+ },
+ ],
+ },
+ ],
+ }
const result = showQueriesParser(response)
@@ -24,7 +38,7 @@ describe('showQueriesParser', () => {
})
it('exposes the server error', () => {
- const response = {results: [{error: "internal server error?"}]}
+ const response = {results: [{error: 'internal server error?'}]}
const result = showQueriesParser(response)
diff --git a/ui/spec/shared/parsing/showTagKeysSpec.js b/ui/spec/shared/parsing/showTagKeysSpec.js
index 07dcf8806..ac8d627a3 100644
--- a/ui/spec/shared/parsing/showTagKeysSpec.js
+++ b/ui/spec/shared/parsing/showTagKeysSpec.js
@@ -2,7 +2,15 @@ import parseShowTagKeys from 'shared/parsing/showTagKeys'
describe('parseShowTagKeys', () => {
it('parses the tag keys', () => {
- const response = {results: [{series: [{name: "cpu", columns: ["tagKey"], values: [["cpu"], ["host"]]}]}]}
+ const response = {
+ results: [
+ {
+ series: [
+ {name: 'cpu', columns: ['tagKey'], values: [['cpu'], ['host']]},
+ ],
+ },
+ ],
+ }
const result = parseShowTagKeys(response)
expect(result.errors).to.eql([])
@@ -18,7 +26,7 @@ describe('parseShowTagKeys', () => {
})
it('handles errors', () => {
- const response = {results: [{error: "influxdb error"}]}
+ const response = {results: [{error: 'influxdb error'}]}
const result = parseShowTagKeys(response)
expect(result.errors).to.eql([response.results[0].error])
diff --git a/ui/spec/shared/parsing/showTagValuesSpec.js b/ui/spec/shared/parsing/showTagValuesSpec.js
index bf79868eb..88f22b05f 100644
--- a/ui/spec/shared/parsing/showTagValuesSpec.js
+++ b/ui/spec/shared/parsing/showTagValuesSpec.js
@@ -16,13 +16,13 @@ describe('showTagValuesParser', () => {
{
series: [
{
- name: "measurementA",
- columns: ["key", "value"],
+ name: 'measurementA',
+ columns: ['key', 'value'],
values: [
- ["host", "hostA"],
- ["host", "hostB"],
- ["cpu", "cpu0"],
- ["cpu", "cpu1"],
+ ['host', 'hostA'],
+ ['host', 'hostB'],
+ ['cpu', 'cpu0'],
+ ['cpu', 'cpu1'],
],
},
],
@@ -34,14 +34,8 @@ describe('showTagValuesParser', () => {
expect(result.errors).to.eql([])
expect(result.tags).to.eql({
- host: [
- 'hostA',
- 'hostB',
- ],
- cpu: [
- 'cpu0',
- 'cpu1',
- ],
+ host: ['hostA', 'hostB'],
+ cpu: ['cpu0', 'cpu1'],
})
})
})
diff --git a/ui/spec/shared/presenters/presentersSpec.js b/ui/spec/shared/presenters/presentersSpec.js
index 54d04b348..2cd1d7347 100644
--- a/ui/spec/shared/presenters/presentersSpec.js
+++ b/ui/spec/shared/presenters/presentersSpec.js
@@ -1,20 +1,15 @@
-import {
- buildRoles,
- buildClusterAccounts,
-} from 'shared/presenters'
+import {buildRoles, buildClusterAccounts} from 'shared/presenters'
describe('Presenters', function() {
describe('roles utils', function() {
describe('buildRoles', function() {
describe('when a role has no users', function() {
- it('sets a role\'s users as an empty array', function() {
+ it("sets a role's users as an empty array", function() {
const roles = [
{
- name: "Marketing",
+ name: 'Marketing',
permissions: {
- "": [
- "ViewAdmin",
- ],
+ '': ['ViewAdmin'],
},
},
]
@@ -26,14 +21,11 @@ describe('Presenters', function() {
})
describe('when a role has no permissions', function() {
- it('set\'s a roles permission as an empty array', function() {
+ it("set's a roles permission as an empty array", function() {
const roles = [
{
- name: "Marketing",
- users: [
- "roley@influxdb.com",
- "will@influxdb.com",
- ],
+ name: 'Marketing',
+ users: ['roley@influxdb.com', 'will@influxdb.com'],
},
]
@@ -47,23 +39,13 @@ describe('Presenters', function() {
beforeEach(function() {
const roles = [
{
- name: "Marketing",
+ name: 'Marketing',
permissions: {
- "": [
- "ViewAdmin",
- ],
- db1: [
- "ReadData",
- ],
- db2: [
- "ReadData",
- "AddRemoveNode",
- ],
+ '': ['ViewAdmin'],
+ db1: ['ReadData'],
+ db2: ['ReadData', 'AddRemoveNode'],
},
- users: [
- "roley@influxdb.com",
- "will@influxdb.com",
- ],
+ users: ['roley@influxdb.com', 'will@influxdb.com'],
},
]
@@ -73,8 +55,8 @@ describe('Presenters', function() {
it('each role has a name and a list of users (if they exist)', function() {
const role = this.roles[0]
expect(role.name).to.equal('Marketing')
- expect(role.users).to.contain("roley@influxdb.com")
- expect(role.users).to.contain("will@influxdb.com")
+ expect(role.users).to.contain('roley@influxdb.com')
+ expect(role.users).to.contain('will@influxdb.com')
})
it('transforms permissions into a list of objects and each permission has a list of resources', function() {
@@ -109,46 +91,33 @@ describe('Presenters', function() {
it('adds role information to each cluster account and parses permissions', function() {
const users = [
{
- name: "jon@example.com",
- hash: "xxxxx",
+ name: 'jon@example.com',
+ hash: 'xxxxx',
permissions: {
- "": [
- "ViewAdmin",
- ],
- db1: [
- "ReadData",
- ],
+ '': ['ViewAdmin'],
+ db1: ['ReadData'],
},
},
{
- name: "ned@example.com",
- hash: "xxxxx",
+ name: 'ned@example.com',
+ hash: 'xxxxx',
},
]
const roles = [
{
- name: "Admin",
+ name: 'Admin',
permissions: {
- db2: [
- "ViewAdmin",
- ],
+ db2: ['ViewAdmin'],
},
- users: [
- "jon@example.com",
- "ned@example.com",
- ],
+ users: ['jon@example.com', 'ned@example.com'],
},
{
- name: "Marketing",
+ name: 'Marketing',
permissions: {
- db3: [
- "ReadData",
- ],
+ db3: ['ReadData'],
},
- users: [
- "jon@example.com",
- ],
+ users: ['jon@example.com'],
},
]
@@ -156,8 +125,8 @@ describe('Presenters', function() {
const expected = [
{
- name: "jon@example.com",
- hash: "xxxxx",
+ name: 'jon@example.com',
+ hash: 'xxxxx',
permissions: [
{
name: 'ViewAdmin',
@@ -174,7 +143,7 @@ describe('Presenters', function() {
],
roles: [
{
- name: "Admin",
+ name: 'Admin',
permissions: [
{
name: 'ViewAdmin',
@@ -183,13 +152,10 @@ describe('Presenters', function() {
resources: ['db2'],
},
],
- users: [
- "jon@example.com",
- "ned@example.com",
- ],
+ users: ['jon@example.com', 'ned@example.com'],
},
{
- name: "Marketing",
+ name: 'Marketing',
permissions: [
{
name: 'ReadData',
@@ -198,19 +164,17 @@ describe('Presenters', function() {
resources: ['db3'],
},
],
- users: [
- "jon@example.com",
- ],
+ users: ['jon@example.com'],
},
],
},
{
- name: "ned@example.com",
- hash: "xxxxx",
+ name: 'ned@example.com',
+ hash: 'xxxxx',
permissions: [],
roles: [
{
- name: "Admin",
+ name: 'Admin',
permissions: [
{
name: 'ViewAdmin',
@@ -219,10 +183,7 @@ describe('Presenters', function() {
resources: ['db2'],
},
],
- users: [
- "jon@example.com",
- "ned@example.com",
- ],
+ users: ['jon@example.com', 'ned@example.com'],
},
],
},
@@ -241,10 +202,12 @@ describe('Presenters', function() {
})
it('sets roles to an empty array if a user has no roles', function() {
- const users = [{
- name: "ned@example.com",
- hash: "xxxxx",
- }]
+ const users = [
+ {
+ name: 'ned@example.com',
+ hash: 'xxxxx',
+ },
+ ]
const roles = []
const actual = buildClusterAccounts(users, roles)
diff --git a/ui/spec/shared/reducers/errorsSpec.js b/ui/spec/shared/reducers/errorsSpec.js
index 9729b6efe..8438225c7 100644
--- a/ui/spec/shared/reducers/errorsSpec.js
+++ b/ui/spec/shared/reducers/errorsSpec.js
@@ -5,56 +5,51 @@ import {errorThrown} from 'shared/actions/errors'
import {HTTP_FORBIDDEN} from 'shared/constants'
const errorForbidden = {
- "data":"",
- "status":403,
- "statusText":"Forbidden",
- "headers":{
- "date":"Mon, 17 Apr 2017 18:35:34 GMT",
- "content-length":"0",
- "x-chronograf-version":"1.2.0-beta8-71-gd875ea4a",
- "content-type":"text/plain; charset=utf-8"
- },
- "config":{
- "transformRequest":{
-
+ data: '',
+ status: 403,
+ statusText: 'Forbidden',
+ headers: {
+ date: 'Mon, 17 Apr 2017 18:35:34 GMT',
+ 'content-length': '0',
+ 'x-chronograf-version': '1.2.0-beta8-71-gd875ea4a',
+ 'content-type': 'text/plain; charset=utf-8',
+ },
+ config: {
+ transformRequest: {},
+ transformResponse: {},
+ headers: {
+ Accept: 'application/json, text/plain, */*',
+ 'Content-Type': 'application/json;charset=utf-8',
+ },
+ timeout: 0,
+ xsrfCookieName: 'XSRF-TOKEN',
+ xsrfHeaderName: 'X-XSRF-TOKEN',
+ maxContentLength: -1,
+ method: 'GET',
+ url: '/chronograf/v1/me',
+ data: '{}',
+ params: {},
+ },
+ request: {},
+ auth: {
+ links: [
+ {
+ name: 'github',
+ label: 'Github',
+ login: '/oauth/github/login',
+ logout: '/oauth/github/logout',
+ callback: '/oauth/github/callback',
},
- "transformResponse":{
-
- },
- "headers":{
- "Accept":"application/json, text/plain, *\/*",
- "Content-Type":"application/json;charset=utf-8"
- },
- "timeout":0,
- "xsrfCookieName":"XSRF-TOKEN",
- "xsrfHeaderName":"X-XSRF-TOKEN",
- "maxContentLength":-1,
- "method":"GET",
- "url":"/chronograf/v1/me",
- "data":"{}",
- "params":{
-
- }
- },
- "request":{
-
- },
- "auth":{
- "links":[
- {
- "name":"github",
- "label":"Github",
- "login":"/oauth/github/login",
- "logout":"/oauth/github/logout",
- "callback":"/oauth/github/callback"
- }
- ]
- }
+ ],
+ },
}
describe('Shared.Reducers.errorsReducer', () => {
it('should handle ERROR_THROWN', () => {
- const reducedState = errorsReducer(initialState, errorThrown(errorForbidden))
+ const reducedState = errorsReducer(
+ initialState,
+ errorThrown(errorForbidden)
+ )
expect(reducedState.error.status).to.equal(HTTP_FORBIDDEN)
})
diff --git a/ui/spec/shared/reducers/sourcesSpec.js b/ui/spec/shared/reducers/sourcesSpec.js
index a1a97fec8..c6115f94f 100644
--- a/ui/spec/shared/reducers/sourcesSpec.js
+++ b/ui/spec/shared/reducers/sourcesSpec.js
@@ -1,45 +1,56 @@
import reducer from 'shared/reducers/sources'
-import {
- loadSources,
- updateSource,
- addSource,
-} from 'shared/actions/sources'
+import {loadSources, updateSource, addSource} from 'shared/actions/sources'
describe('Shared.Reducers.sources', () => {
it('can correctly show default sources when adding a source', () => {
let state = []
- state = reducer(state, addSource({
- id: '1',
- "default": true,
- }))
+ state = reducer(
+ state,
+ addSource({
+ id: '1',
+ default: true,
+ })
+ )
- state = reducer(state, addSource({
- id: '2',
- "default": true,
- }))
+ state = reducer(
+ state,
+ addSource({
+ id: '2',
+ default: true,
+ })
+ )
- expect(state.filter((s) => s.default).length).to.equal(1)
+ expect(state.filter(s => s.default).length).to.equal(1)
})
it('can correctly show default sources when updating a source', () => {
let state = []
- state = reducer(state, addSource({
- id: '1',
- "default": true,
- }))
+ state = reducer(
+ state,
+ addSource({
+ id: '1',
+ default: true,
+ })
+ )
- state = reducer(state, addSource({
- id: '2',
- "default": true,
- }))
+ state = reducer(
+ state,
+ addSource({
+ id: '2',
+ default: true,
+ })
+ )
- state = reducer(state, updateSource({
- id: '1',
- "default": true,
- }))
+ state = reducer(
+ state,
+ updateSource({
+ id: '1',
+ default: true,
+ })
+ )
expect(state.find(({id}) => id === '1').default).to.equal(true)
expect(state.find(({id}) => id === '2').default).to.equal(false)
diff --git a/ui/spec/utils/formattingSpec.js b/ui/spec/utils/formattingSpec.js
index d3af80d1d..c34c50221 100644
--- a/ui/spec/utils/formattingSpec.js
+++ b/ui/spec/utils/formattingSpec.js
@@ -14,7 +14,7 @@ describe('Formatting helpers', () => {
expect(actual).to.equal('0 Bytes')
})
- it('converts a raw byte value into it\'s most appropriate unit', () => {
+ it("converts a raw byte value into it's most appropriate unit", () => {
expect(formatBytes(1000)).to.equal('1 KB')
expect(formatBytes(1000000)).to.equal('1 MB')
expect(formatBytes(1000000000)).to.equal('1 GB')
diff --git a/ui/spec/utils/timeSeriesToDygraphSpec.js b/ui/spec/utils/timeSeriesToDygraphSpec.js
index 5dfb04e53..167ed2036 100644
--- a/ui/spec/utils/timeSeriesToDygraphSpec.js
+++ b/ui/spec/utils/timeSeriesToDygraphSpec.js
@@ -4,14 +4,13 @@ describe('timeSeriesToDygraph', () => {
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
const influxResponse = [
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m1",
- columns: ["time", "f1"],
+ name: 'm1',
+ columns: ['time', 'f1'],
values: [[1000, 1], [2000, 2]],
},
],
@@ -19,8 +18,8 @@ describe('timeSeriesToDygraph', () => {
{
series: [
{
- name: "m1",
- columns: ["time", "f2"],
+ name: 'm1',
+ columns: ['time', 'f2'],
values: [[2000, 3], [4000, 4]],
},
],
@@ -33,11 +32,7 @@ describe('timeSeriesToDygraph', () => {
const actual = timeSeriesToDygraph(influxResponse)
const expected = {
- labels: [
- 'time',
- `m1.f1`,
- `m1.f2`,
- ],
+ labels: ['time', `m1.f1`, `m1.f2`],
timeSeries: [
[new Date(1000), 1, null],
[new Date(2000), 2, 3],
@@ -59,14 +54,13 @@ describe('timeSeriesToDygraph', () => {
it('can sort numerical timestamps correctly', () => {
const influxResponse = [
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m1",
- columns: ["time", "f1"],
+ name: 'm1',
+ columns: ['time', 'f1'],
values: [[100, 1], [3000, 3], [200, 2]],
},
],
@@ -76,19 +70,11 @@ describe('timeSeriesToDygraph', () => {
},
]
-
const actual = timeSeriesToDygraph(influxResponse)
const expected = {
- labels: [
- 'time',
- 'm1.f1',
- ],
- timeSeries: [
- [new Date(100), 1],
- [new Date(200), 2],
- [new Date(3000), 3],
- ],
+ labels: ['time', 'm1.f1'],
+ timeSeries: [[new Date(100), 1], [new Date(200), 2], [new Date(3000), 3]],
}
expect(actual.timeSeries).to.deep.equal(expected.timeSeries)
@@ -97,14 +83,13 @@ describe('timeSeriesToDygraph', () => {
it('can parse multiple responses into two axes', () => {
const influxResponse = [
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m1",
- columns: ["time", "f1"],
+ name: 'm1',
+ columns: ['time', 'f1'],
values: [[1000, 1], [2000, 2]],
},
],
@@ -112,8 +97,8 @@ describe('timeSeriesToDygraph', () => {
{
series: [
{
- name: "m1",
- columns: ["time", "f2"],
+ name: 'm1',
+ columns: ['time', 'f2'],
values: [[2000, 3], [4000, 4]],
},
],
@@ -122,14 +107,13 @@ describe('timeSeriesToDygraph', () => {
},
},
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m3",
- columns: ["time", "f3"],
+ name: 'm3',
+ columns: ['time', 'f3'],
values: [[1000, 1], [2000, 2]],
},
],
@@ -159,14 +143,13 @@ describe('timeSeriesToDygraph', () => {
it('can parse multiple responses with the same field and measurement', () => {
const influxResponse = [
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m1",
- columns: ["time", "f1"],
+ name: 'm1',
+ columns: ['time', 'f1'],
values: [[1000, 1], [2000, 2]],
},
],
@@ -175,14 +158,13 @@ describe('timeSeriesToDygraph', () => {
},
},
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m1",
- columns: ["time", "f1"],
+ name: 'm1',
+ columns: ['time', 'f1'],
values: [[2000, 3], [4000, 4]],
},
],
@@ -195,11 +177,7 @@ describe('timeSeriesToDygraph', () => {
const actual = timeSeriesToDygraph(influxResponse)
const expected = {
- labels: [
- 'time',
- `m1.f1`,
- `m1.f1`,
- ],
+ labels: ['time', `m1.f1`, `m1.f1`],
timeSeries: [
[new Date(1000), 1, null],
[new Date(2000), 2, 3],
@@ -218,14 +196,13 @@ describe('timeSeriesToDygraph', () => {
it('it does not use multiple axes if being used for the DataExplorer', () => {
const influxResponse = [
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m1",
- columns: ["time", "f1"],
+ name: 'm1',
+ columns: ['time', 'f1'],
values: [[1000, 1], [2000, 2]],
},
],
@@ -234,14 +211,13 @@ describe('timeSeriesToDygraph', () => {
},
},
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "m1",
- columns: ["time", "f2"],
+ name: 'm1',
+ columns: ['time', 'f2'],
values: [[2000, 3], [4000, 4]],
},
],
@@ -252,7 +228,11 @@ describe('timeSeriesToDygraph', () => {
]
const isInDataExplorer = true
- const actual = timeSeriesToDygraph(influxResponse, undefined, isInDataExplorer)
+ const actual = timeSeriesToDygraph(
+ influxResponse,
+ undefined,
+ isInDataExplorer
+ )
const expected = {}
@@ -262,14 +242,13 @@ describe('timeSeriesToDygraph', () => {
it('parses a raw InfluxDB response into a dygraph friendly data format', () => {
const influxResponse = [
{
- response:
- {
+ response: {
results: [
{
series: [
{
- name: "mb",
- columns: ["time", "f1"],
+ name: 'mb',
+ columns: ['time', 'f1'],
values: [[1000, 1], [2000, 2]],
},
],
@@ -277,8 +256,8 @@ describe('timeSeriesToDygraph', () => {
{
series: [
{
- name: "ma",
- columns: ["time", "f1"],
+ name: 'ma',
+ columns: ['time', 'f1'],
values: [[1000, 1], [2000, 2]],
},
],
@@ -286,8 +265,8 @@ describe('timeSeriesToDygraph', () => {
{
series: [
{
- name: "mc",
- columns: ["time", "f2"],
+ name: 'mc',
+ columns: ['time', 'f2'],
values: [[2000, 3], [4000, 4]],
},
],
@@ -295,8 +274,8 @@ describe('timeSeriesToDygraph', () => {
{
series: [
{
- name: "mc",
- columns: ["time", "f1"],
+ name: 'mc',
+ columns: ['time', 'f1'],
values: [[2000, 3], [4000, 4]],
},
],
@@ -308,13 +287,7 @@ describe('timeSeriesToDygraph', () => {
const actual = timeSeriesToDygraph(influxResponse)
- const expected = [
- 'time',
- `ma.f1`,
- `mb.f1`,
- `mc.f1`,
- `mc.f2`,
- ]
+ const expected = ['time', `ma.f1`, `mb.f1`, `mc.f1`, `mc.f2`]
expect(actual.labels).to.deep.equal(expected)
})
diff --git a/ui/src/admin/components/AdminTabs.js b/ui/src/admin/components/AdminTabs.js
index ad58b7bc3..2c78b292f 100644
--- a/ui/src/admin/components/AdminTabs.js
+++ b/ui/src/admin/components/AdminTabs.js
@@ -1,11 +1,5 @@
import React, {PropTypes} from 'react'
-import {
- Tab,
- Tabs,
- TabPanel,
- TabPanels,
- TabList,
-} from 'shared/components/Tabs'
+import {Tab, Tabs, TabPanel, TabPanels, TabList} from 'shared/components/Tabs'
import UsersTable from 'src/admin/components/UsersTable'
import RolesTable from 'src/admin/components/RolesTable'
import QueriesPage from 'src/admin/containers/QueriesPage'
@@ -97,9 +91,9 @@ const AdminTabs = ({
{tabs.map((t, i) =>
You don't have any {tableName},
why not create one?
{VERSION} / Time-Series Data Visualization
          {auth.links &&
-            auth.links.map(({name, login, label}) => (
+            auth.links.map(({name, login, label}) =>
              Login with {label}
-            ))}
+            )}
          Made by InfluxData
diff --git a/ui/src/dashboards/components/OverlayControls.js b/ui/src/dashboards/components/OverlayControls.js
index c173ab91c..a13a29f72 100644
--- a/ui/src/dashboards/components/OverlayControls.js
+++ b/ui/src/dashboards/components/OverlayControls.js
@@ -19,7 +19,7 @@ const OverlayControls = props => {
Visualization Type:
Send this Alert to:
-      text: 'Concatenation of all group-by tags of the form [key=value,]+. If no groupBy is performed equal to literal "nil"',
+      text:
+        'Concatenation of all group-by tags of the form [key=value,]+. If no groupBy is performed equal to literal "nil"',
},
tags: {
label: '{{.Tags}}',
-      text: 'Map of tags. Use {{ index .Tags "key" }} to get a specific tag value',
+      text:
+        'Map of tags. Use {{ index .Tags "key" }} to get a specific tag value',
},
level: {
label: '{{.Level}}',
-      text: 'Alert Level, one of: INFO WARNING CRITICAL',
+      text:
+        'Alert Level, one of: INFO WARNING CRITICAL',
},
fields: {
label: '{{ index .Fields "value" }}',
-      text: 'Map of fields. Use {{ index .Fields "key" }} to get a specific field value',
+      text:
+        'Map of fields. Use {{ index .Fields "key" }} to get a specific field value',
},
time: {
label: '{{.Time}}',
diff --git a/ui/src/shared/components/ConfirmButtons.js b/ui/src/shared/components/ConfirmButtons.js
index 5dfc1ccdf..afd34e42c 100644
--- a/ui/src/shared/components/ConfirmButtons.js
+++ b/ui/src/shared/components/ConfirmButtons.js
@@ -1,13 +1,7 @@
import React, {PropTypes} from 'react'
import classnames from 'classnames'
-const ConfirmButtons = ({
- onConfirm,
- item,
- onCancel,
- buttonSize,
- isDisabled,
-}) => (
+const ConfirmButtons = ({onConfirm, item, onCancel, buttonSize, isDisabled}) =>
-          The current source does not have an associated Kapacitor instance, please configure one.
+          The current source does not have an associated Kapacitor instance,
+          please configure one.
Add Kapacitor
- A cluster can have an alias that replaces its ID in the interface.
+ A cluster can have an alias that replaces its ID in the
+ interface.
This does not affect the cluster ID.