Merge branch 'ndrone-feature/hosts'

pull/10616/head
Chris Goller 2017-10-30 19:04:20 -05:00
commit 7d70118d61
14 changed files with 174 additions and 66 deletions

View File

@ -1,5 +1,10 @@
## v1.3.11.0 [unreleased]
### Bug Fixes
1. [#2157](https://github.com/influxdata/chronograf/pull/2157): Fix logscale producing console errors when only one point in graph
1. [#2158](https://github.com/influxdata/chronograf/pull/2158): Fix 'Cannot connect to source' false error flag on Dashboard page
1. [#2167](https://github.com/influxdata/chronograf/pull/2167): Add fractions of seconds to time field in csv export
1. [#2087](https://github.com/influxdata/chronograf/pull/2087): Fix Chronograf requiring Telegraf's CPU and system plugins to ensure that all Apps appear on the HOST LIST page.
### Features
### UI Improvements

View File

@ -7,39 +7,57 @@ import (
"github.com/influxdata/chronograf"
)
type logResponseWriter struct {
// statusWriterFlusher captures the status header of an http.ResponseWriter
// and is a flusher
type statusWriter struct {
http.ResponseWriter
responseCode int
Flusher http.Flusher
status int
}
func (l *logResponseWriter) WriteHeader(status int) {
l.responseCode = status
l.ResponseWriter.WriteHeader(status)
func (w *statusWriter) WriteHeader(status int) {
w.status = status
w.ResponseWriter.WriteHeader(status)
}
func (w *statusWriter) Status() int { return w.status }
// Flush is here because the underlying HTTP chunked transfer response writer
// needs to implement http.Flusher for data to be sent to the client without
// buffering. Without it data is silently buffered. This was discovered when
// proxying kapacitor chunked logs.
func (w *statusWriter) Flush() {
	// Only flush when the wrapped ResponseWriter actually supports flushing;
	// Flusher is nil when the underlying writer is not an http.Flusher.
	if w.Flusher != nil {
		w.Flusher.Flush()
	}
}
// Logger is middleware that logs the request
func Logger(logger chronograf.Logger, next http.Handler) http.Handler {
fn := func(w http.ResponseWriter, r *http.Request) {
now := time.Now()
logger.
WithField("component", "server").
logger.WithField("component", "server").
WithField("remote_addr", r.RemoteAddr).
WithField("method", r.Method).
WithField("url", r.URL).
Info("Request")
Debug("Request")
lrr := &logResponseWriter{w, 0}
next.ServeHTTP(lrr, r)
sw := &statusWriter{
ResponseWriter: w,
}
if f, ok := w.(http.Flusher); ok {
sw.Flusher = f
}
next.ServeHTTP(sw, r)
later := time.Now()
elapsed := later.Sub(now)
logger.
WithField("component", "server").
WithField("remote_addr", r.RemoteAddr).
WithField("method", r.Method).
WithField("response_time", elapsed.String()).
WithField("code", lrr.responseCode).
Info("Response: ", http.StatusText(lrr.responseCode))
WithField("status", sw.Status()).
Info("Response: ", http.StatusText(sw.Status()))
}
return http.HandlerFunc(fn)
}

View File

@ -9,7 +9,8 @@ import (
type interceptingResponseWriter struct {
http.ResponseWriter
Prefix string
Flusher http.Flusher
Prefix string
}
func (i *interceptingResponseWriter) WriteHeader(status int) {
@ -25,11 +26,26 @@ func (i *interceptingResponseWriter) WriteHeader(status int) {
i.ResponseWriter.WriteHeader(status)
}
// PrefixingRedirector alters the Location header of downstream http.Handlers
// Flush is here because the underlying HTTP chunked transfer response writer
// needs to implement http.Flusher for data to reach the client without
// buffering. Without it data is silently buffered. This was discovered when
// proxying kapacitor chunked logs.
func (i *interceptingResponseWriter) Flush() {
	// Flusher is nil when the wrapped ResponseWriter does not support
	// http.Flusher, so guard before delegating.
	if i.Flusher != nil {
		i.Flusher.Flush()
	}
}
// PrefixedRedirect alters the Location header of downstream http.Handlers
// to include a specified prefix
func PrefixedRedirect(prefix string, next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
iw := &interceptingResponseWriter{w, prefix}
iw := &interceptingResponseWriter{
ResponseWriter: w,
Prefix: prefix,
}
if flusher, ok := w.(http.Flusher); ok {
iw.Flusher = flusher
}
next.ServeHTTP(iw, r)
})
}

View File

@ -5,6 +5,8 @@ import (
"net/http"
"net/http/httputil"
"net/url"
"strings"
"time"
)
// KapacitorProxy proxies requests to kapacitor using the path query parameter.
@ -34,28 +36,33 @@ func (h *Service) KapacitorProxy(w http.ResponseWriter, r *http.Request) {
return
}
u, err := url.Parse(srv.URL)
// To preserve any HTTP query arguments to the kapacitor path,
// we concat and parse them into u.
uri := singleJoiningSlash(srv.URL, path)
u, err := url.Parse(uri)
if err != nil {
msg := fmt.Sprintf("Error parsing kapacitor url: %v", err)
Error(w, http.StatusUnprocessableEntity, msg, h.Logger)
return
}
u.Path = path
director := func(req *http.Request) {
// Set the Host header of the original Kapacitor URL
req.Host = u.Host
req.URL = u
// Because we are acting as a proxy, kapacitor needs to have the basic auth information set as
// a header directly
if srv.Username != "" && srv.Password != "" {
req.SetBasicAuth(srv.Username, srv.Password)
}
}
// Without a FlushInterval the HTTP Chunked response for kapacitor logs is
// buffered and flushed every 30 seconds.
proxy := &httputil.ReverseProxy{
Director: director,
Director: director,
FlushInterval: time.Second,
}
proxy.ServeHTTP(w, r)
}
@ -79,3 +86,15 @@ func (h *Service) KapacitorProxyGet(w http.ResponseWriter, r *http.Request) {
func (h *Service) KapacitorProxyDelete(w http.ResponseWriter, r *http.Request) {
h.KapacitorProxy(w, r)
}
// singleJoiningSlash joins a and b so that exactly one slash sits at the
// join point (mirroring the helper of the same name in net/http/httputil):
// a trailing slash on a and a leading slash on b collapse into one, and if
// neither is present a slash is inserted.
func singleJoiningSlash(a, b string) string {
	// TrimSuffix and TrimPrefix each strip at most one "/", which makes this
	// single expression equivalent to the explicit HasSuffix/HasPrefix case
	// analysis for every combination of trailing/leading slashes.
	return strings.TrimSuffix(a, "/") + "/" + strings.TrimPrefix(b, "/")
}

View File

@ -3,13 +3,16 @@ import {
formatDate,
dashboardtoCSV,
} from 'shared/parsing/resultsToCSV'
import moment from 'moment'
describe('formatDate', () => {
it('converts timestamp to an excel compatible date string', () => {
const timestamp = 1000000000000
const result = formatDate(timestamp)
expect(result).to.be.a('string')
expect(+new Date(result)).to.equal(timestamp)
expect(moment(result, 'M/D/YYYY h:mm:ss.SSSSSSSSS A').valueOf()).to.equal(
timestamp
)
})
})

View File

@ -281,7 +281,8 @@ export const updateTempVarValues = (source, dashboard) => async dispatch => {
results.forEach(({data}, i) => {
const {type, query, id} = tempsWithQueries[i]
const vals = parsers[type](data, query.tagKey || query.measurement)[type]
const parsed = parsers[type](data, query.tagKey || query.measurement)
const vals = parsed[type]
dispatch(editTemplateVariableValues(dashboard.id, id, vals))
})
} catch (error) {

View File

@ -6,6 +6,7 @@ import {Tabber, Tab} from 'src/dashboards/components/Tabber'
import {DISPLAY_OPTIONS, TOOLTIP_CONTENT} from 'src/dashboards/constants'
const {LINEAR, LOG, BASE_2, BASE_10} = DISPLAY_OPTIONS
// Log scale cannot display zero or negative values, so expose '0' as the
// input's minimum; any other scale returns null, leaving the min attribute
// unset.
const getInputMin = scale => {
  if (scale === LOG) {
    return '0'
  }
  return null
}
const AxesOptions = ({
axes: {y: {bounds, label, prefix, suffix, base, scale, defaultYLabel}},
@ -38,6 +39,7 @@ const AxesOptions = ({
customValue={min}
onSetValue={onSetYAxisBoundMin}
type="number"
min={getInputMin(scale)}
/>
</div>
<div className="form-group col-sm-6">
@ -47,6 +49,7 @@ const AxesOptions = ({
customValue={max}
onSetValue={onSetYAxisBoundMax}
type="number"
min={getInputMin(scale)}
/>
</div>
<Input

View File

@ -11,7 +11,7 @@ export function getCpuAndLoadForHosts(proxyLink, telegrafDB) {
SELECT mean("Percent_Processor_Time") FROM win_cpu WHERE time > now() - 10m GROUP BY host;
SELECT mean("Processor_Queue_Length") FROM win_system WHERE time > now() - 10s GROUP BY host;
SELECT non_negative_derivative(mean("System_Up_Time")) AS winDeltaUptime FROM win_system WHERE time > now() - 10m GROUP BY host, time(1m) fill(0);
SHOW TAG VALUES FROM /win_system|system/ WITH KEY = "host"`,
SHOW TAG VALUES WITH KEY = "host";`,
db: telegrafDB,
}).then(resp => {
const hosts = {}
@ -87,7 +87,7 @@ export async function getAllHosts(proxyLink, telegrafDB) {
try {
const resp = await proxy({
source: proxyLink,
query: 'show tag values from /win_system|system/ with key = "host"',
query: 'show tag values with key = "host"',
db: telegrafDB,
})
const hosts = {}

View File

@ -14,6 +14,7 @@ class ClickOutsideInput extends Component {
render() {
const {
id,
min,
type,
onFocus,
onChange,
@ -27,6 +28,7 @@ class ClickOutsideInput extends Component {
<input
className="form-control input-sm"
id={id}
min={min}
type={type}
name={customPlaceholder}
ref={onGetRef}
@ -43,6 +45,7 @@ class ClickOutsideInput extends Component {
const {func, string} = PropTypes
ClickOutsideInput.propTypes = {
min: string,
id: string.isRequired,
type: string.isRequired,
customPlaceholder: string.isRequired,

View File

@ -43,7 +43,6 @@ export default class Dygraph extends Component {
componentDidMount() {
const {
axes: {y, y2},
ruleValues,
isGraphFilled: fillGraph,
isBarGraph,
options,
@ -63,9 +62,7 @@ export default class Dygraph extends Component {
plugins: [new Dygraphs.Plugins.Crosshair({direction: 'vertical'})],
axes: {
y: {
valueRange: options.stackedGraph
? getStackedRange(y.bounds)
: getRange(timeSeries, y.bounds, ruleValues),
valueRange: this.getYRange(timeSeries),
axisLabelFormatter: (yval, __, opts) =>
numberValueFormatter(yval, opts, y.prefix, y.suffix),
axisLabelWidth: this.getLabelWidth(),
@ -130,7 +127,7 @@ export default class Dygraph extends Component {
}
componentDidUpdate() {
const {labels, axes: {y, y2}, options, ruleValues, isBarGraph} = this.props
const {labels, axes: {y, y2}, options, isBarGraph} = this.props
const dygraph = this.dygraph
if (!dygraph) {
@ -149,9 +146,7 @@ export default class Dygraph extends Component {
ylabel: this.getLabel('y'),
axes: {
y: {
valueRange: options.stackedGraph
? getStackedRange(y.bounds)
: getRange(timeSeries, y.bounds, ruleValues),
valueRange: this.getYRange(timeSeries),
axisLabelFormatter: (yval, __, opts) =>
numberValueFormatter(yval, opts, y.prefix, y.suffix),
axisLabelWidth: this.getLabelWidth(),
@ -175,6 +170,24 @@ export default class Dygraph extends Component {
this.resize()
}
// Computes the y-axis valueRange Dygraph should use for the given
// timeSeries, based on the configured bounds, scale, and rule values
// read from this.props.
getYRange = timeSeries => {
  const {options, axes: {y}, ruleValues} = this.props
  // Stacked graphs derive their range from the summed series bounds.
  if (options.stackedGraph) {
    return getStackedRange(y.bounds)
  }
  const range = getRange(timeSeries, y.bounds, ruleValues)
  const [min, max] = range
  // Bug in Dygraph calculates a negative range for logscale when min range is 0
  if (y.scale === LOG && timeSeries.length === 1 && min <= 0) {
    return [0.1, max]
  }
  return range
}
handleZoom = (lower, upper) => {
const {onZoom} = this.props

View File

@ -99,7 +99,7 @@ class OptIn extends Component {
handleInputRef = el => (this.customValueInput = el)
render() {
const {fixedPlaceholder, customPlaceholder, type} = this.props
const {fixedPlaceholder, customPlaceholder, type, min} = this.props
const {useCustomValue, customValue} = this.state
return (
@ -110,6 +110,7 @@ class OptIn extends Component {
>
<ClickOutsideInput
id={this.id}
min={min}
type={type}
customValue={customValue}
onGetRef={this.handleInputRef}
@ -119,7 +120,6 @@ class OptIn extends Component {
onKeyDown={this.handleKeyDownCustomValueInput}
handleClickOutsideInput={this.handleClickOutsideInput}
/>
<div
className="opt-in--groove-knob-container"
id={this.id}
@ -141,15 +141,16 @@ class OptIn extends Component {
}
OptIn.defaultProps = {
fixedPlaceholder: 'auto',
fixedValue: '',
customPlaceholder: 'Custom Value',
fixedPlaceholder: 'auto',
customValue: '',
}
const {func, oneOf, string} = PropTypes
OptIn.propTypes = {
min: string,
fixedPlaceholder: string,
fixedValue: string,
customPlaceholder: string,

View File

@ -1,3 +1,4 @@
import _ from 'lodash'
import databases from 'shared/parsing/showDatabases'
import measurements from 'shared/parsing/showMeasurements'
import fieldKeys from 'shared/parsing/showFieldKeys'
@ -8,16 +9,19 @@ const parsers = {
databases,
measurements: data => {
const {errors, measurementSets} = measurements(data)
return {errors, measurements: measurementSets[0].measurements}
return {
errors,
measurements: _.get(measurementSets, ['0', 'measurements'], []),
}
},
fieldKeys: (data, key) => {
const {errors, fieldSets} = fieldKeys(data)
return {errors, fieldKeys: fieldSets[key]}
return {errors, fieldKeys: _.get(fieldSets, key, [])}
},
tagKeys,
tagValues: (data, key) => {
const {errors, tags} = tagValues(data)
return {errors, tagValues: tags[key]}
return {errors, tagValues: _.get(tags, key, [])}
},
}

View File

@ -2,7 +2,7 @@ import _ from 'lodash'
import moment from 'moment'
export const formatDate = timestamp =>
moment(timestamp).format('M/D/YYYY h:mm:ss A')
moment(timestamp).format('M/D/YYYY h:mm:ss.SSSSSSSSS A')
export const resultsToCSV = results => {
if (!_.get(results, ['0', 'series', '0'])) {

View File

@ -1,14 +1,14 @@
/* eslint-disable no-var */
var webpack = require('webpack');
var path = require('path');
var ExtractTextPlugin = require("extract-text-webpack-plugin");
var HtmlWebpackPlugin = require("html-webpack-plugin");
var package = require('../package.json');
var dependencies = package.dependencies;
var webpack = require('webpack')
var path = require('path')
var ExtractTextPlugin = require('extract-text-webpack-plugin')
var HtmlWebpackPlugin = require('html-webpack-plugin')
var package = require('../package.json')
var dependencies = package.dependencies
var config = {
bail: true,
devtool: 'eval',
devtool: 'eval',
entry: {
app: path.resolve(__dirname, '..', 'src', 'index.js'),
vendor: Object.keys(dependencies),
@ -28,6 +28,15 @@ var config = {
},
},
module: {
noParse: [
path.resolve(
__dirname,
'..',
'node_modules',
'memoizerific',
'memoizerific.js'
),
],
preLoaders: [
{
test: /\.js$/,
@ -42,15 +51,21 @@ var config = {
},
{
test: /\.scss$/,
loader: ExtractTextPlugin.extract('style-loader', 'css-loader!sass-loader!resolve-url!sass?sourceMap'),
loader: ExtractTextPlugin.extract(
'style-loader',
'css-loader!sass-loader!resolve-url!sass?sourceMap'
),
},
{
test: /\.css$/,
loader: ExtractTextPlugin.extract('style-loader', 'css-loader!postcss-loader'),
loader: ExtractTextPlugin.extract(
'style-loader',
'css-loader!postcss-loader'
),
},
{
test : /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
loader : 'file',
test: /\.(ico|png|cur|jpg|ttf|eot|svg|woff(2)?)(\?[a-z0-9]+)?$/,
loader: 'file',
},
{
test: /\.js$/,
@ -74,10 +89,10 @@ var config = {
},
}),
new webpack.ProvidePlugin({
$: "jquery",
jQuery: "jquery",
$: 'jquery',
jQuery: 'jquery',
}),
new ExtractTextPlugin("chronograf.css"),
new ExtractTextPlugin('chronograf.css'),
new HtmlWebpackPlugin({
template: path.resolve(__dirname, '..', 'src', 'index.template.html'),
inject: 'body',
@ -86,21 +101,28 @@ var config = {
}),
new webpack.optimize.UglifyJsPlugin({
compress: {
warnings: false
}
warnings: false,
},
}),
new webpack.optimize.CommonsChunkPlugin({
names: ['vendor', 'manifest'],
}),
function() { /* Webpack does not exit with non-zero status if error. */
this.plugin("done", function(stats) {
if (stats.compilation.errors && stats.compilation.errors.length && process.argv.indexOf("--watch") == -1) {
console.log(stats.compilation.errors.toString({
colors: true
}));
process.exit(1);
function() {
/* Webpack does not exit with non-zero status if error. */
this.plugin('done', function(stats) {
if (
stats.compilation.errors &&
stats.compilation.errors.length &&
process.argv.indexOf('--watch') == -1
) {
console.log(
stats.compilation.errors.toString({
colors: true,
})
)
process.exit(1)
}
});
})
},
new webpack.DefinePlugin({
VERSION: JSON.stringify(require('../package.json').version),
@ -108,6 +130,6 @@ var config = {
],
postcss: require('./postcss'),
target: 'web',
};
}
module.exports = config;
module.exports = config