Fix merge conflicts

commit 54b5c3dd47

@@ -2,9 +2,12 @@

### Upcoming Bug Fixes
1. [#788](https://github.com/influxdata/chronograf/pull/788): Fix missing fields in data explorer when using non-default retention policy
1. [#774](https://github.com/influxdata/chronograf/issues/774): Fix gaps in layouts for hosts

### Upcoming Features
1. [#779](https://github.com/influxdata/chronograf/issues/779): Add layout for telegraf's diskio system plugin
1. [#810](https://github.com/influxdata/chronograf/issues/810): Add layout for telegraf's net system plugin
1. [#811](https://github.com/influxdata/chronograf/issues/811): Add layout for telegraf's procstat plugin

### Upcoming UI Improvements
@@ -58,8 +58,8 @@ After installing gvm you can install and set the default go version by

running the following:

```bash
-gvm install go1.7.4
-gvm use go1.7.4 --default
+gvm install go1.7.5
+gvm use go1.7.5 --default
```

Installing GDM
@@ -43,8 +43,10 @@ Currently, Chronograf offers dashboard templates for the following Telegraf inpu

* [Disk](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/DISK_README.md)
* [DiskIO](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/disk.go#L136)
* [Memory](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/MEM_README.md)
* [Net](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/net.go)
* [Netstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/NETSTAT_README.md)
* [Processes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/PROCESSES_README.md)
* [Procstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/procstat/README.md)
* Varnish
* Windows Performance Counters
@@ -111,7 +113,7 @@ docker pull quay.io/influxdb/chronograf:latest

### From Source

-* Chronograf works with go 1.7.4, npm 3.10.7 and node v6.6.0. Additional version support of these projects will be implemented soon, but these are the only supported versions to date.
+* Chronograf works with go 1.7.x, node 6.x/7.x, and npm 3.x.
* Chronograf requires [Kapacitor](https://github.com/influxdata/kapacitor) 1.1.x+ to create and store alerts.

1. [Install Go](https://golang.org/doc/install)
@@ -0,0 +1,49 @@

{
  "id": "4585a7db-73af-4ca1-9378-47ee67c71f99",
  "measurement": "net",
  "app": "system",
  "autoflow": true,
  "cells": [
    {
      "x": 0,
      "y": 0,
      "w": 4,
      "h": 4,
      "i": "e2f65d45-1898-4a16-860c-14b655575925",
      "name": "System – Network Mb/s",
      "queries": [
        {
          "query": "SELECT non_negative_derivative(max(\"bytes_recv\"), 1s) / 125000 as \"rx_megabits_per_second\" FROM \"net\"",
          "groupbys": [],
          "wheres": [],
          "label": "Mb/s"
        },
        {
          "query": "SELECT non_negative_derivative(max(\"bytes_sent\"), 1s) / 125000 as \"tx_megabits_per_second\" FROM \"net\"",
          "groupbys": [],
          "wheres": []
        }
      ]
    },
    {
      "x": 0,
      "y": 0,
      "w": 4,
      "h": 4,
      "i": "5e957624-b28b-4904-8068-5e7a9a058609",
      "name": "System – Network Error Rate",
      "queries": [
        {
          "query": "SELECT non_negative_derivative(max(\"err_in\"), 1s) / 125000 as \"tx_errors_per_second\" FROM \"net\"",
          "groupbys": [],
          "wheres": []
        },
        {
          "query": "SELECT non_negative_derivative(max(\"err_out\"), 1s) / 125000 as \"rx_errors_per_second\" FROM \"net\"",
          "groupbys": [],
          "wheres": []
        }
      ]
    }
  ]
}
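For context on the `/ 125000` factor in the bandwidth queries above: the derivative of `bytes_recv`/`bytes_sent` is in bytes per second, and one megabit is 1,000,000 bits, i.e. 125,000 bytes, so dividing by 125,000 yields megabits per second. A minimal sketch of the same arithmetic (illustration only; the sample value is made up):

```js
// Bytes/second -> megabits/second, as used in the layout queries above.
// 1 megabit = 1,000,000 bits = 125,000 bytes.
const bytesPerSecond = 2500000;                     // hypothetical derivative of bytes_recv
const megabitsPerSecond = bytesPerSecond / 125000;  // 20 Mb/s
console.log(megabitsPerSecond);
```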
@@ -49,7 +49,7 @@ cat > $APP_FILE << EOF

      "i": "$CELLID",
      "name": "User facing cell Name",
      "queries": [{
-        "query": "select mean(\"used_percent from\") from disk",
+        "query": "select mean(\"used_percent\") from disk",
        "groupbys": [],
        "wheres": []
      }]
@@ -0,0 +1,40 @@

{
  "id": "44644fae-21e7-4897-81e6-b11d2643cd61",
  "measurement": "procstat",
  "app": "system",
  "autoflow": true,
  "cells": [
    {
      "x": 0,
      "y": 0,
      "w": 4,
      "h": 4,
      "i": "e75a6baa-9938-4ade-b83f-55a239039964",
      "name": "Processes – Resident Memory (MB)",
      "queries": [
        {
          "query": "SELECT max(\"memory_rss\") / 1000000 AS \"max_mb_memory_rss\" FROM \"procstat\"",
          "groupbys": ["\"exe\""],
          "wheres": [],
          "label": "MB"
        }
      ]
    },
    {
      "x": 0,
      "y": 0,
      "w": 4,
      "h": 4,
      "i": "2bfae447-47c6-4f85-9fec-494301d29a04",
      "name": "Processes – CPU Usage %",
      "queries": [
        {
          "query": "SELECT max(\"cpu_usage\") AS \"cpu_usage\" FROM \"procstat\"",
          "groupbys": ["\"exe\""],
          "wheres": [],
          "label": "%"
        }
      ]
    }
  ]
}
@@ -3,7 +3,7 @@ machine:

  services:
    - docker
  environment:
-    DOCKER_TAG: chronograf-20161207
+    DOCKER_TAG: chronograf-20170127

dependencies:
  override:
@@ -26,7 +26,7 @@ deployment:

        --package
        --platform all
        --arch all
-        --upload
+        --upload-overwrite
      - sudo chown -R ubuntu:ubuntu /home/ubuntu
      - cp build/linux/static_amd64/chronograf .
      - docker build -t chronograf .
@@ -46,7 +46,7 @@ deployment:

        --package
        --platform all
        --arch all
-        --upload
+        --upload-overwrite
        --bucket dl.influxdata.com/chronograf/releases
      - sudo chown -R ubuntu:ubuntu /home/ubuntu
      - cp build/linux/static_amd64/chronograf .
@@ -67,7 +67,7 @@ deployment:

        --package
        --platform all
        --arch all
-        --upload
+        --upload-overwrite
        --bucket dl.influxdata.com/chronograf/releases
      - sudo chown -R ubuntu:ubuntu /home/ubuntu
      - cp build/linux/static_amd64/chronograf .
@@ -16,9 +16,9 @@ RUN pip install boto requests python-jose --upgrade

RUN gem install fpm

# Install node
-RUN wget -q https://nodejs.org/dist/latest-v6.x/node-v6.9.1-linux-x64.tar.gz; \
-    tar -xvf node-v6.9.1-linux-x64.tar.gz -C / --strip-components=1; \
-    rm -f node-v6.9.1-linux-x64.tar.gz
+RUN wget -q https://nodejs.org/dist/latest-v6.x/node-v6.9.4-linux-x64.tar.gz; \
+    tar -xvf node-v6.9.4-linux-x64.tar.gz -C / --strip-components=1; \
+    rm -f node-v6.9.4-linux-x64.tar.gz

# Update npm
RUN cd $(npm root -g)/npm \
@@ -28,7 +28,7 @@ RUN npm install npm -g

# Install go
ENV GOPATH /root/go
-ENV GO_VERSION 1.7.4
+ENV GO_VERSION 1.7.5
ENV GO_ARCH amd64
RUN wget https://storage.googleapis.com/golang/go${GO_VERSION}.linux-${GO_ARCH}.tar.gz; \
    tar -C /usr/local/ -xf /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz ; \
@@ -0,0 +1,12 @@

## Builds

Builds are run from a Docker build image that is configured with the node and Go versions we support.
Our circle.yml uses this Docker container to build, test, and create release packages.

### Updating to new node/go versions
After updating Dockerfile_build, run

`docker build -t quay.io/influxdb/builder:chronograf-$(date "+%Y%m%d") -f Dockerfile_build .`

and push to quay with:
`docker push quay.io/influxdb/builder:chronograf-$(date "+%Y%m%d")`
etc/build.py
@@ -812,24 +812,31 @@ def main(args):

        arch = None
        type = None
        regex = None
+        nice_name = None
        if ".deb" in p_name:
-            type = "Ubuntu"
+            type = "ubuntu"
+            nice_name = "Ubuntu"
            regex = r"^.+_(.+)\.deb$"
        elif ".rpm" in p_name:
-            type = "CentOS"
+            type = "centos"
+            nice_name = "CentOS"
            regex = r"^.+\.(.+)\.rpm$"
        elif ".tar.gz" in p_name:
            if "linux" in p_name:
                if "static" in p_name:
-                    type = "linux static"
+                    type = "linux_static"
+                    nice_name = "Linux Static"
                else:
                    type = "linux"
+                    nice_name = "Linux"
            elif "darwin" in p_name:
-                type = "Mac OS X"
+                type = "darwin"
+                nice_name = "Mac OS X"
                regex = r"^.+_(.+)\.tar.gz$"
        elif ".zip" in p_name:
            if "windows" in p_name:
-                type = "Windows"
+                type = "windows"
+                nice_name = "Windows"
                regex = r"^.+_(.+)\.zip$"

        if regex is None or type is None:
@@ -844,16 +851,19 @@

            arch = "amd64"
        elif arch == "x86_32":
            arch = "i386"

-        package_output[str(type).capitalize() + " " + str(arch)] = {
+        package_name = str(arch) + "_" + str(type)
+        package_output[package_name] = {
            "md5": generate_md5_from_file(p),
            "filename": p_name,
+            "name": nice_name,
+            "link": "https://dl.influxdata.com/chronograf/releases/" + p_name.rsplit('/', 1)[-1],
        }

    # Print the downloads in Markdown format for the release
    if args.release:
        lines = []
-        for arch, v in package_output.items():
-            line = arch + " | [" + v['filename'] +"](https://dl.influxdata.com/chronograf/releases/" + v['filename'].rsplit('/', 1)[-1] + ") | `" + v['md5'] + "`"
+        for package_name, v in package_output.items():
+            line = v['name'] + " | [" + v['filename'] +"](" + v['link'] + ") | `" + v['md5'] + "`"
            lines.append(line)
        lines.sort()
@@ -14,6 +14,8 @@

    "start": "node_modules/webpack/bin/webpack.js -w --config ./webpack/devConfig.js",
    "lint": "node_modules/eslint/bin/eslint.js src/",
    "test": "karma start",
+    "test:lint": "npm run lint; npm run test",
+    "test:dev": "nodemon --exec npm run test:lint",
    "clean": "rm -rf build",
    "storybook": "start-storybook -p 6006",
    "build-storybook": "build-storybook",
@@ -0,0 +1,48 @@

import reducer from 'src/shared/reducers/sources';

import {
  loadSources,
  updateSource,
  removeSource,
  addSource,
} from 'src/shared/actions/sources';

describe('Shared.Reducers.sources', () => {
  it('can correctly show default sources when adding a source', () => {
    let state = [];

    state = reducer(state, addSource({
      id: '1',
      default: true,
    }));

    state = reducer(state, addSource({
      id: '2',
      default: true,
    }));

    expect(state.filter((s) => s.default).length).to.equal(1);
  });

  it('can correctly show default sources when updating a source', () => {
    let state = [];

    state = reducer(state, addSource({
      id: '1',
      default: true,
    }));

    state = reducer(state, addSource({
      id: '2',
      default: true,
    }));

    state = reducer(state, updateSource({
      id: '1',
      default: true,
    }));

    expect(state.find(({id}) => id === '1').default).to.equal(true);
    expect(state.find(({id}) => id === '2').default).to.equal(false);
  });
});
@@ -86,13 +86,17 @@ export const HostPage = React.createClass({

    const cellHeight = 4;
    const pageWidth = 12;

-    const autoflowCells = autoflowLayouts.reduce((allCells, layout, i) => {
-      return allCells.concat(layout.cells.map((cell, j) => {
+    let cellCount = 0;
+    const autoflowCells = autoflowLayouts.reduce((allCells, layout) => {
+      return allCells.concat(layout.cells.map((cell) => {
+        const x = (cellCount * cellWidth % pageWidth);
+        const y = Math.floor(cellCount * cellWidth / pageWidth) * cellHeight;
+        cellCount += 1;
        return Object.assign(cell, {
          w: cellWidth,
          h: cellHeight,
-          x: ((i + j) * cellWidth % pageWidth),
-          y: Math.floor(((i + j) * cellWidth / pageWidth)) * cellHeight,
+          x,
+          y,
        });
      }));
    }, []);
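The placement math above lays cells out left to right across a page and wraps to a new row when the current row is full, using a single running `cellCount` instead of the old `(i + j)` index sum (which produced gaps across layouts). A small standalone sketch of the same arithmetic, assuming `cellWidth` is 4 (its definition sits outside the hunk):

```js
// Same x/y arithmetic as the hunk above; with cellWidth = 4 and pageWidth = 12,
// cells fall three per row, dropping one cellHeight per wrapped row.
const cellWidth = 4;
const cellHeight = 4;
const pageWidth = 12;

for (let cellCount = 0; cellCount < 5; cellCount++) {
  const x = (cellCount * cellWidth) % pageWidth;
  const y = Math.floor((cellCount * cellWidth) / pageWidth) * cellHeight;
  console.log(cellCount, x, y); // 0 0 0, 1 4 0, 2 8 0, 3 0 4, 4 4 4
}
```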
@@ -7,6 +7,7 @@ import PagerDutyConfig from './PagerDutyConfig';

import SensuConfig from './SensuConfig';
import SlackConfig from './SlackConfig';
import SMTPConfig from './SMTPConfig';
+import TalkConfig from './TalkConfig';
import TelegramConfig from './TelegramConfig';
import VictorOpsConfig from './VictorOpsConfig';

@@ -114,6 +115,7 @@ const AlertOutputs = React.createClass({

              <option value="sensu">Sensu</option>
              <option value="slack">Slack</option>
              <option value="smtp">SMTP</option>
+              <option value="talk">Talk</option>
              <option value="telegram">Telegram</option>
              <option value="victorops">VictorOps</option>
            </select>
@@ -166,6 +168,9 @@ const AlertOutputs = React.createClass({

      case 'sensu': {
        return <SensuConfig onSave={save} config={this.getSection(configSections, endpoint)} />;
      }
+      case 'talk': {
+        return <TalkConfig onSave={save} config={this.getSection(configSections, endpoint)} />;
+      }
    }
  },
});
@@ -0,0 +1,61 @@

import React, {PropTypes} from 'react';

const {
  bool,
  string,
  shape,
  func,
} = PropTypes;

const TalkConfig = React.createClass({
  propTypes: {
    config: shape({
      options: shape({
        url: bool.isRequired,
        author_name: string.isRequired,
      }).isRequired,
    }).isRequired,
    onSave: func.isRequired,
  },

  handleSaveAlert(e) {
    e.preventDefault();

    const properties = {
      url: this.url.value,
      author_name: this.author.value,
    };

    this.props.onSave(properties);
  },

  render() {
    const {url, author_name: author} = this.props.config.options;

    return (
      <div>
        <h4 className="text-center">Talk Alert</h4>
        <br/>
        <p>Have alerts sent to Talk.</p>
        <form onSubmit={this.handleSaveAlert}>
          <div className="form-group col-xs-12">
            <label htmlFor="url">URL</label>
            <input className="form-control" id="url" type="text" ref={(r) => this.url = r} defaultValue={url || ''}></input>
            <label className="form-helper">Note: a value of <code>true</code> indicates that the Talk URL has been set</label>
          </div>

          <div className="form-group col-xs-12">
            <label htmlFor="author">Author Name</label>
            <input className="form-control" id="author" type="text" ref={(r) => this.author = r} defaultValue={author || ''}></input>
          </div>

          <div className="form-group form-group-submit col-xs-12 col-sm-6 col-sm-offset-3">
            <button className="btn btn-block btn-primary" type="submit">Save</button>
          </div>
        </form>
      </div>
    );
  },
});

export default TalkConfig;
@@ -123,8 +123,8 @@ export const KapacitorPage = React.createClass({

  render() {
    const {kapacitor, newName, newURL, newUsername} = this.state;
    // if the fields in state are defined, use them. otherwise use the defaults
-    const name = newName === undefined ? kapacitor && kapacitor.name || '' : newName;
-    const url = newURL === undefined ? kapacitor && kapacitor.url || '' : newURL;
+    const name = newName === undefined ? kapacitor && kapacitor.name || defaultKapacitorName : newName;
+    const url = newURL === undefined ? kapacitor && kapacitor.url || defaultKapacitorUrl : newURL;
    const username = newUsername === undefined ? kapacitor && kapacitor.username || '' : newUsername;
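The ternary/`||` chain above reads: prefer the value currently being edited in component state; if it is undefined, fall back to the stored kapacitor's value; if that is falsy, use the default constant (`defaultKapacitorName` / `defaultKapacitorUrl` are referenced by the hunk but defined elsewhere). A standalone sketch of that precedence, with made-up sample URLs:

```js
// Same precedence as the hunk above: edited value -> stored value -> default.
const pickValue = (edited, stored, fallback) =>
  edited === undefined ? (stored || fallback) : edited;

pickValue(undefined, undefined, 'http://localhost:9092');                   // default
pickValue(undefined, 'http://kapa:9092', 'http://localhost:9092');          // stored value
pickValue('http://edited:9092', 'http://kapa:9092', 'http://localhost:9092'); // edited value
```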
@@ -7,7 +7,9 @@ export default function sources(state = [], action) {

    case 'SOURCE_UPDATED': {
      const {source} = action.payload;
      const updatedIndex = state.findIndex((s) => s.id === source.id);
-      const updatedSources = Object.assign([], state);
+      const updatedSources = source.default ? state.map((s) => {
+        s.default = false; return s;
+      }) : [...state];
      updatedSources[updatedIndex] = source;
      return updatedSources;
    }
@@ -20,7 +22,10 @@ export default function sources(state = [], action) {

    case 'SOURCE_ADDED': {
      const {source} = action.payload;
-      return state.concat([source]);
+      const updatedSources = source.default ? state.map((s) => {
+        s.default = false; return s;
+      }) : state;
+      return [...updatedSources, source];
    }
  }
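Both reducer cases enforce the same rule: at most one source stays flagged as the default, which is exactly what the new spec file earlier in this diff asserts. A minimal standalone sketch of the SOURCE_ADDED behavior, written here without mutating the existing source objects (an implementation detail assumed for the illustration, not repo code):

```js
// Sketch (not repo code): when the incoming source is flagged as default,
// clear the flag on every existing source, then append the new one.
function addSourceToState(state, source) {
  const cleared = source.default
    ? state.map((s) => ({...s, default: false}))
    : state;
  return [...cleared, source];
}

// addSourceToState([{id: '1', default: true}], {id: '2', default: true})
// -> [{id: '1', default: false}, {id: '2', default: true}]
```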