Fix merge conflicts

commit 54b5c3dd47
@@ -2,9 +2,12 @@
 ### Upcoming Bug Fixes

 1. [#788](https://github.com/influxdata/chronograf/pull/788): Fix missing fields in data explorer when using non-default retention policy
+1. [#774](https://github.com/influxdata/chronograf/issues/774): Fix gaps in layouts for hosts

 ### Upcoming Features

 1. [#779](https://github.com/influxdata/chronograf/issues/779): Add layout for telegraf's diskio system plugin
+1. [#810](https://github.com/influxdata/chronograf/issues/810): Add layout for telegraf's net system plugin
+1. [#811](https://github.com/influxdata/chronograf/issues/811): Add layout for telegraf's procstat plugin

 ### Upcoming UI Improvements
@@ -58,8 +58,8 @@ After installing gvm you can install and set the default go version by
 running the following:

 ```bash
-gvm install go1.7.4
-gvm use go1.7.4 --default
+gvm install go1.7.5
+gvm use go1.7.5 --default
 ```

 Installing GDM
@@ -43,8 +43,10 @@ Currently, Chronograf offers dashboard templates for the following Telegraf input plugins:
 * [Disk](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/DISK_README.md)
 * [DiskIO](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/disk.go#L136)
 * [Memory](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/MEM_README.md)
+* [Net](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/net.go)
 * [Netstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/NETSTAT_README.md)
 * [Processes](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/system/PROCESSES_README.md)
+* [Procstat](https://github.com/influxdata/telegraf/blob/master/plugins/inputs/procstat/README.md)
 * Varnish
 * Windows Performance Counters

@@ -111,7 +113,7 @@ docker pull quay.io/influxdb/chronograf:latest

 ### From Source

-* Chronograf works with go 1.7.4, npm 3.10.7 and node v6.6.0. Additional version support of these projects will be implemented soon, but these are the only supported versions to date.
+* Chronograf works with go 1.7.x, node 6.x/7.x, and npm 3.x.
 * Chronograf requires [Kapacitor](https://github.com/influxdata/kapacitor) 1.1.x+ to create and store alerts.

 1. [Install Go](https://golang.org/doc/install)
@@ -0,0 +1,49 @@
+{
+  "id": "4585a7db-73af-4ca1-9378-47ee67c71f99",
+  "measurement": "net",
+  "app": "system",
+  "autoflow": true,
+  "cells": [
+    {
+      "x": 0,
+      "y": 0,
+      "w": 4,
+      "h": 4,
+      "i": "e2f65d45-1898-4a16-860c-14b655575925",
+      "name": "System – Network Mb/s",
+      "queries": [
+        {
+          "query": "SELECT non_negative_derivative(max(\"bytes_recv\"), 1s) / 125000 as \"rx_megabits_per_second\" FROM \"net\"",
+          "groupbys": [],
+          "wheres": [],
+          "label": "Mb/s"
+        },
+        {
+          "query": "SELECT non_negative_derivative(max(\"bytes_sent\"), 1s) / 125000 as \"tx_megabits_per_second\" FROM \"net\"",
+          "groupbys": [],
+          "wheres": []
+        }
+      ]
+    },
+    {
+      "x": 0,
+      "y": 0,
+      "w": 4,
+      "h": 4,
+      "i": "5e957624-b28b-4904-8068-5e7a9a058609",
+      "name": "System – Network Error Rate",
+      "queries": [
+        {
+          "query": "SELECT non_negative_derivative(max(\"err_in\"), 1s) as \"rx_errors_per_second\" FROM \"net\"",
+          "groupbys": [],
+          "wheres": []
+        },
+        {
+          "query": "SELECT non_negative_derivative(max(\"err_out\"), 1s) as \"tx_errors_per_second\" FROM \"net\"",
+          "groupbys": [],
+          "wheres": []
+        }
+      ]
+    }
+  ]
+}
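The `/ 125000` in the byte-rate queries above converts bytes per second to megabits per second: one megabit is 10^6 bits, and 10^6 / 8 = 125,000 bytes, so a byte rate divided by 125,000 is a megabit rate. The error-count queries need no such divisor, since `err_in` (receive errors) and `err_out` (transmit errors) are already plain counts per second once differentiated.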
@@ -49,7 +49,7 @@ cat > $APP_FILE << EOF
       "i": "$CELLID",
       "name": "User facing cell Name",
       "queries": [{
-        "query": "select mean(\"used_percent from\") from disk",
+        "query": "select mean(\"used_percent\") from disk",
         "groupbys": [],
         "wheres": []
       }]
@@ -0,0 +1,40 @@
+{
+  "id": "44644fae-21e7-4897-81e6-b11d2643cd61",
+  "measurement": "procstat",
+  "app": "system",
+  "autoflow": true,
+  "cells": [
+    {
+      "x": 0,
+      "y": 0,
+      "w": 4,
+      "h": 4,
+      "i": "e75a6baa-9938-4ade-b83f-55a239039964",
+      "name": "Processes – Resident Memory (MB)",
+      "queries": [
+        {
+          "query": "SELECT max(\"memory_rss\") / 1000000 AS \"max_mb_memory_rss\" FROM \"procstat\"",
+          "groupbys": ["\"exe\""],
+          "wheres": [],
+          "label": "MB"
+        }
+      ]
+    },
+    {
+      "x": 0,
+      "y": 0,
+      "w": 4,
+      "h": 4,
+      "i": "2bfae447-47c6-4f85-9fec-494301d29a04",
+      "name": "Processes – CPU Usage %",
+      "queries": [
+        {
+          "query": "SELECT max(\"cpu_usage\") AS \"cpu_usage\" FROM \"procstat\"",
+          "groupbys": ["\"exe\""],
+          "wheres": [],
+          "label": "%"
+        }
+      ]
+    }
+  ]
+}
@@ -3,7 +3,7 @@ machine:
   services:
     - docker
   environment:
-    DOCKER_TAG: chronograf-20161207
+    DOCKER_TAG: chronograf-20170127

 dependencies:
   override:
@@ -26,7 +26,7 @@ deployment:
           --package
           --platform all
           --arch all
-          --upload
+          --upload-overwrite
       - sudo chown -R ubuntu:ubuntu /home/ubuntu
       - cp build/linux/static_amd64/chronograf .
       - docker build -t chronograf .
@@ -46,7 +46,7 @@ deployment:
           --package
           --platform all
           --arch all
-          --upload
+          --upload-overwrite
           --bucket dl.influxdata.com/chronograf/releases
       - sudo chown -R ubuntu:ubuntu /home/ubuntu
      - cp build/linux/static_amd64/chronograf .
@@ -67,7 +67,7 @@ deployment:
           --package
           --platform all
           --arch all
-          --upload
+          --upload-overwrite
           --bucket dl.influxdata.com/chronograf/releases
       - sudo chown -R ubuntu:ubuntu /home/ubuntu
       - cp build/linux/static_amd64/chronograf .
@@ -16,9 +16,9 @@ RUN pip install boto requests python-jose --upgrade
 RUN gem install fpm

 # Install node
-RUN wget -q https://nodejs.org/dist/latest-v6.x/node-v6.9.1-linux-x64.tar.gz; \
-    tar -xvf node-v6.9.1-linux-x64.tar.gz -C / --strip-components=1; \
-    rm -f node-v6.9.1-linux-x64.tar.gz
+RUN wget -q https://nodejs.org/dist/latest-v6.x/node-v6.9.4-linux-x64.tar.gz; \
+    tar -xvf node-v6.9.4-linux-x64.tar.gz -C / --strip-components=1; \
+    rm -f node-v6.9.4-linux-x64.tar.gz

 # Update npm
 RUN cd $(npm root -g)/npm \
@@ -28,7 +28,7 @@ RUN npm install npm -g

 # Install go
 ENV GOPATH /root/go
-ENV GO_VERSION 1.7.4
+ENV GO_VERSION 1.7.5
 ENV GO_ARCH amd64
 RUN wget https://storage.googleapis.com/golang/go${GO_VERSION}.linux-${GO_ARCH}.tar.gz; \
     tar -C /usr/local/ -xf /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz ; \
@@ -0,0 +1,12 @@
+## Builds
+
+Builds are run from a docker build image that is configured with the node and go versions we support.
+Our circle.yml uses this docker container to build, test, and create release packages.
+
+### Updating node/go versions
+After updating the Dockerfile_build, run
+
+`docker build -t quay.io/influxdb/builder:chronograf-$(date "+%Y%m%d") -f Dockerfile_build .`
+
+and push to quay with:
+`docker push quay.io/influxdb/builder:chronograf-$(date "+%Y%m%d")`
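A minimal sketch of that update flow end to end, assuming the date-stamped tag is the same one circle.yml consumes as `DOCKER_TAG` (as in the circle.yml hunk above):

```bash
# Build and publish a new builder image, then point CI at it.
# Tag format chronograf-YYYYMMDD matches the DOCKER_TAG set in circle.yml.
TAG="chronograf-$(date "+%Y%m%d")"
docker build -t "quay.io/influxdb/builder:${TAG}" -f Dockerfile_build .
docker push "quay.io/influxdb/builder:${TAG}"
# finally, update DOCKER_TAG in circle.yml to ${TAG}
```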
etc/build.py
@@ -812,24 +812,31 @@ def main(args):
         arch = None
         type = None
         regex = None
+        nice_name = None
         if ".deb" in p_name:
-            type = "Ubuntu"
+            type = "ubuntu"
+            nice_name = "Ubuntu"
             regex = r"^.+_(.+)\.deb$"
         elif ".rpm" in p_name:
-            type = "CentOS"
+            type = "centos"
+            nice_name = "CentOS"
             regex = r"^.+\.(.+)\.rpm$"
         elif ".tar.gz" in p_name:
             if "linux" in p_name:
                 if "static" in p_name:
-                    type = "linux static"
+                    type = "linux_static"
+                    nice_name = "Linux Static"
                 else:
                     type = "linux"
+                    nice_name = "Linux"
             elif "darwin" in p_name:
-                type = "Mac OS X"
+                type = "darwin"
+                nice_name = "Mac OS X"
             regex = r"^.+_(.+)\.tar.gz$"
         elif ".zip" in p_name:
             if "windows" in p_name:
-                type = "Windows"
+                type = "windows"
+                nice_name = "Windows"
             regex = r"^.+_(.+)\.zip$"

         if regex is None or type is None:
@@ -844,16 +851,19 @@ def main(args):
                 arch = "amd64"
             elif arch == "x86_32":
                 arch = "i386"
-            package_output[str(type).capitalize() + " " + str(arch)] = {
+            package_name = str(arch) + "_" + str(type)
+            package_output[package_name] = {
                 "md5": generate_md5_from_file(p),
                 "filename": p_name,
+                "name": nice_name,
+                "link": "https://dl.influxdata.com/chronograf/releases/" + p_name.rsplit('/', 1)[-1],
             }

     # Print the downloads in Markdown format for the release
     if args.release:
         lines = []
-        for arch, v in package_output.items():
-            line = arch + " | [" + v['filename'] +"](https://dl.influxdata.com/chronograf/releases/" + v['filename'].rsplit('/', 1)[-1] + ") | `" + v['md5'] + "`"
+        for package_name, v in package_output.items():
+            line = v['name'] + " | [" + v['filename'] +"](" + v['link'] + ") | `" + v['md5'] + "`"
             lines.append(line)
         lines.sort()
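With `name` and `link` now stored alongside the checksum, each release row renders as `name | [filename](link) | md5` keyed by `arch_type`. For a hypothetical package `chronograf_1.1.0_amd64.deb` (filename and checksum invented purely for illustration) the emitted Markdown row would look like:

```
Ubuntu | [chronograf_1.1.0_amd64.deb](https://dl.influxdata.com/chronograf/releases/chronograf_1.1.0_amd64.deb) | `d41d8cd98f00b204e9800998ecf8427e`
```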
@@ -14,6 +14,8 @@
     "start": "node_modules/webpack/bin/webpack.js -w --config ./webpack/devConfig.js",
     "lint": "node_modules/eslint/bin/eslint.js src/",
     "test": "karma start",
+    "test:lint": "npm run lint; npm run test",
+    "test:dev": "nodemon --exec npm run test:lint",
     "clean": "rm -rf build",
     "storybook": "start-storybook -p 6006",
     "build-storybook": "build-storybook",
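Note the `;` in `test:lint`: it runs the test suite even when lint fails, which suits the `test:dev` nodemon watch loop. If a lint failure should short-circuit instead (for example in CI), the stricter variant would be:

```
"test:lint": "npm run lint && npm run test",
```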
@@ -0,0 +1,48 @@
+import reducer from 'src/shared/reducers/sources';
+
+import {
+  loadSources,
+  updateSource,
+  removeSource,
+  addSource,
+} from 'src/shared/actions/sources';
+
+describe('Shared.Reducers.sources', () => {
+  it('can correctly show default sources when adding a source', () => {
+    let state = [];
+
+    state = reducer(state, addSource({
+      id: '1',
+      default: true,
+    }));
+
+    state = reducer(state, addSource({
+      id: '2',
+      default: true,
+    }));
+
+    expect(state.filter((s) => s.default).length).to.equal(1);
+  });
+
+  it('can correctly show default sources when updating a source', () => {
+    let state = [];
+
+    state = reducer(state, addSource({
+      id: '1',
+      default: true,
+    }));
+
+    state = reducer(state, addSource({
+      id: '2',
+      default: true,
+    }));
+
+    state = reducer(state, updateSource({
+      id: '1',
+      default: true,
+    }));
+
+    expect(state.find(({id}) => id === '1').default).to.equal(true);
+    expect(state.find(({id}) => id === '2').default).to.equal(false);
+  });
+});
@@ -86,13 +86,17 @@ export const HostPage = React.createClass({
     const cellHeight = 4;
     const pageWidth = 12;

-    const autoflowCells = autoflowLayouts.reduce((allCells, layout, i) => {
-      return allCells.concat(layout.cells.map((cell, j) => {
+    let cellCount = 0;
+    const autoflowCells = autoflowLayouts.reduce((allCells, layout) => {
+      return allCells.concat(layout.cells.map((cell) => {
+        const x = (cellCount * cellWidth % pageWidth);
+        const y = Math.floor(cellCount * cellWidth / pageWidth) * cellHeight;
+        cellCount += 1;
         return Object.assign(cell, {
           w: cellWidth,
           h: cellHeight,
-          x: ((i + j) * cellWidth % pageWidth),
-          y: Math.floor(((i + j) * cellWidth / pageWidth)) * cellHeight,
+          x,
+          y,
         });
       }));
     }, []);
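This is the layout-gap fix from [#774](https://github.com/influxdata/chronograf/issues/774): the old `(i + j)` position repeats across layouts (layout 1, cell 0 collides with layout 0, cell 1), so cells overlapped in some slots and left others empty. A single running `cellCount` gives every cell the next free slot. A minimal sketch of the slot math with the constants above:

```js
// Each cell takes the next free 4-wide slot on a 12-unit-wide page,
// wrapping to a new 4-tall row after every third cell.
const cellWidth = 4, cellHeight = 4, pageWidth = 12;
for (let cellCount = 0; cellCount < 5; cellCount++) {
  const x = cellCount * cellWidth % pageWidth;
  const y = Math.floor(cellCount * cellWidth / pageWidth) * cellHeight;
  console.log(cellCount, x, y); // 0,0,0  1,4,0  2,8,0  3,0,4  4,4,4
}
```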
@@ -7,6 +7,7 @@ import PagerDutyConfig from './PagerDutyConfig';
 import SensuConfig from './SensuConfig';
 import SlackConfig from './SlackConfig';
 import SMTPConfig from './SMTPConfig';
+import TalkConfig from './TalkConfig';
 import TelegramConfig from './TelegramConfig';
 import VictorOpsConfig from './VictorOpsConfig';

@@ -114,6 +115,7 @@
             <option value="sensu">Sensu</option>
             <option value="slack">Slack</option>
             <option value="smtp">SMTP</option>
+            <option value="talk">Talk</option>
             <option value="telegram">Telegram</option>
             <option value="victorops">VictorOps</option>
           </select>
@@ -166,6 +168,9 @@
       case 'sensu': {
         return <SensuConfig onSave={save} config={this.getSection(configSections, endpoint)} />;
       }
+      case 'talk': {
+        return <TalkConfig onSave={save} config={this.getSection(configSections, endpoint)} />;
+      }
     }
   },
 });
@@ -0,0 +1,61 @@
+import React, {PropTypes} from 'react';
+
+const {
+  bool,
+  string,
+  shape,
+  func,
+} = PropTypes;
+
+const TalkConfig = React.createClass({
+  propTypes: {
+    config: shape({
+      options: shape({
+        url: bool.isRequired,
+        author_name: string.isRequired,
+      }).isRequired,
+    }).isRequired,
+    onSave: func.isRequired,
+  },
+
+  handleSaveAlert(e) {
+    e.preventDefault();
+
+    const properties = {
+      url: this.url.value,
+      author_name: this.author.value,
+    };
+
+    this.props.onSave(properties);
+  },
+
+  render() {
+    const {url, author_name: author} = this.props.config.options;
+
+    return (
+      <div>
+        <h4 className="text-center">Talk Alert</h4>
+        <br/>
+        <p>Have alerts sent to Talk.</p>
+        <form onSubmit={this.handleSaveAlert}>
+          <div className="form-group col-xs-12">
+            <label htmlFor="url">URL</label>
+            <input className="form-control" id="url" type="text" ref={(r) => this.url = r} defaultValue={url || ''}></input>
+            <label className="form-helper">Note: a value of <code>true</code> indicates that the Talk URL has been set</label>
+          </div>
+
+          <div className="form-group col-xs-12">
+            <label htmlFor="author">Author Name</label>
+            <input className="form-control" id="author" type="text" ref={(r) => this.author = r} defaultValue={author || ''}></input>
+          </div>
+
+          <div className="form-group form-group-submit col-xs-12 col-sm-6 col-sm-offset-3">
+            <button className="btn btn-block btn-primary" type="submit">Save</button>
+          </div>
+        </form>
+      </div>
+    );
+  },
+});
+
+export default TalkConfig;
@@ -123,8 +123,8 @@ export const KapacitorPage = React.createClass({
   render() {
     const {kapacitor, newName, newURL, newUsername} = this.state;
     // if the fields in state are defined, use them. otherwise use the defaults
-    const name = newName === undefined ? kapacitor && kapacitor.name || '' : newName;
-    const url = newURL === undefined ? kapacitor && kapacitor.url || '' : newURL;
+    const name = newName === undefined ? kapacitor && kapacitor.name || defaultKapacitorName : newName;
+    const url = newURL === undefined ? kapacitor && kapacitor.url || defaultKapacitorUrl : newURL;
     const username = newUsername === undefined ? kapacitor && kapacitor.username || '' : newUsername;

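`defaultKapacitorName` and `defaultKapacitorUrl` are not defined inside this hunk; the change assumes they exist elsewhere in the module. A hypothetical sketch of what such constants might look like (Kapacitor's default HTTP port is 9092; the names and values below are assumptions, not the commit's actual definitions):

```js
// Hypothetical module-level defaults assumed by the hunk above;
// the real definitions live outside this hunk.
const defaultKapacitorName = 'My Kapacitor';
const defaultKapacitorUrl = 'http://localhost:9092';
```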
@@ -7,7 +7,9 @@ export default function sources(state = [], action) {
     case 'SOURCE_UPDATED': {
       const {source} = action.payload;
       const updatedIndex = state.findIndex((s) => s.id === source.id);
-      const updatedSources = Object.assign([], state);
+      const updatedSources = source.default ? state.map((s) => {
+        s.default = false; return s;
+      }) : [...state];
       updatedSources[updatedIndex] = source;
       return updatedSources;
     }
@@ -20,7 +22,10 @@ export default function sources(state = [], action) {

     case 'SOURCE_ADDED': {
       const {source} = action.payload;
-      return state.concat([source]);
+      const updatedSources = source.default ? state.map((s) => {
+        s.default = false; return s;
+      }) : state;
+      return [...updatedSources, source];
     }
   }

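Both cases now demote every other source whenever the incoming one is flagged `default`, which is exactly what the new reducer spec added earlier in this commit asserts. One caveat: `s.default = false` mutates the existing source objects in place, which Redux-style reducers conventionally avoid. A non-mutating variant (a sketch, not what this commit does) would copy each source instead:

```js
// Copy each source rather than flipping its flag in place.
const demoteDefaults = (sources) => sources.map((s) => ({...s, default: false}));

// e.g. SOURCE_ADDED, with the incoming default source appended last:
const updatedSources = source.default ? demoteDefaults(state) : state;
return [...updatedSources, source];
```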