Compare commits


No commits in common. "master" and "v1.9.4" have entirely different histories.

559 changed files with 12916 additions and 19221 deletions

View File

@ -1,16 +1,3 @@
version: 2.1
orbs:
browser-tools: circleci/browser-tools@1.1.3
aws-s3: circleci/aws-s3@2.0.0
parameters:
trigger:
type: enum
enum: [none, deploy-as-artifacts]
default: none
workflows:
version: 2
main:
@ -33,23 +20,6 @@ workflows:
ignore: /.*/
tags:
only: /^[0-9]+(\.[0-9]+)*(\S*)([a|rc|beta]([0-9]+))+$/
- packages-sign:
name: packages-sign-prerelease
requires:
- deploy-pre-release
filters:
branches:
ignore: /.*/
tags:
only: /^[0-9]+(\.[0-9]+)*(\S*)([a|rc|beta]([0-9]+))+$/
- packages-upload-signatures:
requires:
- packages-sign-prerelease
filters:
branches:
ignore: /.*/
tags:
only: /^[0-9]+(\.[0-9]+)*(\S*)([a|rc|beta]([0-9]+))+$/
- deploy-release:
requires:
- build
@ -58,40 +28,15 @@ workflows:
ignore: /.*/
tags:
only: /^[0-9]+(\.[0-9]+)*$/
- packages-sign:
name: packages-sign-release
requires:
- deploy-release
filters:
branches:
ignore: /.*/
tags:
only: /^[0-9]+(\.[0-9]+)*$/
- packages-upload-signatures:
requires:
- packages-sign-release
filters:
branches:
ignore: /.*/
tags:
only: /^[0-9]+(\.[0-9]+)*$/
trigger:
when:
equal: [ deploy-as-artifacts, << pipeline.parameters.trigger >> ]
jobs:
- build
- build-nightly:
requires:
- build
version: 2
jobs:
build:
environment:
DOCKER_TAG: chronograf-20250404
DOCKER_TAG: chronograf-20210828
GO111MODULE: "ON"
machine:
image: ubuntu-2204:current
image: ubuntu-2004:202201-02
steps:
- checkout
- run: |
@ -112,10 +57,10 @@ jobs:
deploy-nightly:
environment:
DOCKER_TAG: chronograf-20250404
DOCKER_TAG: chronograf-20210828
GO111MODULE: "ON"
machine:
image: ubuntu-2204:current
image: ubuntu-2004:202201-02
steps:
- attach_workspace:
at: /home/circleci
@ -139,33 +84,12 @@ jobs:
- store_artifacts:
path: ./build/
build-nightly:
environment:
DOCKER_TAG: chronograf-20250404
GO111MODULE: "ON"
machine:
image: ubuntu-2204:current
steps:
- attach_workspace:
at: /home/circleci
- run: |
./etc/scripts/docker/run.sh \
--debug \
--clean \
--package \
--platform all \
--arch all \
--nightly \
--version=${CIRCLE_SHA1:0:7}
- store_artifacts:
path: ./build/
deploy-pre-release:
environment:
DOCKER_TAG: chronograf-20250404
DOCKER_TAG: chronograf-20210828
GO111MODULE: "ON"
machine:
image: ubuntu-2204:current
image: ubuntu-2004:202201-02
steps:
- attach_workspace:
at: /home/circleci
@ -190,17 +114,13 @@ jobs:
docker push quay.io/influxdb/chronograf:${CIRCLE_TAG}
- store_artifacts:
path: ./build/
- persist_to_workspace:
root: .
paths:
- build
deploy-release:
environment:
DOCKER_TAG: chronograf-20250404
DOCKER_TAG: chronograf-20210828
GO111MODULE: "ON"
machine:
image: ubuntu-2204:current
image: ubuntu-2004:202201-02
steps:
- attach_workspace:
at: /home/circleci
@ -227,54 +147,3 @@ jobs:
docker push quay.io/influxdb/chronograf:latest
- store_artifacts:
path: ./build/
- persist_to_workspace:
root: .
paths:
- build
packages-sign:
circleci_ip_ranges: true
docker:
- image: quay.io/influxdb/rsign:latest
auth:
username: $QUAY_RSIGN_USERNAME
password: $QUAY_RSIGN_PASSWORD
steps:
- add_ssh_keys:
fingerprints:
- 92:24:4f:e1:e1:ee:6a:39:22:d7:b5:fa:9e:a9:bf:4b
- attach_workspace:
at: /tmp/workspace
- run: |
mkdir -p /tmp/workspace/signatures
find /tmp/workspace/build \
-type f \( \
-iname '*.deb' \
-o -iname '*.rpm' \
-o -iname '*.tar.gz' \
-o -iname '*.zip' \
\) -exec rsign "{}" \; -exec mv "{}.asc" /tmp/workspace/signatures \;
- persist_to_workspace:
root: /tmp/workspace
paths:
- signatures
- store_artifacts:
path: /tmp/workspace/signatures
packages-upload-signatures:
docker:
- image: cimg/python:3.12.3
steps:
- attach_workspace:
at: /tmp/workspace
- aws-s3/sync:
arguments: >
--exclude '*'
--include 'chronograf-*.asc'
--include 'chronograf_*.asc'
--acl public-read
aws-region: AWS_REGION
aws-access-key-id: AWS_ACCESS_KEY_ID
aws-secret-access-key: AWS_SECRET_ACCESS_KEY
from: /tmp/workspace/signatures/
to: s3://dl.influxdata.com/chronograf/releases/

View File

@ -1,18 +0,0 @@
name: 'chronograf/cypress/report'
on:
workflow_run:
workflows: ['chronograf/cypress']
types:
- completed
jobs:
report:
runs-on: ubuntu-latest
steps:
- uses: dorny/test-reporter@v1
if: success() || failure()
with:
artifact: results
name: Cypress Tests
path: ui/cypress/results/results-*.xml
reporter: java-junit
fail-on-error: false

View File

@ -1,119 +0,0 @@
name: "chronograf/cypress"
on:
pull_request:
push:
branches:
- master
jobs:
e2e:
runs-on: ubuntu-latest
timeout-minutes: 30
env:
OAUTH2_HOSTNAME: localhost
OAUTH2_PORT: 8087
OAUTH2_TEST_USER_NAME: test
OAUTH2_TEST_USER_EMAIL: test@oauth2.mock
OAUTH2_REDIRECT_URL: http://localhost:8888/oauth/oauth-mock/callback
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 1
- name: Setup Go
uses: actions/setup-go@v2
with:
go-version: '1.23.8'
- uses: actions/setup-node@v2
with:
node-version: '16.14.2'
- run: if [ ! -x "$(command -v yarn)" ]; then npm install -g yarn; fi
- run: yarn node --version
- name: Setup Helm
uses: azure/setup-helm@v1
with:
version: v3.6.3
- name: Create kind cluster
uses: helm/kind-action@v1.2.0
with:
cluster_name: "chronograf-testing"
config: .github/workflows/resources/kind-config.yaml
- name: Create Enterprise Test Resources
run: |
helm repo add jetstack https://charts.jetstack.io
helm repo add influxdata https://helm.influxdata.com/
helm repo update
helm upgrade --wait --install \
cert-manager jetstack/cert-manager \
--namespace cert-manager \
--create-namespace \
--version v1.5.4 \
--set prometheus.enabled=false \
--set webhook.timeoutSeconds=30 \
--set installCRDs=true
kubectl apply -f .github/workflows/resources/test-resources.yaml
kubectl create secret generic influxdb-license --from-literal=INFLUXDB_ENTERPRISE_LICENSE_KEY=${INFLUXDB_ENTERPRISE_LICENSE_KEY}
helm upgrade --install influxdb influxdata/influxdb-enterprise --namespace default \
--set-string envFromSecret=influxdb-license \
--set-string data.service.type=NodePort \
--set-string meta.service.type=NodePort \
--set data.service.nodePort=30086 \
--set meta.service.nodePort=30091
env:
INFLUXDB_ENTERPRISE_LICENSE_KEY: "${{ secrets.INFLUXDB_ENTERPRISE_LICENSE_KEY }}"
- name: OAuth2 Mock Server
run: |
yarn
RUNNER_TRACKING_ID="" && (nohup yarn start > oauth2-mock.log 2>&1 &)
working-directory: ./etc/oauth2-server-mock
- name: Chronograf
run: |
make
RUNNER_TRACKING_ID="" && (nohup ./chronograf \
--generic-name=oauth-mock \
--generic-client-id=clientID \
--generic-client-secret=clientSecret \
--generic-scopes=scopes \
--generic-auth-url="http://$OAUTH2_HOSTNAME:$OAUTH2_PORT/oauth/authorize" \
--generic-api-url="http://$OAUTH2_HOSTNAME:$OAUTH2_PORT/userinfo" \
--generic-token-url="http://$OAUTH2_HOSTNAME:$OAUTH2_PORT/oauth/token" \
--public-url=http://localhost:8888 \
--token-secret=Q4O1T8FTbErOnmx03mGeVH3pkvKtdKr6HEmzEpNBiVMynZ/qKDdOResI3OMx4Zg9kmIfAI9ihlIV3OV5+VRfZ+iB2knLuGagEmFpG/h51CRcQY58j2NpnxdBewz91E51RRfjDYvqMrISHZCjdeuw0338Xp5UnEg32utr0ThRN0Ucv2isRr4KYJNYuvUXrjKJzjh76394JwY+bzn20L/enR2rLEtJ40ePxwuEvsE0MBUGZy79ecLZPaolQ3lkPE6X3+iV/9suN0BkBNtbQe1sGv4P522jSm24fFhXaFjetQQ/dJGehbWzsBo8uVAWB2RO0+xU2LhHFN0k0LAESD6MWw== \
--redir-auth-login=oauth-mock > out.log 2>&1 &)
sleep 10
- name: Chronograf test url
run: |
echo "InfluxDB data node status: $(curl -Isk "https://localhost:8086/ping" | head -n 1)"
echo "Chronograf status: $(curl -Isk "http://localhost:8888" | head -n 1)"
cat out.log || true
- name: Cypress
run: |
docker run --rm \
-v ${{ github.workspace }}:/chronograf \
-w /chronograf/ui \
--network=host \
cypress/browsers:node16.14.0-slim-chrome99-ff97 \
sh -c "apt update && apt install -y build-essential && yarn install --frozen-lockfile && yarn run cypress install && yarn run cypress run --env oauth2ServerURL=http://${{ env.OAUTH2_HOSTNAME }}:${{ env.OAUTH2_PORT }} --browser chrome --config-file githubActions-config.json --reporter junit --reporter-options 'mochaFile=cypress/results/results-[hash].xml'"
- name: Upload Artifact
uses: actions/upload-artifact@v4
if: success() || failure()
with:
name: results
path: |
oauth2-mock.log
out.log
ui/cypress/videos/*.mp4
ui/cypress/screenshots/
ui/cypress/results/results-*.xml

View File

@ -1,13 +0,0 @@
---
kind: Cluster
apiVersion: kind.x-k8s.io/v1alpha4
nodes:
- role: control-plane
extraPortMappings:
- containerPort: 30086
hostPort: 8086
listenAddress: "0.0.0.0"
- containerPort: 30091
hostPort: 8091
listenAddress: "0.0.0.0"
---

View File

@ -1,16 +0,0 @@
---
apiVersion: v1
kind: Secret
metadata:
name: influxdb-auth
stringData:
username: admin
password: admin
---
apiVersion: v1
kind: Secret
metadata:
name: influxdb-shared-secret
stringData:
secret: MY RANDOM STRING
---

.gitignore
View File

@ -6,7 +6,6 @@ vendor/
.vscode/
.idea/
node_modules/
.parcel-cache/
# Binaries
/chronograf
@ -29,7 +28,3 @@ canned/apps_gen.go
npm-debug.log
yarn-error.log
ui/.vs/slnx.sqlite
out.log
ui/cypress/kube-config
ui/cypress/.local-chronograf-influxdb-enterprise.sh.swp
ui/oauth2-mock.log

.nvmrc
View File

@ -1 +1 @@
16
14

View File

@ -1,29 +0,0 @@
/* eslint no-console: 0 */
const { createProxyMiddleware } = require("http-proxy-middleware");
module.exports = function (app) {
const handleProxyError = err => {
if (err.code === "ECONNREFUSED") {
console.log(
"Cannot reach Chronograf server at localhost:8888. Is it running?"
);
} else {
console.log(`Error: ${err.code}`);
}
};
const proxyMiddleware = createProxyMiddleware("/chronograf/v1", {
target: "http://localhost:8888",
logLevel: "silent",
changeOrigin: true,
onError: handleProxyError,
});
const proxyMiddlewareOAuth = createProxyMiddleware("/oauth", {
target: "http://localhost:8888",
logLevel: "silent",
changeOrigin: true,
onError: handleProxyError,
});
const port = Number(process.env.PORT || 8080);
app.use(proxyMiddleware);
app.use(proxyMiddlewareOAuth);
};

View File

@ -1,125 +1,11 @@
## [unreleased]
## v1.10.8 [2025-08-15]
### Bug Fixes
1. [#6145](https://github.com/influxdata/chronograf/pull/6145): Show missing retention policies on the Databases page
## v1.10.7 [2025-04-15]
### Bug Fixes
1. [#6131](https://github.com/influxdata/chronograf/pull/6131): Handle missing queryConfig in Host page queries
### Other
1. [#6129](https://github.com/influxdata/chronograf/pull/6129): Upgrade golang to 1.23.8
## v1.10.6 [2024-12-16]
### Bug Fixes
1. [#6103](https://github.com/influxdata/chronograf/pull/6103): Set active database for InfluxQL meta queries.
2. [#6105](https://github.com/influxdata/chronograf/pull/6105): Prevent dangerous InfluxQL statements from auto-execution.
3. [#6111](https://github.com/influxdata/chronograf/pull/6111): Loading Hosts page for large number of hosts.
4. [#6116](https://github.com/influxdata/chronograf/pull/6116): Showing tag values in Flux query builder connected to InfluxDB Enterprise.
### Other
1. [#6108](https://github.com/influxdata/chronograf/pull/6108): Upgrade golang to 1.22.7.
## v1.10.5 [2024-05-31]
### Other
1. [#6094](https://github.com/influxdata/chronograf/pull/6094): Upgrade golang to 1.21.10.
## v1.10.4 [2024-04-25]
### Other
1. [#6090](https://github.com/influxdata/chronograf/pull/6090): Upgrade golang to 1.21.9.
## v1.10.3 [2024-02-28]
### Features
1. [#6073](https://github.com/influxdata/chronograf/pull/6073): Possibility to specify OAuth logout endpoint to logout from OAuth Identity provider.
### Other
1. [#6074](https://github.com/influxdata/chronograf/pull/6074): Upgrade golang to 1.20.13.
## v1.10.2 [2023-10-20]
### Bug Fixes
1. [#6056](https://github.com/influxdata/chronograf/pull/6056): Fix error on typing colon
2. [#6060](https://github.com/influxdata/chronograf/pull/6060): Fix time interval in `Processor_Queue_Length` query
### Other
1. [#6063](https://github.com/influxdata/chronograf/pull/6063): Upgrade golang to 1.20.10
## v1.10.1
## v1.10
### Features
### Bug Fixes
1. [#6001](https://github.com/influxdata/chronograf/pull/6001): Repair UI served under BASEPATH.
### Other
1. [#6010](https://github.com/influxdata/chronograf/pull/6010): Security Updates
1. [#6021](https://github.com/influxdata/chronograf/pull/6021): Security Updates
1. [#6025](https://github.com/influxdata/chronograf/pull/6025): Security Updates
1. [#6026](https://github.com/influxdata/chronograf/pull/6026): Bump to Go 1.20
1. [#6028](https://github.com/influxdata/chronograf/pull/6028): Build releases with Go 1.20
1. [#6032](https://github.com/influxdata/chronograf/pull/6032): Upgrade golang to 1.20.4
## v1.10.0 [2022-08-23]
### Features
1. [#5904](https://github.com/influxdata/chronograf/pull/5904): Add reader role.
1. [#5921](https://github.com/influxdata/chronograf/pull/5921): Manage InfluxDB users including their database permissions.
1. [#5923](https://github.com/influxdata/chronograf/pull/5923): Manage InfluxDB roles including their database permissions.
1. [#5925](https://github.com/influxdata/chronograf/pull/5925): Improve InfluxDB user creation.
1. [#5926](https://github.com/influxdata/chronograf/pull/5926): Improve InfluxDB role creation.
1. [#5927](https://github.com/influxdata/chronograf/pull/5927): Show effective permissions on Users page.
1. [#5929](https://github.com/influxdata/chronograf/pull/5926): Add refresh button to InfluxDB Users/Roles/Databases page.
1. [#5940](https://github.com/influxdata/chronograf/pull/5940): Support InfluxDB behind proxy under subpath.
1. [#5956](https://github.com/influxdata/chronograf/pull/5956): Add InfluxDB admin tabs to user/role detail page.
1. [#5959](https://github.com/influxdata/chronograf/pull/5959): Allow to customize annotation color.
1. [#5967](https://github.com/influxdata/chronograf/pull/5967): Remember whether to start with shown annotations on Dashboard page.
1. [#5977](https://github.com/influxdata/chronograf/pull/5977): Select current value in dropdown search input.
1. [#5997](https://github.com/influxdata/chronograf/pull/5997): Simplify flux labels.
1. [#5998](https://github.com/influxdata/chronograf/pull/5998): Setup DBRP mapping automatically for a v2 connection.
### Bug Fixes
1. [#5882](https://github.com/influxdata/chronograf/pull/5882): Repair table visualization of string values.
1. [#5913](https://github.com/influxdata/chronograf/pull/5913): Improve InfluxDB Enterprise detection.
1. [#5917](https://github.com/influxdata/chronograf/pull/5917): Improve InfluxDB Enterprise user creation process.
1. [#5917](https://github.com/influxdata/chronograf/pull/5917): Avoid stale reads in communication with InfluxDB Enterprise meta nodes.
1. [#5938](https://github.com/influxdata/chronograf/pull/5938): Properly detect unsupported values in Alert Rule builder.
1. [#5965](https://github.com/influxdata/chronograf/pull/5965): Inform the user to use v2 administration.
1. [#5976](https://github.com/influxdata/chronograf/pull/5976): Make markdown cell content selectable.
### Other
1. [#5875](https://github.com/influxdata/chronograf/pull/5875): Upgrade to node 16 LTS.
1. [#5896](https://github.com/influxdata/chronograf/pull/5896): Add cypress tests with github workflows.
1. [#5898](https://github.com/influxdata/chronograf/pull/5898): Upgrade javascript dependencies.
1. [#5897](https://github.com/influxdata/chronograf/pull/5897): Upgrade golang to 1.18.
1. [#5915](https://github.com/influxdata/chronograf/pull/5915): Upgrade github.com/lestrrat-go/jwx to v2.
1. [#5933](https://github.com/influxdata/chronograf/pull/5933): Upgrade golang to 1.18.3 .
1. [#5947](https://github.com/influxdata/chronograf/pull/5947): Use stable component keys.
1. [#5990](https://github.com/influxdata/chronograf/pull/5990): Upgrade golang to 1.18.4 .
1. [#5991](https://github.com/influxdata/chronograf/pull/5991): Upgrade UI to use parcel v2.
## v1.9.4 [2022-03-22]
### Features
@ -273,10 +159,10 @@
### Breaking Changes
1. [#5710](https://github.com/influxdata/chronograf/pull/5710): OAuth integrations newly use OAuth PKCE (RFC7636)
to provide a more secure OAuth token exchange. Google, Azure, Okta, Auth0, Gitlab (and more) integrations already
support OAuth PKCE. PKCE enablement should have no effect on the communication with authorization servers that
don't support it yet (such as Github, Bitbucket). PKCE can be eventually turned off with `OAUTH_NO_PKCE=true`
1. [#5710](https://github.com/influxdata/chronograf/pull/5710): OAuth integrations newly use OAuth PKCE (RFC7636)
to provide a more secure OAuth token exchange. Google, Azure, Okta, Auth0, Gitlab (and more) integrations already
support OAuth PKCE. PKCE enablement should have no effect on the communication with authorization servers that
don't support it yet (such as Github, Bitbucket). PKCE can be eventually turned off with `OAUTH_NO_PKCE=true`
environment variable.
## v1.8.10 [2021-02-08]

View File

@ -45,9 +45,9 @@ If you are going to be contributing back to Chronograf please take a second to s
## Installing & Using Node
You'll need to install Node 16 to run the frontend chronograf application.
You'll need to install Node 14 to run the frontend chronograf application.
* [Install Node 16](https://nodejs.org/en/about/releases/)
* [Install Node 14](https://nodejs.org/en/about/releases/)
## Installing & Using Yarn
@ -59,7 +59,15 @@ To add a dependency via Yarn, for example, run `yarn add <dependency>` from with
## Installing Go
Chronograf requires Go 1.18 or higher.
Chronograf requires Go 1.16 or higher.
## Installing & Using Dep
You'll need to install Dep to manage the backend (Go) dependencies.
* [Install Dep](https://github.com/golang/dep)
To add a dependency via Dep, for example, run `dep ensure -add <dependency>` from within the `/chronograf` directory. _Note that as of this writing, `dep ensure` will modify many extraneous vendor files, so you'll need to run `dep prune` to clean this up before committing your changes. Apparently, the next version of `dep` will take care of this step for you._
## Revision Control Systems

View File

@ -1,4 +1,4 @@
FROM alpine:3.17
FROM alpine:3.12
ENV PROTOBOARDS_PATH /usr/share/chronograf/protoboards
ENV CANNED_PATH /usr/share/chronograf/canned

File diff suppressed because it is too large.

View File

@ -1,14 +1,16 @@
ifeq ($(OS), Windows_NT)
VERSION := $(shell git describe --exact-match --tags 2>nil)
GOBINDATA := $(shell go-bindata.exe --version 2>nil)
else
VERSION := $(shell git describe --exact-match --tags 2>/dev/null)
GOBINDATA := $(shell which go-bindata 2> /dev/null)
endif
COMMIT ?= $(shell git rev-parse --short=8 HEAD)
YARN := $(shell command -v yarn 2> /dev/null)
SOURCES := $(shell find . -name '*.go' ! -name '*_gen.go' -not -path "./vendor/*" )
UISOURCES := $(shell find ui -type f -not \( -path ui/build/\* -o -path ui/node_modules/\* -o -path ui/cypress/\* -prune \) )
UISOURCES := $(shell find ui -type f -not \( -path ui/build/\* -o -path ui/node_modules/\* -prune \) )
unexport LDFLAGS
ifdef VERSION
@ -19,6 +21,7 @@ unexport TMP_BUILD_VERSION
BINARY=chronograf
CTLBINARY=chronoctl
GO111MODULE=on
.DEFAULT_GOAL := all
@ -29,8 +32,8 @@ all: dep build
build: assets ${BINARY}
${BINARY}: $(SOURCES) .bindata .jsdep .godep
go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go
go build -o ${CTLBINARY} ${LDFLAGS} ./cmd/chronoctl
GO111MODULE=on go build -o ${BINARY} ${LDFLAGS} ./cmd/chronograf/main.go
GO111MODULE=on go build -o ${CTLBINARY} ${LDFLAGS} ./cmd/chronoctl
define CHRONOGIRAFFE
._ o o
@ -47,7 +50,7 @@ chronogiraffe: ${BINARY}
@echo "$$CHRONOGIRAFFE"
docker-${BINARY}: $(SOURCES)
CGO_ENABLED=0 GOOS=linux go build -installsuffix cgo -o ${BINARY} ${LDFLAGS} \
CGO_ENABLED=0 GOOS=linux GO111MODULE=on go build -installsuffix cgo -o ${BINARY} ${LDFLAGS} \
./cmd/chronograf/main.go
docker: dep assets docker-${BINARY}
@ -55,9 +58,21 @@ docker: dep assets docker-${BINARY}
assets: .jssrc .bindata
.bindata: server/swagger.json canned/*.json protoboards/*.json $(UISOURCES)
.bindata: server/swagger_gen.go canned/bin_gen.go protoboards/bin_gen.go dist/dist_gen.go
@touch .bindata
dist/dist_gen.go: $(UISOURCES)
go generate -x ./dist
server/swagger_gen.go: server/swagger.json
go generate -x ./server
canned/bin_gen.go: canned/*.json
go generate -x ./canned
protoboards/bin_gen.go: protoboards/*.json
go generate -x ./protoboards
.jssrc: $(UISOURCES)
cd ui && yarn run clean && yarn run build
@touch .jssrc
@ -65,7 +80,11 @@ assets: .jssrc .bindata
dep: .jsdep .godep
.godep:
go get
ifndef GOBINDATA
@echo "Installing go-bindata"
go install github.com/kevinburke/go-bindata/...@v3.22.0+incompatible
GO111MODULE=on go get
endif
@touch .godep
.jsdep: ./yarn.lock
@ -79,31 +98,15 @@ endif
gen: internal.pb.go
internal.pb.go: kv/internal/internal.proto
go generate -x ./kv/internal
GO111MODULE=on go generate -x ./kv/internal
test: gochecktidy gocheckfmt jstest gotest gotestrace lint-ci
gochecktidy:
go version
go mod tidy
if ! git --no-pager diff --exit-code -- go.mod go.sum; then\
echo Modules are not tidy, please run \`go mod tidy\` ! ;\
exit 1;\
fi
gocheckfmt:
NOFMTFILES=`go fmt './...'` ; \
if [ ! -z "$$NOFMTFILES" ] ; then\
echo Unformatted files: $$NOFMTFILES ;\
echo Run \`go fmt ./...\` to fix it ! ;\
exit 1;\
fi
test: jstest gotest gotestrace lint-ci
gotest:
go test -timeout 10s ./...
GO111MODULE=on go test -timeout 10s ./...
gotestrace:
go test -race ./...
GO111MODULE=on go test -race ./...
jstest:
cd ui && yarn test --runInBand
@ -121,17 +124,11 @@ run-dev: chronogiraffe
mkdir -p ui/build
./chronograf -d --log-level=debug
e2e-prepare:
./ui/cypress/local-chronograf-influxdb-enterprise.sh
e2e:
cd ui && yarn test:e2e
clean:
if [ -f ${BINARY} ] ; then rm ${BINARY} ; fi
cd ui && yarn run clean
rm -rf node_modules
cd ui && rm -rf node_modules
rm -f dist/dist_gen.go canned/bin_gen.go protoboards/bin_gen.go server/swagger_gen.go
@rm -f .godep .jsdep .jssrc .bindata
ctags:

View File

@ -181,12 +181,12 @@ docker pull chronograf:latest
### From Source
* Chronograf works with go 1.18+, node 12 LTS, and yarn 1.7+.
* Chronograf works with go 1.16+, node 12 LTS, and yarn 1.7+.
* Chronograf requires [Kapacitor](https://github.com/influxdata/kapacitor)
1.5.x+ to create and store alerts.
1. [Install Go 1.18](https://golang.org/doc/install)
1. [Install Node (version 16 LTS)](https://nodejs.org/en/about/releases/)
1. [Install Go 1.16](https://golang.org/doc/install)
1. [Install Node (version 14 LTS)](https://nodejs.org/en/about/releases/)
1. [Install yarn](https://yarnpkg.com/docs/install)
1. [Setup your GOPATH](https://golang.org/doc/code.html#GOPATH)
1. Build the Chronograf package:
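The build commands themselves sit outside this hunk. As a minimal sketch (assuming the Go, Node, and yarn installs from the steps above, and relying on the `make` default target visible in the Makefile diff elsewhere in this compare), a from-source build looks roughly like:
```
# Sketch of a from-source build; exact paths and targets may differ per release.
git clone https://github.com/influxdata/chronograf.git
cd chronograf
make                 # default target runs dep + assets + build, producing ./chronograf
./chronograf --help  # quick check that the binary runs
```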

View File

@ -1,11 +0,0 @@
# Security Policy
## Reporting a Vulnerability
InfluxData takes security and our users' trust seriously. If you believe you
have found a security issue in any of our open source projects, please
responsibly disclose it by contacting `security@influxdata.com`. More details
about security vulnerability reporting can be found on the
[InfluxData How to Report Vulnerabilities page][InfluxData Security].
[InfluxData Security]: https://www.influxdata.com/how-to-report-security-vulnerabilities/

View File

@ -2,27 +2,24 @@ package canned
import (
"context"
"embed"
"encoding/json"
"github.com/influxdata/chronograf"
)
//go:embed *.json
var content embed.FS
//go:generate go-bindata -o bin_gen.go -ignore README|apps|.sh|go -pkg canned .
// BinLayoutsStore represents an embedded layout store
// BinLayoutsStore represents a layout store using data generated by go-bindata
type BinLayoutsStore struct {
Logger chronograf.Logger
}
// All returns the set of all layouts
func (s *BinLayoutsStore) All(ctx context.Context) ([]chronograf.Layout, error) {
dirEntries, _ := content.ReadDir(".")
layouts := make([]chronograf.Layout, len(dirEntries))
for i, dirEntry := range dirEntries {
name := dirEntry.Name()
octets, err := content.ReadFile(name)
names := AssetNames()
layouts := make([]chronograf.Layout, len(names))
for i, name := range names {
octets, err := Asset(name)
if err != nil {
s.Logger.
WithField("component", "apps").

View File

@ -1,19 +0,0 @@
package canned
import (
"context"
"testing"
clog "github.com/influxdata/chronograf/log"
)
func TestAll(t *testing.T) {
store := BinLayoutsStore{Logger: clog.New(clog.ParseLevel("debug"))}
all, err := store.All(context.Background())
if err != nil {
t.Error("No error expected!")
}
if len(all) != 50 {
t.Errorf("50 items expected, but %d", len(all))
}
}

View File

@ -535,7 +535,6 @@ var annotationTagsBlacklist = map[string]bool{
"endTime": true,
"modified_time_ns": true,
"text": true,
"color": true,
"type": true,
"id": true,
}
@ -567,7 +566,6 @@ type Annotation struct {
StartTime time.Time // StartTime starts the annotation
EndTime time.Time // EndTime ends the annotation
Text string // Text is the associated user-facing text describing the annotation
Color string // Color associated with the annotation
Tags AnnotationTags // Tags is a collection of user defined key/value pairs that contextualize the annotation
}
@ -821,15 +819,13 @@ const MappingWildcard string = "*"
// explicit role within the organization.
//
// One can think of a mapping like so:
//
// Provider:Scheme:Group -> Organization
// github:oauth2:influxdata -> Happy
// beyondcorp:ldap:influxdata -> TheBillHilliettas
// Provider:Scheme:Group -> Organization
// github:oauth2:influxdata -> Happy
// beyondcorp:ldap:influxdata -> TheBillHilliettas
//
// Any of Provider, Scheme, or Group may be provided as a wildcard *
//
// github:oauth2:* -> MyOrg
// *:*:* -> AllOrg
// github:oauth2:* -> MyOrg
// *:*:* -> AllOrg
type Mapping struct {
ID string `json:"id"`
Organization string `json:"organizationId"`

View File

@ -20,7 +20,7 @@ var (
func init() {
if version == "" {
// read version from embedded files
// read version from bindata files
version = dist.GetVersion()
}
fullVersion = version

dist/dist.go
View File

@ -1,9 +1,14 @@
package dist
import (
"net/http"
//go:generate go-bindata -o dist_gen.go -ignore 'map|go' -pkg dist ../ui/build/... ../ui/package.json
"github.com/influxdata/chronograf/ui"
import (
"fmt"
"net/http"
"regexp"
"strings"
assetfs "github.com/elazarl/go-bindata-assetfs"
)
// DebugAssets serves assets via a specified directory
@ -17,7 +22,92 @@ func (d *DebugAssets) Handler() http.Handler {
return http.FileServer(NewDir(d.Dir, d.Default))
}
// BindataAssets serves assets from go-bindata, but also serves Default if the asset doesn't exist
// This is to support single-page react-apps with its own router.
type BindataAssets struct {
Prefix string // Prefix is prepended to the http file request
Default string // Default is the file to serve if the file is not found
DefaultContentType string // DefaultContentType is the content type of the default file
}
// Handler serves go-bindata using a go-bindata-assetfs façade
func (b *BindataAssets) Handler() http.Handler {
return b
}
// addCacheHeaders requests an hour of Cache-Control and sets an ETag based on file size and modtime
func (b *BindataAssets) addCacheHeaders(filename string, w http.ResponseWriter) error {
w.Header().Add("Cache-Control", "public, max-age=3600")
w.Header().Add("X-Frame-Options", "SAMEORIGIN")
w.Header().Add("X-XSS-Protection", "1; mode=block")
w.Header().Add("X-Content-Type-Options", "nosniff")
w.Header().Add("Content-Security-Policy", "script-src 'self'; object-src 'self'")
fi, err := AssetInfo(filename)
if err != nil {
return err
}
hour, minute, second := fi.ModTime().Clock()
etag := fmt.Sprintf(`"%d%d%d%d%d"`, fi.Size(), fi.ModTime().Day(), hour, minute, second)
w.Header().Set("ETag", etag)
return nil
}
// ServeHTTP wraps http.FileServer by returning a default asset if the asset
// doesn't exist. This supports single-page react-apps with its own
// built-in router. Additionally, we override the content-type if the
// Default file is used.
func (b *BindataAssets) ServeHTTP(w http.ResponseWriter, r *http.Request) {
// def wraps the assets to return the default file if the file doesn't exist
def := func(name string) ([]byte, error) {
// If the named asset exists, then return it directly.
octets, err := Asset(name)
if err != nil {
// If this is at / then we just error out so we can return a Directory
// This directory will then be redirected by go to the /index.html
if name == b.Prefix {
return nil, err
}
// If this is anything other than slash, we just return the default
// asset. This default asset will handle the routing.
// Additionally, because we know we are returning the default asset,
// we need to set the default asset's content-type.
w.Header().Set("Content-Type", b.DefaultContentType)
if err := b.addCacheHeaders(b.Default, w); err != nil {
return nil, err
}
return Asset(b.Default)
}
if err := b.addCacheHeaders(name, w); err != nil {
return nil, err
}
// https://github.com/influxdata/chronograf/issues/5565
// workaround wrong .js content-type on windows
if strings.HasSuffix(name, ".js") {
w.Header().Set("Content-Type", "text/javascript")
}
return octets, nil
}
var dir http.FileSystem = &assetfs.AssetFS{
Asset: def,
AssetDir: AssetDir,
AssetInfo: AssetInfo,
Prefix: b.Prefix,
}
http.FileServer(dir).ServeHTTP(w, r)
}
var re = regexp.MustCompile(`"version"\s*:\s*"(.*)"`)
// GetVersion returns version of the packed assets
func GetVersion() string {
return ui.GetVersion()
if data, err := Asset("../ui/package.json"); err == nil {
if matches := re.FindStringSubmatch(string(data)); matches != nil {
return matches[1]
}
}
return ""
}

Binary file not shown (image removed; 76 KiB).

Binary file not shown (image removed; 204 KiB).

View File

@ -50,19 +50,4 @@ git push --tags
* OS X
* amd64
* Windows
* amd64
## Testing builds
The test builds are stored as artifacts on the CircleCI build page. If you want to create a test build, you can do so by triggering a pipeline on CircleCI.
When triggering the pipeline, set the `trigger` parameter to `deploy-as-artifacts`:
<p align="left">
<img src="./images/testing-builds-pipeline.png"/>
</p>
The resulting artifacts will be available on the CircleCI build page for the `build-nightly` job:
<p align="left">
<img src="./images/testing-builds-result.png"/>
</p>
* amd64
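For reference, a pipeline with this parameter can also be started through the CircleCI API instead of the web UI. This is a minimal sketch, assuming the project slug `gh/influxdata/chronograf` and a personal API token exported as `CIRCLECI_TOKEN` (both assumptions, not taken from the docs above):
```
# Sketch: trigger the deploy-as-artifacts pipeline via the CircleCI v2 API.
curl -X POST "https://circleci.com/api/v2/project/gh/influxdata/chronograf/pipeline" \
  -H "Circle-Token: ${CIRCLECI_TOKEN}" \
  -H "Content-Type: application/json" \
  -d '{"branch": "my-feature-branch", "parameters": {"trigger": "deploy-as-artifacts"}}'
```
The resulting artifacts should then show up on the `build-nightly` job page as described above.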

View File

@ -1,17 +0,0 @@
# Updating the Builder Image
The image that performs CI builds is located at `etc/Dockerfile_build`. When this file is changed, a manual build and release process is required.
1. Build the image:
```
docker build . -t quay.io/influxdb/builder:chronograf-<YYYYMMDD>
```
2. Push the image to quay:
```
docker push quay.io/influxdb/builder:chronograf-<YYYYMMDD>
```
If you have any permissions issues, ping @jeffreyssmith2nd or @bnpfeife
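One detail the steps above leave implicit: the CI configuration pins the builder image by date tag (the `DOCKER_TAG: chronograf-<YYYYMMDD>` entries visible in the CircleCI config at the top of this compare). A hedged sketch of bumping that tag after pushing the new image, assuming GNU sed:
```
# Sketch: point CI at the freshly pushed builder image by rewriting the date tag.
NEW_TAG="chronograf-$(date +%Y%m%d)"
sed -i "s/chronograf-[0-9]\{8\}/${NEW_TAG}/g" .circleci/config.yml
git diff .circleci/config.yml   # review the change before committing
```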

View File

@ -164,42 +164,36 @@ func (c *Client) Roles(ctx context.Context) (chronograf.RolesStore, error) {
// Permissions returns all Influx Enterprise permission strings
func (c *Client) Permissions(context.Context) chronograf.Permissions {
all := chronograf.Allowances{
"NoPermissions",
"ViewAdmin",
"ViewChronograf",
"CreateDatabase",
"CreateUserAndRole",
"AddRemoveNode",
"DropDatabase",
"DropData",
"ReadData",
"WriteData",
"Rebalance",
"ManageShard",
"ManageContinuousQuery",
"ManageQuery",
"ManageSubscription",
"Monitor",
"CopyShard",
"KapacitorAPI",
"KapacitorConfigAPI",
}
return chronograf.Permissions{
{
Scope: chronograf.AllScope,
Allowed: chronograf.Allowances{
"NoPermissions",
"ReadData",
"WriteData",
"DropData",
"ManageContinuousQuery",
"ManageQuery",
"ManageSubscription",
"CreateDatabase",
"DropDatabase",
"ViewAdmin",
"ViewChronograf",
"KapacitorAPI",
"KapacitorConfigAPI",
"CreateUserAndRole",
"CopyShard",
"ManageShard",
"Rebalance",
"AddRemoveNode",
"Monitor",
},
Scope: chronograf.AllScope,
Allowed: all,
},
{
Scope: chronograf.DBScope,
Allowed: chronograf.Allowances{
"NoPermissions",
"ReadData",
"WriteData",
"DropData",
"ManageContinuousQuery",
"ManageQuery",
"ManageSubscription",
},
Scope: chronograf.DBScope,
Allowed: all,
},
}
}

View File

@ -202,52 +202,69 @@ func Test_Enterprise_ComplainsIfNotOpened(t *testing.T) {
}
func TestClient_Permissions(t *testing.T) {
want := chronograf.Permissions{
tests := []struct {
name string
want chronograf.Permissions
}{
{
Scope: chronograf.AllScope,
Allowed: chronograf.Allowances{
"NoPermissions",
"ReadData",
"WriteData",
"DropData",
"ManageContinuousQuery",
"ManageQuery",
"ManageSubscription",
"CreateDatabase",
"DropDatabase",
"ViewAdmin",
"ViewChronograf",
"KapacitorAPI",
"KapacitorConfigAPI",
"CreateUserAndRole",
"CopyShard",
"ManageShard",
"Rebalance",
"AddRemoveNode",
"Monitor",
},
},
{
Scope: chronograf.DBScope,
Allowed: chronograf.Allowances{
"NoPermissions",
"ReadData",
"WriteData",
"DropData",
"ManageContinuousQuery",
"ManageQuery",
"ManageSubscription",
name: "All possible enterprise permissions",
want: chronograf.Permissions{
{
Scope: chronograf.AllScope,
Allowed: chronograf.Allowances{
"NoPermissions",
"ViewAdmin",
"ViewChronograf",
"CreateDatabase",
"CreateUserAndRole",
"AddRemoveNode",
"DropDatabase",
"DropData",
"ReadData",
"WriteData",
"Rebalance",
"ManageShard",
"ManageContinuousQuery",
"ManageQuery",
"ManageSubscription",
"Monitor",
"CopyShard",
"KapacitorAPI",
"KapacitorConfigAPI",
},
},
{
Scope: chronograf.DBScope,
Allowed: chronograf.Allowances{
"NoPermissions",
"ViewAdmin",
"ViewChronograf",
"CreateDatabase",
"CreateUserAndRole",
"AddRemoveNode",
"DropDatabase",
"DropData",
"ReadData",
"WriteData",
"Rebalance",
"ManageShard",
"ManageContinuousQuery",
"ManageQuery",
"ManageSubscription",
"Monitor",
"CopyShard",
"KapacitorAPI",
"KapacitorConfigAPI",
},
},
},
},
}
c := &enterprise.Client{}
if got := c.Permissions(context.Background()); !reflect.DeepEqual(got, want) {
t.Errorf("Client.Permissions() = %v, want %v", got, want)
dbAllowed := got[1].Allowed
allCommonAllowed := got[0].Allowed[0:len(dbAllowed)]
if !reflect.DeepEqual(allCommonAllowed, dbAllowed) {
t.Errorf("Database allowed permissions do not start all allowed permissions = %v, want %v", got, want)
for _, tt := range tests {
c := &enterprise.Client{}
if got := c.Permissions(context.Background()); !reflect.DeepEqual(got, tt.want) {
t.Errorf("%q. Client.Permissions() = %v, want %v", tt.name, got, tt.want)
}
}
}

View File

@ -12,7 +12,6 @@ import (
"net"
"net/http"
"net/url"
"sync"
"time"
"github.com/influxdata/chronograf"
@ -37,11 +36,12 @@ var (
)
type client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
// MetaClient represents a Meta node in an Influx Enterprise cluster
type MetaClient struct {
URL *url.URL
client client
authorizer influx.Authorizer
}
@ -49,7 +49,10 @@ type MetaClient struct {
// NewMetaClient represents a meta node in an Influx Enterprise cluster
func NewMetaClient(url *url.URL, InsecureSkipVerify bool, authorizer influx.Authorizer) *MetaClient {
return &MetaClient{
client: newDefaultClient(url, InsecureSkipVerify),
URL: url,
client: &defaultClient{
InsecureSkipVerify: InsecureSkipVerify,
},
authorizer: authorizer,
}
}
@ -203,18 +206,6 @@ func (m *MetaClient) RemoveUserPerms(ctx context.Context, name string, perms Per
return m.Post(ctx, "/user", a, nil)
}
// AddUserPerms adds permissions for a user in Influx Enterprise
func (m *MetaClient) AddUserPerms(ctx context.Context, name string, perms Permissions) error {
a := &UserAction{
Action: "add-permissions",
User: &User{
Name: name,
Permissions: perms,
},
}
return m.Post(ctx, "/user", a, nil)
}
// SetUserPerms removes permissions not in set and then adds the requested perms
func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permissions) error {
user, err := m.User(ctx, name)
@ -235,7 +226,14 @@ func (m *MetaClient) SetUserPerms(ctx context.Context, name string, perms Permis
// ... next, add any permissions the user should have
if len(add) > 0 {
return m.AddUserPerms(ctx, name, add)
a := &UserAction{
Action: "add-permissions",
User: &User{
Name: name,
Permissions: add,
},
}
return m.Post(ctx, "/user", a, nil)
}
return nil
}
@ -477,46 +475,14 @@ func (m *MetaClient) Post(ctx context.Context, path string, action interface{},
type defaultClient struct {
InsecureSkipVerify bool
URL *url.URL
// masterURL is setup when doing redirects, communication with a master
// node prevents stale reads from follower nodes after master node modifications
masterURL *url.URL
mu sync.Mutex
}
func newDefaultClient(URL *url.URL, InsecureSkipVerify bool) *defaultClient {
return &defaultClient{
URL: URL,
InsecureSkipVerify: InsecureSkipVerify,
}
}
func (d *defaultClient) setMasterURL(URL *url.URL) {
if URL.Host != "" && URL.Scheme != "" {
d.mu.Lock()
defer d.mu.Unlock()
d.masterURL = &url.URL{Host: URL.Host, Scheme: URL.Scheme}
}
}
func (d *defaultClient) getMasterURL() url.URL {
d.mu.Lock()
defer d.mu.Unlock()
if d.masterURL != nil {
return *d.masterURL
}
return *d.URL
}
// Do is a helper function to interface with Influx Enterprise's Meta API
func (d *defaultClient) Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
func (d *defaultClient) Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
p := url.Values{}
for k, v := range params {
p.Add(k, v)
}
// prefer communication with the master node, to avoid stale reads
URL := d.getMasterURL()
URL.Path = path
URL.RawQuery = p.Encode()
@ -571,7 +537,6 @@ func (d *defaultClient) Do(path, method string, authorizer influx.Authorizer, pa
// AuthedCheckRedirect tries to follow the Influx Enterprise pattern of
// redirecting to the leader but preserving authentication headers.
func (d *defaultClient) AuthedCheckRedirect(req *http.Request, via []*http.Request) error {
d.setMasterURL(req.URL)
if len(via) >= 10 {
return errors.New("too many redirects")
} else if len(via) == 0 {
@ -593,7 +558,7 @@ func (m *MetaClient) Do(ctx context.Context, path, method string, authorizer inf
resps := make(chan (result))
go func() {
resp, err := m.client.Do(path, method, authorizer, params, body)
resp, err := m.client.Do(m.URL, path, method, authorizer, params, body)
resps <- result{resp, err}
}()

View File

@ -18,8 +18,9 @@ import (
func TestMetaClient_ShowCluster(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
tests := []struct {
@ -31,6 +32,10 @@ func TestMetaClient_ShowCluster(t *testing.T) {
{
name: "Successful Show Cluster",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"data":[{"id":2,"version":"1.1.0-c1.1.0","tcpAddr":"data-1.twinpinesmall.net:8088","httpAddr":"data-1.twinpinesmall.net:8086","httpScheme":"https","status":"joined"}],"meta":[{"id":1,"addr":"meta-0.twinpinesmall.net:8091","httpScheme":"http","tcpAddr":"meta-0.twinpinesmall.net:8089","version":"1.1.0-c1.1.0"}]}`),
@ -61,6 +66,10 @@ func TestMetaClient_ShowCluster(t *testing.T) {
{
name: "Failed Show Cluster",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusBadGateway,
nil,
@ -73,6 +82,10 @@ func TestMetaClient_ShowCluster(t *testing.T) {
{
name: "Bad JSON from Show Cluster",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{data}`),
@ -85,6 +98,7 @@ func TestMetaClient_ShowCluster(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
got, err := m.ShowCluster(context.Background())
@ -115,8 +129,9 @@ func TestMetaClient_ShowCluster(t *testing.T) {
func TestMetaClient_Users(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -133,6 +148,10 @@ func TestMetaClient_Users(t *testing.T) {
{
name: "Successful Show users",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"users":[{"name":"admin","hash":"1234","permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -160,6 +179,10 @@ func TestMetaClient_Users(t *testing.T) {
{
name: "Successful Show users single user",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"users":[{"name":"admin","hash":"1234","permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -187,6 +210,10 @@ func TestMetaClient_Users(t *testing.T) {
{
name: "Failure Show users",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"users":[{"name":"admin","hash":"1234","permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -203,6 +230,10 @@ func TestMetaClient_Users(t *testing.T) {
{
name: "Bad JSON from Show users",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{foo}`),
@ -219,6 +250,7 @@ func TestMetaClient_Users(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
got, err := m.Users(tt.args.ctx, tt.args.name)
@ -234,8 +266,9 @@ func TestMetaClient_Users(t *testing.T) {
func TestMetaClient_User(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -252,6 +285,10 @@ func TestMetaClient_User(t *testing.T) {
{
name: "Successful Show users",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"users":[{"name":"admin","hash":"1234","permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -275,6 +312,10 @@ func TestMetaClient_User(t *testing.T) {
{
name: "No such user",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusNotFound,
[]byte(`{"error":"user not found"}`),
@ -291,6 +332,10 @@ func TestMetaClient_User(t *testing.T) {
{
name: "Bad JSON",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusNotFound,
[]byte(`{BAD}`),
@ -306,6 +351,7 @@ func TestMetaClient_User(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
got, err := m.User(tt.args.ctx, tt.args.name)
@ -321,8 +367,9 @@ func TestMetaClient_User(t *testing.T) {
func TestMetaClient_CreateUser(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -340,6 +387,10 @@ func TestMetaClient_CreateUser(t *testing.T) {
{
name: "Successful Create User",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
nil,
@ -357,6 +408,7 @@ func TestMetaClient_CreateUser(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.CreateUser(tt.args.ctx, tt.args.name, tt.args.passwd); (err != nil) != tt.wantErr {
@ -386,8 +438,9 @@ func TestMetaClient_CreateUser(t *testing.T) {
func TestMetaClient_ChangePassword(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -405,6 +458,10 @@ func TestMetaClient_ChangePassword(t *testing.T) {
{
name: "Successful Change Password",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
nil,
@ -422,6 +479,7 @@ func TestMetaClient_ChangePassword(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.ChangePassword(tt.args.ctx, tt.args.name, tt.args.passwd); (err != nil) != tt.wantErr {
@ -452,8 +510,9 @@ func TestMetaClient_ChangePassword(t *testing.T) {
func TestMetaClient_DeleteUser(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -470,6 +529,10 @@ func TestMetaClient_DeleteUser(t *testing.T) {
{
name: "Successful delete User",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
nil,
@ -486,6 +549,7 @@ func TestMetaClient_DeleteUser(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.DeleteUser(tt.args.ctx, tt.args.name); (err != nil) != tt.wantErr {
@ -515,8 +579,9 @@ func TestMetaClient_DeleteUser(t *testing.T) {
func TestMetaClient_SetUserPerms(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -535,6 +600,10 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
{
name: "Remove all permissions for a user",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"users":[{"name":"admin","hash":"1234","permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -551,6 +620,10 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
{
name: "Remove some permissions and add others",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"users":[{"name":"admin","hash":"1234","permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -573,6 +646,7 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.SetUserPerms(tt.args.ctx, tt.args.name, tt.args.perms); (err != nil) != tt.wantErr {
@ -626,8 +700,9 @@ func TestMetaClient_SetUserPerms(t *testing.T) {
func TestMetaClient_Roles(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -644,6 +719,10 @@ func TestMetaClient_Roles(t *testing.T) {
{
name: "Successful Show role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"admin","users":["marty"],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -672,6 +751,10 @@ func TestMetaClient_Roles(t *testing.T) {
{
name: "Successful Show role single role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"admin","users":["marty"],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -700,6 +783,7 @@ func TestMetaClient_Roles(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
got, err := m.Roles(tt.args.ctx, tt.args.name)
@ -715,8 +799,9 @@ func TestMetaClient_Roles(t *testing.T) {
func TestMetaClient_Role(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -733,6 +818,10 @@ func TestMetaClient_Role(t *testing.T) {
{
name: "Successful Show role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"admin","users":["marty"],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -757,6 +846,10 @@ func TestMetaClient_Role(t *testing.T) {
{
name: "No such role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusNotFound,
[]byte(`{"error":"user not found"}`),
@ -773,6 +866,7 @@ func TestMetaClient_Role(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
got, err := m.Role(tt.args.ctx, tt.args.name)
@ -788,8 +882,9 @@ func TestMetaClient_Role(t *testing.T) {
func TestMetaClient_UserRoles(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -806,6 +901,10 @@ func TestMetaClient_UserRoles(t *testing.T) {
{
name: "Successful Show all roles",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"timetravelers","users":["marty","docbrown"],"permissions":{"":["ViewAdmin","ViewChronograf"]}},{"name":"mcfly","users":["marty","george"],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -871,6 +970,7 @@ func TestMetaClient_UserRoles(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
got, err := m.UserRoles(tt.args.ctx)
@ -886,8 +986,9 @@ func TestMetaClient_UserRoles(t *testing.T) {
func TestMetaClient_CreateRole(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -904,6 +1005,10 @@ func TestMetaClient_CreateRole(t *testing.T) {
{
name: "Successful Create Role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
nil,
@ -920,6 +1025,7 @@ func TestMetaClient_CreateRole(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.CreateRole(tt.args.ctx, tt.args.name); (err != nil) != tt.wantErr {
@ -946,8 +1052,9 @@ func TestMetaClient_CreateRole(t *testing.T) {
func TestMetaClient_DeleteRole(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -964,6 +1071,10 @@ func TestMetaClient_DeleteRole(t *testing.T) {
{
name: "Successful delete role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
nil,
@ -980,6 +1091,7 @@ func TestMetaClient_DeleteRole(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.DeleteRole(tt.args.ctx, tt.args.name); (err != nil) != tt.wantErr {
@ -1009,8 +1121,9 @@ func TestMetaClient_DeleteRole(t *testing.T) {
func TestMetaClient_SetRolePerms(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -1029,6 +1142,10 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
{
name: "Remove all roles from user",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"admin","users":["marty"],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -1045,6 +1162,10 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
{
name: "Remove some users and add permissions to other",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"admin","users":["marty"],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -1067,6 +1188,7 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.SetRolePerms(tt.args.ctx, tt.args.name, tt.args.perms); (err != nil) != tt.wantErr {
@ -1120,8 +1242,9 @@ func TestMetaClient_SetRolePerms(t *testing.T) {
func TestMetaClient_SetRoleUsers(t *testing.T) {
type fields struct {
URL *url.URL
client interface {
Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error)
}
}
type args struct {
@ -1139,6 +1262,10 @@ func TestMetaClient_SetRoleUsers(t *testing.T) {
{
name: "Successful set users role (remove user from role)",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"admin","users":["marty"],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -1155,6 +1282,10 @@ func TestMetaClient_SetRoleUsers(t *testing.T) {
{
name: "Successful set single user role",
fields: fields{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
client: NewMockClient(
http.StatusOK,
[]byte(`{"roles":[{"name":"admin","users":[],"permissions":{"":["ViewAdmin","ViewChronograf"]}}]}`),
@ -1174,6 +1305,7 @@ func TestMetaClient_SetRoleUsers(t *testing.T) {
}
for _, tt := range tests {
m := &MetaClient{
URL: tt.fields.URL,
client: tt.fields.client,
}
if err := m.SetRoleUsers(tt.args.ctx, tt.args.name, tt.args.users); (err != nil) != tt.wantErr {
@ -1214,7 +1346,6 @@ func TestMetaClient_SetRoleUsers(t *testing.T) {
}
type MockClient struct {
URL *url.URL
Code int // HTTP Status code
Body []byte
HeaderMap http.Header
@ -1225,10 +1356,6 @@ type MockClient struct {
func NewMockClient(code int, body []byte, headers http.Header, err error) *MockClient {
return &MockClient{
URL: &url.URL{
Host: "twinpinesmall.net:8091",
Scheme: "https",
},
Code: code,
Body: body,
HeaderMap: headers,
@ -1237,10 +1364,13 @@ func NewMockClient(code int, body []byte, headers http.Header, err error) *MockC
}
}
func (c *MockClient) Do(path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
func (c *MockClient) Do(URL *url.URL, path, method string, authorizer influx.Authorizer, params map[string]string, body io.Reader) (*http.Response, error) {
if c == nil {
return nil, fmt.Errorf("NIL MockClient")
}
if URL == nil {
return nil, fmt.Errorf("NIL url")
}
if c.Err != nil {
return nil, c.Err
}
@ -1251,7 +1381,6 @@ func (c *MockClient) Do(path, method string, authorizer influx.Authorizer, param
p.Add(k, v)
}
URL := *c.URL
URL.Path = path
URL.RawQuery = p.Encode()
@ -1272,31 +1401,17 @@ func (c *MockClient) Do(path, method string, authorizer influx.Authorizer, param
}, nil
}
type mockAuthorizer struct {
set func(req *http.Request) error
}
func (a *mockAuthorizer) Set(req *http.Request) error {
return a.set(req)
}
func Test_AuthedCheckRedirect_Do(t *testing.T) {
path := "/test"
var ts2URL *url.URL
ts1Called := 0
var ts2URL string
ts1 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ts1Called++
want := http.Header{
"Referer": []string{ts2URL},
"Accept-Encoding": []string{"gzip"},
"Authorization": []string{"hunter2"},
}
if ts1Called == 1 {
// referer is filled when doing a first redirect
want.Add("Referer", ts2URL.String()+path)
}
for k, v := range want {
if !reflect.DeepEqual(r.Header[k], v) {
t.Errorf("Request.Header[%s] = %#v; want %#v", k, r.Header[k], v)
t.Errorf("Request.Header = %#v; want %#v", r.Header[k], v)
}
}
if t.Failed() {
@ -1307,44 +1422,38 @@ func Test_AuthedCheckRedirect_Do(t *testing.T) {
}))
defer ts1.Close()
ts2Called := 0
ts2 := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
ts2Called++
http.Redirect(w, r, ts1.URL, http.StatusFound)
}))
defer ts2.Close()
ts2URL, _ = url.Parse(ts2.URL)
ts2URL = ts2.URL
d := newDefaultClient(ts2URL, true)
authorizer := &mockAuthorizer{
set: func(req *http.Request) error {
req.Header.Add("Cookie", "foo=bar")
req.Header.Add("Authorization", "hunter2")
req.Header.Add("Howdy", "doody")
req.Header.Set("User-Agent", "Darth Vader, an extraterrestrial from the Planet Vulcan")
return nil
},
}
repetitions := 3
for i := 0; i < repetitions; i++ {
res, err := d.Do(path, "GET", authorizer, nil, nil)
if err != nil {
t.Fatal(err)
}
defer res.Body.Close()
if res.StatusCode != 200 {
t.Fatal(res.Status)
}
if got := res.Header.Get("Result"); got != "ok" {
t.Errorf("result = %q; want ok", got)
}
tr := &http.Transport{}
defer tr.CloseIdleConnections()
d := &defaultClient{}
c := &http.Client{
Transport: tr,
CheckRedirect: d.AuthedCheckRedirect,
}
if ts1Called != repetitions {
t.Errorf("Master server called %v; expected %v", ts1Called, repetitions)
req, _ := http.NewRequest("GET", ts2.URL, nil)
req.Header.Add("Cookie", "foo=bar")
req.Header.Add("Authorization", "hunter2")
req.Header.Add("Howdy", "doody")
req.Header.Set("User-Agent", "Darth Vader, an extraterrestrial from the Planet Vulcan")
res, err := c.Do(req)
if err != nil {
t.Fatal(err)
}
if ts2Called != 1 {
t.Errorf("Follower server called %v; expected 1", ts2Called)
defer res.Body.Close()
if res.StatusCode != 200 {
t.Fatal(res.Status)
}
if got := res.Header.Get("Result"); got != "ok" {
t.Errorf("result = %q; want ok", got)
}
}
@ -1405,9 +1514,9 @@ func Test_defaultClient_Do(t *testing.T) {
}))
defer ts.Close()
d := &defaultClient{}
u, _ := url.Parse(ts.URL)
d := newDefaultClient(u, true)
_, err := d.Do(tt.args.path, tt.args.method, tt.args.authorizer, tt.args.params, tt.args.body)
_, err := d.Do(u, tt.args.path, tt.args.method, tt.args.authorizer, tt.args.params, tt.args.body)
if (err != nil) != tt.wantErr {
t.Errorf("defaultClient.Do() error = %v, wantErr %v", err, tt.wantErr)
return

View File

@ -3,7 +3,6 @@ package enterprise
import (
"context"
"fmt"
"time"
"github.com/influxdata/chronograf"
)
@ -19,35 +18,11 @@ func (c *UserStore) Add(ctx context.Context, u *chronograf.User) (*chronograf.Us
if err := c.Ctrl.CreateUser(ctx, u.Name, u.Passwd); err != nil {
return nil, err
}
// fix #5840: eventual consistency can cause delays in user creation,
// wait for the user to become available
_, err := c.Ctrl.User(ctx, u.Name)
timer := time.NewTimer(2_000_000_000) // retry at most 2 seconds
defer timer.Stop()
for err != nil {
if err.Error() != "user not found" {
return nil, err
}
// wait before the next attempt
select {
case <-ctx.Done():
return nil, err
case <-timer.C:
return nil, err
case <-time.After(50_000_000):
break
}
_, err = c.Ctrl.User(ctx, u.Name)
}
// add permissions
perms := ToEnterprise(u.Permissions)
if len(perms) > 0 {
if err := c.Ctrl.SetUserPerms(ctx, u.Name, perms); err != nil {
return nil, err
}
if err := c.Ctrl.SetUserPerms(ctx, u.Name, perms); err != nil {
return nil, err
}
// add roles
for _, role := range u.Roles {
if err := c.Ctrl.AddRoleUsers(ctx, role.Name, []string{u.Name}); err != nil {
return nil, err

View File

@ -2,7 +2,6 @@ package enterprise_test
import (
"context"
"errors"
"fmt"
"reflect"
"testing"
@ -176,48 +175,6 @@ func TestClient_Add(t *testing.T) {
}
}
// TestClient_Add_UserNotFound verifies the fix for defect #5840: the Add API has to wait
// for the newly created user to become available
func TestClient_Add_UserNotFound(t *testing.T) {
notFoundAttempts := 1
c := &enterprise.UserStore{
Ctrl: &mockCtrl{
createUser: func(ctx context.Context, name, passwd string) error {
return nil
},
user: func(ctx context.Context, name string) (*enterprise.User, error) {
if notFoundAttempts > 0 {
notFoundAttempts--
return nil, errors.New("user not found")
}
return &enterprise.User{
Name: "pavel",
Permissions: map[string][]string{},
}, nil
},
userRoles: func(ctx context.Context) (map[string]enterprise.Roles, error) {
return map[string]enterprise.Roles{}, nil
},
},
}
got, err := c.Add(context.Background(), &chronograf.User{
Name: "pavel",
Passwd: "levap",
})
if err != nil {
t.Errorf("Client.Add() error = %v", err)
return
}
want := &chronograf.User{
Name: "pavel",
Permissions: chronograf.Permissions{},
Roles: []chronograf.Role{},
}
if !reflect.DeepEqual(got, want) {
t.Errorf("Client.Add() = \n%#v\n, want \n%#v\n", got, want)
}
}
func TestClient_Delete(t *testing.T) {
type fields struct {
Ctrl *mockCtrl

View File

@ -1,4 +1,4 @@
FROM ubuntu:focal
FROM ubuntu:bionic
RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y \
apt-transport-https \
@ -17,15 +17,14 @@ RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y \
libtool
RUN pip3 install boto requests python-jose --upgrade
RUN gem install dotenv -v 2.8.1
RUN gem install fpm
# Install node
ENV NODE_VERSION v16.14.2
ENV NODE_VERSION v14.15.0
RUN wget -q https://nodejs.org/dist/${NODE_VERSION}/node-${NODE_VERSION}-linux-x64.tar.gz; \
mkdir /usr/local/node; \
tar -xvf node-${NODE_VERSION}-linux-x64.tar.gz -C /usr/local/node --strip-components=1; \
rm -f node-${NODE_VERSION}-linux-x64.tar.gz
mkdir /usr/local/node; \
tar -xvf node-${NODE_VERSION}-linux-x64.tar.gz -C /usr/local/node --strip-components=1; \
rm -f node-${NODE_VERSION}-linux-x64.tar.gz
ENV PATH=/usr/local/node/bin:$PATH
RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \
@ -34,12 +33,12 @@ RUN curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \
# Install go
ENV GOPATH /root/go
ENV GO_VERSION 1.23.8
ENV GO_VERSION 1.16.4
ENV GO_ARCH amd64
ENV GO111MODULES ON
RUN wget https://golang.org/dl/go${GO_VERSION}.linux-${GO_ARCH}.tar.gz; \
tar -C /usr/local/ -xf /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz ; \
rm /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz
tar -C /usr/local/ -xf /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz ; \
rm /go${GO_VERSION}.linux-${GO_ARCH}.tar.gz
ENV PATH /usr/local/go/bin:$PATH
ENV PROJECT_DIR $GOPATH/src/github.com/influxdata/chronograf

View File

@ -1,31 +1,15 @@
# Builds
## Builds
Builds are run from a docker build image that is configured with the node and go versions we support.
Our circle.yml uses this docker container to build, test, and create release packages.
## Updating new node/go versions
### Updating new node/go versions
After updating the `Dockerfile_build`, run
Versions can be updated in `Dockerfile_build`. A new docker image must then be built, published, and used in CI.
`docker build -t quay.io/influxdb/builder:chronograf-$(date "+%Y%m%d") -f Dockerfile_build .`
### Step 1: Build New Docker Image and Save It to Quay
and push to quay with:
`docker push quay.io/influxdb/builder:chronograf-$(date "+%Y%m%d")`
Having logged in to quay.io with push permissions, run:
```sh
cd $CHRONOGRAF_REPOSITORY_ROOT
./etc/scripts/docker/build.sh
```
### OPTIONAL Step 2: Check the build image
Run the image with:
```sh
export DOCKER_TAG="chronograf-$(date +%Y%m%d)"
./etc/scripts/docker/run.sh
```
### Step 3: Update script and CircleCI
1. Modify the default tag in `etc/docker/run.sh`, replacing it with the new one.
2. Change DOCKER_TAG in `.circleci/config.yml`.
### Update circle
Update DOCKER_TAG in .circleci/config.yml to the new container.
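As a recap, here is a minimal sketch of the flow described in the steps above, assuming the scripts and paths referenced there (`etc/scripts/docker/build.sh`, `etc/scripts/docker/run.sh`, `.circleci/config.yml`) and push permissions on quay.io:

```sh
# Step 1: build the builder image and push it to quay.io/influxdb/builder
cd "$CHRONOGRAF_REPOSITORY_ROOT"
./etc/scripts/docker/build.sh

# Optional step 2: sanity-check the freshly built image
export DOCKER_TAG="chronograf-$(date +%Y%m%d)"
./etc/scripts/docker/run.sh

# Step 3 is manual: point the default tag in the run script and the
# DOCKER_TAG in .circleci/config.yml at the new date-based tag.
```

Since the tag embeds the build date, a rebuilt image gets a fresh tag rather than replacing the one CI currently uses, so nothing switches over until the config is updated.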

View File

@ -297,10 +297,6 @@ def get_current_commit(short=False):
out = run(command)
return out.strip('\'\n\r ')
def set_safe_directory():
out = run("git config --global --add safe.directory *")
return out.strip()
def get_current_branch():
"""Retrieve the current git branch.
"""
@ -952,7 +948,6 @@ if __name__ == '__main__':
level=LOG_LEVEL,
format=log_format)
logging.info(set_safe_directory())
parser = argparse.ArgumentParser(description='InfluxDB build and packaging script.')
parser.add_argument('--verbose','-v','--debug',
action='store_true',

View File

@ -1,24 +0,0 @@
#!/usr/bin/env node
/* eslint-disable no-console */
import { spawnSync } from "child_process";
const yarnLicensesJSON = spawnSync("yarn", ["licenses", "list", "--json"], {
encoding: "utf8",
});
const licensesOutput = yarnLicensesJSON.stdout.split("\n").filter((x) => x);
const licenses = licensesOutput[licensesOutput.length - 1];
// generate licenses.json with `yarn licenses --json`
const obj = JSON.parse(licenses);
const items = obj.data.body.reduce((acc, val) => {
const [name] = val;
(acc[name] || (acc[name] = [])).push(val);
return acc;
}, {});
const keys = Object.keys(items).sort();
keys.forEach((key) => {
items[key].forEach((val) => {
console.log(`* ${key} ${val[1]} [${val[2]}](${val[3]})`);
});
});

View File

@ -1,43 +0,0 @@
# oauth2-server-mock
This repository contains a mock [OAuth2](https://oauth.net/2/) Authorization Server with Authorization Code Grant flow and OpenID [UserInfo endpoint](https://openid.net/specs/openid-connect-core-1_0.html#UserInfo). It is primarily intended for testing; it always authorizes a pre-configured user that can be changed at runtime.
## Install
Prerequisites: [node.js 14](https://nodejs.org/) or newer
Install the required dependencies using yarn:
```
yarn
```
## Configure
Modify [./env.sh](./env.sh) and run `source env.sh` to set up the initial configuration of the OAuth2 Mock Server.
## Start
```
yarn start
```
The initial configuration is printed to the console, along with the URL where the server is listening for requests.
## Chronograf Setup
[Chronograf](https://github.com/influxdata/chronograf) can use this OAuth2 Mock server as a generic OAuth2 authentication provider,
but several environment variables must be set before starting chronograf. These variables are shown in [./oauth-for-chronograf.sh](./oauth-for-chronograf.sh).
## Change OAuth2 redirect or authorized user at runtime
The running OAuth2 Mock server exposes a simple UI that lets you change the name/email
of the authorized user and the redirect URL used after authorization. This page is accessible by default at http://localhost:8087/ .
Alternatively, a simple HTTP POST request can be sent to change the config:
```bash
curl -X POST http://localhost:8087/config -H 'Content-Type: application/json' -d '{
"redirect_uri": "http://whatever.you.like/callback",
"userinfo": { "name": "fred doe", "email": "fred.doe@fake.net" }
}'
```

View File

@ -1,9 +0,0 @@
#!/bin/bash
# server hostname and port
export OAUTH2_PORT=8087
export OAUTH2_HOSTNAME=localhost
# user to authorize
export OAUTH2_TEST_USER_NAME="Test User"
export OAUTH2_TEST_USER_EMAIL=test@oauth2.mock
# where to redirect after authorization
export OAUTH2_REDIRECT_URL=http://localhost:8888/oauth/oauth-mock/callback

View File

@ -1,24 +0,0 @@
#!/bin/bash
# This script sets up chronograf environment variables to use this OAuth2 server.
# Run this script as `source oauth-for-chronograf.sh` before you start chronograf.
. `dirname ${BASH_SOURCE}`/env.sh
OAUTH2_URL=http://${OAUTH2_HOSTNAME}:${OAUTH2_PORT}
# chronograf environment variables that configure OAuth2
export TOKEN_SECRET=Q4O1T8FTbErOnmx03mGeVH3pkvKtdKr6HEmzEpNBiVMynZ/qKDdOResI3OMx4Zg9kmIfAI9ihlIV3OV5+VRfZ+iB2knLuGagEmFpG/h51CRcQY58j2NpnxdBewz91E51RRfjDYvqMrISHZCjdeuw0338Xp5UnEg32utr0ThRN0Ucv2isRr4KYJNYuvUXrjKJzjh76394JwY+bzn20L/enR2rLEtJ40ePxwuEvsE0MBUGZy79ecLZPaolQ3lkPE6X3+iV/9suN0BkBNtbQe1sGv4P522jSm24fFhXaFjetQQ/dJGehbWzsBo8uVAWB2RO0+xU2LhHFN0k0LAESD6MWw==
export GENERIC_CLIENT_ID=whateverid
export GENERIC_CLIENT_SECRET=whateversecret
export GENERIC_AUTH_URL="${OAUTH2_URL}/oauth/authorize"
export GENERIC_TOKEN_URL="${OAUTH2_URL}/oauth/token"
export GENERIC_API_URL=${OAUTH2_URL}/userinfo
export GENERIC_SCOPES="whatever"
export GENERIC_NAME=oauth-mock
export PUBLIC_URL=http://localhost:8888
export REDIR_AUTH_LOGIN=oauth-mock
echo Make sure to set up the following environment variables before you start the OAuth2 Mock server
echo export OAUTH2_HOSTNAME=${OAUTH2_HOSTNAME}
echo export OAUTH2_PORT=${OAUTH2_PORT}
echo export OAUTH2_REDIRECT_URL=${PUBLIC_URL}/oauth/${GENERIC_NAME}/callback

View File

@ -1,16 +0,0 @@
{
"name": "oauth2-server-mock",
"version": "0.1.0",
"description": "Mock OAuth2 server with authorization code grant",
"type": "module",
"main": "src/index.mjs",
"license": "MIT",
"private": true,
"scripts": {
"start": "node src/index.mjs"
},
"dependencies": {
"cors": "^2.8.5",
"express": "^4.19.2"
}
}

View File

@ -1,124 +0,0 @@
/* eslint-disable no-process-exit */
import express from "express";
import cors from "cors";
// terminate on CTRL+C (SIGINT) or SIGTERM
process.on("SIGINT", () => process.exit());
process.on("SIGTERM", () => process.exit());
const app = express();
app.use(cors())
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
let CONFIG = {
redirect_uri:
process.env.OAUTH2_REDIRECT_URL ||
"http://localhost:8888/oauth/oauth-mock/callback",
userinfo: {
name: process.env.OAUTH2_TEST_USER_NAME || "Test User",
email: process.env.OAUTH2_TEST_USER_EMAIL || "test@oauth2.mock",
},
};
app.get("/oauth/authorize", (req, res) => {
const state = req.query.state;
const redirect = req.query.redirect_uri || CONFIG.redirect_uri;
console.info("GET /oauth/authorize: ", redirect);
res.setHeader("Location", `${redirect}?code=${encodeURIComponent(redirect + new Date().toISOString())}&state=${encodeURIComponent(state)}`);
res.sendStatus(302);
res.end();
});
app.post("/oauth/token", (_req, res) => {
console.info("POST /oauth/token: ");
const token = `t-${new Date().toISOString()}`;
res.setHeader("content-type", "application/json;charset=UTF-8");
res.status(200);
res.json({
access_token: token,
token_type: "bearer",
expires_in: 3600,
refresh_token: token,
});
});
app.get("/userinfo", (_req, res) => {
console.info("GET /userinfo");
res.setHeader("Content-Type", "application/json;charset=UTF-8");
res.status(200);
res.json(CONFIG.userinfo);
});
app.post("/config", (req, res) => {
console.info("POST /state");
res.setHeader("Content-Type", "application/json;charset=UTF-8");
try {
const body = req.body;
if (typeof body !== "object" || body === null) {
throw new Error("invalid body");
}
if (!body.redirect_uri) {
body.redirect_uri = CONFIG.redirect_uri;
}
if (!body.userinfo) {
body.userinfo = CONFIG.userinfo;
}
CONFIG = body;
console.info("Configuration changed to:", CONFIG);
} catch (e) {
console.error(e);
res.sendStatus(500);
return;
}
res.status(200);
res.json(CONFIG);
});
app.all("/", (req, res) => {
if (req.method !== "POST" && req.method !== "GET") {
return res.sendStatus(500);
}
if (req.method === "POST") {
const { redirect_uri, email, name } = req.body;
if (redirect_uri) {
CONFIG.redirect_uri = redirect_uri;
}
if (email) {
(CONFIG.userinfo || (CONFIG.userinfo = {})).email = email;
}
if (name) {
(CONFIG.userinfo || (CONFIG.userinfo = {})).name = name;
}
console.info("Configuration changed to:", CONFIG);
}
res.status(200);
res.header("Content-Type", "text/html;charset=UTF-8");
res.write(
"<html><head><title>OAuth2 Mock</title></head><body><h1>OAuth2 Mock</h1>"
);
res.write('<form action="/" method="POST">');
res.write('<label for="callbackUrl">Callback URL:</label><br>');
res.write(
`<input style="width: 100%" type="text" id="callbackUrl" name="redirect_uri" value="${CONFIG.redirect_uri}"><br>`
);
res.write('<label for="email">User Email:</label><br>');
res.write(
`<input style="width: 100%" type="text" id="email" name="email" value="${CONFIG.userinfo.email}"><br>`
);
res.write('<label for="name">User Name:</label><br>');
res.write(
`<input style="width: 100%" type="text" id="name" name="name" value="${CONFIG.userinfo.name}"><br>`
);
res.write('<br><input type="submit" value="Change Configuration">');
res.write('<a style="float: right" href="userinfo">OpenID UserInfo endpoint</a>');
res.write("</form></body></html>");
res.end();
});
// start HTTP server
const port = process.env.OAUTH2_PORT || 8087;
const HOSTNAME = process.env.OAUTH2_HOSTNAME || "localhost";
app.listen(port, HOSTNAME, () => {
console.info("Initial configuration:", CONFIG);
console.info(`Listening on http://${HOSTNAME}:${port}`);
});

View File

@ -1,476 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
accepts@~1.3.8:
version "1.3.8"
resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.8.tgz#0bf0be125b67014adcb0b0921e62db7bffe16b2e"
integrity sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==
dependencies:
mime-types "~2.1.34"
negotiator "0.6.3"
array-flatten@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2"
integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=
body-parser@1.20.2:
version "1.20.2"
resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd"
integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==
dependencies:
bytes "3.1.2"
content-type "~1.0.5"
debug "2.6.9"
depd "2.0.0"
destroy "1.2.0"
http-errors "2.0.0"
iconv-lite "0.4.24"
on-finished "2.4.1"
qs "6.11.0"
raw-body "2.5.2"
type-is "~1.6.18"
unpipe "1.0.0"
bytes@3.1.2:
version "3.1.2"
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
call-bind@^1.0.7:
version "1.0.7"
resolved "https://registry.yarnpkg.com/call-bind/-/call-bind-1.0.7.tgz#06016599c40c56498c18769d2730be242b6fa3b9"
integrity sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==
dependencies:
es-define-property "^1.0.0"
es-errors "^1.3.0"
function-bind "^1.1.2"
get-intrinsic "^1.2.4"
set-function-length "^1.2.1"
content-disposition@0.5.4:
version "0.5.4"
resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.4.tgz#8b82b4efac82512a02bb0b1dcec9d2c5e8eb5bfe"
integrity sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==
dependencies:
safe-buffer "5.2.1"
content-type@~1.0.4, content-type@~1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918"
integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==
cookie-signature@1.0.6:
version "1.0.6"
resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c"
integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw=
cookie@0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051"
integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==
cors@^2.8.5:
version "2.8.5"
resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29"
integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==
dependencies:
object-assign "^4"
vary "^1"
debug@2.6.9:
version "2.6.9"
resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==
dependencies:
ms "2.0.0"
define-data-property@^1.1.4:
version "1.1.4"
resolved "https://registry.yarnpkg.com/define-data-property/-/define-data-property-1.1.4.tgz#894dc141bb7d3060ae4366f6a0107e68fbe48c5e"
integrity sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==
dependencies:
es-define-property "^1.0.0"
es-errors "^1.3.0"
gopd "^1.0.1"
depd@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
integrity sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==
destroy@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.2.0.tgz#4803735509ad8be552934c67df614f94e66fa015"
integrity sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==
ee-first@1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
encodeurl@~1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59"
integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=
es-define-property@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/es-define-property/-/es-define-property-1.0.0.tgz#c7faefbdff8b2696cf5f46921edfb77cc4ba3845"
integrity sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==
dependencies:
get-intrinsic "^1.2.4"
es-errors@^1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/es-errors/-/es-errors-1.3.0.tgz#05f75a25dab98e4fb1dcd5e1472c0546d5057c8f"
integrity sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==
escape-html@~1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=
etag@~1.8.1:
version "1.8.1"
resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=
express@^4.19.2:
version "4.19.2"
resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465"
integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q==
dependencies:
accepts "~1.3.8"
array-flatten "1.1.1"
body-parser "1.20.2"
content-disposition "0.5.4"
content-type "~1.0.4"
cookie "0.6.0"
cookie-signature "1.0.6"
debug "2.6.9"
depd "2.0.0"
encodeurl "~1.0.2"
escape-html "~1.0.3"
etag "~1.8.1"
finalhandler "1.2.0"
fresh "0.5.2"
http-errors "2.0.0"
merge-descriptors "1.0.1"
methods "~1.1.2"
on-finished "2.4.1"
parseurl "~1.3.3"
path-to-regexp "0.1.7"
proxy-addr "~2.0.7"
qs "6.11.0"
range-parser "~1.2.1"
safe-buffer "5.2.1"
send "0.18.0"
serve-static "1.15.0"
setprototypeof "1.2.0"
statuses "2.0.1"
type-is "~1.6.18"
utils-merge "1.0.1"
vary "~1.1.2"
finalhandler@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.2.0.tgz#7d23fe5731b207b4640e4fcd00aec1f9207a7b32"
integrity sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==
dependencies:
debug "2.6.9"
encodeurl "~1.0.2"
escape-html "~1.0.3"
on-finished "2.4.1"
parseurl "~1.3.3"
statuses "2.0.1"
unpipe "~1.0.0"
forwarded@0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811"
integrity sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==
fresh@0.5.2:
version "0.5.2"
resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7"
integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=
function-bind@^1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.2.tgz#2c02d864d97f3ea6c8830c464cbd11ab6eab7a1c"
integrity sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==
get-intrinsic@^1.1.3, get-intrinsic@^1.2.4:
version "1.2.4"
resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.2.4.tgz#e385f5a4b5227d449c3eabbad05494ef0abbeadd"
integrity sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==
dependencies:
es-errors "^1.3.0"
function-bind "^1.1.2"
has-proto "^1.0.1"
has-symbols "^1.0.3"
hasown "^2.0.0"
gopd@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/gopd/-/gopd-1.0.1.tgz#29ff76de69dac7489b7c0918a5788e56477c332c"
integrity sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==
dependencies:
get-intrinsic "^1.1.3"
has-property-descriptors@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz#963ed7d071dc7bf5f084c5bfbe0d1b6222586854"
integrity sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==
dependencies:
es-define-property "^1.0.0"
has-proto@^1.0.1:
version "1.0.3"
resolved "https://registry.yarnpkg.com/has-proto/-/has-proto-1.0.3.tgz#b31ddfe9b0e6e9914536a6ab286426d0214f77fd"
integrity sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==
has-symbols@^1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8"
integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==
hasown@^2.0.0:
version "2.0.2"
resolved "https://registry.yarnpkg.com/hasown/-/hasown-2.0.2.tgz#003eaf91be7adc372e84ec59dc37252cedb80003"
integrity sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==
dependencies:
function-bind "^1.1.2"
http-errors@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-2.0.0.tgz#b7774a1486ef73cf7667ac9ae0858c012c57b9d3"
integrity sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==
dependencies:
depd "2.0.0"
inherits "2.0.4"
setprototypeof "1.2.0"
statuses "2.0.1"
toidentifier "1.0.1"
iconv-lite@0.4.24:
version "0.4.24"
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
dependencies:
safer-buffer ">= 2.1.2 < 3"
inherits@2.0.4:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
ipaddr.js@1.9.1:
version "1.9.1"
resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3"
integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==
media-typer@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
merge-descriptors@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=
methods@~1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee"
integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=
mime-db@1.52.0:
version "1.52.0"
resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
mime-types@~2.1.24, mime-types@~2.1.34:
version "2.1.35"
resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
dependencies:
mime-db "1.52.0"
mime@1.6.0:
version "1.6.0"
resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==
ms@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8"
integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=
ms@2.1.3:
version "2.1.3"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
negotiator@0.6.3:
version "0.6.3"
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.3.tgz#58e323a72fedc0d6f9cd4d31fe49f51479590ccd"
integrity sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==
object-assign@^4:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
object-inspect@^1.13.1:
version "1.13.1"
resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.13.1.tgz#b96c6109324ccfef6b12216a956ca4dc2ff94bc2"
integrity sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==
on-finished@2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.4.1.tgz#58c8c44116e54845ad57f14ab10b03533184ac3f"
integrity sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==
dependencies:
ee-first "1.1.1"
parseurl@~1.3.3:
version "1.3.3"
resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4"
integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==
path-to-regexp@0.1.7:
version "0.1.7"
resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c"
integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=
proxy-addr@~2.0.7:
version "2.0.7"
resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.7.tgz#f19fe69ceab311eeb94b42e70e8c2070f9ba1025"
integrity sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==
dependencies:
forwarded "0.2.0"
ipaddr.js "1.9.1"
qs@6.11.0:
version "6.11.0"
resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a"
integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==
dependencies:
side-channel "^1.0.4"
range-parser@~1.2.1:
version "1.2.1"
resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031"
integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==
raw-body@2.5.2:
version "2.5.2"
resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a"
integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==
dependencies:
bytes "3.1.2"
http-errors "2.0.0"
iconv-lite "0.4.24"
unpipe "1.0.0"
safe-buffer@5.2.1:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
"safer-buffer@>= 2.1.2 < 3":
version "2.1.2"
resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a"
integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==
send@0.18.0:
version "0.18.0"
resolved "https://registry.yarnpkg.com/send/-/send-0.18.0.tgz#670167cc654b05f5aa4a767f9113bb371bc706be"
integrity sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==
dependencies:
debug "2.6.9"
depd "2.0.0"
destroy "1.2.0"
encodeurl "~1.0.2"
escape-html "~1.0.3"
etag "~1.8.1"
fresh "0.5.2"
http-errors "2.0.0"
mime "1.6.0"
ms "2.1.3"
on-finished "2.4.1"
range-parser "~1.2.1"
statuses "2.0.1"
serve-static@1.15.0:
version "1.15.0"
resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.15.0.tgz#faaef08cffe0a1a62f60cad0c4e513cff0ac9540"
integrity sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==
dependencies:
encodeurl "~1.0.2"
escape-html "~1.0.3"
parseurl "~1.3.3"
send "0.18.0"
set-function-length@^1.2.1:
version "1.2.2"
resolved "https://registry.yarnpkg.com/set-function-length/-/set-function-length-1.2.2.tgz#aac72314198eaed975cf77b2c3b6b880695e5449"
integrity sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==
dependencies:
define-data-property "^1.1.4"
es-errors "^1.3.0"
function-bind "^1.1.2"
get-intrinsic "^1.2.4"
gopd "^1.0.1"
has-property-descriptors "^1.0.2"
setprototypeof@1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.2.0.tgz#66c9a24a73f9fc28cbe66b09fed3d33dcaf1b424"
integrity sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==
side-channel@^1.0.4:
version "1.0.6"
resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.6.tgz#abd25fb7cd24baf45466406b1096b7831c9215f2"
integrity sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==
dependencies:
call-bind "^1.0.7"
es-errors "^1.3.0"
get-intrinsic "^1.2.4"
object-inspect "^1.13.1"
statuses@2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/statuses/-/statuses-2.0.1.tgz#55cb000ccf1d48728bd23c685a063998cf1a1b63"
integrity sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==
toidentifier@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.1.tgz#3be34321a88a820ed1bd80dfaa33e479fbb8dd35"
integrity sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==
type-is@~1.6.18:
version "1.6.18"
resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131"
integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==
dependencies:
media-typer "0.3.0"
mime-types "~2.1.24"
unpipe@1.0.0, unpipe@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
utils-merge@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713"
integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=
vary@^1, vary@~1.1.2:
version "1.1.2"
resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=

View File

@ -1,9 +1,8 @@
#!/bin/bash
set -x
docker_tag="chronograf-$(date +%Y%m%d)"
DOCKER_TAG="chronograf-$(date +%Y%m%d)"
docker build --rm=false -f etc/Dockerfile_build -t builder:$docker_tag .
docker tag builder:$docker_tag quay.io/influxdb/builder:$docker_tag
docker build --rm=false --platform linux/amd64 -f etc/Dockerfile_build -t builder:$DOCKER_TAG .
docker tag builder:$DOCKER_TAG quay.io/influxdb/builder:$DOCKER_TAG
docker push quay.io/influxdb/builder:$DOCKER_TAG
docker push quay.io/influxdb/builder:$docker_tag

View File

@ -14,7 +14,7 @@ test -z $SSH_KEY_PATH && SSH_KEY_PATH="$HOME/.ssh/id_rsa"
echo "Using SSH key located at: $SSH_KEY_PATH"
# Default docker tag if not specified
test -z "$DOCKER_TAG" && DOCKER_TAG="chronograf-20250404"
test -z "$DOCKER_TAG" && DOCKER_TAG="chronograf-20210828"
docker run \
-e AWS_ACCESS_KEY_ID \

View File

@ -8,13 +8,12 @@
//
// Resources that are storable in a file are:
// (CRUD refers to create, read, update, delete. An '_' means not supported)
//
// Apps(layouts) - _R__
// Dashboards - _RUD
// Kapacitors - _RUD
// Organizations - _R__
// Protoboards - _R__
// Sources - _RUD
// Apps(layouts) - _R__
// Dashboards - _RUD
// Kapacitors - _RUD
// Organizations - _R__
// Protoboards - _R__
// Sources - _RUD
//
// Caution should be taken when editing resources provided via the filestore,
// especially in a distributed environment as unexpected behavior may occur.

View File

@ -1,11 +1,15 @@
package flux
import (
"context"
"errors"
"io/ioutil"
"net/http"
"net/url"
"strings"
"time"
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/util"
)
@ -22,9 +26,36 @@ type Client struct {
Timeout time.Duration
}
// Ping checks the connection of a Flux.
func (c *Client) Ping(ctx context.Context) error {
t := 2 * time.Second
if c.Timeout > 0 {
t = c.Timeout
}
ctx, cancel := context.WithTimeout(ctx, t)
defer cancel()
err := c.pingTimeout(ctx)
return err
}
func (c *Client) pingTimeout(ctx context.Context) error {
resps := make(chan (error))
go func() {
resps <- c.ping(c.URL)
}()
select {
case resp := <-resps:
return resp
case <-ctx.Done():
return chronograf.ErrUpstreamTimeout
}
}
// FluxEnabled returns true if the server has flux querying enabled.
func (c *Client) FluxEnabled() (bool, error) {
url := util.AppendPath(c.URL, "/api/v2/query")
url := c.URL
url.Path = "/api/v2/query"
req, err := http.NewRequest("POST", url.String(), nil)
if err != nil {
@ -53,3 +84,36 @@ func (c *Client) FluxEnabled() (bool, error) {
// {"code":"unauthorized","message":"unauthorized access"} is received
return strings.HasPrefix(contentType, "application/json"), nil
}
func (c *Client) ping(u *url.URL) error {
u.Path = "ping"
req, err := http.NewRequest("GET", u.String(), nil)
if err != nil {
return err
}
hc := &http.Client{}
if c.InsecureSkipVerify {
hc.Transport = skipVerifyTransport
} else {
hc.Transport = defaultTransport
}
resp, err := hc.Do(req)
if err != nil {
return err
}
defer resp.Body.Close()
body, err := ioutil.ReadAll(resp.Body)
if err != nil {
return err
}
if resp.StatusCode != http.StatusNoContent {
return errors.New(string(body))
}
return nil
}

View File

@ -1,56 +0,0 @@
package flux_test
import (
"net/http"
"net/http/httptest"
"net/url"
"strings"
"testing"
"time"
"github.com/influxdata/chronograf/flux"
)
// NewClient initializes an HTTP Client for InfluxDB.
func NewClient(urlStr string) *flux.Client {
u, _ := url.Parse(urlStr)
return &flux.Client{
URL: u,
Timeout: 500 * time.Millisecond,
}
}
func Test_FluxEnabled(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
path := r.URL.Path
if !strings.HasSuffix(path, "/api/v2/query") {
t.Error("Expected the path to contain `/api/v2/query` but was", path)
}
if strings.HasPrefix(path, "/enabled_v1") {
rw.Header().Add("Content-Type", "application/json")
rw.WriteHeader(http.StatusBadRequest)
rw.Write([]byte(`{}`))
return
}
if strings.HasPrefix(path, "/enabled_v2") {
rw.Header().Add("Content-Type", "application/json")
rw.WriteHeader(http.StatusUnauthorized)
rw.Write([]byte(`{"code":"unauthorized","message":"unauthorized access"}`))
return
}
rw.Header().Add("Content-Type", "text/plain")
rw.WriteHeader(http.StatusForbidden)
rw.Write([]byte(`Flux query service disabled.`))
}))
defer ts.Close()
if enabled, _ := NewClient(ts.URL).FluxEnabled(); enabled {
t.Errorf("Client.FluxEnabled() expected false value")
}
if enabled, _ := NewClient(ts.URL + "/enabled_v1").FluxEnabled(); !enabled {
t.Errorf("Client.FluxEnabled() expected true value")
}
if enabled, _ := NewClient(ts.URL + "/enabled_v2").FluxEnabled(); !enabled {
t.Errorf("Client.FluxEnabled() expected true value")
}
}

47
go.mod
View File

@ -1,14 +1,15 @@
module github.com/influxdata/chronograf
go 1.23.0
go 1.17
require (
cloud.google.com/go/bigtable v1.10.0 // indirect
github.com/NYTimes/gziphandler v1.1.1
github.com/abbot/go-http-auth v0.4.0
github.com/bouk/httprouter v0.0.0-20160817010721-ee8b3818a7f5
github.com/golang-jwt/jwt/v4 v4.5.2
github.com/google/go-cmp v0.6.0
github.com/elazarl/go-bindata-assetfs v1.0.0
github.com/golang-jwt/jwt/v4 v4.0.0
github.com/google/go-cmp v0.5.5
github.com/google/go-github v17.0.0+incompatible
github.com/google/uuid v1.1.2
github.com/influxdata/flux v0.114.1
@ -16,18 +17,18 @@ require (
github.com/influxdata/kapacitor v1.5.10-0.20210518140415-452f2b236610
github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368
github.com/jessevdk/go-flags v1.4.0
github.com/lestrrat-go/jwx/v2 v2.0.21
github.com/microcosm-cc/bluemonday v1.0.26
github.com/lestrrat-go/jwx v0.9.0
github.com/microcosm-cc/bluemonday v1.0.16
github.com/sergi/go-diff v1.1.0
github.com/sirupsen/logrus v1.7.0
github.com/stretchr/testify v1.9.0
github.com/stretchr/testify v1.7.0
go.etcd.io/bbolt v1.3.5
go.etcd.io/etcd/client/v3 v3.5.0-alpha.0
go.etcd.io/etcd/server/v3 v3.5.0-alpha.0
golang.org/x/net v0.38.0
golang.org/x/oauth2 v0.27.0
golang.org/x/net v0.0.0-20210614182718-04defd469f4e
golang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c
google.golang.org/api v0.46.0
google.golang.org/protobuf v1.33.0
google.golang.org/protobuf v1.27.1
)
require (
@ -40,17 +41,15 @@ require (
github.com/coreos/go-semver v0.3.0 // indirect
github.com/coreos/go-systemd/v22 v22.1.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect
github.com/dustin/go-humanize v1.0.0 // indirect
github.com/eclipse/paho.mqtt.golang v1.2.0 // indirect
github.com/form3tech-oss/jwt-go v3.2.2+incompatible // indirect
github.com/go-sql-driver/mysql v1.4.1 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/gofrs/uuid v3.3.0+incompatible // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec // indirect
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e // indirect
github.com/golang/protobuf v1.5.4 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/btree v1.0.1 // indirect
github.com/google/flatbuffers v1.11.0 // indirect
github.com/google/go-querystring v1.0.0 // indirect
@ -63,13 +62,8 @@ require (
github.com/influxdata/line-protocol v0.0.0-20180522152040-32c6aa80de5e // indirect
github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9 // indirect
github.com/jonboulle/clockwork v0.2.2 // indirect
github.com/json-iterator/go v1.1.11 // indirect
github.com/json-iterator/go v1.1.10 // indirect
github.com/jstemmer/go-junit-report v0.9.1 // indirect
github.com/lestrrat-go/blackmagic v1.0.2 // indirect
github.com/lestrrat-go/httpcc v1.0.1 // indirect
github.com/lestrrat-go/httprc v1.0.5 // indirect
github.com/lestrrat-go/iter v1.0.2 // indirect
github.com/lestrrat-go/option v1.0.1 // indirect
github.com/lib/pq v1.2.0 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
@ -77,11 +71,10 @@ require (
github.com/opentracing/opentracing-go v1.2.0 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_golang v1.11.1 // indirect
github.com/prometheus/client_golang v1.10.0 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.26.0 // indirect
github.com/prometheus/common v0.20.0 // indirect
github.com/prometheus/procfs v0.6.0 // indirect
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/kafka-go v0.3.10 // indirect
github.com/soheilhy/cmux v0.1.5-0.20210205191134-5ec6847320e5 // indirect
github.com/spf13/pflag v1.0.5 // indirect
@ -95,19 +88,19 @@ require (
go.uber.org/atomic v1.6.0 // indirect
go.uber.org/multierr v1.5.0 // indirect
go.uber.org/zap v1.16.0 // indirect
golang.org/x/crypto v0.36.0 // indirect
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 // indirect
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5 // indirect
golang.org/x/mod v0.17.0 // indirect
golang.org/x/sys v0.31.0 // indirect
golang.org/x/text v0.23.0 // indirect
golang.org/x/mod v0.4.1 // indirect
golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324 // indirect
golang.org/x/text v0.3.6 // indirect
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
golang.org/x/tools v0.1.0 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/genproto v0.0.0-20210503173045-b96a97608f20 // indirect
google.golang.org/grpc v1.37.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect
sigs.k8s.io/yaml v1.2.0 // indirect
)

86
go.sum
View File

@ -96,6 +96,7 @@ github.com/armon/go-metrics v0.3.6/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aryann/difflib v0.0.0-20170710044230-e206f873d14a/go.mod h1:DAHtR1m6lCRdSC2Tm3DSWRPvIPr6xNKyeHdqDQSQT+A=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw=
github.com/aws/aws-lambda-go v1.13.3/go.mod h1:4UKl9IzQMoD+QF79YdCuzCwp8VbmG4VAQwij/eHl5CU=
github.com/aws/aws-sdk-go v1.27.0/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.38.3/go.mod h1:hcU610XS61/+aQV88ixoOzUoG7v3b31pl2zKMmprdro=
@ -151,6 +152,7 @@ github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8Nz
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.0 h1:wkHLiw0WNATZnSG7epLsujiMCgPAc9xhjJ4tgnAxmfM=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.0.0 h1:XJIw/+VlJ+87J+doOxznsAWIdmWuViOVhkQamW5YV28=
github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
github.com/coreos/go-systemd/v22 v22.1.0 h1:kq/SbG2BCKLkDKkjQf5OWwKWUKj1lgs3lFI4PxnR5lg=
github.com/coreos/go-systemd/v22 v22.1.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk=
@ -166,8 +168,6 @@ github.com/dave/jennifer v1.2.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhr
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 h1:8UrgZ3GkP4i/CLijOJx79Yu+etlyjdBU4sfcs2WYQMs=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0=
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
github.com/dgryski/go-bits v0.0.0-20180113010104-bd8a69a71dc2/go.mod h1:/9UYwwvZuEgp+mQ4960SHWCU1FS+FgdFX+m5ExFByNs=
github.com/dgryski/go-bitstream v0.0.0-20180413035011-3522498ce2c8/go.mod h1:VMaSuZ+SZcx/wljOQKvp5srsbCiKDEb6K2wC4+PiBmQ=
@ -190,6 +190,7 @@ github.com/eclipse/paho.mqtt.golang v1.2.0/go.mod h1:H9keYFcgq3Qr5OUJm/JZI/i6U7j
github.com/editorconfig-checker/editorconfig-checker v0.0.0-20190819115812-1474bdeaf2a2/go.mod h1:nnr6DXFepwb2+GC7evku5Mak3wGGRShiYy6fPkdIwVM=
github.com/editorconfig/editorconfig-core-go/v2 v2.1.1/go.mod h1:/LuhWJiQ9Gvo1DhVpa4ssm5qeg8rrztdtI7j/iCie2k=
github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
github.com/elazarl/go-bindata-assetfs v1.0.0 h1:G/bYguwHIzWq9ZoyUQqrjTmJbbYn3j3CKKpKinvZLFk=
github.com/elazarl/go-bindata-assetfs v1.0.0/go.mod h1:v+YaWX3bdea5J/mo8dSETolEo7R71Vk1u8bnjau5yw4=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/emicklei/go-restful v0.0.0-20170410110728-ff4f55a20633/go.mod h1:otzb+WCGbkyDHkqmQmT5YD2WR4BBwUdeQoFo8l/7tVs=
@ -227,7 +228,6 @@ github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.10.0/go.mod h1:xUsJbQ/Fp4kEt7AFgCuvyX4a71u8h9jB8tj/ORgOZ7o=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
@ -246,8 +246,6 @@ github.com/go-sql-driver/mysql v1.4.1 h1:g24URVg0OFbNUTx9qqY1IRZ9D9z3iPyi5zKhQZp
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/uuid v3.3.0+incompatible h1:8K4tyRfvU1CYPgJsveYFQMhpFd/wXNM7iK6rR7UHz84=
github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
@ -259,8 +257,8 @@ github.com/gogo/protobuf v1.2.2-0.20190730201129-28a6bbf47e48/go.mod h1:SlYgWuQ5
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
github.com/golang-jwt/jwt/v4 v4.5.2 h1:YtQM7lnr8iZ+j5q71MGKkNw9Mn7AjHM68uc9g5fXeUI=
github.com/golang-jwt/jwt/v4 v4.5.2/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v4 v4.0.0 h1:RAqyYixv1p7uEnocuy8P1nru5wprCh/MH2BIlW5z5/o=
github.com/golang-jwt/jwt/v4 v4.0.0/go.mod h1:/xlHOz8bRuivTWchD4jCa+NbatV+wEUSzwAxVc6locg=
github.com/golang/gddo v0.0.0-20181116215533-9bd4a3295021/go.mod h1:xEhNfoBDX1hzLm2Nf80qUvZ2sVwoMZ8d6IE2SrsQfh4=
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec h1:lJwO/92dFXWeXOZdoGXgptLmNLwynMSHUmU6besqtiw=
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
@ -295,9 +293,8 @@ github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.1/go.mod h1:DopwsBzvsk0Fs44TXzsVbJyPhcCPeIwnvohx4u74HPM=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek=
github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.1 h1:Qgr9rKW7uDUkrbSmQeiDsGa8SjGyCOGtuasMWwvp2P4=
github.com/golang/snappy v0.0.1/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
@ -318,9 +315,8 @@ github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-github v17.0.0+incompatible h1:N0LgJ1j65A7kfXrZnUDaYCs/Sf4rEjNlfyDHW9dolSY=
github.com/google/go-github v17.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
github.com/google/go-jsonnet v0.14.0/go.mod h1:zPGC9lj/TbjkBtUACIvYR/ILHrFqKRhxeEA+bLyeMnY=
@ -465,9 +461,8 @@ github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCV
github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.8/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.10 h1:Kz6Cvnvv2wGdaG/V8yMvfkmNiXq9Ya2KUv4rouJJr68=
github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/json-iterator/go v1.1.11 h1:uVUAXhF2To8cbw/3xN3pxj6kk7TYKs98NIrTqPlMWAQ=
github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
@ -498,18 +493,8 @@ github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/lestrrat-go/blackmagic v1.0.2 h1:Cg2gVSc9h7sz9NOByczrbUvLopQmXrfFx//N+AkAr5k=
github.com/lestrrat-go/blackmagic v1.0.2/go.mod h1:UrEqBzIR2U6CnzVyUtfM6oZNMt/7O7Vohk2J0OGSAtU=
github.com/lestrrat-go/httpcc v1.0.1 h1:ydWCStUeJLkpYyjLDHihupbn2tYmZ7m22BGkcvZZrIE=
github.com/lestrrat-go/httpcc v1.0.1/go.mod h1:qiltp3Mt56+55GPVCbTdM9MlqhvzyuL6W/NMDA8vA5E=
github.com/lestrrat-go/httprc v1.0.5 h1:bsTfiH8xaKOJPrg1R+E3iE/AWZr/x0Phj9PBTG/OLUk=
github.com/lestrrat-go/httprc v1.0.5/go.mod h1:mwwz3JMTPBjHUkkDv/IGJ39aALInZLrhBp0X7KGUZlo=
github.com/lestrrat-go/iter v1.0.2 h1:gMXo1q4c2pHmC3dn8LzRhJfP1ceCbgSiT9lUydIzltI=
github.com/lestrrat-go/iter v1.0.2/go.mod h1:Momfcq3AnRlRjI5b5O8/G5/BvpzrhoFTZcn06fEOPt4=
github.com/lestrrat-go/jwx/v2 v2.0.21 h1:jAPKupy4uHgrHFEdjVjNkUgoBKtVDgrQPB/h55FHrR0=
github.com/lestrrat-go/jwx/v2 v2.0.21/go.mod h1:09mLW8zto6bWL9GbwnqAli+ArLf+5M33QLQPDggkUWM=
github.com/lestrrat-go/option v1.0.1 h1:oAzP2fvZGQKWkvHa1/SAcFolBEca1oN+mQ7eooNBEYU=
github.com/lestrrat-go/option v1.0.1/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/lestrrat-go/jwx v0.9.0 h1:Fnd0EWzTm0kFrBPzE/PEPp9nzllES5buMkksPMjEKpM=
github.com/lestrrat-go/jwx v0.9.0/go.mod h1:iEoxlYfZjvoGpuWwxUz+eR5e6KTJGsaRcy/YNA/UnBk=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0 h1:LXpIM/LZ5xGFhOpXAQUIMM1HdyqzVYM13zNdjCEEcA0=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
@ -539,8 +524,10 @@ github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsO
github.com/mattn/go-tty v0.0.0-20180907095812-13ff1204f104/go.mod h1:XPvLUNfbS4fJH25nqRHfWLMa1ONC8Amw+mIA639KxkE=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/microcosm-cc/bluemonday v1.0.26 h1:xbqSvqzQMeEHCqMi64VAs4d8uy6Mequs3rQ0k/Khz58=
github.com/microcosm-cc/bluemonday v1.0.26/go.mod h1:JyzOCs9gkyQyjs+6h10UEVSe02CGwkhd72Xdqh78TWs=
github.com/microcosm-cc/bluemonday v1.0.15 h1:J4uN+qPng9rvkBZBoBb8YGR+ijuklIMpSOZZLjYpbeY=
github.com/microcosm-cc/bluemonday v1.0.15/go.mod h1:ZLvAzeakRwrGnzQEvstVzVt3ZpqOF2+sdFr0Om+ce30=
github.com/microcosm-cc/bluemonday v1.0.16 h1:kHmAq2t7WPWLjiGvzKa5o3HzSfahUKiOq7fAPUiMNIc=
github.com/microcosm-cc/bluemonday v1.0.16/go.mod h1:Z0r70sCuXHig8YpBzCc5eGHAap2K7e/u082ZUpDRRqM=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/mileusna/useragent v0.0.0-20190129205925-3e331f0949a5/go.mod h1:JWhYAp2EXqUtsxTKdeGlY8Wp44M7VxThC9FEoNGi2IE=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
@ -649,9 +636,8 @@ github.com/prometheus/client_golang v1.3.0/go.mod h1:hJaj2vgQTGQmVCsAACORcieXFeD
github.com/prometheus/client_golang v1.4.0/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.5.1/go.mod h1:e9GMxYsXl05ICDXkRhurwBS4Q3OK1iX/F2sw+iXX5zU=
github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M=
github.com/prometheus/client_golang v1.10.0 h1:/o0BDeWzLWXNZ+4q5gXltUvaMpJqckTa+jTNoB+z4cg=
github.com/prometheus/client_golang v1.10.0/go.mod h1:WJM3cc3yu7XKBKa/I8WeZm+V3eltZnBwfENSU7mdogU=
github.com/prometheus/client_golang v1.11.1 h1:+4eQaD7vAZ6DsfsxB15hbE0odUjGI5ARs9yskGu1v4s=
github.com/prometheus/client_golang v1.11.1/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
@ -669,9 +655,8 @@ github.com/prometheus/common v0.7.0/go.mod h1:DjGbpBbp5NYNiECxcL/VnbXCCaQpKd3tt2
github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8bs7vj7HSQ4=
github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo=
github.com/prometheus/common v0.18.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s=
github.com/prometheus/common v0.20.0 h1:pfeDeUdQcIxOMutNjCejsEFp7qeP+/iltHSSmLpE+hU=
github.com/prometheus/common v0.20.0/go.mod h1:U+gB1OBLb1lF3O42bTCL+FK18tX9Oar16Clt/msog/s=
github.com/prometheus/common v0.26.0 h1:iMAkS2TDoNWnKM+Kopnx/8tnEStIfpYA0ur0xQzzhMQ=
github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
@ -697,8 +682,6 @@ github.com/samuel/go-zookeeper v0.0.0-20190923202752-2cc03de413da/go.mod h1:gi+0
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/satori/go.uuid v1.2.1-0.20181028125025-b2ce2384e17b/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/segmentio/asm v1.2.0 h1:9BQrFxC+YOHJlTlHGkTrFWf59nbL3XnCoFLTwDCI7ys=
github.com/segmentio/asm v1.2.0/go.mod h1:BqMnlJP91P8d+4ibuonYZw9mfnzI9HfxselHZr5aAcs=
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
github.com/segmentio/kafka-go v0.3.10 h1:h/1aSu7gWp6DXLmp0csxm8wrYD6rRYyaqclu2aQ/PWo=
github.com/segmentio/kafka-go v0.3.10/go.mod h1:8rEphJEczp+yDE/R5vwmaqZgF1wllrl4ioQcNKB8wVA=
@ -754,10 +737,8 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg=
github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/tcnksm/go-input v0.0.0-20180404061846-548a7d7a8ee8/go.mod h1:IlWNj9v/13q7xFbaK4mbyzMNwrZLaWSHx/aibKIZuIg=
github.com/testcontainers/testcontainers-go v0.0.0-20190108154635-47c0da630f72/go.mod h1:wt/nMz68+kIO4RoguOZzsdv1B3kTYw+SuIKyJYRQpgE=
@ -796,6 +777,7 @@ go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.3/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
go.etcd.io/bbolt v1.3.5 h1:XAzx9gjCb0Rxj7EoqcClPD1d5ZBxZJk0jbuoPHenBt0=
go.etcd.io/bbolt v1.3.5/go.mod h1:G5EMThwa9y8QZGBClrRx5EY+Yw9kAhnjy3bSjsnlVTQ=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738 h1:VcrIfasaLFkyjk6KNlXQSzO+B0fZcnECiDrKJsfxka0=
go.etcd.io/etcd v0.0.0-20191023171146-3cf2f69b5738/go.mod h1:dnLIgRNXwCJa5e+c6mIZCrds/GIG4ncV9HhK5PX7jPg=
go.etcd.io/etcd/api/v3 v3.5.0-alpha.0 h1:+e5nrluATIy3GP53znpkHMFzPTHGYyzvJGFCbuI6ZLc=
go.etcd.io/etcd/api/v3 v3.5.0-alpha.0/go.mod h1:mPcW6aZJukV6Aa81LSKpBjQXTWlXB5r74ymPoSWa3Sw=
@ -852,9 +834,8 @@ golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8U
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201002170205-7f63de1d35b0/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83 h1:/ZScEX8SfEmUGRHs0gxpqteO5nfNW6axyZbBdw9A12g=
golang.org/x/crypto v0.0.0-20210220033148-5ea612d1eb83/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.36.0 h1:AnAEvhDddvBdpY+uR+MyHmuZzzNqXSe/GvuDeob5L34=
golang.org/x/crypto v0.36.0/go.mod h1:Y4J0ReaxCR1IMaabaSMugxJES1EpwhBHhv2bDHklZvc=
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@ -891,9 +872,8 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1 h1:Kvvh58BN8Y9/lBi7hTekvtMpm07eUZ0ck5pRHpsMWrY=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.17.0 h1:zY54UmvipHiNd+pm+m0x9KhZ9hl1/7QNMyxXbc6ICqA=
golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@ -945,8 +925,8 @@ golang.org/x/net v0.0.0-20210224082022-3d97a244fca7/go.mod h1:m0MpNAwzfU5UDzcl9v
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210316092652-d523dce5a7f4/go.mod h1:RBQZq4jEuRlivfhVLdyRGr576XBO4/greRjx4P4O3yc=
golang.org/x/net v0.0.0-20210503060351-7fd8e65b6420/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.38.0 h1:vRMAPTMaeGqVhG5QyLJHqNDwecKTomGeqbnfZyKlBI8=
golang.org/x/net v0.38.0/go.mod h1:ivrbrMbzFq5J41QOQh0siUuly180yBYtLp+CKbEaFx8=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e h1:XpT3nA5TvE525Ne3hInMh6+GETgn27Zfm9dxsThnX2Q=
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@ -958,9 +938,8 @@ golang.org/x/oauth2 v0.0.0-20201208152858-08078c50e5b5/go.mod h1:KelEdhl1UZF7XfJ
golang.org/x/oauth2 v0.0.0-20210218202405-ba52d332ba99/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210220000619-9bb904979d93/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210313182246-cd4f82c27b84/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c h1:SgVl/sCtkicsS7psKkje4H9YtjdEl3xsYh7N+5TDHqY=
golang.org/x/oauth2 v0.0.0-20210427180440-81ed05c6b58c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.27.0 h1:da9Vo7/tDv5RH/7nZDz1eMGS/q1Vv1N/7FCrBhI9I3M=
golang.org/x/oauth2 v0.27.0/go.mod h1:onh5ek6nERTohokkhCD/y2cV4Do3fxFHFuAejCkRWT8=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -972,8 +951,6 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.12.0 h1:MHc5BpPuC30uJk597Ri8TV3CNZcTLu6B6z4lJy+g6Jw=
golang.org/x/sync v0.12.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@ -1043,10 +1020,8 @@ golang.org/x/sys v0.0.0-20210309074719-68d13333faf2/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210315160823-c6e025ad8005/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324 h1:pAwJxDByZctfPwzlNGrDN2BQLsdPb9NkhoTJtUkAO28=
golang.org/x/sys v0.0.0-20210503080704-8803ae5d1324/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.31.0 h1:ioabZlmFYtWhL+TRYpcnNlLwhyxaM9kWTDEmfnprqik=
golang.org/x/sys v0.31.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210220032956-6a3ed077a48d/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
@ -1058,9 +1033,8 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6 h1:aRYxNxv6iGQlyVaZmk6ZgYEDa+Jg18DxebPSrd6bg1M=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.23.0 h1:D71I7dUrlY+VX0gQShAThNGHFxZ13dGLBHQLVl1mJlY=
golang.org/x/text v0.23.0/go.mod h1:/BLNzu4aZCJ1+kcD0DNRotWKage4q2rGVAg4o22unh4=
golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@ -1128,9 +1102,8 @@ golang.org/x/tools v0.0.0-20201201161351-ac6f37ff4c2a/go.mod h1:emZCQorbCU4vsT4f
golang.org/x/tools v0.0.0-20201208233053-a543418bbed2/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210105154028-b0ab187a4818/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d h1:vU5i/LfpvrRCpgM/VPfJLg5KjxD3E+hfT1SH+d9zLwg=
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@ -1259,8 +1232,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
google.golang.org/protobuf v1.27.1 h1:SnqbnDw1V7RiZcXPx5MEeqPv2s79L9i7BJUlG/+RurQ=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc/go.mod h1:m7x9LTH6d71AHyAX77c9yqWCCa3UKHcVEj9y7hAtKDk=
gopkg.in/asn1-ber.v1 v1.0.0-20181015200546-f715ec2f112d/go.mod h1:cuepJuh7vyXfUyUwEgHQXw849cJrilpS5NeIjOWESAw=
@ -1297,9 +1270,8 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200121175148-a6ecf24a6d71/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

View File

@ -1,8 +1,8 @@
package id
import (
"github.com/google/uuid"
"github.com/influxdata/chronograf"
"github.com/google/uuid"
)
var _ chronograf.ID = &UUID{}

View File

@ -162,7 +162,6 @@ func toPoint(anno *chronograf.Annotation, now time.Time) chronograf.Point {
"start_time": anno.StartTime.UnixNano(),
"modified_time_ns": int64(now.UnixNano()),
"text": anno.Text,
"color": anno.Color,
},
}
}
@ -303,9 +302,6 @@ func (r *influxResults) Annotations() (res []chronograf.Annotation, err error) {
if anno.ID, err = v.String(i); err != nil {
return
}
if colorIndex, found := columnIndex["color"]; found {
anno.Color, _ = v.String(colorIndex)
}
anno.Tags = chronograf.AnnotationTags{}

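The columnIndex lookup above reads the optional "color" column only when the annotation query result actually contains it, so rows written before the field existed still parse. A minimal, dependency-free sketch of that optional-column pattern (the column names and row shape here are illustrative, not the exact chronograf types):

package main

import "fmt"

// readOptional returns the string stored in the column named key, or "" when
// the column is missing from the result or holds a non-string value.
func readOptional(columnIndex map[string]int, row []interface{}, key string) string {
	i, found := columnIndex[key]
	if !found || i >= len(row) {
		return ""
	}
	s, _ := row[i].(string)
	return s
}

func main() {
	columnIndex := map[string]int{"time": 0, "text": 1, "color": 2}
	row := []interface{}{int64(0), "deploy finished", "red"}
	fmt.Println(readOptional(columnIndex, row, "color")) // "red"
	fmt.Println(readOptional(columnIndex, row, "owner")) // "" (column not present)
}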
View File

@ -40,7 +40,6 @@ func Test_toPoint(t *testing.T) {
"start_time": time.Time{}.UnixNano(),
"modified_time_ns": int64(time.Unix(0, 0).UnixNano()),
"text": "mytext",
"color": "",
},
},
},
@ -51,7 +50,6 @@ func Test_toPoint(t *testing.T) {
Text: "mytext",
StartTime: time.Unix(100, 0),
EndTime: time.Unix(200, 0),
Color: "red",
},
now: time.Unix(0, 0),
want: chronograf.Point{
@ -67,7 +65,6 @@ func Test_toPoint(t *testing.T) {
"start_time": time.Unix(100, 0).UnixNano(),
"modified_time_ns": int64(time.Unix(0, 0).UnixNano()),
"text": "mytext",
"color": "red",
},
},
},

View File

@ -197,7 +197,7 @@ func (c *Client) showRetentionPolicies(ctx context.Context, db string) ([]chrono
return nil, err
}
return results.RetentionPolicies(c.Logger), nil
return results.RetentionPolicies(), nil
}
func (c *Client) showMeasurements(ctx context.Context, db string, limit, offset int) ([]chronograf.Measurement, error) {

View File

@ -65,8 +65,7 @@ func (r *responseType) Error() string {
}
func (c *Client) query(u *url.URL, q chronograf.Query) (chronograf.Response, error) {
u = util.AppendPath(u, "/query")
u.Path = "query"
req, err := http.NewRequest("POST", u.String(), nil)
if err != nil {
return nil, err
@ -184,7 +183,7 @@ func (c *Client) validateAuthFlux(ctx context.Context, src *chronograf.Source) e
if err != nil {
return err
}
u = util.AppendPath(u, "/api/v2/query")
u.Path = "api/v2/query"
command := "buckets()"
req, err := http.NewRequest("POST", u.String(), strings.NewReader(command))
if err != nil {
@ -298,7 +297,7 @@ type pingResult struct {
}
func (c *Client) ping(u *url.URL) (string, string, error) {
u = util.AppendPath(u, "/ping")
u.Path = "ping"
req, err := http.NewRequest("GET", u.String(), nil)
if err != nil {
@ -341,8 +340,7 @@ func (c *Client) ping(u *url.URL) (string, string, error) {
} else if strings.Contains(version, "relay") {
return version, chronograf.InfluxRelay, nil
}
// Strip the 'v' prefix from the version; some older 1.x versions and also
// InfluxDB 2.2.0 return the version in the format vx.x.x
// older InfluxDB instances might have version 'v1.x.x'
if strings.HasPrefix(version, "v") {
version = version[1:]
}
@ -393,7 +391,7 @@ func (c *Client) writePoint(ctx context.Context, point *chronograf.Point) error
}
func (c *Client) write(ctx context.Context, u *url.URL, db, rp, lp string) error {
u = util.AppendPath(u, "/write")
u.Path = "write"
req, err := http.NewRequest("POST", u.String(), strings.NewReader(lp))
if err != nil {
return err

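The query, ping, and write helpers above differ in how they build the request URL: one side overwrites u.Path outright, the other appends to whatever path the configured source URL already carries, which matters when InfluxDB sits behind a reverse proxy under a context path such as /ctx. A standard-library-only sketch of the distinction (the real util.AppendPath may differ in detail):

package main

import (
	"fmt"
	"net/url"
	"path"
)

// appendPath joins p onto the URL's existing path instead of replacing it,
// so a base URL like http://host/ctx yields http://host/ctx/query.
func appendPath(u *url.URL, p string) *url.URL {
	c := *u // copy so the caller's URL is left untouched
	c.Path = path.Join(c.Path, p)
	return &c
}

func main() {
	base, _ := url.Parse("http://influxdb.example.com/ctx")

	overwritten := *base
	overwritten.Path = "query"        // overwriting drops the /ctx prefix
	fmt.Println(overwritten.String()) // http://influxdb.example.com/query

	fmt.Println(appendPath(base, "/query").String()) // http://influxdb.example.com/ctx/query
}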
View File

@ -4,7 +4,6 @@ import (
"context"
"encoding/base64"
"fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
"net/url"
@ -540,11 +539,14 @@ func TestClient_write(t *testing.T) {
func Test_Influx_ValidateAuth_V1(t *testing.T) {
t.Parallel()
calledPath := ""
called := false
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
rw.WriteHeader(http.StatusUnauthorized)
rw.Write([]byte(`{"error":"v1authfailed"}`))
calledPath = r.URL.Path
called = true
if path := r.URL.Path; path != "/query" {
t.Error("Expected the path to contain `/query` but was: ", path)
}
expectedAuth := "Basic " + base64.StdEncoding.EncodeToString(([]byte)("my-user:my-pwd"))
if auth := r.Header.Get("Authorization"); auth != expectedAuth {
t.Errorf("Expected Authorization '%v' but was: %v", expectedAuth, auth)
@ -552,212 +554,66 @@ func Test_Influx_ValidateAuth_V1(t *testing.T) {
}))
defer ts.Close()
for _, urlContext := range []string{"", "/ctx"} {
client, err := NewClient(ts.URL+urlContext, log.New(log.DebugLevel))
if err != nil {
t.Fatal("Unexpected error initializing client: err:", err)
}
source := &chronograf.Source{
URL: ts.URL + urlContext,
Username: "my-user",
Password: "my-pwd",
}
client, err := NewClient(ts.URL, log.New(log.DebugLevel))
if err != nil {
t.Fatal("Unexpected error initializing client: err:", err)
}
client.Connect(context.Background(), source)
err = client.ValidateAuth(context.Background(), &chronograf.Source{})
if err == nil {
t.Fatal("Expected error but nil")
}
if !strings.Contains(err.Error(), "v1authfailed") {
t.Errorf("Expected client error '%v' to contain server-sent error message", err)
}
if calledPath != urlContext+"/query" {
t.Errorf("Path received: %v, want: %v ", calledPath, urlContext+"/query")
}
source := &chronograf.Source{
URL: ts.URL,
Username: "my-user",
Password: "my-pwd",
}
client.Connect(context.Background(), source)
err = client.ValidateAuth(context.Background(), &chronograf.Source{})
if err == nil {
t.Fatal("Expected error but nil")
}
if !strings.Contains(err.Error(), "v1authfailed") {
t.Errorf("Expected client error '%v' to contain server-sent error message", err)
}
if called == false {
t.Error("Expected http request to InfluxDB but there was none")
}
}
func Test_Influx_ValidateAuth_V2(t *testing.T) {
t.Parallel()
calledPath := ""
called := false
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
rw.WriteHeader(http.StatusUnauthorized)
rw.Write([]byte(`{"message":"v2authfailed"}`))
calledPath = r.URL.Path
called = true
if auth := r.Header.Get("Authorization"); auth != "Token my-token" {
t.Error("Expected Authorization 'Token my-token' but was: ", auth)
}
if path := r.URL.Path; !strings.HasSuffix(path, "/api/v2/query") {
if path := r.URL.Path; path != "/api/v2/query" {
t.Error("Expected the path to contain `api/v2/query` but was: ", path)
}
}))
defer ts.Close()
for _, urlContext := range []string{"", "/ctx"} {
calledPath = ""
client, err := NewClient(ts.URL+urlContext, log.New(log.DebugLevel))
if err != nil {
t.Fatal("Unexpected error initializing client: err:", err)
}
source := &chronograf.Source{
URL: ts.URL + urlContext,
Type: chronograf.InfluxDBv2,
Username: "my-org",
Password: "my-token",
}
client.Connect(context.Background(), source)
err = client.ValidateAuth(context.Background(), source)
if err == nil {
t.Fatal("Expected error but nil")
}
if !strings.Contains(err.Error(), "v2authfailed") {
t.Errorf("Expected client error '%v' to contain server-sent error message", err)
}
if calledPath != urlContext+"/api/v2/query" {
t.Errorf("Path received: %v, want: %v ", calledPath, urlContext+"/api/v2/query")
}
}
}
func Test_Influx_Version(t *testing.T) {
t.Parallel()
calledPath := ""
serverVersion := ""
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
rw.Header().Add("X-Influxdb-Version", serverVersion)
rw.WriteHeader(http.StatusNoContent)
calledPath = r.URL.Path
}))
defer ts.Close()
for _, urlContext := range []string{"", "/ctx"} {
calledPath = ""
client, err := NewClient(ts.URL+urlContext, log.New(log.DebugLevel))
if err != nil {
t.Fatal("Unexpected error initializing client: err:", err)
}
source := &chronograf.Source{
URL: ts.URL + urlContext,
Type: chronograf.InfluxDBv2,
Username: "my-org",
Password: "my-token",
}
client.Connect(context.Background(), source)
versions := []struct {
server string
expected string
}{
{
server: "1.8.3",
expected: "1.8.3",
},
{
server: "v2.2.0",
expected: "2.2.0",
},
}
for _, testPair := range versions {
serverVersion = testPair.server
version, err := client.Version(context.Background())
if err != nil {
t.Fatalf("No error expected, but received: %v", err)
}
if version != testPair.expected {
t.Errorf("Version received: %v, want: %v ", version, testPair.expected)
}
if calledPath != urlContext+"/ping" {
t.Errorf("Path received: %v, want: %v ", calledPath, urlContext+"/ping")
}
}
}
}
func Test_Write(t *testing.T) {
t.Parallel()
calledPath := ""
data := ""
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
calledPath = r.URL.Path
content, _ := ioutil.ReadAll(r.Body)
data = string(content)
rw.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
for _, urlContext := range []string{"", "/ctx"} {
calledPath = ""
client, err := NewClient(ts.URL+urlContext, log.New(log.DebugLevel))
if err != nil {
t.Fatal("Unexpected error initializing client: err:", err)
}
source := &chronograf.Source{
URL: ts.URL + urlContext,
Type: chronograf.InfluxDBv2,
Username: "my-org",
Password: "my-token",
}
client.Connect(context.Background(), source)
err = client.Write(context.Background(), []chronograf.Point{
{
Database: "mydb",
RetentionPolicy: "default",
Measurement: "temperature",
Fields: map[string]interface{}{
"v": true,
},
},
})
if err != nil {
t.Fatalf("No error expected, but received: %v", err)
}
expectedLine := "temperature v=true"
if data != expectedLine {
t.Errorf("Data received: %v, want: %v ", data, expectedLine)
}
if calledPath != urlContext+"/write" {
t.Errorf("Path received: %v, want: %v ", calledPath, urlContext+"/write")
}
}
}
func Test_Query(t *testing.T) {
t.Parallel()
calledPath := ""
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
calledPath = r.URL.Path
rw.WriteHeader(http.StatusOK)
rw.Write([]byte(`{"message":"hi"}`))
}))
defer ts.Close()
for _, urlContext := range []string{"", "/ctx"} {
calledPath = ""
client, err := NewClient(ts.URL+urlContext, log.New(log.DebugLevel))
if err != nil {
t.Fatal("Unexpected error initializing client: err:", err)
}
source := &chronograf.Source{
URL: ts.URL + urlContext,
Type: chronograf.InfluxDBv2,
Username: "my-org",
Password: "my-token",
}
client.Connect(context.Background(), source)
_, err = client.Query(context.Background(), chronograf.Query{
DB: "mydb",
RP: "default",
Command: "show databases",
})
if err != nil {
t.Fatalf("No error expected, but received: %v", err)
}
if calledPath != urlContext+"/query" {
t.Errorf("Path received: %v, want: %v ", calledPath, urlContext+"/query")
}
client, err := NewClient(ts.URL, log.New(log.DebugLevel))
if err != nil {
t.Fatal("Unexpected error initializing client: err:", err)
}
source := &chronograf.Source{
URL: ts.URL,
Type: chronograf.InfluxDBv2,
Username: "my-org",
Password: "my-token",
}
client.Connect(context.Background(), source)
err = client.ValidateAuth(context.Background(), source)
if err == nil {
t.Fatal("Expected error but nil")
}
if !strings.Contains(err.Error(), "v2authfailed") {
t.Errorf("Expected client error '%v' to contain server-sent error message", err)
}
if called == false {
t.Error("Expected http request to InfluxDB but there was none")
}
}

View File

@ -45,7 +45,7 @@ func (c *Client) Permissions(context.Context) chronograf.Permissions {
// showResults is used to deserialize InfluxQL SHOW commands
type showResults []struct {
Series []struct {
Values []value `json:"values"`
Values [][]interface{} `json:"values"`
} `json:"series"`
}
@ -93,72 +93,37 @@ func (r *showResults) Databases() []chronograf.Database {
return res
}
func (r *showResults) RetentionPolicies(logger chronograf.Logger) []chronograf.RetentionPolicy {
var res []chronograf.RetentionPolicy
func (r *showResults) RetentionPolicies() []chronograf.RetentionPolicy {
res := []chronograf.RetentionPolicy{}
for _, u := range *r {
for _, s := range u.Series {
for _, v := range s.Values {
rp, err := parseRetentionPolicy(v)
if err != nil {
if logger != nil {
types := make([]string, len(v))
for i, val := range v {
types[i] = fmt.Sprintf("%T", val)
}
logger.
WithField("values", fmt.Sprintf("%v", v)).
WithField("types", fmt.Sprintf("%v", types)).
WithField("error", err.Error()).
Error("Unsupported retention policy format")
}
if name, ok := v[0].(string); !ok {
continue
} else if duration, ok := v[1].(string); !ok {
continue
} else if sduration, ok := v[2].(string); !ok {
continue
} else if replication, ok := v[3].(float64); !ok {
continue
} else if def, ok := v[4].(bool); !ok {
continue
} else {
d := chronograf.RetentionPolicy{
Name: name,
Duration: duration,
ShardDuration: sduration,
Replication: int32(replication),
Default: def,
}
res = append(res, d)
}
res = append(res, rp)
}
}
}
return res
}
// parseRetentionPolicy validates and parses a retention policy row
func parseRetentionPolicy(v []interface{}) (chronograf.RetentionPolicy, error) {
columns := len(v)
if columns < 5 {
return chronograf.RetentionPolicy{}, fmt.Errorf("insufficient columns: expected at least 5, got %d", columns)
} else if name, ok := v[0].(string); !ok {
return chronograf.RetentionPolicy{}, fmt.Errorf("column 0 (name) is not a string")
} else if duration, ok := v[1].(string); !ok {
return chronograf.RetentionPolicy{}, fmt.Errorf("column 1 (duration) is not a string")
} else if sduration, ok := v[2].(string); !ok {
return chronograf.RetentionPolicy{}, fmt.Errorf("column 2 (shardDuration) is not a string")
} else if replication, ok := v[3].(float64); !ok {
return chronograf.RetentionPolicy{}, fmt.Errorf("column 3 (replication) is not a float64")
} else {
var def bool
if columns == 5 {
// 5-column format: [name, duration, shardGroupDuration, replicaN, default]
if def, ok = v[4].(bool); !ok {
return chronograf.RetentionPolicy{}, fmt.Errorf("column 4 (default) is not a bool")
}
} else if columns == 7 {
// 7-column format: [name, duration, shardGroupDuration, replicaN, futureWriteLimit, pastWriteLimit, default]
if def, ok = v[6].(bool); !ok {
return chronograf.RetentionPolicy{}, fmt.Errorf("column 6 (default) is not a bool")
}
} else {
return chronograf.RetentionPolicy{}, fmt.Errorf("unexpected number of columns: %d", columns)
}
return chronograf.RetentionPolicy{
Name: name,
Duration: duration,
ShardDuration: sduration,
Replication: int32(replication),
Default: def,
}, nil
}
}
// Measurements converts SHOW MEASUREMENTS to chronograf Measurement
func (r *showResults) Measurements() []chronograf.Measurement {
res := []chronograf.Measurement{}

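The retention-policy parsing shown above accepts either the 5-column or the 7-column row layout that SHOW RETENTION POLICIES can return, with the default flag always in the last position. A self-contained sketch of that dispatch, using illustrative local types rather than the chronograf ones:

package main

import "fmt"

type retentionPolicy struct {
	Name          string
	Duration      string
	ShardDuration string
	Replication   int32
	Default       bool
}

// parseRP mirrors the column handling above: columns 0-3 are fixed, the
// default flag sits at index 4 (5-column rows) or index 6 (7-column rows),
// and any other width is rejected.
func parseRP(row []interface{}) (retentionPolicy, error) {
	if len(row) != 5 && len(row) != 7 {
		return retentionPolicy{}, fmt.Errorf("unexpected number of columns: %d", len(row))
	}
	name, ok := row[0].(string)
	if !ok {
		return retentionPolicy{}, fmt.Errorf("column 0 (name) is not a string")
	}
	duration, ok := row[1].(string)
	if !ok {
		return retentionPolicy{}, fmt.Errorf("column 1 (duration) is not a string")
	}
	shard, ok := row[2].(string)
	if !ok {
		return retentionPolicy{}, fmt.Errorf("column 2 (shardDuration) is not a string")
	}
	repl, ok := row[3].(float64)
	if !ok {
		return retentionPolicy{}, fmt.Errorf("column 3 (replication) is not a float64")
	}
	def, ok := row[len(row)-1].(bool)
	if !ok {
		return retentionPolicy{}, fmt.Errorf("last column (default) is not a bool")
	}
	return retentionPolicy{name, duration, shard, int32(repl), def}, nil
}

func main() {
	five := []interface{}{"autogen", "0s", "168h0m0s", float64(1), true}
	seven := []interface{}{"autogen", "0s", "168h0m0s", float64(1), "0s", "0s", true}
	fmt.Println(parseRP(five))
	fmt.Println(parseRP(seven))
}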
View File

@ -2,8 +2,6 @@ package influx
import (
"encoding/json"
"fmt"
"io"
"reflect"
"testing"
@ -304,349 +302,6 @@ func TestToRevoke(t *testing.T) {
}
}
// mockLoggerWithError is a simple logger that captures error messages passed via WithField
type mockLoggerWithError struct {
errorMsg string
fields map[string]interface{}
}
func (m *mockLoggerWithError) Debug(...interface{}) {}
func (m *mockLoggerWithError) Info(...interface{}) {}
func (m *mockLoggerWithError) Error(_ ...interface{}) {}
func (m *mockLoggerWithError) WithField(key string, value interface{}) chronograf.Logger {
if m.fields == nil {
m.fields = make(map[string]interface{})
}
m.fields[key] = value
if key == "error" {
m.errorMsg = fmt.Sprintf("%v", value)
}
return m
}
func (m *mockLoggerWithError) Writer() *io.PipeWriter {
_, w := io.Pipe()
return w
}
func TestRetentionPolicies(t *testing.T) {
tests := []struct {
name string
input showResults
expected []chronograf.RetentionPolicy
expectedErr string
}{
{
name: "5-column format with two retention policies",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{
"autogen", // name
"2160h0m0s", // duration
"168h0m0s", // shardGroupDuration
float64(3), // replicaN
true, // default
},
{
"quarterly", // name
"1560h0m0s", // duration
"24h0m0s", // shardGroupDuration
float64(1), // replicaN
false, // default
},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{
{
Name: "autogen",
Duration: "2160h0m0s",
ShardDuration: "168h0m0s",
Replication: 3,
Default: true,
},
{
Name: "quarterly",
Duration: "1560h0m0s",
ShardDuration: "24h0m0s",
Replication: 1,
Default: false,
},
},
},
{
name: "7-column format with two retention policies",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{
"autogen", // name
"2160h0m0s", // duration
"168h0m0s", // shardGroupDuration
float64(3), // replicaN
"0s", // futureWriteLimit
"0s", // pastWriteLimit
true, // default
},
{
"quarterly", // name
"1560h0m0s", // duration
"24h0m0s", // shardGroupDuration
float64(1), // replicaN
"1h", // futureWriteLimit
"30m", // pastWriteLimit
false, // default
},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{
{
Name: "autogen",
Duration: "2160h0m0s",
ShardDuration: "168h0m0s",
Replication: 3,
Default: true,
},
{
Name: "quarterly",
Duration: "1560h0m0s",
ShardDuration: "24h0m0s",
Replication: 1,
Default: false,
},
},
},
{
name: "empty input",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
},
{
name: "insufficient columns (3 columns)",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{"autogen", "2160h0m0s", "168h0m0s"}, // Only 3 columns
},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
expectedErr: "insufficient columns: expected at least 5, got 3",
},
{
name: "wrong type for name (int instead of string)",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{123, "2160h0m0s", "168h0m0s", float64(3), true},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
expectedErr: "column 0 (name) is not a string",
},
{
name: "wrong type for duration (int instead of string)",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{"autogen", 2160, "168h0m0s", float64(3), true},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
expectedErr: "column 1 (duration) is not a string",
},
{
name: "wrong type for replication (string instead of float64)",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{"autogen", "2160h0m0s", "168h0m0s", "3", true},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
expectedErr: "column 3 (replication) is not a float64",
},
{
name: "wrong type for default in 5-column format (string instead of bool)",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{"autogen", "2160h0m0s", "168h0m0s", float64(3), "true"},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
expectedErr: "column 4 (default) is not a bool",
},
{
name: "wrong type for default in 7-column format (string instead of bool)",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{"autogen", "2160h0m0s", "168h0m0s", float64(3), "0s", "0s", "true"},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
expectedErr: "column 6 (default) is not a bool",
},
{
name: "invalid column count (6 columns)",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{"autogen", "2160h0m0s", "168h0m0s", float64(3), "0s", true},
},
},
},
},
},
expected: []chronograf.RetentionPolicy{},
expectedErr: "unexpected number of columns: 6",
},
{
name: "mixed valid and invalid entries",
input: showResults{
{
Series: []struct {
Values []value `json:"values"`
}{
{
Values: []value{
{"autogen", "2160h0m0s", "168h0m0s", float64(3), true}, // valid
{"invalid", "2160h0m0s", "168h0m0s"}, // insufficient columns
{"quarterly", "1560h0m0s", "24h0m0s", float64(1), false}, // valid
},
},
},
},
},
expected: []chronograf.RetentionPolicy{
{
Name: "autogen",
Duration: "2160h0m0s",
ShardDuration: "168h0m0s",
Replication: 3,
Default: true,
},
{
Name: "quarterly",
Duration: "1560h0m0s",
ShardDuration: "24h0m0s",
Replication: 1,
Default: false,
},
},
expectedErr: "insufficient columns: expected at least 5, got 3",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Use mock logger to capture error messages
logger := &mockLoggerWithError{}
result := tt.input.RetentionPolicies(logger)
// Check the returned policies match expected
if !equalRetentionPolicies(result, tt.expected) {
t.Errorf("RetentionPolicies() = %v, want %v", result, tt.expected)
}
// Check the error message if one is expected
if tt.expectedErr != "" {
if logger.errorMsg != tt.expectedErr {
t.Errorf("RetentionPolicies() error = %v, want %v", logger.errorMsg, tt.expectedErr)
}
} else if logger.errorMsg != "" {
t.Errorf("RetentionPolicies() unexpected error = %v", logger.errorMsg)
}
})
}
}
// equalRetentionPolicies compares two slices of RetentionPolicy for equality
func equalRetentionPolicies(a, b []chronograf.RetentionPolicy) bool {
if len(a) != len(b) {
return false
}
for i := range a {
if a[i].Name != b[i].Name ||
a[i].Duration != b[i].Duration ||
a[i].ShardDuration != b[i].ShardDuration ||
a[i].Replication != b[i].Replication ||
a[i].Default != b[i].Default {
return false
}
}
return true
}
func Test_showResults_Users(t *testing.T) {
t.Parallel()
tests := []struct {

View File

@ -4,7 +4,6 @@ import (
"context"
"encoding/json"
"fmt"
"sort"
"github.com/influxdata/chronograf"
)
@ -124,9 +123,6 @@ func (c *Client) All(ctx context.Context) ([]chronograf.User, error) {
user.Permissions = append(user.Permissions, perms...)
users[i] = user
}
sort.Slice(users, func(i, j int) bool {
return users[i].Name < users[j].Name
})
return users, nil
}

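The lines above that sort the user list by name make the order returned from All() deterministic regardless of how the SHOW USERS response happened to be ordered. A tiny sketch of the same sort.Slice call on a placeholder user type:

package main

import (
	"fmt"
	"sort"
)

type user struct{ Name string }

func main() {
	users := []user{{"docbrown"}, {"admin"}, {"reader"}}
	// Sort ascending by name so callers see a stable order across requests.
	sort.Slice(users, func(i, j int) bool { return users[i].Name < users[j].Name })
	fmt.Println(users) // [{admin} {docbrown} {reader}]
}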
View File

@ -590,7 +590,7 @@ func TestClient_Num(t *testing.T) {
{
name: "All Users",
statusUsers: http.StatusOK,
showUsers: []byte(`{"results":[{"series":[{"columns":["user","admin"],"values":[["docbrown",true],["admin",true],["reader",false]]}]}]}`),
showUsers: []byte(`{"results":[{"series":[{"columns":["user","admin"],"values":[["admin",true],["docbrown",true],["reader",false]]}]}]}`),
statusGrants: http.StatusOK,
showGrants: []byte(`{"results":[{"series":[{"columns":["database","privilege"],"values":[["mydb","ALL PRIVILEGES"]]}]}]}`),
args: args{

View File

@ -2069,7 +2069,7 @@ trigger
Trigger: "threshold",
AlertNodes: chronograf.AlertNodes{
IsStateChangesOnly: true,
IsNoRecoveries: true,
IsNoRecoveries: true,
Slack: []*chronograf.Slack{
{},
},

View File

@ -273,7 +273,7 @@ func TestOrganizationsStore_Update(t *testing.T) {
addFirst: true,
},
{
name: "Update organization default role to viewer",
name: "Update organization default role",
fields: fields{},
args: args{
ctx: context.Background(),
@ -291,25 +291,7 @@ func TestOrganizationsStore_Update(t *testing.T) {
addFirst: true,
},
{
name: "Update organization default role to reader",
fields: fields{},
args: args{
ctx: context.Background(),
initial: &chronograf.Organization{
Name: "The Good Place",
},
updates: &chronograf.Organization{
DefaultRole: roles.ReaderRoleName,
},
},
want: &chronograf.Organization{
Name: "The Good Place",
DefaultRole: roles.ReaderRoleName,
},
addFirst: true,
},
{
name: "Update organization name and default role to viewer",
name: "Update organization name and default role",
fields: fields{},
args: args{
ctx: context.Background(),
@ -328,26 +310,6 @@ func TestOrganizationsStore_Update(t *testing.T) {
},
addFirst: true,
},
{
name: "Update organization name and default role to reader",
fields: fields{},
args: args{
ctx: context.Background(),
initial: &chronograf.Organization{
Name: "The Good Place",
DefaultRole: roles.AdminRoleName,
},
updates: &chronograf.Organization{
Name: "The Bad Place",
DefaultRole: roles.ReaderRoleName,
},
},
want: &chronograf.Organization{
Name: "The Bad Place",
DefaultRole: roles.ReaderRoleName,
},
addFirst: true,
},
{
name: "Update organization name, role",
fields: fields{},
@ -443,7 +405,7 @@ func TestOrganizationsStore_Update(t *testing.T) {
}
if tt.addFirst {
tt.args.initial, _ = s.Add(tt.args.ctx, tt.args.initial)
tt.args.initial, err = s.Add(tt.args.ctx, tt.args.initial)
}
if tt.args.updates.Name != "" {

View File

@ -32,7 +32,7 @@ func Test_CodeExchangeCSRF_AuthCodeURL(t *testing.T) {
ProviderURL: "http://localhost:1234",
Orgs: "",
}
authMux := NewAuthMux(mp, auth, mt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hello", nil, nil, "")
authMux := NewAuthMux(mp, auth, mt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hello", nil, nil)
// create AuthCodeURL with code exchange without PKCE
codeExchange := NewCodeExchange(false, "")
@ -95,7 +95,7 @@ func Test_CodeExchangeCSRF_ExchangeCodeForToken(t *testing.T) {
ProviderURL: authServer.URL,
Orgs: "",
}
authMux := NewAuthMux(mp, auth, auth.Tokens, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hi", nil, nil, "")
authMux := NewAuthMux(mp, auth, auth.Tokens, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hi", nil, nil)
// create AuthCodeURL using CodeExchange with PKCE
codeExchange := simpleTokenExchange
@ -136,7 +136,7 @@ func Test_CodeExchangePKCE_AuthCodeURL(t *testing.T) {
ProviderURL: "http://localhost:1234",
Orgs: "",
}
authMux := NewAuthMux(mp, auth, mt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hi", nil, nil, "")
authMux := NewAuthMux(mp, auth, mt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hi", nil, nil)
// create AuthCodeURL using CodeExchange with PKCE
codeExchange := NewCodeExchange(true, "secret")
@ -213,7 +213,7 @@ func Test_CodeExchangePKCE_ExchangeCodeForToken(t *testing.T) {
ProviderURL: authServer.URL,
Orgs: "",
}
authMux := NewAuthMux(mp, auth, jwt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hi", nil, nil, "")
authMux := NewAuthMux(mp, auth, jwt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "hi", nil, nil)
// create AuthCodeURL using CodeExchange with PKCE
codeExchange := CodeExchangePKCE{Secret: secret}

View File

@ -3,49 +3,49 @@
//
// This is how the pieces of this package fit together:
//
// ┌────────────────────────────────────────┐
// │github.com/influxdata/chronograf/oauth2 │
// ├────────────────────────────────────────┴────────────────────────────────────┐
// │┌────────────────────┐ │
// ││ <<interface>> │ ┌─────────────────────────┐ │
// ││ Authenticator │ │ AuthMux │ │
// │├────────────────────┤ ├─────────────────────────┤ │
// ││Authorize() │ Auth │+SuccessURL : string │ │
// ││Validate() ◀────────│+FailureURL : string │──────────┐ │
// ||Expire() | |+Now : func() time.Time | | |
// │└──────────△─────────┘ └─────────────────────────┘ | |
// │ │ │ │ |
// │ │ │ │ │
// │ │ │ │ │
// │ │ Provider│ │ │
// │ │ ┌───┘ │ │
// │┌──────────┴────────────┐ │ ▽ │
// ││ Tokenizer │ │ ┌───────────────┐ │
// │├───────────────────────┤ ▼ │ <<interface>> │ │
// ││Create() │ ┌───────────────┐ │ OAuth2Mux │ │
// ││ValidPrincipal() │ │ <<interface>> │ ├───────────────┤ │
// │└───────────────────────┘ │ Provider │ │Login() │ │
// │ ├───────────────┤ │Logout() │ │
// │ │ID() │ │Callback() │ │
// │ │Scopes() │ └───────────────┘ │
// │ │Secret() │ │
// │ │Authenticator()│ │
// │ └───────────────┘ │
// │ △ │
// │ │ │
// │ ┌─────────────────────────┼─────────────────────────┐ │
// │ │ │ │ │
// │ │ │ │ │
// │ │ │ │ │
// │ ┌───────────────────────┐ ┌──────────────────────┐ ┌──────────────────────┐│
// │ │ Github │ │ Google │ │ Heroku ││
// │ ├───────────────────────┤ ├──────────────────────┤ ├──────────────────────┤│
// │ │+ClientID : string │ │+ClientID : string │ │+ClientID : string ││
// │ │+ClientSecret : string │ │+ClientSecret : string│ │+ClientSecret : string││
// │ │+Orgs : []string │ │+Domains : []string │ └──────────────────────┘│
// │ └───────────────────────┘ │+RedirectURL : string │ │
// │ └──────────────────────┘ │
// └─────────────────────────────────────────────────────────────────────────────┘
// ┌────────────────────────────────────────┐
// │github.com/influxdata/chronograf/oauth2 │
// ├────────────────────────────────────────┴────────────────────────────────────┐
// │┌────────────────────┐ │
// ││ <<interface>> │ ┌─────────────────────────┐ │
// ││ Authenticator │ │ AuthMux │ │
// │├────────────────────┤ ├─────────────────────────┤ │
// ││Authorize() │ Auth │+SuccessURL : string │ │
// ││Validate() ◀────────│+FailureURL : string │──────────┐ │
// ||Expire() | |+Now : func() time.Time | | |
// │└──────────△─────────┘ └─────────────────────────┘ | |
// │ │ │ │ |
// │ │ │ │ │
// │ │ │ │ │
// │ │ Provider│ │ │
// │ │ ┌───┘ │ │
// │┌──────────┴────────────┐ │ ▽ │
// ││ Tokenizer │ │ ┌───────────────┐ │
// │├───────────────────────┤ ▼ │ <<interface>> │ │
// ││Create() │ ┌───────────────┐ │ OAuth2Mux │ │
// ││ValidPrincipal() │ │ <<interface>> │ ├───────────────┤ │
// │└───────────────────────┘ │ Provider │ │Login() │ │
// │ ├───────────────┤ │Logout() │ │
// │ │ID() │ │Callback() │ │
// │ │Scopes() │ └───────────────┘ │
// │ │Secret() │ │
// │ │Authenticator()│ │
// │ └───────────────┘ │
// │ △ │
// │ │ │
// │ ┌─────────────────────────┼─────────────────────────┐ │
// │ │ │ │ │
// │ │ │ │ │
// │ │ │ │ │
// │ ┌───────────────────────┐ ┌──────────────────────┐ ┌──────────────────────┐│
// │ │ Github │ │ Google │ │ Heroku ││
// │ ├───────────────────────┤ ├──────────────────────┤ ├──────────────────────┤│
// │ │+ClientID : string │ │+ClientID : string │ │+ClientID : string ││
// │ │+ClientSecret : string │ │+ClientSecret : string│ │+ClientSecret : string││
// │ │+Orgs : []string │ │+Domains : []string │ └──────────────────────┘│
// │ └───────────────────────┘ │+RedirectURL : string │ │
// │ └──────────────────────┘ │
// └─────────────────────────────────────────────────────────────────────────────┘
//
// The design focuses on an Authenticator, a Provider, and an OAuth2Mux. Their
// responsibilities, respectively, are to decode and encode secrets received
@ -57,36 +57,36 @@
// The Oauth2 flow between a browser, backend, and a Provider that this package
// implements is pictured below for reference.
//
// ┌─────────┐ ┌───────────┐ ┌────────┐
// │ Browser │ │Chronograf │ │Provider│
// └─────────┘ └───────────┘ └────────┘
// │ │ │
// ├─────── GET /auth ─────────▶ │
// │ │ │
// │ │ │
// ◀ ─ ─ ─302 to Provider ─ ─ ┤ │
// │ │ │
// │ │ │
// ├──────────────── GET /auth w/ callback ─────────────────────▶
// │ │ │
// │ │ │
// ◀─ ─ ─ ─ ─ ─ ─ 302 to Chronograf Callback ─ ─ ─ ─ ─ ─ ─ ─ ┤
// │ │ │
// │ Code and State from │ │
// │ Provider │ │
// ├───────────────────────────▶ Request token w/ code & │
// │ │ state │
// │ ├────────────────────────────────▶
// │ │ │
// │ │ Response with │
// │ │ Token │
// │ Set cookie, Redirect │◀ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┤
// │ to / │ │
// ◀───────────────────────────┤ │
// │ │ │
// │ │ │
// │ │ │
// │ │ │
// ┌─────────┐ ┌───────────┐ ┌────────┐
// │ Browser │ │Chronograf │ │Provider│
// └─────────┘ └───────────┘ └────────┘
// │ │ │
// ├─────── GET /auth ─────────▶ │
// │ │ │
// │ │ │
// ◀ ─ ─ ─302 to Provider ─ ─ ┤ │
// │ │ │
// │ │ │
// ├──────────────── GET /auth w/ callback ─────────────────────▶
// │ │ │
// │ │ │
// ◀─ ─ ─ ─ ─ ─ ─ 302 to Chronograf Callback ─ ─ ─ ─ ─ ─ ─ ─ ┤
// │ │ │
// │ Code and State from │ │
// │ Provider │ │
// ├───────────────────────────▶ Request token w/ code & │
// │ │ state │
// │ ├────────────────────────────────▶
// │ │ │
// │ │ Response with │
// │ │ Token │
// │ Set cookie, Redirect │◀ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ┤
// │ to / │ │
// ◀───────────────────────────┤ │
// │ │ │
// │ │ │
// │ │ │
// │ │ │
//
// The browser ultimately receives a cookie from Chronograf, authorizing it.
// Its contents are encoded as a JWT whose "sub" claim is the user's email
@ -102,14 +102,14 @@
// convention to ensure compatibility with the front end logic. These routes
// and their responsibilities are:
//
// /oauth/{provider}/login
// /oauth/{provider}/login
//
// The `/oauth` endpoint redirects to the Provider for OAuth. Chronograf sets
// the OAuth `state` request parameter to a JWT with a random "sub". Using
// $TOKEN_SECRET `/oauth/github/callback` can validate the `state` parameter
// without needing `state` to be saved.
//
// /oauth/{provider}/callback
// /oauth/{provider}/callback
//
// The `/oauth/github/callback` receives the OAuth `authorization code` and `state`.
//
@ -135,7 +135,7 @@
// of the JWT within the cookie value.
// If the request did not have a valid JWT, the API returns `HTTP/1.1 401 Unauthorized`.
//
// /oauth/{provider}/logout
// /oauth/{provider}/logout
//
// Simply expires the session cookie and redirects to `/`.
package oauth2

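The package comment above describes signing the OAuth2 state parameter as a JWT with a random "sub", so the callback can verify it with $TOKEN_SECRET instead of storing state server-side. A rough sketch of that round trip with github.com/golang-jwt/jwt/v4 (the claim names, lifetime, and secret below are illustrative, not the exact values Chronograf uses):

package main

import (
	"fmt"
	"time"

	gojwt "github.com/golang-jwt/jwt/v4"
)

var tokenSecret = []byte("token-secret") // stands in for $TOKEN_SECRET

// newState signs a short-lived JWT whose subject is a random value; the
// resulting string is sent to the provider as the `state` request parameter.
func newState(sub string) (string, error) {
	claims := gojwt.RegisteredClaims{
		Subject:   sub,
		IssuedAt:  gojwt.NewNumericDate(time.Now()),
		ExpiresAt: gojwt.NewNumericDate(time.Now().Add(10 * time.Minute)),
	}
	return gojwt.NewWithClaims(gojwt.SigningMethodHS256, claims).SignedString(tokenSecret)
}

// checkState verifies the signature and expiry of the `state` echoed back by
// the provider, so nothing has to be remembered between the two redirects.
func checkState(state string) error {
	_, err := gojwt.ParseWithClaims(state, &gojwt.RegisteredClaims{}, func(*gojwt.Token) (interface{}, error) {
		return tokenSecret, nil
	})
	return err
}

func main() {
	s, _ := newState("random-subject")
	fmt.Println(checkState(s)) // <nil>
}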
View File

@ -6,7 +6,7 @@ import (
"time"
gojwt "github.com/golang-jwt/jwt/v4"
"github.com/lestrrat-go/jwx/v2/jwk"
"github.com/lestrrat-go/jwx/jwk"
)
// Ensure JWT conforms to the Tokenizer interface
@ -120,7 +120,7 @@ func (j *JWT) KeyFuncRS256(token *gojwt.Token) (interface{}, error) {
return nil, fmt.Errorf("JWKSURL not specified, cannot validate RS256 signature")
}
set, err := jwk.Fetch(context.TODO(), j.Jwksurl)
set, err := jwk.Fetch(j.Jwksurl)
if err != nil {
return nil, err
}
@ -130,17 +130,17 @@ func (j *JWT) KeyFuncRS256(token *gojwt.Token) (interface{}, error) {
return nil, fmt.Errorf("could not convert JWT header kid to string")
}
key, ok := set.LookupKeyID(kid)
if !ok {
keys := set.LookupKeyID(kid)
if len(keys) == 0 {
return nil, fmt.Errorf("no JWK found with kid %s", kid)
}
var rawkey interface{}
if err := key.Raw(&rawkey); err != nil {
key, err := keys[0].Materialize()
if err != nil {
return nil, fmt.Errorf("failed to read JWK public key: %s", err)
}
return rawkey, nil
return key, nil
}
// ValidClaims validates a token with StandardClaims

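Both variants of KeyFuncRS256 above resolve the RS256 verification key by the token's kid from a remote JWKS document; they differ only in the jwx API generation (jwk.Fetch(ctx, url) plus key.Raw on one side, jwk.Fetch(url) plus Materialize on the other). For reference, a dependency-free sketch of what that lookup amounts to, assuming an RSA JWKS with base64url-encoded n and e members:

package main

import (
	"crypto/rsa"
	"encoding/base64"
	"encoding/json"
	"fmt"
	"math/big"
)

type jwks struct {
	Keys []struct {
		Kid string `json:"kid"`
		N   string `json:"n"`
		E   string `json:"e"`
	} `json:"keys"`
}

// rsaKeyByKid picks the JWK whose kid matches and rebuilds the *rsa.PublicKey
// from its modulus and exponent, roughly what key.Raw / Materialize produce.
func rsaKeyByKid(doc []byte, kid string) (*rsa.PublicKey, error) {
	var set jwks
	if err := json.Unmarshal(doc, &set); err != nil {
		return nil, fmt.Errorf("failed to unmarshal JWK set: %v", err)
	}
	for _, k := range set.Keys {
		if k.Kid != kid {
			continue
		}
		nb, err := base64.RawURLEncoding.DecodeString(k.N)
		if err != nil {
			return nil, err
		}
		eb, err := base64.RawURLEncoding.DecodeString(k.E)
		if err != nil {
			return nil, err
		}
		return &rsa.PublicKey{
			N: new(big.Int).SetBytes(nb),
			E: int(new(big.Int).SetBytes(eb).Int64()),
		}, nil
	}
	return nil, fmt.Errorf("no JWK found with kid %s", kid)
}

func main() {
	doc := []byte(`{"keys":[{"kid":"abc","kty":"RSA","n":"sXch","e":"AQAB"}]}`)
	key, err := rsaKeyByKid(doc, "abc")
	fmt.Println(key != nil, err) // true <nil>
}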
View File

@ -83,7 +83,7 @@ func TestAuthenticate(t *testing.T) {
{
Desc: "Test jwt with empty subject is invalid",
Secret: "secret",
Token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOi00NDY3NzQ0MDAsImV4cCI6LTQ0Njc3NDM5OSwibmJmIjotNDQ2Nzc0NDAwfQ.Ik90GX1cLvTQzkFvKgBxDPIi-GZsIqFhqQlCxek9TPg",
Token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpYXQiOi00NDY3NzQ0MDAsImV4cCI6LTQ0Njc3NDQwMCwibmJmIjotNDQ2Nzc0NDAwfQ.gxsA6_Ei3s0f2I1TAtrrb8FmGiO25OqVlktlF_ylhX4",
Duration: time.Second,
Principal: oauth2.Principal{
Subject: "",
@ -104,18 +104,6 @@ func TestAuthenticate(t *testing.T) {
},
Err: errors.New("claims duration is different from auth lifespan"),
},
{
Desc: "Test expiration time is present",
Secret: "secret",
Token: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIvY2hyb25vZ3JhZi92MS91c2Vycy8xIiwibmFtZSI6IkRvYyBCcm93biIsImlhdCI6LTQ0Njc3NDQwMCwiZXhwIjotNDQ2Nzc0NDAwLCJuYmYiOi00NDY3NzQ0MDB9._rZ4gOIei9PizHOABH6kLcJTA3jm8ls0YnDxtz1qeUI",
Duration: time.Second,
Principal: oauth2.Principal{
Subject: "/chronograf/v1/users/1",
ExpiresAt: history.Add(time.Second),
IssuedAt: history,
},
Err: errors.New("token is expired by 0s"),
},
}
for _, test := range tests {
j := oauth2.JWT{
@ -205,7 +193,7 @@ func TestGetClaims(t *testing.T) {
TokenString: "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6IllEQlVocWRXa3NLWGRHdVgwc3l0amFVdXhoQSIsImtpZCI6IllEQlVocWRXa3NLWGRHdVgwc3l0amFVdXhoQSJ9.eyJhdWQiOiJjaHJvbm9ncmFmIiwiaXNzIjoiaHR0cHM6Ly9kc3RjaW1hYWQxcC5kc3QtaXRzLmRlL2FkZnMiLCJpYXQiOjE1MTMxNjU4ODksImV4cCI6MTUxMzE2OTQ4OSwiYXV0aF90aW1lIjoxNTEzMTY1ODg4LCJzdWIiOiJlWVYzamRsZE55RlkxcUZGSDRvQWRCdkRGZmJWZm51RzI5SGlIa1N1andrPSIsInVwbiI6ImJzY0Bkc3QtaXRzLmRlIiwidW5pcXVlX25hbWUiOiJEU1RcXGJzYyIsInNpZCI6IlMtMS01LTIxLTI1MDUxNTEzOTgtMjY2MTAyODEwOS0zNzU0MjY1ODIwLTExMDQifQ.nK51Ui4XN45SVul9igNaKFQd-F63BNstBzW-T5LBVm_ANHCEHyP3_88C3ffkkQIi3PxYacRJGtfswP35ws7YJUcNp-GoGZARqz62NpMtbQyhos6mCaVXwPoxPbrZx4AkMQgxkZwJcOzceX7mpjcT3kCth30chN3lkhzSjGrXe4ZDOAV25liS-dsdBiqDiaTB91sS534GM76qJQxFUs51oSbYTRdCN1VJ0XopMcasfVDzFrtSbyvEIVXlpKK2HplnhheqF4QHrM_3cjV_NGRr3tYLe-AGTdDXKWlJD1GDz1ECXeMGQHPoz3U8cqNsFLYBstIlCgfnBWgWsPZSvJPJUg",
JwksDocument: "",
Iat: int64(1513165889),
Err: errors.New("failed to unmarshal JWK set: EOF"),
Err: errors.New("failed to unmarshal JWK: EOF"),
},
{
Name: "Invalid Token",

View File

@ -19,30 +19,21 @@ func NewAuthMux(p Provider, a Authenticator, t Tokenizer,
basepath string, l chronograf.Logger,
UseIDToken bool, LoginHint string,
client *http.Client, codeExchange CodeExchange,
logoutCallback string) *AuthMux {
) *AuthMux {
if codeExchange == nil {
codeExchange = simpleTokenExchange
}
var afterLogoutURL string
if logoutCallback != "" {
afterLogoutURL = logoutCallback
} else {
afterLogoutURL = path.Join(basepath, "/")
}
mux := &AuthMux{
Provider: p,
Auth: a,
Tokens: t,
SuccessURL: path.Join(basepath, "/landing"),
AfterLogoutURL: afterLogoutURL,
FailureURL: path.Join(basepath, "/login"),
Now: DefaultNowTime,
Logger: l,
UseIDToken: UseIDToken,
LoginHint: LoginHint,
CodeExchange: codeExchange,
Provider: p,
Auth: a,
Tokens: t,
SuccessURL: path.Join(basepath, "/"),
FailureURL: path.Join(basepath, "/login"),
Now: DefaultNowTime,
Logger: l,
UseIDToken: UseIDToken,
LoginHint: LoginHint,
CodeExchange: codeExchange,
}
if client != nil {
@ -58,18 +49,17 @@ func NewAuthMux(p Provider, a Authenticator, t Tokenizer,
// Chronograf instance as long as the Authenticator has no external
// dependencies (e.g. on a Database).
type AuthMux struct {
Provider Provider // Provider is the OAuth2 service
Auth Authenticator // Auth is used to Authorize after successful OAuth2 callback and Expire on Logout
Tokens Tokenizer // Tokens is used to create and validate OAuth2 "state"
Logger chronograf.Logger // Logger is used to give some more information about the OAuth2 process
SuccessURL string // SuccessURL is redirect location after successful authorization
AfterLogoutURL string // AfterLogoutURL is redirect location after logout
FailureURL string // FailureURL is redirect location after authorization failure
Now func() time.Time // Now returns the current time (for testing)
UseIDToken bool // UseIDToken enables OpenID id_token support
LoginHint string // LoginHint will be included as a parameter during authentication if non-nil
client *http.Client // client is the http client used in oauth exchange.
CodeExchange CodeExchange // helps with CSRF in exchange of token for authorization code
Provider Provider // Provider is the OAuth2 service
Auth Authenticator // Auth is used to Authorize after successful OAuth2 callback and Expire on Logout
Tokens Tokenizer // Tokens is used to create and validate OAuth2 "state"
Logger chronograf.Logger // Logger is used to give some more information about the OAuth2 process
SuccessURL string // SuccessURL is redirect location after successful authorization
FailureURL string // FailureURL is redirect location after authorization failure
Now func() time.Time // Now returns the current time (for testing)
UseIDToken bool // UseIDToken enables OpenID id_token support
LoginHint string // LoginHint will be included as a parameter during authentication if non-nil
client *http.Client // client is the http client used in oauth exchange.
CodeExchange CodeExchange // helps with CSRF in exchange of token for authorization code
}
// Login returns a handler that redirects to the provider's OAuth login.
@ -190,6 +180,6 @@ func (j *AuthMux) Callback() http.Handler {
func (j *AuthMux) Logout() http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
j.Auth.Expire(w)
http.Redirect(w, r, j.AfterLogoutURL, http.StatusTemporaryRedirect)
http.Redirect(w, r, j.SuccessURL, http.StatusTemporaryRedirect)
})
}

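The two versions of Logout() above differ only in where the browser is sent after the session cookie is expired: one honours an optional external logout callback through AfterLogoutURL, the other always redirects back to the application root under basepath. A minimal, self-contained sketch of that redirect choice (the cookie name and URLs are placeholders, not the real DefaultCookieName):

package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"path"
)

// logoutHandler expires the session cookie, then redirects either to an
// external logout callback (when configured) or to the root under basepath.
func logoutHandler(basepath, logoutCallback string) http.Handler {
	after := path.Join(basepath, "/")
	if logoutCallback != "" {
		after = logoutCallback
	}
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		http.SetCookie(w, &http.Cookie{Name: "session", Value: "", MaxAge: -1})
		http.Redirect(w, r, after, http.StatusTemporaryRedirect)
	})
}

func main() {
	for _, cb := range []string{"", "https://idp.example.com/logout"} {
		rec := httptest.NewRecorder()
		req := httptest.NewRequest("GET", "/oauth/github/logout", nil)
		logoutHandler("/chronograf", cb).ServeHTTP(rec, req)
		fmt.Println(rec.Code, rec.Header().Get("Location"))
	}
}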
View File

@ -24,7 +24,7 @@ type mockCallbackResponse struct {
// a function, and returning the desired handler. Cleanup is still the
// responsibility of the test writer, so the httptest.Server's Close() method
// should be deferred.
func setupMuxTest(response interface{}, selector func(*AuthMux) http.Handler, config ...map[string]string) (*http.Client, *httptest.Server, *httptest.Server) {
func setupMuxTest(response interface{}, selector func(*AuthMux) http.Handler) (*http.Client, *httptest.Server, *httptest.Server) {
provider := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
rw.Header().Set("content-type", "application/json")
rw.WriteHeader(http.StatusOK)
@ -53,13 +53,7 @@ func setupMuxTest(response interface{}, selector func(*AuthMux) http.Handler, co
useidtoken := false
logoutCallback := ""
if len(config) > 0 {
if v, ok := config[0]["logoutCallback"]; ok {
logoutCallback = v
}
}
jm := NewAuthMux(mp, auth, mt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "", nil, nil, logoutCallback)
jm := NewAuthMux(mp, auth, mt, "", clog.New(clog.ParseLevel("debug")), useidtoken, "", nil, nil)
ts := httptest.NewServer(selector(jm))
jar, _ := cookiejar.New(nil)
hc := http.Client{
@ -117,44 +111,6 @@ func Test_AuthMux_Logout_DeletesSessionCookie(t *testing.T) {
}
}
func Test_AuthMux_Logout_RedirectToLogoutCallback(t *testing.T) {
t.Parallel()
var response interface{}
hc, ts, prov := setupMuxTest(response, func(j *AuthMux) http.Handler {
return j.Logout()
}, map[string]string{"logoutCallback": "http://custom-url:8123?redirect=http://localhost:8888"})
defer teardownMuxTest(hc, ts, prov)
tsURL, _ := url.Parse(ts.URL)
hc.Jar.SetCookies(tsURL, []*http.Cookie{
{
Name: DefaultCookieName,
Value: "",
},
})
resp, err := hc.Get(ts.URL)
if err != nil {
t.Fatal("Error communicating with Logout() handler: err:", err)
}
if resp.StatusCode < 300 || resp.StatusCode >= 400 {
t.Fatal("Expected to be redirected, but received status code", resp.StatusCode)
}
loc, err := resp.Location()
if err != nil {
t.Fatal("Expected a location to be redirected to, but wasn't present")
}
if loc.String() != "http://custom-url:8123?redirect=http://localhost:8888" {
t.Fatal("Expected to be redirected to http://custom-url:8123?redirect=http://localhost:8888, but was", loc.String())
}
}
func Test_AuthMux_Login_RedirectsToCorrectURL(t *testing.T) {
t.Parallel()

View File

@ -22,15 +22,5 @@
"author": {
"name": "InfluxData"
},
"license": "AGPL-3.0",
"devDependencies": {
"@parcel/core": "^2.6.2",
"@parcel/transformer-sass": "^2.6.2",
"assert": "^2.0.0",
"events": "^3.3.0",
"http-proxy-middleware": "^2.0.9",
"process": "^0.11.10",
"querystring-es3": "^0.2.1",
"util": "^0.12.4"
}
"license": "AGPL-3.0"
}

View File

@ -2,28 +2,25 @@ package protoboards
import (
"context"
"embed"
"encoding/json"
"fmt"
"github.com/influxdata/chronograf"
)
//go:embed *.json
var content embed.FS
//go:generate go-bindata -o bin_gen.go -ignore README|apps|.sh|go -pkg protoboards .
// BinProtoboardsStore represents a embedded protoboards store
// BinProtoboardsStore represents a protoboards store using data generated by go-bindata
type BinProtoboardsStore struct {
Logger chronograf.Logger
}
// All returns the set of all protoboards
func (s *BinProtoboardsStore) All(ctx context.Context) ([]chronograf.Protoboard, error) {
dirEntries, _ := content.ReadDir(".")
protoboards := make([]chronograf.Protoboard, len(dirEntries))
for i, dirEntry := range dirEntries {
name := dirEntry.Name()
octets, err := content.ReadFile(name)
names := AssetNames()
protoboards := make([]chronograf.Protoboard, len(names))
for i, name := range names {
octets, err := Asset(name)
if err != nil {
s.Logger.
WithField("component", "protoboards").

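Editor's note: the hunk above swaps master's `go:embed` based protoboards store for the go-bindata `AssetNames()`/`Asset()` code used in v1.9.4. The following is a rough, self-contained sketch of the embed approach under stated assumptions: `Protoboard` stands in for `chronograf.Protoboard`, and the real store logs and skips bad files rather than failing outright.

```go
package protoboards

import (
	"embed"
	"encoding/json"
	"fmt"
)

//go:embed *.json
var content embed.FS

// Protoboard is a stand-in for chronograf.Protoboard; only an ID field is
// sketched here.
type Protoboard struct {
	ID string `json:"id"`
}

// all loads every embedded *.json protoboard. v1.9.4 does the equivalent by
// iterating AssetNames() and calling Asset(name) on go-bindata output.
func all() ([]Protoboard, error) {
	entries, err := content.ReadDir(".")
	if err != nil {
		return nil, err
	}
	boards := make([]Protoboard, 0, len(entries))
	for _, entry := range entries {
		octets, err := content.ReadFile(entry.Name())
		if err != nil {
			return nil, fmt.Errorf("read %s: %w", entry.Name(), err)
		}
		var p Protoboard
		if err := json.Unmarshal(octets, &p); err != nil {
			return nil, fmt.Errorf("parse %s: %w", entry.Name(), err)
		}
		boards = append(boards, p)
	}
	return boards, nil
}
```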
View File

@ -1,19 +0,0 @@
package protoboards
import (
"context"
"testing"
clog "github.com/influxdata/chronograf/log"
)
func TestAll(t *testing.T) {
store := BinProtoboardsStore{Logger: clog.New(clog.ParseLevel("debug"))}
all, err := store.All(context.Background())
if err != nil {
t.Error("No error expected!")
}
if len(all) != 29 {
t.Errorf("29 items expected, but %d", len(all))
}
}

View File

@ -9,7 +9,6 @@ const ContextKey = contextKey("role")
// Chronograf User Roles
const (
MemberRoleName = "member"
ReaderRoleName = "reader"
ViewerRoleName = "viewer"
EditorRoleName = "editor"
AdminRoleName = "admin"

View File

@ -28,7 +28,6 @@ type annotationResponse struct {
StartTime string `json:"startTime"` // StartTime in RFC3339 of the start of the annotation
EndTime string `json:"endTime"` // EndTime in RFC3339 of the end of the annotation
Text string `json:"text"` // Text is the associated user-facing text describing the annotation
Color string `json:"color"` // Optional annotation color
Tags chronograf.AnnotationTags `json:"tags"` // Tags is a collection of user defined key/value pairs that contextualize the annotation
Links annotationLinks `json:"links"`
}
@ -40,7 +39,6 @@ func newAnnotationResponse(src chronograf.Source, a *chronograf.Annotation) anno
StartTime: a.StartTime.UTC().Format(timeMilliFormat),
EndTime: a.EndTime.UTC().Format(timeMilliFormat),
Text: a.Text,
Color: a.Color,
Tags: a.Tags,
Links: annotationLinks{
Self: fmt.Sprintf("%s/%d/annotations/%s", base, src.ID, a.ID),
@ -229,8 +227,7 @@ func (s *Service) Annotation(w http.ResponseWriter, r *http.Request) {
type newAnnotationRequest struct {
StartTime time.Time
EndTime time.Time
Text string `json:"text,omitempty"` // Text is the associated user-facing text describing the annotation
Color string `json:"color,omitempty"` // Optional annotation color
Text string `json:"text,omitempty"` // Text is the associated user-facing text describing the annotation
Tags chronograf.AnnotationTags `json:"tags"`
}
@ -270,7 +267,6 @@ func (ar *newAnnotationRequest) Annotation() *chronograf.Annotation {
StartTime: ar.StartTime,
EndTime: ar.EndTime,
Text: ar.Text,
Color: ar.Color,
Tags: ar.Tags,
}
}
@ -383,7 +379,6 @@ type updateAnnotationRequest struct {
StartTime *time.Time `json:"startTime,omitempty"` // StartTime is the time in rfc3339 milliseconds
EndTime *time.Time `json:"endTime,omitempty"` // EndTime is the time in rfc3339 milliseconds
Text *string `json:"text,omitempty"` // Text is the associated user-facing text describing the annotation
Color *string `json:"color,omitempty"` // Annotation color
Tags chronograf.AnnotationTags `json:"tags"`
}
@ -484,10 +479,6 @@ func (s *Service) UpdateAnnotation(w http.ResponseWriter, r *http.Request) {
if req.Text != nil {
cur.Text = *req.Text
}
if req.Color != nil {
cur.Color = *req.Color
}
if req.Tags != nil {
if err = req.Tags.Valid(); err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)

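Editor's note: the removed `color` field above is an optional per-annotation display colour carried through the response, create, and update payloads on master. A hedged sketch of the request body shape follows; `annotationPayload` is an illustration only (the server's `newAnnotationRequest` parses the times into `time.Time`), with field names taken from the JSON tags in the hunks and example values from the swagger sample later in this diff.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// annotationPayload mirrors the JSON tags visible in the hunks above; it is
// not the server's actual request type.
type annotationPayload struct {
	StartTime string            `json:"startTime"`
	EndTime   string            `json:"endTime"`
	Text      string            `json:"text,omitempty"`
	Color     string            `json:"color,omitempty"` // accepted on master, absent in v1.9.4
	Tags      map[string]string `json:"tags"`
}

func main() {
	body, _ := json.MarshalIndent(annotationPayload{
		StartTime: "2018-07-09T17:48:04.23Z",
		EndTime:   "2018-07-09T17:48:08.652Z",
		Text:      "todo: investigate this spike",
		Color:     "red",
		Tags:      map[string]string{"repo": "influxdata/chronograf"},
	}, "", "  ")
	fmt.Println(string(body))
}
```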
View File

@ -153,52 +153,7 @@ func TestService_Annotations(t *testing.T) {
ID: "1",
w: httptest.NewRecorder(),
r: httptest.NewRequest("GET", "/chronograf/v1/sources/1/annotations?since=1985-04-12T23:20:50.52Z", bytes.NewReader([]byte(`howdy`))),
want: `{"annotations":[{"id":"ea0aa94b-969a-4cd5-912a-5db61d502268","startTime":"1970-01-01T00:00:00Z","endTime":"2018-01-25T22:42:57.345Z","text":"mytext","color":"","tags":{},"links":{"self":"/chronograf/v1/sources/1/annotations/ea0aa94b-969a-4cd5-912a-5db61d502268"}}]}
`,
},
{
name: "returns annotations with color in store",
fields: fields{
Store: mockStore,
TimeSeriesClient: &mocks.TimeSeries{
ConnectF: func(context.Context, *chronograf.Source) error {
return nil
},
QueryF: func(context.Context, chronograf.Query) (chronograf.Response, error) {
return mocks.NewResponse(`[
{
"series": [
{
"name": "annotations",
"columns": [
"time",
"start_time",
"modified_time_ns",
"text",
"color",
"id"
],
"values": [
[
1516920177345000000,
0,
1516989242129417403,
"mytext",
"red",
"ea0aa94b-969a-4cd5-912a-5db61d502268"
]
]
}
]
}
]`, nil), nil
},
},
},
ID: "1",
w: httptest.NewRecorder(),
r: httptest.NewRequest("GET", "/chronograf/v1/sources/1/annotations?since=1985-04-12T23:20:50.52Z", bytes.NewReader([]byte(`howdy`))),
want: `{"annotations":[{"id":"ea0aa94b-969a-4cd5-912a-5db61d502268","startTime":"1970-01-01T00:00:00Z","endTime":"2018-01-25T22:42:57.345Z","text":"mytext","color":"red","tags":{},"links":{"self":"/chronograf/v1/sources/1/annotations/ea0aa94b-969a-4cd5-912a-5db61d502268"}}]}
want: `{"annotations":[{"id":"ea0aa94b-969a-4cd5-912a-5db61d502268","startTime":"1970-01-01T00:00:00Z","endTime":"2018-01-25T22:42:57.345Z","text":"mytext","tags":{},"links":{"self":"/chronograf/v1/sources/1/annotations/ea0aa94b-969a-4cd5-912a-5db61d502268"}}]}
`,
},
{
@ -263,7 +218,6 @@ func TestService_UpdateAnnotation(t *testing.T) {
"start_time",
"modified_time_ns",
"text",
"color",
"id"
],
"values": [
@ -272,7 +226,6 @@ func TestService_UpdateAnnotation(t *testing.T) {
0,
1516989242129417403,
"mytext",
"red",
"1"
]
]
@ -296,14 +249,9 @@ func TestService_UpdateAnnotation(t *testing.T) {
body string
want string
}{
{
body: `{"id":"1","text":"newtext","color":"blue","tags":{"foo":"bar"}}`,
want: `{"id":"1","startTime":"1970-01-01T00:00:00Z","endTime":"2018-01-25T22:42:57.345Z","text":"newtext","color":"blue","tags":{"foo":"bar"},"links":{"self":"/chronograf/v1/sources/1/annotations/1"}}
`,
},
{
body: `{"id":"1","text":"newtext","tags":{"foo":"bar"}}`,
want: `{"id":"1","startTime":"1970-01-01T00:00:00Z","endTime":"2018-01-25T22:42:57.345Z","text":"newtext","color":"red","tags":{"foo":"bar"},"links":{"self":"/chronograf/v1/sources/1/annotations/1"}}
want: `{"id":"1","startTime":"1970-01-01T00:00:00Z","endTime":"2018-01-25T22:42:57.345Z","text":"newtext","tags":{"foo":"bar"},"links":{"self":"/chronograf/v1/sources/1/annotations/1"}}
`,
},
{

View File

@ -5,19 +5,24 @@ import (
"github.com/influxdata/chronograf"
"github.com/influxdata/chronograf/dist"
"github.com/influxdata/chronograf/ui"
)
const (
// Dir is prefix of the assets in the bindata
Dir = "../ui/build"
// Default is the default item to load if 404
Default = "../ui/build/index.html"
// DebugDir is the prefix of the assets in development mode
DebugDir = "ui/build"
// DebugDefault is the default item to load if 404
DebugDefault = "ui/build/index.html"
// DefaultContentType is the content-type to return for the Default file
DefaultContentType = "text/html; charset=utf-8"
)
// AssetsOpts configures the asset middleware
type AssetsOpts struct {
// Develop when true serves assets from ui/build directory directly; false will use embedded files.
// Develop when true serves assets from ui/build directory directly; false will use internal bindata.
Develop bool
// Logger will log the asset served
Logger chronograf.Logger
@ -32,7 +37,11 @@ func Assets(opts AssetsOpts) http.Handler {
Default: DebugDefault,
}
} else {
assets = &ui.BindataAssets{}
assets = &dist.BindataAssets{
Prefix: Dir,
Default: Default,
DefaultContentType: DefaultContentType,
}
}
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {

View File

@ -63,6 +63,7 @@ func AuthorizedToken(auth oauth2.Authenticator, logger chronograf.Logger, next h
// Send the principal to the next handler
ctx = context.WithValue(ctx, oauth2.PrincipalKey, principal)
next.ServeHTTP(w, r.WithContext(ctx))
return
})
}
@ -292,6 +293,7 @@ func AuthorizedUser(
}
Error(w, http.StatusForbidden, "User is not authorized", logger)
return
})
}
@ -304,14 +306,7 @@ func hasAuthorizedRole(u *chronograf.User, role string) bool {
case roles.MemberRoleName:
for _, r := range u.Roles {
switch r.Name {
case roles.MemberRoleName, roles.ReaderRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName:
return true
}
}
case roles.ReaderRoleName:
for _, r := range u.Roles {
switch r.Name {
case roles.ReaderRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName:
case roles.MemberRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName:
return true
}
}

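Editor's note: `hasAuthorizedRole` spells the role hierarchy out as one switch per required role; master inserts `reader` between `member` and `viewer`, v1.9.4 has no such role. A compact rank-based sketch of the same check follows; the role names mirror the constants in the roles package, and the ordering is inferred from the switch cases above, not taken from any single function in the repo.

```go
package main

import "fmt"

// rank encodes the hierarchy implied by the switch ladders above:
// member < reader < viewer < editor < admin (reader exists only on master).
var rank = map[string]int{
	"member": 0,
	"reader": 1,
	"viewer": 2,
	"editor": 3,
	"admin":  4,
}

// hasAtLeast reports whether a user holding userRole satisfies a route that
// requires the given role, which is what each switch case enumerates by hand.
func hasAtLeast(userRole, required string) bool {
	ur, okUser := rank[userRole]
	rr, okReq := rank[required]
	return okUser && okReq && ur >= rr
}

func main() {
	fmt.Println(hasAtLeast("reader", "member")) // true
	fmt.Println(hasAtLeast("reader", "viewer")) // false: reader sits below viewer
}
```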
File diff suppressed because it is too large

View File

@ -16,7 +16,6 @@ import (
"github.com/influxdata/chronograf"
uuid "github.com/influxdata/chronograf/id"
"github.com/influxdata/chronograf/influx"
"github.com/influxdata/chronograf/util"
)
// ValidInfluxRequest checks if queries specify a command.
@ -125,13 +124,13 @@ func (s *Service) Write(w http.ResponseWriter, r *http.Request) {
version := query.Get("v")
query.Del("v")
if strings.HasPrefix(version, "2") {
u = util.AppendPath(u, "/api/v2/write")
u.Path = "/api/v2/write"
// v2 organization name is stored in username (org does not matter against v1)
query.Set("org", src.Username)
query.Set("bucket", query.Get("db"))
query.Del("db")
} else {
u = util.AppendPath(u, "/write")
u.Path = "/write"
}
u.RawQuery = query.Encode()
@ -173,59 +172,26 @@ func (s *Service) Write(w http.ResponseWriter, r *http.Request) {
// setupQueryFromCommand set query parameters from its command
func setupQueryFromCommand(req *chronograf.Query) {
// sets active database (and retention policy) from the query
useDb := func(dbSpec string) error {
dbSpecReader := csv.NewReader(bytes.NewReader(([]byte)(dbSpec)))
dbSpecReader.Comma = '.'
if dbrp, err := dbSpecReader.Read(); err == nil {
if len(dbrp) > 0 {
req.DB = dbrp[0]
}
if len(dbrp) > 1 {
req.RP = dbrp[1]
}
return nil
} else {
return err
}
}
// allow to set active database with USE command or via ON clause, examples:
// allow to set active database with USE command, examples:
// use mydb
// use "mydb"
// USE "mydb"."myrp"
// use "mydb.myrp"
// use mydb.myrp
// show tag keys on "mydb"
// SHOW TAG KEYS ON "mydb"
command := strings.ToLower(req.Command)
if strings.HasPrefix(command, "use ") {
if strings.HasPrefix(req.Command, "use ") || strings.HasPrefix(req.Command, "USE ") {
if nextCommand := strings.IndexRune(req.Command, ';'); nextCommand > 4 {
dbSpec := strings.TrimSpace(req.Command[4:nextCommand])
if useDb(dbSpec) == nil {
dbSpecReader := csv.NewReader(bytes.NewReader(([]byte)(dbSpec)))
dbSpecReader.Comma = '.'
if dbrp, err := dbSpecReader.Read(); err == nil {
if len(dbrp) > 0 {
req.DB = dbrp[0]
}
if len(dbrp) > 1 {
req.RP = dbrp[1]
}
req.Command = strings.TrimSpace(req.Command[nextCommand+1:])
}
}
} else if strings.Contains(command, " on ") {
r := csv.NewReader(strings.NewReader(req.Command))
r.Comma = ' '
if tokens, err := r.Read(); err == nil {
// filter empty tokens (i.e. redundant whitespaces, using https://go.dev/wiki/SliceTricks#filtering-without-allocating)
fields := tokens[:0]
for _, field := range tokens {
if field != "" {
fields = append(fields, field)
}
}
// try to find ON clause and use its value to set the database
for i, field := range fields {
if strings.ToLower(field) == "on" {
if i < len(fields)-1 {
_ = useDb(fields[i+1])
}
break
}
}
}
}
}

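Editor's note: in the Write handler further up, master appends "/write" (or "/api/v2/write") to the source URL with util.AppendPath, while v1.9.4 overwrites u.Path outright and so loses any context path a proxied InfluxDB is mounted under; the removed TestService_Influx_Write in the next file exercises exactly that "/ctx" prefix. The sketch below shows the difference; `appendPath` is a hypothetical stand-in for util.AppendPath, not its actual implementation.

```go
package main

import (
	"fmt"
	"net/url"
	"path"
)

// appendPath joins the new segment onto whatever path the source URL already
// carries, instead of replacing it.
func appendPath(u *url.URL, p string) *url.URL {
	c := *u
	c.Path = path.Join(c.Path, p)
	return &c
}

func main() {
	u, _ := url.Parse("http://influxdb.local:8086/ctx")

	// master: the context path is preserved
	fmt.Println(appendPath(u, "/api/v2/write")) // http://influxdb.local:8086/ctx/api/v2/write

	// v1.9.4: direct assignment drops the /ctx prefix
	u.Path = "/api/v2/write"
	fmt.Println(u) // http://influxdb.local:8086/api/v2/write
}
```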
View File

@ -216,191 +216,3 @@ func TestService_Influx_UseCommand(t *testing.T) {
})
}
}
// TestService_Influx_CommandWithOnClause tests preprocessing of command with ON clause
func TestService_Influx_CommandWithOnClause(t *testing.T) {
tests := []struct {
name string
db string
rp string
}{
{
name: "/* no command */",
},
{
name: "SHOW MEASUREMENTS",
},
{
name: "SHOW TAG KEYS ON mydb",
db: "mydb",
},
{
name: "SHOW TAG KEYS ON mydb FROM table",
db: "mydb",
},
{
name: "USE anotherdb; SHOW TAG KEYS ON mydb",
db: "anotherdb",
},
{
name: `show tag keys on "mydb"`,
db: "mydb",
},
{
name: `show tag keys oN "mydb"`,
db: "mydb",
},
{
name: `show tag keys on "mydb" from "table"`,
db: "mydb",
},
{
name: `show tag keys on "my_db" from "table"`,
db: "my_db",
},
{
name: `show tag keys on "my-db" from "table"`,
db: "my-db",
},
{
name: `show tag keys on "my/db" from "table"`,
db: "my/db",
},
{
name: `show tag keys on "my db" from "table"`,
db: "my db",
},
{
name: `show tag values on "my db" from "table" with key = "my key"`,
db: "my db",
},
}
h := &Service{
Store: &mocks.Store{
SourcesStore: &mocks.SourcesStore{
GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
return chronograf.Source{
ID: 1337,
URL: "http://any.url",
}, nil
},
},
},
TimeSeriesClient: &mocks.TimeSeries{
ConnectF: func(ctx context.Context, src *chronograf.Source) error {
return nil
},
QueryF: func(ctx context.Context, query chronograf.Query) (chronograf.Response, error) {
return mocks.NewResponse(
fmt.Sprintf(`{"db":"%s","rp":"%s"}`, query.DB, query.RP),
nil,
),
nil
},
},
Logger: log.New(log.ErrorLevel),
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
prefixCommand := strings.ReplaceAll(tt.name, "\"", "\\\"")
w := httptest.NewRecorder()
r := httptest.NewRequest(
"POST",
"http://any.url",
ioutil.NopCloser(
bytes.NewReader([]byte(
`{"uuid": "tst", "query":"`+prefixCommand+` ; DROP MEASUREMENT test"}`,
)),
),
)
r = r.WithContext(httprouter.WithParams(
context.Background(),
httprouter.Params{
{
Key: "id",
Value: "1",
},
},
))
h.Influx(w, r)
resp := w.Result()
body, _ := ioutil.ReadAll(resp.Body)
want := fmt.Sprintf(`{"results":{"db":"%s","rp":"%s"},"uuid":"tst"}`, tt.db, tt.rp)
got := strings.TrimSpace(string(body))
if got != want {
t.Errorf("%q. Influx() =\ngot ***%v***\nwant ***%v***\n", tt.name, got, want)
}
})
}
}
func TestService_Influx_Write(t *testing.T) {
calledPath := ""
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
calledPath = r.URL.Path
rw.WriteHeader(http.StatusOK)
rw.Write([]byte(`{"message":"hi"}`))
}))
defer ts.Close()
testPairs := []struct {
version string
ctx string
path string
}{
{version: "1.8.3", ctx: "", path: "/write"},
{version: "1.8.3", ctx: "/ctx", path: "/ctx/write"},
{version: "2.2.0", ctx: "", path: "/api/v2/write"},
{version: "2.2.0", ctx: "/ctx", path: "/ctx/api/v2/write"},
}
for _, testPair := range testPairs {
calledPath = ""
w := httptest.NewRecorder()
r := httptest.NewRequest(
"POST",
"http://any.url?v="+testPair.version,
ioutil.NopCloser(
bytes.NewReader([]byte(
`temperature v=1.0`,
)),
),
)
r = r.WithContext(httprouter.WithParams(
context.Background(),
httprouter.Params{
{
Key: "id",
Value: "1",
},
},
))
h := &Service{
Store: &mocks.Store{
SourcesStore: &mocks.SourcesStore{
GetF: func(ctx context.Context, ID int) (chronograf.Source, error) {
return chronograf.Source{
ID: 1337,
URL: ts.URL + testPair.ctx,
}, nil
},
},
},
Logger: log.New(log.ErrorLevel),
}
h.Write(w, r)
resp := w.Result()
ioutil.ReadAll(resp.Body)
if calledPath != testPair.path {
t.Errorf("Path received: %v, want: %v ", calledPath, testPair.path)
}
}
}

View File

@ -28,7 +28,7 @@ const (
// MuxOpts are the options for the router. Mostly related to auth.
type MuxOpts struct {
Logger chronograf.Logger
Develop bool // Develop loads assets from filesystem instead of embedded files
Develop bool // Develop loads assets from filesystem instead of bindata
Basepath string // URL path prefix under which all chronograf routes will be mounted
UseAuth bool // UseAuth turns on Github OAuth and JWT
RedirAuth string // RedirAuth specifies which auth to redirect login.
@ -92,15 +92,6 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
)
}
_ = EnsureMember
EnsureReader := func(next http.HandlerFunc) http.HandlerFunc {
return AuthorizedUser(
service.Store,
opts.UseAuth,
roles.ReaderRoleName,
opts.Logger,
next,
)
}
EnsureViewer := func(next http.HandlerFunc) http.HandlerFunc {
return AuthorizedUser(
service.Store,
@ -183,28 +174,27 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
router.DELETE("/chronograf/v1/mappings/:id", EnsureSuperAdmin(service.RemoveMapping))
// Sources
router.GET("/chronograf/v1/sources", EnsureReader(service.Sources))
router.GET("/chronograf/v1/sources", EnsureViewer(service.Sources))
router.POST("/chronograf/v1/sources", EnsureEditor(service.NewSource))
router.GET("/chronograf/v1/sources/:id", EnsureReader(service.SourcesID))
router.GET("/chronograf/v1/sources/:id", EnsureViewer(service.SourcesID))
router.PATCH("/chronograf/v1/sources/:id", EnsureEditor(service.UpdateSource))
router.DELETE("/chronograf/v1/sources/:id", EnsureEditor(service.RemoveSource))
router.GET("/chronograf/v1/sources/:id/health", EnsureReader(service.SourceHealth))
router.GET("/chronograf/v1/sources/:id/health", EnsureViewer(service.SourceHealth))
// Flux
router.GET("/chronograf/v1/flux", EnsureReader(service.Flux))
router.POST("/chronograf/v1/flux/ast", EnsureReader(service.FluxAST))
router.GET("/chronograf/v1/flux", EnsureViewer(service.Flux))
router.POST("/chronograf/v1/flux/ast", EnsureViewer(service.FluxAST))
router.GET("/chronograf/v1/flux/suggestions", EnsureViewer(service.FluxSuggestions))
router.GET("/chronograf/v1/flux/suggestions/:name", EnsureViewer(service.FluxSuggestion))
// Source Proxy to Influx; Has gzip compression around the handler
influx := gziphandler.GzipHandler(http.HandlerFunc(EnsureReader(service.Influx)))
influx := gziphandler.GzipHandler(http.HandlerFunc(EnsureViewer(service.Influx)))
router.Handler("POST", "/chronograf/v1/sources/:id/proxy", influx)
// Source Proxy to Influx's flux endpoint; compression because the responses from
// flux could be large.
router.Handler("POST", "/chronograf/v1/sources/:id/proxy/flux", EnsureReader(service.ProxyFlux))
router.Handler("GET", "/chronograf/v1/sources/:id/proxy/flux", EnsureReader(service.ProxyFlux))
router.Handler("POST", "/chronograf/v1/sources/:id/proxy/flux", EnsureViewer(service.ProxyFlux))
// Write proxies line protocol write requests to InfluxDB
router.POST("/chronograf/v1/sources/:id/write", EnsureViewer(service.Write))
@ -215,12 +205,12 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
//
// Admins should ensure that the InfluxDB source as the proper permissions
// intended for Chronograf Users with the Viewer Role type.
router.POST("/chronograf/v1/sources/:id/queries", EnsureReader(service.Queries))
router.POST("/chronograf/v1/sources/:id/queries", EnsureViewer(service.Queries))
// Annotations are user-defined events associated with this source
router.GET("/chronograf/v1/sources/:id/annotations", EnsureReader(service.Annotations))
router.GET("/chronograf/v1/sources/:id/annotations", EnsureViewer(service.Annotations))
router.POST("/chronograf/v1/sources/:id/annotations", EnsureEditor(service.NewAnnotation))
router.GET("/chronograf/v1/sources/:id/annotations/:aid", EnsureReader(service.Annotation))
router.GET("/chronograf/v1/sources/:id/annotations/:aid", EnsureViewer(service.Annotation))
router.DELETE("/chronograf/v1/sources/:id/annotations/:aid", EnsureEditor(service.RemoveAnnotation))
router.PATCH("/chronograf/v1/sources/:id/annotations/:aid", EnsureEditor(service.UpdateAnnotation))
@ -309,18 +299,18 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
router.PATCH("/chronograf/v1/users/:id", EnsureSuperAdmin(rawStoreAccess(service.UpdateUser)))
// Dashboards
router.GET("/chronograf/v1/dashboards", EnsureReader(service.Dashboards))
router.GET("/chronograf/v1/dashboards", EnsureViewer(service.Dashboards))
router.POST("/chronograf/v1/dashboards", EnsureEditor(service.NewDashboard))
router.GET("/chronograf/v1/dashboards/:id", EnsureReader(service.DashboardID))
router.GET("/chronograf/v1/dashboards/:id", EnsureViewer(service.DashboardID))
router.DELETE("/chronograf/v1/dashboards/:id", EnsureEditor(service.RemoveDashboard))
router.PUT("/chronograf/v1/dashboards/:id", EnsureEditor(service.ReplaceDashboard))
router.PATCH("/chronograf/v1/dashboards/:id", EnsureEditor(service.UpdateDashboard))
// Dashboard Cells
router.GET("/chronograf/v1/dashboards/:id/cells", EnsureReader(service.DashboardCells))
router.GET("/chronograf/v1/dashboards/:id/cells", EnsureViewer(service.DashboardCells))
router.POST("/chronograf/v1/dashboards/:id/cells", EnsureEditor(service.NewDashboardCell))
router.GET("/chronograf/v1/dashboards/:id/cells/:cid", EnsureReader(service.DashboardCellID))
router.GET("/chronograf/v1/dashboards/:id/cells/:cid", EnsureViewer(service.DashboardCellID))
router.DELETE("/chronograf/v1/dashboards/:id/cells/:cid", EnsureEditor(service.RemoveDashboardCell))
router.PUT("/chronograf/v1/dashboards/:id/cells/:cid", EnsureEditor(service.ReplaceDashboardCell))
@ -358,7 +348,7 @@ func NewMux(opts MuxOpts, service Service) http.Handler {
router.GET("/chronograf/v1/org_config/logviewer", EnsureViewer(service.OrganizationLogViewerConfig))
router.PUT("/chronograf/v1/org_config/logviewer", EnsureEditor(service.ReplaceOrganizationLogViewerConfig))
router.GET("/chronograf/v1/env", EnsureMember(service.Environment))
router.GET("/chronograf/v1/env", EnsureViewer(service.Environment))
// Validates go templates for the js client
router.POST("/chronograf/v1/validate_text_templates", EnsureViewer(service.ValidateTextTemplate))

View File

@ -27,7 +27,7 @@ func (r *organizationRequest) ValidCreate() error {
func (r *organizationRequest) ValidUpdate() error {
if r.Name == "" && r.DefaultRole == "" {
return fmt.Errorf("no fields to update")
return fmt.Errorf("No fields to update")
}
if r.DefaultRole != "" {
@ -43,10 +43,10 @@ func (r *organizationRequest) ValidDefaultRole() error {
}
switch r.DefaultRole {
case roles.MemberRoleName, roles.ReaderRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName:
case roles.MemberRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName:
return nil
default:
return fmt.Errorf("default role must be member, reader, viewer, editor, or admin")
return fmt.Errorf("default role must be member, viewer, editor, or admin")
}
}

View File

@ -220,9 +220,10 @@ func TestService_UpdateOrganization(t *testing.T) {
Logger chronograf.Logger
}
type args struct {
w *httptest.ResponseRecorder
r *http.Request
org *organizationRequest
w *httptest.ResponseRecorder
r *http.Request
org *organizationRequest
setPtr bool
}
tests := []struct {
name string
@ -295,10 +296,10 @@ func TestService_UpdateOrganization(t *testing.T) {
id: "1337",
wantStatus: http.StatusUnprocessableEntity,
wantContentType: "application/json",
wantBody: `{"code":422,"message":"no fields to update"}`,
wantBody: `{"code":422,"message":"No fields to update"}`,
},
{
name: "Update Organization default role to viewer",
name: "Update Organization default role",
args: args{
w: httptest.NewRecorder(),
r: httptest.NewRequest(
@ -330,39 +331,6 @@ func TestService_UpdateOrganization(t *testing.T) {
wantContentType: "application/json",
wantBody: `{"links":{"self":"/chronograf/v1/organizations/1337"},"id":"1337","name":"The Good Place","defaultRole":"viewer"}`,
},
{
name: "Update Organization default role to reader",
args: args{
w: httptest.NewRecorder(),
r: httptest.NewRequest(
"GET",
"http://any.url", // can be any valid URL as we are bypassing mux
nil,
),
org: &organizationRequest{
DefaultRole: roles.ReaderRoleName,
},
},
fields: fields{
Logger: log.New(log.DebugLevel),
OrganizationsStore: &mocks.OrganizationsStore{
UpdateF: func(ctx context.Context, o *chronograf.Organization) error {
return nil
},
GetF: func(ctx context.Context, q chronograf.OrganizationQuery) (*chronograf.Organization, error) {
return &chronograf.Organization{
ID: "1337",
Name: "The Good Place",
DefaultRole: roles.MemberRoleName,
}, nil
},
},
},
id: "1337",
wantStatus: http.StatusOK,
wantContentType: "application/json",
wantBody: `{"links":{"self":"/chronograf/v1/organizations/1337"},"id":"1337","name":"The Good Place","defaultRole":"reader"}`,
},
{
name: "Update Organization - invalid update",
args: args{
@ -388,7 +356,7 @@ func TestService_UpdateOrganization(t *testing.T) {
id: "1337",
wantStatus: http.StatusUnprocessableEntity,
wantContentType: "application/json",
wantBody: `{"code":422,"message":"no fields to update"}`,
wantBody: `{"code":422,"message":"No fields to update"}`,
},
{
name: "Update Organization - invalid role",
@ -417,7 +385,7 @@ func TestService_UpdateOrganization(t *testing.T) {
id: "1337",
wantStatus: http.StatusUnprocessableEntity,
wantContentType: "application/json",
wantBody: `{"code":422,"message":"default role must be member, reader, viewer, editor, or admin"}`,
wantBody: `{"code":422,"message":"default role must be member, viewer, editor, or admin"}`,
},
}

View File

@ -239,7 +239,7 @@ func Test_ProtoboardsID(t *testing.T) {
wants: wants{
statusCode: http.StatusOK,
contentType: "application/json",
body: `{"id":"1","meta":{"name":"","version":"","measurements":null,"dashboardVersion":""},"data":{"cells":null,"templates":null},"links":{"self":"/chronograf/v1/protoboards/1"}}`,
body: `{"id":"1","meta":{"name":"","version":"","dashboardVersion":""},"data":{"cells":null},"links":{"self":"/chronograf/v1/protoboards/1"}}`,
},
args: args{
id: "1",

View File

@ -115,7 +115,6 @@ type Server struct {
GenericInsecure bool `long:"generic-insecure" description:"Whether or not to verify auth-url's tls certificates." env:"GENERIC_INSECURE"`
GenericRootCA flags.Filename `long:"generic-root-ca" description:"File location of root ca cert for generic oauth tls verification." env:"GENERIC_ROOT_CA"`
OAuthNoPKCE bool `long:"oauth-no-pkce" description:"Disables OAuth PKCE." env:"OAUTH_NO_PKCE"`
OAuthLogoutEndpoint string `long:"oauth-logout-endpoint" description:"OAuth endpoint to call for logout from OAuth Identity provider." env:"OAUTH_LOGOUT_ENDPOINT"`
Auth0Domain string `long:"auth0-domain" description:"Subdomain of auth0.com used for Auth0 OAuth2 authentication" env:"AUTH0_DOMAIN"`
Auth0ClientID string `long:"auth0-client-id" description:"Auth0 Client ID for OAuth2 support" env:"AUTH0_CLIENT_ID"`
@ -344,7 +343,7 @@ func (s *Server) githubOAuth(logger chronograf.Logger, auth oauth2.Authenticator
Logger: logger,
}
jwt := oauth2.NewJWT(s.TokenSecret, s.JwksURL)
ghMux := oauth2.NewAuthMux(&gh, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange(), s.OAuthLogoutEndpoint)
ghMux := oauth2.NewAuthMux(&gh, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange())
return &gh, ghMux, s.UseGithub
}
@ -358,7 +357,7 @@ func (s *Server) googleOAuth(logger chronograf.Logger, auth oauth2.Authenticator
Logger: logger,
}
jwt := oauth2.NewJWT(s.TokenSecret, s.JwksURL)
goMux := oauth2.NewAuthMux(&google, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange(), s.OAuthLogoutEndpoint)
goMux := oauth2.NewAuthMux(&google, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange())
return &google, goMux, s.UseGoogle
}
@ -370,7 +369,7 @@ func (s *Server) herokuOAuth(logger chronograf.Logger, auth oauth2.Authenticator
Logger: logger,
}
jwt := oauth2.NewJWT(s.TokenSecret, s.JwksURL)
hMux := oauth2.NewAuthMux(&heroku, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange(), s.OAuthLogoutEndpoint)
hMux := oauth2.NewAuthMux(&heroku, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange())
return &heroku, hMux, s.UseHeroku
}
@ -389,7 +388,7 @@ func (s *Server) genericOAuth(logger chronograf.Logger, auth oauth2.Authenticato
Logger: logger,
}
jwt := oauth2.NewJWT(s.TokenSecret, s.JwksURL)
genMux := oauth2.NewAuthMux(&gen, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange(), s.OAuthLogoutEndpoint)
genMux := oauth2.NewAuthMux(&gen, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange())
return &gen, genMux, s.UseGenericOAuth2
}
@ -405,7 +404,7 @@ func (s *Server) auth0OAuth(logger chronograf.Logger, auth oauth2.Authenticator)
auth0, err := oauth2.NewAuth0(s.Auth0Domain, s.Auth0ClientID, s.Auth0ClientSecret, redirectURL.String(), s.Auth0Organizations, logger)
jwt := oauth2.NewJWT(s.TokenSecret, s.JwksURL)
genMux := oauth2.NewAuthMux(&auth0, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange(), s.OAuthLogoutEndpoint)
genMux := oauth2.NewAuthMux(&auth0, auth, jwt, s.Basepath, logger, s.UseIDToken, s.LoginHint, &s.oauthClient, s.createCodeExchange())
if err != nil {
logger.Error("Error parsing Auth0 domain: err:", err)

View File

@ -544,6 +544,11 @@ func (s *Service) NewSourceUser(w http.ResponseWriter, r *http.Request) {
return
}
if err != nil {
Error(w, http.StatusBadRequest, err.Error(), s.Logger)
return
}
su := newSourceUserResponse(srcID, res.Name).WithPermissions(res.Permissions)
if _, hasRoles := s.hasRoles(ctx, ts); hasRoles {
su.WithRoles(srcID, res.Roles)

View File

@ -40,7 +40,7 @@ func hasRoleContext(ctx context.Context) (string, bool) {
return "", false
}
switch role {
case roles.MemberRoleName, roles.ReaderRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName:
case roles.MemberRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName:
return role, true
default:
return "", false

View File

@ -1,16 +1,18 @@
package server
import (
_ "embed"
"net/http"
)
//go:generate go-bindata -o swagger_gen.go -ignore go -nocompress -pkg server .
//go:embed swagger.json
var swagger []byte
import "net/http"
// Spec serves the swagger.json embedded file
// Spec serves the swagger.json file from bindata
func Spec() http.HandlerFunc {
swagger, err := Asset("swagger.json")
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
w.Write(swagger)

View File

@ -3,7 +3,7 @@
"info": {
"title": "Chronograf",
"description": "API endpoints for Chronograf",
"version": "1.10.8"
"version": "1.9.4"
},
"schemes": ["http"],
"basePath": "/chronograf/v1",
@ -6699,7 +6699,6 @@
"startTime": "2018-07-09T18:08:15.933Z",
"endTime": "2018-07-09T18:08:15.933Z",
"text": "unknown event",
"color": "",
"tags": {},
"links": {
"self": "/chronograf/v1/sources/1/annotations/50ee18e8-8115-4fac-abed-24ce89e96047"
@ -6710,7 +6709,6 @@
"startTime": "2018-07-09T17:48:04.23Z",
"endTime": "2018-07-09T17:48:08.652Z",
"text": "todo: investigate this spike",
"color": "",
"tags": {
"repo": "influxdata/chronograf"
},
@ -6752,13 +6750,6 @@
"text": "my annotation"
}
},
"color": {
"type": "string",
"description": "Annotation color",
"example": {
"text": "red"
}
},
"tags": {
"type": "object",
"description": "A set of user-defined tags associated with the annotation",
@ -6782,7 +6773,6 @@
"startTime": "2018-07-09T17:48:04.23Z",
"endTime": "2018-07-09T17:48:08.652Z",
"text": "no name",
"color": "red",
"tags": {
"repo": "influxdata/chronograf"
},
@ -6815,13 +6805,6 @@
"text": "my annotation"
}
},
"color": {
"type": "string",
"description": "Annotation color",
"example": {
"text": "red"
}
},
"tags": {
"type": "object",
"description": "A set of user-defined tags associated with the annotation",

View File

@ -84,8 +84,8 @@ func (up *URLPrefixer) ServeHTTP(rw http.ResponseWriter, r *http.Request) {
return
}
// do not process JS or SVG files, it only harms them
if isIgnored, _ := regexp.Match("\\.(svg|js)$", []byte(r.URL.String())); isIgnored {
isSVG, _ := regexp.Match(".svg$", []byte(r.URL.String()))
if isSVG {
up.Next.ServeHTTP(rw, r)
return
}
@ -186,6 +186,8 @@ func NewDefaultURLPrefixer(prefix string, next http.Handler, lg chronograf.Logge
[]byte(`src="`),
[]byte(`href="`),
[]byte(`url(`),
[]byte(`new Worker("`),
[]byte(`new Worker('`),
[]byte(`data-basepath="`), // for forwarding basepath to frontend
},
}

View File

@ -176,37 +176,3 @@ func Test_Server_Prefixer_NoPrefixingWithoutFlusther(t *testing.T) {
t.Error("No Flusher", ":\n Expected Error Message: \"", server.ErrNotFlusher, "\" but saw none. Msgs:", tl.Messages)
}
}
func Test_Server_Prefixer_IgnoreJsAndSvg(t *testing.T) {
expected := "Keep it the same please"
backend := http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
fmt.Fprintf(rw, expected)
})
tl := &mocks.TestLogger{}
pfx := &server.URLPrefixer{
Prefix: " error",
Next: backend,
Logger: tl,
Attrs: [][]byte{
[]byte("same"),
},
}
ts := httptest.NewServer(pfx)
defer ts.Close()
for _, fileName := range []string{"/test.js", "/test.svg"} {
res, err := http.Get(ts.URL + fileName)
if err != nil {
t.Fatal("Unexpected error fetching from prefixer: err:", err)
}
actual, err := ioutil.ReadAll(res.Body)
if err != nil {
t.Fatal("Unable to read prefixed body: err:", err)
}
if string(actual) != expected {
t.Error("Prefixing changed content of ", fileName, ":\n\t\tWant:\n", expected, "\n\t\tGot:\n", string(actual))
}
}
}

View File

@ -27,10 +27,10 @@ func (r *userRequest) ValidCreate() error {
return fmt.Errorf("Name required on Chronograf User request body")
}
if r.Provider == "" {
return fmt.Errorf("provider required on Chronograf User request body")
return fmt.Errorf("Provider required on Chronograf User request body")
}
if r.Scheme == "" {
return fmt.Errorf("scheme required on Chronograf User request body")
return fmt.Errorf("Scheme required on Chronograf User request body")
}
// TODO: This Scheme value is hard-coded temporarily since we only currently
@ -42,7 +42,7 @@ func (r *userRequest) ValidCreate() error {
func (r *userRequest) ValidUpdate() error {
if r.Roles == nil {
return fmt.Errorf("no roles to update")
return fmt.Errorf("No Roles to update")
}
return r.ValidRoles()
}
@ -59,10 +59,10 @@ func (r *userRequest) ValidRoles() error {
}
orgs[r.Organization] = true
switch r.Name {
case roles.MemberRoleName, roles.ReaderRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName, roles.WildcardRoleName:
case roles.MemberRoleName, roles.ViewerRoleName, roles.EditorRoleName, roles.AdminRoleName, roles.WildcardRoleName:
continue
default:
return fmt.Errorf("unknown role %s, valid roles are 'member', 'reader', 'viewer', 'editor', 'admin', and '*'", r.Name)
return fmt.Errorf("Unknown role %s. Valid roles are 'member', 'viewer', 'editor', 'admin', and '*'", r.Name)
}
}
}
@ -277,17 +277,17 @@ func (s *Service) UpdateUser(w http.ResponseWriter, r *http.Request) {
// But currently, it is not possible to change name, provider, or
// scheme via the API.
if req.Name != "" && req.Name != u.Name {
err := fmt.Errorf("cannot update Name")
err := fmt.Errorf("Cannot update Name")
invalidData(w, err, s.Logger)
return
}
if req.Provider != "" && req.Provider != u.Provider {
err := fmt.Errorf("cannot update Provider")
err := fmt.Errorf("Cannot update Provider")
invalidData(w, err, s.Logger)
return
}
if req.Scheme != "" && req.Scheme != u.Scheme {
err := fmt.Errorf("cannot update Scheme")
err := fmt.Errorf("Cannot update Scheme")
invalidData(w, err, s.Logger)
return
}
@ -302,7 +302,7 @@ func (s *Service) UpdateUser(w http.ResponseWriter, r *http.Request) {
}
// If the user being updated is the user making the request and they are
// changing their SuperAdmin status, return an unauthorized error
if ctxUser.ID == u.ID && u.SuperAdmin && !req.SuperAdmin {
if ctxUser.ID == u.ID && u.SuperAdmin == true && req.SuperAdmin == false {
Error(w, http.StatusUnauthorized, "user cannot modify their own SuperAdmin status", s.Logger)
return
}
@ -357,7 +357,7 @@ func setSuperAdmin(ctx context.Context, req userRequest, user *chronograf.User)
} else if !isSuperAdmin && (user.SuperAdmin != req.SuperAdmin) {
// If req.SuperAdmin has been set, and the request was not made with the SuperAdmin
// context, return error
return fmt.Errorf("user does not have authorization required to set SuperAdmin status, see https://github.com/influxdata/chronograf/issues/2601 for more information")
return fmt.Errorf("User does not have authorization required to set SuperAdmin status. See https://github.com/influxdata/chronograf/issues/2601 for more information.")
}
return nil

View File

@ -376,7 +376,7 @@ func TestService_NewUser(t *testing.T) {
},
wantStatus: http.StatusUnauthorized,
wantContentType: "application/json",
wantBody: `{"code":401,"message":"user does not have authorization required to set SuperAdmin status, see https://github.com/influxdata/chronograf/issues/2601 for more information"}`,
wantBody: `{"code":401,"message":"User does not have authorization required to set SuperAdmin status. See https://github.com/influxdata/chronograf/issues/2601 for more information."}`,
},
{
name: "Create a new SuperAdmin User - as superadmin",
@ -1361,7 +1361,7 @@ func TestService_UpdateUser(t *testing.T) {
id: "1336",
wantStatus: http.StatusUnauthorized,
wantContentType: "application/json",
wantBody: `{"code":401,"message":"user does not have authorization required to set SuperAdmin status, see https://github.com/influxdata/chronograf/issues/2601 for more information"}`,
wantBody: `{"code":401,"message":"User does not have authorization required to set SuperAdmin status. See https://github.com/influxdata/chronograf/issues/2601 for more information."}`,
},
{
name: "Update a Chronograf user to super admin - with super admin context",
@ -1665,7 +1665,7 @@ func TestUserRequest_ValidCreate(t *testing.T) {
},
},
wantErr: true,
err: fmt.Errorf("provider required on Chronograf User request body"),
err: fmt.Errorf("Provider required on Chronograf User request body"),
},
{
name: "Invalid Scheme missing",
@ -1683,7 +1683,7 @@ func TestUserRequest_ValidCreate(t *testing.T) {
},
},
wantErr: true,
err: fmt.Errorf("scheme required on Chronograf User request body"),
err: fmt.Errorf("Scheme required on Chronograf User request body"),
},
{
name: "Invalid roles - bad role name",
@ -1702,7 +1702,7 @@ func TestUserRequest_ValidCreate(t *testing.T) {
},
},
wantErr: true,
err: fmt.Errorf("unknown role BilliettaSpecialRole, valid roles are 'member', 'reader', 'viewer', 'editor', 'admin', and '*'"),
err: fmt.Errorf("Unknown role BilliettaSpecialRole. Valid roles are 'member', 'viewer', 'editor', 'admin', and '*'"),
},
{
name: "Invalid roles - missing organization",
@ -1773,7 +1773,7 @@ func TestUserRequest_ValidUpdate(t *testing.T) {
u: &userRequest{},
},
wantErr: true,
err: fmt.Errorf("no roles to update"),
err: fmt.Errorf("No Roles to update"),
},
{
name: "Invalid - bad role name",
@ -1792,7 +1792,7 @@ func TestUserRequest_ValidUpdate(t *testing.T) {
},
},
wantErr: true,
err: fmt.Errorf("unknown role BillietaSpecialOrg, valid roles are 'member', 'reader', 'viewer', 'editor', 'admin', and '*'"),
err: fmt.Errorf("Unknown role BillietaSpecialOrg. Valid roles are 'member', 'viewer', 'editor', 'admin', and '*'"),
},
{
name: "Valid roles empty",
@ -1824,7 +1824,7 @@ func TestUserRequest_ValidUpdate(t *testing.T) {
},
},
wantErr: true,
err: fmt.Errorf("unknown role BillietaSpecialOrg, valid roles are 'member', 'reader', 'viewer', 'editor', 'admin', and '*'"),
err: fmt.Errorf("Unknown role BillietaSpecialOrg. Valid roles are 'member', 'viewer', 'editor', 'admin', and '*'"),
},
{
name: "Invalid - duplicate organization",

View File

@ -3,22 +3,15 @@
"@babel/plugin-transform-runtime",
"@babel/plugin-proposal-class-properties",
"babel-plugin-lodash",
[
"babel-plugin-module-resolver",
{
"root": ["./src"],
"alias": {
"src": "./src",
"shared": "./src/shared",
"style": "./src/style",
"utils": "./src/utils"
}
["babel-plugin-module-resolver", {
"root": ["./src"],
"alias": {
"src": "./src",
"shared": "./src/shared",
"style": "./src/style",
"utils": "./src/utils"
}
]
}]
],
"presets": [
"@babel/preset-env",
"@babel/preset-typescript",
"@babel/preset-react"
]
"presets": ["@babel/preset-env", "@babel/preset-react"]
}

View File

@ -1,25 +0,0 @@
{
"plugins": [
"babel-plugin-transform-import-meta",
"@babel/plugin-transform-runtime",
"@babel/plugin-proposal-class-properties",
"babel-plugin-lodash",
[
"babel-plugin-module-resolver",
{
"root": ["./src"],
"alias": {
"src": "./src",
"shared": "./src/shared",
"style": "./src/style",
"utils": "./src/utils"
}
}
]
],
"presets": [
"@babel/preset-env",
"@babel/preset-typescript",
"@babel/preset-react"
]
}

ui/.browserslistrc (new file, 3 lines)
View File

@ -0,0 +1,3 @@
last 2 versions
> 5%
not dead

View File

@ -7,9 +7,8 @@
"eslint-plugin-react",
"react",
"prettier",
"@babel",
"jest",
"no-only-tests"
"babel",
"jest"
],
"settings": {
"react": {
@ -182,7 +181,7 @@
"SwitchCase": 1
}
],
"linebreak-style": 0,
"linebreak-style": [2, "unix"],
"lines-around-comment": 0,
"max-depth": 0,
"max-len": 0,
@ -248,7 +247,7 @@
"react/sort-comp": 0,
"jest/no-disabled-tests": "warn",
"jest/no-focused-tests": "error",
"@babel/no-invalid-this": 1,
"babel/no-invalid-this": 1,
"@typescript-eslint/adjacent-overload-signatures": "error",
"@typescript-eslint/array-type": [
"error",
@ -325,7 +324,6 @@
"hoist": "all"
}
],
"@typescript-eslint/no-unsafe-argument": "off",
"@typescript-eslint/no-unsafe-assignment": "off",
"@typescript-eslint/no-unsafe-call": "off",
"@typescript-eslint/no-unsafe-member-access": "off",
@ -356,21 +354,6 @@
],
"@typescript-eslint/type-annotation-spacing": "off",
"@typescript-eslint/unbound-method": "off",
"@typescript-eslint/unified-signatures": "error",
"no-only-tests/no-only-tests": "error"
},
"overrides": [
{
"files": ["*.js"],
"rules": {
"@typescript-eslint/no-unsafe-argument": "off"
}
},
{
"files": ["*.ts", "*.tsx"],
"rules": {
"@babel/no-invalid-this": "off"
}
}
]
"@typescript-eslint/unified-signatures": "error"
}
}

ui/.gitignore (vendored, 3 changed lines)
View File

@ -9,6 +9,3 @@ log/
yarn-error.log
.cache/
build/
results/
screenshots/
videos/

View File

@ -1,14 +0,0 @@
{
"$schema": "https://on.cypress.io/cypress.schema.json",
"baseUrl": "http://localhost:8888",
"video": false,
"env": {
"ALLOW_SCREENSHOT": true,
"influxDBURL": "https://localhost:8086",
"username": "admin",
"password": "admin",
"connectionName": "E1M1",
"insecureSkipVerify": true,
"metaUrl": "https://localhost:8091"
}
}

View File

@ -1,42 +0,0 @@
# Cypress tests
## How to run the tests
You have to first start a mock OAuth2 server, Chronograf, and InfluxDB Enterprise before the tests can be run.
### OAuth2 Mock server
```bash
yarn test:e2e:oauth-mock
```
The default configuration of the OAuth2 server is explained in `../../etc/oauth2-server-mock/env.sh`
### Chronograf
Chronograf must be configured with authentication against the OAuth2 mock server:
```bash
cd ../..
./etc/oauth2-server-mock/oauth-for-chronograf.sh
# build chronograf from sources
make
# start it (herein with a custom file-based database for e2e tests)
./chronograf -b chronograf-e2e.db
```
### InfluxDB Enterprise
InfluxDB Enterprise is required by the tests. InfluxDB installation is automated with [kind](https://kind.sigs.k8s.io/) and [helm](https://helm.sh/). Set up your InfluxDB license key and start it with:
```bash
export INFLUXDB_ENTERPRISE_LICENSE_KEY=yourlicensekey
./local-chronograf-influxdb-enterprise.sh
```
... and wait, it takes a while
## Cypress tests
Run Cypress e2e tests in a headless mode using:
```bash
yarn test:e2e
```
or within a browser (Chrome) using:
```bash
yarn test:e2e:headed
```

View File

@ -1,29 +0,0 @@
{
"user": {
"name": "Smiley",
"role": ["reader", "viewer"],
"oauthProvider": "oauth-mock",
"oauth-schema": "oauth",
"orgs": ["Default"]
},
"organizations": [
{
"name": "SmallOrg",
"defaultRole": "reader",
"mapping": {
"scheme": "oauth2",
"provider": "oauth-mock",
"providerOrg": "default"
}
},
{
"name": "BigOrg",
"defaultRole": "member",
"mapping": {
"scheme": "oauth2",
"provider": "oauth-mock-2",
"providerOrg": "default-2"
}
}
]
}

View File

@ -1,60 +0,0 @@
{
"user": {
"name": "Smiley",
"password": "securePassword123",
"roles": [
"Sunny"
],
"db": [
{
"name": "New InfluxDB",
"permissions": [
"ReadData",
"WriteData"
]
}
]
},
"db": {
"name": "New InfluxDB",
"retentionPolicies": [
{
"name": "New Retention Policy",
"duration": "1h",
"shardDuration": "1d"
}
],
"measurements": [
{
"name": "NewMeasurementA",
"tagValues": [
"NewTagA",
"NewTagB"
],
"fieldValues": [
1
]
},
{
"name": "NewMeasurementB",
"tagValues": [
"NewTagC",
"NewTagD"
],
"fieldValues": [
2
]
}
]
},
"role": {
"name": "Sunny",
"permissions": [
"ReadData",
"WriteData"
],
"users": [
"Smiley"
]
}
}

View File

@ -1,8 +0,0 @@
{
"influxDBURL": "https://localhost:8086",
"username": "admin",
"password": "admin",
"connectionName": "E1M1",
"insecureSkipVerify": true,
"metaUrl": "https://localhost:8091"
}

ui/cypress/index.d.ts (vendored, 69 lines)
View File

@ -1,69 +0,0 @@
import 'jest'
import {
getByTestID,
removeInfluxDBConnections,
createInfluxDBConnection,
createDashboard,
deleteDashboards,
createDashboardWithCell,
OAuthLogin,
OAuthLogout,
OAuthLoginAsDiffUser,
createChronografUser,
deleteChronografUser,
deleteChronografUsers,
createOrg,
deleteOrg,
deleteOrgs,
createInfluxDBUser,
deleteInfluxDBUser,
deleteInfluxDBUsers,
createInfluxDBRole,
deleteInfluxDBRole,
deleteInfluxDBRoles,
createInfluxDB,
deleteInfluxDB,
deleteInfluxDBs,
toInitialState,
writePoints,
clickAttached,
changeUserInfo,
deleteMappings,
} from './support/commands'
declare global {
namespace Cypress {
interface Chainable {
getByTestID: typeof getByTestID
removeInfluxDBConnections: typeof removeInfluxDBConnections
createInfluxDBConnection: typeof createInfluxDBConnection
createDashboard: typeof createDashboard
deleteDashboards: typeof deleteDashboards
createDashboardWithCell: typeof createDashboardWithCell
OAuthLogin: typeof OAuthLogin
OAuthLogout: typeof OAuthLogout
OAuthLoginAsDiffUser: typeof OAuthLoginAsDiffUser
createChronografUser: typeof createChronografUser
deleteChronografUser: typeof deleteChronografUser
deleteChronografUsers: typeof deleteChronografUsers
createOrg: typeof createOrg
deleteOrg: typeof deleteOrg
deleteOrgs: typeof deleteOrgs
createInfluxDBUser: typeof createInfluxDBUser
deleteInfluxDBUser: typeof deleteInfluxDBUser
deleteInfluxDBUsers: typeof deleteInfluxDBUsers
createInfluxDBRole: typeof createInfluxDBRole
deleteInfluxDBRole: typeof deleteInfluxDBRole
deleteInfluxDBRoles: typeof deleteInfluxDBRoles
createInfluxDB: typeof createInfluxDB
deleteInfluxDB: typeof deleteInfluxDB
deleteInfluxDBs: typeof deleteInfluxDBs
toInitialState: typeof toInitialState
writePoints: typeof writePoints
clickAttached: typeof clickAttached
changeUserInfo: typeof changeUserInfo
deleteMappings: typeof deleteMappings
}
}
}

Some files were not shown because too many files have changed in this diff.