Compare commits
64 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f2fffadcd6 | ||
|
|
de06c1c1b8 | ||
|
|
830da0fda0 | ||
|
|
78fff0161a | ||
|
|
06d4641a8f | ||
|
|
e232629917 | ||
|
|
57a57932af | ||
|
|
62a226b1c3 | ||
|
|
a38dcc3ac7 | ||
|
|
c39b0e246b | ||
|
|
f8f1f506ed | ||
|
|
9d57a1f192 | ||
|
|
ba4a870632 | ||
|
|
ef9ec32c32 | ||
|
|
17235e4bd1 | ||
|
|
eb82b77782 | ||
|
|
3af8aa5c4f | ||
|
|
5b588af73c | ||
|
|
5ec6eccfac | ||
|
|
237e0e8631 | ||
|
|
7a165febf3 | ||
|
|
fd7c38c62f | ||
|
|
00519f1105 | ||
|
|
c8740d98b0 | ||
|
|
54030deaf6 | ||
|
|
880f6b63a1 | ||
|
|
d8ddfa5622 | ||
|
|
9af2ea1b53 | ||
|
|
d98ee66bd8 | ||
|
|
d3ecc963e6 | ||
|
|
0961ec9748 | ||
|
|
a2c7570c5c | ||
|
|
9cbe0abb44 | ||
|
|
2fecdaf6b4 | ||
|
|
a5f360e50e | ||
|
|
99f2386bd9 | ||
|
|
bffa956068 | ||
|
|
1b7a54c084 | ||
|
|
8128eb6c77 | ||
|
|
10c4d6eccc | ||
|
|
8a062dde35 | ||
|
|
041c1c3cb1 | ||
|
|
aba6df2e09 | ||
|
|
5192504748 | ||
|
|
8a0e2ac3a4 | ||
|
|
373d082363 | ||
|
|
94765294a2 | ||
|
|
1f482a5f0b | ||
|
|
ae04813045 | ||
|
|
3d7c52dcd1 | ||
|
|
81f8158010 | ||
|
|
5de014a91d | ||
|
|
777311a759 | ||
|
|
829b5d0d40 | ||
|
|
12878409db | ||
|
|
ffa9429c68 | ||
|
|
6649c5d75b | ||
|
|
d6e8129588 | ||
|
|
6a3a2f5f94 | ||
|
|
5d3a60d46e | ||
|
|
5f0a7f43c3 | ||
|
|
ebff883016 | ||
|
|
81ff856568 | ||
|
|
648aa62264 |
@@ -98,6 +98,34 @@ jobs:
|
||||
path: public/e2e-test/screenShots/theOutput
|
||||
destination: output-screenshots
|
||||
|
||||
end-to-end-test-release:
|
||||
docker:
|
||||
- image: circleci/node:10-browsers
|
||||
- image: grafana/grafana-dev:$CIRCLE_TAG
|
||||
steps:
|
||||
- run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s
|
||||
- checkout
|
||||
- restore_cache:
|
||||
key: dependency-cache-{{ checksum "yarn.lock" }}
|
||||
- run:
|
||||
name: yarn install
|
||||
command: 'yarn install --pure-lockfile --no-progress'
|
||||
no_output_timeout: 5m
|
||||
- save_cache:
|
||||
key: dependency-cache-{{ checksum "yarn.lock" }}
|
||||
paths:
|
||||
- node_modules
|
||||
- run:
|
||||
name: run end-to-end tests
|
||||
command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests'
|
||||
no_output_timeout: 5m
|
||||
- store_artifacts:
|
||||
path: public/e2e-test/screenShots/theTruth
|
||||
destination: expected-screenshots
|
||||
- store_artifacts:
|
||||
path: public/e2e-test/screenShots/theOutput
|
||||
destination: output-screenshots
|
||||
|
||||
codespell:
|
||||
docker:
|
||||
- image: circleci/python
|
||||
@@ -127,6 +155,15 @@ jobs:
|
||||
name: Lint Go
|
||||
command: 'make lint-go'
|
||||
|
||||
shellcheck:
|
||||
machine: true
|
||||
working_directory: ~/go/src/github.com/grafana/grafana
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: ShellCheck
|
||||
command: 'make shellcheck'
|
||||
|
||||
test-frontend:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
@@ -635,6 +672,8 @@ workflows:
|
||||
filters: *filter-only-master
|
||||
- lint-go:
|
||||
filters: *filter-only-master
|
||||
- shellcheck:
|
||||
filters: *filter-only-master
|
||||
- test-frontend:
|
||||
filters: *filter-only-master
|
||||
- test-backend:
|
||||
@@ -650,6 +689,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- build-oss-msi
|
||||
@@ -662,6 +702,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-master
|
||||
@@ -672,6 +713,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- build-all-enterprise
|
||||
@@ -683,6 +725,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-master
|
||||
@@ -704,6 +747,8 @@ workflows:
|
||||
filters: *filter-only-release
|
||||
- lint-go:
|
||||
filters: *filter-only-release
|
||||
- shellcheck:
|
||||
filters: *filter-only-release
|
||||
- test-frontend:
|
||||
filters: *filter-only-release
|
||||
- test-backend:
|
||||
@@ -719,6 +764,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- build-oss-msi
|
||||
@@ -731,6 +777,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
@@ -742,6 +789,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
@@ -752,9 +800,14 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
filters: *filter-only-release
|
||||
- end-to-end-test-release:
|
||||
requires:
|
||||
- grafana-docker-release
|
||||
filters: *filter-only-release
|
||||
|
||||
build-branches-and-prs:
|
||||
jobs:
|
||||
@@ -771,6 +824,10 @@ workflows:
|
||||
filters: *filter-not-release-or-master
|
||||
- lint-go:
|
||||
filters: *filter-not-release-or-master
|
||||
- lint-go:
|
||||
filters: *filter-not-release-or-master
|
||||
- shellcheck:
|
||||
filters: *filter-not-release-or-master
|
||||
- test-frontend:
|
||||
filters: *filter-not-release-or-master
|
||||
- test-backend:
|
||||
@@ -788,6 +845,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- cache-server-test
|
||||
@@ -799,6 +857,7 @@ workflows:
|
||||
- test-frontend
|
||||
- codespell
|
||||
- lint-go
|
||||
- shellcheck
|
||||
- mysql-integration-test
|
||||
- postgres-integration-test
|
||||
- cache-server-test
|
||||
|
||||
@@ -33,7 +33,7 @@ ENV NODE_ENV production
|
||||
RUN ./node_modules/.bin/grunt build
|
||||
|
||||
# Final container
|
||||
FROM ubuntu:latest
|
||||
FROM ubuntu:18.04
|
||||
|
||||
LABEL maintainer="Grafana team <hello@grafana.com>"
|
||||
|
||||
|
||||
10
Makefile
10
Makefile
@@ -2,8 +2,9 @@
|
||||
|
||||
.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go gosec revive golangci-lint go-vet test-go test-js test run clean devenv devenv-down revive-alerting
|
||||
|
||||
GO := GO111MODULE=on go
|
||||
GO_FILES := ./pkg/...
|
||||
GO = GO111MODULE=on go
|
||||
GO_FILES ?= ./pkg/...
|
||||
SH_FILES ?= $(shell find ./scripts -name *.sh)
|
||||
|
||||
all: deps build
|
||||
|
||||
@@ -111,6 +112,11 @@ go-vet:
|
||||
|
||||
lint-go: go-vet golangci-lint revive revive-alerting gosec
|
||||
|
||||
# with disabled SC1071 we are ignored some TCL,Expect `/usr/bin/env expect` scripts
|
||||
shellcheck: $(SH_FILES)
|
||||
@docker run --rm -v "$$PWD:/mnt" koalaman/shellcheck:stable \
|
||||
$(SH_FILES) -e SC1071
|
||||
|
||||
run: scripts/go/bin/bra
|
||||
@scripts/go/bin/bra run
|
||||
|
||||
|
||||
@@ -379,17 +379,45 @@ send_client_credentials_via_post = false
|
||||
|
||||
#################################### SAML Auth ###########################
|
||||
[auth.saml] # Enterprise only
|
||||
# Defaults to false. If true, the feature is enabled
|
||||
enabled = false
|
||||
private_key =
|
||||
private_key_path =
|
||||
|
||||
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
|
||||
certificate =
|
||||
|
||||
# Path to the public X.509 certificate. Used to sign requests to the IdP
|
||||
certificate_path =
|
||||
|
||||
# Base64-encoded private key. Used to decrypt assertions from the IdP
|
||||
private_key =
|
||||
|
||||
# Path to the private key. Used to decrypt assertions from the IdP
|
||||
private_key_path =
|
||||
|
||||
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata =
|
||||
|
||||
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata_path =
|
||||
|
||||
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata_url =
|
||||
|
||||
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
|
||||
max_issue_delay = 90s
|
||||
|
||||
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
|
||||
metadata_valid_duration = 48h
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
|
||||
assertion_attribute_name = displayName
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
|
||||
assertion_attribute_login = mail
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
|
||||
assertion_attribute_email = mail
|
||||
|
||||
#################################### Basic Auth ##########################
|
||||
[auth.basic]
|
||||
enabled = true
|
||||
|
||||
@@ -334,18 +334,46 @@
|
||||
;send_client_credentials_via_post = false
|
||||
|
||||
#################################### SAML Auth ###########################
|
||||
;[auth.saml] # Enterprise only
|
||||
[auth.saml] # Enterprise only
|
||||
# Defaults to false. If true, the feature is enabled.
|
||||
;enabled = false
|
||||
;private_key =
|
||||
;private_key_path =
|
||||
|
||||
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
|
||||
;certificate =
|
||||
|
||||
# Path to the public X.509 certificate. Used to sign requests to the IdP
|
||||
;certificate_path =
|
||||
|
||||
# Base64-encoded private key. Used to decrypt assertions from the IdP
|
||||
;private_key =
|
||||
|
||||
;# Path to the private key. Used to decrypt assertions from the IdP
|
||||
;private_key_path =
|
||||
|
||||
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
;idp_metadata =
|
||||
|
||||
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
;idp_metadata_path =
|
||||
|
||||
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
|
||||
;idp_metadata_url =
|
||||
|
||||
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds.
|
||||
;max_issue_delay = 90s
|
||||
|
||||
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours.
|
||||
;metadata_valid_duration = 48h
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
|
||||
;assertion_attribute_name = displayName
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
|
||||
;assertion_attribute_login = mail
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
|
||||
;assertion_attribute_email = mail
|
||||
|
||||
#################################### Grafana.com Auth ####################
|
||||
[auth.grafana_com]
|
||||
;enabled = false
|
||||
|
||||
@@ -28,38 +28,6 @@ search_filter = "(cn=%s)"
|
||||
# An array of base dns to search through
|
||||
search_base_dns = ["dc=grafana,dc=org"]
|
||||
|
||||
# In POSIX LDAP schemas, without memberOf attribute a secondary query must be made for groups.
|
||||
# This is done by enabling group_search_filter below. You must also set member_of= "cn"
|
||||
# in [servers.attributes] below.
|
||||
|
||||
# Users with nested/recursive group membership and an LDAP server that supports LDAP_MATCHING_RULE_IN_CHAIN
|
||||
# can set group_search_filter, group_search_filter_user_attribute, group_search_base_dns and member_of
|
||||
# below in such a way that the user's recursive group membership is considered.
|
||||
#
|
||||
# Nested Groups + Active Directory (AD) Example:
|
||||
#
|
||||
# AD groups store the Distinguished Names (DNs) of members, so your filter must
|
||||
# recursively search your groups for the authenticating user's DN. For example:
|
||||
#
|
||||
# group_search_filter = "(member:1.2.840.113556.1.4.1941:=%s)"
|
||||
# group_search_filter_user_attribute = "distinguishedName"
|
||||
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
|
||||
#
|
||||
# [servers.attributes]
|
||||
# ...
|
||||
# member_of = "distinguishedName"
|
||||
|
||||
## Group search filter, to retrieve the groups of which the user is a member (only set if memberOf attribute is not available)
|
||||
# group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
|
||||
## Group search filter user attribute defines what user attribute gets substituted for %s in group_search_filter.
|
||||
## Defaults to the value of username in [server.attributes]
|
||||
## Valid options are any of your values in [servers.attributes]
|
||||
## If you are using nested groups you probably want to set this and member_of in
|
||||
## [servers.attributes] to "distinguishedName"
|
||||
# group_search_filter_user_attribute = "distinguishedName"
|
||||
## An array of the base DNs to search through for groups. Typically uses ou=groups
|
||||
# group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
|
||||
|
||||
# Specify names of the ldap attributes your ldap uses
|
||||
[servers.attributes]
|
||||
name = "givenName"
|
||||
|
||||
57
devenv/docker/blocks/openldap/ldap_posix_dev.toml
Normal file
57
devenv/docker/blocks/openldap/ldap_posix_dev.toml
Normal file
@@ -0,0 +1,57 @@
|
||||
# To troubleshoot and get more log info enable ldap debug logging in grafana.ini
|
||||
# [log]
|
||||
# filters = ldap:debug
|
||||
|
||||
[[servers]]
|
||||
# Ldap server host (specify multiple hosts space separated)
|
||||
host = "127.0.0.1"
|
||||
# Default port is 389 or 636 if use_ssl = true
|
||||
port = 389
|
||||
# Set to true if ldap server supports TLS
|
||||
use_ssl = false
|
||||
# Set to true if connect ldap server with STARTTLS pattern (create connection in insecure, then upgrade to secure connection with TLS)
|
||||
start_tls = false
|
||||
# set to true if you want to skip ssl cert validation
|
||||
ssl_skip_verify = false
|
||||
# set to the path to your root CA certificate or leave unset to use system defaults
|
||||
# root_ca_cert = "/path/to/certificate.crt"
|
||||
|
||||
# Search user bind dn
|
||||
bind_dn = "cn=admin,dc=grafana,dc=org"
|
||||
# Search user bind password
|
||||
# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;"""
|
||||
bind_password = 'grafana'
|
||||
|
||||
# An array of base dns to search through
|
||||
search_base_dns = ["dc=grafana,dc=org"]
|
||||
|
||||
search_filter = "(uid=%s)"
|
||||
|
||||
group_search_filter = "(&(objectClass=posixGroup)(memberUid=%s))"
|
||||
group_search_filter_user_attribute = "uid"
|
||||
group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
|
||||
|
||||
[servers.attributes]
|
||||
name = "givenName"
|
||||
surname = "sn"
|
||||
username = "cn"
|
||||
member_of = "memberOf"
|
||||
email = "email"
|
||||
|
||||
# Map ldap groups to grafana org roles
|
||||
[[servers.group_mappings]]
|
||||
group_dn = "cn=posix-admins,ou=groups,dc=grafana,dc=org"
|
||||
org_role = "Admin"
|
||||
grafana_admin = true
|
||||
|
||||
# The Grafana organization database id, optional, if left out the default org (id 1) will be used
|
||||
# org_id = 1
|
||||
|
||||
[[servers.group_mappings]]
|
||||
group_dn = "cn=editors,ou=groups,dc=grafana,dc=org"
|
||||
org_role = "Editor"
|
||||
|
||||
[[servers.group_mappings]]
|
||||
# If you want to match all (or no ldap groups) then you can use wildcard
|
||||
group_dn = "*"
|
||||
org_role = "Viewer"
|
||||
@@ -12,7 +12,7 @@ After adding ldif files to `prepopulate`:
|
||||
|
||||
## Enabling LDAP in Grafana
|
||||
|
||||
Copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:
|
||||
If you want to use users/groups with `memberOf` support, copy the ldap_dev.toml file in this folder into your `conf` folder (it is gitignored already). To enable it in the .ini file to get Grafana to use this block:
|
||||
|
||||
```ini
|
||||
[auth.ldap]
|
||||
@@ -21,6 +21,8 @@ config_file = conf/ldap_dev.toml
|
||||
; allow_sign_up = true
|
||||
```
|
||||
|
||||
Otherwise, perform the same actions for the `ldap_dev_posix.toml` config.
|
||||
|
||||
## Groups & Users
|
||||
|
||||
admins
|
||||
@@ -38,3 +40,11 @@ editors
|
||||
ldap-editors
|
||||
no groups
|
||||
ldap-viewer
|
||||
|
||||
|
||||
## Groups & Users (POSIX)
|
||||
|
||||
admins
|
||||
ldap-posix-admin
|
||||
no groups
|
||||
ldap-posix
|
||||
|
||||
@@ -78,3 +78,31 @@ objectClass: inetOrgPerson
|
||||
objectClass: organizationalPerson
|
||||
sn: ldap-torkel
|
||||
cn: ldap-torkel
|
||||
|
||||
# admin for posix group (without support for memberOf attribute)
|
||||
dn: uid=ldap-posix-admin,ou=users,dc=grafana,dc=org
|
||||
mail: ldap-posix-admin@grafana.com
|
||||
userPassword: grafana
|
||||
objectclass: top
|
||||
objectclass: posixAccount
|
||||
objectclass: inetOrgPerson
|
||||
homedirectory: /home/ldap-posix-admin
|
||||
sn: ldap-posix-admin
|
||||
cn: ldap-posix-admin
|
||||
uid: ldap-posix-admin
|
||||
uidnumber: 1
|
||||
gidnumber: 1
|
||||
|
||||
# user for posix group (without support for memberOf attribute)
|
||||
dn: uid=ldap-posix,ou=users,dc=grafana,dc=org
|
||||
mail: ldap-posix@grafana.com
|
||||
userPassword: grafana
|
||||
objectclass: top
|
||||
objectclass: posixAccount
|
||||
objectclass: inetOrgPerson
|
||||
homedirectory: /home/ldap-posix
|
||||
sn: ldap-posix
|
||||
cn: ldap-posix
|
||||
uid: ldap-posix
|
||||
uidnumber: 2
|
||||
gidnumber: 2
|
||||
|
||||
@@ -23,3 +23,21 @@ objectClass: groupOfNames
|
||||
member: cn=ldap-torkel,ou=users,dc=grafana,dc=org
|
||||
member: cn=ldap-daniel,ou=users,dc=grafana,dc=org
|
||||
member: cn=ldap-leo,ou=users,dc=grafana,dc=org
|
||||
|
||||
# -- POSIX --
|
||||
|
||||
# posix admin group (without support for memberOf attribute)
|
||||
dn: cn=posix-admins,ou=groups,dc=grafana,dc=org
|
||||
cn: admins
|
||||
objectClass: top
|
||||
objectClass: posixGroup
|
||||
gidNumber: 1
|
||||
memberUid: ldap-posix-admin
|
||||
|
||||
# posix group (without support for memberOf attribute)
|
||||
dn: cn=posix,ou=groups,dc=grafana,dc=org
|
||||
cn: viewers
|
||||
objectClass: top
|
||||
objectClass: posixGroup
|
||||
gidNumber: 2
|
||||
memberUid: ldap-posix
|
||||
|
||||
@@ -60,9 +60,9 @@ aliases = ["/v1.1", "/guides/reference/admin", "/v3.1"]
|
||||
<h4>Provisioning</h4>
|
||||
<p>A guide to help you automate your Grafana setup & configuration.</p>
|
||||
</a>
|
||||
<a href="{{< relref "guides/whats-new-in-v6-2.md" >}}" class="nav-cards__item nav-cards__item--guide">
|
||||
<h4>What's new in v6.2</h4>
|
||||
<p>Article on all the new cool features and enhancements in v6.2</p>
|
||||
<a href="{{< relref "guides/whats-new-in-v6-3.md" >}}" class="nav-cards__item nav-cards__item--guide">
|
||||
<h4>What's new in v6.3</h4>
|
||||
<p>Article on all the new cool features and enhancements in v6.3</p>
|
||||
</a>
|
||||
<a href="{{< relref "tutorials/screencasts.md" >}}" class="nav-cards__item nav-cards__item--guide">
|
||||
<h4>Screencasts</h4>
|
||||
|
||||
@@ -99,3 +99,18 @@ allow_sign_up = true
|
||||
allowed_organizations = github google
|
||||
```
|
||||
|
||||
### Team Sync (Enterprise only)
|
||||
|
||||
> Only available in Grafana Enterprise v6.3+
|
||||
|
||||
With Team Sync you can map your GitHub org teams to teams in Grafana so that your users will automatically be added to
|
||||
the correct teams.
|
||||
|
||||
Your GitHub teams can be referenced in two ways:
|
||||
|
||||
- `https://github.com/orgs/<org>/teams/<team name>`
|
||||
- `@<org>/<team name>`
|
||||
|
||||
Example: `@grafana/developers`
|
||||
|
||||
[Learn more about Team Sync]({{< relref "auth/enhanced_ldap.md" >}})
|
||||
|
||||
@@ -126,8 +126,6 @@ group_search_base_dns = ["ou=groups,dc=grafana,dc=org"]
|
||||
group_search_filter_user_attribute = "uid"
|
||||
```
|
||||
|
||||
Also set `member_of = "dn"` in the `[servers.attributes]` section.
|
||||
|
||||
### Group Mappings
|
||||
|
||||
In `[[servers.group_mappings]]` you can map an LDAP group to a Grafana organization and role. These will be synced every time the user logs in, with LDAP being
|
||||
|
||||
178
docs/sources/auth/saml.md
Normal file
178
docs/sources/auth/saml.md
Normal file
@@ -0,0 +1,178 @@
|
||||
+++
|
||||
title = "SAML Authentication"
|
||||
description = "Grafana SAML Authentication"
|
||||
keywords = ["grafana", "saml", "documentation", "saml-auth"]
|
||||
aliases = ["/auth/saml/"]
|
||||
type = "docs"
|
||||
[menu.docs]
|
||||
name = "SAML"
|
||||
parent = "authentication"
|
||||
weight = 5
|
||||
+++
|
||||
|
||||
# SAML Authentication
|
||||
|
||||
> SAML Authentication integration is only available in Grafana Enterprise. Read more about [Grafana Enterprise]({{< relref "enterprise" >}}).
|
||||
|
||||
> Only available in Grafana v6.3+
|
||||
|
||||
The SAML authentication integration allows your Grafana users to log in by
|
||||
using an external SAML Identity Provider (IdP). To enable this, Grafana becomes
|
||||
a Service Provider (SP) in the authentication flow, interacting with the IdP to
|
||||
exchange user information.
|
||||
|
||||
## Supported SAML
|
||||
|
||||
The SAML single-sign-on (SSO) standard is varied and flexible. Our implementation contains the subset of features needed to provide a smooth authentication experience into Grafana.
|
||||
|
||||
> Should you encounter any problems with our implementation, please don't hesitate to contact us.
|
||||
|
||||
At the moment of writing, Grafana supports:
|
||||
|
||||
1. From the Service Provider (SP) to the Identity Provider (IdP)
|
||||
|
||||
- `HTTP-POST` binding
|
||||
- `HTTP-Redirect` binding
|
||||
|
||||
2. From the Identity Provider (IdP) to the Service Provider (SP)
|
||||
|
||||
- `HTTP-POST` binding
|
||||
|
||||
3. In terms of security, we currently support signed and encrypted Assertions. However, signed or encrypted requests are not supported.
|
||||
|
||||
4. In terms of initiation, only SP-initiated requests are supported. There's no support for IdP-initiated requests.
|
||||
|
||||
## Set up SAML Authentication
|
||||
|
||||
To use the SAML integration, you need to enable SAML in the [main config file]({{< relref "installation/configuration.md" >}}).
|
||||
|
||||
```bash
|
||||
[auth.saml]
|
||||
# Defaults to false. If true, the feature is enabled
|
||||
enabled = true
|
||||
|
||||
# Base64-encoded public X.509 certificate. Used to sign requests to the IdP
|
||||
certificate =
|
||||
|
||||
# Path to the public X.509 certificate. Used to sign requests to the IdP
|
||||
certificate_path =
|
||||
|
||||
# Base64-encoded private key. Used to decrypt assertions from the IdP
|
||||
private_key =
|
||||
|
||||
# Path to the private key. Used to decrypt assertions from the IdP
|
||||
private_key_path =
|
||||
|
||||
# Base64-encoded IdP SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata =
|
||||
|
||||
# Path to the SAML metadata XML. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata_path =
|
||||
|
||||
# URL to fetch SAML IdP metadata. Used to verify and obtain binding locations from the IdP
|
||||
idp_metadata_url =
|
||||
|
||||
# Duration, since the IdP issued a response and the SP is allowed to process it. Defaults to 90 seconds
|
||||
max_issue_delay =
|
||||
|
||||
# Duration, for how long the SP's metadata should be valid. Defaults to 48 hours
|
||||
metadata_valid_duration =
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's name
|
||||
assertion_attribute_name = displayName
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's login handle
|
||||
assertion_attribute_login = mail
|
||||
|
||||
# Friendly name or name of the attribute within the SAML assertion to use as the user's email
|
||||
assertion_attribute_email = mail
|
||||
```
|
||||
|
||||
Important to note:
|
||||
|
||||
- like any other Grafana configuration, use of [environment variables for these options is supported]({{< relref "installation/configuration.md#using-environment-variables" >}})
|
||||
- only one form of configuration option is required. Using multiple forms, e.g. both `certificate` and `certificate_path` will result in an error
|
||||
|
||||
## Grafana Configuration
|
||||
|
||||
An example of a working configuration looks like:
|
||||
|
||||
```bash
|
||||
[auth.saml]
|
||||
enabled = true
|
||||
certificate_path = "/path/to/certificate.cert"
|
||||
private_key_path = "/path/to/private_key.pem"
|
||||
metadata_path = "/my/metadata.xml"
|
||||
max_issue_delay = 90s
|
||||
metadata_valid_duration = 48h
|
||||
assertion_attribute_name = displayName
|
||||
assertion_attribute_login = mail
|
||||
assertion_attribute_email = mail
|
||||
```
|
||||
|
||||
And here is a comprehensive list of the options:
|
||||
|
||||
| Setting | Required | Description | Default |
|
||||
| ----------------------------------------------------------- | -------- | -------------------------------------------------------------------------------------------------- | ------------- |
|
||||
| `enabled`                                                   | No       | Whether SAML authentication is allowed                                                             | `false`       |
|
||||
| `certificate` or `certificate_path` | Yes | Base64-encoded string or Path for the SP X.509 certificate | |
|
||||
| `private_key` or `private_key_path` | Yes | Base64-encoded string or Path for the SP private key | |
|
||||
| `idp_metadata` or `idp_metadata_path` or `idp_metadata_url` | Yes | Base64-encoded string, Path or URL for the IdP SAML metadata XML | |
|
||||
| `max_issue_delay` | No | Duration, since the IdP issued a response and the SP is allowed to process it | `90s` |
|
||||
| `metadata_valid_duration` | No | Duration, for how long the SP's metadata should be valid | `48h` |
|
||||
| `assertion_attribute_name` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's name | `displayName` |
|
||||
| `assertion_attribute_login` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's login handle | `mail` |
|
||||
| `assertion_attribute_email` | No | Friendly name or name of the attribute within the SAML assertion to use as the user's email | `mail` |
|
||||
|
||||
### Cert and Private Key
|
||||
|
||||
The SAML SSO standard uses asymmetric encryption to exchange information between the SP (Grafana) and the IdP. To perform such encryption, you need a public part and a private part. In this case, the X.509 certificate provides the public part, while the private key provides the private part.
|
||||
|
||||
Grafana supports two ways of specifying both the `certificate` and `private_key`. Without a suffix (e.g. `certificate=`), the configuration assumes you've supplied the base64-encoded file contents. However, if specified with the `_path` suffix (e.g. `certificate_path=`) Grafana will treat it as a file path and attempt to read the file from the file system.
|
||||
|
||||
### IdP Metadata
|
||||
|
||||
Expanding on the above, we'll also need the public part from our IdP for message verification. The SAML IdP metadata XML tells us where and how we should exchange the user information.
|
||||
|
||||
Currently, we support three ways of specifying the IdP metadata. Without a suffix `idp_metadata=` Grafana assumes base64-encoded XML file contents, with the `_path` suffix assumes a file path and attempts to read the file from the file system and with the `_url` suffix assumes a URL and attempts to load the metadata from the given location.
|
||||
|
||||
### Max Issue Delay
|
||||
|
||||
Prevention of SAML response replay attacks and internal clock skews between the SP (Grafana), and the IdP is covered. You can set a maximum amount of time between the IdP issuing a response and the SP (Grafana) processing it.
|
||||
|
||||
The configuration options is specified as a duration e.g. `max_issue_delay = 90s` or `max_issue_delay = 1h`
|
||||
|
||||
### Metadata valid duration
|
||||
|
||||
As an SP, our metadata is likely to expire at some point, e.g. due to a certificate rotation or change of location binding. Grafana allows you to specify for how long the metadata should be valid. Leveraging the standard's `validUntil` field, you can tell consumers until when your metadata is going to be valid. The duration is computed by adding the duration to the current time.
|
||||
|
||||
The configuration option is specified as a duration e.g. `metadata_valid_duration = 48h`
|
||||
|
||||
## Identity Provider (IdP) registration
|
||||
|
||||
For the SAML integration to work correctly, you need to make the IdP aware of the SP.
|
||||
|
||||
The integration provides two key endpoints as part of Grafana:
|
||||
|
||||
- The `/saml/metadata` endpoint. Which contains the SP's metadata. You can either download and upload it manually or make the IdP request it directly from the endpoint. Some providers name it Identifier or Entity ID.
|
||||
|
||||
- The `/saml/acs` endpoint. Which is intended to receive the ACS (Assertion Consumer Service) callback. Some providers name it SSO URL or Reply URL.
|
||||
|
||||
## Assertion mapping
|
||||
|
||||
During the SAML SSO authentication flow, we receive the ACS (Assertion Consumer Service) callback. The callback contains all the relevant information of the user under authentication embedded in the SAML response. Grafana parses the response to create (or update) the user within its internal database.
|
||||
|
||||
For Grafana to map the user information, it looks at the individual attributes within the assertion. You can think of these attributes as Key/Value pairs (although, they contain more information than that).
|
||||
|
||||
Grafana provides configuration options that let you modify which keys to look at for these values. The data we need to create the user in Grafana is Name, Login handle, and email.
|
||||
|
||||
An example is `assertion_attribute_name = "givenName"` where Grafana looks within the assertion for an attribute with a friendly name or name of `givenName`. Both, the friendly name (e.g. `givenName`) or the name (e.g. `urn:oid:2.5.4.42`) can be used interchangeably as the value for the configuration option.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
To troubleshoot and get more log info enable saml debug logging in the [main config file]({{< relref "installation/configuration.md" >}}).
|
||||
|
||||
```bash
|
||||
[log]
|
||||
filters = saml.auth:debug
|
||||
```
|
||||
@@ -29,6 +29,10 @@ With Grafana Enterprise you can set up synchronization between LDAP Groups and T
|
||||
|
||||
Datasource permissions allow you to restrict query access to only specific Teams and Users. [Learn More]({{< relref "permissions/datasource_permissions.md" >}}).
|
||||
|
||||
### SAML Authentication
|
||||
|
||||
Enables your Grafana Enterprise users to authenticate with SAML. [Learn More]({{< relref "auth/saml.md" >}}).
|
||||
|
||||
### Premium Plugins
|
||||
|
||||
With a Grafana Enterprise license you will get access to premium plugins, including:
|
||||
|
||||
146
docs/sources/guides/whats-new-in-v6-3.md
Normal file
146
docs/sources/guides/whats-new-in-v6-3.md
Normal file
@@ -0,0 +1,146 @@
|
||||
+++
|
||||
title = "What's New in Grafana v6.3"
|
||||
description = "Feature & improvement highlights for Grafana v6.3"
|
||||
keywords = ["grafana", "new", "documentation", "6.3"]
|
||||
type = "docs"
|
||||
[menu.docs]
|
||||
name = "Version 6.3"
|
||||
identifier = "v6.3"
|
||||
parent = "whatsnew"
|
||||
weight = -14
|
||||
+++
|
||||
|
||||
# What's New in Grafana v6.3
|
||||
|
||||
For all details please read the full [CHANGELOG.md](https://github.com/grafana/grafana/blob/master/CHANGELOG.md)
|
||||
|
||||
## Highlights
|
||||
|
||||
- New Explore features
|
||||
- [Loki Live Streaming]({{< relref "#loki-live-streaming" >}})
|
||||
- [Loki Context Queries]({{< relref "#loki-context-queries" >}})
|
||||
- [Elasticsearch Logs Support]({{< relref "#elasticsearch-logs-support" >}})
|
||||
- [InfluxDB Logs Support]({{< relref "#influxdb-logs-support" >}})
|
||||
- [Data links]({{< relref "#data-links" >}})
|
||||
- [New Time Picker]({{< relref "#new-time-picker" >}})
|
||||
- [Graph Area Gradients]({{< relref "#graph-gradients" >}}) - A new graph display option!
|
||||
- Grafana Enterprise
|
||||
- [LDAP Active Sync]({{< relref "#ldap-active-sync" >}}) - LDAP Active Sync
|
||||
- [SAML Authentication]({{< relref "#saml-authentication" >}}) - SAML Authentication
|
||||
|
||||
## Explore improvements
|
||||
|
||||
This release adds a ton of enhancements to Explore. Both in terms of new general enhancements but also in
|
||||
new data source specific features.
|
||||
|
||||
### Loki live streaming
|
||||
|
||||
For log queries using the Loki data source you can now stream logs live directly to the Explore UI.
|
||||
|
||||
### Loki context queries
|
||||
|
||||
After finding a log line through the heavy use of query filters it can then be useful to
|
||||
see the log lines surrounding the line your searched for. The `show context` feature
|
||||
allows you to view lines before and after the line of interest.
|
||||
|
||||
### Elasticsearch logs support
|
||||
|
||||
This release adds support for searching & visualizing logs stored in Elasticsearch in the Explore mode. With a special
|
||||
simplified query interface specifically designed for logs search.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v63/elasticsearch_explore_logs.png" max-width="600px" caption="New Time Picker" >}}
|
||||
|
||||
Please read [Using Elasticsearch in Grafana](/features/datasources/elasticsearch/#querying-logs-beta) for more detailed information on how to get started and use it.
|
||||
|
||||
### InfluxDB logs support
|
||||
|
||||
This release adds support for searching & visualizing logs stored in InfluxDB in the Explore mode. With a special
|
||||
simplified query interface specifically designed for logs search.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v63/influxdb_explore_logs.png" max-width="600px" caption="New Time Picker" >}}
|
||||
|
||||
Please read [Using InfluxDB in Grafana](/features/datasources/influxdb/#querying-logs-beta) for more detailed information on how to get started and use it.
|
||||
|
||||
## Data Links
|
||||
|
||||
We have simplified the UI for defining panel drilldown links (and renamed them to Panel links). We have also added a
|
||||
new type of link named `Data link`. The reason to have two different types is to make it clear how they are used
|
||||
and what variables you can use in the link. Panel links are only shown in the top left corner of
|
||||
the panel and you cannot reference series name or any data field.
|
||||
|
||||
While `Data links` are used by the actual visualization and can reference data fields.
|
||||
|
||||
Example:
|
||||
```url
|
||||
http://my-grafana.com/d/bPCI6VSZz/other-dashboard?var-server=${__series_name}
|
||||
```
|
||||
|
||||
You have access to these variables:
|
||||
|
||||
Name | Description
|
||||
------------ | -------------
|
||||
*${__series_name}* | The name of the time series (or table)
|
||||
*${__value_time}* | The time of the point your clicking on (in millisecond epoch)
|
||||
*${__url_time_range}* | Interpolates as the full time range (i.e. from=21312323412&to=21312312312)
|
||||
*${__all_variables}* | Adds all current variables (and current values) to the url
|
||||
|
||||
You can then click on point in the Graph.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v63/graph_datalink.png" max-width="400px" caption="New Time Picker" >}}
|
||||
|
||||
For now only the Graph panel supports `Data links` but we hope to add these to many visualizations.
|
||||
|
||||
## New Time Picker
|
||||
|
||||
The time picker has been re-designed and with a more basic design that makes accessing quick ranges more easy.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v63/time_picker.png" max-width="400px" caption="New Time Picker" >}}
|
||||
|
||||
## Graph Gradients
|
||||
|
||||
Want more eye candy in your graphs? Then the fill gradient option might be for you! Works really well for
|
||||
graphs with only a single series.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v63/graph_gradient_area.jpeg" max-width="800px" caption="Graph Gradient Area" >}}
|
||||
|
||||
Looks really nice in light theme as well.
|
||||
|
||||
{{< docs-imagebox img="/img/docs/v63/graph_gradients_white.png" max-width="800px" caption="Graph Gradient Area" >}}
|
||||
|
||||
## Grafana Enterprise
|
||||
|
||||
Substantial refactoring and improvements to the external auth systems has gone in to this release making the features
|
||||
listed below possible as well as laying a foundation for future enhancements.
|
||||
|
||||
### LDAP Active Sync
|
||||
|
||||
This is a new Enterprise feature that enables background syncing of user information, org role and teams memberships.
|
||||
This syncing is otherwise only done at login time. With this feature you can schedule how often this user synchronization should
|
||||
occur.
|
||||
|
||||
For example, lets say a user is removed from an LDAP group. In previous versions of Grafana an admin would have to
|
||||
wait for the user to logout or the session to expire for the Grafana permissions to update, a process that can take days.
|
||||
|
||||
With active sync the user would be automatically removed from the corresponding team in Grafana or even logged out and disabled if no longer
|
||||
belonging to an LDAP group that gives them access to Grafana.
|
||||
|
||||
[Read more](/auth/enhanced_ldap/#active-ldap-synchronization)
|
||||
|
||||
### SAML Authentication
|
||||
|
||||
Built-in support for SAML is now available in Grafana Enterprise.
|
||||
|
||||
[See docs]({{< relref "auth/saml.md" >}})
|
||||
|
||||
### Team Sync for GitHub OAuth
|
||||
|
||||
When setting up OAuth with GitHub it's now possible to sync GitHub teams with Teams in Grafana.
|
||||
|
||||
[See docs]({{< relref "auth/github.md" >}})
|
||||
|
||||
### Team Sync for Auth Proxy
|
||||
|
||||
We've added support for enriching the Auth Proxy headers with Teams information, which makes it possible
|
||||
to use Team Sync with Auth Proxy.
|
||||
|
||||
[See docs](/auth/auth-proxy/#auth-proxy-authentication)
|
||||
@@ -37,15 +37,11 @@ export class ConfigCtrl {
|
||||
|
||||
postUpdate() {
|
||||
if (!this.appModel.enabled) {
|
||||
return this.$q.resolve();
|
||||
return;
|
||||
}
|
||||
return this.appEditCtrl.importDashboards().then(() => {
|
||||
this.enabled = true;
|
||||
return {
|
||||
url: "plugins/raintank-kubernetes-app/page/clusters",
|
||||
message: "Kubernetes App enabled!"
|
||||
};
|
||||
});
|
||||
|
||||
// TODO, whatever you want
|
||||
console.log('Post Update:', this);
|
||||
}
|
||||
}
|
||||
ConfigCtrl.templateUrl = 'components/config/config.html';
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
[
|
||||
{ "version": "v6.2", "path": "/", "archived": false, "current": true },
|
||||
{ "version": "v6.3", "path": "/", "archived": false, "current": true },
|
||||
{ "version": "v6.2", "path": "/v6.2", "archived": true },
|
||||
{ "version": "v6.1", "path": "/v6.1", "archived": true },
|
||||
{ "version": "v6.0", "path": "/v6.0", "archived": true },
|
||||
{ "version": "v5.4", "path": "/v5.4", "archived": true },
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
"company": "Grafana Labs"
|
||||
},
|
||||
"name": "grafana",
|
||||
"version": "6.3.0-pre",
|
||||
"version": "6.3.1",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/grafana/grafana.git"
|
||||
|
||||
@@ -1,3 +1,6 @@
|
||||
import { Threshold } from './threshold';
|
||||
import { ValueMapping } from './valueMapping';
|
||||
|
||||
export enum LoadingState {
|
||||
NotStarted = 'NotStarted',
|
||||
Loading = 'Loading',
|
||||
@@ -49,6 +52,12 @@ export interface Field {
|
||||
decimals?: number | null; // Significant digits (for display)
|
||||
min?: number | null;
|
||||
max?: number | null;
|
||||
|
||||
// Convert input values into a display value
|
||||
mappings?: ValueMapping[];
|
||||
|
||||
// Must be sorted by 'value', first value is always -Infinity
|
||||
thresholds?: Threshold[];
|
||||
}
|
||||
|
||||
export interface Labels {
|
||||
|
||||
@@ -2,6 +2,7 @@ export * from './data';
|
||||
export * from './dataLink';
|
||||
export * from './logs';
|
||||
export * from './navModel';
|
||||
export * from './select';
|
||||
export * from './time';
|
||||
export * from './threshold';
|
||||
export * from './utils';
|
||||
|
||||
10
packages/grafana-data/src/types/select.ts
Normal file
10
packages/grafana-data/src/types/select.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
/**
|
||||
* Used in select elements
|
||||
*/
|
||||
export interface SelectableValue<T = any> {
|
||||
label?: string;
|
||||
value?: T;
|
||||
imgUrl?: string;
|
||||
description?: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
@@ -1,5 +1,4 @@
|
||||
export interface Threshold {
|
||||
index: number;
|
||||
value: number;
|
||||
color: string;
|
||||
}
|
||||
|
||||
@@ -5,6 +5,18 @@ import { TimeZone } from '../types';
|
||||
|
||||
const units: DurationUnit[] = ['y', 'M', 'w', 'd', 'h', 'm', 's'];
|
||||
|
||||
export function isMathString(text: string | DateTime | Date): boolean {
|
||||
if (!text) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (typeof text === 'string' && (text.substring(0, 3) === 'now' || text.includes('||'))) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses different types input to a moment instance. There is a specific formatting language that can be used
|
||||
* if text arg is string. See unit tests for examples.
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
import { getFieldReducers, ReducerID, reduceField } from './index';
|
||||
import { fieldReducers, ReducerID, reduceField } from './fieldReducer';
|
||||
|
||||
import _ from 'lodash';
|
||||
import { DataFrame } from '../types/data';
|
||||
|
||||
/**
|
||||
* Run a reducer and get back the value
|
||||
*/
|
||||
function reduce(series: DataFrame, fieldIndex: number, id: string): any {
|
||||
return reduceField({ series, fieldIndex, reducers: [id] })[id];
|
||||
}
|
||||
|
||||
describe('Stats Calculators', () => {
|
||||
const basicTable = {
|
||||
@@ -9,29 +17,16 @@ describe('Stats Calculators', () => {
|
||||
};
|
||||
|
||||
it('should load all standard stats', () => {
|
||||
const names = [
|
||||
ReducerID.sum,
|
||||
ReducerID.max,
|
||||
ReducerID.min,
|
||||
ReducerID.logmin,
|
||||
ReducerID.mean,
|
||||
ReducerID.last,
|
||||
ReducerID.first,
|
||||
ReducerID.count,
|
||||
ReducerID.range,
|
||||
ReducerID.diff,
|
||||
ReducerID.step,
|
||||
ReducerID.delta,
|
||||
// ReducerID.allIsZero,
|
||||
// ReducerID.allIsNull,
|
||||
];
|
||||
const stats = getFieldReducers(names);
|
||||
expect(stats.length).toBe(names.length);
|
||||
for (const id of Object.keys(ReducerID)) {
|
||||
const reducer = fieldReducers.getIfExists(id);
|
||||
const found = reducer ? reducer.id : '<NOT FOUND>';
|
||||
expect(found).toEqual(id);
|
||||
}
|
||||
});
|
||||
|
||||
it('should fail to load unknown stats', () => {
|
||||
const names = ['not a stat', ReducerID.max, ReducerID.min, 'also not a stat'];
|
||||
const stats = getFieldReducers(names);
|
||||
const stats = fieldReducers.list(names);
|
||||
expect(stats.length).toBe(2);
|
||||
|
||||
const found = stats.map(v => v.id);
|
||||
@@ -92,6 +87,34 @@ describe('Stats Calculators', () => {
|
||||
expect(stats.delta).toEqual(300);
|
||||
});
|
||||
|
||||
it('consistenly check allIsNull/allIsZero', () => {
|
||||
const empty = {
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [],
|
||||
};
|
||||
const allNull = ({
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [null, null, null, null],
|
||||
} as unknown) as DataFrame;
|
||||
const allNull2 = {
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [[null], [null], [null], [null]],
|
||||
};
|
||||
const allZero = {
|
||||
fields: [{ name: 'A' }],
|
||||
rows: [[0], [0], [0], [0]],
|
||||
};
|
||||
|
||||
expect(reduce(empty, 0, ReducerID.allIsNull)).toEqual(true);
|
||||
expect(reduce(allNull, 0, ReducerID.allIsNull)).toEqual(true);
|
||||
expect(reduce(allNull2, 0, ReducerID.allIsNull)).toEqual(true);
|
||||
|
||||
expect(reduce(empty, 0, ReducerID.allIsZero)).toEqual(false);
|
||||
expect(reduce(allNull, 0, ReducerID.allIsZero)).toEqual(false);
|
||||
expect(reduce(allNull2, 0, ReducerID.allIsZero)).toEqual(false);
|
||||
expect(reduce(allZero, 0, ReducerID.allIsZero)).toEqual(true);
|
||||
});
|
||||
|
||||
it('consistent results for first/last value with null', () => {
|
||||
const info = [
|
||||
{
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
// Libraries
|
||||
import isNumber from 'lodash/isNumber';
|
||||
|
||||
import { DataFrame, NullValueMode } from '../types/index';
|
||||
import { DataFrame, NullValueMode } from '../types';
|
||||
import { Registry, RegistryItem } from './registry';
|
||||
|
||||
export enum ReducerID {
|
||||
sum = 'sum',
|
||||
@@ -34,38 +35,13 @@ export interface FieldCalcs {
|
||||
// Internal function
|
||||
type FieldReducer = (data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean) => FieldCalcs;
|
||||
|
||||
export interface FieldReducerInfo {
|
||||
id: string;
|
||||
name: string;
|
||||
description: string;
|
||||
alias?: string; // optional secondary key. 'avg' vs 'mean', 'total' vs 'sum'
|
||||
|
||||
export interface FieldReducerInfo extends RegistryItem {
|
||||
// Internal details
|
||||
emptyInputResult?: any; // typically null, but some things like 'count' & 'sum' should be zero
|
||||
standard: boolean; // The most common stats can all be calculated in a single pass
|
||||
reduce?: FieldReducer;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param ids list of stat names or null to get all of them
|
||||
*/
|
||||
export function getFieldReducers(ids?: string[]): FieldReducerInfo[] {
|
||||
if (ids === null || ids === undefined) {
|
||||
if (!hasBuiltIndex) {
|
||||
getById(ReducerID.mean);
|
||||
}
|
||||
return listOfStats;
|
||||
}
|
||||
|
||||
return ids.reduce((list, id) => {
|
||||
const stat = getById(id);
|
||||
if (stat) {
|
||||
list.push(stat);
|
||||
}
|
||||
return list;
|
||||
}, new Array<FieldReducerInfo>());
|
||||
}
|
||||
|
||||
interface ReduceFieldOptions {
|
||||
series: DataFrame;
|
||||
fieldIndex: number;
|
||||
@@ -83,7 +59,7 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
return {};
|
||||
}
|
||||
|
||||
const queue = getFieldReducers(reducers);
|
||||
const queue = fieldReducers.list(reducers);
|
||||
|
||||
// Return early for empty series
|
||||
// This lets the concrete implementations assume at least one row
|
||||
@@ -122,122 +98,107 @@ export function reduceField(options: ReduceFieldOptions): FieldCalcs {
|
||||
//
|
||||
// ------------------------------------------------------------------------------
|
||||
|
||||
// private registry of all stats
|
||||
interface TableStatIndex {
|
||||
[id: string]: FieldReducerInfo;
|
||||
}
|
||||
|
||||
const listOfStats: FieldReducerInfo[] = [];
|
||||
const index: TableStatIndex = {};
|
||||
let hasBuiltIndex = false;
|
||||
|
||||
function getById(id: string): FieldReducerInfo | undefined {
|
||||
if (!hasBuiltIndex) {
|
||||
[
|
||||
{
|
||||
id: ReducerID.lastNotNull,
|
||||
name: 'Last (not null)',
|
||||
description: 'Last non-null value',
|
||||
standard: true,
|
||||
alias: 'current',
|
||||
reduce: calculateLastNotNull,
|
||||
},
|
||||
{
|
||||
id: ReducerID.last,
|
||||
name: 'Last',
|
||||
description: 'Last Value',
|
||||
standard: true,
|
||||
reduce: calculateLast,
|
||||
},
|
||||
{ id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
|
||||
{
|
||||
id: ReducerID.firstNotNull,
|
||||
name: 'First (not null)',
|
||||
description: 'First non-null value',
|
||||
standard: true,
|
||||
reduce: calculateFirstNotNull,
|
||||
},
|
||||
{ id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
|
||||
{ id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
|
||||
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
|
||||
{
|
||||
id: ReducerID.sum,
|
||||
name: 'Total',
|
||||
description: 'The sum of all values',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
alias: 'total',
|
||||
},
|
||||
{
|
||||
id: ReducerID.count,
|
||||
name: 'Count',
|
||||
description: 'Number of values in response',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.range,
|
||||
name: 'Range',
|
||||
description: 'Difference between minimum and maximum values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.delta,
|
||||
name: 'Delta',
|
||||
description: 'Cumulative change in value',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.step,
|
||||
name: 'Step',
|
||||
description: 'Minimum interval between values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.diff,
|
||||
name: 'Difference',
|
||||
description: 'Difference between first and last values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.logmin,
|
||||
name: 'Min (above zero)',
|
||||
description: 'Used for log min scale',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.changeCount,
|
||||
name: 'Change Count',
|
||||
description: 'Number of times the value changes',
|
||||
standard: false,
|
||||
reduce: calculateChangeCount,
|
||||
},
|
||||
{
|
||||
id: ReducerID.distinctCount,
|
||||
name: 'Distinct Count',
|
||||
description: 'Number of distinct values',
|
||||
standard: false,
|
||||
reduce: calculateDistinctCount,
|
||||
},
|
||||
].forEach(info => {
|
||||
const { id, alias } = info;
|
||||
if (index.hasOwnProperty(id)) {
|
||||
console.warn('Duplicate Stat', id, info, index);
|
||||
}
|
||||
index[id] = info;
|
||||
if (alias) {
|
||||
if (index.hasOwnProperty(alias)) {
|
||||
console.warn('Duplicate Stat (alias)', alias, info, index);
|
||||
}
|
||||
index[alias] = info;
|
||||
}
|
||||
listOfStats.push(info);
|
||||
});
|
||||
hasBuiltIndex = true;
|
||||
}
|
||||
|
||||
return index[id];
|
||||
}
|
||||
export const fieldReducers = new Registry<FieldReducerInfo>(() => [
|
||||
{
|
||||
id: ReducerID.lastNotNull,
|
||||
name: 'Last (not null)',
|
||||
description: 'Last non-null value',
|
||||
standard: true,
|
||||
alias: 'current',
|
||||
reduce: calculateLastNotNull,
|
||||
},
|
||||
{
|
||||
id: ReducerID.last,
|
||||
name: 'Last',
|
||||
description: 'Last Value',
|
||||
standard: true,
|
||||
reduce: calculateLast,
|
||||
},
|
||||
{ id: ReducerID.first, name: 'First', description: 'First Value', standard: true, reduce: calculateFirst },
|
||||
{
|
||||
id: ReducerID.firstNotNull,
|
||||
name: 'First (not null)',
|
||||
description: 'First non-null value',
|
||||
standard: true,
|
||||
reduce: calculateFirstNotNull,
|
||||
},
|
||||
{ id: ReducerID.min, name: 'Min', description: 'Minimum Value', standard: true },
|
||||
{ id: ReducerID.max, name: 'Max', description: 'Maximum Value', standard: true },
|
||||
{ id: ReducerID.mean, name: 'Mean', description: 'Average Value', standard: true, alias: 'avg' },
|
||||
{
|
||||
id: ReducerID.sum,
|
||||
name: 'Total',
|
||||
description: 'The sum of all values',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
alias: 'total',
|
||||
},
|
||||
{
|
||||
id: ReducerID.count,
|
||||
name: 'Count',
|
||||
description: 'Number of values in response',
|
||||
emptyInputResult: 0,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.range,
|
||||
name: 'Range',
|
||||
description: 'Difference between minimum and maximum values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.delta,
|
||||
name: 'Delta',
|
||||
description: 'Cumulative change in value',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.step,
|
||||
name: 'Step',
|
||||
description: 'Minimum interval between values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.diff,
|
||||
name: 'Difference',
|
||||
description: 'Difference between first and last values',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.logmin,
|
||||
name: 'Min (above zero)',
|
||||
description: 'Used for log min scale',
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.allIsZero,
|
||||
name: 'All Zeros',
|
||||
description: 'All values are zero',
|
||||
emptyInputResult: false,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.allIsNull,
|
||||
name: 'All Nulls',
|
||||
description: 'All values are null',
|
||||
emptyInputResult: true,
|
||||
standard: true,
|
||||
},
|
||||
{
|
||||
id: ReducerID.changeCount,
|
||||
name: 'Change Count',
|
||||
description: 'Number of times the value changes',
|
||||
standard: false,
|
||||
reduce: calculateChangeCount,
|
||||
},
|
||||
{
|
||||
id: ReducerID.distinctCount,
|
||||
name: 'Distinct Count',
|
||||
description: 'Number of distinct values',
|
||||
standard: false,
|
||||
reduce: calculateDistinctCount,
|
||||
},
|
||||
]);
|
||||
|
||||
function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boolean, nullAsZero: boolean): FieldCalcs {
|
||||
const calcs = {
|
||||
@@ -253,7 +214,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
|
||||
count: 0,
|
||||
nonNullCount: 0,
|
||||
allIsNull: true,
|
||||
allIsZero: false,
|
||||
allIsZero: true,
|
||||
range: null,
|
||||
diff: null,
|
||||
delta: 0,
|
||||
@@ -264,7 +225,7 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
|
||||
} as FieldCalcs;
|
||||
|
||||
for (let i = 0; i < data.rows.length; i++) {
|
||||
let currentValue = data.rows[i][fieldIndex];
|
||||
let currentValue = data.rows[i] ? data.rows[i][fieldIndex] : null;
|
||||
if (i === 0) {
|
||||
calcs.first = currentValue;
|
||||
}
|
||||
@@ -350,6 +311,10 @@ function doStandardCalcs(data: DataFrame, fieldIndex: number, ignoreNulls: boole
|
||||
calcs.mean = calcs.sum! / calcs.nonNullCount;
|
||||
}
|
||||
|
||||
if (calcs.allIsNull) {
|
||||
calcs.allIsZero = false;
|
||||
}
|
||||
|
||||
if (calcs.max !== null && calcs.min !== null) {
|
||||
calcs.range = calcs.max - calcs.min;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
export * from './string';
|
||||
export * from './registry';
|
||||
export * from './markdown';
|
||||
export * from './processDataFrame';
|
||||
export * from './csv';
|
||||
|
||||
134
packages/grafana-data/src/utils/registry.ts
Normal file
134
packages/grafana-data/src/utils/registry.ts
Normal file
@@ -0,0 +1,134 @@
|
||||
import { SelectableValue } from '../types/select';
|
||||
|
||||
export interface RegistryItem {
|
||||
id: string; // Unique Key -- saved in configs
|
||||
name: string; // Display Name, can change without breaking configs
|
||||
description: string;
|
||||
aliasIds?: string[]; // when the ID changes, we may want backwards compatibility ('current' => 'last')
|
||||
|
||||
/**
|
||||
* Some extensions should not be user selectable
|
||||
* like: 'all' and 'any' matchers;
|
||||
*/
|
||||
excludeFromPicker?: boolean;
|
||||
}
|
||||
|
||||
interface RegistrySelectInfo {
|
||||
options: Array<SelectableValue<string>>;
|
||||
current: Array<SelectableValue<string>>;
|
||||
}
|
||||
|
||||
export class Registry<T extends RegistryItem> {
|
||||
private ordered: T[] = [];
|
||||
private byId = new Map<string, T>();
|
||||
private initalized = false;
|
||||
|
||||
constructor(private init?: () => T[]) {}
|
||||
|
||||
getIfExists(id: string | undefined): T | undefined {
|
||||
if (!this.initalized) {
|
||||
if (this.init) {
|
||||
for (const ext of this.init()) {
|
||||
this.register(ext);
|
||||
}
|
||||
}
|
||||
this.sort();
|
||||
this.initalized = true;
|
||||
}
|
||||
if (id) {
|
||||
return this.byId.get(id);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
get(id: string): T {
|
||||
const v = this.getIfExists(id);
|
||||
if (!v) {
|
||||
throw new Error('Undefined: ' + id);
|
||||
}
|
||||
return v;
|
||||
}
|
||||
|
||||
selectOptions(current?: string[], filter?: (ext: T) => boolean): RegistrySelectInfo {
|
||||
if (!this.initalized) {
|
||||
this.getIfExists('xxx'); // will trigger init
|
||||
}
|
||||
|
||||
const select = {
|
||||
options: [],
|
||||
current: [],
|
||||
} as RegistrySelectInfo;
|
||||
|
||||
const currentIds: any = {};
|
||||
if (current) {
|
||||
for (const id of current) {
|
||||
currentIds[id] = true;
|
||||
}
|
||||
}
|
||||
|
||||
for (const ext of this.ordered) {
|
||||
if (ext.excludeFromPicker) {
|
||||
continue;
|
||||
}
|
||||
if (filter && !filter(ext)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const option = {
|
||||
value: ext.id,
|
||||
label: ext.name,
|
||||
description: ext.description,
|
||||
};
|
||||
|
||||
select.options.push(option);
|
||||
if (currentIds[ext.id]) {
|
||||
select.current.push(option);
|
||||
}
|
||||
}
|
||||
return select;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a list of values by ID, or all values if not specified
|
||||
*/
|
||||
list(ids?: any[]): T[] {
|
||||
if (ids) {
|
||||
const found: T[] = [];
|
||||
for (const id of ids) {
|
||||
const v = this.getIfExists(id);
|
||||
if (v) {
|
||||
found.push(v);
|
||||
}
|
||||
}
|
||||
return found;
|
||||
}
|
||||
if (!this.initalized) {
|
||||
this.getIfExists('xxx'); // will trigger init
|
||||
}
|
||||
return [...this.ordered]; // copy of everythign just in case
|
||||
}
|
||||
|
||||
register(ext: T) {
|
||||
if (this.byId.has(ext.id)) {
|
||||
throw new Error('Duplicate Key:' + ext.id);
|
||||
}
|
||||
this.byId.set(ext.id, ext);
|
||||
this.ordered.push(ext);
|
||||
|
||||
if (ext.aliasIds) {
|
||||
for (const alias of ext.aliasIds) {
|
||||
if (!this.byId.has(alias)) {
|
||||
this.byId.set(alias, ext);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (this.initalized) {
|
||||
this.sort();
|
||||
}
|
||||
}
|
||||
|
||||
private sort() {
|
||||
// TODO sort the list
|
||||
}
|
||||
}
|
||||
@@ -1,23 +1,22 @@
|
||||
import { Threshold } from '../types';
|
||||
|
||||
export function getThresholdForValue(
|
||||
thresholds: Threshold[],
|
||||
value: number | null | string | undefined
|
||||
): Threshold | null {
|
||||
if (thresholds.length === 1) {
|
||||
return thresholds[0];
|
||||
export function getActiveThreshold(value: number, thresholds: Threshold[]): Threshold {
|
||||
let active = thresholds[0];
|
||||
for (const threshold of thresholds) {
|
||||
if (value >= threshold.value) {
|
||||
active = threshold;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const atThreshold = thresholds.filter(threshold => (value as number) === threshold.value)[0];
|
||||
if (atThreshold) {
|
||||
return atThreshold;
|
||||
}
|
||||
|
||||
const belowThreshold = thresholds.filter(threshold => (value as number) > threshold.value);
|
||||
if (belowThreshold.length > 0) {
|
||||
const nearestThreshold = belowThreshold.sort((t1: Threshold, t2: Threshold) => t2.value - t1.value)[0];
|
||||
return nearestThreshold;
|
||||
}
|
||||
|
||||
return null;
|
||||
return active;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sorts the thresholds
|
||||
*/
|
||||
export function sortThresholds(thresholds: Threshold[]) {
|
||||
return thresholds.sort((t1, t2) => {
|
||||
return t1.value - t2.value;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,19 +1,11 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"extends": "../tsconfig.json",
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"],
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"compilerOptions": {
|
||||
"rootDirs": ["."],
|
||||
"module": "esnext",
|
||||
"outDir": "compiled",
|
||||
"declaration": true,
|
||||
"declarationDir": "dist",
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false
|
||||
"declarationDir": "dist",
|
||||
"outDir": "compiled"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,19 +1,11 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"extends": "../tsconfig.json",
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"],
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"compilerOptions": {
|
||||
"rootDirs": ["."],
|
||||
"module": "esnext",
|
||||
"outDir": "compiled",
|
||||
"declaration": true,
|
||||
"declarationDir": "dist",
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false
|
||||
"declarationDir": "dist",
|
||||
"outDir": "compiled"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,7 +84,7 @@ Adidtionaly, you can also provide additional Jest config via package.json file.
|
||||
|
||||
|
||||
## Working with CSS & static assets
|
||||
We support pure css, SASS and CSS in JS approach (via Emotion). All static assets referenced in your code (i.e. images) should be placed under `src/static` directory and referenced using relative paths.
|
||||
We support pure css, SASS and CSS in JS approach (via Emotion).
|
||||
|
||||
1. Single css/sass file
|
||||
Create your css/sass file and import it in your plugin entry point (typically module.ts):
|
||||
@@ -101,6 +101,8 @@ If you want to provide different stylesheets for dark/light theme, create `dark.
|
||||
|
||||
TODO: add note about loadPluginCss
|
||||
|
||||
Note that static files (png, svg, json, html) are all copied to dist directory when the plugin is bundled. Relative paths to those files does not change.
|
||||
|
||||
3. Emotion
|
||||
Starting from Grafana 6.2 our suggested way of styling plugins is by using [Emotion](https://emotion.sh). It's a css-in-js library that we use internaly at Grafana. The biggest advantage of using Emotion is that you will get access to Grafana Theme variables.
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import axios from 'axios';
|
||||
// @ts-ignore
|
||||
import * as _ from 'lodash';
|
||||
import { Task, TaskRunner } from './task';
|
||||
|
||||
@@ -3,7 +3,6 @@ import execa = require('execa');
|
||||
import * as fs from 'fs';
|
||||
// @ts-ignore
|
||||
import * as path from 'path';
|
||||
import { changeCwdToGrafanaUi, restoreCwd, changeCwdToPackage } from '../utils/cwd';
|
||||
import chalk from 'chalk';
|
||||
import { useSpinner } from '../utils/useSpinner';
|
||||
import { Task, TaskRunner } from './task';
|
||||
|
||||
@@ -4,7 +4,6 @@ import execa = require('execa');
|
||||
import path = require('path');
|
||||
import fs = require('fs');
|
||||
import glob = require('glob');
|
||||
import util = require('util');
|
||||
import { Linter, Configuration, RuleFailure } from 'tslint';
|
||||
import * as prettier from 'prettier';
|
||||
|
||||
@@ -17,7 +16,6 @@ interface PluginBuildOptions {
|
||||
|
||||
export const bundlePlugin = useSpinner<PluginBundleOptions>('Compiling...', async options => await bundleFn(options));
|
||||
|
||||
const readFileAsync = util.promisify(fs.readFile);
|
||||
// @ts-ignore
|
||||
export const clean = useSpinner<void>('Cleaning', async () => await execa('rimraf', [`${process.cwd()}/dist`]));
|
||||
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { Task, TaskRunner } from './task';
|
||||
import { pluginBuildRunner } from './plugin.build';
|
||||
import { useSpinner } from '../utils/useSpinner';
|
||||
import { restoreCwd } from '../utils/cwd';
|
||||
import { getPluginJson } from '../../config/utils/pluginValidation';
|
||||
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import path = require('path');
|
||||
import fs = require('fs');
|
||||
import webpack = require('webpack');
|
||||
import { getWebpackConfig } from '../../../config/webpack.plugin.config';
|
||||
import formatWebpackMessages = require('react-dev-utils/formatWebpackMessages');
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import path = require('path');
|
||||
import * as jestCLI from 'jest-cli';
|
||||
import { useSpinner } from '../../utils/useSpinner';
|
||||
import { jestConfig } from '../../../config/jest.plugin.config';
|
||||
|
||||
@@ -46,7 +46,6 @@ export async function getTeam(team: any): Promise<any> {
|
||||
}
|
||||
|
||||
export async function addToTeam(team: any, user: any): Promise<any> {
|
||||
const members = await client.get(`/teams/${team.id}/members`);
|
||||
console.log(`Adding user ${user.name} to team ${team.name}`);
|
||||
await client.post(`/teams/${team.id}/members`, { userId: user.id });
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import execa = require('execa');
|
||||
import * as fs from 'fs';
|
||||
import { changeCwdToGrafanaUi, restoreCwd, changeCwdToGrafanaToolkit } from '../utils/cwd';
|
||||
import chalk from 'chalk';
|
||||
import { useSpinner } from '../utils/useSpinner';
|
||||
import { Task, TaskRunner } from './task';
|
||||
|
||||
@@ -7,7 +7,7 @@ const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||
const OptimizeCssAssetsPlugin = require('optimize-css-assets-webpack-plugin');
|
||||
|
||||
import * as webpack from 'webpack';
|
||||
import { hasThemeStylesheets, getStyleLoaders, getStylesheetEntries, getFileLoaders } from './webpack/loaders';
|
||||
import { getStyleLoaders, getStylesheetEntries, getFileLoaders } from './webpack/loaders';
|
||||
|
||||
interface WebpackConfigurationOptions {
|
||||
watch?: boolean;
|
||||
@@ -51,6 +51,7 @@ const getManualChunk = (id: string) => {
|
||||
};
|
||||
}
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
const getEntries = () => {
|
||||
@@ -83,8 +84,8 @@ const getCommonPlugins = (options: WebpackConfigurationOptions) => {
|
||||
{ from: '../LICENSE', to: '.' },
|
||||
{ from: 'img/*', to: '.' },
|
||||
{ from: '**/*.json', to: '.' },
|
||||
// { from: '**/*.svg', to: '.' },
|
||||
// { from: '**/*.png', to: '.' },
|
||||
{ from: '**/*.svg', to: '.' },
|
||||
{ from: '**/*.png', to: '.' },
|
||||
{ from: '**/*.html', to: '.' },
|
||||
],
|
||||
{ logLevel: options.watch ? 'silent' : 'warn' }
|
||||
|
||||
@@ -3,7 +3,6 @@ import { getStylesheetEntries, hasThemeStylesheets } from './loaders';
|
||||
describe('Loaders', () => {
|
||||
describe('stylesheet helpers', () => {
|
||||
const logSpy = jest.spyOn(console, 'log').mockImplementation();
|
||||
const errorSpy = jest.spyOn(console, 'error').mockImplementation();
|
||||
|
||||
afterAll(() => {
|
||||
logSpy.mockRestore();
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
import { getPluginJson } from '../utils/pluginValidation';
|
||||
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const MiniCssExtractPlugin = require('mini-css-extract-plugin');
|
||||
|
||||
@@ -122,8 +119,8 @@ export const getFileLoaders = () => {
|
||||
? {
|
||||
loader: 'file-loader',
|
||||
options: {
|
||||
outputPath: 'static',
|
||||
name: '[name].[hash:8].[ext]',
|
||||
outputPath: '/',
|
||||
name: '[path][name].[ext]',
|
||||
},
|
||||
}
|
||||
: // When using single css import images are inlined as base64 URIs in the result bundle
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"include": ["src/**/*.ts"],
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"compilerOptions": {
|
||||
"module": "commonjs",
|
||||
"rootDirs": ["."],
|
||||
"outDir": "dist/src",
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"declaration": false,
|
||||
"typeRoots": ["./node_modules/@types"],
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["es2015", "es2017.string"]
|
||||
}
|
||||
|
||||
@@ -49,9 +49,9 @@ function addBarGaugeStory(name: string, overrides: Partial<Props>) {
|
||||
orientation: VizOrientation.Vertical,
|
||||
displayMode: 'basic',
|
||||
thresholds: [
|
||||
{ index: 0, value: -Infinity, color: 'green' },
|
||||
{ index: 1, value: threshold1Value, color: threshold1Color },
|
||||
{ index: 1, value: threshold2Value, color: threshold2Color },
|
||||
{ value: -Infinity, color: 'green' },
|
||||
{ value: threshold1Value, color: threshold1Color },
|
||||
{ value: threshold2Value, color: threshold2Color },
|
||||
],
|
||||
};
|
||||
|
||||
|
||||
@@ -25,11 +25,7 @@ function getProps(propOverrides?: Partial<Props>): Props {
|
||||
maxValue: 100,
|
||||
minValue: 0,
|
||||
displayMode: 'basic',
|
||||
thresholds: [
|
||||
{ index: 0, value: -Infinity, color: 'green' },
|
||||
{ index: 1, value: 70, color: 'orange' },
|
||||
{ index: 2, value: 90, color: 'red' },
|
||||
],
|
||||
thresholds: [{ value: -Infinity, color: 'green' }, { value: 70, color: 'orange' }, { value: 90, color: 'red' }],
|
||||
height: 300,
|
||||
width: 300,
|
||||
value: {
|
||||
|
||||
@@ -7,7 +7,7 @@ import { getColorFromHexRgbOrName } from '../../utils';
|
||||
|
||||
// Types
|
||||
import { DisplayValue, Themeable, VizOrientation } from '../../types';
|
||||
import { Threshold, TimeSeriesValue, getThresholdForValue } from '@grafana/data';
|
||||
import { Threshold, TimeSeriesValue, getActiveThreshold } from '@grafana/data';
|
||||
|
||||
const MIN_VALUE_HEIGHT = 18;
|
||||
const MAX_VALUE_HEIGHT = 50;
|
||||
@@ -87,8 +87,14 @@ export class BarGauge extends PureComponent<Props> {
|
||||
|
||||
getCellColor(positionValue: TimeSeriesValue): CellColors {
|
||||
const { thresholds, theme, value } = this.props;
|
||||
const activeThreshold = getThresholdForValue(thresholds, positionValue);
|
||||
if (positionValue === null) {
|
||||
return {
|
||||
background: 'gray',
|
||||
border: 'gray',
|
||||
};
|
||||
}
|
||||
|
||||
const activeThreshold = getActiveThreshold(positionValue, thresholds);
|
||||
if (activeThreshold !== null) {
|
||||
const color = getColorFromHexRgbOrName(activeThreshold.color, theme.type);
|
||||
|
||||
@@ -474,7 +480,7 @@ export function getBarGradient(props: Props, maxSize: number): string {
|
||||
export function getValueColor(props: Props): string {
|
||||
const { thresholds, theme, value } = props;
|
||||
|
||||
const activeThreshold = getThresholdForValue(thresholds, value.numeric);
|
||||
const activeThreshold = getActiveThreshold(value.numeric, thresholds);
|
||||
|
||||
if (activeThreshold !== null) {
|
||||
return getColorFromHexRgbOrName(activeThreshold.color, theme.type);
|
||||
|
||||
@@ -14,7 +14,7 @@ const setup = (propOverrides?: object) => {
|
||||
minValue: 0,
|
||||
showThresholdMarkers: true,
|
||||
showThresholdLabels: false,
|
||||
thresholds: [{ index: 0, value: -Infinity, color: '#7EB26D' }],
|
||||
thresholds: [{ value: -Infinity, color: '#7EB26D' }],
|
||||
height: 300,
|
||||
width: 300,
|
||||
value: {
|
||||
@@ -48,9 +48,9 @@ describe('Get thresholds formatted', () => {
|
||||
it('should get the correct formatted values when thresholds are added', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
],
|
||||
});
|
||||
|
||||
|
||||
@@ -43,12 +43,12 @@ export class Gauge extends PureComponent<Props> {
|
||||
const lastThreshold = thresholds[thresholds.length - 1];
|
||||
|
||||
return [
|
||||
...thresholds.map(threshold => {
|
||||
if (threshold.index === 0) {
|
||||
...thresholds.map((threshold, index) => {
|
||||
if (index === 0) {
|
||||
return { value: minValue, color: getColorFromHexRgbOrName(threshold.color, theme.type) };
|
||||
}
|
||||
|
||||
const previousThreshold = thresholds[threshold.index - 1];
|
||||
const previousThreshold = thresholds[index - 1];
|
||||
return { value: threshold.value, color: getColorFromHexRgbOrName(previousThreshold.color, theme.type) };
|
||||
}),
|
||||
{ value: maxValue, color: getColorFromHexRgbOrName(lastThreshold.color, theme.type) },
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { PureComponent } from 'react';
|
||||
import classNames from 'classnames';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { Tooltip } from '../Tooltip/Tooltip';
|
||||
import { ButtonSelect } from '../Select/ButtonSelect';
|
||||
|
||||
@@ -23,7 +23,7 @@ export class RefreshPicker extends PureComponent<Props> {
|
||||
super(props);
|
||||
}
|
||||
|
||||
intervalsToOptions = (intervals: string[] | undefined): Array<SelectOptionItem<string>> => {
|
||||
intervalsToOptions = (intervals: string[] | undefined): Array<SelectableValue<string>> => {
|
||||
const intervalsOrDefault = intervals || defaultIntervals;
|
||||
const options = intervalsOrDefault
|
||||
.filter(str => str !== '')
|
||||
@@ -37,7 +37,7 @@ export class RefreshPicker extends PureComponent<Props> {
|
||||
return options;
|
||||
};
|
||||
|
||||
onChangeSelect = (item: SelectOptionItem<string>) => {
|
||||
onChangeSelect = (item: SelectableValue<string>) => {
|
||||
const { onIntervalChanged } = this.props;
|
||||
if (onIntervalChanged) {
|
||||
// @ts-ignore
|
||||
|
||||
@@ -4,7 +4,7 @@ import { action } from '@storybook/addon-actions';
|
||||
import { withKnobs, object, text } from '@storybook/addon-knobs';
|
||||
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
|
||||
import { UseState } from '../../utils/storybook/UseState';
|
||||
import { SelectOptionItem } from './Select';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
import { ButtonSelect } from './ButtonSelect';
|
||||
|
||||
const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
|
||||
@@ -12,9 +12,9 @@ const ButtonSelectStories = storiesOf('UI/Select/ButtonSelect', module);
|
||||
ButtonSelectStories.addDecorator(withCenteredStory).addDecorator(withKnobs);
|
||||
|
||||
ButtonSelectStories.add('default', () => {
|
||||
const intialState: SelectOptionItem<string> = { label: 'A label', value: 'A value' };
|
||||
const value = object<SelectOptionItem<string>>('Selected Value:', intialState);
|
||||
const options = object<Array<SelectOptionItem<string>>>('Options:', [
|
||||
const intialState: SelectableValue<string> = { label: 'A label', value: 'A value' };
|
||||
const value = object<SelectableValue<string>>('Selected Value:', intialState);
|
||||
const options = object<Array<SelectableValue<string>>>('Options:', [
|
||||
intialState,
|
||||
{ label: 'Another label', value: 'Another value' },
|
||||
]);
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import React, { PureComponent, ReactElement } from 'react';
|
||||
import Select, { SelectOptionItem } from './Select';
|
||||
import Select from './Select';
|
||||
import { PopperContent } from '../Tooltip/PopperController';
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
|
||||
interface ButtonComponentProps {
|
||||
label: ReactElement | string | undefined;
|
||||
@@ -30,13 +31,13 @@ const ButtonComponent = (buttonProps: ButtonComponentProps) => (props: any) => {
|
||||
|
||||
export interface Props<T> {
|
||||
className: string | undefined;
|
||||
options: Array<SelectOptionItem<T>>;
|
||||
value?: SelectOptionItem<T>;
|
||||
options: Array<SelectableValue<T>>;
|
||||
value?: SelectableValue<T>;
|
||||
label?: ReactElement | string;
|
||||
iconClass?: string;
|
||||
components?: any;
|
||||
maxMenuHeight?: number;
|
||||
onChange: (item: SelectOptionItem<T>) => void;
|
||||
onChange: (item: SelectableValue<T>) => void;
|
||||
tooltipContent?: PopperContent<any>;
|
||||
isMenuOpen?: boolean;
|
||||
onOpenMenu?: () => void;
|
||||
@@ -45,7 +46,7 @@ export interface Props<T> {
|
||||
}
|
||||
|
||||
export class ButtonSelect<T> extends PureComponent<Props<T>> {
|
||||
onChange = (item: SelectOptionItem<T>) => {
|
||||
onChange = (item: SelectableValue<T>) => {
|
||||
const { onChange } = this.props;
|
||||
onChange(item);
|
||||
};
|
||||
|
||||
@@ -19,23 +19,16 @@ import resetSelectStyles from './resetSelectStyles';
|
||||
import { CustomScrollbar } from '../CustomScrollbar/CustomScrollbar';
|
||||
import { PopperContent } from '../Tooltip/PopperController';
|
||||
import { Tooltip } from '../Tooltip/Tooltip';
|
||||
|
||||
export interface SelectOptionItem<T> {
|
||||
label?: string;
|
||||
value?: T;
|
||||
imgUrl?: string;
|
||||
description?: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
import { SelectableValue } from '@grafana/data';
|
||||
|
||||
export interface CommonProps<T> {
|
||||
defaultValue?: any;
|
||||
getOptionLabel?: (item: SelectOptionItem<T>) => string;
|
||||
getOptionValue?: (item: SelectOptionItem<T>) => string;
|
||||
onChange: (item: SelectOptionItem<T>) => {} | void;
|
||||
getOptionLabel?: (item: SelectableValue<T>) => string;
|
||||
getOptionValue?: (item: SelectableValue<T>) => string;
|
||||
onChange: (item: SelectableValue<T>) => {} | void;
|
||||
placeholder?: string;
|
||||
width?: number;
|
||||
value?: SelectOptionItem<T>;
|
||||
value?: SelectableValue<T>;
|
||||
className?: string;
|
||||
isDisabled?: boolean;
|
||||
isSearchable?: boolean;
|
||||
@@ -57,12 +50,12 @@ export interface CommonProps<T> {
|
||||
}
|
||||
|
||||
export interface SelectProps<T> extends CommonProps<T> {
|
||||
options: Array<SelectOptionItem<T>>;
|
||||
options: Array<SelectableValue<T>>;
|
||||
}
|
||||
|
||||
interface AsyncProps<T> extends CommonProps<T> {
|
||||
defaultOptions: boolean;
|
||||
loadOptions: (query: string) => Promise<Array<SelectOptionItem<T>>>;
|
||||
loadOptions: (query: string) => Promise<Array<SelectableValue<T>>>;
|
||||
loadingMessage?: () => string;
|
||||
}
|
||||
|
||||
|
||||
@@ -3,11 +3,10 @@ import { interval, Subscription, Subject, of, NEVER } from 'rxjs';
|
||||
import { tap, switchMap } from 'rxjs/operators';
|
||||
import _ from 'lodash';
|
||||
|
||||
import { stringToMs } from '@grafana/data';
|
||||
import { stringToMs, SelectableValue } from '@grafana/data';
|
||||
import { isLive } from '../RefreshPicker/RefreshPicker';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
|
||||
export function getIntervalFromString(strInterval: string): SelectOptionItem<number> {
|
||||
export function getIntervalFromString(strInterval: string): SelectableValue<number> {
|
||||
return {
|
||||
label: strInterval,
|
||||
value: stringToMs(strInterval),
|
||||
|
||||
@@ -8,10 +8,10 @@ import { StatsPicker } from '../StatsPicker/StatsPicker';
|
||||
|
||||
// Types
|
||||
import { FieldDisplayOptions, DEFAULT_FIELD_DISPLAY_VALUES_LIMIT } from '../../utils/fieldDisplay';
|
||||
import Select, { SelectOptionItem } from '../Select/Select';
|
||||
import { Field, ReducerID, toNumberString, toIntegerOrUndefined } from '@grafana/data';
|
||||
import Select from '../Select/Select';
|
||||
import { Field, ReducerID, toNumberString, toIntegerOrUndefined, SelectableValue } from '@grafana/data';
|
||||
|
||||
const showOptions: Array<SelectOptionItem<boolean>> = [
|
||||
const showOptions: Array<SelectableValue<boolean>> = [
|
||||
{
|
||||
value: true,
|
||||
label: 'All Values',
|
||||
@@ -31,7 +31,7 @@ export interface Props {
|
||||
}
|
||||
|
||||
export class FieldDisplayEditor extends PureComponent<Props> {
|
||||
onShowValuesChange = (item: SelectOptionItem<boolean>) => {
|
||||
onShowValuesChange = (item: SelectableValue<boolean>) => {
|
||||
const val = item.value === true;
|
||||
this.props.onChange({ ...this.props.value, values: val });
|
||||
};
|
||||
|
||||
@@ -7,8 +7,7 @@ import { FormLabel } from '../FormLabel/FormLabel';
|
||||
import { UnitPicker } from '../UnitPicker/UnitPicker';
|
||||
|
||||
// Types
|
||||
import { toIntegerOrUndefined, Field } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
import { toIntegerOrUndefined, Field, SelectableValue } from '@grafana/data';
|
||||
|
||||
import { VAR_SERIES_NAME, VAR_FIELD_NAME, VAR_CALC, VAR_CELL_PREFIX } from '../../utils/fieldDisplay';
|
||||
|
||||
@@ -54,7 +53,7 @@ export const FieldPropertiesEditor: React.FC<Props> = ({ value, onChange, showMi
|
||||
[value.max, onChange]
|
||||
);
|
||||
|
||||
const onUnitChange = (unit: SelectOptionItem<string>) => {
|
||||
const onUnitChange = (unit: SelectableValue<string>) => {
|
||||
onChange({ ...value, unit: unit.value });
|
||||
};
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import omit from 'lodash/omit';
|
||||
|
||||
import { VizOrientation, PanelModel } from '../../types/panel';
|
||||
import { FieldDisplayOptions } from '../../utils/fieldDisplay';
|
||||
import { Field, getFieldReducers } from '@grafana/data';
|
||||
import { Field, fieldReducers, Threshold, sortThresholds } from '@grafana/data';
|
||||
|
||||
export interface SingleStatBaseOptions {
|
||||
fieldOptions: FieldDisplayOptions;
|
||||
@@ -39,17 +39,18 @@ export const sharedSingleStatMigrationCheck = (panel: PanelModel<SingleStatBaseO
|
||||
const { valueOptions } = old;
|
||||
|
||||
const fieldOptions = (old.fieldOptions = {} as FieldDisplayOptions);
|
||||
fieldOptions.mappings = old.valueMappings;
|
||||
fieldOptions.thresholds = old.thresholds;
|
||||
|
||||
const field = (fieldOptions.defaults = {} as Field);
|
||||
if (valueOptions) {
|
||||
field.unit = valueOptions.unit;
|
||||
field.decimals = valueOptions.decimals;
|
||||
field.mappings = old.valueMappings;
|
||||
field.thresholds = migrateOldThresholds(old.thresholds);
|
||||
field.unit = valueOptions.unit;
|
||||
field.decimals = valueOptions.decimals;
|
||||
|
||||
// Make sure the stats have a valid name
|
||||
if (valueOptions.stat) {
|
||||
fieldOptions.calcs = getFieldReducers([valueOptions.stat]).map(s => s.id);
|
||||
// Make sure the stats have a valid name
|
||||
if (valueOptions.stat) {
|
||||
const reducer = fieldReducers.get(valueOptions.stat);
|
||||
if (reducer) {
|
||||
fieldOptions.calcs = [reducer.id];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -58,7 +59,33 @@ export const sharedSingleStatMigrationCheck = (panel: PanelModel<SingleStatBaseO
|
||||
|
||||
// remove old props
|
||||
return omit(old, 'valueMappings', 'thresholds', 'valueOptions', 'minValue', 'maxValue');
|
||||
} else if (old.fieldOptions) {
|
||||
// Move mappins & thresholds to field defautls (6.4+)
|
||||
const { mappings, thresholds, ...fieldOptions } = old.fieldOptions;
|
||||
fieldOptions.defaults = {
|
||||
mappings,
|
||||
thresholds: migrateOldThresholds(thresholds),
|
||||
...fieldOptions.defaults,
|
||||
};
|
||||
old.fieldOptions = fieldOptions;
|
||||
return old;
|
||||
}
|
||||
|
||||
return panel.options;
|
||||
};
|
||||
|
||||
export function migrateOldThresholds(thresholds?: any[]): Threshold[] | undefined {
|
||||
if (!thresholds || !thresholds.length) {
|
||||
return undefined;
|
||||
}
|
||||
const copy = thresholds.map(t => {
|
||||
return {
|
||||
// Drops 'index'
|
||||
value: t.value === null ? -Infinity : t.value,
|
||||
color: t.color,
|
||||
};
|
||||
});
|
||||
sortThresholds(copy);
|
||||
copy[0].value = -Infinity;
|
||||
return copy;
|
||||
}
|
||||
|
||||
@@ -5,8 +5,7 @@ import difference from 'lodash/difference';
|
||||
|
||||
import { Select } from '../index';
|
||||
|
||||
import { getFieldReducers } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
import { fieldReducers, SelectableValue } from '@grafana/data';
|
||||
|
||||
interface Props {
|
||||
placeholder?: string;
|
||||
@@ -34,7 +33,7 @@ export class StatsPicker extends PureComponent<Props> {
|
||||
checkInput = () => {
|
||||
const { stats, allowMultiple, defaultStat, onChange } = this.props;
|
||||
|
||||
const current = getFieldReducers(stats);
|
||||
const current = fieldReducers.list(stats);
|
||||
if (current.length !== stats.length) {
|
||||
const found = current.map(v => v.id);
|
||||
const notFound = difference(stats, found);
|
||||
@@ -54,7 +53,7 @@ export class StatsPicker extends PureComponent<Props> {
|
||||
}
|
||||
};
|
||||
|
||||
onSelectionChange = (item: SelectOptionItem<string>) => {
|
||||
onSelectionChange = (item: SelectableValue<string>) => {
|
||||
const { onChange } = this.props;
|
||||
if (isArray(item)) {
|
||||
onChange(item.map(v => v.value));
|
||||
@@ -65,24 +64,16 @@ export class StatsPicker extends PureComponent<Props> {
|
||||
|
||||
render() {
|
||||
const { width, stats, allowMultiple, defaultStat, placeholder } = this.props;
|
||||
const options = getFieldReducers().map(s => {
|
||||
return {
|
||||
value: s.id,
|
||||
label: s.name,
|
||||
description: s.description,
|
||||
};
|
||||
});
|
||||
|
||||
const value: Array<SelectOptionItem<string>> = options.filter(option => stats.find(stat => option.value === stat));
|
||||
|
||||
const select = fieldReducers.selectOptions(stats);
|
||||
return (
|
||||
<Select
|
||||
width={width}
|
||||
value={value}
|
||||
value={select.current}
|
||||
isClearable={!defaultStat}
|
||||
isMulti={allowMultiple}
|
||||
isSearchable={true}
|
||||
options={options}
|
||||
options={select.options}
|
||||
placeholder={placeholder}
|
||||
onChange={this.onSelectionChange}
|
||||
/>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import React, { ChangeEvent } from 'react';
|
||||
import { mount } from 'enzyme';
|
||||
import { ThresholdsEditor, Props } from './ThresholdsEditor';
|
||||
import { ThresholdsEditor, Props, threshodsWithoutKey } from './ThresholdsEditor';
|
||||
import { colors } from '../../utils';
|
||||
|
||||
const setup = (propOverrides?: Partial<Props>) => {
|
||||
@@ -20,6 +20,10 @@ const setup = (propOverrides?: Partial<Props>) => {
|
||||
};
|
||||
};
|
||||
|
||||
function getCurrentThresholds(editor: ThresholdsEditor) {
|
||||
return threshodsWithoutKey(editor.state.thresholds);
|
||||
}
|
||||
|
||||
describe('Render', () => {
|
||||
it('should render with base threshold', () => {
|
||||
const { wrapper } = setup();
|
||||
@@ -32,60 +36,55 @@ describe('Initialization', () => {
|
||||
it('should add a base threshold if missing', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
|
||||
expect(getCurrentThresholds(instance)).toEqual([{ value: -Infinity, color: colors[0] }]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Add threshold', () => {
|
||||
it('should not add threshold at index 0', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
instance.onAddThreshold(0);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([{ index: 0, value: -Infinity, color: colors[0] }]);
|
||||
});
|
||||
|
||||
it('should add threshold', () => {
|
||||
const { instance } = setup();
|
||||
|
||||
instance.onAddThreshold(1);
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[0]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
]);
|
||||
});
|
||||
|
||||
it('should add another threshold above a first', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [{ index: 0, value: -Infinity, color: colors[0] }, { index: 1, value: 50, color: colors[2] }],
|
||||
thresholds: [
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
],
|
||||
});
|
||||
|
||||
instance.onAddThreshold(2);
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 75, color: colors[3] },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] }, // 0
|
||||
{ value: 50, color: colors[2] }, // 1
|
||||
{ value: 75, color: colors[3] }, // 2
|
||||
]);
|
||||
});
|
||||
|
||||
it('should add another threshold between first and second index', () => {
|
||||
const { instance } = setup({
|
||||
thresholds: [
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 75, color: colors[3] },
|
||||
{ value: -Infinity, color: colors[0] },
|
||||
{ value: 50, color: colors[2] },
|
||||
{ value: 75, color: colors[3] },
|
||||
],
|
||||
});
|
||||
|
||||
instance.onAddThreshold(2);
|
||||
instance.onAddThresholdAfter(instance.state.thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: colors[0] },
|
||||
{ index: 1, value: 50, color: colors[2] },
|
||||
{ index: 2, value: 62.5, color: colors[4] },
|
||||
{ index: 3, value: 75, color: colors[3] },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: colors[0] },
|
||||
{ value: 50, color: colors[2] },
|
||||
{ value: 62.5, color: colors[4] },
|
||||
{ value: 75, color: colors[3] },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -93,30 +92,30 @@ describe('Add threshold', () => {
|
||||
describe('Remove threshold', () => {
|
||||
it('should not remove threshold at index 0', () => {
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
instance.onRemoveThreshold(thresholds[0]);
|
||||
instance.onRemoveThreshold(instance.state.thresholds[0]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual(thresholds);
|
||||
expect(getCurrentThresholds(instance)).toEqual(thresholds);
|
||||
});
|
||||
|
||||
it('should remove threshold', () => {
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
instance.onRemoveThreshold(thresholds[1]);
|
||||
instance.onRemoveThreshold(instance.state.thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 75, color: '#6ED0E0' },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -124,25 +123,25 @@ describe('Remove threshold', () => {
|
||||
describe('change threshold value', () => {
|
||||
it('should not change threshold at index 0', () => {
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 50, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
];
|
||||
const { instance } = setup({ thresholds });
|
||||
|
||||
const mockEvent = ({ target: { value: '12' } } as any) as ChangeEvent<HTMLInputElement>;
|
||||
|
||||
instance.onChangeThresholdValue(mockEvent, thresholds[0]);
|
||||
instance.onChangeThresholdValue(mockEvent, instance.state.thresholds[0]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual(thresholds);
|
||||
expect(getCurrentThresholds(instance)).toEqual(thresholds);
|
||||
});
|
||||
|
||||
it('should update value', () => {
|
||||
const { instance } = setup();
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 50, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D', key: 1 },
|
||||
{ value: 50, color: '#EAB839', key: 2 },
|
||||
{ value: 75, color: '#6ED0E0', key: 3 },
|
||||
];
|
||||
|
||||
instance.state = {
|
||||
@@ -153,10 +152,10 @@ describe('change threshold value', () => {
|
||||
|
||||
instance.onChangeThresholdValue(mockEvent, thresholds[1]);
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 78, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 78, color: '#EAB839' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -165,9 +164,9 @@ describe('on blur threshold value', () => {
|
||||
it('should resort rows and update indexes', () => {
|
||||
const { instance } = setup();
|
||||
const thresholds = [
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 78, color: '#EAB839' },
|
||||
{ index: 2, value: 75, color: '#6ED0E0' },
|
||||
{ value: -Infinity, color: '#7EB26D', key: 1 },
|
||||
{ value: 78, color: '#EAB839', key: 2 },
|
||||
{ value: 75, color: '#6ED0E0', key: 3 },
|
||||
];
|
||||
|
||||
instance.setState({
|
||||
@@ -176,10 +175,10 @@ describe('on blur threshold value', () => {
|
||||
|
||||
instance.onBlur();
|
||||
|
||||
expect(instance.state.thresholds).toEqual([
|
||||
{ index: 0, value: -Infinity, color: '#7EB26D' },
|
||||
{ index: 1, value: 75, color: '#6ED0E0' },
|
||||
{ index: 2, value: 78, color: '#EAB839' },
|
||||
expect(getCurrentThresholds(instance)).toEqual([
|
||||
{ value: -Infinity, color: '#7EB26D' },
|
||||
{ value: 75, color: '#6ED0E0' },
|
||||
{ value: 78, color: '#EAB839' },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import React, { PureComponent, ChangeEvent } from 'react';
|
||||
import { Threshold } from '@grafana/data';
|
||||
import { Threshold, sortThresholds } from '@grafana/data';
|
||||
import { colors } from '../../utils';
|
||||
import { ThemeContext } from '../../themes';
|
||||
import { getColorFromHexRgbOrName } from '../../utils';
|
||||
@@ -13,115 +13,121 @@ export interface Props {
|
||||
}
|
||||
|
||||
interface State {
|
||||
thresholds: Threshold[];
|
||||
thresholds: ThresholdWithKey[];
|
||||
}
|
||||
|
||||
interface ThresholdWithKey extends Threshold {
|
||||
key: number;
|
||||
}
|
||||
|
||||
let counter = 100;
|
||||
|
||||
export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
constructor(props: Props) {
|
||||
super(props);
|
||||
|
||||
const addDefaultThreshold = this.props.thresholds.length === 0;
|
||||
const thresholds: Threshold[] = addDefaultThreshold
|
||||
? [{ index: 0, value: -Infinity, color: colors[0] }]
|
||||
: props.thresholds;
|
||||
const thresholds = props.thresholds
|
||||
? props.thresholds.map(t => {
|
||||
return {
|
||||
color: t.color,
|
||||
value: t.value === null ? -Infinity : t.value,
|
||||
key: counter++,
|
||||
};
|
||||
})
|
||||
: ([] as ThresholdWithKey[]);
|
||||
|
||||
let needsCallback = false;
|
||||
if (!thresholds.length) {
|
||||
thresholds.push({ value: -Infinity, color: colors[0], key: counter++ });
|
||||
needsCallback = true;
|
||||
} else {
|
||||
// First value is always base
|
||||
thresholds[0].value = -Infinity;
|
||||
}
|
||||
|
||||
// Update the state
|
||||
this.state = { thresholds };
|
||||
|
||||
if (addDefaultThreshold) {
|
||||
if (needsCallback) {
|
||||
this.onChange();
|
||||
}
|
||||
}
|
||||
|
||||
onAddThreshold = (index: number) => {
|
||||
onAddThresholdAfter = (threshold: ThresholdWithKey) => {
|
||||
const { thresholds } = this.state;
|
||||
|
||||
const maxValue = 100;
|
||||
const minValue = 0;
|
||||
|
||||
if (index === 0) {
|
||||
return;
|
||||
let prev: ThresholdWithKey | undefined = undefined;
|
||||
let next: ThresholdWithKey | undefined = undefined;
|
||||
for (const t of thresholds) {
|
||||
if (prev && prev.key === threshold.key) {
|
||||
next = t;
|
||||
break;
|
||||
}
|
||||
prev = t;
|
||||
}
|
||||
|
||||
const newThresholds = thresholds.map(threshold => {
|
||||
if (threshold.index >= index) {
|
||||
const index = threshold.index + 1;
|
||||
threshold = { ...threshold, index };
|
||||
}
|
||||
return threshold;
|
||||
});
|
||||
const prevValue = prev && isFinite(prev.value) ? prev.value : minValue;
|
||||
const nextValue = next && isFinite(next.value) ? next.value : maxValue;
|
||||
|
||||
// Setting value to a value between the previous thresholds
|
||||
const beforeThreshold = newThresholds.filter(t => t.index === index - 1 && t.index !== 0)[0];
|
||||
const afterThreshold = newThresholds.filter(t => t.index === index + 1 && t.index !== 0)[0];
|
||||
const beforeThresholdValue = beforeThreshold !== undefined ? beforeThreshold.value : minValue;
|
||||
const afterThresholdValue = afterThreshold !== undefined ? afterThreshold.value : maxValue;
|
||||
const value = afterThresholdValue - (afterThresholdValue - beforeThresholdValue) / 2;
|
||||
|
||||
// Set a color
|
||||
const color = colors.filter(c => !newThresholds.some(t => t.color === c))[1];
|
||||
const color = colors.filter(c => !thresholds.some(t => t.color === c))[1];
|
||||
const add = {
|
||||
value: prevValue + (nextValue - prevValue) / 2.0,
|
||||
color: color,
|
||||
key: counter++,
|
||||
};
|
||||
const newThresholds = [...thresholds, add];
|
||||
sortThresholds(newThresholds);
|
||||
|
||||
this.setState(
|
||||
{
|
||||
thresholds: this.sortThresholds([
|
||||
...newThresholds,
|
||||
{
|
||||
color,
|
||||
index,
|
||||
value: value as number,
|
||||
},
|
||||
]),
|
||||
thresholds: newThresholds,
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onRemoveThreshold = (threshold: Threshold) => {
|
||||
if (threshold.index === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.setState(
|
||||
prevState => {
|
||||
const newThresholds = prevState.thresholds.map(t => {
|
||||
if (t.index > threshold.index) {
|
||||
const index = t.index - 1;
|
||||
t = { ...t, index };
|
||||
}
|
||||
return t;
|
||||
});
|
||||
|
||||
return {
|
||||
thresholds: newThresholds.filter(t => t !== threshold),
|
||||
};
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: Threshold) => {
|
||||
if (threshold.index === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
onRemoveThreshold = (threshold: ThresholdWithKey) => {
|
||||
const { thresholds } = this.state;
|
||||
if (!thresholds.length) {
|
||||
return;
|
||||
}
|
||||
// Don't remove index 0
|
||||
if (threshold.key === thresholds[0].key) {
|
||||
return;
|
||||
}
|
||||
this.setState(
|
||||
{
|
||||
thresholds: thresholds.filter(t => t.key !== threshold.key),
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChangeThresholdValue = (event: ChangeEvent<HTMLInputElement>, threshold: ThresholdWithKey) => {
|
||||
const cleanValue = event.target.value.replace(/,/g, '.');
|
||||
const parsedValue = parseFloat(cleanValue);
|
||||
const value = isNaN(parsedValue) ? '' : parsedValue;
|
||||
|
||||
const newThresholds = thresholds.map(t => {
|
||||
if (t === threshold && t.index !== 0) {
|
||||
const thresholds = this.state.thresholds.map(t => {
|
||||
if (t.key === threshold.key) {
|
||||
t = { ...t, value: value as number };
|
||||
}
|
||||
|
||||
return t;
|
||||
});
|
||||
|
||||
this.setState({ thresholds: newThresholds });
|
||||
if (thresholds.length) {
|
||||
thresholds[0].value = -Infinity;
|
||||
}
|
||||
this.setState({ thresholds });
|
||||
};
|
||||
|
||||
onChangeThresholdColor = (threshold: Threshold, color: string) => {
|
||||
onChangeThresholdColor = (threshold: ThresholdWithKey, color: string) => {
|
||||
const { thresholds } = this.state;
|
||||
|
||||
const newThresholds = thresholds.map(t => {
|
||||
if (t === threshold) {
|
||||
if (t.key === threshold.key) {
|
||||
t = { ...t, color: color };
|
||||
}
|
||||
|
||||
@@ -137,30 +143,22 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
};
|
||||
|
||||
onBlur = () => {
|
||||
this.setState(prevState => {
|
||||
const sortThresholds = this.sortThresholds([...prevState.thresholds]);
|
||||
let index = 0;
|
||||
sortThresholds.forEach(t => {
|
||||
t.index = index++;
|
||||
});
|
||||
|
||||
return { thresholds: sortThresholds };
|
||||
});
|
||||
|
||||
this.onChange();
|
||||
const thresholds = [...this.state.thresholds];
|
||||
sortThresholds(thresholds);
|
||||
this.setState(
|
||||
{
|
||||
thresholds,
|
||||
},
|
||||
() => this.onChange()
|
||||
);
|
||||
};
|
||||
|
||||
onChange = () => {
|
||||
this.props.onChange(this.state.thresholds);
|
||||
const { thresholds } = this.state;
|
||||
this.props.onChange(threshodsWithoutKey(thresholds));
|
||||
};
|
||||
|
||||
sortThresholds = (thresholds: Threshold[]) => {
|
||||
return thresholds.sort((t1, t2) => {
|
||||
return t1.value - t2.value;
|
||||
});
|
||||
};
|
||||
|
||||
renderInput = (threshold: Threshold) => {
|
||||
renderInput = (threshold: ThresholdWithKey) => {
|
||||
return (
|
||||
<div className="thresholds-row-input-inner">
|
||||
<span className="thresholds-row-input-inner-arrow" />
|
||||
@@ -175,12 +173,11 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{threshold.index === 0 && (
|
||||
{!isFinite(threshold.value) ? (
|
||||
<div className="thresholds-row-input-inner-value">
|
||||
<Input type="text" value="Base" readOnly />
|
||||
</div>
|
||||
)}
|
||||
{threshold.index > 0 && (
|
||||
) : (
|
||||
<>
|
||||
<div className="thresholds-row-input-inner-value">
|
||||
<Input
|
||||
@@ -189,7 +186,6 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
onChange={(event: ChangeEvent<HTMLInputElement>) => this.onChangeThresholdValue(event, threshold)}
|
||||
value={threshold.value}
|
||||
onBlur={this.onBlur}
|
||||
readOnly={threshold.index === 0}
|
||||
/>
|
||||
</div>
|
||||
<div className="thresholds-row-input-inner-remove" onClick={() => this.onRemoveThreshold(threshold)}>
|
||||
@@ -212,13 +208,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
{thresholds
|
||||
.slice(0)
|
||||
.reverse()
|
||||
.map((threshold, index) => {
|
||||
.map(threshold => {
|
||||
return (
|
||||
<div className="thresholds-row" key={`${threshold.index}-${index}`}>
|
||||
<div
|
||||
className="thresholds-row-add-button"
|
||||
onClick={() => this.onAddThreshold(threshold.index + 1)}
|
||||
>
|
||||
<div className="thresholds-row" key={`${threshold.key}`}>
|
||||
<div className="thresholds-row-add-button" onClick={() => this.onAddThresholdAfter(threshold)}>
|
||||
<i className="fa fa-plus" />
|
||||
</div>
|
||||
<div
|
||||
@@ -237,3 +230,10 @@ export class ThresholdsEditor extends PureComponent<Props, State> {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function threshodsWithoutKey(thresholds: ThresholdWithKey[]): Threshold[] {
|
||||
return thresholds.map(t => {
|
||||
const { key, ...rest } = t;
|
||||
return rest; // everything except key
|
||||
});
|
||||
}
|
||||
|
||||
@@ -9,7 +9,6 @@ exports[`Render should render with base threshold 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"color": "#7EB26D",
|
||||
"index": 0,
|
||||
"value": -Infinity,
|
||||
},
|
||||
],
|
||||
@@ -48,7 +47,7 @@ exports[`Render should render with base threshold 1`] = `
|
||||
>
|
||||
<div
|
||||
className="thresholds-row"
|
||||
key="0-0"
|
||||
key="100"
|
||||
>
|
||||
<div
|
||||
className="thresholds-row-add-button"
|
||||
|
||||
@@ -8,13 +8,13 @@ import { TimePickerPopover } from './TimePickerPopover';
|
||||
import { ClickOutsideWrapper } from '../ClickOutsideWrapper/ClickOutsideWrapper';
|
||||
|
||||
// Utils & Services
|
||||
import { isDateTime } from '@grafana/data';
|
||||
import { isDateTime, DateTime } from '@grafana/data';
|
||||
import { rangeUtil } from '@grafana/data';
|
||||
import { rawToTimeRange } from './time';
|
||||
|
||||
// Types
|
||||
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT } from '@grafana/data';
|
||||
import { SelectOptionItem } from '../Select/Select';
|
||||
import { TimeRange, TimeOption, TimeZone, TIME_FORMAT, SelectableValue } from '@grafana/data';
|
||||
import { isMathString } from '@grafana/data/src/utils/datemath';
|
||||
|
||||
export interface Props {
|
||||
value: TimeRange;
|
||||
@@ -77,7 +77,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
isCustomOpen: false,
|
||||
};
|
||||
|
||||
mapTimeOptionsToSelectOptionItems = (selectOptions: TimeOption[]) => {
|
||||
mapTimeOptionsToSelectableValues = (selectOptions: TimeOption[]) => {
|
||||
const options = selectOptions.map(timeOption => {
|
||||
return {
|
||||
label: timeOption.display,
|
||||
@@ -93,7 +93,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
return options;
|
||||
};
|
||||
|
||||
onSelectChanged = (item: SelectOptionItem<TimeOption>) => {
|
||||
onSelectChanged = (item: SelectableValue<TimeOption>) => {
|
||||
const { onChange, timeZone } = this.props;
|
||||
|
||||
if (item.value && item.value.from === 'custom') {
|
||||
@@ -122,15 +122,23 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
render() {
|
||||
const { selectOptions: selectTimeOptions, value, onMoveBackward, onMoveForward, onZoom, timeZone } = this.props;
|
||||
const { isCustomOpen } = this.state;
|
||||
const options = this.mapTimeOptionsToSelectOptionItems(selectTimeOptions);
|
||||
const options = this.mapTimeOptionsToSelectableValues(selectTimeOptions);
|
||||
const currentOption = options.find(item => isTimeOptionEqualToTimeRange(item.value, value));
|
||||
const rangeString = rangeUtil.describeTimeRange(value.raw);
|
||||
|
||||
const isUTC = timeZone === 'utc';
|
||||
|
||||
const adjustedTime = (time: DateTime) => (isUTC ? time.utc() : time.local()) || null;
|
||||
const adjustedTimeRange = {
|
||||
to: isMathString(value.raw.to) ? value.raw.to : adjustedTime(value.to),
|
||||
from: isMathString(value.raw.from) ? value.raw.from : adjustedTime(value.from),
|
||||
};
|
||||
const rangeString = rangeUtil.describeTimeRange(adjustedTimeRange);
|
||||
|
||||
const label = (
|
||||
<>
|
||||
{isCustomOpen && <span>Custom time range</span>}
|
||||
{!isCustomOpen && <span>{rangeString}</span>}
|
||||
{timeZone === 'utc' && <span className="time-picker-utc">UTC</span>}
|
||||
{isUTC && <span className="time-picker-utc">UTC</span>}
|
||||
</>
|
||||
);
|
||||
const isAbsolute = isDateTime(value.raw.to);
|
||||
@@ -148,6 +156,7 @@ export class TimePicker extends PureComponent<Props, State> {
|
||||
value={currentOption}
|
||||
label={label}
|
||||
options={options}
|
||||
maxMenuHeight={600}
|
||||
onChange={this.onSelectChanged}
|
||||
iconClass={'fa fa-clock-o fa-fw'}
|
||||
tooltipContent={<TimePickerTooltipContent timeRange={value} />}
|
||||
|
||||
@@ -18,7 +18,6 @@
|
||||
|
||||
.time-picker-popover {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: space-around;
|
||||
border: 1px solid $popover-border-color;
|
||||
border-radius: $border-radius;
|
||||
@@ -31,41 +30,41 @@
|
||||
max-width: 600px;
|
||||
top: 41px;
|
||||
right: 0px;
|
||||
}
|
||||
|
||||
.time-picker-popover-body {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: space-around;
|
||||
padding: $space-md;
|
||||
padding-bottom: 0;
|
||||
.time-picker-popover-body {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: space-around;
|
||||
padding: $space-md;
|
||||
padding-bottom: 0;
|
||||
}
|
||||
|
||||
.time-picker-popover-title {
|
||||
font-size: $font-size-md;
|
||||
font-weight: $font-weight-semi-bold;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges:first-child {
|
||||
margin-right: $space-md;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges-input {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
align-items: center;
|
||||
margin-bottom: $space-sm;
|
||||
|
||||
.time-picker-input-error {
|
||||
box-shadow: inset 0 0px 5px $red;
|
||||
}
|
||||
}
|
||||
|
||||
.time-picker-popover-title {
|
||||
font-size: $font-size-md;
|
||||
font-weight: $font-weight-semi-bold;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges:first-child {
|
||||
margin-right: $space-md;
|
||||
}
|
||||
|
||||
.time-picker-popover-body-custom-ranges-input {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
align-items: center;
|
||||
margin-bottom: $space-sm;
|
||||
|
||||
.time-picker-input-error {
|
||||
box-shadow: inset 0 0px 5px $red;
|
||||
}
|
||||
}
|
||||
|
||||
.time-picker-popover-footer {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: center;
|
||||
padding: $space-md;
|
||||
}
|
||||
.time-picker-popover-footer {
|
||||
display: flex;
|
||||
flex-flow: row nowrap;
|
||||
justify-content: center;
|
||||
padding: $space-md;
|
||||
}
|
||||
|
||||
.time-picker-popover-header {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
export { DeleteButton } from './DeleteButton/DeleteButton';
|
||||
export { Tooltip } from './Tooltip/Tooltip';
|
||||
export { PopperController } from './Tooltip/PopperController';
|
||||
export { PopperController, PopperContent } from './Tooltip/PopperController';
|
||||
export { Popper } from './Tooltip/Popper';
|
||||
export { Portal } from './Portal/Portal';
|
||||
export { CustomScrollbar } from './CustomScrollbar/CustomScrollbar';
|
||||
@@ -9,7 +9,7 @@ export * from './Button/Button';
|
||||
export { ButtonVariant } from './Button/AbstractButton';
|
||||
|
||||
// Select
|
||||
export { Select, AsyncSelect, SelectOptionItem } from './Select/Select';
|
||||
export { Select, AsyncSelect } from './Select/Select';
|
||||
export { IndicatorsContainer } from './Select/IndicatorsContainer';
|
||||
export { NoOptionsMessage } from './Select/NoOptionsMessage';
|
||||
export { default as resetSelectStyles } from './Select/resetSelectStyles';
|
||||
|
||||
@@ -77,6 +77,13 @@ interface PluginMetaInfoLink {
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface PluginBuildInfo {
|
||||
time?: number;
|
||||
repo?: string;
|
||||
branch?: string;
|
||||
hash?: string;
|
||||
}
|
||||
|
||||
export interface PluginMetaInfo {
|
||||
author: {
|
||||
name: string;
|
||||
@@ -88,6 +95,7 @@ export interface PluginMetaInfo {
|
||||
large: string;
|
||||
small: string;
|
||||
};
|
||||
build?: PluginBuildInfo;
|
||||
screenshots: any[];
|
||||
updated: string;
|
||||
version: string;
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
export const deprecationWarning = (file: string, oldName: string, newName: string) => {
|
||||
const message = `[Deprecation warning] ${file}: ${oldName} is deprecated. Use ${newName} instead`;
|
||||
export const deprecationWarning = (file: string, oldName: string, newName?: string) => {
|
||||
let message = `[Deprecation warning] ${file}: ${oldName} is deprecated`;
|
||||
if (newName) {
|
||||
message += `. Use ${newName} instead`;
|
||||
}
|
||||
console.warn(message);
|
||||
};
|
||||
|
||||
@@ -103,7 +103,7 @@ describe('Format value', () => {
|
||||
it('should return if value isNaN', () => {
|
||||
const valueMappings: ValueMapping[] = [];
|
||||
const value = 'N/A';
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings });
|
||||
const instance = getDisplayProcessor({ field: { mappings: valueMappings } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -114,7 +114,7 @@ describe('Format value', () => {
|
||||
const valueMappings: ValueMapping[] = [];
|
||||
const value = '6';
|
||||
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -127,7 +127,7 @@ describe('Format value', () => {
|
||||
{ id: 1, operator: '', text: '1-9', type: MappingType.RangeToText, from: '1', to: '9' },
|
||||
];
|
||||
const value = '10';
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
|
||||
const result = instance(value);
|
||||
|
||||
@@ -160,7 +160,7 @@ describe('Format value', () => {
|
||||
{ id: 1, operator: '', text: 'elva', type: MappingType.ValueToText, value: '11' },
|
||||
];
|
||||
const value = '11';
|
||||
const instance = getDisplayProcessor({ mappings: valueMappings, field: { decimals: 1 } });
|
||||
const instance = getDisplayProcessor({ field: { decimals: 1, mappings: valueMappings } });
|
||||
|
||||
expect(instance(value).text).toEqual('1-20');
|
||||
});
|
||||
|
||||
@@ -7,16 +7,13 @@ import { getColorFromHexRgbOrName } from './namedColorsPalette';
|
||||
|
||||
// Types
|
||||
import { DecimalInfo, DisplayValue, GrafanaTheme, GrafanaThemeType, DecimalCount } from '../types';
|
||||
import { DateTime, dateTime, Threshold, ValueMapping, getMappedValue, Field } from '@grafana/data';
|
||||
import { DateTime, dateTime, Threshold, getMappedValue, Field } from '@grafana/data';
|
||||
|
||||
export type DisplayProcessor = (value: any) => DisplayValue;
|
||||
|
||||
export interface DisplayValueOptions {
|
||||
field?: Partial<Field>;
|
||||
|
||||
mappings?: ValueMapping[];
|
||||
thresholds?: Threshold[];
|
||||
|
||||
// Alternative to empty string
|
||||
noValue?: string;
|
||||
|
||||
@@ -31,7 +28,8 @@ export function getDisplayProcessor(options?: DisplayValueOptions): DisplayProce
|
||||
const formatFunc = getValueFormat(field.unit || 'none');
|
||||
|
||||
return (value: any) => {
|
||||
const { mappings, thresholds, theme } = options;
|
||||
const { theme } = options;
|
||||
const { mappings, thresholds } = field;
|
||||
let color;
|
||||
|
||||
let text = _.toString(value);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { getFieldProperties, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
|
||||
import { FieldType, ReducerID } from '@grafana/data';
|
||||
import { FieldType, ReducerID, Threshold } from '@grafana/data';
|
||||
import { GrafanaThemeType } from '../types/theme';
|
||||
import { getTheme } from '../themes/index';
|
||||
|
||||
@@ -55,8 +55,6 @@ describe('FieldDisplay', () => {
|
||||
},
|
||||
fieldOptions: {
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -68,8 +66,6 @@ describe('FieldDisplay', () => {
|
||||
...options,
|
||||
fieldOptions: {
|
||||
calcs: [ReducerID.first],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {
|
||||
title: '$__cell_0 * $__field_name * $__series_name',
|
||||
@@ -88,8 +84,6 @@ describe('FieldDisplay', () => {
|
||||
...options,
|
||||
fieldOptions: {
|
||||
calcs: [ReducerID.last],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -104,8 +98,6 @@ describe('FieldDisplay', () => {
|
||||
values: true, //
|
||||
limit: 1000,
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
@@ -120,12 +112,53 @@ describe('FieldDisplay', () => {
|
||||
values: true, //
|
||||
limit: 2,
|
||||
calcs: [],
|
||||
mappings: [],
|
||||
thresholds: [],
|
||||
override: {},
|
||||
defaults: {},
|
||||
},
|
||||
});
|
||||
expect(display.map(v => v.display.numeric)).toEqual([1, 3]); // First 2 are from the first field
|
||||
});
|
||||
|
||||
it('should restore -Infinity value for base threshold', () => {
|
||||
const field = getFieldProperties({
|
||||
thresholds: [
|
||||
({
|
||||
color: '#73BF69',
|
||||
value: null,
|
||||
} as unknown) as Threshold,
|
||||
{
|
||||
color: '#F2495C',
|
||||
value: 50,
|
||||
},
|
||||
],
|
||||
});
|
||||
expect(field.thresholds!.length).toEqual(2);
|
||||
expect(field.thresholds![0].value).toBe(-Infinity);
|
||||
});
|
||||
|
||||
it('Should return field thresholds when there is no data', () => {
|
||||
const options: GetFieldDisplayValuesOptions = {
|
||||
data: [
|
||||
{
|
||||
name: 'No data',
|
||||
fields: [],
|
||||
rows: [],
|
||||
},
|
||||
],
|
||||
replaceVariables: (value: string) => {
|
||||
return value;
|
||||
},
|
||||
fieldOptions: {
|
||||
calcs: [],
|
||||
override: {},
|
||||
defaults: {
|
||||
thresholds: [{ color: '#F2495C', value: 50 }],
|
||||
},
|
||||
},
|
||||
theme: getTheme(GrafanaThemeType.Dark),
|
||||
};
|
||||
|
||||
const display = getFieldDisplayValues(options);
|
||||
expect(display[0].field.thresholds!.length).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,16 +4,7 @@ import toString from 'lodash/toString';
|
||||
import { DisplayValue, GrafanaTheme, InterpolateFunction, ScopedVars, GraphSeriesValue } from '../types/index';
|
||||
import { getDisplayProcessor } from './displayValue';
|
||||
import { getFlotPairs } from './flotPairs';
|
||||
import {
|
||||
ValueMapping,
|
||||
Threshold,
|
||||
ReducerID,
|
||||
reduceField,
|
||||
FieldType,
|
||||
NullValueMode,
|
||||
DataFrame,
|
||||
Field,
|
||||
} from '@grafana/data';
|
||||
import { ReducerID, reduceField, FieldType, NullValueMode, DataFrame, Field } from '@grafana/data';
|
||||
|
||||
export interface FieldDisplayOptions {
|
||||
values?: boolean; // If true show each row value
|
||||
@@ -22,10 +13,6 @@ export interface FieldDisplayOptions {
|
||||
|
||||
defaults: Partial<Field>; // Use these values unless otherwise stated
|
||||
override: Partial<Field>; // Set these values regardless of the source
|
||||
|
||||
// Could these be data driven also?
|
||||
thresholds: Threshold[];
|
||||
mappings: ValueMapping[];
|
||||
}
|
||||
|
||||
export const VAR_SERIES_NAME = '__series_name';
|
||||
@@ -127,8 +114,6 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
|
||||
const display = getDisplayProcessor({
|
||||
field,
|
||||
mappings: fieldOptions.mappings,
|
||||
thresholds: fieldOptions.thresholds,
|
||||
theme: options.theme,
|
||||
});
|
||||
|
||||
@@ -197,7 +182,10 @@ export const getFieldDisplayValues = (options: GetFieldDisplayValuesOptions): Fi
|
||||
|
||||
if (values.length === 0) {
|
||||
values.push({
|
||||
field: { name: 'No Data' },
|
||||
field: {
|
||||
...defaults,
|
||||
name: 'No Data',
|
||||
},
|
||||
display: {
|
||||
numeric: 0,
|
||||
text: 'No data',
|
||||
@@ -259,10 +247,16 @@ type PartialField = Partial<Field>;
|
||||
|
||||
export function getFieldProperties(...props: PartialField[]): Field {
|
||||
let field = props[0] as Field;
|
||||
|
||||
for (let i = 1; i < props.length; i++) {
|
||||
field = applyFieldProperties(field, props[i]);
|
||||
}
|
||||
|
||||
// First value is always -Infinity
|
||||
if (field.thresholds && field.thresholds.length) {
|
||||
field.thresholds[0].value = -Infinity;
|
||||
}
|
||||
|
||||
// Verify that max > min
|
||||
if (field.hasOwnProperty('min') && field.hasOwnProperty('max') && field.min! > field.max!) {
|
||||
return {
|
||||
|
||||
@@ -1,19 +1,11 @@
|
||||
{
|
||||
"extends": "../../tsconfig.json",
|
||||
"extends": "../tsconfig.json",
|
||||
"include": ["src/**/*.ts", "src/**/*.tsx"],
|
||||
"exclude": ["dist", "node_modules"],
|
||||
"compilerOptions": {
|
||||
"rootDirs": [".", "stories"],
|
||||
"module": "esnext",
|
||||
"outDir": "compiled",
|
||||
"declaration": true,
|
||||
"declarationDir": "dist",
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"typeRoots": ["./node_modules/@types", "types"],
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false
|
||||
"declarationDir": "dist",
|
||||
"outDir": "compiled"
|
||||
}
|
||||
}
|
||||
|
||||
13
packages/tsconfig.json
Normal file
13
packages/tsconfig.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"compilerOptions": {
|
||||
"module": "esnext",
|
||||
"declaration": true,
|
||||
"strict": true,
|
||||
"alwaysStrict": true,
|
||||
"noImplicitAny": true,
|
||||
"strictNullChecks": true,
|
||||
"skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors,
|
||||
"removeComments": false
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,4 @@
|
||||
ARG BASE_IMAGE=ubuntu:latest
|
||||
ARG BASE_IMAGE=ubuntu:18.04
|
||||
FROM ${BASE_IMAGE}
|
||||
|
||||
ARG GRAFANA_TGZ="grafana-latest.linux-x64.tar.gz"
|
||||
@@ -12,7 +12,7 @@ COPY ${GRAFANA_TGZ} /tmp/grafana.tar.gz
|
||||
# Change to tar xfzv to make tar print every file it extracts
|
||||
RUN mkdir /tmp/grafana && tar xfz /tmp/grafana.tar.gz --strip-components=1 -C /tmp/grafana
|
||||
|
||||
ARG BASE_IMAGE=ubuntu:latest
|
||||
ARG BASE_IMAGE=ubuntu:18.04
|
||||
FROM ${BASE_IMAGE}
|
||||
|
||||
ARG GF_UID="472"
|
||||
|
||||
@@ -59,14 +59,16 @@ docker_tag_all () {
|
||||
fi
|
||||
}
|
||||
|
||||
docker_build "ubuntu:latest" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
|
||||
docker_build "ubuntu:18.04" "grafana-latest.linux-x64.tar.gz" "${_docker_repo}:${_grafana_version}"
|
||||
if [ $BUILD_FAST = "0" ]; then
|
||||
docker_build "arm32v7/ubuntu:latest" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
|
||||
docker_build "arm64v8/ubuntu:latest" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
|
||||
docker_build "arm32v7/ubuntu:18.04" "grafana-latest.linux-armv7.tar.gz" "${_docker_repo}-arm32v7-linux:${_grafana_version}"
|
||||
docker_build "arm64v8/ubuntu:18.04" "grafana-latest.linux-arm64.tar.gz" "${_docker_repo}-arm64v8-linux:${_grafana_version}"
|
||||
fi
|
||||
# Tag as 'latest' for official release; otherwise tag as grafana/grafana:master
|
||||
if echo "$_grafana_tag" | grep -q "^v"; then
|
||||
docker_tag_all "${_docker_repo}" "latest"
|
||||
# Create the expected tag for running the end to end tests successfully
|
||||
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_tag}"
|
||||
else
|
||||
docker_tag_all "${_docker_repo}" "master"
|
||||
docker tag "${_docker_repo}:${_grafana_version}" "grafana/grafana-dev:${_grafana_version}"
|
||||
|
||||
@@ -38,8 +38,14 @@ if echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -vq "beta"
|
||||
echo "pushing ${_docker_repo}:latest"
|
||||
docker_push_all "${_docker_repo}" "latest"
|
||||
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
||||
# Push to the grafana-dev repository with the expected tag
|
||||
# for running the end to end tests successfully
|
||||
docker push "grafana/grafana-dev:${_grafana_tag}"
|
||||
elif echo "$_grafana_tag" | grep -q "^v" && echo "$_grafana_tag" | grep -q "beta"; then
|
||||
docker_push_all "${_docker_repo}" "${_grafana_version}"
|
||||
# Push to the grafana-dev repository with the expected tag
|
||||
# for running the end to end tests successfully
|
||||
docker push "grafana/grafana-dev:${_grafana_tag}"
|
||||
elif echo "$_grafana_tag" | grep -q "master"; then
|
||||
docker_push_all "${_docker_repo}" "master"
|
||||
docker push "grafana/grafana-dev:${_grafana_version}"
|
||||
|
||||
@@ -34,7 +34,7 @@ func AdminCreateUser(c *models.ReqContext, form dtos.AdminCreateUserForm) {
|
||||
return
|
||||
}
|
||||
|
||||
metrics.M_Api_Admin_User_Create.Inc()
|
||||
metrics.MApiAdminUserCreate.Inc()
|
||||
|
||||
user := cmd.Result
|
||||
|
||||
|
||||
@@ -133,7 +133,7 @@ func (hs *HTTPServer) GetDashboard(c *m.ReqContext) Response {
|
||||
Meta: meta,
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Get)
|
||||
c.TimeRequest(metrics.MApiDashboardGet)
|
||||
return JSON(200, dto)
|
||||
}
|
||||
|
||||
@@ -278,12 +278,11 @@ func (hs *HTTPServer) PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand)
|
||||
inFolder := cmd.FolderId > 0
|
||||
err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
|
||||
if err != nil {
|
||||
hs.log.Error("Could not make user admin", "dashboard", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
return Error(500, "Failed to make user admin of dashboard", err)
|
||||
hs.log.Error("Could not make user admin", "dashboard", dashboard.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
}
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Save)
|
||||
c.TimeRequest(metrics.MApiDashboardSave)
|
||||
return JSON(200, util.DynMap{
|
||||
"status": "success",
|
||||
"slug": dashboard.Slug,
|
||||
|
||||
@@ -97,7 +97,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
||||
cmd.ExternalDeleteUrl = response.DeleteUrl
|
||||
cmd.Dashboard = simplejson.New()
|
||||
|
||||
metrics.M_Api_Dashboard_Snapshot_External.Inc()
|
||||
metrics.MApiDashboardSnapshotExternal.Inc()
|
||||
} else {
|
||||
if cmd.Key == "" {
|
||||
cmd.Key = util.GetRandomString(32)
|
||||
@@ -109,7 +109,7 @@ func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotComma
|
||||
|
||||
url = setting.ToAbsUrl("dashboard/snapshot/" + cmd.Key)
|
||||
|
||||
metrics.M_Api_Dashboard_Snapshot_Create.Inc()
|
||||
metrics.MApiDashboardSnapshotCreate.Inc()
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(&cmd); err != nil {
|
||||
@@ -154,7 +154,7 @@ func GetDashboardSnapshot(c *m.ReqContext) {
|
||||
},
|
||||
}
|
||||
|
||||
metrics.M_Api_Dashboard_Snapshot_Get.Inc()
|
||||
metrics.MApiDashboardSnapshotGet.Inc()
|
||||
|
||||
c.Resp.Header().Set("Cache-Control", "public, max-age=3600")
|
||||
c.JSON(200, dto)
|
||||
|
||||
@@ -8,7 +8,7 @@ import (
|
||||
)
|
||||
|
||||
func (hs *HTTPServer) ProxyDataSourceRequest(c *m.ReqContext) {
|
||||
c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer)
|
||||
c.TimeRequest(metrics.MDataSourceProxyReqTimer)
|
||||
|
||||
dsId := c.ParamsInt64(":id")
|
||||
ds, err := hs.DatasourceCache.GetDatasource(dsId, c.SignedInUser, c.SkipCache)
|
||||
|
||||
@@ -64,7 +64,6 @@ func (hs *HTTPServer) CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) R
|
||||
if hs.Cfg.EditorsCanAdmin {
|
||||
if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
|
||||
hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
|
||||
return Error(500, "Failed to make user admin of folder", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -269,7 +269,8 @@ func (hs *HTTPServer) metricsEndpoint(ctx *macaron.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
promhttp.HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
|
||||
promhttp.
|
||||
HandlerFor(prometheus.DefaultGatherer, promhttp.HandlerOpts{}).
|
||||
ServeHTTP(ctx.Resp, ctx.Req.Request)
|
||||
}
|
||||
|
||||
|
||||
146
pkg/api/index.go
146
pkg/api/index.go
@@ -242,74 +242,69 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
|
||||
}
|
||||
}
|
||||
|
||||
if c.IsGrafanaAdmin || c.OrgRole == m.ROLE_ADMIN {
|
||||
cfgNode := &dtos.NavLink{
|
||||
Id: "cfg",
|
||||
Text: "Configuration",
|
||||
SubTitle: "Organization: " + c.OrgName,
|
||||
Icon: "gicon gicon-cog",
|
||||
Url: setting.AppSubUrl + "/datasources",
|
||||
Children: []*dtos.NavLink{
|
||||
{
|
||||
Text: "Data Sources",
|
||||
Icon: "gicon gicon-datasources",
|
||||
Description: "Add and configure data sources",
|
||||
Id: "datasources",
|
||||
Url: setting.AppSubUrl + "/datasources",
|
||||
},
|
||||
{
|
||||
Text: "Users",
|
||||
Id: "users",
|
||||
Description: "Manage org members",
|
||||
Icon: "gicon gicon-user",
|
||||
Url: setting.AppSubUrl + "/org/users",
|
||||
},
|
||||
{
|
||||
Text: "Teams",
|
||||
Id: "teams",
|
||||
Description: "Manage org groups",
|
||||
Icon: "gicon gicon-team",
|
||||
Url: setting.AppSubUrl + "/org/teams",
|
||||
},
|
||||
{
|
||||
Text: "Plugins",
|
||||
Id: "plugins",
|
||||
Description: "View and configure plugins",
|
||||
Icon: "gicon gicon-plugins",
|
||||
Url: setting.AppSubUrl + "/plugins",
|
||||
},
|
||||
{
|
||||
Text: "Preferences",
|
||||
Id: "org-settings",
|
||||
Description: "Organization preferences",
|
||||
Icon: "gicon gicon-preferences",
|
||||
Url: setting.AppSubUrl + "/org",
|
||||
},
|
||||
configNodes := []*dtos.NavLink{}
|
||||
|
||||
{
|
||||
Text: "API Keys",
|
||||
Id: "apikeys",
|
||||
Description: "Create & manage API keys",
|
||||
Icon: "gicon gicon-apikeys",
|
||||
Url: setting.AppSubUrl + "/org/apikeys",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if c.OrgRole != m.ROLE_ADMIN {
|
||||
cfgNode = &dtos.NavLink{
|
||||
Id: "cfg",
|
||||
Text: "Configuration",
|
||||
SubTitle: "Organization: " + c.OrgName,
|
||||
Icon: "gicon gicon-cog",
|
||||
Url: setting.AppSubUrl + "/admin/users",
|
||||
Children: make([]*dtos.NavLink, 0),
|
||||
}
|
||||
}
|
||||
|
||||
data.NavTree = append(data.NavTree, cfgNode)
|
||||
if c.OrgRole == m.ROLE_ADMIN {
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Data Sources",
|
||||
Icon: "gicon gicon-datasources",
|
||||
Description: "Add and configure data sources",
|
||||
Id: "datasources",
|
||||
Url: setting.AppSubUrl + "/datasources",
|
||||
})
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Users",
|
||||
Id: "users",
|
||||
Description: "Manage org members",
|
||||
Icon: "gicon gicon-user",
|
||||
Url: setting.AppSubUrl + "/org/users",
|
||||
})
|
||||
}
|
||||
|
||||
if c.OrgRole == m.ROLE_ADMIN || hs.Cfg.EditorsCanAdmin {
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Teams",
|
||||
Id: "teams",
|
||||
Description: "Manage org groups",
|
||||
Icon: "gicon gicon-team",
|
||||
Url: setting.AppSubUrl + "/org/teams",
|
||||
})
|
||||
}
|
||||
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Plugins",
|
||||
Id: "plugins",
|
||||
Description: "View and configure plugins",
|
||||
Icon: "gicon gicon-plugins",
|
||||
Url: setting.AppSubUrl + "/plugins",
|
||||
})
|
||||
|
||||
if c.OrgRole == m.ROLE_ADMIN {
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "Preferences",
|
||||
Id: "org-settings",
|
||||
Description: "Organization preferences",
|
||||
Icon: "gicon gicon-preferences",
|
||||
Url: setting.AppSubUrl + "/org",
|
||||
})
|
||||
configNodes = append(configNodes, &dtos.NavLink{
|
||||
Text: "API Keys",
|
||||
Id: "apikeys",
|
||||
Description: "Create & manage API keys",
|
||||
Icon: "gicon gicon-apikeys",
|
||||
Url: setting.AppSubUrl + "/org/apikeys",
|
||||
})
|
||||
}
|
||||
|
||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||
Id: "cfg",
|
||||
Text: "Configuration",
|
||||
SubTitle: "Organization: " + c.OrgName,
|
||||
Icon: "gicon gicon-cog",
|
||||
Url: configNodes[0].Url,
|
||||
Children: configNodes,
|
||||
})
|
||||
|
||||
if c.IsGrafanaAdmin {
|
||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||
Text: "Server Admin",
|
||||
@@ -327,27 +322,6 @@ func (hs *HTTPServer) setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, er
|
||||
})
|
||||
}
|
||||
|
||||
if (c.OrgRole == m.ROLE_EDITOR || c.OrgRole == m.ROLE_VIEWER) && hs.Cfg.EditorsCanAdmin {
|
||||
cfgNode := &dtos.NavLink{
|
||||
Id: "cfg",
|
||||
Text: "Configuration",
|
||||
SubTitle: "Organization: " + c.OrgName,
|
||||
Icon: "gicon gicon-cog",
|
||||
Url: setting.AppSubUrl + "/org/teams",
|
||||
Children: []*dtos.NavLink{
|
||||
{
|
||||
Text: "Teams",
|
||||
Id: "teams",
|
||||
Description: "Manage org groups",
|
||||
Icon: "gicon gicon-team",
|
||||
Url: setting.AppSubUrl + "/org/teams",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
data.NavTree = append(data.NavTree, cfgNode)
|
||||
}
|
||||
|
||||
data.NavTree = append(data.NavTree, &dtos.NavLink{
|
||||
Text: "Help",
|
||||
SubTitle: fmt.Sprintf(`%s v%s (%s)`, setting.ApplicationName, setting.BuildVersion, setting.BuildCommit),
|
||||
|
||||
@@ -44,7 +44,7 @@ func (hs *HTTPServer) LoginView(c *models.ReqContext) {
|
||||
viewData.Settings["loginHint"] = setting.LoginHint
|
||||
viewData.Settings["passwordHint"] = setting.PasswordHint
|
||||
viewData.Settings["disableLoginForm"] = setting.DisableLoginForm
|
||||
viewData.Settings["samlEnabled"] = hs.Cfg.SAMLEnabled
|
||||
viewData.Settings["samlEnabled"] = setting.IsEnterprise && hs.Cfg.SAMLEnabled
|
||||
|
||||
if loginError, ok := tryGetEncryptedCookie(c, LoginErrorCookieName); ok {
|
||||
//this cookie is only set whenever an OAuth login fails
|
||||
@@ -81,7 +81,7 @@ func tryOAuthAutoLogin(c *models.ReqContext) bool {
|
||||
}
|
||||
oauthInfos := setting.OAuthService.OAuthInfos
|
||||
if len(oauthInfos) != 1 {
|
||||
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured.")
|
||||
log.Warn("Skipping OAuth auto login because multiple OAuth providers are configured")
|
||||
return false
|
||||
}
|
||||
for key := range setting.OAuthService.OAuthInfos {
|
||||
@@ -114,12 +114,16 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
|
||||
}
|
||||
|
||||
if err := bus.Dispatch(authQuery); err != nil {
|
||||
e401 := Error(401, "Invalid username or password", err)
|
||||
if err == login.ErrInvalidCredentials || err == login.ErrTooManyLoginAttempts {
|
||||
return Error(401, "Invalid username or password", err)
|
||||
return e401
|
||||
}
|
||||
|
||||
// Do not expose disabled status,
|
||||
// just show incorrect user credentials error (see #17947)
|
||||
if err == login.ErrUserDisabled {
|
||||
return Error(401, "User is disabled", err)
|
||||
hs.log.Warn("User is disabled", "user", cmd.User)
|
||||
return e401
|
||||
}
|
||||
|
||||
return Error(500, "Error while trying to authenticate user", err)
|
||||
@@ -138,7 +142,7 @@ func (hs *HTTPServer) LoginPost(c *models.ReqContext, cmd dtos.LoginCommand) Res
|
||||
c.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
|
||||
}
|
||||
|
||||
metrics.M_Api_Login_Post.Inc()
|
||||
metrics.MApiLoginPost.Inc()
|
||||
return JSON(200, result)
|
||||
}
|
||||
|
||||
|
||||
@@ -60,7 +60,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
if code == "" {
|
||||
state := GenStateString()
|
||||
hashedState := hashStatecode(state, setting.OAuthService.OAuthInfos[name].ClientSecret)
|
||||
hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60)
|
||||
hs.writeCookie(ctx.Resp, OauthStateCookieName, hashedState, 60, hs.Cfg.CookieSameSite)
|
||||
if setting.OAuthService.OAuthInfos[name].HostedDomain == "" {
|
||||
ctx.Redirect(connect.AuthCodeURL(state, oauth2.AccessTypeOnline))
|
||||
} else {
|
||||
@@ -73,7 +73,7 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
|
||||
// delete cookie
|
||||
ctx.Resp.Header().Del("Set-Cookie")
|
||||
hs.deleteCookie(ctx.Resp, OauthStateCookieName)
|
||||
hs.deleteCookie(ctx.Resp, OauthStateCookieName, hs.Cfg.CookieSameSite)
|
||||
|
||||
if cookieState == "" {
|
||||
ctx.Handle(500, "login.OAuthLogin(missing saved state)", nil)
|
||||
@@ -191,15 +191,18 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
return
|
||||
}
|
||||
|
||||
// Do not expose disabled status,
|
||||
// just show incorrect user credentials error (see #17947)
|
||||
if cmd.Result.IsDisabled {
|
||||
hs.redirectWithError(ctx, login.ErrUserDisabled)
|
||||
oauthLogger.Warn("User is disabled", "user", cmd.Result.Login)
|
||||
hs.redirectWithError(ctx, login.ErrInvalidCredentials)
|
||||
return
|
||||
}
|
||||
|
||||
// login
|
||||
hs.loginUserWithUser(cmd.Result, ctx)
|
||||
|
||||
metrics.M_Api_Login_OAuth.Inc()
|
||||
metrics.MApiLoginOAuth.Inc()
|
||||
|
||||
if redirectTo, _ := url.QueryUnescape(ctx.GetCookie("redirect_to")); len(redirectTo) > 0 {
|
||||
ctx.SetCookie("redirect_to", "", -1, setting.AppSubUrl+"/")
|
||||
@@ -210,20 +213,23 @@ func (hs *HTTPServer) OAuthLogin(ctx *m.ReqContext) {
|
||||
ctx.Redirect(setting.AppSubUrl + "/")
|
||||
}
|
||||
|
||||
func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string) {
|
||||
hs.writeCookie(w, name, "", -1)
|
||||
func (hs *HTTPServer) deleteCookie(w http.ResponseWriter, name string, sameSite http.SameSite) {
|
||||
hs.writeCookie(w, name, "", -1, sameSite)
|
||||
}
|
||||
|
||||
func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int) {
|
||||
http.SetCookie(w, &http.Cookie{
|
||||
func (hs *HTTPServer) writeCookie(w http.ResponseWriter, name string, value string, maxAge int, sameSite http.SameSite) {
|
||||
cookie := http.Cookie{
|
||||
Name: name,
|
||||
MaxAge: maxAge,
|
||||
Value: value,
|
||||
HttpOnly: true,
|
||||
Path: setting.AppSubUrl + "/",
|
||||
Secure: hs.Cfg.CookieSecure,
|
||||
SameSite: hs.Cfg.CookieSameSite,
|
||||
})
|
||||
}
|
||||
if sameSite != http.SameSiteDefaultMode {
|
||||
cookie.SameSite = sameSite
|
||||
}
|
||||
http.SetCookie(w, &cookie)
|
||||
}
|
||||
|
||||
func hashStatecode(code, seed string) string {
|
||||
|
||||
@@ -88,7 +88,7 @@ func CreateOrg(c *m.ReqContext, cmd m.CreateOrgCommand) Response {
|
||||
return Error(500, "Failed to create organization", err)
|
||||
}
|
||||
|
||||
metrics.M_Api_Org_Create.Inc()
|
||||
metrics.MApiOrgCreate.Inc()
|
||||
|
||||
return JSON(200, &util.DynMap{
|
||||
"orgId": cmd.Result.Id,
|
||||
|
||||
@@ -188,8 +188,8 @@ func (hs *HTTPServer) CompleteInvite(c *m.ReqContext, completeInvite dtos.Comple
|
||||
|
||||
hs.loginUserWithUser(user, c)
|
||||
|
||||
metrics.M_Api_User_SignUpCompleted.Inc()
|
||||
metrics.M_Api_User_SignUpInvite.Inc()
|
||||
metrics.MApiUserSignUpCompleted.Inc()
|
||||
metrics.MApiUserSignUpInvite.Inc()
|
||||
|
||||
return Success("User created and logged in")
|
||||
}
|
||||
|
||||
@@ -61,6 +61,6 @@ func Search(c *m.ReqContext) Response {
|
||||
return Error(500, "Search failed", err)
|
||||
}
|
||||
|
||||
c.TimeRequest(metrics.M_Api_Dashboard_Search)
|
||||
c.TimeRequest(metrics.MApiDashboardSearch)
|
||||
return JSON(200, searchQuery.Result)
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ func SignUp(c *m.ReqContext, form dtos.SignUpForm) Response {
|
||||
Code: cmd.Code,
|
||||
})
|
||||
|
||||
metrics.M_Api_User_SignUpStarted.Inc()
|
||||
metrics.MApiUserSignUpStarted.Inc()
|
||||
|
||||
return JSON(200, util.DynMap{"status": "SignUpCreated"})
|
||||
}
|
||||
@@ -110,7 +110,7 @@ func (hs *HTTPServer) SignUpStep2(c *m.ReqContext, form dtos.SignUpStep2Form) Re
|
||||
}
|
||||
|
||||
hs.loginUserWithUser(user, c)
|
||||
metrics.M_Api_User_SignUpCompleted.Inc()
|
||||
metrics.MApiUserSignUpCompleted.Inc()
|
||||
|
||||
return JSON(200, apiResponse)
|
||||
}
|
||||
|
||||
@@ -30,23 +30,6 @@ func GetTeamMembers(c *m.ReqContext) Response {
|
||||
return JSON(200, query.Result)
|
||||
}
|
||||
|
||||
func GetAuthProviderLabel(authModule string) string {
|
||||
switch authModule {
|
||||
case "oauth_github":
|
||||
return "GitHub"
|
||||
case "oauth_google":
|
||||
return "Google"
|
||||
case "oauth_gitlab":
|
||||
return "GitLab"
|
||||
case "oauth_grafana_com", "oauth_grafananet":
|
||||
return "grafana.com"
|
||||
case "ldap", "":
|
||||
return "LDAP"
|
||||
default:
|
||||
return "OAuth"
|
||||
}
|
||||
}
|
||||
|
||||
// POST /api/teams/:teamId/members
|
||||
func (hs *HTTPServer) AddTeamMember(c *m.ReqContext, cmd m.AddTeamMemberCommand) Response {
|
||||
cmd.OrgId = c.OrgId
|
||||
|
||||
@@ -29,8 +29,11 @@ func getUserUserProfile(userID int64) Response {
|
||||
}
|
||||
|
||||
getAuthQuery := m.GetAuthInfoQuery{UserId: userID}
|
||||
query.Result.AuthLabels = []string{}
|
||||
if err := bus.Dispatch(&getAuthQuery); err == nil {
|
||||
query.Result.AuthModule = []string{getAuthQuery.Result.AuthModule}
|
||||
authLabel := GetAuthProviderLabel(getAuthQuery.Result.AuthModule)
|
||||
query.Result.AuthLabels = append(query.Result.AuthLabels, authLabel)
|
||||
query.Result.IsExternal = true
|
||||
}
|
||||
|
||||
return JSON(200, query.Result)
|
||||
@@ -277,6 +280,12 @@ func searchUser(c *m.ReqContext) (*m.SearchUsersQuery, error) {
|
||||
|
||||
for _, user := range query.Result.Users {
|
||||
user.AvatarUrl = dtos.GetGravatarUrl(user.Email)
|
||||
user.AuthLabels = make([]string, 0)
|
||||
if user.AuthModule != nil && len(user.AuthModule) > 0 {
|
||||
for _, authModule := range user.AuthModule {
|
||||
user.AuthLabels = append(user.AuthLabels, GetAuthProviderLabel(authModule))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
query.Result.Page = page
|
||||
@@ -315,3 +324,22 @@ func ClearHelpFlags(c *m.ReqContext) Response {
|
||||
|
||||
return JSON(200, &util.DynMap{"message": "Help flag set", "helpFlags1": cmd.HelpFlags1})
|
||||
}
|
||||
|
||||
func GetAuthProviderLabel(authModule string) string {
|
||||
switch authModule {
|
||||
case "oauth_github":
|
||||
return "GitHub"
|
||||
case "oauth_google":
|
||||
return "Google"
|
||||
case "oauth_gitlab":
|
||||
return "GitLab"
|
||||
case "oauth_grafana_com", "oauth_grafananet":
|
||||
return "grafana.com"
|
||||
case "auth.saml":
|
||||
return "SAML"
|
||||
case "ldap", "":
|
||||
return "LDAP"
|
||||
default:
|
||||
return "OAuth"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -62,7 +62,7 @@ func EncryptDatasourcePaswords(c utils.CommandLine, sqlStore *sqlstore.SqlStore)
|
||||
}
|
||||
|
||||
func migrateColumn(session *sqlstore.DBSession, column string) (int, error) {
|
||||
var rows []map[string]string
|
||||
var rows []map[string][]byte
|
||||
|
||||
session.Cols("id", column, "secure_json_data")
|
||||
session.Table("data_source")
|
||||
@@ -78,7 +78,7 @@ func migrateColumn(session *sqlstore.DBSession, column string) (int, error) {
|
||||
return rowsUpdated, errutil.Wrapf(err, "failed to update column: %s", column)
|
||||
}
|
||||
|
||||
func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordFieldName string) (int, error) {
|
||||
func updateRows(session *sqlstore.DBSession, rows []map[string][]byte, passwordFieldName string) (int, error) {
|
||||
var rowsUpdated int
|
||||
|
||||
for _, row := range rows {
|
||||
@@ -94,7 +94,7 @@ func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordF
|
||||
|
||||
newRow := map[string]interface{}{"secure_json_data": data, passwordFieldName: ""}
|
||||
session.Table("data_source")
|
||||
session.Where("id = ?", row["id"])
|
||||
session.Where("id = ?", string(row["id"]))
|
||||
// Setting both columns while having value only for secure_json_data should clear the [passwordFieldName] column
|
||||
session.Cols("secure_json_data", passwordFieldName)
|
||||
|
||||
@@ -108,16 +108,20 @@ func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordF
|
||||
return rowsUpdated, nil
|
||||
}
|
||||
|
||||
func getUpdatedSecureJSONData(row map[string]string, passwordFieldName string) (map[string]interface{}, error) {
|
||||
encryptedPassword, err := util.Encrypt([]byte(row[passwordFieldName]), setting.SecretKey)
|
||||
func getUpdatedSecureJSONData(row map[string][]byte, passwordFieldName string) (map[string]interface{}, error) {
|
||||
encryptedPassword, err := util.Encrypt(row[passwordFieldName], setting.SecretKey)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var secureJSONData map[string]interface{}
|
||||
|
||||
if err := json.Unmarshal([]byte(row["secure_json_data"]), &secureJSONData); err != nil {
|
||||
return nil, err
|
||||
if len(row["secure_json_data"]) > 0 {
|
||||
if err := json.Unmarshal(row["secure_json_data"], &secureJSONData); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
secureJSONData = map[string]interface{}{}
|
||||
}
|
||||
|
||||
jsonFieldName := util.ToCamelCase(passwordFieldName)
|
||||
|
||||
@@ -20,19 +20,30 @@ func TestPasswordMigrationCommand(t *testing.T) {
|
||||
datasources := []*models.DataSource{
|
||||
{Type: "influxdb", Name: "influxdb", Password: "foobar"},
|
||||
{Type: "graphite", Name: "graphite", BasicAuthPassword: "foobar"},
|
||||
{Type: "prometheus", Name: "prometheus", SecureJsonData: securejsondata.GetEncryptedJsonData(map[string]string{})},
|
||||
{Type: "prometheus", Name: "prometheus"},
|
||||
{Type: "elasticsearch", Name: "elasticsearch", Password: "pwd"},
|
||||
}
|
||||
|
||||
// set required default values
|
||||
for _, ds := range datasources {
|
||||
ds.Created = time.Now()
|
||||
ds.Updated = time.Now()
|
||||
ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{})
|
||||
if ds.Name == "elasticsearch" {
|
||||
ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{
|
||||
"key": "value",
|
||||
})
|
||||
} else {
|
||||
ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{})
|
||||
}
|
||||
}
|
||||
|
||||
_, err := session.Insert(&datasources)
|
||||
assert.Nil(t, err)
|
||||
|
||||
// force secure_json_data to be null to verify that migration can handle that
|
||||
_, err = session.Exec("update data_source set secure_json_data = null where name = 'influxdb'")
|
||||
assert.Nil(t, err)
|
||||
|
||||
//run migration
|
||||
err = EncryptDatasourcePaswords(&commandstest.FakeCommandLine{}, sqlstore)
|
||||
assert.Nil(t, err)
|
||||
@@ -41,7 +52,7 @@ func TestPasswordMigrationCommand(t *testing.T) {
|
||||
var dss []*models.DataSource
|
||||
err = session.SQL("select * from data_source").Find(&dss)
|
||||
assert.Nil(t, err)
|
||||
assert.Equal(t, len(dss), 3)
|
||||
assert.Equal(t, len(dss), 4)
|
||||
|
||||
for _, ds := range dss {
|
||||
sj := ds.SecureJsonData.Decrypt()
|
||||
@@ -63,5 +74,15 @@ func TestPasswordMigrationCommand(t *testing.T) {
|
||||
if ds.Name == "prometheus" {
|
||||
assert.Equal(t, len(sj), 0)
|
||||
}
|
||||
|
||||
if ds.Name == "elasticsearch" {
|
||||
assert.Equal(t, ds.Password, "")
|
||||
key, exist := sj["key"]
|
||||
assert.True(t, exist)
|
||||
password, exist := sj["password"]
|
||||
assert.True(t, exist)
|
||||
assert.Equal(t, password, "pwd", "expected password to be moved to securejson")
|
||||
assert.Equal(t, key, "value", "expected existing key to be kept intact in securejson")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -85,7 +85,7 @@ func InstallPlugin(pluginName, version string, c utils.CommandLine) error {
|
||||
}
|
||||
|
||||
logger.Infof("installing %v @ %v\n", pluginName, version)
|
||||
logger.Infof("from url: %v\n", downloadURL)
|
||||
logger.Infof("from: %v\n", downloadURL)
|
||||
logger.Infof("into: %v\n", pluginFolder)
|
||||
logger.Info("\n")
|
||||
|
||||
@@ -145,18 +145,27 @@ func downloadFile(pluginName, filePath, url string) (err error) {
|
||||
}
|
||||
}()
|
||||
|
||||
resp, err := http.Get(url) // #nosec
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
var bytes []byte
|
||||
|
||||
body, err := ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
if _, err := os.Stat(url); err == nil {
|
||||
bytes, err = ioutil.ReadFile(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else {
|
||||
resp, err := http.Get(url) // #nosec
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
bytes, err = ioutil.ReadAll(resp.Body)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return extractFiles(body, pluginName, filePath)
|
||||
return extractFiles(bytes, pluginName, filePath)
|
||||
}
|
||||
|
||||
func extractFiles(body []byte, pluginName string, filePath string) error {
|
||||
|
||||
@@ -6,37 +6,53 @@ import (
|
||||
"path/filepath"
|
||||
|
||||
"github.com/grafana/grafana/pkg/cmd/grafana-cli/logger"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
func GetGrafanaPluginDir(currentOS string) string {
|
||||
if isDevEnvironment() {
|
||||
return "../data/plugins"
|
||||
if rootPath, ok := tryGetRootForDevEnvironment(); ok {
|
||||
return filepath.Join(rootPath, "data/plugins")
|
||||
}
|
||||
|
||||
return returnOsDefault(currentOS)
|
||||
}
|
||||
|
||||
func isDevEnvironment() bool {
|
||||
// if ../conf/defaults.ini exists, grafana is not installed as package
|
||||
// that its in development environment.
|
||||
// getGrafanaRoot tries to get root of directory when developing grafana ie repo root. It is not perfect it just
|
||||
// checks what is the binary path and tries to guess based on that but if it is not running in dev env you get a bogus
|
||||
// path back.
|
||||
func getGrafanaRoot() (string, error) {
|
||||
ex, err := os.Executable()
|
||||
if err != nil {
|
||||
logger.Error("Could not get executable path. Assuming non dev environment.")
|
||||
return false
|
||||
return "", xerrors.New("Failed to get executable path")
|
||||
}
|
||||
exPath := filepath.Dir(ex)
|
||||
_, last := path.Split(exPath)
|
||||
if last == "bin" {
|
||||
// In dev env the executable for current platform is created in 'bin/' dir
|
||||
defaultsPath := filepath.Join(exPath, "../conf/defaults.ini")
|
||||
_, err = os.Stat(defaultsPath)
|
||||
return err == nil
|
||||
return filepath.Join(exPath, ".."), nil
|
||||
}
|
||||
|
||||
// But at the same time there are per platform directories that contain the binaries and can also be used.
|
||||
defaultsPath := filepath.Join(exPath, "../../conf/defaults.ini")
|
||||
_, err = os.Stat(defaultsPath)
|
||||
return err == nil
|
||||
return filepath.Join(exPath, "../.."), nil
|
||||
}
|
||||
|
||||
// tryGetRootForDevEnvironment returns root path if we are in dev environment. It checks if conf/defaults.ini exists
|
||||
// which should only exist in dev. Second param is false if we are not in dev or if it wasn't possible to determine it.
|
||||
func tryGetRootForDevEnvironment() (string, bool) {
|
||||
rootPath, err := getGrafanaRoot()
|
||||
if err != nil {
|
||||
logger.Error("Could not get executable path. Assuming non dev environment.", err)
|
||||
return "", false
|
||||
}
|
||||
|
||||
devenvPath := filepath.Join(rootPath, "devenv")
|
||||
|
||||
_, err = os.Stat(devenvPath)
|
||||
if err != nil {
|
||||
return "", false
|
||||
}
|
||||
|
||||
return rootPath, true
|
||||
}
|
||||
|
||||
func returnOsDefault(currentOs string) string {
|
||||
|
||||
@@ -3,103 +3,180 @@ package metrics
|
||||
import (
|
||||
"runtime"
|
||||
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
const exporterName = "grafana"
|
||||
|
||||
var (
|
||||
M_Instance_Start prometheus.Counter
|
||||
M_Page_Status *prometheus.CounterVec
|
||||
M_Api_Status *prometheus.CounterVec
|
||||
M_Proxy_Status *prometheus.CounterVec
|
||||
M_Http_Request_Total *prometheus.CounterVec
|
||||
M_Http_Request_Summary *prometheus.SummaryVec
|
||||
// MInstanceStart is a metric counter for started instances
|
||||
MInstanceStart prometheus.Counter
|
||||
|
||||
M_Api_User_SignUpStarted prometheus.Counter
|
||||
M_Api_User_SignUpCompleted prometheus.Counter
|
||||
M_Api_User_SignUpInvite prometheus.Counter
|
||||
M_Api_Dashboard_Save prometheus.Summary
|
||||
M_Api_Dashboard_Get prometheus.Summary
|
||||
M_Api_Dashboard_Search prometheus.Summary
|
||||
M_Api_Admin_User_Create prometheus.Counter
|
||||
M_Api_Login_Post prometheus.Counter
|
||||
M_Api_Login_OAuth prometheus.Counter
|
||||
M_Api_Org_Create prometheus.Counter
|
||||
// MPageStatus is a metric page http response status
|
||||
MPageStatus *prometheus.CounterVec
|
||||
|
||||
M_Api_Dashboard_Snapshot_Create prometheus.Counter
|
||||
M_Api_Dashboard_Snapshot_External prometheus.Counter
|
||||
M_Api_Dashboard_Snapshot_Get prometheus.Counter
|
||||
M_Api_Dashboard_Insert prometheus.Counter
|
||||
M_Alerting_Result_State *prometheus.CounterVec
|
||||
M_Alerting_Notification_Sent *prometheus.CounterVec
|
||||
M_Aws_CloudWatch_GetMetricStatistics prometheus.Counter
|
||||
M_Aws_CloudWatch_ListMetrics prometheus.Counter
|
||||
M_Aws_CloudWatch_GetMetricData prometheus.Counter
|
||||
M_DB_DataSource_QueryById prometheus.Counter
|
||||
// MApiStatus is a metric api http response status
|
||||
MApiStatus *prometheus.CounterVec
|
||||
|
||||
// Timers
|
||||
M_DataSource_ProxyReq_Timer prometheus.Summary
|
||||
M_Alerting_Execution_Time prometheus.Summary
|
||||
// MProxyStatus is a metric proxy http response status
|
||||
MProxyStatus *prometheus.CounterVec
|
||||
|
||||
// MHttpRequestTotal is a metric http request counter
|
||||
MHttpRequestTotal *prometheus.CounterVec
|
||||
|
||||
// MHttpRequestSummary is a metric http request summary
|
||||
MHttpRequestSummary *prometheus.SummaryVec
|
||||
|
||||
// MApiUserSignUpStarted is a metric amount of users who started the signup flow
|
||||
MApiUserSignUpStarted prometheus.Counter
|
||||
|
||||
// MApiUserSignUpCompleted is a metric amount of users who completed the signup flow
|
||||
MApiUserSignUpCompleted prometheus.Counter
|
||||
|
||||
// MApiUserSignUpInvite is a metric amount of users who have been invited
|
||||
MApiUserSignUpInvite prometheus.Counter
|
||||
|
||||
// MApiDashboardSave is a metric summary for dashboard save duration
|
||||
MApiDashboardSave prometheus.Summary
|
||||
|
||||
// MApiDashboardGet is a metric summary for dashboard get duration
|
||||
MApiDashboardGet prometheus.Summary
|
||||
|
||||
// MApiDashboardSearch is a metric summary for dashboard search duration
|
||||
MApiDashboardSearch prometheus.Summary
|
||||
|
||||
// MApiAdminUserCreate is a metric api admin user created counter
|
||||
MApiAdminUserCreate prometheus.Counter
|
||||
|
||||
// MApiLoginPost is a metric api login post counter
|
||||
MApiLoginPost prometheus.Counter
|
||||
|
||||
// MApiLoginOAuth is a metric api login oauth counter
|
||||
MApiLoginOAuth prometheus.Counter
|
||||
|
||||
// MApiLoginSAML is a metric api login SAML counter
|
||||
MApiLoginSAML prometheus.Counter
|
||||
|
||||
// MApiOrgCreate is a metric api org created counter
|
||||
MApiOrgCreate prometheus.Counter
|
||||
|
||||
// MApiDashboardSnapshotCreate is a metric dashboard snapshots created
|
||||
MApiDashboardSnapshotCreate prometheus.Counter
|
||||
|
||||
// MApiDashboardSnapshotExternal is a metric external dashboard snapshots created
|
||||
MApiDashboardSnapshotExternal prometheus.Counter
|
||||
|
||||
// MApiDashboardSnapshotGet is a metric loaded dashboards
|
||||
MApiDashboardSnapshotGet prometheus.Counter
|
||||
|
||||
// MApiDashboardInsert is a metric dashboards inserted
|
||||
MApiDashboardInsert prometheus.Counter
|
||||
|
||||
// MAlertingResultState is a metric alert execution result counter
|
||||
MAlertingResultState *prometheus.CounterVec
|
||||
|
||||
// MAlertingNotificationSent is a metric counter for how many alert notifications been sent
|
||||
MAlertingNotificationSent *prometheus.CounterVec
|
||||
|
||||
// MAwsCloudWatchGetMetricStatistics is a metric counter for getting metric statistics from aws
|
||||
MAwsCloudWatchGetMetricStatistics prometheus.Counter
|
||||
|
||||
// MAwsCloudWatchListMetrics is a metric counter for getting list of metrics from aws
|
||||
MAwsCloudWatchListMetrics prometheus.Counter
|
||||
|
||||
// MAwsCloudWatchGetMetricData is a metric counter for getting metric data time series from aws
|
||||
MAwsCloudWatchGetMetricData prometheus.Counter
|
||||
|
||||
// MDBDataSourceQueryByID is a metric counter for getting datasource by id
|
||||
MDBDataSourceQueryByID prometheus.Counter
|
||||
|
||||
// LDAPUsersSyncExecutionTime is a metric summary for LDAP users sync execution duration
|
||||
LDAPUsersSyncExecutionTime prometheus.Summary
|
||||
)
|
||||
|
||||
// Timers
|
||||
var (
|
||||
// MDataSourceProxyReqTimer is a metric summary for dataproxy request duration
|
||||
MDataSourceProxyReqTimer prometheus.Summary
|
||||
|
||||
// MAlertingExecutionTime is a metric summary of alert exeuction duration
|
||||
MAlertingExecutionTime prometheus.Summary
|
||||
)
|
||||
|
||||
// StatTotals
|
||||
var (
|
||||
M_Alerting_Active_Alerts prometheus.Gauge
|
||||
M_StatTotal_Dashboards prometheus.Gauge
|
||||
M_StatTotal_Users prometheus.Gauge
|
||||
M_StatActive_Users prometheus.Gauge
|
||||
M_StatTotal_Orgs prometheus.Gauge
|
||||
M_StatTotal_Playlists prometheus.Gauge
|
||||
// MAlertingActiveAlerts is a metric amount of active alerts
|
||||
MAlertingActiveAlerts prometheus.Gauge
|
||||
|
||||
StatsTotalViewers prometheus.Gauge
|
||||
StatsTotalEditors prometheus.Gauge
|
||||
StatsTotalAdmins prometheus.Gauge
|
||||
// MStatTotalDashboards is a metric total amount of dashboards
|
||||
MStatTotalDashboards prometheus.Gauge
|
||||
|
||||
// MStatTotalUsers is a metric total amount of users
|
||||
MStatTotalUsers prometheus.Gauge
|
||||
|
||||
// MStatActiveUsers is a metric number of active users
|
||||
MStatActiveUsers prometheus.Gauge
|
||||
|
||||
// MStatTotalOrgs is a metric total amount of orgs
|
||||
MStatTotalOrgs prometheus.Gauge
|
||||
|
||||
// MStatTotalPlaylists is a metric total amount of playlists
|
||||
MStatTotalPlaylists prometheus.Gauge
|
||||
|
||||
// StatsTotalViewers is a metric total amount of viewers
|
||||
StatsTotalViewers prometheus.Gauge
|
||||
|
||||
// StatsTotalEditors is a metric total amount of editors
|
||||
StatsTotalEditors prometheus.Gauge
|
||||
|
||||
// StatsTotalAdmins is a metric total amount of admins
|
||||
StatsTotalAdmins prometheus.Gauge
|
||||
|
||||
// StatsTotalActiveViewers is a metric total amount of viewers
|
||||
StatsTotalActiveViewers prometheus.Gauge
|
||||
|
||||
// StatsTotalActiveEditors is a metric total amount of active editors
|
||||
StatsTotalActiveEditors prometheus.Gauge
|
||||
StatsTotalActiveAdmins prometheus.Gauge
|
||||
|
||||
// M_Grafana_Version is a gauge that contains build info about this binary
|
||||
//
|
||||
// Deprecated: use M_Grafana_Build_Version instead.
|
||||
M_Grafana_Version *prometheus.GaugeVec
|
||||
// StatsTotalActiveAdmins is a metric total amount of active admins
|
||||
StatsTotalActiveAdmins prometheus.Gauge
|
||||
|
||||
// grafanaBuildVersion is a gauge that contains build info about this binary
|
||||
// grafanaBuildVersion is a metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built
|
||||
grafanaBuildVersion *prometheus.GaugeVec
|
||||
)
|
||||
|
||||
func init() {
|
||||
M_Instance_Start = prometheus.NewCounter(prometheus.CounterOpts{
|
||||
httpStatusCodes := []string{"200", "404", "500", "unknown"}
|
||||
MInstanceStart = prometheus.NewCounter(prometheus.CounterOpts{
|
||||
Name: "instance_start_total",
|
||||
Help: "counter for started instances",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
httpStatusCodes := []string{"200", "404", "500", "unknown"}
|
||||
M_Page_Status = newCounterVecStartingAtZero(
|
||||
MPageStatus = newCounterVecStartingAtZero(
|
||||
prometheus.CounterOpts{
|
||||
Name: "page_response_status_total",
|
||||
Help: "page http response status",
|
||||
Namespace: exporterName,
|
||||
}, []string{"code"}, httpStatusCodes...)
|
||||
|
||||
M_Api_Status = newCounterVecStartingAtZero(
|
||||
MApiStatus = newCounterVecStartingAtZero(
|
||||
prometheus.CounterOpts{
|
||||
Name: "api_response_status_total",
|
||||
Help: "api http response status",
|
||||
Namespace: exporterName,
|
||||
}, []string{"code"}, httpStatusCodes...)
|
||||
|
||||
M_Proxy_Status = newCounterVecStartingAtZero(
|
||||
MProxyStatus = newCounterVecStartingAtZero(
|
||||
prometheus.CounterOpts{
|
||||
Name: "proxy_response_status_total",
|
||||
Help: "proxy http response status",
|
||||
Namespace: exporterName,
|
||||
}, []string{"code"}, httpStatusCodes...)
|
||||
|
||||
M_Http_Request_Total = prometheus.NewCounterVec(
|
||||
MHttpRequestTotal = prometheus.NewCounterVec(
|
||||
prometheus.CounterOpts{
|
||||
Name: "http_request_total",
|
||||
Help: "http request counter",
|
||||
@@ -107,7 +184,7 @@ func init() {
|
||||
[]string{"handler", "statuscode", "method"},
|
||||
)
|
||||
|
||||
M_Http_Request_Summary = prometheus.NewSummaryVec(
|
||||
MHttpRequestSummary = prometheus.NewSummaryVec(
|
||||
prometheus.SummaryOpts{
|
||||
Name: "http_request_duration_milliseconds",
|
||||
Help: "http request summary",
|
||||
@@ -115,169 +192,181 @@ func init() {
|
||||
[]string{"handler", "statuscode", "method"},
|
||||
)
|
||||
|
||||
M_Api_User_SignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiUserSignUpStarted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_user_signup_started_total",
|
||||
Help: "amount of users who started the signup flow",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_User_SignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiUserSignUpCompleted = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_user_signup_completed_total",
|
||||
Help: "amount of users who completed the signup flow",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_User_SignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiUserSignUpInvite = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_user_signup_invite_total",
|
||||
Help: "amount of users who have been invited",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Save = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MApiDashboardSave = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dashboard_save_milliseconds",
|
||||
Help: "summary for dashboard save duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Get = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MApiDashboardGet = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dashboard_get_milliseconds",
|
||||
Help: "summary for dashboard get duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Search = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MApiDashboardSearch = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dashboard_search_milliseconds",
|
||||
Help: "summary for dashboard search duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Admin_User_Create = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiAdminUserCreate = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_admin_user_created_total",
|
||||
Help: "api admin user created counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Login_Post = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiLoginPost = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_login_post_total",
|
||||
Help: "api login post counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Login_OAuth = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiLoginOAuth = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_login_oauth_total",
|
||||
Help: "api login oauth counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Org_Create = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiLoginSAML = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_login_saml_total",
|
||||
Help: "api login saml counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
MApiOrgCreate = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_org_create_total",
|
||||
Help: "api org created counter",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Snapshot_Create = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardSnapshotCreate = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_dashboard_snapshot_create_total",
|
||||
Help: "dashboard snapshots created",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Snapshot_External = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardSnapshotExternal = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_dashboard_snapshot_external_total",
|
||||
Help: "external dashboard snapshots created",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Snapshot_Get = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardSnapshotGet = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_dashboard_snapshot_get_total",
|
||||
Help: "loaded dashboards",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Api_Dashboard_Insert = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MApiDashboardInsert = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "api_models_dashboard_insert_total",
|
||||
Help: "dashboards inserted ",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Alerting_Result_State = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
MAlertingResultState = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
Name: "alerting_result_total",
|
||||
Help: "alert execution result counter",
|
||||
Namespace: exporterName,
|
||||
}, []string{"state"})
|
||||
|
||||
M_Alerting_Notification_Sent = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
MAlertingNotificationSent = prometheus.NewCounterVec(prometheus.CounterOpts{
|
||||
Name: "alerting_notification_sent_total",
|
||||
Help: "counter for how many alert notifications been sent",
|
||||
Namespace: exporterName,
|
||||
}, []string{"type"})
|
||||
|
||||
M_Aws_CloudWatch_GetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MAwsCloudWatchGetMetricStatistics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "aws_cloudwatch_get_metric_statistics_total",
|
||||
Help: "counter for getting metric statistics from aws",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Aws_CloudWatch_ListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MAwsCloudWatchListMetrics = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "aws_cloudwatch_list_metrics_total",
|
||||
Help: "counter for getting list of metrics from aws",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Aws_CloudWatch_GetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MAwsCloudWatchGetMetricData = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "aws_cloudwatch_get_metric_data_total",
|
||||
Help: "counter for getting metric data time series from aws",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_DB_DataSource_QueryById = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
MDBDataSourceQueryByID = newCounterStartingAtZero(prometheus.CounterOpts{
|
||||
Name: "db_datasource_query_by_id_total",
|
||||
Help: "counter for getting datasource by id",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_DataSource_ProxyReq_Timer = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
LDAPUsersSyncExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "ldap_users_sync_execution_time",
|
||||
Help: "summary for LDAP users sync execution duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
MDataSourceProxyReqTimer = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "api_dataproxy_request_all_milliseconds",
|
||||
Help: "summary for dataproxy request duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Alerting_Execution_Time = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
MAlertingExecutionTime = prometheus.NewSummary(prometheus.SummaryOpts{
|
||||
Name: "alerting_execution_time_milliseconds",
|
||||
Help: "summary of alert exeuction duration",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Alerting_Active_Alerts = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MAlertingActiveAlerts = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "alerting_active_alerts",
|
||||
Help: "amount of active alerts",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Dashboards = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalDashboards = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_totals_dashboard",
|
||||
Help: "total amount of dashboards",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Users = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalUsers = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_total_users",
|
||||
Help: "total amount of users",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatActive_Users = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatActiveUsers = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_active_users",
|
||||
Help: "number of active users",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Orgs = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalOrgs = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_total_orgs",
|
||||
Help: "total amount of orgs",
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_StatTotal_Playlists = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
MStatTotalPlaylists = prometheus.NewGauge(prometheus.GaugeOpts{
|
||||
Name: "stat_total_playlists",
|
||||
Help: "total amount of playlists",
|
||||
Namespace: exporterName,
|
||||
@@ -319,78 +408,69 @@ func init() {
|
||||
Namespace: exporterName,
|
||||
})
|
||||
|
||||
M_Grafana_Version = prometheus.NewGaugeVec(prometheus.GaugeOpts{
|
||||
Name: "info",
|
||||
Help: "Information about the Grafana. This metric is deprecated. please use `grafana_build_info`",
|
||||
Namespace: exporterName,
|
||||
}, []string{"version"})
|
||||
|
||||
grafanaBuildVersion = prometheus.NewGaugeVec(prometheus.GaugeOpts{
|
||||
Name: "build_info",
|
||||
Help: "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built.",
|
||||
Help: "A metric with a constant '1' value labeled by version, revision, branch, and goversion from which Grafana was built",
|
||||
Namespace: exporterName,
|
||||
}, []string{"version", "revision", "branch", "goversion", "edition"})
|
||||
}
|
||||
|
||||
// SetBuildInformation sets the build information for this binary
|
||||
func SetBuildInformation(version, revision, branch string) {
|
||||
// We export this info twice for backwards compatibility.
|
||||
// Once this have been released for some time we should be able to remote `M_Grafana_Version`
|
||||
// The reason we added a new one is that its common practice in the prometheus community
|
||||
// to name this metric `*_build_info` so its easy to do aggregation on all programs.
|
||||
edition := "oss"
|
||||
if setting.IsEnterprise {
|
||||
edition = "enterprise"
|
||||
}
|
||||
|
||||
M_Grafana_Version.WithLabelValues(version).Set(1)
|
||||
grafanaBuildVersion.WithLabelValues(version, revision, branch, runtime.Version(), edition).Set(1)
|
||||
}
|
||||
|
||||
func initMetricVars() {
|
||||
prometheus.MustRegister(
|
||||
M_Instance_Start,
|
||||
M_Page_Status,
|
||||
M_Api_Status,
|
||||
M_Proxy_Status,
|
||||
M_Http_Request_Total,
|
||||
M_Http_Request_Summary,
|
||||
M_Api_User_SignUpStarted,
|
||||
M_Api_User_SignUpCompleted,
|
||||
M_Api_User_SignUpInvite,
|
||||
M_Api_Dashboard_Save,
|
||||
M_Api_Dashboard_Get,
|
||||
M_Api_Dashboard_Search,
|
||||
M_DataSource_ProxyReq_Timer,
|
||||
M_Alerting_Execution_Time,
|
||||
M_Api_Admin_User_Create,
|
||||
M_Api_Login_Post,
|
||||
M_Api_Login_OAuth,
|
||||
M_Api_Org_Create,
|
||||
M_Api_Dashboard_Snapshot_Create,
|
||||
M_Api_Dashboard_Snapshot_External,
|
||||
M_Api_Dashboard_Snapshot_Get,
|
||||
M_Api_Dashboard_Insert,
|
||||
M_Alerting_Result_State,
|
||||
M_Alerting_Notification_Sent,
|
||||
M_Aws_CloudWatch_GetMetricStatistics,
|
||||
M_Aws_CloudWatch_ListMetrics,
|
||||
M_Aws_CloudWatch_GetMetricData,
|
||||
M_DB_DataSource_QueryById,
|
||||
M_Alerting_Active_Alerts,
|
||||
M_StatTotal_Dashboards,
|
||||
M_StatTotal_Users,
|
||||
M_StatActive_Users,
|
||||
M_StatTotal_Orgs,
|
||||
M_StatTotal_Playlists,
|
||||
M_Grafana_Version,
|
||||
MInstanceStart,
|
||||
MPageStatus,
|
||||
MApiStatus,
|
||||
MProxyStatus,
|
||||
MHttpRequestTotal,
|
||||
MHttpRequestSummary,
|
||||
MApiUserSignUpStarted,
|
||||
MApiUserSignUpCompleted,
|
||||
MApiUserSignUpInvite,
|
||||
MApiDashboardSave,
|
||||
MApiDashboardGet,
|
||||
MApiDashboardSearch,
|
||||
MDataSourceProxyReqTimer,
|
||||
MAlertingExecutionTime,
|
||||
MApiAdminUserCreate,
|
||||
MApiLoginPost,
|
||||
MApiLoginOAuth,
|
||||
MApiLoginSAML,
|
||||
MApiOrgCreate,
|
||||
MApiDashboardSnapshotCreate,
|
||||
MApiDashboardSnapshotExternal,
|
||||
MApiDashboardSnapshotGet,
|
||||
MApiDashboardInsert,
|
||||
MAlertingResultState,
|
||||
MAlertingNotificationSent,
|
||||
MAwsCloudWatchGetMetricStatistics,
|
||||
MAwsCloudWatchListMetrics,
|
||||
MAwsCloudWatchGetMetricData,
|
||||
MDBDataSourceQueryByID,
|
||||
LDAPUsersSyncExecutionTime,
|
||||
MAlertingActiveAlerts,
|
||||
MStatTotalDashboards,
|
||||
MStatTotalUsers,
|
||||
MStatActiveUsers,
|
||||
MStatTotalOrgs,
|
||||
MStatTotalPlaylists,
|
||||
StatsTotalViewers,
|
||||
StatsTotalEditors,
|
||||
StatsTotalAdmins,
|
||||
StatsTotalActiveViewers,
|
||||
StatsTotalActiveEditors,
|
||||
StatsTotalActiveAdmins,
|
||||
grafanaBuildVersion)
|
||||
grafanaBuildVersion,
|
||||
)
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -46,7 +46,7 @@ func (im *InternalMetricsService) Run(ctx context.Context) error {
|
||||
}
|
||||
}
|
||||
|
||||
M_Instance_Start.Inc()
|
||||
MInstanceStart.Inc()
|
||||
|
||||
<-ctx.Done()
|
||||
return ctx.Err()
|
||||
|
||||
@@ -22,8 +22,12 @@ func parseRedisConnStr(connStr string) (*redis.Options, error) {
|
||||
keyValueCSV := strings.Split(connStr, ",")
|
||||
options := &redis.Options{Network: "tcp"}
|
||||
for _, rawKeyValue := range keyValueCSV {
|
||||
keyValueTuple := strings.Split(rawKeyValue, "=")
|
||||
keyValueTuple := strings.SplitN(rawKeyValue, "=", 2)
|
||||
if len(keyValueTuple) != 2 {
|
||||
if strings.HasPrefix(rawKeyValue, "password") {
|
||||
// don't log the password
|
||||
rawKeyValue = "password******"
|
||||
}
|
||||
return nil, fmt.Errorf("incorrect redis connection string format detected for '%v', format is key=value,key=value", rawKeyValue)
|
||||
}
|
||||
connKey := keyValueTuple[0]
|
||||
|
||||
@@ -161,11 +161,11 @@ func (uss *UsageStatsService) updateTotalStats() {
|
||||
return
|
||||
}
|
||||
|
||||
metrics.M_StatTotal_Dashboards.Set(float64(statsQuery.Result.Dashboards))
|
||||
metrics.M_StatTotal_Users.Set(float64(statsQuery.Result.Users))
|
||||
metrics.M_StatActive_Users.Set(float64(statsQuery.Result.ActiveUsers))
|
||||
metrics.M_StatTotal_Playlists.Set(float64(statsQuery.Result.Playlists))
|
||||
metrics.M_StatTotal_Orgs.Set(float64(statsQuery.Result.Orgs))
|
||||
metrics.MStatTotalDashboards.Set(float64(statsQuery.Result.Dashboards))
|
||||
metrics.MStatTotalUsers.Set(float64(statsQuery.Result.Users))
|
||||
metrics.MStatActiveUsers.Set(float64(statsQuery.Result.ActiveUsers))
|
||||
metrics.MStatTotalPlaylists.Set(float64(statsQuery.Result.Playlists))
|
||||
metrics.MStatTotalOrgs.Set(float64(statsQuery.Result.Orgs))
|
||||
metrics.StatsTotalViewers.Set(float64(statsQuery.Result.Viewers))
|
||||
metrics.StatsTotalActiveViewers.Set(float64(statsQuery.Result.ActiveViewers))
|
||||
metrics.StatsTotalEditors.Set(float64(statsQuery.Result.Editors))
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package authproxy
|
||||
|
||||
import (
|
||||
"encoding/base32"
|
||||
"fmt"
|
||||
"net"
|
||||
"net/mail"
|
||||
@@ -32,6 +33,9 @@ var isLDAPEnabled = ldap.IsEnabled
|
||||
// newLDAP creates multiple LDAP instance
|
||||
var newLDAP = multildap.New
|
||||
|
||||
// supportedHeaders states the supported headers configuration fields
|
||||
var supportedHeaderFields = []string{"Name", "Email", "Login", "Groups"}
|
||||
|
||||
// AuthProxy struct
|
||||
type AuthProxy struct {
|
||||
store *remotecache.RemoteCache
|
||||
@@ -142,9 +146,18 @@ func (auth *AuthProxy) IsAllowedIP() (bool, *Error) {
|
||||
return false, newError("Proxy authentication required", err)
|
||||
}
|
||||
|
||||
// getKey forms a key for the cache
|
||||
// getKey forms a key for the cache based on the headers received as part of the authentication flow.
|
||||
// Our configuration supports multiple headers. The main header contains the email or username.
|
||||
// And the additional ones that allow us to specify extra attributes: Name, Email or Groups.
|
||||
func (auth *AuthProxy) getKey() string {
|
||||
return fmt.Sprintf(CachePrefix, auth.header)
|
||||
key := strings.TrimSpace(auth.header) // start the key with the main header
|
||||
|
||||
auth.headersIterator(func(_, header string) {
|
||||
key = strings.Join([]string{key, header}, "-") // compose the key with any additional headers
|
||||
})
|
||||
|
||||
hashedKey := base32.StdEncoding.EncodeToString([]byte(key))
|
||||
return fmt.Sprintf(CachePrefix, hashedKey)
|
||||
}
|
||||
|
||||
// Login logs in user id with whatever means possible
|
||||
@@ -232,40 +245,36 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
|
||||
AuthId: auth.header,
|
||||
}
|
||||
|
||||
if auth.headerType == "username" {
|
||||
switch auth.headerType {
|
||||
case "username":
|
||||
extUser.Login = auth.header
|
||||
|
||||
// only set Email if it can be parsed as an email address
|
||||
emailAddr, emailErr := mail.ParseAddress(auth.header)
|
||||
emailAddr, emailErr := mail.ParseAddress(auth.header) // only set Email if it can be parsed as an email address
|
||||
if emailErr == nil {
|
||||
extUser.Email = emailAddr.Address
|
||||
}
|
||||
} else if auth.headerType == "email" {
|
||||
case "email":
|
||||
extUser.Email = auth.header
|
||||
extUser.Login = auth.header
|
||||
} else {
|
||||
default:
|
||||
return 0, newError("Auth proxy header property invalid", nil)
|
||||
|
||||
}
|
||||
|
||||
for _, field := range []string{"Name", "Email", "Login", "Groups"} {
|
||||
if auth.headers[field] == "" {
|
||||
continue
|
||||
auth.headersIterator(func(field string, header string) {
|
||||
if field == "Groups" {
|
||||
extUser.Groups = util.SplitString(header)
|
||||
} else {
|
||||
reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(header)
|
||||
}
|
||||
|
||||
if val := auth.ctx.Req.Header.Get(auth.headers[field]); val != "" {
|
||||
if field == "Groups" {
|
||||
extUser.Groups = util.SplitString(val)
|
||||
} else {
|
||||
reflect.ValueOf(extUser).Elem().FieldByName(field).SetString(val)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
upsert := &models.UpsertUserCommand{
|
||||
ReqContext: auth.ctx,
|
||||
SignupAllowed: setting.AuthProxyAutoSignUp,
|
||||
ExternalUser: extUser,
|
||||
}
|
||||
|
||||
err := bus.Dispatch(upsert)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
@@ -274,6 +283,21 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
|
||||
return upsert.Result.Id, nil
|
||||
}
|
||||
|
||||
// headersIterator iterates over all non-empty supported additional headers
|
||||
func (auth *AuthProxy) headersIterator(fn func(field string, header string)) {
|
||||
for _, field := range supportedHeaderFields {
|
||||
h := auth.headers[field]
|
||||
|
||||
if h == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
if value := auth.ctx.Req.Header.Get(h); value != "" {
|
||||
fn(field, strings.TrimSpace(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// GetSignedUser get full signed user info
|
||||
func (auth *AuthProxy) GetSignedUser(userID int64) (*models.SignedInUser, *Error) {
|
||||
query := &models.GetSignedInUserQuery{
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
package authproxy
|
||||
|
||||
import (
|
||||
"encoding/base32"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"gopkg.in/macaron.v1"
|
||||
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/infra/remotecache"
|
||||
"github.com/grafana/grafana/pkg/models"
|
||||
"github.com/grafana/grafana/pkg/services/ldap"
|
||||
"github.com/grafana/grafana/pkg/services/multildap"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
. "github.com/smartystreets/goconvey/convey"
|
||||
"gopkg.in/macaron.v1"
|
||||
)
|
||||
|
||||
type TestMultiLDAP struct {
|
||||
@@ -45,37 +45,70 @@ func (stub *TestMultiLDAP) User(login string) (
|
||||
return result, nil
|
||||
}
|
||||
|
||||
func prepareMiddleware(t *testing.T, req *http.Request, store *remotecache.RemoteCache) *AuthProxy {
|
||||
t.Helper()
|
||||
|
||||
ctx := &models.ReqContext{
|
||||
Context: &macaron.Context{
|
||||
Req: macaron.Request{
|
||||
Request: req,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
auth := New(&Options{
|
||||
Store: store,
|
||||
Ctx: ctx,
|
||||
OrgID: 4,
|
||||
})
|
||||
|
||||
return auth
|
||||
}
|
||||
|
||||
func TestMiddlewareContext(t *testing.T) {
|
||||
Convey("auth_proxy helper", t, func() {
|
||||
req, _ := http.NewRequest("POST", "http://example.com", nil)
|
||||
setting.AuthProxyHeaderName = "X-Killa"
|
||||
name := "markelog"
|
||||
store := remotecache.NewFakeStore(t)
|
||||
|
||||
name := "markelog"
|
||||
req.Header.Add(setting.AuthProxyHeaderName, name)
|
||||
|
||||
ctx := &models.ReqContext{
|
||||
Context: &macaron.Context{
|
||||
Req: macaron.Request{
|
||||
Request: req,
|
||||
},
|
||||
},
|
||||
}
|
||||
Convey("when the cache only contains the main header", func() {
|
||||
|
||||
Convey("logs in user from the cache", func() {
|
||||
store := remotecache.NewFakeStore(t)
|
||||
key := fmt.Sprintf(CachePrefix, name)
|
||||
store.Set(key, int64(33), 0)
|
||||
Convey("with a simple cache key", func() {
|
||||
// Set cache key
|
||||
key := fmt.Sprintf(CachePrefix, base32.StdEncoding.EncodeToString([]byte(name)))
|
||||
store.Set(key, int64(33), 0)
|
||||
|
||||
auth := New(&Options{
|
||||
Store: store,
|
||||
Ctx: ctx,
|
||||
OrgID: 4,
|
||||
// Set up the middleware
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
id, err := auth.Login()
|
||||
|
||||
So(auth.getKey(), ShouldEqual, "auth-proxy-sync-ttl:NVQXE23FNRXWO===")
|
||||
So(err, ShouldBeNil)
|
||||
So(id, ShouldEqual, 33)
|
||||
})
|
||||
|
||||
id, err := auth.Login()
|
||||
Convey("when the cache key contains additional headers", func() {
|
||||
setting.AuthProxyHeaders = map[string]string{"Groups": "X-WEBAUTH-GROUPS"}
|
||||
group := "grafana-core-team"
|
||||
req.Header.Add("X-WEBAUTH-GROUPS", group)
|
||||
|
||||
So(err, ShouldBeNil)
|
||||
So(id, ShouldEqual, 33)
|
||||
key := fmt.Sprintf(CachePrefix, base32.StdEncoding.EncodeToString([]byte(name+"-"+group)))
|
||||
store.Set(key, int64(33), 0)
|
||||
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
|
||||
id, err := auth.Login()
|
||||
|
||||
So(auth.getKey(), ShouldEqual, "auth-proxy-sync-ttl:NVQXE23FNRXWOLLHOJQWMYLOMEWWG33SMUWXIZLBNU======")
|
||||
So(err, ShouldBeNil)
|
||||
So(id, ShouldEqual, 33)
|
||||
})
|
||||
|
||||
Convey("when the does not exist", func() {
|
||||
})
|
||||
})
|
||||
|
||||
Convey("LDAP", func() {
|
||||
@@ -119,13 +152,9 @@ func TestMiddlewareContext(t *testing.T) {
|
||||
|
||||
store := remotecache.NewFakeStore(t)
|
||||
|
||||
server := New(&Options{
|
||||
Store: store,
|
||||
Ctx: ctx,
|
||||
OrgID: 4,
|
||||
})
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
|
||||
id, err := server.Login()
|
||||
id, err := auth.Login()
|
||||
|
||||
So(err, ShouldBeNil)
|
||||
So(id, ShouldEqual, 42)
|
||||
@@ -149,11 +178,7 @@ func TestMiddlewareContext(t *testing.T) {
|
||||
|
||||
store := remotecache.NewFakeStore(t)
|
||||
|
||||
auth := New(&Options{
|
||||
Store: store,
|
||||
Ctx: ctx,
|
||||
OrgID: 4,
|
||||
})
|
||||
auth := prepareMiddleware(t, req, store)
|
||||
|
||||
stub := &TestMultiLDAP{
|
||||
ID: 42,
|
||||
@@ -170,7 +195,6 @@ func TestMiddlewareContext(t *testing.T) {
|
||||
So(id, ShouldNotEqual, 42)
|
||||
So(stub.loginCalled, ShouldEqual, false)
|
||||
})
|
||||
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user