Compare commits

..

124 Commits

Author SHA1 Message Date
Marcus Efraimsson
f76cafa68e Merge pull request #12357 from grafana/cp-5.2.0-beta2
Cherry-picks for v5.2.0-beta2
2018-06-20 15:21:50 +02:00
Marcus Efraimsson
cdae9126ed release v5.2.0-beta2 2018-06-20 14:58:30 +02:00
Marcus Efraimsson
b4c1df11f6 make sure to process panels in collapsed rows when exporting dashboard
(cherry picked from commit a2e08dc4e8)
2018-06-20 14:56:13 +02:00
Alexander Zobnin
7c94d5cd1a graph: fix legend decimals precision calculation
(cherry picked from commit 24f6d34abd)
2018-06-20 14:55:38 +02:00
Marcus Efraimsson
74d6b5fc1c dashboard: fix drop down links
(cherry picked from commit 4ef4a4d3a7)
2018-06-20 14:55:04 +02:00
Marcus Efraimsson
af42e0836a fix regressions after save modal changes of not storing time and variables per default
Fix problem with adhoc variable filters not handled.
Fix problem with saving variables and time per default when saving a
dashboard as/first time.
Fix updating dashboard model after save with saving time/variables
enabled so that next time you save you won't get checkboxes for save
time/variables unless any values changed.
Tests validating correctness if time/variable values has changed.

(cherry picked from commit 41ac8d4cd5)
2018-06-20 14:54:13 +02:00
Martin Molnar
f453fbe8ef feat(ldap): Allow use of DN in user attribute filter (#3132)
(cherry picked from commit be2fa54459)
2018-06-20 14:53:36 +02:00
Marcus Efraimsson
8d635efda0 snapshot: copy correct props when creating a snapshot
(cherry picked from commit a738347957)
2018-06-20 14:52:59 +02:00
Marcus Efraimsson
0f2e879339 set current org when adding/removing user to org
To not get into a situation where a user has a current organization assigned which they are
not a member of, we try to always make sure that a user has a valid current organization
assigned.

(cherry picked from commit 6d48d0a80c)
2018-06-20 14:52:11 +02:00
Marcus Efraimsson
e51dd88260 Merge pull request #12340 from grafana/apikey-permission-fix-cherry-pick2
v5.2.x cherry pick fix
2018-06-19 12:37:26 +02:00
Torkel Ödegaard
984293cc52 fix: fixed permission issue with api key with viewer role in dashboards with default permissions
(cherry picked from commit 24d0b43e62)
2018-06-19 11:14:33 +02:00
Marcus Efraimsson
9a1a9584b7 Merge pull request #12316 from grafana/v52_merge_master
Merge master to v5.2.x release branch
2018-06-18 11:50:48 +02:00
Marcus Efraimsson
8a69ffb007 Merge branch 'master' into v52_merge_master 2018-06-18 09:44:39 +02:00
Marcus Efraimsson
ab9f0e8edd changelog: add notes about closing #10707
[skip ci]
2018-06-18 09:03:30 +02:00
Marcus Efraimsson
18167f1c18 Merge pull request #12299 from grafana/10707_whitelist
auth proxy: whitelist proxy ip instead of client ip
2018-06-18 09:00:41 +02:00
Tim Heckman
a2ff7629e0 Include the vendor directory when copying source in to Docker (#12305)
This change updates the `.dockerignore` file to no longer contain the `vendor/`
directory. When a Go project provides a `vendor/` directory within the
repository, the best practice is to build that project using their vendored
dependencies. By putting it in the `.dockerignore` file we prevent consumers
from easily doing that.

The `vendor/` directory is used to include all of the dependencies needed to
build a project. This makes it so that we can reproducibly build the project, at
any given commit, because the dependencies will always be present. Also, using
the vendor directory avoids us needing to continually re-download all the
dependencies, and it protects us from build failures if GitHub is down or a
dependency gets removed or renamed.

In addition to the change above, this also removes an extra `/tmp` entry from
the `.dockerignore` file.

Fixes #12304

Signed-off-by: Tim Heckman <t@heckman.io>
2018-06-17 22:38:37 -07:00
bergquist
6782be80fd changelog: adds note about closing #12199 2018-06-16 16:59:15 +02:00
Carl Bergquist
e2adb4ced5 Merge pull request #12302 from grafana/12199_cloudwatch_panic
cloudwatch: handle invalid time range
2018-06-16 16:57:23 +02:00
Marcus Efraimsson
c02dd7462a cloudwatch: handle invalid time range 2018-06-15 15:48:25 +02:00
Marcus Efraimsson
b418e14bd9 make sure to use real ip when validating white listed ip's 2018-06-15 15:29:43 +02:00
bergquist
4be6ef4ab3 changelog: adds note about closing #12286 2018-06-15 12:47:53 +02:00
Carl Bergquist
e33d18701d Merge pull request #12287 from bergquist/fix_12286
Influxdb: add mode functions
2018-06-15 12:43:47 +02:00
Marcus Efraimsson
f0254c201a Merge pull request #12215 from ryantxu/use-ng-if-for-time
use ng-if for the panel timeInfo
2018-06-15 12:16:48 +02:00
Marcus Efraimsson
a02306bc75 Merge pull request #12295 from bergquist/docker_master
docs: adds info about grafana-dev container
2018-06-15 11:32:24 +02:00
bergquist
10d840b1cc docs: adds info about grafana-dev container 2018-06-15 11:07:23 +02:00
Marcus Efraimsson
551957fc4d changelog: add notes about closing #12282
[skip ci]
2018-06-15 10:57:19 +02:00
flopp999
2d1f59a9cc Added Litre/min and milliLitre/min in Flow (#12282)
units: Litre/min and milliLitre/min in Flow
2018-06-15 10:54:38 +02:00
Marcus Efraimsson
5f78ad583f remove papaparse dependency 2018-06-15 10:49:57 +02:00
Marcus Efraimsson
22c7c741eb Merge pull request #12252 from grafana/davkal/ifql-query
Internal Influx Flux plugin cleanup
2018-06-15 10:43:34 +02:00
Marcus Efraimsson
b62322b128 Merge pull request #12288 from mlclmj/master
docs: HTTP Endpoint for Deleting an Organization
2018-06-15 10:37:51 +02:00
Marcus Efraimsson
96287a9061 Merge pull request #12294 from vincent-fuchs/patch-1
docs: delete_datasources -> deleteDatasources in provisioning
2018-06-15 10:33:03 +02:00
Vincent Fuchs
c8becf57fa list name is deleteDatasources, not delete_datasources
fixes https://github.com/grafana/grafana/issues/12293
2018-06-15 12:19:53 +05:30
David Kaltschmidt
d5196ab322 remove internal influx ifql datasource 2018-06-14 20:42:33 +01:00
Malcolm Jones
2765afc830 Document the endpoint for deleting an org 2018-06-14 15:28:54 -04:00
bergquist
c63533f004 tests: rewrite into table tests 2018-06-14 20:33:36 +02:00
bergquist
bf4ee9bcc6 influxdb: adds mode func to backend
closes #12286
2018-06-14 20:33:22 +02:00
Marcus Efraimsson
56628996b7 Merge pull request #12280 from dehrax/12224-elastic-response
karma to jest: 6 test files
2018-06-14 17:24:36 +02:00
Marcus Efraimsson
0107491195 changelog: add notes about closing #11484
[skip ci]
2018-06-14 15:59:31 +02:00
Marcus Efraimsson
81d34137a8 Merge pull request #11484 from manacker/master
alerting: fix mobile notifications for microsoft teams notifier
2018-06-14 15:56:18 +02:00
Marcus Efraimsson
e6135ffd4f changelog: add notes about closing #11233
[skip ci]
2018-06-14 15:39:32 +02:00
Marcus Efraimsson
9dd38031e4 Merge pull request #11233 from flopp999/patch-2
units: W/m2(energy), l/h(flow) and kPa(pressure)
2018-06-14 15:36:38 +02:00
Tobias Skarhed
dbbd6b9b66 Remove import 2018-06-14 15:27:11 +02:00
Marcus Efraimsson
1626a66bab Merge pull request #12275 from dehrax/12224-exporter
karma to jest: exporter, playlist_edit_ctrl
2018-06-14 15:10:06 +02:00
Marcus Efraimsson
25e1d723c6 Merge pull request #12276 from grafana/davkal/expose-react-slate
Expose react and slate to external plugins
2018-06-14 14:41:30 +02:00
Tobias Skarhed
23abf044ff Fix PR feedback 2018-06-14 14:37:48 +02:00
David Kaltschmidt
5ec7d60e5f Removed papaparse from external plugin exports 2018-06-14 13:22:16 +01:00
Tobias Skarhed
ef0586acab Karma to Jest: query_builder 2018-06-14 14:20:42 +02:00
Marcus Efraimsson
284045748c Merge pull request #12243 from grafana/12236-ds-proxy-tokens
dsproxy: allow multiple access tokens per datasource
2018-06-14 14:03:08 +02:00
Marcus Efraimsson
99c188f626 Merge pull request #12273 from grafana/12247-link-icon
fix panel link icon positioning
2018-06-14 13:54:08 +02:00
Daniel Lee
10d30f0b73 dsproxy: move http client variable back
After refactoring to be able to mock the client in
a test, the client wasn't a global variable anymore.
This change moves the client back to being a package-
level variable.
2018-06-14 13:39:46 +02:00
Tobias Skarhed
283b39c397 Karma to Jest: threshold_mapper 2018-06-14 13:38:03 +02:00
David Kaltschmidt
eb686c06f2 Expose react and slate to external plugins
Experimental support for react in external plugins

- react
- slate (editor)
- papaparse (csv parsing)
- prismjs (syntax highlighting)

This is needed for new external datasource plugins like Flux
2018-06-14 12:32:57 +01:00
Tobias Skarhed
155ff06cbe Karma to Jest: threshold_manager 2018-06-14 13:22:17 +02:00
Tobias Skarhed
2465f53324 Karma to Jest: query_def, index_pattern 2018-06-14 12:18:11 +02:00
Tobias Skarhed
a911d36dea Remove import 2018-06-14 12:00:54 +02:00
Tobias Skarhed
04029a94cd Karma to Jest: elastic_response 2018-06-14 11:56:03 +02:00
Marcus Efraimsson
71b0326769 Merge pull request #12237 from dehrax/12224-version-panel
Convert tests from Karma to Jest
2018-06-14 10:13:46 +02:00
Patrick O'Carroll
8345e83bd6 changelog: notes about closing #12189 2018-06-14 10:05:11 +02:00
Patrick O'Carroll
9802d86783 Merge pull request #12189 from ryantxu/focus-panel-search
autoFocus the add-panel search filter, and add on enter
2018-06-14 09:54:41 +02:00
Tobias Skarhed
a3552a60e1 Improve test readability 2018-06-14 09:46:36 +02:00
Patrick O'Carroll
a0aa0b55e7 moved link icon in panel header 2018-06-14 09:24:51 +02:00
Tobias Skarhed
4dd9b8f324 Karma to Jest: playlist_edit_ctrl 2018-06-13 16:29:50 +02:00
Tobias Skarhed
94b8cbdc6a Karma to Jest: exporter 2018-06-13 15:15:36 +02:00
nikoalch
0cc8ccdaa5 Update graphite.md 2018-06-13 08:40:15 -04:00
Marcus Efraimsson
ff3db60e2e changelog: add notes about closing #10796
[skip ci]
2018-06-13 13:13:57 +02:00
Marcus Efraimsson
2288e01752 Merge pull request #12169 from alexanderzobnin/feat-10796
Import dashboard to folder
2018-06-13 13:10:52 +02:00
Marcus Efraimsson
c39979a557 Merge pull request #12255 from dehrax/docs-dev-grafana
Update "Building from source" docs
2018-06-13 12:46:45 +02:00
Alexander Zobnin
25504e84ed dashboard import to folder: minor fixes 2018-06-13 10:44:37 +03:00
Tobias Skarhed
c650b50c37 Docs: output location from build script 2018-06-13 09:37:14 +02:00
Carl Bergquist
d3a3e7cfd1 Merge pull request #12222 from PrincipalsOffice/remove-QueryOptionsCtrl-references
removed QueryOptionsCtrl references
2018-06-13 09:34:02 +02:00
Marcus Efraimsson
3b824a5e00 Merge pull request #12244 from kdombeck/patch-1
docs: correct provisioning link
2018-06-13 09:14:32 +02:00
Ken Dombeck
110522307a Correct Provisioning documentation link
It was pointing to Prometheus documentation instead.
2018-06-12 09:01:25 -07:00
Daniel Lee
719ebdc24d dsproxy: allow multiple access tokens per datasource
Changes the cache key for tokens to cache on datasource id +
route path + http method instead of just datasource id.
2018-06-12 17:39:38 +02:00
Daniel Lee
610a90b79a Merge pull request #12231 from dehrax/12224-graph-tooltip
Karma to Jest: graph-tooltip
2018-06-12 16:32:14 +02:00
Tobias Skarhed
72ecf72c45 Mock core in jest-setup 2018-06-12 16:14:22 +02:00
Tobias Skarhed
8155ce9804 Docs: Update Build from Source 2018-06-12 15:06:17 +02:00
Tobias Skarhed
d91e9ddd22 Convert tests from Karma to Jest 2018-06-12 13:54:07 +02:00
Marcus Efraimsson
d8f269954a changelog: add notes about closing #11963
[skip ci]
2018-06-12 13:10:48 +02:00
Patrick O'Carroll
0bb30b146c save-modal save button (#12047)
Disable save dashboard button when saving
2018-06-12 11:54:50 +02:00
Tobias Skarhed
da9a28f37d Karma to Jest: graph-tooltip 2018-06-11 15:59:20 +02:00
Jiang Ye
c92874875c removed QueryOptionsCtrl references 2018-06-09 16:06:03 -04:00
bergquist
dbaa45e51e update latest.json to 5.1.3 2018-06-09 21:26:07 +02:00
ryan
0a3ec93347 use ng-if 2018-06-08 15:49:30 +02:00
David Kaltschmidt
ab76864e61 hot-fix ifql testdatasource() 2018-06-08 14:56:48 +02:00
Carl Bergquist
5ed2880a8b Merge pull request #12202 from grafana/11486_sqlite
Upgrade mattn/go-sqlite3 to v1.7.0
2018-06-08 13:11:46 +02:00
Carl Bergquist
38864d74bd Merge pull request #12180 from grafana/deploy
Propagate specific git-sha into tarball and docker builds
2018-06-08 13:02:07 +02:00
bergquist
484d9e3f9d triggers grafana-docker master build 2018-06-08 12:44:01 +02:00
Patrick O'Carroll
77403b0dc1 changed som variables to values so it's the same for dark and light theme, added special styling for login text, link and input (#12196) 2018-06-07 11:03:58 -07:00
Marcus Efraimsson
c0830e9cde mattn/go-sqlite3 v1.6.0 to v1.7.0 2018-06-07 16:53:39 +02:00
Marcus Efraimsson
7d508df1b4 changelog: add notes about closing #11074
[skip ci]
2018-06-07 16:24:45 +02:00
Patrick O'Carroll
25f2960717 fixed so panel title doesn't wrap and (#12142)
dashboard: fix so panel titles doesn't wrap
2018-06-07 16:20:55 +02:00
Ryan McKinley
1f32871f70 Use Passive eventListener for 'wheel' (#12106)
* make wheel passive

* remove console
2018-06-07 03:41:50 -07:00
Marcus Efraimsson
3fb4eb7322 Merge pull request #12175 from grafana/davkal/12168-fix-explore-setting
Respect explore settings in config ini
2018-06-07 12:08:42 +02:00
ryan
09dbb52423 Merge remote-tracking branch 'grafana/master' into focus-panel-search
* grafana/master:
  nicer collapsed row behaviour (#12186)
  remove DashboardRowCtrl (#12187)
  Annotations support for ifql datasource
  Template variable support for ifql datasource
  Query helpers for IFQL datasource
2018-06-07 10:31:46 +02:00
David
82ae7c6eee Merge pull request #12167 from grafana/davkal/ifql-helpers
Query helpers, Annotations, and Template Variables for IFQL datasource
2018-06-07 10:24:21 +02:00
Ryan McKinley
17a2ce13f0 nicer collapsed row behaviour (#12186)
* nicer collapsed row behavior

* don't block events

* use a div to toggle

* use singular name
2018-06-06 21:58:42 -07:00
Ryan McKinley
415ad8fbf3 remove DashboardRowCtrl (#12187) 2018-06-06 21:57:30 -07:00
ryan
c71608aae8 add panel on enter 2018-06-07 03:08:39 +02:00
ryan
37c45a81a9 autoFocus the search filter 2018-06-07 02:48:54 +02:00
bergquist
f93d83befe adds missing return statement 2018-06-06 22:17:19 +02:00
Sławek Piotrowski
9db964bf35 Fix typo: eleasticsearch -> elasticsearch (#12184) 2018-06-06 13:09:16 -07:00
David Kaltschmidt
cdba2bd184 Annotations support for ifql datasource 2018-06-06 16:11:40 +02:00
Alexander Zobnin
8fd3015e52 dashboard: improve import UX for non-editor users
validate folderId, import only into available folders
2018-06-06 15:35:19 +03:00
David Kaltschmidt
2c86484e54 Template variable support for ifql datasource
* Implements findMetricQuery()
* Macros for template queries: measurements(), tags(), tag_values(),
 field_keys()
* Tests for macro expansion
2018-06-06 14:11:48 +02:00
Tom Wilkie
c47f40d99c Use cut to trim down the SHA1.
Signed-off-by: Tom Wilkie <tom.wilkie@gmail.com>
2018-06-06 12:07:47 +01:00
Alexander Zobnin
9460063ab5 show import menu in sidenav, dashboard search and manage dashboards page
if user has editor permissions for at least one folder
2018-06-06 13:55:00 +03:00
David Kaltschmidt
3bd58446d6 Fix metrics panel test by adding config mock 2018-06-06 11:15:24 +02:00
David Kaltschmidt
661503f828 Respect explore settings in config ini
Previous explore restrictions only took permissions into consideration.

* add `exploreEnabled` to global settings
* only bind `x` if enabled
* only show explore in panel menu if enabled
2018-06-06 10:50:47 +02:00
David Kaltschmidt
56fc82151b Add .html to webpack.hot resolve extensions 2018-06-06 09:12:06 +02:00
Tom Wilkie
681326140d Version the tarball uploaded to s3 and tell the next step about it.
Signed-off-by: Tom Wilkie <tom.wilkie@gmail.com>
2018-06-05 20:02:51 +01:00
Alexander Zobnin
4ff4ac1d5f dashboard: import into current folder from manage folder page 2018-06-05 20:01:48 +03:00
Alexander Zobnin
393f41cd14 dashboard: add Import button to manage page 2018-06-05 19:37:47 +03:00
Alexander Zobnin
4063ae37a4 dashboard: import to folder 2018-06-05 19:23:09 +03:00
David Kaltschmidt
d7379912c1 Query helpers for IFQL datasource
* raw CSV preview next to query field (query inspector is not that
 useful here)
* added result table and record counts
2018-06-05 17:15:35 +02:00
David
45a156577c Merge pull request #12143 from grafana/davkal/ifql-datasource
InfluxDB IFQL datasource (beta)
2018-06-05 14:23:21 +02:00
Leonard Gram
9e7651a1b3 docs: docker secrets available in v5.2.0 2018-06-05 13:54:23 +02:00
David Kaltschmidt
64f5874778 Remove round-robin urls in ifql DS 2018-06-05 13:49:35 +02:00
David Kaltschmidt
1d587450b3 IFQL range variable expansion 2018-06-05 13:42:32 +02:00
Leonard Gram
eb66266629 changelog: release v5.2.0-beta1. 2018-06-05 10:57:18 +02:00
David Kaltschmidt
08ee1da6b1 InfluxDB IFQL datasource 2018-06-04 12:24:47 +02:00
Torkel Ödegaard
c79b1bef15 Merge branch 'master' into master 2018-05-08 21:23:34 +02:00
Marcel Anacker
64c16eb912 Alerting: Fixing mobile notifications in Microsoft Teams 2018-04-04 16:28:12 +02:00
flopp999
08461408a2 Added Kilopascals(kPa) under pressure 2018-03-13 22:17:56 +01:00
flopp999
af63a26be0 Added W/m2(energy) and l/h(flow)
both as .fixedUnit
2018-03-13 22:11:58 +01:00
102 changed files with 10838 additions and 5097 deletions

View File

@@ -183,16 +183,21 @@ jobs:
command: 'sudo pip install awscli'
- run:
name: deploy to s3
command: 'aws s3 sync ./dist s3://$BUCKET_NAME/master'
command: |
# Also
cp dist/grafana-latest.linux-x64.tar.gz dist/grafana-master-$(echo "${CIRCLE_SHA1}" | cut -b1-7).linux-x64.tar.gz
aws s3 sync ./dist s3://$BUCKET_NAME/master
- run:
name: Trigger Windows build
command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} master'
- run:
name: Trigger Docker build
command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN}'
command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} master-$(echo "${CIRCLE_SHA1}" | cut -b1-7)'
- run:
name: Publish to Grafana.com
command: './scripts/publish -apiKey ${GRAFANA_COM_API_KEY}'
command: |
rm dist/grafana-master-$(echo "${CIRCLE_SHA1}" | cut -b1-7).linux-x64.tar.gz
./scripts/publish -apiKey ${GRAFANA_COM_API_KEY}
deploy-release:
docker:
@@ -241,8 +246,8 @@ workflows:
- mysql-integration-test
- postgres-integration-test
filters:
branches:
only: master
branches:
only: master
release:
jobs:
- build-all:

View File

@@ -11,8 +11,5 @@ dump.rdb
node_modules
/local
/tmp
/vendor
*.yml
*.md
/vendor
/tmp

View File

@@ -2,6 +2,24 @@
### New Features
* **Dashboard**: Import dashboard to folder [#10796](https://github.com/grafana/grafana/issues/10796)
### Minor
* **Dashboard**: Fix so panel titles doesn't wrap [#11074](https://github.com/grafana/grafana/issues/11074)
* **Dashboard**: Prevent double-click when saving dashboard [#11963](https://github.com/grafana/grafana/issues/11963)
* **Dashboard**: AutoFocus the add-panel search filter [#12189](https://github.com/grafana/grafana/pull/12189) thx [@ryantxu](https://github.com/ryantxu)
* **Units**: W/m2 (energy), l/h (flow) and kPa (pressure) [#11233](https://github.com/grafana/grafana/pull/11233), thx [@flopp999](https://github.com/flopp999)
* **Units**: Litre/min (flow) and milliLitre/min (flow) [#12282](https://github.com/grafana/grafana/pull/12282), thx [@flopp999](https://github.com/flopp999)
* **Alerting**: Fix mobile notifications for Microsoft Teams alert notifier [#11484](https://github.com/grafana/grafana/pull/11484), thx [@manacker](https://github.com/manacker)
* **Influxdb**: Add support for mode function [#12286](https://github.com/grafana/grafana/issues/12286)
* **Cloudwatch**: Fixes panic caused by bad timerange settings [#12199](https://github.com/grafana/grafana/issues/12199)
* **Auth Proxy**: Whitelist proxy IP address instead of client IP address [#10707](https://github.com/grafana/grafana/issues/10707)
# 5.2.0-beta1 (2018-06-05)
### New Features
* **Elasticsearch**: Alerting support [#5893](https://github.com/grafana/grafana/issues/5893), thx [@WPH95](https://github.com/WPH95)
* **Login**: Change admin password after first login [#11882](https://github.com/grafana/grafana/issues/11882)
* **Alert list panel**: Updated to support filtering alerts by name, dashboard title, folder, tags [#11500](https://github.com/grafana/grafana/issues/11500), [#8168](https://github.com/grafana/grafana/issues/8168), [#6541](https://github.com/grafana/grafana/issues/6541)

6
Gopkg.lock generated
View File

@@ -331,8 +331,8 @@
[[projects]]
name = "github.com/mattn/go-sqlite3"
packages = ["."]
revision = "6c771bb9887719704b210e87e934f08be014bdb1"
version = "v1.6.0"
revision = "323a32be5a2421b8c7087225079c6c900ec397cd"
version = "v1.7.0"
[[projects]]
name = "github.com/matttproud/golang_protobuf_extensions"
@@ -670,6 +670,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
inputs-digest = "08b97771990365d506af4788acb33cdf283ce89856669262ecb84860ad45bfcb"
inputs-digest = "85cc057e0cc074ab5b43bd620772d63d51e07b04e8782fcfe55e6929d2fc40f7"
solver-name = "gps-cdcl"
solver-version = 1

View File

@@ -129,7 +129,7 @@ ignored = [
[[constraint]]
name = "github.com/mattn/go-sqlite3"
version = "1.6.0"
version = "1.7.0"
[[constraint]]
name = "github.com/opentracing/opentracing-go"

View File

@@ -76,7 +76,7 @@ Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://gith
> This feature is available from v5.0
It's possible to manage datasources in Grafana by adding one or more yaml config files in the [`provisioning/datasources`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `datasources` that will be added or updated during start up. If the datasource already exists, Grafana will update it to match the configuration file. The config file can also contain a list of datasources that should be deleted. That list is called `delete_datasources`. Grafana will delete datasources listed in `delete_datasources` before inserting/updating those in the `datasource` list.
It's possible to manage datasources in Grafana by adding one or more yaml config files in the [`provisioning/datasources`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `datasources` that will be added or updated during start up. If the datasource already exists, Grafana will update it to match the configuration file. The config file can also contain a list of datasources that should be deleted. That list is called `deleteDatasources`. Grafana will delete datasources listed in `deleteDatasources` before inserting/updating those in the `datasource` list.
### Running Multiple Grafana Instances

View File

@@ -20,7 +20,7 @@ queries through the use of query references.
## Adding the data source
1. Open the side menu by clicking the Grafana icon in the top header.
2. In the side menu under the `Dashboards` link you should find a link named `Data Sources`.
2. In the side menu under the `Configuration` link you should find a link named `Data Sources`.
3. Click the `+ Add data source` button in the top header.
4. Select `Graphite` from the *Type* dropdown.

View File

@@ -115,7 +115,7 @@ Grafana v5.1 brings an improved workflow for provisioned dashboards:
Available options in the dialog will let you `Copy JSON to Clipboard` and/or `Save JSON to file` which can help you synchronize your dashboard changes back to the provisioning source.
More information in the [Provisioning documentation](/features/datasources/prometheus/).
More information in the [Provisioning documentation](/administration/provisioning/).
<div class="clearfix"></div>

View File

@@ -331,6 +331,27 @@ Content-Type: application/json
```
## Update Users in Organisation
`PATCH /api/orgs/:orgId/users/:userId`
**Example Request**:
```http
PATCH /api/orgs/1/users/2 HTTP/1.1
Accept: application/json
Content-Type: application/json
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
```
**Example Response**:
```http
HTTP/1.1 200
Content-Type: application/json
```
## Delete User in Organisation

View File

@@ -49,6 +49,11 @@ $ docker run \
grafana/grafana:5.1.0
```
## Running of the master branch
For every successful commit we publish a Grafana container to [`grafana/grafana`](https://hub.docker.com/r/grafana/grafana/tags/) and [`grafana/grafana-dev`](https://hub.docker.com/r/grafana/grafana-dev/tags/). In the `grafana/grafana` container we will always overwrite the `master` tag with the latest version. In `grafana/grafana-dev` we will include
the git commit in the tag. If you run Grafana master in production we **strongly** recommend that you use the latter, since different machines might run different versions of Grafana if they pull the master tag at different times.
## Installing Plugins for Grafana
Pass the plugins you want installed to docker with the `GF_INSTALL_PLUGINS` environment variable as a comma separated list. This will pass each plugin name to `grafana-cli plugins install ${plugin}` and install them when Grafana starts.
@@ -132,6 +137,8 @@ docker run -d --user $ID --volume "$PWD/data:/var/lib/grafana" -p 3000:3000 graf
## Reading secrets from files (support for Docker Secrets)
> Available in v5.2.0 and later
It's possible to supply Grafana with configuration through files. This works well with [Docker Secrets](https://docs.docker.com/engine/swarm/secrets/) as secrets by default get mapped into `/run/secrets/<name of secret>` of the container.
You can do this with any of the configuration options in conf/grafana.ini by setting `GF_<SectionName>_<KeyName>_FILE` to the path of the file holding the secret.

View File

@@ -25,7 +25,6 @@ To interact with the rest of grafana the plugins module file can export 5 differ
- Datasource (Required)
- QueryCtrl (Required)
- ConfigCtrl (Required)
- QueryOptionsCtrl
- AnnotationsQueryCtrl
## Plugin json
@@ -182,12 +181,6 @@ A JavaScript class that will be instantiated and treated as an Angular controlle
Requires a static template or templateUrl variable which will be rendered as the view for this controller.
## QueryOptionsCtrl
A JavaScript class that will be instantiated and treated as an Angular controller when the user edits metrics in a panel. This controller is responsible for handling panel wide settings for the datasource, such as interval, rate and aggregations if needed.
Requires a static template or templateUrl variable which will be rendered as the view for this controller.
## AnnotationsQueryCtrl
A JavaScript class that will be instantiated and treated as an Angular controller when the user choose this type of datasource in the templating menu in the dashboard.

View File

@@ -13,7 +13,7 @@ dev environment. Grafana ships with its own required backend server; also comple
## Dependencies
- [Go 1.9.2](https://golang.org/dl/)
- [Go 1.10](https://golang.org/dl/)
- [Git](https://git-scm.com/downloads)
- [NodeJS LTS](https://nodejs.org/download/)
- node-gyp is the Node.js native addon build tool and it requires extra dependencies: python 2.7, make and GCC. These are already installed for most Linux distros and MacOS. See the Building On Windows section or the [node-gyp installation instructions](https://github.com/nodejs/node-gyp#installation) for more details.
@@ -66,13 +66,13 @@ You can run a local instance of Grafana by running:
```bash
./bin/grafana-server
```
If you built the binary with `go run build.go build`, run `./bin/grafana-server`
Or, if you built the binary with `go run build.go build`, run `./bin/<os>-<architecture>/grafana-server`
If you built it with `go build .`, run `./grafana`
Open grafana in your browser (default [http://localhost:3000](http://localhost:3000)) and login with admin user (default user/pass = admin/admin).
## Developing Grafana
# Developing Grafana
To add features, customize your config, etc, you'll need to rebuild the backend when you change the source code. We use a tool named `bra` that
does this.
@@ -124,7 +124,7 @@ Learn more about Grafana config options in the [Configuration section](/installa
## Create a pull request
Please contribute to the Grafana project and submit a pull request! Build new features, write or update documentation, fix bugs and generally make Grafana even more awesome.
## Troubleshooting
# Troubleshooting
**Problem**: PhantomJS or node-sass errors when running grunt

View File

@@ -1,4 +1,4 @@
{
"stable": "5.0.4",
"testing": "5.0.4"
"stable": "5.1.3",
"testing": "5.1.3"
}

View File

@@ -4,7 +4,7 @@
"company": "Grafana Labs"
},
"name": "grafana",
"version": "5.2.0-beta1",
"version": "5.2.0-beta2",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"

View File

@@ -57,4 +57,5 @@ type ImportDashboardCommand struct {
Overwrite bool `json:"overwrite"`
Dashboard *simplejson.Json `json:"dashboard"`
Inputs []plugins.ImportDashboardInput `json:"inputs"`
FolderId int64 `json:"folderId"`
}

View File

@@ -140,6 +140,7 @@ func getFrontendSettingsMap(c *m.ReqContext) (map[string]interface{}, error) {
"authProxyEnabled": setting.AuthProxyEnabled,
"ldapEnabled": setting.LdapEnabled,
"alertingEnabled": setting.AlertingEnabled,
"exploreEnabled": setting.ExploreEnabled,
"googleAnalyticsId": setting.GoogleAnalyticsId,
"disableLoginForm": setting.DisableLoginForm,
"externalUserMngInfo": setting.ExternalUserMngInfo,

View File

@@ -99,9 +99,10 @@ func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) {
if c.OrgRole == m.ROLE_ADMIN || c.OrgRole == m.ROLE_EDITOR {
children = append(children, &dtos.NavLink{Text: "Folder", SubTitle: "Create a new folder to organize your dashboards", Id: "folder", Icon: "gicon gicon-folder-new", Url: setting.AppSubUrl + "/dashboards/folder/new"})
children = append(children, &dtos.NavLink{Text: "Import", SubTitle: "Import dashboard from file or Grafana.com", Id: "import", Icon: "gicon gicon-dashboard-import", Url: setting.AppSubUrl + "/dashboard/import"})
}
children = append(children, &dtos.NavLink{Text: "Import", SubTitle: "Import dashboard from file or Grafana.com", Id: "import", Icon: "gicon gicon-dashboard-import", Url: setting.AppSubUrl + "/dashboard/import"})
data.NavTree = append(data.NavTree, &dtos.NavLink{
Text: "Create",
Id: "create",

View File

@@ -25,12 +25,9 @@ import (
)
var (
logger = log.New("data-proxy-log")
client = &http.Client{
Timeout: time.Second * 30,
Transport: &http.Transport{Proxy: http.ProxyFromEnvironment},
}
tokenCache = map[int64]*jwtToken{}
logger = log.New("data-proxy-log")
tokenCache = map[string]*jwtToken{}
client = newHTTPClient()
)
type jwtToken struct {
@@ -48,6 +45,10 @@ type DataSourceProxy struct {
plugin *plugins.DataSourcePlugin
}
type httpClient interface {
Do(req *http.Request) (*http.Response, error)
}
func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx *m.ReqContext, proxyPath string) *DataSourceProxy {
targetURL, _ := url.Parse(ds.Url)
@@ -60,6 +61,13 @@ func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx
}
}
func newHTTPClient() httpClient {
return &http.Client{
Timeout: time.Second * 30,
Transport: &http.Transport{Proxy: http.ProxyFromEnvironment},
}
}
func (proxy *DataSourceProxy) HandleRequest() {
if err := proxy.validateRequest(); err != nil {
proxy.ctx.JsonApiErr(403, err.Error(), nil)
@@ -311,7 +319,7 @@ func (proxy *DataSourceProxy) applyRoute(req *http.Request) {
}
func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error) {
if cachedToken, found := tokenCache[proxy.ds.Id]; found {
if cachedToken, found := tokenCache[proxy.getAccessTokenCacheKey()]; found {
if cachedToken.ExpiresOn.After(time.Now().Add(time.Second * 10)) {
logger.Info("Using token from cache")
return cachedToken.AccessToken, nil
@@ -350,12 +358,16 @@ func (proxy *DataSourceProxy) getAccessToken(data templateData) (string, error)
expiresOnEpoch, _ := strconv.ParseInt(token.ExpiresOnString, 10, 64)
token.ExpiresOn = time.Unix(expiresOnEpoch, 0)
tokenCache[proxy.ds.Id] = &token
tokenCache[proxy.getAccessTokenCacheKey()] = &token
logger.Info("Got new access token", "ExpiresOn", token.ExpiresOn)
return token.AccessToken, nil
}
func (proxy *DataSourceProxy) getAccessTokenCacheKey() string {
return fmt.Sprintf("%v_%v_%v", proxy.ds.Id, proxy.route.Path, proxy.route.Method)
}
func interpolateString(text string, data templateData) (string, error) {
t, err := template.New("content").Parse(text)
if err != nil {

View File

@@ -1,9 +1,13 @@
package pluginproxy
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"testing"
"time"
macaron "gopkg.in/macaron.v1"
@@ -100,6 +104,112 @@ func TestDSRouteRule(t *testing.T) {
})
})
Convey("Plugin with multiple routes for token auth", func() {
plugin := &plugins.DataSourcePlugin{
Routes: []*plugins.AppPluginRoute{
{
Path: "pathwithtoken1",
Url: "https://api.nr1.io/some/path",
TokenAuth: &plugins.JwtTokenAuth{
Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token",
Params: map[string]string{
"grant_type": "client_credentials",
"client_id": "{{.JsonData.clientId}}",
"client_secret": "{{.SecureJsonData.clientSecret}}",
"resource": "https://api.nr1.io",
},
},
},
{
Path: "pathwithtoken2",
Url: "https://api.nr2.io/some/path",
TokenAuth: &plugins.JwtTokenAuth{
Url: "https://login.server.com/{{.JsonData.tenantId}}/oauth2/token",
Params: map[string]string{
"grant_type": "client_credentials",
"client_id": "{{.JsonData.clientId}}",
"client_secret": "{{.SecureJsonData.clientSecret}}",
"resource": "https://api.nr2.io",
},
},
},
},
}
setting.SecretKey = "password"
key, _ := util.Encrypt([]byte("123"), "password")
ds := &m.DataSource{
JsonData: simplejson.NewFromAny(map[string]interface{}{
"clientId": "asd",
"tenantId": "mytenantId",
}),
SecureJsonData: map[string][]byte{
"clientSecret": key,
},
}
req, _ := http.NewRequest("GET", "http://localhost/asd", nil)
ctx := &m.ReqContext{
Context: &macaron.Context{
Req: macaron.Request{Request: req},
},
SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_EDITOR},
}
Convey("When creating and caching access tokens", func() {
var authorizationHeaderCall1 string
var authorizationHeaderCall2 string
Convey("first call should add authorization header with access token", func() {
json, err := ioutil.ReadFile("./test-data/access-token-1.json")
So(err, ShouldBeNil)
client = newFakeHTTPClient(json)
proxy1 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1")
proxy1.route = plugin.Routes[0]
proxy1.applyRoute(req)
authorizationHeaderCall1 = req.Header.Get("Authorization")
So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path")
So(authorizationHeaderCall1, ShouldStartWith, "Bearer eyJ0e")
Convey("second call to another route should add a different access token", func() {
json2, err := ioutil.ReadFile("./test-data/access-token-2.json")
So(err, ShouldBeNil)
req, _ := http.NewRequest("GET", "http://localhost/asd", nil)
client = newFakeHTTPClient(json2)
proxy2 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken2")
proxy2.route = plugin.Routes[1]
proxy2.applyRoute(req)
authorizationHeaderCall2 = req.Header.Get("Authorization")
So(req.URL.String(), ShouldEqual, "https://api.nr2.io/some/path")
So(authorizationHeaderCall1, ShouldStartWith, "Bearer eyJ0e")
So(authorizationHeaderCall2, ShouldStartWith, "Bearer eyJ0e")
So(authorizationHeaderCall2, ShouldNotEqual, authorizationHeaderCall1)
Convey("third call to first route should add cached access token", func() {
req, _ := http.NewRequest("GET", "http://localhost/asd", nil)
client = newFakeHTTPClient([]byte{})
proxy3 := NewDataSourceProxy(ds, plugin, ctx, "pathwithtoken1")
proxy3.route = plugin.Routes[0]
proxy3.applyRoute(req)
authorizationHeaderCall3 := req.Header.Get("Authorization")
So(req.URL.String(), ShouldEqual, "https://api.nr1.io/some/path")
So(authorizationHeaderCall1, ShouldStartWith, "Bearer eyJ0e")
So(authorizationHeaderCall3, ShouldStartWith, "Bearer eyJ0e")
So(authorizationHeaderCall3, ShouldEqual, authorizationHeaderCall1)
})
})
})
})
})
Convey("When proxying graphite", func() {
plugin := &plugins.DataSourcePlugin{}
ds := &m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE}
@@ -214,3 +324,27 @@ func TestDSRouteRule(t *testing.T) {
})
}
type httpClientStub struct {
fakeBody []byte
}
func (c *httpClientStub) Do(req *http.Request) (*http.Response, error) {
bodyJSON, _ := simplejson.NewJson(c.fakeBody)
_, passedTokenCacheTest := bodyJSON.CheckGet("expires_on")
So(passedTokenCacheTest, ShouldBeTrue)
bodyJSON.Set("expires_on", fmt.Sprint(time.Now().Add(time.Second*60).Unix()))
body, _ := bodyJSON.MarshalJSON()
resp := &http.Response{
Body: ioutil.NopCloser(bytes.NewReader(body)),
}
return resp, nil
}
func newFakeHTTPClient(fakeBody []byte) httpClient {
return &httpClientStub{
fakeBody: fakeBody,
}
}

View File

@@ -0,0 +1,9 @@
{
"token_type": "Bearer",
"expires_in": "3599",
"ext_expires_in": "0",
"expires_on": "1528740417",
"not_before": "1528736517",
"resource": "https://api.nr1.io",
"access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayIsImtpZCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayJ9.eyJhdWQiOiJodHRwczovL2FwaS5sb2dhbmFseXRpY3MuaW8iLCJpc3MiOiJodHRwczovL3N0cy53aW5kb3dzLm5ldC9lN2YzZjY2MS1hOTMzLTRiM2YtODE3Ni01MWM0Zjk4MmVjNDgvIiwiaWF0IjoxNTI4NzM2NTE3LCJuYmYiOjE1Mjg3MzY1MTcsImV4cCI6MTUyODc0MDQxNywiYWlvIjoiWTJkZ1lBaStzaWRsT3NmQ2JicGhLMSsremttN0NBQT0iLCJhcHBpZCI6IjdmMzJkYjdjLTZmNmYtNGU4OC05M2Q5LTlhZTEyNmMwYTU1ZiIsImFwcGlkYWNyIjoiMSIsImlkcCI6Imh0dHBzOi8vc3RzLndpbmRvd3MubmV0L2U3ZjNmNjYxLWE5MzMtNGIzZi04MTc2LTUxYzRmOTgyZWM0OC8iLCJvaWQiOiI1NDQ5ZmJjOS1mYWJhLTRkNjItODE2Yy05ZmMwMzZkMWViN2UiLCJzdWIiOiI1NDQ5ZmJjOS1mYWJhLTRkNjItODE2Yy05ZmMwMzZkMWViN2UiLCJ0aWQiOiJlN2YzZjY2MS1hOTMzLTRiM2YtODE3Ni01MWM0Zjk4MmVjNDgiLCJ1dGkiOiJZQTlQa2lxUy1VV1hMQjhIRnU0U0FBIiwidmVyIjoiMS4wIn0.ga5qudt4LDMKTStAxUmzjyZH8UFBAaFirJqpTdmYny4NtkH6JT2EILvjTjYxlKeTQisvwx9gof0PyicZIab9d6wlMa2xiLzr2nmaOonYClY8fqBaRTgc1xVjrKFw5SCgpx3FnEyJhIWvVPIfaWaogSHcQbIpe4kdk4tz-ccmrx0D1jsziSI4BZcJcX04aJuHZGz9k4mQZ_AA5sQSeQaNuojIng6rYoIifAXFYBZPTbeeeqmiGq8v0IOLeNKbC0POeQCJC_KKBG6Z_MV2KgPxFEzQuX2ZFmRD_wGPteV5TUBxh1kARdqexA3e0zAKSawR9kmrAiZ21lPr4tX2Br_HDg"
}

View File

@@ -0,0 +1,9 @@
{
"token_type": "Bearer",
"expires_in": "3599",
"ext_expires_in": "0",
"expires_on": "1528662059",
"not_before": "1528658159",
"resource": "https://api.nr2.io",
"access_token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayIsImtpZCI6ImlCakwxUmNxemhpeTRmcHhJeGRacW9oTTJZayJ9.eyJhdWQiOiJodHRwczovL21hbmFnZW1lbnQuYXp1cmUuY29tLyIsImlzcyI6Imh0dHBzOi8vc3RzLndpbmRvd3MubmV0L2U3ZjNmNjYxLWE5MzMtNGIzZi04MTc2LTUxYzRmOTgyZWM0OC8iLCJpYXQiOjE1Mjg2NTgxNTksIm5iZiI6MTUyODY1ODE1OSwiZXhwIjoxNTI4NjYyMDU5LCJhaW8iOiJZMmRnWUFpK3NpZGxPc2ZDYmJwaEsxKyt6a203Q0FBPSIsImFwcGlkIjoiODg5YjdlZDgtMWFlZC00ODZlLTk3ODktODE5NzcwYmJiNjFhIiwiYXBwaWRhY3IiOiIxIiwiaWRwIjoiaHR0cHM6Ly9zdHMud2luZG93cy5uZXQvZTdmM2Y2NjEtYTkzMy00YjNmLTgxNzYtNTFjNGY5ODJlYzQ4LyIsIm9pZCI6IjY0YzQxNjMyLTliOWUtNDczNy05MTYwLTBlNjAzZTg3NjljYyIsInN1YiI6IjY0YzQxNjMyLTliOWUtNDczNy05MTYwLTBlNjAzZTg3NjljYyIsInRpZCI6ImU3ZjNmNjYxLWE5MzMtNGIzZi04MTc2LTUxYzRmOTgyZWM0OCIsInV0aSI6IkQ1ODZHSGUySDBPd0ptOU0xeVlKQUEiLCJ2ZXIiOiIxLjAifQ.Pw8c8gpoZptw3lGreQoHQaMVOozSaTE5D38Vm2aCHRB3DvD3N-Qcm1x0ZCakUEV2sJd7jvx4XtPFuW7063T0V1deExL4rzzvIo0ZfMmURf9tCTiKFKYibqf8_PtfPSz0t9eNDEUGmWDh1Wgssb4W_H-wPqgl9VPMT7T6ynkfIm0-ODPZTBzgSHiY8C_L1-DkhsK7XiqbUlSDgx9FpfChZS3ah8QhA8geqnb_HVuSktg7WhpxmogSpK5QdrwSE3jsbItpzOfLJ4iBd2ExzS2C0y8H_Coluk3Y1YA07tAxJ6Y7oBv-XwGqNfZhveOCQOzX-U3dFod3fXXysjB0UB89WQ"
}

View File

@@ -174,6 +174,7 @@ func ImportDashboard(c *m.ReqContext, apiCmd dtos.ImportDashboardCommand) Respon
Path: apiCmd.Path,
Inputs: apiCmd.Inputs,
Overwrite: apiCmd.Overwrite,
FolderId: apiCmd.FolderId,
Dashboard: apiCmd.Dashboard,
}

View File

@@ -308,6 +308,10 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
} else {
filter_replace = getLdapAttr(a.server.GroupSearchFilterUserAttribute, searchResult)
}
if a.server.GroupSearchFilterUserAttribute == "dn" {
filter_replace = searchResult.Entries[0].DN
}
filter := strings.Replace(a.server.GroupSearchFilter, "%s", ldap.EscapeFilter(filter_replace), -1)
a.log.Info("Searching for user's groups", "filter", filter)
@@ -330,7 +334,11 @@ func (a *ldapAuther) searchForUser(username string) (*LdapUserInfo, error) {
if len(groupSearchResult.Entries) > 0 {
for i := range groupSearchResult.Entries {
memberOf = append(memberOf, getLdapAttrN(a.server.Attr.MemberOf, groupSearchResult, i))
if a.server.Attr.MemberOf == "dn" {
memberOf = append(memberOf, groupSearchResult.Entries[i].DN)
} else {
memberOf = append(memberOf, getLdapAttrN(a.server.Attr.MemberOf, groupSearchResult, i))
}
}
break
}

View File

@@ -2,7 +2,6 @@ package middleware
import (
"fmt"
"net"
"net/mail"
"reflect"
"strings"
@@ -29,7 +28,7 @@ func initContextWithAuthProxy(ctx *m.ReqContext, orgID int64) bool {
}
// if auth proxy ip(s) defined, check if request comes from one of those
if err := checkAuthenticationProxy(ctx.Req.RemoteAddr, proxyHeaderValue); err != nil {
if err := checkAuthenticationProxy(ctx.RemoteAddr(), proxyHeaderValue); err != nil {
ctx.Handle(407, "Proxy authentication required", err)
return true
}
@@ -197,18 +196,23 @@ func checkAuthenticationProxy(remoteAddr string, proxyHeaderValue string) error
return nil
}
proxies := strings.Split(setting.AuthProxyWhitelist, ",")
sourceIP, _, err := net.SplitHostPort(remoteAddr)
if err != nil {
return err
// Multiple ip addresses? Right-most IP address is the IP address of the most recent proxy
if strings.Contains(remoteAddr, ",") {
sourceIPs := strings.Split(remoteAddr, ",")
remoteAddr = strings.TrimSpace(sourceIPs[len(sourceIPs)-1])
}
remoteAddr = strings.TrimPrefix(remoteAddr, "[")
remoteAddr = strings.TrimSuffix(remoteAddr, "]")
proxies := strings.Split(setting.AuthProxyWhitelist, ",")
// Compare allowed IP addresses to actual address
for _, proxyIP := range proxies {
if sourceIP == strings.TrimSpace(proxyIP) {
if remoteAddr == strings.TrimSpace(proxyIP) {
return nil
}
}
return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, sourceIP)
return fmt.Errorf("Request for user (%s) from %s is not from the authentication proxy", proxyHeaderValue, remoteAddr)
}

View File

@@ -293,6 +293,61 @@ func TestMiddlewareContext(t *testing.T) {
})
})
middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is not trusted", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
setting.AuthProxyHeaderProperty = "username"
setting.AuthProxyWhitelist = "192.168.1.1, 2001::23"
bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
return nil
})
bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
cmd.Result = &m.User{Id: 33}
return nil
})
sc.fakeReq("GET", "/")
sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.1, 192.168.1.2")
sc.exec()
Convey("should return 407 status code", func() {
So(sc.resp.Code, ShouldEqual, 407)
So(sc.resp.Body.String(), ShouldContainSubstring, "Request for user (torkelo) from 192.168.1.2 is not from the authentication proxy")
})
})
middlewareScenario("When auth_proxy is enabled and request has X-Forwarded-For that is trusted", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"
setting.AuthProxyHeaderProperty = "username"
setting.AuthProxyWhitelist = "192.168.1.1, 2001::23"
bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error {
query.Result = &m.SignedInUser{OrgId: 4, UserId: 33}
return nil
})
bus.AddHandler("test", func(cmd *m.UpsertUserCommand) error {
cmd.Result = &m.User{Id: 33}
return nil
})
sc.fakeReq("GET", "/")
sc.req.Header.Add("X-WEBAUTH-USER", "torkelo")
sc.req.Header.Add("X-Forwarded-For", "client-ip, 192.168.1.2, 192.168.1.1")
sc.exec()
Convey("Should init context with user info", func() {
So(sc.context.IsSignedIn, ShouldBeTrue)
So(sc.context.UserId, ShouldEqual, 33)
So(sc.context.OrgId, ShouldEqual, 4)
})
})
middlewareScenario("When session exists for previous user, create a new session", func(sc *scenarioContext) {
setting.AuthProxyEnabled = true
setting.AuthProxyHeaderName = "X-WEBAUTH-USER"

View File

@@ -16,6 +16,7 @@ type ImportDashboardCommand struct {
Path string
Inputs []ImportDashboardInput
Overwrite bool
FolderId int64
OrgId int64
User *m.SignedInUser
@@ -70,7 +71,7 @@ func ImportDashboard(cmd *ImportDashboardCommand) error {
UserId: cmd.User.UserId,
Overwrite: cmd.Overwrite,
PluginId: cmd.PluginId,
FolderId: dashboard.FolderId,
FolderId: cmd.FolderId,
}
dto := &dashboards.SaveDashboardDTO{
@@ -91,6 +92,7 @@ func ImportDashboard(cmd *ImportDashboardCommand) error {
Title: savedDash.Title,
Path: cmd.Path,
Revision: savedDash.Data.Get("revision").MustInt64(1),
FolderId: savedDash.FolderId,
ImportedUri: "db/" + savedDash.Slug,
ImportedUrl: savedDash.GetUrl(),
ImportedRevision: dashboard.Data.Get("revision").MustInt64(1),

View File

@@ -17,6 +17,7 @@ type PluginDashboardInfoDTO struct {
ImportedUrl string `json:"importedUrl"`
Slug string `json:"slug"`
DashboardId int64 `json:"dashboardId"`
FolderId int64 `json:"folderId"`
ImportedRevision int64 `json:"importedRevision"`
Revision int64 `json:"revision"`
Description string `json:"description"`

View File

@@ -41,10 +41,8 @@ func NewTeamsNotifier(model *m.AlertNotification) (alerting.Notifier, error) {
type TeamsNotifier struct {
NotifierBase
Url string
Recipient string
Mention string
log log.Logger
Url string
log log.Logger
}
func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error {
@@ -75,17 +73,17 @@ func (this *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error {
})
}
message := this.Mention
if evalContext.Rule.State != m.AlertStateOK { //don't add message when going back to alert state ok.
message += " " + evalContext.Rule.Message
} else {
message += " " // summary must not be empty
message := ""
if evalContext.Rule.State != m.AlertStateOK { //dont add message when going back to alert state ok.
message = evalContext.Rule.Message
}
body := map[string]interface{}{
"@type": "MessageCard",
"@context": "http://schema.org/extensions",
"summary": message,
"@type": "MessageCard",
"@context": "http://schema.org/extensions",
// summary MUST not be empty or the webhook request fails
// summary SHOULD contain some meaningful information, since it is used for mobile notifications
"summary": evalContext.GetNotificationTitle(),
"title": evalContext.GetNotificationTitle(),
"themeColor": evalContext.GetStateModel().Color,
"sections": []map[string]interface{}{

View File

@@ -83,7 +83,7 @@ func (g *dashboardGuardianImpl) checkAcl(permission m.PermissionType, acl []*m.D
for _, p := range acl {
// user match
if !g.user.IsAnonymous {
if !g.user.IsAnonymous && p.UserId > 0 {
if p.UserId == g.user.UserId && p.Permission >= permission {
return true, nil
}

View File

@@ -162,6 +162,11 @@ func TestGuardianViewer(t *testing.T) {
sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_EDIT, EDITOR_ACCESS)
sc.parentFolderPermissionScenario(VIEWER, m.PERMISSION_VIEW, VIEWER_ACCESS)
})
apiKeyScenario("Given api key with viewer role", t, m.ROLE_VIEWER, func(sc *scenarioContext) {
// dashboard has default permissions
sc.defaultPermissionScenario(VIEWER, m.PERMISSION_EDIT, VIEWER_ACCESS)
})
})
}
@@ -267,7 +272,7 @@ func (sc *scenarioContext) verifyExpectedPermissionsFlags() {
actualFlag = NO_ACCESS
}
if sc.expectedFlags&actualFlag != sc.expectedFlags {
if actualFlag&sc.expectedFlags != actualFlag {
sc.reportFailure(tc, sc.expectedFlags.String(), actualFlag.String())
}

View File

@@ -48,6 +48,27 @@ func orgRoleScenario(desc string, t *testing.T, role m.RoleType, fn scenarioFunc
})
}
func apiKeyScenario(desc string, t *testing.T, role m.RoleType, fn scenarioFunc) {
user := &m.SignedInUser{
UserId: 0,
OrgId: orgID,
OrgRole: role,
ApiKeyId: 10,
}
guard := New(dashboardID, orgID, user)
sc := &scenarioContext{
t: t,
orgRoleScenario: desc,
givenUser: user,
givenDashboardID: dashboardID,
g: guard,
}
Convey(desc, func() {
fn(sc)
})
}
func permissionScenario(desc string, dashboardID int64, sc *scenarioContext, permissions []*m.DashboardAclInfoDTO, fn scenarioFunc) {
bus.ClearBusHandlers()

View File

@@ -150,7 +150,7 @@ func TestAccountDataAccess(t *testing.T) {
})
Convey("Can set using org", func() {
cmd := m.SetUsingOrgCommand{UserId: ac2.Id, OrgId: ac1.Id}
cmd := m.SetUsingOrgCommand{UserId: ac2.Id, OrgId: ac1.OrgId}
err := SetUsingOrg(&cmd)
So(err, ShouldBeNil)
@@ -159,13 +159,25 @@ func TestAccountDataAccess(t *testing.T) {
err := GetSignedInUser(&query)
So(err, ShouldBeNil)
So(query.Result.OrgId, ShouldEqual, ac1.Id)
So(query.Result.OrgId, ShouldEqual, ac1.OrgId)
So(query.Result.Email, ShouldEqual, "ac2@test.com")
So(query.Result.Name, ShouldEqual, "ac2 name")
So(query.Result.Login, ShouldEqual, "ac2")
So(query.Result.OrgName, ShouldEqual, "ac1@test.com")
So(query.Result.OrgRole, ShouldEqual, "Viewer")
})
Convey("Should set last org as current when removing user from current", func() {
remCmd := m.RemoveOrgUserCommand{OrgId: ac1.OrgId, UserId: ac2.Id}
err := RemoveOrgUser(&remCmd)
So(err, ShouldBeNil)
query := m.GetSignedInUserQuery{UserId: ac2.Id}
err = GetSignedInUser(&query)
So(err, ShouldBeNil)
So(query.Result.OrgId, ShouldEqual, ac2.OrgId)
})
})
Convey("Cannot delete last admin org user", func() {

View File

@@ -20,7 +20,14 @@ func init() {
func AddOrgUser(cmd *m.AddOrgUserCommand) error {
return inTransaction(func(sess *DBSession) error {
// check if user exists
if res, err := sess.Query("SELECT 1 from org_user WHERE org_id=? and user_id=?", cmd.OrgId, cmd.UserId); err != nil {
var user m.User
if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil {
return err
} else if !exists {
return m.ErrUserNotFound
}
if res, err := sess.Query("SELECT 1 from org_user WHERE org_id=? and user_id=?", cmd.OrgId, user.Id); err != nil {
return err
} else if len(res) == 1 {
return m.ErrOrgUserAlreadyAdded
@@ -41,7 +48,26 @@ func AddOrgUser(cmd *m.AddOrgUserCommand) error {
}
_, err := sess.Insert(&entity)
return err
if err != nil {
return err
}
var userOrgs []*m.UserOrgDTO
sess.Table("org_user")
sess.Join("INNER", "org", "org_user.org_id=org.id")
sess.Where("org_user.user_id=? AND org_user.org_id=?", user.Id, user.OrgId)
sess.Cols("org.name", "org_user.role", "org_user.org_id")
err = sess.Find(&userOrgs)
if err != nil {
return err
}
if len(userOrgs) == 0 {
return setUsingOrgInTransaction(sess, user.Id, cmd.OrgId)
}
return nil
})
}
@@ -110,6 +136,14 @@ func GetOrgUsers(query *m.GetOrgUsersQuery) error {
func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error {
return inTransaction(func(sess *DBSession) error {
// check if user exists
var user m.User
if exists, err := sess.Id(cmd.UserId).Get(&user); err != nil {
return err
} else if !exists {
return m.ErrUserNotFound
}
deletes := []string{
"DELETE FROM org_user WHERE org_id=? and user_id=?",
"DELETE FROM dashboard_acl WHERE org_id=? and user_id = ?",
@@ -123,6 +157,32 @@ func RemoveOrgUser(cmd *m.RemoveOrgUserCommand) error {
}
}
var userOrgs []*m.UserOrgDTO
sess.Table("org_user")
sess.Join("INNER", "org", "org_user.org_id=org.id")
sess.Where("org_user.user_id=?", user.Id)
sess.Cols("org.name", "org_user.role", "org_user.org_id")
err := sess.Find(&userOrgs)
if err != nil {
return err
}
hasCurrentOrgSet := false
for _, userOrg := range userOrgs {
if user.OrgId == userOrg.OrgId {
hasCurrentOrgSet = true
break
}
}
if !hasCurrentOrgSet && len(userOrgs) > 0 {
err = setUsingOrgInTransaction(sess, user.Id, userOrgs[0].OrgId)
if err != nil {
return err
}
}
return validateOneAdminLeftInOrg(cmd.OrgId, sess)
})
}

View File

@@ -89,7 +89,7 @@ func (ss *SqlStore) ensureAdminUser() error {
systemUserCountQuery := m.GetSystemUserCountStatsQuery{}
if err := bus.Dispatch(&systemUserCountQuery); err != nil {
fmt.Errorf("Could not determine if admin user exists: %v", err)
return fmt.Errorf("Could not determine if admin user exists: %v", err)
}
if systemUserCountQuery.Result.Count > 0 {

View File

@@ -290,16 +290,20 @@ func SetUsingOrg(cmd *m.SetUsingOrgCommand) error {
}
return inTransaction(func(sess *DBSession) error {
user := m.User{
Id: cmd.UserId,
OrgId: cmd.OrgId,
}
_, err := sess.Id(cmd.UserId).Update(&user)
return err
return setUsingOrgInTransaction(sess, cmd.UserId, cmd.OrgId)
})
}
func setUsingOrgInTransaction(sess *DBSession, userID int64, orgID int64) error {
user := m.User{
Id: userID,
OrgId: orgID,
}
_, err := sess.Id(userID).Update(&user)
return err
}
func GetUserProfile(query *m.GetUserProfileQuery) error {
var user m.User
has, err := x.Id(query.UserId).Get(&user)

View File

@@ -96,33 +96,33 @@ func TestUserDataAccess(t *testing.T) {
})
Convey("when a user is an org member and has been assigned permissions", func() {
err = AddOrgUser(&m.AddOrgUserCommand{LoginOrEmail: users[0].Login, Role: m.ROLE_VIEWER, OrgId: users[0].OrgId})
err = AddOrgUser(&m.AddOrgUserCommand{LoginOrEmail: users[1].Login, Role: m.ROLE_VIEWER, OrgId: users[0].OrgId, UserId: users[1].Id})
So(err, ShouldBeNil)
testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: users[0].OrgId, UserId: users[0].Id, Permission: m.PERMISSION_EDIT})
testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: users[0].OrgId, UserId: users[1].Id, Permission: m.PERMISSION_EDIT})
So(err, ShouldBeNil)
err = SavePreferences(&m.SavePreferencesCommand{UserId: users[0].Id, OrgId: users[0].OrgId, HomeDashboardId: 1, Theme: "dark"})
err = SavePreferences(&m.SavePreferencesCommand{UserId: users[1].Id, OrgId: users[0].OrgId, HomeDashboardId: 1, Theme: "dark"})
So(err, ShouldBeNil)
Convey("when the user is deleted", func() {
err = DeleteUser(&m.DeleteUserCommand{UserId: users[0].Id})
err = DeleteUser(&m.DeleteUserCommand{UserId: users[1].Id})
So(err, ShouldBeNil)
Convey("Should delete connected org users and permissions", func() {
query := &m.GetOrgUsersQuery{OrgId: 1}
query := &m.GetOrgUsersQuery{OrgId: users[0].OrgId}
err = GetOrgUsersForTest(query)
So(err, ShouldBeNil)
So(len(query.Result), ShouldEqual, 1)
permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: 1}
permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: users[0].OrgId}
err = GetDashboardAclInfoList(permQuery)
So(err, ShouldBeNil)
So(len(permQuery.Result), ShouldEqual, 0)
prefsQuery := &m.GetPreferencesQuery{OrgId: users[0].OrgId, UserId: users[0].Id}
prefsQuery := &m.GetPreferencesQuery{OrgId: users[0].OrgId, UserId: users[1].Id}
err = GetPreferences(prefsQuery)
So(err, ShouldBeNil)

View File

@@ -3,6 +3,7 @@ package cloudwatch
import (
"context"
"errors"
"fmt"
"regexp"
"sort"
"strconv"
@@ -144,6 +145,10 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl
return nil, err
}
if endTime.Before(startTime) {
return nil, fmt.Errorf("Invalid time range: End time can't be before start time")
}
params := &cloudwatch.GetMetricStatisticsInput{
Namespace: aws.String(query.Namespace),
MetricName: aws.String(query.MetricName),

View File

@@ -43,12 +43,12 @@ type Client interface {
var NewClient = func(ctx context.Context, ds *models.DataSource, timeRange *tsdb.TimeRange) (Client, error) {
version, err := ds.JsonData.Get("esVersion").Int()
if err != nil {
return nil, fmt.Errorf("eleasticsearch version is required, err=%v", err)
return nil, fmt.Errorf("elasticsearch version is required, err=%v", err)
}
timeField, err := ds.JsonData.Get("timeField").String()
if err != nil {
return nil, fmt.Errorf("eleasticsearch time field name is required, err=%v", err)
return nil, fmt.Errorf("elasticsearch time field name is required, err=%v", err)
}
indexInterval := ds.JsonData.Get("interval").MustString()

View File

@@ -31,6 +31,7 @@ func init() {
renders["mean"] = QueryDefinition{Renderer: functionRenderer}
renders["median"] = QueryDefinition{Renderer: functionRenderer}
renders["sum"] = QueryDefinition{Renderer: functionRenderer}
renders["mode"] = QueryDefinition{Renderer: functionRenderer}
renders["holt_winters"] = QueryDefinition{
Renderer: functionRenderer,

View File

@@ -4,85 +4,39 @@ import (
"testing"
"github.com/grafana/grafana/pkg/tsdb"
. "github.com/smartystreets/goconvey/convey"
)
func TestInfluxdbQueryPart(t *testing.T) {
Convey("Influxdb query parts", t, func() {
tcs := []struct {
mode string
input string
params []string
expected string
}{
{mode: "field", params: []string{"value"}, input: "value", expected: `"value"`},
{mode: "derivative", params: []string{"10s"}, input: "mean(value)", expected: `derivative(mean(value), 10s)`},
{mode: "bottom", params: []string{"3"}, input: "value", expected: `bottom(value, 3)`},
{mode: "time", params: []string{"$interval"}, input: "", expected: `time($interval)`},
{mode: "time", params: []string{"auto"}, input: "", expected: `time($__interval)`},
{mode: "spread", params: []string{}, input: "value", expected: `spread(value)`},
{mode: "math", params: []string{"/ 100"}, input: "mean(value)", expected: `mean(value) / 100`},
{mode: "alias", params: []string{"test"}, input: "mean(value)", expected: `mean(value) AS "test"`},
{mode: "count", params: []string{}, input: "distinct(value)", expected: `count(distinct(value))`},
{mode: "mode", params: []string{}, input: "value", expected: `mode(value)`},
}
queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")}
query := &Query{}
queryContext := &tsdb.TsdbQuery{TimeRange: tsdb.NewTimeRange("5m", "now")}
query := &Query{}
Convey("render field ", func() {
part, err := NewQueryPart("field", []string{"value"})
So(err, ShouldBeNil)
for _, tc := range tcs {
part, err := NewQueryPart(tc.mode, tc.params)
if err != nil {
t.Errorf("Expected NewQueryPart to not return an error. error: %v", err)
}
res := part.Render(query, queryContext, "value")
So(res, ShouldEqual, `"value"`)
})
Convey("render nested part", func() {
part, err := NewQueryPart("derivative", []string{"10s"})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "mean(value)")
So(res, ShouldEqual, "derivative(mean(value), 10s)")
})
Convey("render bottom", func() {
part, err := NewQueryPart("bottom", []string{"3"})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "value")
So(res, ShouldEqual, "bottom(value, 3)")
})
Convey("render time with $interval", func() {
part, err := NewQueryPart("time", []string{"$interval"})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "")
So(res, ShouldEqual, "time($interval)")
})
Convey("render time with auto", func() {
part, err := NewQueryPart("time", []string{"auto"})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "")
So(res, ShouldEqual, "time($__interval)")
})
Convey("render spread", func() {
part, err := NewQueryPart("spread", []string{})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "value")
So(res, ShouldEqual, `spread(value)`)
})
Convey("render suffix", func() {
part, err := NewQueryPart("math", []string{"/ 100"})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "mean(value)")
So(res, ShouldEqual, "mean(value) / 100")
})
Convey("render alias", func() {
part, err := NewQueryPart("alias", []string{"test"})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "mean(value)")
So(res, ShouldEqual, `mean(value) AS "test"`)
})
Convey("render count distinct", func() {
part, err := NewQueryPart("count", []string{})
So(err, ShouldBeNil)
res := part.Render(query, queryContext, "distinct(value)")
So(res, ShouldEqual, `count(distinct(value))`)
})
})
res := part.Render(query, queryContext, tc.input)
if res != tc.expected {
t.Errorf("expected %v to render into %s", tc, tc.expected)
}
}
}

View File

@@ -199,7 +199,7 @@ export function grafanaAppDirective(playlistSrv, contextSrv, $timeout, $rootScop
body.mousemove(userActivityDetected);
body.keydown(userActivityDetected);
// set useCapture = true to catch event here
document.addEventListener('wheel', userActivityDetected, true);
document.addEventListener('wheel', userActivityDetected, { capture: true, passive: true });
// treat tab change as activity
document.addEventListener('visibilitychange', userActivityDetected);

View File

@@ -13,6 +13,10 @@
<i class="fa fa-plus"></i>
Folder
</a>
<a class="btn btn-success" href="{{ctrl.importDashboardUrl()}}" ng-if="ctrl.hasEditPermissionInFolders || ctrl.canSave">
<i class="fa fa-plus"></i>
Import
</a>
</div>
<div class="page-action-bar page-action-bar--narrow" ng-show="ctrl.hasFilters">

View File

@@ -294,6 +294,16 @@ export class ManageDashboardsCtrl {
return url;
}
importDashboardUrl() {
let url = 'dashboard/import';
if (this.folderId) {
url += `?folderId=${this.folderId}`;
}
return url;
}
}
export function manageDashboardsDirective() {

View File

@@ -52,11 +52,11 @@
<a href="dashboards/folder/new" class="search-filter-box-link" ng-if="ctrl.isEditor">
<i class="gicon gicon-folder-new"></i> New folder
</a>
<a href="dashboard/import" class="search-filter-box-link" ng-if="ctrl.isEditor">
<a href="dashboard/import" class="search-filter-box-link" ng-if="ctrl.isEditor || ctrl.hasEditPermissionInFolders">
<i class="gicon gicon-dashboard-import"></i> Import dashboard
</a>
<a class="search-filter-box-link" target="_blank" href="https://grafana.com/dashboards?utm_source=grafana_search">
<img src="public/img/icn-dashboard-tiny.svg" width="20" /> Find dashboards on Grafana.com
<img src="public/img/icn-dashboard-tiny.svg" width="20" /> Find dashboards on Grafana.com
</a>
</div>
</div>

View File

@@ -16,6 +16,7 @@ class Settings {
defaultDatasource: string;
alertingEnabled: boolean;
authProxyEnabled: boolean;
exploreEnabled: boolean;
ldapEnabled: boolean;
oauth: any;
disableUserSignUp: boolean;

View File

@@ -1,6 +1,7 @@
import $ from 'jquery';
import _ from 'lodash';
import config from 'app/core/config';
import coreModule from 'app/core/core_module';
import appEvents from 'app/core/app_events';
import { encodePathComponent } from 'app/core/utils/location_util';
@@ -178,7 +179,7 @@ export class KeybindingSrv {
});
// jump to explore if permissions allow
if (this.contextSrv.isEditor) {
if (this.contextSrv.isEditor && config.exploreEnabled) {
this.bind('x', async () => {
if (dashboard.meta.focusPanelId) {
const panel = dashboard.getPanelById(dashboard.meta.focusPanelId);

View File

@@ -0,0 +1,25 @@
import * as ticks from '../utils/ticks';
describe('ticks', () => {
describe('getFlotTickDecimals()', () => {
let ctx: any = {};
beforeEach(() => {
ctx.axis = {};
});
it('should calculate decimals precision based on graph height', () => {
let dec = ticks.getFlotTickDecimals(0, 10, ctx.axis, 200);
expect(dec.tickDecimals).toBe(1);
expect(dec.scaledDecimals).toBe(1);
dec = ticks.getFlotTickDecimals(0, 100, ctx.axis, 200);
expect(dec.tickDecimals).toBe(0);
expect(dec.scaledDecimals).toBe(-1);
dec = ticks.getFlotTickDecimals(0, 1, ctx.axis, 200);
expect(dec.tickDecimals).toBe(2);
expect(dec.scaledDecimals).toBe(3);
});
});
});

View File

@@ -1,4 +1,5 @@
import TimeSeries from 'app/core/time_series2';
import { updateLegendValues } from 'app/core/time_series2';
describe('TimeSeries', function() {
var points, series;
@@ -311,4 +312,55 @@ describe('TimeSeries', function() {
expect(series.formatValue(-Infinity)).toBe('');
});
});
describe('legend decimals', function() {
let series, panel;
let height = 200;
beforeEach(function() {
testData = {
alias: 'test',
datapoints: [[1, 2], [0, 3], [10, 4], [8, 5]],
};
series = new TimeSeries(testData);
series.getFlotPairs();
panel = {
decimals: null,
yaxes: [
{
decimals: null,
},
],
};
});
it('should set decimals based on Y axis (expect calculated decimals = 1)', function() {
let data = [series];
// Expect ticks with this data will have decimals = 1
updateLegendValues(data, panel, height);
expect(data[0].decimals).toBe(2);
});
it('should set decimals based on Y axis to 0 if calculated decimals = 0)', function() {
testData.datapoints = [[10, 2], [0, 3], [100, 4], [80, 5]];
series = new TimeSeries(testData);
series.getFlotPairs();
let data = [series];
updateLegendValues(data, panel, height);
expect(data[0].decimals).toBe(0);
});
it('should set decimals to Y axis decimals + 1', function() {
panel.yaxes[0].decimals = 2;
let data = [series];
updateLegendValues(data, panel, height);
expect(data[0].decimals).toBe(3);
});
it('should set decimals to legend decimals value if it was set explicitly', function() {
panel.decimals = 3;
let data = [series];
updateLegendValues(data, panel, height);
expect(data[0].decimals).toBe(3);
});
});
});

View File

@@ -44,4 +44,8 @@ export default class TableModel {
this.columnMap[col.text] = col;
}
}
addRow(row) {
this.rows.push(row);
}
}

View File

@@ -23,23 +23,27 @@ function translateFillOption(fill) {
* Calculate decimals for legend and update values for each series.
* @param data series data
* @param panel
* @param height
*/
export function updateLegendValues(data: TimeSeries[], panel) {
export function updateLegendValues(data: TimeSeries[], panel, height) {
for (let i = 0; i < data.length; i++) {
let series = data[i];
let yaxes = panel.yaxes;
const yaxes = panel.yaxes;
const seriesYAxis = series.yaxis || 1;
let axis = yaxes[seriesYAxis - 1];
let { tickDecimals, scaledDecimals } = getFlotTickDecimals(data, axis);
let formater = kbn.valueFormats[panel.yaxes[seriesYAxis - 1].format];
const axis = yaxes[seriesYAxis - 1];
let formater = kbn.valueFormats[axis.format];
// decimal override
if (_.isNumber(panel.decimals)) {
series.updateLegendValues(formater, panel.decimals, null);
} else if (_.isNumber(axis.decimals)) {
series.updateLegendValues(formater, axis.decimals + 1, null);
} else {
// auto decimals
// legend and tooltip gets one more decimal precision
// than graph legend ticks
const { datamin, datamax } = getDataMinMax(data);
let { tickDecimals, scaledDecimals } = getFlotTickDecimals(datamin, datamax, axis, height);
tickDecimals = (tickDecimals || -1) + 1;
series.updateLegendValues(formater, tickDecimals, scaledDecimals + 2);
}

View File

@@ -499,6 +499,7 @@ kbn.valueFormats.watt = kbn.formatBuilders.decimalSIPrefix('W');
kbn.valueFormats.kwatt = kbn.formatBuilders.decimalSIPrefix('W', 1);
kbn.valueFormats.mwatt = kbn.formatBuilders.decimalSIPrefix('W', -1);
kbn.valueFormats.kwattm = kbn.formatBuilders.decimalSIPrefix('W/Min', 1);
kbn.valueFormats.Wm2 = kbn.formatBuilders.fixedUnit('W/m2');
kbn.valueFormats.voltamp = kbn.formatBuilders.decimalSIPrefix('VA');
kbn.valueFormats.kvoltamp = kbn.formatBuilders.decimalSIPrefix('VA', 1);
kbn.valueFormats.voltampreact = kbn.formatBuilders.decimalSIPrefix('var');
@@ -528,6 +529,7 @@ kbn.valueFormats.pressurebar = kbn.formatBuilders.decimalSIPrefix('bar');
kbn.valueFormats.pressurembar = kbn.formatBuilders.decimalSIPrefix('bar', -1);
kbn.valueFormats.pressurekbar = kbn.formatBuilders.decimalSIPrefix('bar', 1);
kbn.valueFormats.pressurehpa = kbn.formatBuilders.fixedUnit('hPa');
kbn.valueFormats.pressurekpa = kbn.formatBuilders.fixedUnit('kPa');
kbn.valueFormats.pressurehg = kbn.formatBuilders.fixedUnit('"Hg');
kbn.valueFormats.pressurepsi = kbn.formatBuilders.scaledUnits(1000, [' psi', ' ksi', ' Mpsi']);
@@ -579,6 +581,9 @@ kbn.valueFormats.flowgpm = kbn.formatBuilders.fixedUnit('gpm');
kbn.valueFormats.flowcms = kbn.formatBuilders.fixedUnit('cms');
kbn.valueFormats.flowcfs = kbn.formatBuilders.fixedUnit('cfs');
kbn.valueFormats.flowcfm = kbn.formatBuilders.fixedUnit('cfm');
kbn.valueFormats.litreh = kbn.formatBuilders.fixedUnit('l/h');
kbn.valueFormats.flowlpm = kbn.formatBuilders.decimalSIPrefix('L');
kbn.valueFormats.flowmlpm = kbn.formatBuilders.decimalSIPrefix('L', -1);
// Angle
kbn.valueFormats.degree = kbn.formatBuilders.fixedUnit('°');
@@ -1014,6 +1019,7 @@ kbn.getUnitFormats = function() {
{ text: 'Watt (W)', value: 'watt' },
{ text: 'Kilowatt (kW)', value: 'kwatt' },
{ text: 'Milliwatt (mW)', value: 'mwatt' },
{ text: 'Watt per square metre (W/m2)', value: 'Wm2' },
{ text: 'Volt-ampere (VA)', value: 'voltamp' },
{ text: 'Kilovolt-ampere (kVA)', value: 'kvoltamp' },
{ text: 'Volt-ampere reactive (var)', value: 'voltampreact' },
@@ -1049,6 +1055,7 @@ kbn.getUnitFormats = function() {
{ text: 'Bars', value: 'pressurebar' },
{ text: 'Kilobars', value: 'pressurekbar' },
{ text: 'Hectopascals', value: 'pressurehpa' },
{ text: 'Kilopascals', value: 'pressurekpa' },
{ text: 'Inches of mercury', value: 'pressurehg' },
{ text: 'PSI', value: 'pressurepsi' },
],
@@ -1069,6 +1076,9 @@ kbn.getUnitFormats = function() {
{ text: 'Cubic meters/sec (cms)', value: 'flowcms' },
{ text: 'Cubic feet/sec (cfs)', value: 'flowcfs' },
{ text: 'Cubic feet/min (cfm)', value: 'flowcfm' },
{ text: 'Litre/hour', value: 'litreh' },
{ text: 'Litre/min (l/min)', value: 'flowlpm' },
{ text: 'milliLitre/min (mL/min)', value: 'flowmlpm' },
],
},
{

View File

@@ -1,5 +1,3 @@
import { getDataMinMax } from 'app/core/time_series2';
/**
* Calculate tick step.
* Implementation from d3-array (ticks.js)
@@ -121,12 +119,10 @@ export function getFlotRange(panelMin, panelMax, datamin, datamax) {
* Calculate tick decimals.
* Implementation from Flot.
*/
export function getFlotTickDecimals(data, axis) {
let { datamin, datamax } = getDataMinMax(data);
let { min, max } = getFlotRange(axis.min, axis.max, datamin, datamax);
let noTicks = 3;
let tickDecimals, maxDec;
let delta = (max - min) / noTicks;
export function getFlotTickDecimals(datamin, datamax, axis, height) {
const { min, max } = getFlotRange(axis.min, axis.max, datamin, datamax);
const noTicks = 0.3 * Math.sqrt(height);
const delta = (max - min) / noTicks;
let dec = -Math.floor(Math.log(delta) / Math.LN10);
let magn = Math.pow(10, -dec);
@@ -139,19 +135,17 @@ export function getFlotTickDecimals(data, axis) {
} else if (norm < 3) {
size = 2;
// special case for 2.5, requires an extra decimal
if (norm > 2.25 && (maxDec == null || dec + 1 <= maxDec)) {
if (norm > 2.25) {
size = 2.5;
++dec;
}
} else if (norm < 7.5) {
size = 5;
} else {
size = 10;
}
size *= magn;
tickDecimals = Math.max(0, maxDec != null ? maxDec : dec);
const tickDecimals = Math.max(0, -Math.floor(Math.log(delta) / Math.LN10) + 1);
// grafana addition
const scaledDecimals = tickDecimals - Math.floor(Math.log(size) / Math.LN10);
return { tickDecimals, scaledDecimals };

View File

@@ -75,6 +75,7 @@ export class AdminEditUserCtrl {
$scope.removeOrgUser = function(orgUser) {
backendSrv.delete('/api/orgs/' + orgUser.orgId + '/users/' + $scope.user_id).then(function() {
$scope.getUser($scope.user_id);
$scope.getUserOrgs($scope.user_id);
});
};
@@ -108,6 +109,7 @@ export class AdminEditUserCtrl {
$scope.newOrg.loginOrEmail = $scope.user.login;
backendSrv.post('/api/orgs/' + orgInfo.id + '/users/', $scope.newOrg).then(function() {
$scope.getUser($scope.user_id);
$scope.getUserOrgs($scope.user_id);
});
};

View File

@@ -18,9 +18,9 @@ describe('ThresholdMapper', () => {
};
var updated = ThresholdMapper.alertToGraphThresholds(panel);
expect(updated).to.be(true);
expect(panel.thresholds[0].op).to.be('gt');
expect(panel.thresholds[0].value).to.be(100);
expect(updated).toBe(true);
expect(panel.thresholds[0].op).toBe('gt');
expect(panel.thresholds[0].value).toBe(100);
});
});
@@ -39,12 +39,12 @@ describe('ThresholdMapper', () => {
};
var updated = ThresholdMapper.alertToGraphThresholds(panel);
expect(updated).to.be(true);
expect(panel.thresholds[0].op).to.be('lt');
expect(panel.thresholds[0].value).to.be(100);
expect(updated).toBe(true);
expect(panel.thresholds[0].op).toBe('lt');
expect(panel.thresholds[0].value).toBe(100);
expect(panel.thresholds[1].op).to.be('gt');
expect(panel.thresholds[1].value).to.be(200);
expect(panel.thresholds[1].op).toBe('gt');
expect(panel.thresholds[1].value).toBe(200);
});
});
@@ -63,12 +63,12 @@ describe('ThresholdMapper', () => {
};
var updated = ThresholdMapper.alertToGraphThresholds(panel);
expect(updated).to.be(true);
expect(panel.thresholds[0].op).to.be('gt');
expect(panel.thresholds[0].value).to.be(100);
expect(updated).toBe(true);
expect(panel.thresholds[0].op).toBe('gt');
expect(panel.thresholds[0].value).toBe(100);
expect(panel.thresholds[1].op).to.be('lt');
expect(panel.thresholds[1].value).to.be(200);
expect(panel.thresholds[1].op).toBe('lt');
expect(panel.thresholds[1].value).toBe(200);
});
});
});

View File

@@ -21,6 +21,9 @@ export class DashboardImportCtrl {
uidValidationError: any;
autoGenerateUid: boolean;
autoGenerateUidValue: string;
folderId: number;
initialFolderTitle: string;
isValidFolderSelection: boolean;
/** @ngInject */
constructor(private backendSrv, private validationSrv, navModelSrv, private $location, $routeParams) {
@@ -31,6 +34,8 @@ export class DashboardImportCtrl {
this.uidExists = false;
this.autoGenerateUid = true;
this.autoGenerateUidValue = 'auto-generated';
this.folderId = $routeParams.folderId ? Number($routeParams.folderId) || 0 : null;
this.initialFolderTitle = 'Select a folder';
// check gnetId in url
if ($routeParams.gnetId) {
@@ -102,8 +107,9 @@ export class DashboardImportCtrl {
this.nameExists = false;
this.validationSrv
.validateNewDashboardName(0, this.dash.title)
.validateNewDashboardName(this.folderId, this.dash.title)
.then(() => {
this.nameExists = false;
this.hasNameValidationError = false;
})
.catch(err => {
@@ -138,6 +144,23 @@ export class DashboardImportCtrl {
});
}
onFolderChange(folder) {
this.folderId = folder.id;
this.titleChanged();
}
onEnterFolderCreation() {
this.inputsValid = false;
}
onExitFolderCreation() {
this.inputValueChanged();
}
isValid() {
return this.inputsValid && this.folderId !== null;
}
saveDashboard() {
var inputs = this.inputs.map(input => {
return {
@@ -153,6 +176,7 @@ export class DashboardImportCtrl {
dashboard: this.dash,
overwrite: true,
inputs: inputs,
folderId: this.folderId,
})
.then(res => {
this.$location.url(res.importedUrl);

View File

@@ -22,10 +22,10 @@ export class DashboardModel {
editable: any;
graphTooltip: any;
time: any;
originalTime: any;
private originalTime: any;
timepicker: any;
templating: any;
originalTemplating: any;
private originalTemplating: any;
annotations: any;
refresh: any;
snapshot: any;
@@ -50,6 +50,8 @@ export class DashboardModel {
meta: true,
panels: true, // needs special handling
templating: true, // needs special handling
originalTime: true,
originalTemplating: true,
};
constructor(data, meta?) {
@@ -70,12 +72,8 @@ export class DashboardModel {
this.editable = data.editable !== false;
this.graphTooltip = data.graphTooltip || 0;
this.time = data.time || { from: 'now-6h', to: 'now' };
this.originalTime = _.cloneDeep(this.time);
this.timepicker = data.timepicker || {};
this.templating = this.ensureListExist(data.templating);
this.originalTemplating = _.map(this.templating.list, variable => {
return { name: variable.name, current: _.clone(variable.current) };
});
this.annotations = this.ensureListExist(data.annotations);
this.refresh = data.refresh;
this.snapshot = data.snapshot;
@@ -85,6 +83,9 @@ export class DashboardModel {
this.gnetId = data.gnetId || null;
this.panels = _.map(data.panels || [], panelData => new PanelModel(panelData));
this.resetOriginalVariables();
this.resetOriginalTime();
this.initMeta(meta);
this.updateSchema(data);
@@ -138,8 +139,8 @@ export class DashboardModel {
// cleans meta data and other non persistent state
getSaveModelClone(options?) {
let defaults = _.defaults(options || {}, {
saveVariables: false,
saveTimerange: false,
saveVariables: true,
saveTimerange: true,
});
// make clone
@@ -153,15 +154,23 @@ export class DashboardModel {
}
// get variable save models
//console.log(this.templating.list);
copy.templating = {
list: _.map(this.templating.list, variable => (variable.getSaveModel ? variable.getSaveModel() : variable)),
};
if (!defaults.saveVariables && copy.templating.list.length === this.originalTemplating.length) {
if (!defaults.saveVariables) {
for (let i = 0; i < copy.templating.list.length; i++) {
if (copy.templating.list[i].name === this.originalTemplating[i].name) {
copy.templating.list[i].current = this.originalTemplating[i].current;
let current = copy.templating.list[i];
let original = _.find(this.originalTemplating, { name: current.name, type: current.type });
if (!original) {
continue;
}
if (current.type === 'adhoc') {
copy.templating.list[i].filters = original.filters;
} else {
copy.templating.list[i].current = original.current;
}
}
}
@@ -785,4 +794,40 @@ export class DashboardModel {
let migrator = new DashboardMigrator(this);
migrator.updateSchema(old);
}
resetOriginalTime() {
this.originalTime = _.cloneDeep(this.time);
}
hasTimeChanged() {
return !_.isEqual(this.time, this.originalTime);
}
resetOriginalVariables() {
this.originalTemplating = _.map(this.templating.list, variable => {
return {
name: variable.name,
type: variable.type,
current: _.cloneDeep(variable.current),
filters: _.cloneDeep(variable.filters),
};
});
}
hasVariableValuesChanged() {
if (this.templating.list.length !== this.originalTemplating.length) {
return false;
}
const updated = _.map(this.templating.list, variable => {
return {
name: variable.name,
type: variable.type,
current: _.cloneDeep(variable.current),
filters: _.cloneDeep(variable.filters),
};
});
return !_.isEqual(updated, this.originalTemplating);
}
}

View File

@@ -154,6 +154,15 @@ export class AddPanelPanel extends React.Component<AddPanelPanelProps, AddPanelP
});
}
filterKeyPress(evt) {
if (evt.key === 'Enter') {
let panel = _.head(this.state.panelPlugins);
if (panel) {
this.onAddPanel(panel);
}
}
}
filterPanels(panels, filter) {
let regex = new RegExp(filter, 'i');
return panels.filter(panel => {
@@ -229,10 +238,12 @@ export class AddPanelPanel extends React.Component<AddPanelPanelProps, AddPanelP
<label className="gf-form gf-form--grow gf-form--has-input-icon">
<input
type="text"
className="gf-form-input max-width-20"
autoFocus
className="gf-form-input gf-form--grow"
placeholder="Panel Search Filter"
value={this.state.filter}
onChange={this.filterChange.bind(this)}
onKeyPress={this.filterKeyPress.bind(this)}
/>
<i className="gf-form-input-icon fa fa-search" />
</label>

View File

@@ -84,15 +84,18 @@ export class DashboardRow extends React.Component<DashboardRowProps, any> {
'fa-chevron-right': this.state.collapsed,
});
let title = templateSrv.replaceWithText(this.props.panel.title, this.props.panel.scopedVars);
const hiddenPanels = this.props.panel.panels ? this.props.panel.panels.length : 0;
const title = templateSrv.replaceWithText(this.props.panel.title, this.props.panel.scopedVars);
const count = this.props.panel.panels ? this.props.panel.panels.length : 0;
const panels = count === 1 ? 'panel' : 'panels';
return (
<div className={classes}>
<a className="dashboard-row__title pointer" onClick={this.toggle}>
<i className={chevronClass} />
{title}
<span className="dashboard-row__panel_count">({hiddenPanels} hidden panels)</span>
<span className="dashboard-row__panel_count">
({count} {panels})
</span>
</a>
{this.dashboard.meta.canEdit === true && (
<div className="dashboard-row__actions">
@@ -104,6 +107,11 @@ export class DashboardRow extends React.Component<DashboardRowProps, any> {
</a>
</div>
)}
{this.state.collapsed === true && (
<div className="dashboard-row__toggle-target" onClick={this.toggle}>
&nbsp;
</div>
)}
<div className="dashboard-row__drag grid-drag-handle" />
</div>
);

View File

@@ -63,8 +63,7 @@ export class DashboardExporter {
);
};
// check up panel data sources
for (let panel of saveModel.panels) {
const processPanel = panel => {
if (panel.datasource !== undefined) {
templateizeDatasourceUsage(panel);
}
@@ -86,6 +85,18 @@ export class DashboardExporter {
version: panelDef.info.version,
};
}
};
// check up panel data sources
for (let panel of saveModel.panels) {
processPanel(panel);
// handle collapsed rows
if (panel.collapsed !== undefined && panel.collapsed === true && panel.panels) {
for (let rowPanel of panel.panels) {
processPanel(rowPanel);
}
}
}
// templatize template vars

View File

@@ -132,23 +132,26 @@ export class FolderPickerCtrl {
}
private loadInitialValue() {
if (this.initialFolderId && this.initialFolderId > 0) {
this.getOptions('').then(result => {
this.folder = _.find(result, { value: this.initialFolderId });
if (!this.folder) {
this.folder = { text: this.initialTitle, value: this.initialFolderId };
}
this.onFolderLoad();
});
} else {
if (this.initialTitle && this.initialFolderId === null) {
this.folder = { text: this.initialTitle, value: null };
} else {
this.folder = { text: this.rootName, value: 0 };
const resetFolder = { text: this.initialTitle, value: null };
const rootFolder = { text: this.rootName, value: 0 };
this.getOptions('').then(result => {
let folder;
if (this.initialFolderId) {
folder = _.find(result, { value: this.initialFolderId });
} else if (this.enableReset && this.initialTitle && this.initialFolderId === null) {
folder = resetFolder;
}
if (!folder) {
if (this.isEditor) {
folder = rootFolder;
} else {
folder = result.length > 0 ? result[0] : resetFolder;
}
}
this.folder = folder;
this.onFolderLoad();
}
});
}
private onFolderLoad() {

View File

@@ -80,6 +80,20 @@
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<folder-picker label-class="width-15"
initial-folder-id="ctrl.folderId"
initial-title="ctrl.initialFolderTitle"
on-change="ctrl.onFolderChange($folder)"
on-load="ctrl.onFolderChange($folder)"
enter-folder-creation="ctrl.onEnterFolderCreation()"
exit-folder-creation="ctrl.onExitFolderCreation()"
enable-create-new="true">
</folder-picker>
</div>
</div>
<div class="gf-form-inline">
<div class="gf-form gf-form--grow">
<span class="gf-form-label width-15">
@@ -132,10 +146,10 @@
</div>
<div class="gf-form-button-row">
<button type="button" class="btn btn-success width-12" ng-click="ctrl.saveDashboard()" ng-hide="ctrl.nameExists || ctrl.uidExists" ng-disabled="!ctrl.inputsValid">
<button type="button" class="btn btn-success width-12" ng-click="ctrl.saveDashboard()" ng-hide="ctrl.nameExists || ctrl.uidExists" ng-disabled="!ctrl.isValid()">
<i class="fa fa-save"></i> Import
</button>
<button type="button" class="btn btn-danger width-12" ng-click="ctrl.saveDashboard()" ng-show="ctrl.nameExists || ctrl.uidExists" ng-disabled="!ctrl.inputsValid">
<button type="button" class="btn btn-danger width-12" ng-click="ctrl.saveDashboard()" ng-show="ctrl.nameExists || ctrl.uidExists" ng-disabled="!ctrl.isValid()">
<i class="fa fa-save"></i> Import (Overwrite)
</button>
<a class="btn btn-link" ng-click="ctrl.back()">Cancel</a>

View File

@@ -1,5 +1,4 @@
import coreModule from 'app/core/core_module';
import _ from 'lodash';
const template = `
<div class="modal-body">
@@ -50,8 +49,17 @@ const template = `
</div>
<div class="gf-form-button-row text-center">
<button type="submit" class="btn btn-success" ng-disabled="ctrl.saveForm.$invalid">Save</button>
<a class="btn btn-link" ng-click="ctrl.dismiss();">Cancel</a>
<button
id="saveBtn"
type="submit"
class="btn btn-success"
ng-class="{'btn-success--processing': ctrl.isSaving}"
ng-disabled="ctrl.saveForm.$invalid || ctrl.isSaving"
>
<span ng-if="!ctrl.isSaving">Save</span>
<span ng-if="ctrl.isSaving === true">Saving...</span>
</button>
<button class="btn btn-inverse" ng-click="ctrl.dismiss();">Cancel</button>
</div>
</form>
</div>
@@ -61,13 +69,13 @@ export class SaveDashboardModalCtrl {
message: string;
saveVariables = false;
saveTimerange = false;
templating: any;
time: any;
originalTime: any;
current = [];
originalCurrent = [];
max: number;
saveForm: any;
isSaving: boolean;
dismiss: () => void;
timeChange = false;
variableValueChange = false;
@@ -76,40 +84,9 @@ export class SaveDashboardModalCtrl {
constructor(private dashboardSrv) {
this.message = '';
this.max = 64;
this.templating = dashboardSrv.dash.templating.list;
this.compareTemplating();
this.compareTime();
}
compareTime() {
if (_.isEqual(this.dashboardSrv.dash.time, this.dashboardSrv.dash.originalTime)) {
this.timeChange = false;
} else {
this.timeChange = true;
}
}
compareTemplating() {
//checks if variables has been added or removed, if so variables will be saved automatically
if (this.dashboardSrv.dash.originalTemplating.length !== this.dashboardSrv.dash.templating.list.length) {
return (this.variableValueChange = false);
}
//checks if variable value has changed
if (this.dashboardSrv.dash.templating.list.length > 0) {
for (let i = 0; i < this.dashboardSrv.dash.templating.list.length; i++) {
if (
this.dashboardSrv.dash.templating.list[i].current.text !==
this.dashboardSrv.dash.originalTemplating[i].current.text
) {
return (this.variableValueChange = true);
}
}
return (this.variableValueChange = false);
} else {
return (this.variableValueChange = false);
}
this.isSaving = false;
this.timeChange = this.dashboardSrv.getCurrent().hasTimeChanged();
this.variableValueChange = this.dashboardSrv.getCurrent().hasVariableValuesChanged();
}
save() {
@@ -126,7 +103,21 @@ export class SaveDashboardModalCtrl {
var dashboard = this.dashboardSrv.getCurrent();
var saveModel = dashboard.getSaveModelClone(options);
return this.dashboardSrv.save(saveModel, options).then(this.dismiss);
this.isSaving = true;
return this.dashboardSrv.save(saveModel, options).then(this.postSave.bind(this, options));
}
postSave(options) {
if (options.saveVariables) {
this.dashboardSrv.getCurrent().resetOriginalVariables();
}
if (options.saveTimerange) {
this.dashboardSrv.getCurrent().resetOriginalTime();
}
this.dismiss();
}
}

View File

@@ -123,6 +123,9 @@ export class ShareSnapshotCtrl {
enable: annotation.enable,
iconColor: annotation.iconColor,
snapshotData: annotation.snapshotData,
type: annotation.type,
builtIn: annotation.builtIn,
hide: annotation.hide,
};
})
.value();

View File

@@ -435,8 +435,67 @@ describe('DashboardModel', function() {
});
});
describe('save variables and timeline', () => {
let model;
describe('Given model with time', () => {
let model: DashboardModel;
beforeEach(() => {
model = new DashboardModel({
time: {
from: 'now-6h',
to: 'now',
},
});
expect(model.hasTimeChanged()).toBeFalsy();
model.time = {
from: 'now-3h',
to: 'now-1h',
};
});
it('hasTimeChanged should be true', () => {
expect(model.hasTimeChanged()).toBeTruthy();
});
it('getSaveModelClone should return original time when saveTimerange=false', () => {
let options = { saveTimerange: false };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.time.from).toBe('now-6h');
expect(saveModel.time.to).toBe('now');
});
it('getSaveModelClone should return updated time when saveTimerange=true', () => {
let options = { saveTimerange: true };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.time.from).toBe('now-3h');
expect(saveModel.time.to).toBe('now-1h');
});
it('hasTimeChanged should be false when reset original time', () => {
model.resetOriginalTime();
expect(model.hasTimeChanged()).toBeFalsy();
});
it('getSaveModelClone should return original time when saveTimerange=false', () => {
let options = { saveTimerange: false };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.time.from).toBe('now-6h');
expect(saveModel.time.to).toBe('now');
});
it('getSaveModelClone should return updated time when saveTimerange=true', () => {
let options = { saveTimerange: true };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.time.from).toBe('now-3h');
expect(saveModel.time.to).toBe('now-1h');
});
});
describe('Given model with template variable of type query', () => {
let model: DashboardModel;
beforeEach(() => {
model = new DashboardModel({
@@ -444,6 +503,7 @@ describe('DashboardModel', function() {
list: [
{
name: 'Server',
type: 'query',
current: {
selected: true,
text: 'server_001',
@@ -452,45 +512,127 @@ describe('DashboardModel', function() {
},
],
},
time: {
from: 'now-6h',
to: 'now',
},
});
model.templating.list[0] = {
name: 'Server',
expect(model.hasVariableValuesChanged()).toBeFalsy();
});
it('hasVariableValuesChanged should be false when adding a template variable', () => {
model.templating.list.push({
name: 'Server2',
type: 'query',
current: {
selected: true,
text: 'server_002',
value: 'server_002',
},
};
model.time = {
from: 'now-3h',
to: 'now',
};
});
expect(model.hasVariableValuesChanged()).toBeFalsy();
});
it('should not save variables and timeline', () => {
let options = {
saveVariables: false,
saveTimerange: false,
};
it('hasVariableValuesChanged should be false when removing existing template variable', () => {
model.templating.list = [];
expect(model.hasVariableValuesChanged()).toBeFalsy();
});
it('hasVariableValuesChanged should be true when changing value of template variable', () => {
model.templating.list[0].current.text = 'server_002';
expect(model.hasVariableValuesChanged()).toBeTruthy();
});
it('getSaveModelClone should return original variable when saveVariables=false', () => {
model.templating.list[0].current.text = 'server_002';
let options = { saveVariables: false };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.templating.list[0].current.text).toBe('server_001');
expect(saveModel.time.from).toBe('now-6h');
});
it('should save variables and timeline', () => {
let options = {
saveVariables: true,
saveTimerange: true,
};
it('getSaveModelClone should return updated variable when saveVariables=true', () => {
model.templating.list[0].current.text = 'server_002';
let options = { saveVariables: true };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.templating.list[0].current.text).toBe('server_002');
expect(saveModel.time.from).toBe('now-3h');
});
});
describe('Given model with template variable of type adhoc', () => {
let model: DashboardModel;
beforeEach(() => {
model = new DashboardModel({
templating: {
list: [
{
name: 'Filter',
type: 'adhoc',
filters: [
{
key: '@hostname',
operator: '=',
value: 'server 20',
},
],
},
],
},
});
expect(model.hasVariableValuesChanged()).toBeFalsy();
});
it('hasVariableValuesChanged should be false when adding a template variable', () => {
model.templating.list.push({
name: 'Filter',
type: 'adhoc',
filters: [
{
key: '@hostname',
operator: '=',
value: 'server 1',
},
],
});
expect(model.hasVariableValuesChanged()).toBeFalsy();
});
it('hasVariableValuesChanged should be false when removing existing template variable', () => {
model.templating.list = [];
expect(model.hasVariableValuesChanged()).toBeFalsy();
});
it('hasVariableValuesChanged should be true when changing value of filter', () => {
model.templating.list[0].filters[0].value = 'server 1';
expect(model.hasVariableValuesChanged()).toBeTruthy();
});
it('hasVariableValuesChanged should be true when adding an additional condition', () => {
model.templating.list[0].filters[0].condition = 'AND';
model.templating.list[0].filters[1] = {
key: '@metric',
operator: '=',
value: 'logins.count',
};
expect(model.hasVariableValuesChanged()).toBeTruthy();
});
it('getSaveModelClone should return original variable when saveVariables=false', () => {
model.templating.list[0].filters[0].value = 'server 1';
let options = { saveVariables: false };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.templating.list[0].filters[0].value).toBe('server 20');
});
it('getSaveModelClone should return updated variable when saveVariables=true', () => {
model.templating.list[0].filters[0].value = 'server 1';
let options = { saveVariables: true };
let saveModel = model.getSaveModelClone(options);
expect(saveModel.templating.list[0].filters[0].value).toBe('server 1');
});
});
});

View File

@@ -0,0 +1,246 @@
jest.mock('app/core/store', () => {
return {
getBool: jest.fn(),
};
});
import _ from 'lodash';
import config from 'app/core/config';
import { DashboardExporter } from '../export/exporter';
import { DashboardModel } from '../dashboard_model';
describe('given dashboard with repeated panels', () => {
var dash, exported;
beforeEach(done => {
dash = {
templating: {
list: [
{
name: 'apps',
type: 'query',
datasource: 'gfdb',
current: { value: 'Asd', text: 'Asd' },
options: [{ value: 'Asd', text: 'Asd' }],
},
{
name: 'prefix',
type: 'constant',
current: { value: 'collectd', text: 'collectd' },
options: [],
},
{
name: 'ds',
type: 'datasource',
query: 'testdb',
current: { value: 'prod', text: 'prod' },
options: [],
},
],
},
annotations: {
list: [
{
name: 'logs',
datasource: 'gfdb',
},
],
},
panels: [
{ id: 6, datasource: 'gfdb', type: 'graph' },
{ id: 7 },
{
id: 8,
datasource: '-- Mixed --',
targets: [{ datasource: 'other' }],
},
{ id: 9, datasource: '$ds' },
{
id: 2,
repeat: 'apps',
datasource: 'gfdb',
type: 'graph',
},
{ id: 3, repeat: null, repeatPanelId: 2 },
{
id: 4,
collapsed: true,
panels: [
{ id: 10, datasource: 'gfdb', type: 'table' },
{ id: 11 },
{
id: 12,
datasource: '-- Mixed --',
targets: [{ datasource: 'other' }],
},
{ id: 13, datasource: '$ds' },
{
id: 14,
repeat: 'apps',
datasource: 'gfdb',
type: 'heatmap',
},
{ id: 15, repeat: null, repeatPanelId: 14 },
],
},
],
};
config.buildInfo = {
version: '3.0.2',
};
//Stubs test function calls
var datasourceSrvStub = { get: jest.fn(arg => getStub(arg)) };
config.panels['graph'] = {
id: 'graph',
name: 'Graph',
info: { version: '1.1.0' },
};
config.panels['table'] = {
id: 'table',
name: 'Table',
info: { version: '1.1.1' },
};
config.panels['heatmap'] = {
id: 'heatmap',
name: 'Heatmap',
info: { version: '1.1.2' },
};
dash = new DashboardModel(dash, {});
var exporter = new DashboardExporter(datasourceSrvStub);
exporter.makeExportable(dash).then(clean => {
exported = clean;
done();
});
});
it('should replace datasource refs', () => {
var panel = exported.panels[0];
expect(panel.datasource).toBe('${DS_GFDB}');
});
it('should replace datasource refs in collapsed row', () => {
var panel = exported.panels[5].panels[0];
expect(panel.datasource).toBe('${DS_GFDB}');
});
it('should replace datasource in variable query', () => {
expect(exported.templating.list[0].datasource).toBe('${DS_GFDB}');
expect(exported.templating.list[0].options.length).toBe(0);
expect(exported.templating.list[0].current.value).toBe(undefined);
expect(exported.templating.list[0].current.text).toBe(undefined);
});
it('should replace datasource in annotation query', () => {
expect(exported.annotations.list[1].datasource).toBe('${DS_GFDB}');
});
it('should add datasource as input', () => {
expect(exported.__inputs[0].name).toBe('DS_GFDB');
expect(exported.__inputs[0].pluginId).toBe('testdb');
expect(exported.__inputs[0].type).toBe('datasource');
});
it('should add datasource to required', () => {
var require = _.find(exported.__requires, { name: 'TestDB' });
expect(require.name).toBe('TestDB');
expect(require.id).toBe('testdb');
expect(require.type).toBe('datasource');
expect(require.version).toBe('1.2.1');
});
it('should not add built in datasources to required', () => {
var require = _.find(exported.__requires, { name: 'Mixed' });
expect(require).toBe(undefined);
});
it('should add datasources used in mixed mode', () => {
var require = _.find(exported.__requires, { name: 'OtherDB' });
expect(require).not.toBe(undefined);
});
it('should add graph panel to required', () => {
var require = _.find(exported.__requires, { name: 'Graph' });
expect(require.name).toBe('Graph');
expect(require.id).toBe('graph');
expect(require.version).toBe('1.1.0');
});
it('should add table panel to required', () => {
var require = _.find(exported.__requires, { name: 'Table' });
expect(require.name).toBe('Table');
expect(require.id).toBe('table');
expect(require.version).toBe('1.1.1');
});
it('should add heatmap panel to required', () => {
var require = _.find(exported.__requires, { name: 'Heatmap' });
expect(require.name).toBe('Heatmap');
expect(require.id).toBe('heatmap');
expect(require.version).toBe('1.1.2');
});
it('should add grafana version', () => {
var require = _.find(exported.__requires, { name: 'Grafana' });
expect(require.type).toBe('grafana');
expect(require.id).toBe('grafana');
expect(require.version).toBe('3.0.2');
});
it('should add constant template variables as inputs', () => {
var input = _.find(exported.__inputs, { name: 'VAR_PREFIX' });
expect(input.type).toBe('constant');
expect(input.label).toBe('prefix');
expect(input.value).toBe('collectd');
});
it('should templatize constant variables', () => {
var variable = _.find(exported.templating.list, { name: 'prefix' });
expect(variable.query).toBe('${VAR_PREFIX}');
expect(variable.current.text).toBe('${VAR_PREFIX}');
expect(variable.current.value).toBe('${VAR_PREFIX}');
expect(variable.options[0].text).toBe('${VAR_PREFIX}');
expect(variable.options[0].value).toBe('${VAR_PREFIX}');
});
});
// Stub responses
var stubs = [];
stubs['gfdb'] = {
name: 'gfdb',
meta: { id: 'testdb', info: { version: '1.2.1' }, name: 'TestDB' },
};
stubs['other'] = {
name: 'other',
meta: { id: 'other', info: { version: '1.2.1' }, name: 'OtherDB' },
};
stubs['-- Mixed --'] = {
name: 'mixed',
meta: {
id: 'mixed',
info: { version: '1.2.1' },
name: 'Mixed',
builtIn: true,
},
};
stubs['-- Grafana --'] = {
name: '-- Grafana --',
meta: {
id: 'grafana',
info: { version: '1.2.1' },
name: 'grafana',
builtIn: true,
},
};
function getStub(arg) {
return Promise.resolve(stubs[arg]);
}

View File

@@ -1,187 +0,0 @@
import { describe, beforeEach, it, sinon, expect } from 'test/lib/common';
import _ from 'lodash';
import config from 'app/core/config';
import { DashboardExporter } from '../export/exporter';
import { DashboardModel } from '../dashboard_model';
describe('given dashboard with repeated panels', function() {
var dash, exported;
beforeEach(done => {
dash = {
templating: { list: [] },
annotations: { list: [] },
};
config.buildInfo = {
version: '3.0.2',
};
dash.templating.list.push({
name: 'apps',
type: 'query',
datasource: 'gfdb',
current: { value: 'Asd', text: 'Asd' },
options: [{ value: 'Asd', text: 'Asd' }],
});
dash.templating.list.push({
name: 'prefix',
type: 'constant',
current: { value: 'collectd', text: 'collectd' },
options: [],
});
dash.templating.list.push({
name: 'ds',
type: 'datasource',
query: 'testdb',
current: { value: 'prod', text: 'prod' },
options: [],
});
dash.annotations.list.push({
name: 'logs',
datasource: 'gfdb',
});
dash.panels = [
{ id: 6, datasource: 'gfdb', type: 'graph' },
{ id: 7 },
{
id: 8,
datasource: '-- Mixed --',
targets: [{ datasource: 'other' }],
},
{ id: 9, datasource: '$ds' },
];
dash.panels.push({
id: 2,
repeat: 'apps',
datasource: 'gfdb',
type: 'graph',
});
dash.panels.push({ id: 3, repeat: null, repeatPanelId: 2 });
var datasourceSrvStub = { get: sinon.stub() };
datasourceSrvStub.get.withArgs('gfdb').returns(
Promise.resolve({
name: 'gfdb',
meta: { id: 'testdb', info: { version: '1.2.1' }, name: 'TestDB' },
})
);
datasourceSrvStub.get.withArgs('other').returns(
Promise.resolve({
name: 'other',
meta: { id: 'other', info: { version: '1.2.1' }, name: 'OtherDB' },
})
);
datasourceSrvStub.get.withArgs('-- Mixed --').returns(
Promise.resolve({
name: 'mixed',
meta: {
id: 'mixed',
info: { version: '1.2.1' },
name: 'Mixed',
builtIn: true,
},
})
);
datasourceSrvStub.get.withArgs('-- Grafana --').returns(
Promise.resolve({
name: '-- Grafana --',
meta: {
id: 'grafana',
info: { version: '1.2.1' },
name: 'grafana',
builtIn: true,
},
})
);
config.panels['graph'] = {
id: 'graph',
name: 'Graph',
info: { version: '1.1.0' },
};
dash = new DashboardModel(dash, {});
var exporter = new DashboardExporter(datasourceSrvStub);
exporter.makeExportable(dash).then(clean => {
exported = clean;
done();
});
});
it('should replace datasource refs', function() {
var panel = exported.panels[0];
expect(panel.datasource).to.be('${DS_GFDB}');
});
it('should replace datasource in variable query', function() {
expect(exported.templating.list[0].datasource).to.be('${DS_GFDB}');
expect(exported.templating.list[0].options.length).to.be(0);
expect(exported.templating.list[0].current.value).to.be(undefined);
expect(exported.templating.list[0].current.text).to.be(undefined);
});
it('should replace datasource in annotation query', function() {
expect(exported.annotations.list[1].datasource).to.be('${DS_GFDB}');
});
it('should add datasource as input', function() {
expect(exported.__inputs[0].name).to.be('DS_GFDB');
expect(exported.__inputs[0].pluginId).to.be('testdb');
expect(exported.__inputs[0].type).to.be('datasource');
});
it('should add datasource to required', function() {
var require = _.find(exported.__requires, { name: 'TestDB' });
expect(require.name).to.be('TestDB');
expect(require.id).to.be('testdb');
expect(require.type).to.be('datasource');
expect(require.version).to.be('1.2.1');
});
it('should not add built in datasources to required', function() {
var require = _.find(exported.__requires, { name: 'Mixed' });
expect(require).to.be(undefined);
});
it('should add datasources used in mixed mode', function() {
var require = _.find(exported.__requires, { name: 'OtherDB' });
expect(require).to.not.be(undefined);
});
it('should add panel to required', function() {
var require = _.find(exported.__requires, { name: 'Graph' });
expect(require.name).to.be('Graph');
expect(require.id).to.be('graph');
expect(require.version).to.be('1.1.0');
});
it('should add grafana version', function() {
var require = _.find(exported.__requires, { name: 'Grafana' });
expect(require.type).to.be('grafana');
expect(require.id).to.be('grafana');
expect(require.version).to.be('3.0.2');
});
it('should add constant template variables as inputs', function() {
var input = _.find(exported.__inputs, { name: 'VAR_PREFIX' });
expect(input.type).to.be('constant');
expect(input.label).to.be('prefix');
expect(input.value).to.be('collectd');
});
it('should templatize constant variables', function() {
var variable = _.find(exported.templating.list, { name: 'prefix' });
expect(variable.query).to.be('${VAR_PREFIX}');
expect(variable.current.text).to.be('${VAR_PREFIX}');
expect(variable.current.value).to.be('${VAR_PREFIX}');
expect(variable.options[0].text).to.be('${VAR_PREFIX}');
expect(variable.options[0].value).to.be('${VAR_PREFIX}');
});
});

View File

@@ -1,128 +1,57 @@
import { SaveDashboardModalCtrl } from '../save_modal';
jest.mock('app/core/services/context_srv', () => ({}));
const setup = (timeChanged, variableValuesChanged, cb) => {
const dash = {
hasTimeChanged: jest.fn().mockReturnValue(timeChanged),
hasVariableValuesChanged: jest.fn().mockReturnValue(variableValuesChanged),
resetOriginalTime: jest.fn(),
resetOriginalVariables: jest.fn(),
getSaveModelClone: jest.fn().mockReturnValue({}),
};
const dashboardSrvMock = {
getCurrent: jest.fn().mockReturnValue(dash),
save: jest.fn().mockReturnValue(Promise.resolve()),
};
const ctrl = new SaveDashboardModalCtrl(dashboardSrvMock);
ctrl.saveForm = {
$valid: true,
};
ctrl.dismiss = () => Promise.resolve();
cb(dash, ctrl, dashboardSrvMock);
};
describe('SaveDashboardModal', () => {
describe('save modal checkboxes', () => {
it('should show checkboxes', () => {
let fakeDashboardSrv = {
dash: {
templating: {
list: [
{
current: {
selected: true,
tags: Array(0),
text: 'server_001',
value: 'server_001',
},
name: 'Server',
},
],
},
originalTemplating: [
{
current: {
selected: true,
text: 'server_002',
value: 'server_002',
},
name: 'Server',
},
],
time: {
from: 'now-3h',
to: 'now',
},
originalTime: {
from: 'now-6h',
to: 'now',
},
},
};
let modal = new SaveDashboardModalCtrl(fakeDashboardSrv);
expect(modal.timeChange).toBe(true);
expect(modal.variableValueChange).toBe(true);
describe('Given time and template variable values have not changed', () => {
setup(false, false, (dash, ctrl: SaveDashboardModalCtrl) => {
it('When creating ctrl should set time and template variable values changed', () => {
expect(ctrl.timeChange).toBeFalsy();
expect(ctrl.variableValueChange).toBeFalsy();
});
});
});
it('should hide checkboxes', () => {
let fakeDashboardSrv = {
dash: {
templating: {
list: [
{
current: {
selected: true,
text: 'server_002',
value: 'server_002',
},
name: 'Server',
},
],
},
originalTemplating: [
{
current: {
selected: true,
text: 'server_002',
value: 'server_002',
},
name: 'Server',
},
],
time: {
from: 'now-3h',
to: 'now',
},
originalTime: {
from: 'now-3h',
to: 'now',
},
},
};
let modal = new SaveDashboardModalCtrl(fakeDashboardSrv);
expect(modal.timeChange).toBe(false);
expect(modal.variableValueChange).toBe(false);
});
describe('Given time and template variable values have changed', () => {
setup(true, true, (dash, ctrl: SaveDashboardModalCtrl) => {
it('When creating ctrl should set time and template variable values changed', () => {
expect(ctrl.timeChange).toBeTruthy();
expect(ctrl.variableValueChange).toBeTruthy();
});
it('should hide variable checkboxes', () => {
let fakeDashboardSrv = {
dash: {
templating: {
list: [
{
current: {
selected: true,
text: 'server_002',
value: 'server_002',
},
name: 'Server',
},
{
current: {
selected: true,
text: 'web_002',
value: 'web_002',
},
name: 'Web',
},
],
},
originalTemplating: [
{
current: {
selected: true,
text: 'server_002',
value: 'server_002',
},
name: 'Server',
},
],
},
};
let modal = new SaveDashboardModalCtrl(fakeDashboardSrv);
expect(modal.variableValueChange).toBe(false);
it('When save time and variable value changes disabled and saving should reset original time and template variable values', async () => {
ctrl.saveTimerange = false;
ctrl.saveVariables = false;
await ctrl.save();
expect(dash.resetOriginalTime).toHaveBeenCalledTimes(0);
expect(dash.resetOriginalVariables).toHaveBeenCalledTimes(0);
});
it('When save time and variable value changes enabled and saving should reset original time and template variable values', async () => {
ctrl.saveTimerange = true;
ctrl.saveVariables = true;
await ctrl.save();
expect(dash.resetOriginalTime).toHaveBeenCalledTimes(1);
expect(dash.resetOriginalVariables).toHaveBeenCalledTimes(1);
});
});
});
});

View File

@@ -48,9 +48,11 @@ function dashLink($compile, $sanitize, linkSrv) {
function update() {
var linkInfo = linkSrv.getAnchorInfo(link);
span.text(linkInfo.title);
anchor.attr('href', linkInfo.href);
sanitizeAnchor();
if (!link.asDropdown) {
anchor.attr('href', linkInfo.href);
sanitizeAnchor();
}
elem.find('a').attr('data-placement', 'bottom');
// tooltip
elem.find('a').tooltip({
title: $sanitize(scope.link.tooltip),

View File

@@ -314,7 +314,7 @@ class MetricsPanelCtrl extends PanelCtrl {
getAdditionalMenuItems() {
const items = [];
if (this.contextSrv.isEditor && this.datasource && this.datasource.supportsExplore) {
if (config.exploreEnabled && this.contextSrv.isEditor && this.datasource && this.datasource.supportsExplore) {
items.push({
text: 'Explore',
click: 'ctrl.explore();',

View File

@@ -25,7 +25,7 @@ var template = `
<li><a ng-click="ctrl.addDataQuery(datasource);"><i class="fa fa-trash"></i> Remove</a></li>
</ul>
</span>
<span class="panel-time-info" ng-show="ctrl.timeInfo"><i class="fa fa-clock-o"></i> {{ctrl.timeInfo}}</span>
<span class="panel-time-info" ng-if="ctrl.timeInfo"><i class="fa fa-clock-o"></i> {{ctrl.timeInfo}}</span>
</span>`;
function renderMenuItem(item, ctrl) {

View File

@@ -1,8 +1,19 @@
jest.mock('app/core/core', () => ({}));
jest.mock('app/core/config', () => {
return {
exploreEnabled: true,
panels: {
test: {
id: 'test',
name: 'test',
},
},
};
});
import { MetricsPanelCtrl } from '../metrics_panel_ctrl';
import q from 'q';
import { PanelModel } from 'app/features/dashboard/panel_model';
import { MetricsPanelCtrl } from '../metrics_panel_ctrl';
describe('MetricsPanelCtrl', () => {
let ctrl;

View File

@@ -1,5 +1,4 @@
import '../playlist_edit_ctrl';
import { describe, beforeEach, it, expect } from 'test/lib/common';
import { PlaylistEditCtrl } from '../playlist_edit_ctrl';
describe('PlaylistEditCtrl', () => {
@@ -20,13 +19,13 @@ describe('PlaylistEditCtrl', () => {
describe('searchresult returns 2 dashboards, ', () => {
it('found dashboard should be 2', () => {
expect(ctx.dashboardresult.length).to.be(2);
expect(ctx.dashboardresult.length).toBe(2);
});
it('filtred result should be 2', () => {
ctx.filterFoundPlaylistItems();
expect(ctx.filteredDashboards.length).to.be(2);
expect(ctx.filteredTags.length).to.be(2);
expect(ctx.filteredDashboards.length).toBe(2);
expect(ctx.filteredTags.length).toBe(2);
});
describe('adds one dashboard to playlist, ', () => {
@@ -37,16 +36,16 @@ describe('PlaylistEditCtrl', () => {
});
it('playlistitems should be increased by one', () => {
expect(ctx.playlistItems.length).to.be(2);
expect(ctx.playlistItems.length).toBe(2);
});
it('filtred playlistitems should be reduced by one', () => {
expect(ctx.filteredDashboards.length).to.be(1);
expect(ctx.filteredTags.length).to.be(1);
expect(ctx.filteredDashboards.length).toBe(1);
expect(ctx.filteredTags.length).toBe(1);
});
it('found dashboard should be 2', () => {
expect(ctx.dashboardresult.length).to.be(2);
expect(ctx.dashboardresult.length).toBe(2);
});
describe('removes one dashboard from playlist, ', () => {
@@ -57,14 +56,14 @@ describe('PlaylistEditCtrl', () => {
});
it('playlistitems should be increased by one', () => {
expect(ctx.playlistItems.length).to.be(0);
expect(ctx.playlistItems.length).toBe(0);
});
it('found dashboard should be 2', () => {
expect(ctx.dashboardresult.length).to.be(2);
expect(ctx.filteredDashboards.length).to.be(2);
expect(ctx.filteredTags.length).to.be(2);
expect(ctx.tagresult.length).to.be(2);
expect(ctx.dashboardresult.length).toBe(2);
expect(ctx.filteredDashboards.length).toBe(2);
expect(ctx.filteredTags.length).toBe(2);
expect(ctx.tagresult.length).toBe(2);
});
});
});

View File

@@ -6,7 +6,6 @@ import coreModule from 'app/core/core_module';
import { importPluginModule } from './plugin_loader';
import { UnknownPanelCtrl } from 'app/plugins/panel/unknown/module';
import { DashboardRowCtrl } from './row_ctrl';
/** @ngInject **/
function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $templateCache) {
@@ -59,15 +58,6 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
}
function loadPanelComponentInfo(scope, attrs) {
if (scope.panel.type === 'row') {
return $q.when({
name: 'dashboard-row',
bindings: { dashboard: '=', panel: '=' },
attrs: { dashboard: 'ctrl.dashboard', panel: 'panel' },
Component: DashboardRowCtrl,
});
}
var componentInfo: any = {
name: 'panel-plugin-' + scope.panel.type,
bindings: { dashboard: '=', panel: '=', row: '=' },
@@ -136,24 +126,6 @@ function pluginDirectiveLoader($compile, datasourceSrv, $rootScope, $q, $http, $
});
});
}
// QueryOptionsCtrl
case 'query-options-ctrl': {
return datasourceSrv.get(scope.ctrl.panel.datasource).then(ds => {
return importPluginModule(ds.meta.module).then((dsModule): any => {
if (!dsModule.QueryOptionsCtrl) {
return { notFound: true };
}
return {
baseUrl: ds.meta.baseUrl,
name: 'query-options-ctrl-' + ds.meta.id,
bindings: { panelCtrl: '=' },
attrs: { 'panel-ctrl': 'ctrl.panelCtrl' },
Component: dsModule.QueryOptionsCtrl,
};
});
});
}
// Annotations
case 'annotations-query-ctrl': {
return importPluginModule(scope.ctrl.currentDatasource.meta.module).then(function(dsModule) {

View File

@@ -5,6 +5,15 @@ import kbn from 'app/core/utils/kbn';
import moment from 'moment';
import angular from 'angular';
import jquery from 'jquery';
// Experimental module exports
import prismjs from 'prismjs';
import slate from 'slate';
import slateReact from 'slate-react';
import slatePlain from 'slate-plain-serializer';
import react from 'react';
import reactDom from 'react-dom';
import config from 'app/core/config';
import TimeSeries from 'app/core/time_series2';
import TableModel from 'app/core/table_model';
@@ -69,6 +78,14 @@ exposeToPlugin('d3', d3);
exposeToPlugin('rxjs/Subject', Subject);
exposeToPlugin('rxjs/Observable', Observable);
// Experimental modules
exposeToPlugin('prismjs', prismjs);
exposeToPlugin('slate', slate);
exposeToPlugin('slate-react', slateReact);
exposeToPlugin('slate-plain-serializer', slatePlain);
exposeToPlugin('react', react);
exposeToPlugin('react-dom', reactDom);
// backward compatible path
exposeToPlugin('vendor/npm/rxjs/Rx', {
Subject: Subject,

View File

@@ -1,100 +0,0 @@
import _ from 'lodash';
export class DashboardRowCtrl {
static template = `
<div class="dashboard-row__center">
<div class="dashboard-row__actions-left">
<i class="fa fa-chevron-down" ng-hide="ctrl.panel.collapse"></i>
<i class="fa fa-chevron-right" ng-show="ctrl.panel.collapse"></i>
</div>
<a class="dashboard-row__title pointer" ng-click="ctrl.toggle()">
<span class="dashboard-row__title-text">
{{ctrl.panel.title | interpolateTemplateVars:this}}
</span>
</a>
<div class="dashboard-row__actions-right">
<a class="pointer" ng-click="ctrl.openSettings()"><span class="fa fa-cog"></i></a>
</div>
</div>
<div class="dashboard-row__panel_count">
({{ctrl.panel.hiddenPanels.length}} hidden panels)
</div>
<div class="dashboard-row__drag grid-drag-handle">
</div>
`;
dashboard: any;
panel: any;
constructor() {
this.panel.hiddenPanels = this.panel.hiddenPanels || [];
}
toggle() {
if (this.panel.collapse) {
let panelIndex = _.indexOf(this.dashboard.panels, this.panel);
for (let child of this.panel.hiddenPanels) {
this.dashboard.panels.splice(panelIndex + 1, 0, child);
child.y = this.panel.y + 1;
console.log('restoring child', child);
}
this.panel.hiddenPanels = [];
this.panel.collapse = false;
return;
}
this.panel.collapse = true;
let foundRow = false;
for (let i = 0; i < this.dashboard.panels.length; i++) {
let panel = this.dashboard.panels[i];
if (panel === this.panel) {
console.log('found row');
foundRow = true;
continue;
}
if (!foundRow) {
continue;
}
if (panel.type === 'row') {
break;
}
this.panel.hiddenPanels.push(panel);
console.log('hiding child', panel.id);
}
for (let hiddenPanel of this.panel.hiddenPanels) {
this.dashboard.removePanel(hiddenPanel, false);
}
}
moveUp() {
// let panelIndex = _.indexOf(this.dashboard.panels, this.panel);
// let rowAbove = null;
// for (let index = panelIndex-1; index > 0; index--) {
// panel = this.dashboard.panels[index];
// if (panel.type === 'row') {
// rowAbove = panel;
// }
// }
//
// if (rowAbove) {
// this.panel.y = rowAbove.y;
// }
}
link(scope, elem) {
elem.addClass('dashboard-row');
scope.$watch('ctrl.panel.collapse', () => {
elem.toggleClass('dashboard-row--collapse', this.panel.collapse === true);
});
}
}

View File

@@ -2,10 +2,6 @@ import { ElasticDatasource } from './datasource';
import { ElasticQueryCtrl } from './query_ctrl';
import { ElasticConfigCtrl } from './config_ctrl';
class ElasticQueryOptionsCtrl {
static templateUrl = 'partials/query.options.html';
}
class ElasticAnnotationsQueryCtrl {
static templateUrl = 'partials/annotations.editor.html';
}
@@ -14,6 +10,5 @@ export {
ElasticDatasource as Datasource,
ElasticQueryCtrl as QueryCtrl,
ElasticConfigCtrl as ConfigCtrl,
ElasticQueryOptionsCtrl as QueryOptionsCtrl,
ElasticAnnotationsQueryCtrl as AnnotationsQueryCtrl,
};

View File

@@ -1,13 +1,12 @@
import { describe, beforeEach, it, expect } from 'test/lib/common';
import { ElasticResponse } from '../elastic_response';
describe('ElasticResponse', function() {
describe('ElasticResponse', () => {
var targets;
var response;
var result;
describe('simple query and count', function() {
beforeEach(function() {
describe('simple query and count', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -39,19 +38,19 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 1 series', function() {
expect(result.data.length).to.be(1);
expect(result.data[0].target).to.be('Count');
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].datapoints[0][0]).to.be(10);
expect(result.data[0].datapoints[0][1]).to.be(1000);
it('should return 1 series', () => {
expect(result.data.length).toBe(1);
expect(result.data[0].target).toBe('Count');
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].datapoints[0][0]).toBe(10);
expect(result.data[0].datapoints[0][1]).toBe(1000);
});
});
describe('simple query count & avg aggregation', function() {
describe('simple query count & avg aggregation', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -85,22 +84,22 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].datapoints[0][0]).to.be(10);
expect(result.data[0].datapoints[0][1]).to.be(1000);
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].datapoints[0][0]).toBe(10);
expect(result.data[0].datapoints[0][1]).toBe(1000);
expect(result.data[1].target).to.be('Average value');
expect(result.data[1].datapoints[0][0]).to.be(88);
expect(result.data[1].datapoints[1][0]).to.be(99);
expect(result.data[1].target).toBe('Average value');
expect(result.data[1].datapoints[0][0]).toBe(88);
expect(result.data[1].datapoints[1][0]).toBe(99);
});
});
describe('single group by query one metric', function() {
describe('single group by query one metric', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -141,18 +140,18 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1');
expect(result.data[1].target).to.be('server2');
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('server1');
expect(result.data[1].target).toBe('server2');
});
});
describe('single group by query two metrics', function() {
describe('single group by query two metrics', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -199,20 +198,20 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(4);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1 Count');
expect(result.data[1].target).to.be('server1 Average @value');
expect(result.data[2].target).to.be('server2 Count');
expect(result.data[3].target).to.be('server2 Average @value');
it('should return 2 series', () => {
expect(result.data.length).toBe(4);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('server1 Count');
expect(result.data[1].target).toBe('server1 Average @value');
expect(result.data[2].target).toBe('server2 Count');
expect(result.data[3].target).toBe('server2 Average @value');
});
});
describe('with percentiles ', function() {
describe('with percentiles ', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -246,21 +245,21 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('p75');
expect(result.data[1].target).to.be('p90');
expect(result.data[0].datapoints[0][0]).to.be(3.3);
expect(result.data[0].datapoints[0][1]).to.be(1000);
expect(result.data[1].datapoints[1][0]).to.be(4.5);
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('p75');
expect(result.data[1].target).toBe('p90');
expect(result.data[0].datapoints[0][0]).toBe(3.3);
expect(result.data[0].datapoints[0][1]).toBe(1000);
expect(result.data[1].datapoints[1][0]).toBe(4.5);
});
});
describe('with extended_stats', function() {
describe('with extended_stats', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -322,21 +321,21 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 4 series', function() {
expect(result.data.length).to.be(4);
expect(result.data[0].datapoints.length).to.be(1);
expect(result.data[0].target).to.be('server1 Max');
expect(result.data[1].target).to.be('server1 Std Dev Upper');
it('should return 4 series', () => {
expect(result.data.length).toBe(4);
expect(result.data[0].datapoints.length).toBe(1);
expect(result.data[0].target).toBe('server1 Max');
expect(result.data[1].target).toBe('server1 Std Dev Upper');
expect(result.data[0].datapoints[0][0]).to.be(10.2);
expect(result.data[1].datapoints[0][0]).to.be(3);
expect(result.data[0].datapoints[0][0]).toBe(10.2);
expect(result.data[1].datapoints[0][0]).toBe(3);
});
});
describe('single group by with alias pattern', function() {
describe('single group by with alias pattern', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -385,19 +384,19 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(3);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('server1 Count and {{not_exist}} server1');
expect(result.data[1].target).to.be('server2 Count and {{not_exist}} server2');
expect(result.data[2].target).to.be('0 Count and {{not_exist}} 0');
it('should return 2 series', () => {
expect(result.data.length).toBe(3);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('server1 Count and {{not_exist}} server1');
expect(result.data[1].target).toBe('server2 Count and {{not_exist}} server2');
expect(result.data[2].target).toBe('0 Count and {{not_exist}} 0');
});
});
describe('histogram response', function() {
describe('histogram response', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -420,16 +419,16 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return table with byte and count', function() {
expect(result.data[0].rows.length).to.be(3);
expect(result.data[0].columns).to.eql([{ text: 'bytes', filterable: true }, { text: 'Count' }]);
it('should return table with byte and count', () => {
expect(result.data[0].rows.length).toBe(3);
expect(result.data[0].columns).toEqual([{ text: 'bytes', filterable: true }, { text: 'Count' }]);
});
});
describe('with two filters agg', function() {
describe('with two filters agg', () => {
var result;
beforeEach(function() {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -472,16 +471,16 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return 2 series', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].target).to.be('@metric:cpu');
expect(result.data[1].target).to.be('@metric:logins.count');
it('should return 2 series', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].target).toBe('@metric:cpu');
expect(result.data[1].target).toBe('@metric:logins.count');
});
});
describe('with dropfirst and last aggregation', function() {
beforeEach(function() {
describe('with dropfirst and last aggregation', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -528,14 +527,14 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should remove first and last value', function() {
expect(result.data.length).to.be(2);
expect(result.data[0].datapoints.length).to.be(1);
it('should remove first and last value', () => {
expect(result.data.length).toBe(2);
expect(result.data[0].datapoints.length).toBe(1);
});
});
describe('No group by time', function() {
beforeEach(function() {
describe('No group by time', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -570,21 +569,21 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return table', function() {
expect(result.data.length).to.be(1);
expect(result.data[0].type).to.be('table');
expect(result.data[0].rows.length).to.be(2);
expect(result.data[0].rows[0][0]).to.be('server-1');
expect(result.data[0].rows[0][1]).to.be(1000);
expect(result.data[0].rows[0][2]).to.be(369);
it('should return table', () => {
expect(result.data.length).toBe(1);
expect(result.data[0].type).toBe('table');
expect(result.data[0].rows.length).toBe(2);
expect(result.data[0].rows[0][0]).toBe('server-1');
expect(result.data[0].rows[0][1]).toBe(1000);
expect(result.data[0].rows[0][2]).toBe(369);
expect(result.data[0].rows[1][0]).to.be('server-2');
expect(result.data[0].rows[1][1]).to.be(2000);
expect(result.data[0].rows[1][0]).toBe('server-2');
expect(result.data[0].rows[1][1]).toBe(2000);
});
});
describe('Multiple metrics of same type', function() {
beforeEach(function() {
describe('Multiple metrics of same type', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -615,15 +614,15 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should include field in metric name', function() {
expect(result.data[0].type).to.be('table');
expect(result.data[0].rows[0][1]).to.be(1000);
expect(result.data[0].rows[0][2]).to.be(3000);
it('should include field in metric name', () => {
expect(result.data[0].type).toBe('table');
expect(result.data[0].rows[0][1]).toBe(1000);
expect(result.data[0].rows[0][2]).toBe(3000);
});
});
describe('Raw documents query', function() {
beforeEach(function() {
describe('Raw documents query', () => {
beforeEach(() => {
targets = [
{
refId: 'A',
@@ -657,13 +656,13 @@ describe('ElasticResponse', function() {
result = new ElasticResponse(targets, response).getTimeSeries();
});
it('should return docs', function() {
expect(result.data.length).to.be(1);
expect(result.data[0].type).to.be('docs');
expect(result.data[0].total).to.be(100);
expect(result.data[0].datapoints.length).to.be(2);
expect(result.data[0].datapoints[0].sourceProp).to.be('asd');
expect(result.data[0].datapoints[0].fieldProp).to.be('field');
it('should return docs', () => {
expect(result.data.length).toBe(1);
expect(result.data[0].type).toBe('docs');
expect(result.data[0].total).toBe(100);
expect(result.data[0].datapoints.length).toBe(2);
expect(result.data[0].datapoints[0].sourceProp).toBe('asd');
expect(result.data[0].datapoints[0].fieldProp).toBe('field');
});
});
});

View File

@@ -1,38 +1,37 @@
///<amd-dependency path="test/specs/helpers" name="helpers" />
import { describe, it, expect } from 'test/lib/common';
import moment from 'moment';
import { IndexPattern } from '../index_pattern';
describe('IndexPattern', function() {
describe('when getting index for today', function() {
it('should return correct index name', function() {
describe('IndexPattern', () => {
describe('when getting index for today', () => {
test('should return correct index name', () => {
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
var expected = 'asd-' + moment.utc().format('YYYY.MM.DD');
expect(pattern.getIndexForToday()).to.be(expected);
expect(pattern.getIndexForToday()).toBe(expected);
});
});
describe('when getting index list for time range', function() {
describe('no interval', function() {
it('should return correct index', function() {
describe('when getting index list for time range', () => {
describe('no interval', () => {
test('should return correct index', () => {
var pattern = new IndexPattern('my-metrics', null);
var from = new Date(2015, 4, 30, 1, 2, 3);
var to = new Date(2015, 5, 1, 12, 5, 6);
expect(pattern.getIndexList(from, to)).to.eql('my-metrics');
expect(pattern.getIndexList(from, to)).toEqual('my-metrics');
});
});
describe('daily', function() {
it('should return correct index list', function() {
describe('daily', () => {
test('should return correct index list', () => {
var pattern = new IndexPattern('[asd-]YYYY.MM.DD', 'Daily');
var from = new Date(1432940523000);
var to = new Date(1433153106000);
var expected = ['asd-2015.05.29', 'asd-2015.05.30', 'asd-2015.05.31', 'asd-2015.06.01'];
expect(pattern.getIndexList(from, to)).to.eql(expected);
expect(pattern.getIndexList(from, to)).toEqual(expected);
});
});
});

View File

@@ -1,25 +1,24 @@
import { describe, beforeEach, it, expect } from 'test/lib/common';
import { ElasticQueryBuilder } from '../query_builder';
describe('ElasticQueryBuilder', function() {
describe('ElasticQueryBuilder', () => {
var builder;
beforeEach(function() {
beforeEach(() => {
builder = new ElasticQueryBuilder({ timeField: '@timestamp' });
});
it('with defaults', function() {
it('with defaults', () => {
var query = builder.build({
metrics: [{ type: 'Count', id: '0' }],
timeField: '@timestamp',
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
});
expect(query.query.bool.filter[0].range['@timestamp'].gte).to.be('$timeFrom');
expect(query.aggs['1'].date_histogram.extended_bounds.min).to.be('$timeFrom');
expect(query.query.bool.filter[0].range['@timestamp'].gte).toBe('$timeFrom');
expect(query.aggs['1'].date_histogram.extended_bounds.min).toBe('$timeFrom');
});
it('with defaults on es5.x', function() {
it('with defaults on es5.x', () => {
var builder_5x = new ElasticQueryBuilder({
timeField: '@timestamp',
esVersion: 5,
@@ -31,11 +30,11 @@ describe('ElasticQueryBuilder', function() {
bucketAggs: [{ type: 'date_histogram', field: '@timestamp', id: '1' }],
});
expect(query.query.bool.filter[0].range['@timestamp'].gte).to.be('$timeFrom');
expect(query.aggs['1'].date_histogram.extended_bounds.min).to.be('$timeFrom');
expect(query.query.bool.filter[0].range['@timestamp'].gte).toBe('$timeFrom');
expect(query.aggs['1'].date_histogram.extended_bounds.min).toBe('$timeFrom');
});
it('with multiple bucket aggs', function() {
it('with multiple bucket aggs', () => {
var query = builder.build({
metrics: [{ type: 'count', id: '1' }],
timeField: '@timestamp',
@@ -45,11 +44,11 @@ describe('ElasticQueryBuilder', function() {
],
});
expect(query.aggs['2'].terms.field).to.be('@host');
expect(query.aggs['2'].aggs['3'].date_histogram.field).to.be('@timestamp');
expect(query.aggs['2'].terms.field).toBe('@host');
expect(query.aggs['2'].aggs['3'].date_histogram.field).toBe('@timestamp');
});
it('with select field', function() {
it('with select field', () => {
var query = builder.build(
{
metrics: [{ type: 'avg', field: '@value', id: '1' }],
@@ -60,10 +59,10 @@ describe('ElasticQueryBuilder', function() {
);
var aggs = query.aggs['2'].aggs;
expect(aggs['1'].avg.field).to.be('@value');
expect(aggs['1'].avg.field).toBe('@value');
});
it('with term agg and order by metric agg', function() {
it('with term agg and order by metric agg', () => {
var query = builder.build(
{
metrics: [{ type: 'count', id: '1' }, { type: 'avg', field: '@value', id: '5' }],
@@ -84,11 +83,11 @@ describe('ElasticQueryBuilder', function() {
var firstLevel = query.aggs['2'];
var secondLevel = firstLevel.aggs['3'];
expect(firstLevel.aggs['5'].avg.field).to.be('@value');
expect(secondLevel.aggs['5'].avg.field).to.be('@value');
expect(firstLevel.aggs['5'].avg.field).toBe('@value');
expect(secondLevel.aggs['5'].avg.field).toBe('@value');
});
it('with metric percentiles', function() {
it('with metric percentiles', () => {
var query = builder.build(
{
metrics: [
@@ -109,11 +108,11 @@ describe('ElasticQueryBuilder', function() {
var firstLevel = query.aggs['3'];
expect(firstLevel.aggs['1'].percentiles.field).to.be('@load_time');
expect(firstLevel.aggs['1'].percentiles.percents).to.eql([1, 2, 3, 4]);
expect(firstLevel.aggs['1'].percentiles.field).toBe('@load_time');
expect(firstLevel.aggs['1'].percentiles.percents).toEqual([1, 2, 3, 4]);
});
it('with filters aggs', function() {
it('with filters aggs', () => {
var query = builder.build({
metrics: [{ type: 'count', id: '1' }],
timeField: '@timestamp',
@@ -129,12 +128,12 @@ describe('ElasticQueryBuilder', function() {
],
});
expect(query.aggs['2'].filters.filters['@metric:cpu'].query_string.query).to.be('@metric:cpu');
expect(query.aggs['2'].filters.filters['@metric:logins.count'].query_string.query).to.be('@metric:logins.count');
expect(query.aggs['2'].aggs['4'].date_histogram.field).to.be('@timestamp');
expect(query.aggs['2'].filters.filters['@metric:cpu'].query_string.query).toBe('@metric:cpu');
expect(query.aggs['2'].filters.filters['@metric:logins.count'].query_string.query).toBe('@metric:logins.count');
expect(query.aggs['2'].aggs['4'].date_histogram.field).toBe('@timestamp');
});
it('with filters aggs on es5.x', function() {
it('with filters aggs on es5.x', () => {
var builder_5x = new ElasticQueryBuilder({
timeField: '@timestamp',
esVersion: 5,
@@ -154,31 +153,31 @@ describe('ElasticQueryBuilder', function() {
],
});
expect(query.aggs['2'].filters.filters['@metric:cpu'].query_string.query).to.be('@metric:cpu');
expect(query.aggs['2'].filters.filters['@metric:logins.count'].query_string.query).to.be('@metric:logins.count');
expect(query.aggs['2'].aggs['4'].date_histogram.field).to.be('@timestamp');
expect(query.aggs['2'].filters.filters['@metric:cpu'].query_string.query).toBe('@metric:cpu');
expect(query.aggs['2'].filters.filters['@metric:logins.count'].query_string.query).toBe('@metric:logins.count');
expect(query.aggs['2'].aggs['4'].date_histogram.field).toBe('@timestamp');
});
it('with raw_document metric', function() {
it('with raw_document metric', () => {
var query = builder.build({
metrics: [{ type: 'raw_document', id: '1', settings: {} }],
timeField: '@timestamp',
bucketAggs: [],
});
expect(query.size).to.be(500);
expect(query.size).toBe(500);
});
it('with raw_document metric size set', function() {
it('with raw_document metric size set', () => {
var query = builder.build({
metrics: [{ type: 'raw_document', id: '1', settings: { size: 1337 } }],
timeField: '@timestamp',
bucketAggs: [],
});
expect(query.size).to.be(1337);
expect(query.size).toBe(1337);
});
it('with moving average', function() {
it('with moving average', () => {
var query = builder.build({
metrics: [
{
@@ -198,12 +197,12 @@ describe('ElasticQueryBuilder', function() {
var firstLevel = query.aggs['3'];
expect(firstLevel.aggs['2']).not.to.be(undefined);
expect(firstLevel.aggs['2'].moving_avg).not.to.be(undefined);
expect(firstLevel.aggs['2'].moving_avg.buckets_path).to.be('3');
expect(firstLevel.aggs['2']).not.toBe(undefined);
expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined);
expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('3');
});
it('with broken moving average', function() {
it('with broken moving average', () => {
var query = builder.build({
metrics: [
{
@@ -227,13 +226,13 @@ describe('ElasticQueryBuilder', function() {
var firstLevel = query.aggs['3'];
expect(firstLevel.aggs['2']).not.to.be(undefined);
expect(firstLevel.aggs['2'].moving_avg).not.to.be(undefined);
expect(firstLevel.aggs['2'].moving_avg.buckets_path).to.be('3');
expect(firstLevel.aggs['4']).to.be(undefined);
expect(firstLevel.aggs['2']).not.toBe(undefined);
expect(firstLevel.aggs['2'].moving_avg).not.toBe(undefined);
expect(firstLevel.aggs['2'].moving_avg.buckets_path).toBe('3');
expect(firstLevel.aggs['4']).toBe(undefined);
});
it('with derivative', function() {
it('with derivative', () => {
var query = builder.build({
metrics: [
{
@@ -252,12 +251,12 @@ describe('ElasticQueryBuilder', function() {
var firstLevel = query.aggs['3'];
expect(firstLevel.aggs['2']).not.to.be(undefined);
expect(firstLevel.aggs['2'].derivative).not.to.be(undefined);
expect(firstLevel.aggs['2'].derivative.buckets_path).to.be('3');
expect(firstLevel.aggs['2']).not.toBe(undefined);
expect(firstLevel.aggs['2'].derivative).not.toBe(undefined);
expect(firstLevel.aggs['2'].derivative.buckets_path).toBe('3');
});
it('with histogram', function() {
it('with histogram', () => {
var query = builder.build({
metrics: [{ id: '1', type: 'count' }],
bucketAggs: [
@@ -271,13 +270,13 @@ describe('ElasticQueryBuilder', function() {
});
var firstLevel = query.aggs['3'];
expect(firstLevel.histogram.field).to.be('bytes');
expect(firstLevel.histogram.interval).to.be(10);
expect(firstLevel.histogram.min_doc_count).to.be(2);
expect(firstLevel.histogram.missing).to.be(5);
expect(firstLevel.histogram.field).toBe('bytes');
expect(firstLevel.histogram.interval).toBe(10);
expect(firstLevel.histogram.min_doc_count).toBe(2);
expect(firstLevel.histogram.missing).toBe(5);
});
it('with adhoc filters', function() {
it('with adhoc filters', () => {
var query = builder.build(
{
metrics: [{ type: 'Count', id: '0' }],
@@ -295,12 +294,12 @@ describe('ElasticQueryBuilder', function() {
]
);
expect(query.query.bool.must[0].match_phrase['key1'].query).to.be('value1');
expect(query.query.bool.must[1].match_phrase['key2'].query).to.be('value2');
expect(query.query.bool.must_not[0].match_phrase['key2'].query).to.be('value2');
expect(query.query.bool.filter[2].range['key3'].lt).to.be('value3');
expect(query.query.bool.filter[3].range['key4'].gt).to.be('value4');
expect(query.query.bool.filter[4].regexp['key5']).to.be('value5');
expect(query.query.bool.filter[5].bool.must_not.regexp['key6']).to.be('value6');
expect(query.query.bool.must[0].match_phrase['key1'].query).toBe('value1');
expect(query.query.bool.must[1].match_phrase['key2'].query).toBe('value2');
expect(query.query.bool.must_not[0].match_phrase['key2'].query).toBe('value2');
expect(query.query.bool.filter[2].range['key3'].lt).toBe('value3');
expect(query.query.bool.filter[3].range['key4'].gt).toBe('value4');
expect(query.query.bool.filter[4].regexp['key5']).toBe('value5');
expect(query.query.bool.filter[5].bool.must_not.regexp['key6']).toBe('value6');
});
});

View File

@@ -0,0 +1,93 @@
import * as queryDef from '../query_def';
describe('ElasticQueryDef', () => {
describe('getPipelineAggOptions', () => {
describe('with zero targets', () => {
var response = queryDef.getPipelineAggOptions([]);
test('should return zero', () => {
expect(response.length).toBe(0);
});
});
describe('with count and sum targets', () => {
var targets = {
metrics: [{ type: 'count', field: '@value' }, { type: 'sum', field: '@value' }],
};
var response = queryDef.getPipelineAggOptions(targets);
test('should return zero', () => {
expect(response.length).toBe(2);
});
});
describe('with count and moving average targets', () => {
var targets = {
metrics: [{ type: 'count', field: '@value' }, { type: 'moving_avg', field: '@value' }],
};
var response = queryDef.getPipelineAggOptions(targets);
test('should return one', () => {
expect(response.length).toBe(1);
});
});
describe('with derivatives targets', () => {
var targets = {
metrics: [{ type: 'derivative', field: '@value' }],
};
var response = queryDef.getPipelineAggOptions(targets);
test('should return zero', () => {
expect(response.length).toBe(0);
});
});
});
describe('isPipelineMetric', () => {
describe('moving_avg', () => {
var result = queryDef.isPipelineAgg('moving_avg');
test('is pipe line metric', () => {
expect(result).toBe(true);
});
});
describe('count', () => {
var result = queryDef.isPipelineAgg('count');
test('is not pipe line metric', () => {
expect(result).toBe(false);
});
});
});
describe('pipeline aggs depending on esverison', () => {
describe('using esversion undefined', () => {
test('should not get pipeline aggs', () => {
expect(queryDef.getMetricAggTypes(undefined).length).toBe(9);
});
});
describe('using esversion 1', () => {
test('should not get pipeline aggs', () => {
expect(queryDef.getMetricAggTypes(1).length).toBe(9);
});
});
describe('using esversion 2', () => {
test('should get pipeline aggs', () => {
expect(queryDef.getMetricAggTypes(2).length).toBe(11);
});
});
describe('using esversion 5', () => {
test('should get pipeline aggs', () => {
expect(queryDef.getMetricAggTypes(5).length).toBe(11);
});
});
});
});

View File

@@ -1,95 +0,0 @@
import { describe, it, expect } from 'test/lib/common';
import * as queryDef from '../query_def';
describe('ElasticQueryDef', function() {
describe('getPipelineAggOptions', function() {
describe('with zero targets', function() {
var response = queryDef.getPipelineAggOptions([]);
it('should return zero', function() {
expect(response.length).to.be(0);
});
});
describe('with count and sum targets', function() {
var targets = {
metrics: [{ type: 'count', field: '@value' }, { type: 'sum', field: '@value' }],
};
var response = queryDef.getPipelineAggOptions(targets);
it('should return zero', function() {
expect(response.length).to.be(2);
});
});
describe('with count and moving average targets', function() {
var targets = {
metrics: [{ type: 'count', field: '@value' }, { type: 'moving_avg', field: '@value' }],
};
var response = queryDef.getPipelineAggOptions(targets);
it('should return one', function() {
expect(response.length).to.be(1);
});
});
describe('with derivatives targets', function() {
var targets = {
metrics: [{ type: 'derivative', field: '@value' }],
};
var response = queryDef.getPipelineAggOptions(targets);
it('should return zero', function() {
expect(response.length).to.be(0);
});
});
});
describe('isPipelineMetric', function() {
describe('moving_avg', function() {
var result = queryDef.isPipelineAgg('moving_avg');
it('is pipe line metric', function() {
expect(result).to.be(true);
});
});
describe('count', function() {
var result = queryDef.isPipelineAgg('count');
it('is not pipe line metric', function() {
expect(result).to.be(false);
});
});
});
describe('pipeline aggs depending on esverison', function() {
describe('using esversion undefined', function() {
it('should not get pipeline aggs', function() {
expect(queryDef.getMetricAggTypes(undefined).length).to.be(9);
});
});
describe('using esversion 1', function() {
it('should not get pipeline aggs', function() {
expect(queryDef.getMetricAggTypes(1).length).to.be(9);
});
});
describe('using esversion 2', function() {
it('should get pipeline aggs', function() {
expect(queryDef.getMetricAggTypes(2).length).to.be(11);
});
});
describe('using esversion 5', function() {
it('should get pipeline aggs', function() {
expect(queryDef.getMetricAggTypes(5).length).to.be(11);
});
});
});
});

View File

@@ -1,4 +1,3 @@
import { describe, it, expect } from 'test/lib/common';
import { InfluxQueryBuilder } from '../query_builder';
describe('InfluxQueryBuilder', function() {

View File

@@ -64,7 +64,8 @@ function graphDirective(timeSrv, popoverSrv, contextSrv) {
}
annotations = ctrl.annotations || [];
buildFlotPairs(data);
updateLegendValues(data, panel);
const graphHeight = elem.height();
updateLegendValues(data, panel, graphHeight);
ctrl.events.emit('render-legend');
});

View File

@@ -1,11 +1,11 @@
import { describe, beforeEach, it, sinon, expect } from '../../../../../test/lib/common';
jest.mock('app/core/core', () => ({}));
import $ from 'jquery';
import GraphTooltip from '../graph_tooltip';
var scope = {
appEvent: sinon.spy(),
onAppEvent: sinon.spy(),
appEvent: jest.fn(),
onAppEvent: jest.fn(),
ctrl: {},
};
@@ -47,22 +47,22 @@ describe('findHoverIndexFromData', function() {
it('should return 0 if posX out of lower bounds', function() {
var posX = 99;
expect(tooltip.findHoverIndexFromData(posX, series)).to.be(0);
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(0);
});
it('should return n - 1 if posX out of upper bounds', function() {
var posX = 108;
expect(tooltip.findHoverIndexFromData(posX, series)).to.be(series.data.length - 1);
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(series.data.length - 1);
});
it('should return i if posX in series', function() {
var posX = 104;
expect(tooltip.findHoverIndexFromData(posX, series)).to.be(4);
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(4);
});
it('should return i if posX not in series and i + 1 > posX', function() {
var posX = 104.9;
expect(tooltip.findHoverIndexFromData(posX, series)).to.be(4);
expect(tooltip.findHoverIndexFromData(posX, series)).toBe(4);
});
});
@@ -73,17 +73,17 @@ describeSharedTooltip('steppedLine false, stack false', function(ctx) {
});
it('should return 2 series', function() {
expect(ctx.results.length).to.be(2);
expect(ctx.results.length).toBe(2);
});
it('should add time to results array', function() {
expect(ctx.results.time).to.be(10);
expect(ctx.results.time).toBe(10);
});
it('should set value and hoverIndex', function() {
expect(ctx.results[0].value).to.be(15);
expect(ctx.results[1].value).to.be(2);
expect(ctx.results[0].hoverIndex).to.be(0);
expect(ctx.results[0].value).toBe(15);
expect(ctx.results[1].value).toBe(2);
expect(ctx.results[0].hoverIndex).toBe(0);
});
});
@@ -121,7 +121,7 @@ describeSharedTooltip('steppedLine false, stack true, individual false', functio
});
it('should show stacked value', function() {
expect(ctx.results[1].value).to.be(17);
expect(ctx.results[1].value).toBe(17);
});
});
@@ -152,7 +152,7 @@ describeSharedTooltip('steppedLine false, stack true, individual false, series s
});
it('should not show stacked value', function() {
expect(ctx.results[1].value).to.be(2);
expect(ctx.results[1].value).toBe(2);
});
});
@@ -184,6 +184,6 @@ describeSharedTooltip('steppedLine false, stack true, individual true', function
});
it('should not show stacked value', function() {
expect(ctx.results[1].value).to.be(2);
expect(ctx.results[1].value).toBe(2);
});
});

View File

@@ -1,5 +1,3 @@
import { describe, it, expect } from '../../../../../test/lib/common';
import angular from 'angular';
import TimeSeries from 'app/core/time_series2';
import { ThresholdManager } from '../threshold_manager';
@@ -38,16 +36,16 @@ describe('ThresholdManager', function() {
it('should add fill for threshold with fill: true', function() {
var markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).to.be(300);
expect(markings[0].yaxis.to).to.be(Infinity);
expect(markings[0].color).to.be('rgba(234, 112, 112, 0.12)');
expect(markings[0].yaxis.from).toBe(300);
expect(markings[0].yaxis.to).toBe(Infinity);
expect(markings[0].color).toBe('rgba(234, 112, 112, 0.12)');
});
it('should add line', function() {
var markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).to.be(300);
expect(markings[1].yaxis.to).to.be(300);
expect(markings[1].color).to.be('rgba(237, 46, 24, 0.60)');
expect(markings[1].yaxis.from).toBe(300);
expect(markings[1].yaxis.to).toBe(300);
expect(markings[1].color).toBe('rgba(237, 46, 24, 0.60)');
});
});
@@ -59,14 +57,14 @@ describe('ThresholdManager', function() {
it('should add fill for first thresholds to next threshold', function() {
var markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).to.be(200);
expect(markings[0].yaxis.to).to.be(300);
expect(markings[0].yaxis.from).toBe(200);
expect(markings[0].yaxis.to).toBe(300);
});
it('should add fill for last thresholds to infinity', function() {
var markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).to.be(300);
expect(markings[1].yaxis.to).to.be(Infinity);
expect(markings[1].yaxis.from).toBe(300);
expect(markings[1].yaxis.to).toBe(Infinity);
});
});
@@ -78,14 +76,14 @@ describe('ThresholdManager', function() {
it('should add fill for first thresholds to next threshold', function() {
var markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).to.be(300);
expect(markings[0].yaxis.to).to.be(200);
expect(markings[0].yaxis.from).toBe(300);
expect(markings[0].yaxis.to).toBe(200);
});
it('should add fill for last thresholds to itself', function() {
var markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).to.be(200);
expect(markings[1].yaxis.to).to.be(200);
expect(markings[1].yaxis.from).toBe(200);
expect(markings[1].yaxis.to).toBe(200);
});
});
@@ -97,14 +95,14 @@ describe('ThresholdManager', function() {
it('should add fill for first thresholds to next threshold', function() {
var markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).to.be(300);
expect(markings[0].yaxis.to).to.be(Infinity);
expect(markings[0].yaxis.from).toBe(300);
expect(markings[0].yaxis.to).toBe(Infinity);
});
it('should add fill for last thresholds to itself', function() {
var markings = ctx.options.grid.markings;
expect(markings[1].yaxis.from).to.be(200);
expect(markings[1].yaxis.to).to.be(-Infinity);
expect(markings[1].yaxis.from).toBe(200);
expect(markings[1].yaxis.to).toBe(-Infinity);
});
});
@@ -130,12 +128,12 @@ describe('ThresholdManager', function() {
it('should add first threshold for left axis', function() {
var markings = ctx.options.grid.markings;
expect(markings[0].yaxis.from).to.be(100);
expect(markings[0].yaxis.from).toBe(100);
});
it('should add second threshold for right axis', function() {
var markings = ctx.options.grid.markings;
expect(markings[1].y2axis.from).to.be(200);
expect(markings[1].y2axis.from).toBe(200);
});
});
});

View File

@@ -1,5 +1,3 @@
import { describe, it, expect } from 'test/lib/common';
import { getColorForValue } from '../module';
describe('grafanaSingleStat', function() {
@@ -11,31 +9,31 @@ describe('grafanaSingleStat', function() {
};
it('5 should return green', () => {
expect(getColorForValue(data, 5)).to.be('green');
expect(getColorForValue(data, 5)).toBe('green');
});
it('19.9 should return green', () => {
expect(getColorForValue(data, 19.9)).to.be('green');
expect(getColorForValue(data, 19.9)).toBe('green');
});
it('20 should return yellow', () => {
expect(getColorForValue(data, 20)).to.be('yellow');
expect(getColorForValue(data, 20)).toBe('yellow');
});
it('20.1 should return yellow', () => {
expect(getColorForValue(data, 20.1)).to.be('yellow');
expect(getColorForValue(data, 20.1)).toBe('yellow');
});
it('25 should return yellow', () => {
expect(getColorForValue(data, 25)).to.be('yellow');
expect(getColorForValue(data, 25)).toBe('yellow');
});
it('50 should return red', () => {
expect(getColorForValue(data, 50)).to.be('red');
expect(getColorForValue(data, 50)).toBe('red');
});
it('55 should return red', () => {
expect(getColorForValue(data, 55)).to.be('red');
expect(getColorForValue(data, 55)).toBe('red');
});
});
});
@@ -47,15 +45,15 @@ describe('grafanaSingleStat', function() {
};
it('-30 should return green', () => {
expect(getColorForValue(data, -30)).to.be('green');
expect(getColorForValue(data, -30)).toBe('green');
});
it('1 should return green', () => {
expect(getColorForValue(data, 1)).to.be('yellow');
expect(getColorForValue(data, 1)).toBe('yellow');
});
it('22 should return green', () => {
expect(getColorForValue(data, 22)).to.be('red');
expect(getColorForValue(data, 22)).toBe('red');
});
});
@@ -66,7 +64,7 @@ describe('grafanaSingleStat', function() {
};
it('-30 should return green', () => {
expect(getColorForValue(data, -26)).to.be('yellow');
expect(getColorForValue(data, -26)).toBe('yellow');
});
});
});

View File

@@ -100,6 +100,22 @@
// Success appears as green
.btn-success {
@include buttonBackground($btn-success-bg, $btn-success-bg-hl);
&--processing {
@include button-outline-variant($gray-1);
@include box-shadow(none);
cursor: default;
&:hover,
&:active,
&:active:hover,
&:focus,
&:disabled {
color: $gray-1;
background-color: transparent;
border-color: $gray-1;
}
}
}
// Info appears as a neutral blue
.btn-secondary {

View File

@@ -11,11 +11,20 @@
display: inline-block;
}
.dashboard-row__drag,
.dashboard-row__actions {
.dashboard-row__drag {
visibility: visible;
opacity: 1;
}
.dashboard-row__actions {
visibility: hidden;
}
.dashboard-row__toggle-target {
flex: 1;
cursor: pointer;
margin-right: 15px;
}
}
&:hover {
@@ -43,7 +52,6 @@
color: $text-muted;
visibility: hidden;
opacity: 0;
flex-grow: 1;
transition: 200ms opacity ease-in 200ms;
a {
@@ -69,7 +77,7 @@
cursor: move;
width: 1rem;
height: 100%;
background: url("../img/grab_dark.svg") no-repeat 50% 50%;
background: url('../img/grab_dark.svg') no-repeat 50% 50%;
background-size: 8px;
visibility: hidden;
position: absolute;

View File

@@ -32,7 +32,7 @@
.panel-alert-icon:before {
content: '\e611';
position: relative;
top: 1px;
top: 5px;
left: -3px;
}
}

View File

@@ -68,17 +68,26 @@ div.flot-text {
font-weight: $font-weight-semi-bold;
position: relative;
width: 100%;
display: block;
padding-bottom: 2px;
display: flex;
flex-wrap: nowrap;
justify-content: center;
padding: 4px 0 4px;
}
.panel-title-text {
text-overflow: ellipsis;
overflow: hidden;
white-space: nowrap;
max-width: calc(100% - 38px);
cursor: pointer;
font-weight: $font-weight-semi-bold;
&:hover {
color: $link-hover-color;
}
.panel-has-alert & {
max-width: calc(100% - 54px);
}
}
.panel-menu-container {
@@ -97,7 +106,7 @@ div.flot-text {
width: 16px;
height: 16px;
left: 1px;
top: 4px;
top: 2px;
&:hover {
color: $link-hover-color;
@@ -114,8 +123,6 @@ div.flot-text {
}
.panel-header {
text-align: center;
&:hover {
transition: background-color 0.1s ease-in-out;
background-color: $panel-header-hover-bg;
@@ -156,8 +163,8 @@ div.flot-text {
.fa {
position: relative;
top: -4px;
left: -6px;
top: -2px;
left: 6px;
font-size: 75%;
z-index: 1;
}
@@ -174,7 +181,7 @@ div.flot-text {
display: block;
@include panel-corner-color(lighten($panel-bg, 4%));
.fa {
left: -5px;
left: 4px;
}
.fa:before {
content: '\f08e';

View File

@@ -13,6 +13,13 @@ $login-border: #8daac5;
justify-content: center;
background-image: url(../img/heatmap_bg_test.svg);
background-size: cover;
color: #d8d9da;
& a {
color: #d8d9da !important;
}
& .btn-primary {
@include buttonBackground(#ff6600, #bc3e06);
}
}
input:-webkit-autofill,
@@ -25,8 +32,9 @@ textarea:-webkit-autofill:focus,
select:-webkit-autofill,
select:-webkit-autofill:hover,
select:-webkit-autofill:focus {
-webkit-box-shadow: 0 0 0px 1000px $black inset;
-webkit-text-fill-color: $gray-7 !important;
-webkit-box-shadow: 0 0 0px 1000px $black inset !important;
-webkit-text-fill-color: #fbfbfb !important;
box-shadow: 0 0 0px 1000px $black inset;
}
.login-form-group {
@@ -46,6 +54,8 @@ select:-webkit-autofill:focus {
border: 1px solid $login-border;
border-radius: 4px;
opacity: 0.6;
background: $black;
color: #fbfbfb;
&:focus {
border: 1px solid $login-border;
@@ -103,7 +113,7 @@ select:-webkit-autofill:focus {
}
.icon-gf-grafana_wordmark {
color: $link-color;
color: darken($white, 11%);
position: relative;
font-size: 2rem;
text-shadow: 2px 2px 5px rgba(0, 0, 0, 0.3);

View File

@@ -1,5 +1,3 @@
import {describe, beforeEach, it, expect} from 'test/lib/common';
import {SemVersion, isVersionGtOrEq} from 'app/core/utils/version';
describe("SemVersion", () => {
@@ -8,10 +6,10 @@ describe("SemVersion", () => {
describe('parsing', () => {
it('should parse version properly', () => {
let semver = new SemVersion(version);
expect(semver.major).to.be(1);
expect(semver.minor).to.be(0);
expect(semver.patch).to.be(0);
expect(semver.meta).to.be('alpha.1');
expect(semver.major).toBe(1);
expect(semver.minor).toBe(0);
expect(semver.patch).toBe(0);
expect(semver.meta).toBe('alpha.1');
});
});
@@ -30,7 +28,7 @@ describe("SemVersion", () => {
{value: '3.5', expected: false},
];
cases.forEach((testCase) => {
expect(semver.isGtOrEq(testCase.value)).to.be(testCase.expected);
expect(semver.isGtOrEq(testCase.value)).toBe(testCase.expected);
});
});
});
@@ -48,7 +46,7 @@ describe("SemVersion", () => {
{values: ['3.4.5', '3.5'], expected: false},
];
cases.forEach((testCase) => {
expect(isVersionGtOrEq(testCase.values[0], testCase.values[1])).to.be(testCase.expected);
expect(isVersionGtOrEq(testCase.values[0], testCase.values[1])).toBe(testCase.expected);
});
});
});

View File

@@ -1,4 +1,22 @@
import { configure } from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
import 'jquery';
import $ from 'jquery';
import 'angular';
import angular from 'angular';
angular.module('grafana', ['ngRoute']);
angular.module('grafana.services', ['ngRoute', '$strap.directives']);
angular.module('grafana.panels', []);
angular.module('grafana.controllers', []);
angular.module('grafana.directives', []);
angular.module('grafana.filters', []);
angular.module('grafana.routes', ['ngRoute']);
jest.mock('app/core/core', () => ({}));
jest.mock('app/features/plugins/plugin_loader', () => ({}));
configure({ adapter: new Adapter() });
var global = <any>window;
global.$ = global.jQuery = $;

View File

@@ -23,7 +23,7 @@ module.exports = merge(common, {
},
resolve: {
extensions: ['.scss', '.ts', '.tsx', '.es6', '.js', '.json', '.svg', '.woff2', '.png'],
extensions: ['.scss', '.ts', '.tsx', '.es6', '.js', '.json', '.svg', '.woff2', '.png', '.html'],
},
devtool: 'eval-source-map',

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +1,5 @@
// +build cgo
// Copyright (C) 2014 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
//
// Use of this source code is governed by an MIT-style
@@ -7,10 +9,13 @@ package sqlite3
/*
#cgo CFLAGS: -std=gnu99
#cgo CFLAGS: -DSQLITE_ENABLE_RTREE -DSQLITE_THREADSAFE=1
#cgo CFLAGS: -DSQLITE_ENABLE_RTREE -DSQLITE_THREADSAFE=1 -DHAVE_USLEEP=1
#cgo linux,!android CFLAGS: -DHAVE_PREAD64=1 -DHAVE_PWRITE64=1
#cgo CFLAGS: -DSQLITE_ENABLE_FTS3 -DSQLITE_ENABLE_FTS3_PARENTHESIS -DSQLITE_ENABLE_FTS4_UNICODE61
#cgo CFLAGS: -DSQLITE_TRACE_SIZE_LIMIT=15
#cgo CFLAGS: -DSQLITE_OMIT_DEPRECATED
#cgo CFLAGS: -DSQLITE_DISABLE_INTRINSIC
#cgo CFLAGS: -DSQLITE_ENABLE_UPDATE_DELETE_LIMIT
#cgo CFLAGS: -Wno-deprecated-declarations
#ifndef USE_LIBSQLITE3
#include <sqlite3-binding.h>
@@ -170,6 +175,12 @@ var SQLiteTimestampFormats = []string{
"2006-01-02",
}
const (
columnDate string = "date"
columnDatetime string = "datetime"
columnTimestamp string = "timestamp"
)
func init() {
sql.Register("sqlite3", &SQLiteDriver{})
}
@@ -389,7 +400,7 @@ func (c *SQLiteConn) RegisterCommitHook(callback func() int) {
if callback == nil {
C.sqlite3_commit_hook(c.db, nil, nil)
} else {
C.sqlite3_commit_hook(c.db, (*[0]byte)(unsafe.Pointer(C.commitHookTrampoline)), unsafe.Pointer(newHandle(c, callback)))
C.sqlite3_commit_hook(c.db, (*[0]byte)(C.commitHookTrampoline), unsafe.Pointer(newHandle(c, callback)))
}
}
@@ -402,7 +413,7 @@ func (c *SQLiteConn) RegisterRollbackHook(callback func()) {
if callback == nil {
C.sqlite3_rollback_hook(c.db, nil, nil)
} else {
C.sqlite3_rollback_hook(c.db, (*[0]byte)(unsafe.Pointer(C.rollbackHookTrampoline)), unsafe.Pointer(newHandle(c, callback)))
C.sqlite3_rollback_hook(c.db, (*[0]byte)(C.rollbackHookTrampoline), unsafe.Pointer(newHandle(c, callback)))
}
}
@@ -419,7 +430,7 @@ func (c *SQLiteConn) RegisterUpdateHook(callback func(int, string, string, int64
if callback == nil {
C.sqlite3_update_hook(c.db, nil, nil)
} else {
C.sqlite3_update_hook(c.db, (*[0]byte)(unsafe.Pointer(C.updateHookTrampoline)), unsafe.Pointer(newHandle(c, callback)))
C.sqlite3_update_hook(c.db, (*[0]byte)(C.updateHookTrampoline), unsafe.Pointer(newHandle(c, callback)))
}
}
@@ -501,7 +512,7 @@ func (c *SQLiteConn) RegisterFunc(name string, impl interface{}, pure bool) erro
}
func sqlite3CreateFunction(db *C.sqlite3, zFunctionName *C.char, nArg C.int, eTextRep C.int, pApp uintptr, xFunc unsafe.Pointer, xStep unsafe.Pointer, xFinal unsafe.Pointer) C.int {
return C._sqlite3_create_function(db, zFunctionName, nArg, eTextRep, C.uintptr_t(pApp), (*[0]byte)(unsafe.Pointer(xFunc)), (*[0]byte)(unsafe.Pointer(xStep)), (*[0]byte)(unsafe.Pointer(xFinal)))
return C._sqlite3_create_function(db, zFunctionName, nArg, eTextRep, C.uintptr_t(pApp), (*[0]byte)(xFunc), (*[0]byte)(xStep), (*[0]byte)(xFinal))
}
// RegisterAggregator makes a Go type available as a SQLite aggregation function.
@@ -780,6 +791,8 @@ func errorString(err Error) string {
// Enable or disable enforcement of foreign keys. X can be 1 or 0.
// _recursive_triggers=X
// Enable or disable recursive triggers. X can be 1 or 0.
// _mutex=XXX
// Specify mutex mode. XXX can be "no", "full".
func (d *SQLiteDriver) Open(dsn string) (driver.Conn, error) {
if C.sqlite3_threadsafe() == 0 {
return nil, errors.New("sqlite library was not compiled for thread-safe operation")
@@ -790,6 +803,7 @@ func (d *SQLiteDriver) Open(dsn string) (driver.Conn, error) {
busyTimeout := 5000
foreignKeys := -1
recursiveTriggers := -1
mutex := C.int(C.SQLITE_OPEN_FULLMUTEX)
pos := strings.IndexRune(dsn, '?')
if pos >= 1 {
params, err := url.ParseQuery(dsn[pos+1:])
@@ -856,6 +870,18 @@ func (d *SQLiteDriver) Open(dsn string) (driver.Conn, error) {
}
}
// _mutex
if val := params.Get("_mutex"); val != "" {
switch val {
case "no":
mutex = C.SQLITE_OPEN_NOMUTEX
case "full":
mutex = C.SQLITE_OPEN_FULLMUTEX
default:
return nil, fmt.Errorf("Invalid _mutex: %v", val)
}
}
if !strings.HasPrefix(dsn, "file:") {
dsn = dsn[:pos]
}
@@ -865,9 +891,7 @@ func (d *SQLiteDriver) Open(dsn string) (driver.Conn, error) {
name := C.CString(dsn)
defer C.free(unsafe.Pointer(name))
rv := C._sqlite3_open_v2(name, &db,
C.SQLITE_OPEN_FULLMUTEX|
C.SQLITE_OPEN_READWRITE|
C.SQLITE_OPEN_CREATE,
mutex|C.SQLITE_OPEN_READWRITE|C.SQLITE_OPEN_CREATE,
nil)
if rv != 0 {
return nil, Error{Code: ErrNo(rv)}
@@ -1070,7 +1094,7 @@ func (s *SQLiteStmt) bind(args []namedValue) error {
case int64:
rv = C.sqlite3_bind_int64(s.s, n, C.sqlite3_int64(v))
case bool:
if bool(v) {
if v {
rv = C.sqlite3_bind_int(s.s, n, 1)
} else {
rv = C.sqlite3_bind_int(s.s, n, 0)
@@ -1121,18 +1145,20 @@ func (s *SQLiteStmt) query(ctx context.Context, args []namedValue) (driver.Rows,
done: make(chan struct{}),
}
go func(db *C.sqlite3) {
select {
case <-ctx.Done():
if ctxdone := ctx.Done(); ctxdone != nil {
go func(db *C.sqlite3) {
select {
case <-ctxdone:
select {
case <-rows.done:
default:
C.sqlite3_interrupt(db)
rows.Close()
}
case <-rows.done:
default:
C.sqlite3_interrupt(db)
rows.Close()
}
case <-rows.done:
}
}(s.c.db)
}(s.c.db)
}
return rows, nil
}
@@ -1166,19 +1192,21 @@ func (s *SQLiteStmt) exec(ctx context.Context, args []namedValue) (driver.Result
return nil, err
}
done := make(chan struct{})
defer close(done)
go func(db *C.sqlite3) {
select {
case <-done:
case <-ctx.Done():
if ctxdone := ctx.Done(); ctxdone != nil {
done := make(chan struct{})
defer close(done)
go func(db *C.sqlite3) {
select {
case <-done:
default:
C.sqlite3_interrupt(db)
case <-ctxdone:
select {
case <-done:
default:
C.sqlite3_interrupt(db)
}
}
}
}(s.c.db)
}(s.c.db)
}
var rowid, changes C.longlong
rv := C._sqlite3_step(s.s, &rowid, &changes)
@@ -1272,7 +1300,7 @@ func (rc *SQLiteRows) Next(dest []driver.Value) error {
case C.SQLITE_INTEGER:
val := int64(C.sqlite3_column_int64(rc.s.s, C.int(i)))
switch rc.decltype[i] {
case "timestamp", "datetime", "date":
case columnTimestamp, columnDatetime, columnDate:
var t time.Time
// Assume a millisecond unix timestamp if it's 13 digits -- too
// large to be a reasonable timestamp in seconds.
@@ -1303,10 +1331,10 @@ func (rc *SQLiteRows) Next(dest []driver.Value) error {
n := int(C.sqlite3_column_bytes(rc.s.s, C.int(i)))
switch dest[i].(type) {
case sql.RawBytes:
dest[i] = (*[1 << 30]byte)(unsafe.Pointer(p))[0:n]
dest[i] = (*[1 << 30]byte)(p)[0:n]
default:
slice := make([]byte, n)
copy(slice[:], (*[1 << 30]byte)(unsafe.Pointer(p))[0:n])
copy(slice[:], (*[1 << 30]byte)(p)[0:n])
dest[i] = slice
}
case C.SQLITE_NULL:
@@ -1319,7 +1347,7 @@ func (rc *SQLiteRows) Next(dest []driver.Value) error {
s := C.GoStringN((*C.char)(unsafe.Pointer(C.sqlite3_column_text(rc.s.s, C.int(i)))), C.int(n))
switch rc.decltype[i] {
case "timestamp", "datetime", "date":
case columnTimestamp, columnDatetime, columnDate:
var t time.Time
s = strings.TrimSuffix(s, "Z")
for _, format := range SQLiteTimestampFormats {

View File

@@ -1,3 +1,5 @@
// +build cgo
// Copyright (C) 2014 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
//
// Use of this source code is governed by an MIT-style

12
vendor/github.com/mattn/go-sqlite3/sqlite3_solaris.go generated vendored Normal file
View File

@@ -0,0 +1,12 @@
// Copyright (C) 2018 Yasuhiro Matsumoto <mattn.jp@gmail.com>.
//
// Use of this source code is governed by an MIT-style
// license that can be found in the LICENSE file.
// +build solaris
package sqlite3
/*
#cgo CFLAGS: -D__EXTENSIONS__=1
*/
import "C"

View File

@@ -28,10 +28,10 @@ import (
// Trace... constants identify the possible events causing callback invocation.
// Values are same as the corresponding SQLite Trace Event Codes.
const (
TraceStmt = C.SQLITE_TRACE_STMT
TraceProfile = C.SQLITE_TRACE_PROFILE
TraceRow = C.SQLITE_TRACE_ROW
TraceClose = C.SQLITE_TRACE_CLOSE
TraceStmt = uint32(C.SQLITE_TRACE_STMT)
TraceProfile = uint32(C.SQLITE_TRACE_PROFILE)
TraceRow = uint32(C.SQLITE_TRACE_ROW)
TraceClose = uint32(C.SQLITE_TRACE_CLOSE)
)
type TraceInfo struct {
@@ -71,7 +71,7 @@ type TraceUserCallback func(TraceInfo) int
type TraceConfig struct {
Callback TraceUserCallback
EventMask C.uint
EventMask uint32
WantExpandedSQL bool
}
@@ -105,6 +105,8 @@ func traceCallbackTrampoline(
// Parameter named 'X' in SQLite docs (eXtra event data?):
xValue unsafe.Pointer) C.int {
eventCode := uint32(traceEventCode)
if ctx == nil {
panic(fmt.Sprintf("No context (ev 0x%x)", traceEventCode))
}
@@ -114,7 +116,7 @@ func traceCallbackTrampoline(
var traceConf TraceConfig
var found bool
if traceEventCode == TraceClose {
if eventCode == TraceClose {
// clean up traceMap: 'pop' means get and delete
traceConf, found = popTraceMapping(connHandle)
} else {
@@ -123,16 +125,16 @@ func traceCallbackTrampoline(
if !found {
panic(fmt.Sprintf("Mapping not found for handle 0x%x (ev 0x%x)",
connHandle, traceEventCode))
connHandle, eventCode))
}
var info TraceInfo
info.EventCode = uint32(traceEventCode)
info.EventCode = eventCode
info.AutoCommit = (int(C.sqlite3_get_autocommit(contextDB)) != 0)
info.ConnHandle = connHandle
switch traceEventCode {
switch eventCode {
case TraceStmt:
info.StmtHandle = uintptr(p)
@@ -183,7 +185,7 @@ func traceCallbackTrampoline(
// registering this callback trampoline with SQLite --- for cleanup.
// In the future there may be more events forced to "selected" in SQLite
// for the driver's needs.
if traceConf.EventMask&traceEventCode == 0 {
if traceConf.EventMask&eventCode == 0 {
return 0
}

Some files were not shown because too many files have changed in this diff Show More