diff --git a/.editorconfig b/.editorconfig index 84bbaf8a420..cbde126ca6c 100644 --- a/.editorconfig +++ b/.editorconfig @@ -8,7 +8,6 @@ charset = utf-8 trim_trailing_whitespace = true insert_final_newline = true max_line_length = 120 -insert_final_newline = true [*.go] indent_style = tab diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index 082482fcb74..8086a6b86e5 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -5,12 +5,12 @@ Read before posting: - Checkout How to troubleshoot metric query issues: https://community.grafana.com/t/how-to-troubleshoot-metric-query-issues/50 Please include this information: -- What Grafana version are you using? -- What datasource are you using? -- What OS are you running grafana on? -- What did you do? -- What was the expected result? -- What happened instead? -- If related to metric query / data viz: - - Include raw network request & response: get by opening Chrome Dev Tools (F12, Ctrl+Shift+I on windows, Cmd+Opt+I on Mac), go the network tab. +### What Grafana version are you using? +### What datasource are you using? +### What OS are you running grafana on? +### What did you do? +### What was the expected result? +### What happened instead? +### If related to metric query / data viz: +### Include raw network request & response: get by opening Chrome Dev Tools (F12, Ctrl+Shift+I on windows, Cmd+Opt+I on Mac), go to the network tab. 
diff --git a/.gitignore b/.gitignore index 12e7bed3f46..72f6684ef20 100644 --- a/.gitignore +++ b/.gitignore @@ -10,8 +10,8 @@ awsconfig /public_gen /public/vendor/npm /tmp -vendor/phantomjs/phantomjs -vendor/phantomjs/phantomjs.exe +tools/phantomjs/phantomjs +tools/phantomjs/phantomjs.exe profile.out coverage.txt @@ -60,3 +60,4 @@ debug.test /vendor/**/*_test.go /vendor/**/.editorconfig /vendor/**/appengine* +*.orig \ No newline at end of file diff --git a/.jshintrc b/.jshintrc index 3fb6501c2f9..1d8fad63173 100644 --- a/.jshintrc +++ b/.jshintrc @@ -4,7 +4,7 @@ "bitwise":false, "curly": true, "eqnull": true, - "strict": true, + "strict": false, "devel": true, "eqeqeq": true, "forin": false, diff --git a/CHANGELOG.md b/CHANGELOG.md index 51bb6f0c199..e4de9ccb9b2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,33 +1,119 @@ -# 5.0.0 (unreleased / master branch) +# 5.1.0 (unreleased) -Grafana v5.0 is going to be the biggest and most foundational release Grafana has ever had, coming with a ton of UX improvements, a new dashboard grid engine, dashboard folders, user teams and permissions. Checkout out this [video preview](https://www.youtube.com/watch?v=BC_YRNpqj5k) of Grafana v5. 
+* **Prometheus**: The heatmap panel now supports Prometheus histograms [#10009](https://github.com/grafana/grafana/issues/10009) +* **Postgres/MySQL**: Ability to insert 0s or nulls for missing intervals [#9487](https://github.com/grafana/grafana/issues/9487), thanks [@svenklemm](https://github.com/svenklemm) +* **Graph**: Thresholds for Right Y axis [#7107](https://github.com/grafana/grafana/issues/7107), thx [@ilgizar](https://github.com/ilgizar) +* **Graph**: Support multiple series stacking in histogram mode [#8151](https://github.com/grafana/grafana/issues/8151), thx [@mtanda](https://github.com/mtanda) +* **Alerting**: Pausing/un-pausing alerts now updates new_state_date [#10942](https://github.com/grafana/grafana/pull/10942) +* **Templating**: Add comma templating format [#10632](https://github.com/grafana/grafana/issues/10632), thx [@mtanda](https://github.com/mtanda) +* **Prometheus**: Support POST for query and query_range [#9859](https://github.com/grafana/grafana/pull/9859), thx [@mtanda](https://github.com/mtanda) -### New Features +### Minor +* **OpsGenie**: Add triggered alerts as description [#11046](https://github.com/grafana/grafana/pull/11046), thx [@llamashoes](https://github.com/llamashoes) +* **Cloudwatch**: Support high resolution metrics [#10925](https://github.com/grafana/grafana/pull/10925), thx [@mtanda](https://github.com/mtanda) +* **Cloudwatch**: Add dimension filtering to CloudWatch `dimension_values()` [#10029](https://github.com/grafana/grafana/issues/10029), thx [@willyhutw](https://github.com/willyhutw) +* **Units**: Second to HH:mm:ss formatter [#11107](https://github.com/grafana/grafana/issues/11107), thx [@gladdiologist](https://github.com/gladdiologist) +* **Singlestat**: Add color to prefix and postfix in singlestat panel [#11143](https://github.com/grafana/grafana/pull/11143), thx [@ApsOps](https://github.com/ApsOps) + +# 5.0.2 (unreleased) + +* **Teams**: Remove quota restrictions from teams 
[#11220](https://github.com/grafana/grafana/issues/11220) + +# 5.0.1 (2018-03-08) + +* **Postgres**: PostgreSQL error when using ipv6 address as hostname in connection string [#11055](https://github.com/grafana/grafana/issues/11055), thanks [@svenklemm](https://github.com/svenklemm) +* **Dashboards**: Changing templated value from dropdown is causing unsaved changes [#11063](https://github.com/grafana/grafana/issues/11063) +* **Prometheus**: Fixes bundled Prometheus 2.0 dashboard [#11016](https://github.com/grafana/grafana/issues/11016), thx [@roidelapluie](https://github.com/roidelapluie) +* **Sidemenu**: Profile menu "invisible" when gravatar is disabled [#11097](https://github.com/grafana/grafana/issues/11097) +* **Dashboard**: Fixes a bug with resizeable handles for panels [#11103](https://github.com/grafana/grafana/issues/11103) +* **Alerting**: Telegram inline image mode fails when caption too long [#10975](https://github.com/grafana/grafana/issues/10975) +* **Alerting**: Fixes silent failing validation [#11145](https://github.com/grafana/grafana/pull/11145) +* **OAuth**: Only use jwt token if it contains an email address [#11127](https://github.com/grafana/grafana/pull/11127) + +# 5.0.0-stable (2018-03-01) + +### Fixes + +- **oauth** Fix Github OAuth not working with private Organizations [#11028](https://github.com/grafana/grafana/pull/11028) [@lostick](https://github.com/lostick) +- **kiosk** white area over bottom panels in kiosk mode [#11010](https://github.com/grafana/grafana/issues/11010) +- **alerting** Fix OK state doesn't show up in Microsoft Teams [#11032](https://github.com/grafana/grafana/pull/11032), thx [@manacker](https://github.com/manacker) + +# 5.0.0-beta5 (2018-02-26) + +### Fixes + +- **Orgs** Unable to switch org when too many orgs listed [#10774](https://github.com/grafana/grafana/issues/10774) +- **Folders** Make it easier/explicit to access/modify folders using the API [#10630](https://github.com/grafana/grafana/issues/10630) +- 
**Dashboard** Scrollbar works incorrectly in Grafana 5.0 Beta4 in some cases [#10982](https://github.com/grafana/grafana/issues/10982) +- **ElasticSearch** Custom aggregation sizes no longer allowed for Elasticsearch [#10124](https://github.com/grafana/grafana/issues/10124) +- **oauth** Github OAuth with allowed organizations fails to login [#10964](https://github.com/grafana/grafana/issues/10964) +- **heatmap** Heatmap panel has partially hidden legend [#10793](https://github.com/grafana/grafana/issues/10793) +- **snapshots** Expired snapshots not being cleaned up [#10996](https://github.com/grafana/grafana/pull/10996) + +# 5.0.0-beta4 (2018-02-19) + +### Fixes + +- **Dashboard** Fixed dashboard overwrite permission issue [#10814](https://github.com/grafana/grafana/issues/10814) +- **Keyboard shortcuts** Fixed Esc key when in panel edit/view mode [#10945](https://github.com/grafana/grafana/issues/10945) +- **Save dashboard** Fixed issue with time range & variable reset after saving [#10946](https://github.com/grafana/grafana/issues/10946) + +# 5.0.0-beta3 (2018-02-16) + +### Fixes + +- **MySQL** Fixed new migration issue with index length [#10931](https://github.com/grafana/grafana/issues/10931) +- **Modal** Escape key now closes modals everywhere, fixes [#10887](https://github.com/grafana/grafana/issues/10887) +- **Row repeats** Fix for repeating rows issue, fixes [#10932](https://github.com/grafana/grafana/issues/10932) +- **Docs** Team api documented, fixes [#10832](https://github.com/grafana/grafana/issues/10832) +- **Plugins** Plugin info page broken, fixes [#10943](https://github.com/grafana/grafana/issues/10943) + +# 5.0.0-beta2 (2018-02-15) + +### Fixes + +- **Permissions** Fixed search permissions issues [#10822](https://github.com/grafana/grafana/issues/10822) +- **Permissions** Fixed problem issues displaying permissions lists [#10864](https://github.com/grafana/grafana/issues/10864) +- **PNG-Rendering** Fixed problem rendering legend to the right 
[#10526](https://github.com/grafana/grafana/issues/10526) +- **Reset password** Fixed problem with reset password form [#10870](https://github.com/grafana/grafana/issues/10870) +- **Light theme** Fixed problem with light theme in safari, [#10869](https://github.com/grafana/grafana/issues/10869) +- **Provisioning** Now handles deletes when dashboard json files removed from disk [#10865](https://github.com/grafana/grafana/issues/10865) +- **MySQL** Fixed issue with schema migration on old mysql (index too long) [#10779](https://github.com/grafana/grafana/issues/10779) +- **Github OAuth** Fixed fetching github orgs from private github org [#10823](https://github.com/grafana/grafana/issues/10823) +- **Embedding** Fixed issues embedding panel [#10787](https://github.com/grafana/grafana/issues/10787) + +# 5.0.0-beta1 (2018-02-05) + +Grafana v5.0 is going to be the biggest and most foundational release Grafana has ever had, coming with a ton of UX improvements, a new dashboard grid engine, dashboard folders, user teams and permissions. Checkout out this [video preview](https://www.youtube.com/watch?v=Izr0IBgoTZQ) of Grafana v5. + +### New Major Features - **Dashboards** Dashboard folders, [#1611](https://github.com/grafana/grafana/issues/1611) - **Teams** User groups (teams) implemented. Can be used in folder & dashboard permission list. - **Dashboard grid**: Panels are now layed out in a two dimensional grid (with x, y, w, h). [#9093](https://github.com/grafana/grafana/issues/9093). - **Templating**: Vertical repeat direction for panel repeats. - **UX**: Major update to page header and navigation - **Dashboard settings**: Combine dashboard settings views into one with side menu, [#9750](https://github.com/grafana/grafana/issues/9750) - -## New Dashboard Grid - -The new grid engine is major upgrade for how you can position and move panels. It enables new layouts and a much easier dashboard building experience. The change is backwards compatible. 
Grafana will automatically upgrade your dashboards to the new schema and position panels to match your existing layout. There might be minor differences in panel height. - -Dashboard panels and rows are positioned using a gridPos object `{x: 0, y: 0, w: 24, h: 5}`. Units are in grid dimensions (24 columns, 1 height unit 30px). Rows and Panels objects exist (together) in a flat array directly on the dashboard root object. Rows are not needed for layouts anymore and are mainly there for backward compatibility. Some panel plugins that do not respect their panel height might require an update. - -# 4.7.0 (unreleased / v4.7.x branch) +- **Persistent dashboard url's**: New url's for dashboards that allows renaming dashboards without breaking links. [#7883](https://github.com/grafana/grafana/issues/7883) ## Breaking changes -`[dashboard.json]` have been replaced with [dashboard provisioning](http://docs.grafana.org/administration/provisioning/). - +* **[dashboard.json]** have been replaced with [dashboard provisioning](http://docs.grafana.org/administration/provisioning/). Config files for provisioning datasources as configuration have changed from `/conf/datasources` to `/conf/provisioning/datasources`. From `/etc/grafana/datasources` to `/etc/grafana/provisioning/datasources` when installed with deb/rpm packages. -The pagerduty notifier now defaults to not auto resolve incidents. More details at [#10222](https://github.com/grafana/grafana/issues/10222) +* **Pagerduty** The notifier now defaults to not auto resolve incidents. More details at [#10222](https://github.com/grafana/grafana/issues/10222) + +* **HTTP API** + - `GET /api/alerts` property dashboardUri renamed to url and is now the full url (that is including app sub url). + +## New Dashboard Grid + +The new grid engine is a major upgrade for how you can position and move panels. It enables new layouts and a much easier dashboard building experience. The change is backward compatible. 
So you can upgrade your current version to 5.0 without breaking dashboards, but you cannot downgrade from 5.0 to previous versions. Grafana will automatically upgrade your dashboards to the new schema and position panels to match your existing layout. There might be minor differences in panel height. If you upgrade to 5.0 and for some reason want to rollback to the previous version you can restore dashboards to previous versions using dashboard history. But that should only be seen as an emergency solution. + +Dashboard panels and rows are positioned using a gridPos object `{x: 0, y: 0, w: 24, h: 5}`. Units are in grid dimensions (24 columns, 1 height unit 30px). Rows and Panels objects exist (together) in a flat array directly on the dashboard root object. Rows are not needed for layouts anymore and are mainly there for backward compatibility. Some panel plugins that do not respect their panel height might require an update. ## New Features +* **Alerting**: Add support for internal image store [#6922](https://github.com/grafana/grafana/issues/6922), thx [@FunkyM](https://github.com/FunkyM) * **Data Source Proxy**: Add support for whitelisting specified cookies that will be passed through to the data source when proxying data source requests [#5457](https://github.com/grafana/grafana/issues/5457), thanks [@robingustafsson](https://github.com/robingustafsson) * **Postgres/MySQL**: add __timeGroup macro for mysql [#9596](https://github.com/grafana/grafana/pull/9596), thanks [@svenklemm](https://github.com/svenklemm) * **Text**: Text panel are now edited in the ace editor. [#9698](https://github.com/grafana/grafana/pull/9698), thx [@mtanda](https://github.com/mtanda) @@ -38,7 +124,11 @@ The pagerduty notifier now defaults to not auto resolve incidents. 
More details * **Dashboard as cfg**: Load dashboards from file into Grafana on startup/change [#9654](https://github.com/grafana/grafana/issues/9654) [#5269](https://github.com/grafana/grafana/issues/5269) * **Prometheus**: Grafana can now send alerts to Prometheus Alertmanager while firing [#7481](https://github.com/grafana/grafana/issues/7481), thx [@Thib17](https://github.com/Thib17) and [@mtanda](https://github.com/mtanda) * **Table**: Support multiple table formated queries in table panel [#9170](https://github.com/grafana/grafana/issues/9170), thx [@davkal](https://github.com/davkal) +* **Security**: Protect against brute force (frequent) login attempts [#7616](https://github.com/grafana/grafana/issues/7616) + ## Minor +* **Graph**: Don't hide graph display options (Lines/Points) when draw mode is unchecked [#9770](https://github.com/grafana/grafana/issues/9770), thx [@Jonnymcc](https://github.com/Jonnymcc) +* **Prometheus**: Show label name in paren after by/without/on/ignoring/group_left/group_right [#9664](https://github.com/grafana/grafana/pull/9664), thx [@mtanda](https://github.com/mtanda) * **Alert panel**: Adds placeholder text when no alerts are within the time range [#9624](https://github.com/grafana/grafana/issues/9624), thx [@straend](https://github.com/straend) * **Mysql**: MySQL enable MaxOpenCon and MaxIdleCon regards how constring is configured. [#9784](https://github.com/grafana/grafana/issues/9784), thx [@dfredell](https://github.com/dfredell) * **Cloudwatch**: Fixes broken query inspector for cloudwatch [#9661](https://github.com/grafana/grafana/issues/9661), thx [@mtanda](https://github.com/mtanda) @@ -49,16 +139,28 @@ The pagerduty notifier now defaults to not auto resolve incidents. 
More details [@adiletmaratov](https://github.com/adiletmaratov) * **Backend**: Fixed bug where Grafana exited before all sub routines where finished [#10131](https://github.com/grafana/grafana/issues/10131) * **Azure**: Adds support for Azure blob storage as external image stor [#8955](https://github.com/grafana/grafana/issues/8955), thx [@saada](https://github.com/saada) - -## Tech -* **RabbitMq**: Remove support for publishing events to RabbitMQ [#9645](https://github.com/grafana/grafana/issues/9645) - +* **Telegram**: Add support for inline image uploads to telegram notifier plugin [#9967](https://github.com/grafana/grafana/pull/9967), thx [@rburchell](https://github.com/rburchell) ## Fixes * **Sensu**: Send alert message to sensu output [#9551](https://github.com/grafana/grafana/issues/9551), thx [@cjchand](https://github.com/cjchand) * **Singlestat**: suppress error when result contains no datapoints [#9636](https://github.com/grafana/grafana/issues/9636), thx [@utkarshcmu](https://github.com/utkarshcmu) * **Postgres/MySQL**: Control quoting in SQL-queries when using template variables [#9030](https://github.com/grafana/grafana/issues/9030), thanks [@svenklemm](https://github.com/svenklemm) * **Pagerduty**: Pagerduty dont auto resolve incidents by default anymore. [#10222](https://github.com/grafana/grafana/issues/10222) +* **Cloudwatch**: Fix for multi-valued templated queries. 
[#9903](https://github.com/grafana/grafana/issues/9903) + +## Tech +* **RabbitMq**: Remove support for publishing events to RabbitMQ [#9645](https://github.com/grafana/grafana/issues/9645) + +## Deprecation notes + +### HTTP API +The following operations have been deprecated and will be removed in a future release: + - `GET /api/dashboards/db/:slug` -> Use `GET /api/dashboards/uid/:uid` instead + - `DELETE /api/dashboards/db/:slug` -> Use `DELETE /api/dashboards/uid/:uid` instead + +The following properties have been deprecated and will be removed in a future release: + - `uri` property in `GET /api/search` -> Use new `url` or `uid` property instead + - `meta.slug` property in `GET /api/dashboards/uid/:uid` and `GET /api/dashboards/db/:slug` -> Use new `meta.url` or `dashboard.uid` property instead # 4.6.3 (2017-12-14) diff --git a/Gopkg.lock b/Gopkg.lock new file mode 100644 index 00000000000..e7f96bd5170 --- /dev/null +++ b/Gopkg.lock @@ -0,0 +1,644 @@ +# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. 
+ + +[[projects]] + name = "cloud.google.com/go" + packages = ["compute/metadata"] + revision = "767c40d6a2e058483c25fa193e963a22da17236d" + version = "v0.18.0" + +[[projects]] + name = "github.com/BurntSushi/toml" + packages = ["."] + revision = "b26d9c308763d68093482582cea63d69be07a0f0" + version = "v0.3.0" + +[[projects]] + branch = "master" + name = "github.com/Unknwon/com" + packages = ["."] + revision = "7677a1d7c1137cd3dd5ba7a076d0c898a1ef4520" + +[[projects]] + name = "github.com/apache/thrift" + packages = ["lib/go/thrift"] + revision = "b2a4d4ae21c789b689dd162deb819665567f481c" + version = "0.10.0" + +[[projects]] + name = "github.com/aws/aws-sdk-go" + packages = [ + "aws", + "aws/awserr", + "aws/awsutil", + "aws/client", + "aws/client/metadata", + "aws/corehandlers", + "aws/credentials", + "aws/credentials/ec2rolecreds", + "aws/credentials/endpointcreds", + "aws/credentials/stscreds", + "aws/defaults", + "aws/ec2metadata", + "aws/endpoints", + "aws/request", + "aws/session", + "aws/signer/v4", + "internal/shareddefaults", + "private/protocol", + "private/protocol/ec2query", + "private/protocol/query", + "private/protocol/query/queryutil", + "private/protocol/rest", + "private/protocol/restxml", + "private/protocol/xml/xmlutil", + "service/cloudwatch", + "service/ec2", + "service/ec2/ec2iface", + "service/s3", + "service/sts" + ] + revision = "decd990ddc5dcdf2f73309cbcab90d06b996ca28" + version = "v1.12.67" + +[[projects]] + branch = "master" + name = "github.com/benbjohnson/clock" + packages = ["."] + revision = "7dc76406b6d3c05b5f71a86293cbcf3c4ea03b19" + +[[projects]] + branch = "master" + name = "github.com/beorn7/perks" + packages = ["quantile"] + revision = "4c0e84591b9aa9e6dcfdf3e020114cd81f89d5f9" + +[[projects]] + branch = "master" + name = "github.com/bmizerany/assert" + packages = ["."] + revision = "b7ed37b82869576c289d7d97fb2bbd8b64a0cb28" + +[[projects]] + branch = "master" + name = "github.com/bradfitz/gomemcache" + packages = ["memcache"] 
+ revision = "1952afaa557dc08e8e0d89eafab110fb501c1a2b" + +[[projects]] + branch = "master" + name = "github.com/codahale/hdrhistogram" + packages = ["."] + revision = "3a0bb77429bd3a61596f5e8a3172445844342120" + +[[projects]] + name = "github.com/codegangsta/cli" + packages = ["."] + revision = "cfb38830724cc34fedffe9a2a29fb54fa9169cd1" + version = "v1.20.0" + +[[projects]] + name = "github.com/davecgh/go-spew" + packages = ["spew"] + revision = "346938d642f2ec3594ed81d874461961cd0faa76" + version = "v1.1.0" + +[[projects]] + name = "github.com/denisenkom/go-mssqldb" + packages = ["."] + revision = "ee492709d4324cdcb051d2ac266b77ddc380f5c5" + +[[projects]] + name = "github.com/fatih/color" + packages = ["."] + revision = "570b54cabe6b8eb0bc2dfce68d964677d63b5260" + version = "v1.5.0" + +[[projects]] + name = "github.com/go-ini/ini" + packages = ["."] + revision = "32e4c1e6bc4e7d0d8451aa6b75200d19e37a536a" + version = "v1.32.0" + +[[projects]] + name = "github.com/go-ldap/ldap" + packages = ["."] + revision = "bb7a9ca6e4fbc2129e3db588a34bc970ffe811a9" + version = "v2.5.1" + +[[projects]] + branch = "master" + name = "github.com/go-macaron/binding" + packages = ["."] + revision = "ac54ee249c27dca7e76fad851a4a04b73bd1b183" + +[[projects]] + branch = "master" + name = "github.com/go-macaron/gzip" + packages = ["."] + revision = "cad1c6580a07c56f5f6bc52d66002a05985c5854" + +[[projects]] + branch = "master" + name = "github.com/go-macaron/inject" + packages = ["."] + revision = "d8a0b8677191f4380287cfebd08e462217bac7ad" + +[[projects]] + branch = "master" + name = "github.com/go-macaron/session" + packages = [ + ".", + "memcache", + "mysql", + "postgres", + "redis" + ] + revision = "b8e286a0dba8f4999042d6b258daf51b31d08938" + +[[projects]] + name = "github.com/go-sql-driver/mysql" + packages = ["."] + revision = "2cc627ac8defc45d65066ae98f898166f580f9a4" + +[[projects]] + name = "github.com/go-stack/stack" + packages = ["."] + revision = 
"259ab82a6cad3992b4e21ff5cac294ccb06474bc" + version = "v1.7.0" + +[[projects]] + branch = "master" + name = "github.com/go-xorm/builder" + packages = ["."] + revision = "488224409dd8aa2ce7a5baf8d10d55764a913738" + +[[projects]] + name = "github.com/go-xorm/core" + packages = ["."] + revision = "e8409d73255791843585964791443dbad877058c" + +[[projects]] + name = "github.com/go-xorm/xorm" + packages = ["."] + revision = "6687a2b4e824f4d87f2d65060ec5cb0d896dff1e" + +[[projects]] + branch = "master" + name = "github.com/golang/protobuf" + packages = [ + "proto", + "ptypes", + "ptypes/any", + "ptypes/duration", + "ptypes/timestamp" + ] + revision = "c65a0412e71e8b9b3bfd22925720d23c0f054237" + +[[projects]] + branch = "master" + name = "github.com/gopherjs/gopherjs" + packages = ["js"] + revision = "178c176a91fe05e3e6c58fa5c989bad19e6cdcb3" + +[[projects]] + name = "github.com/gorilla/websocket" + packages = ["."] + revision = "ea4d1f681babbce9545c9c5f3d5194a789c89f5b" + version = "v1.2.0" + +[[projects]] + name = "github.com/gosimple/slug" + packages = ["."] + revision = "e9f42fa127660e552d0ad2b589868d403a9be7c6" + version = "v1.1.1" + +[[projects]] + branch = "master" + name = "github.com/grafana/grafana_plugin_model" + packages = ["go/datasource"] + revision = "dfe5dc0a6ce05825ba7fe2d0323d92e631bffa89" + +[[projects]] + branch = "master" + name = "github.com/hashicorp/go-hclog" + packages = ["."] + revision = "5bcb0f17e36442247290887cc914a6e507afa5c4" + +[[projects]] + name = "github.com/hashicorp/go-plugin" + packages = ["."] + revision = "3e6d191694b5a3a2b99755f31b47fa209e4bcd09" + +[[projects]] + branch = "master" + name = "github.com/hashicorp/go-version" + packages = ["."] + revision = "4fe82ae3040f80a03d04d2cccb5606a626b8e1ee" + +[[projects]] + branch = "master" + name = "github.com/hashicorp/yamux" + packages = ["."] + revision = "683f49123a33db61abfb241b7ac5e4af4dc54d55" + +[[projects]] + name = "github.com/inconshreveable/log15" + packages = ["."] + revision 
= "0decfc6c20d9ca0ad143b0e89dcaa20f810b4fb3" + version = "v2.13" + +[[projects]] + name = "github.com/jmespath/go-jmespath" + packages = ["."] + revision = "0b12d6b5" + +[[projects]] + name = "github.com/jtolds/gls" + packages = ["."] + revision = "77f18212c9c7edc9bd6a33d383a7b545ce62f064" + version = "v4.2.1" + +[[projects]] + name = "github.com/klauspost/compress" + packages = [ + "flate", + "gzip" + ] + revision = "6c8db69c4b49dd4df1fff66996cf556176d0b9bf" + version = "v1.2.1" + +[[projects]] + name = "github.com/klauspost/cpuid" + packages = ["."] + revision = "ae7887de9fa5d2db4eaa8174a7eff2c1ac00f2da" + version = "v1.1" + +[[projects]] + name = "github.com/klauspost/crc32" + packages = ["."] + revision = "cb6bfca970f6908083f26f39a79009d608efd5cd" + version = "v1.1" + +[[projects]] + branch = "master" + name = "github.com/kr/pretty" + packages = ["."] + revision = "cfb55aafdaf3ec08f0db22699ab822c50091b1c4" + +[[projects]] + branch = "master" + name = "github.com/kr/text" + packages = ["."] + revision = "7cafcd837844e784b526369c9bce262804aebc60" + +[[projects]] + branch = "master" + name = "github.com/lib/pq" + packages = [ + ".", + "oid" + ] + revision = "61fe37aa2ee24fabcdbe5c4ac1d4ac566f88f345" + +[[projects]] + name = "github.com/mattn/go-colorable" + packages = ["."] + revision = "167de6bfdfba052fa6b2d3664c8f5272e23c9072" + version = "v0.0.9" + +[[projects]] + name = "github.com/mattn/go-isatty" + packages = ["."] + revision = "0360b2af4f38e8d38c7fce2a9f4e702702d73a39" + version = "v0.0.3" + +[[projects]] + name = "github.com/mattn/go-sqlite3" + packages = ["."] + revision = "6c771bb9887719704b210e87e934f08be014bdb1" + version = "v1.6.0" + +[[projects]] + name = "github.com/matttproud/golang_protobuf_extensions" + packages = ["pbutil"] + revision = "3247c84500bff8d9fb6d579d800f20b3e091582c" + version = "v1.0.0" + +[[projects]] + branch = "master" + name = "github.com/mitchellh/go-testing-interface" + packages = ["."] + revision = 
"a61a99592b77c9ba629d254a693acffaeb4b7e28" + +[[projects]] + name = "github.com/opentracing/opentracing-go" + packages = [ + ".", + "ext", + "log" + ] + revision = "1949ddbfd147afd4d964a9f00b24eb291e0e7c38" + version = "v1.0.2" + +[[projects]] + name = "github.com/patrickmn/go-cache" + packages = ["."] + revision = "a3647f8e31d79543b2d0f0ae2fe5c379d72cedc0" + version = "v2.1.0" + +[[projects]] + name = "github.com/prometheus/client_golang" + packages = [ + "api", + "api/prometheus/v1", + "prometheus", + "prometheus/promhttp" + ] + revision = "967789050ba94deca04a5e84cce8ad472ce313c1" + version = "v0.9.0-pre1" + +[[projects]] + branch = "master" + name = "github.com/prometheus/client_model" + packages = ["go"] + revision = "99fa1f4be8e564e8a6b613da7fa6f46c9edafc6c" + +[[projects]] + branch = "master" + name = "github.com/prometheus/common" + packages = [ + "expfmt", + "internal/bitbucket.org/ww/goautoneg", + "model" + ] + revision = "89604d197083d4781071d3c65855d24ecfb0a563" + +[[projects]] + branch = "master" + name = "github.com/prometheus/procfs" + packages = [ + ".", + "internal/util", + "nfsd", + "xfs" + ] + revision = "85fadb6e89903ef7cca6f6a804474cd5ea85b6e1" + +[[projects]] + branch = "master" + name = "github.com/rainycape/unidecode" + packages = ["."] + revision = "cb7f23ec59bec0d61b19c56cd88cee3d0cc1870c" + +[[projects]] + branch = "master" + name = "github.com/sergi/go-diff" + packages = ["diffmatchpatch"] + revision = "1744e2970ca51c86172c8190fadad617561ed6e7" + +[[projects]] + name = "github.com/smartystreets/assertions" + packages = [ + ".", + "internal/go-render/render", + "internal/oglematchers" + ] + revision = "0b37b35ec7434b77e77a4bb29b79677cced992ea" + version = "1.8.1" + +[[projects]] + name = "github.com/smartystreets/goconvey" + packages = [ + "convey", + "convey/gotest", + "convey/reporting" + ] + revision = "9e8dc3f972df6c8fcc0375ef492c24d0bb204857" + version = "1.6.3" + +[[projects]] + branch = "master" + name = 
"github.com/teris-io/shortid" + packages = ["."] + revision = "771a37caa5cf0c81f585d7b6df4dfc77e0615b5c" + +[[projects]] + name = "github.com/uber/jaeger-client-go" + packages = [ + ".", + "config", + "internal/baggage", + "internal/baggage/remote", + "internal/spanlog", + "log", + "rpcmetrics", + "thrift-gen/agent", + "thrift-gen/baggage", + "thrift-gen/jaeger", + "thrift-gen/sampling", + "thrift-gen/zipkincore", + "utils" + ] + revision = "3ac96c6e679cb60a74589b0d0aa7c70a906183f7" + version = "v2.11.2" + +[[projects]] + name = "github.com/uber/jaeger-lib" + packages = ["metrics"] + revision = "7f95f4f7e80028096410abddaae2556e4c61b59f" + version = "v1.3.1" + +[[projects]] + name = "github.com/yudai/gojsondiff" + packages = [ + ".", + "formatter" + ] + revision = "7b1b7adf999dab73a6eb02669c3d82dbb27a3dd6" + version = "1.0.0" + +[[projects]] + branch = "master" + name = "github.com/yudai/golcs" + packages = ["."] + revision = "ecda9a501e8220fae3b4b600c3db4b0ba22cfc68" + +[[projects]] + branch = "master" + name = "golang.org/x/crypto" + packages = [ + "md4", + "pbkdf2" + ] + revision = "3d37316aaa6bd9929127ac9a527abf408178ea7b" + +[[projects]] + branch = "master" + name = "golang.org/x/net" + packages = [ + "context", + "context/ctxhttp", + "http2", + "http2/hpack", + "idna", + "internal/timeseries", + "lex/httplex", + "trace" + ] + revision = "5ccada7d0a7ba9aeb5d3aca8d3501b4c2a509fec" + +[[projects]] + branch = "master" + name = "golang.org/x/oauth2" + packages = [ + ".", + "google", + "internal", + "jws", + "jwt" + ] + revision = "b28fcf2b08a19742b43084fb40ab78ac6c3d8067" + +[[projects]] + branch = "master" + name = "golang.org/x/sync" + packages = ["errgroup"] + revision = "fd80eb99c8f653c847d294a001bdf2a3a6f768f5" + +[[projects]] + branch = "master" + name = "golang.org/x/sys" + packages = ["unix"] + revision = "af50095a40f9041b3b38960738837185c26e9419" + +[[projects]] + branch = "master" + name = "golang.org/x/text" + packages = [ + "collate", + "collate/build", 
+ "internal/colltab", + "internal/gen", + "internal/tag", + "internal/triegen", + "internal/ucd", + "language", + "secure/bidirule", + "transform", + "unicode/bidi", + "unicode/cldr", + "unicode/norm", + "unicode/rangetable" + ] + revision = "e19ae1496984b1c655b8044a65c0300a3c878dd3" + +[[projects]] + name = "google.golang.org/appengine" + packages = [ + ".", + "cloudsql", + "internal", + "internal/app_identity", + "internal/base", + "internal/datastore", + "internal/log", + "internal/modules", + "internal/remote_api", + "internal/urlfetch", + "urlfetch" + ] + revision = "150dc57a1b433e64154302bdc40b6bb8aefa313a" + version = "v1.0.0" + +[[projects]] + branch = "master" + name = "google.golang.org/genproto" + packages = ["googleapis/rpc/status"] + revision = "a8101f21cf983e773d0c1133ebc5424792003214" + +[[projects]] + name = "google.golang.org/grpc" + packages = [ + ".", + "balancer", + "balancer/base", + "balancer/roundrobin", + "codes", + "connectivity", + "credentials", + "encoding", + "grpclb/grpc_lb_v1/messages", + "grpclog", + "health", + "health/grpc_health_v1", + "internal", + "keepalive", + "metadata", + "naming", + "peer", + "resolver", + "resolver/dns", + "resolver/passthrough", + "stats", + "status", + "tap", + "transport" + ] + revision = "6b51017f791ae1cfbec89c52efdf444b13b550ef" + version = "v1.9.2" + +[[projects]] + branch = "v3" + name = "gopkg.in/alexcesaro/quotedprintable.v3" + packages = ["."] + revision = "2caba252f4dc53eaf6b553000885530023f54623" + +[[projects]] + name = "gopkg.in/asn1-ber.v1" + packages = ["."] + revision = "379148ca0225df7a432012b8df0355c2a2063ac0" + version = "v1.2" + +[[projects]] + name = "gopkg.in/bufio.v1" + packages = ["."] + revision = "567b2bfa514e796916c4747494d6ff5132a1dfce" + version = "v1" + +[[projects]] + branch = "v2" + name = "gopkg.in/gomail.v2" + packages = ["."] + revision = "81ebce5c23dfd25c6c67194b37d3dd3f338c98b1" + +[[projects]] + name = "gopkg.in/ini.v1" + packages = ["."] + revision = 
"32e4c1e6bc4e7d0d8451aa6b75200d19e37a536a" + version = "v1.32.0" + +[[projects]] + name = "gopkg.in/macaron.v1" + packages = ["."] + revision = "75f2e9b42e99652f0d82b28ccb73648f44615faa" + version = "v1.2.4" + +[[projects]] + name = "gopkg.in/redis.v2" + packages = ["."] + revision = "e6179049628164864e6e84e973cfb56335748dea" + version = "v2.3.2" + +[[projects]] + branch = "v2" + name = "gopkg.in/yaml.v2" + packages = ["."] + revision = "d670f9405373e636a5a2765eea47fac0c9bc91a4" + +[solve-meta] + analyzer-name = "dep" + analyzer-version = 1 + inputs-digest = "d2f67abb94028a388f051164896bfb69b1ff3a7255d285dc4d78d298f4793383" + solver-name = "gps-cdcl" + solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml new file mode 100644 index 00000000000..7137547747b --- /dev/null +++ b/Gopkg.toml @@ -0,0 +1,203 @@ +# Gopkg.toml example +# +# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md +# for detailed Gopkg.toml documentation. +# +# required = ["github.com/user/thing/cmd/thing"] +# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"] +# +# [[constraint]] +# name = "github.com/user/project" +# version = "1.0.0" +# +# [[constraint]] +# name = "github.com/user/project2" +# branch = "dev" +# source = "github.com/myfork/project2" +# +# [[override]] +# name = "github.com/x/y" +# version = "2.4.0" + +ignored = [ + "github.com/grafana/grafana/data/*", + "github.com/grafana/grafana/public/*", + "github.com/grafana/grafana/node_modules/*" + ] + +[[constraint]] + name = "github.com/BurntSushi/toml" + version = "0.3.0" + +[[constraint]] + branch = "master" + name = "github.com/Unknwon/com" + #version = "1.0.0" + +[[constraint]] + name = "github.com/aws/aws-sdk-go" + version = "1.12.65" + +[[constraint]] + branch = "master" + name = "github.com/benbjohnson/clock" + +[[constraint]] + branch = "master" + name = "github.com/bmizerany/assert" + +[[constraint]] + name = "github.com/codegangsta/cli" + version = "1.20.0" + +[[constraint]] 
+ name = "github.com/davecgh/go-spew" + version = "1.1.0" + +[[constraint]] + name = "github.com/fatih/color" + version = "1.5.0" + +[[constraint]] + name = "github.com/go-ldap/ldap" + version = "2.5.1" + +[[constraint]] + branch = "master" + name = "github.com/go-macaron/binding" + +[[constraint]] + branch = "master" + name = "github.com/go-macaron/gzip" + +[[constraint]] + branch = "master" + name = "github.com/go-macaron/session" + +[[constraint]] + name = "github.com/go-sql-driver/mysql" + revision = "2cc627ac8defc45d65066ae98f898166f580f9a4" + #version = "1.3.0" //keeping this since we would rather depend on version then commit + +[[constraint]] + name = "github.com/go-stack/stack" + version = "1.7.0" + +[[constraint]] + name = "github.com/go-xorm/core" + revision = "e8409d73255791843585964791443dbad877058c" + #version = "0.5.7" //keeping this since we would rather depend on version then commit + +[[constraint]] + name = "github.com/go-xorm/xorm" + revision = "6687a2b4e824f4d87f2d65060ec5cb0d896dff1e" + #version = "0.6.4" //keeping this since we would rather depend on version then commit + +[[constraint]] + name = "github.com/gorilla/websocket" + version = "1.2.0" + +[[constraint]] + name = "github.com/gosimple/slug" + version = "1.1.1" + +[[constraint]] + branch = "master" + name = "github.com/grafana/grafana_plugin_model" + +[[constraint]] + branch = "master" + name = "github.com/hashicorp/go-hclog" + +[[constraint]] + branch = "master" + name = "github.com/hashicorp/go-version" + +[[constraint]] + name = "github.com/inconshreveable/log15" + version = "2.13.0" + +[[constraint]] + branch = "master" + name = "github.com/lib/pq" + +[[constraint]] + name = "github.com/mattn/go-isatty" + version = "0.0.3" + +[[constraint]] + name = "github.com/mattn/go-sqlite3" + version = "1.6.0" + +[[constraint]] + name = "github.com/opentracing/opentracing-go" + version = "1.0.2" + +[[constraint]] + name = "github.com/patrickmn/go-cache" + version = "2.1.0" + +[[constraint]] + 
name = "github.com/prometheus/client_golang" + version = "0.9.0-pre1" + +[[constraint]] + branch = "master" + name = "github.com/prometheus/client_model" + +[[constraint]] + branch = "master" + name = "github.com/prometheus/common" + +[[constraint]] + name = "github.com/smartystreets/goconvey" + version = "1.6.3" + +[[constraint]] + name = "github.com/uber/jaeger-client-go" + version = "2.11.2" + +[[constraint]] + name = "github.com/yudai/gojsondiff" + version = "1.0.0" + +[[constraint]] + branch = "master" + name = "golang.org/x/net" + +[[constraint]] + branch = "master" + name = "golang.org/x/oauth2" + +[[constraint]] + branch = "master" + name = "golang.org/x/sync" + +[[constraint]] + name = "gopkg.in/gomail.v2" + branch = "v2" + +[[constraint]] + name = "gopkg.in/ini.v1" + version = "1.32.0" + +[[constraint]] + name = "gopkg.in/macaron.v1" + version = "1.2.4" + +[[constraint]] + branch = "v2" + name = "gopkg.in/yaml.v2" + +[prune] + non-go = true + go-tests = true + unused-packages = true + +[[constraint]] + branch = "master" + name = "github.com/teris-io/shortid" + +[[constraint]] + name = "github.com/denisenkom/go-mssqldb" + revision = "ee492709d4324cdcb051d2ac266b77ddc380f5c5" diff --git a/Makefile b/Makefile index d003cda6dd4..6f7beb837d8 100644 --- a/Makefile +++ b/Makefile @@ -11,8 +11,14 @@ deps: deps-js build-go: go run build.go build +build-server: + go run build.go build-server + +build-cli: + go run build.go build-cli + build-js: - npm run build + yarn run build build: build-go build-js @@ -20,9 +26,12 @@ test-go: go test -v ./pkg/... test-js: - npm test + yarn test test: test-go test-js run: ./bin/grafana-server + +protoc: + protoc -I pkg/tsdb/models pkg/tsdb/models/*.proto --go_out=plugins=grpc:pkg/tsdb/models/. 
\ No newline at end of file diff --git a/README.md b/README.md index 069958d9031..9db746cc5ea 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ the latest master builds [here](https://grafana.com/grafana/download) ### Building the backend ```bash go get github.com/grafana/grafana -cd ~/go/src/github.com/grafana/grafana +cd $GOPATH/src/github.com/grafana/grafana go run build.go setup go run build.go build ``` @@ -45,23 +45,17 @@ For this you need nodejs (v.6+). ```bash npm install -g yarn yarn install --pure-lockfile -npm run build -``` - -To rebuild frontend assets (typescript, sass etc) as you change them start the watcher via. - -```bash npm run watch ``` -Run tests +Run tests ```bash -npm run test +npm run jest ``` -Run tests in watch mode +Run karma tests ```bash -npm run watch-test +npm run karma ``` ### Recompile backend on source change @@ -86,8 +80,11 @@ In your custom.ini uncomment (remove the leading `;`) sign. And set `app_mode = ### Running tests -- You can run backend Golang tests using "go test ./pkg/...". -- Execute all frontend tests with "npm run test" +#### Frontend +Execute all frontend tests +```bash +npm run test +``` Writing & watching frontend tests (we have two test runners) @@ -98,6 +95,18 @@ Writing & watching frontend tests (we have two test runners) - Start watcher: `npm run karma` - Karma+Mocha runs all files that end with the name "_specs.ts". +#### Backend +```bash +# Run Golang tests using sqlite3 as database (default) +go test ./pkg/... + +# Run Golang tests using mysql as database - convenient to use /docker/blocks/mysql_tests +GRAFANA_TEST_DB=mysql go test ./pkg/... + +# Run Golang tests using postgres as database - convenient to use /docker/blocks/postgres_tests +GRAFANA_TEST_DB=postgres go test ./pkg/... +``` + ## Contribute If you have any idea for an improvement or found a bug, do not hesitate to open an issue. 
diff --git a/ROADMAP.md b/ROADMAP.md index 479c1933bc0..67d7093263d 100644 --- a/ROADMAP.md +++ b/ROADMAP.md @@ -1,25 +1,27 @@ -# Roadmap (2017-10-31) +# Roadmap (2018-02-22) This roadmap is a tentative plan for the core development team. Things change constantly as PRs come in and priorities change. But it will give you an idea of our current vision and plan. -### Short term (1-4 months) +### Short term (1-2 months) - - Release Grafana v5 - - Teams - - Dashboard folders - - Dashboard & folder permissions (assigned to users or groups) - - New Dashboard layout engine - - New sidemenu & nav UX +- v5.1 + - Crossplatform builds & build speed improvements + - Enterprise LDAP + - Provisioning workflow + - First login registration view + - IFQL Initial support + +### Mid term (2-4 months) + +- v5.2 + - Azure monitor backend rewrite - Elasticsearch alerting - - React migration foundation (core components) - - Graphite 1.1 Tags Support + - Backend plugins? (alert notifiers, auth) ### Long term (4 - 8 months) -- Backend plugins to support more Auth options, Alerting data sources & notifications - Alerting improvements (silence, per series tracking, etc) -- Dashboard as configuration and other automation / provisioning improvements - Progress on React migration - Change visualization (panel type) on the fly. - Multi stat panel (vertical version of singlestat with bars/graph mode with big number etc) diff --git a/build.go b/build.go index 3c63c34dd63..c38c452f61f 100644 --- a/build.go +++ b/build.go @@ -87,6 +87,10 @@ func main() { clean() build("grafana-cli", "./pkg/cmd/grafana-cli", []string{}) + case "build-server": + clean() + build("grafana-server", "./pkg/cmd/grafana-server", []string{}) + case "build": clean() for _, binary := range binaries { @@ -351,11 +355,11 @@ func ChangeWorkingDir(dir string) { } func grunt(params ...string) { - if runtime.GOOS == "windows" { - runPrint(`.\node_modules\.bin\grunt`, params...) 
- } else { - runPrint("./node_modules/.bin/grunt", params...) - } + if runtime.GOOS == "windows" { + runPrint(`.\node_modules\.bin\grunt`, params...) + } else { + runPrint("./node_modules/.bin/grunt", params...) + } } func gruntBuildArg(task string) []string { @@ -375,7 +379,7 @@ func gruntBuildArg(task string) []string { } func setup() { - runPrint("go", "get", "-v", "github.com/kardianos/govendor") + runPrint("go", "get", "-v", "github.com/golang/dep") runPrint("go", "install", "-v", "./pkg/cmd/grafana-server") } diff --git a/circle.yml b/circle.yml index 4eb600bfde3..cfa8b762e49 100644 --- a/circle.yml +++ b/circle.yml @@ -1,57 +1,135 @@ -machine: - node: - version: 6.11.4 - python: - version: 2.7.3 - services: - - docker - environment: - GOPATH: "/home/ubuntu/.go_workspace" - ORG_PATH: "github.com/grafana" - REPO_PATH: "${ORG_PATH}/grafana" - GODIST: "go1.9.2.linux-amd64.tar.gz" - post: - - mkdir -p ~/download - - mkdir -p ~/docker - - test -e download/$GODIST || curl -o download/$GODIST https://storage.googleapis.com/golang/$GODIST - - sudo rm -rf /usr/local/go - - sudo tar -C /usr/local -xzf download/$GODIST +version: 2 -dependencies: - cache_directories: - - "~/docker" - - "~/download" - override: - - rm -rf ${GOPATH}/src/${REPO_PATH} - - mkdir -p ${GOPATH}/src/${ORG_PATH} - - cp -r ~/grafana ${GOPATH}/src/${ORG_PATH} - pre: - - pip install awscli - - sudo apt-get update; sudo apt-get install rpm; sudo apt-get install expect - - ./scripts/build/build_container.sh +jobs: + test-frontend: + docker: + - image: circleci/node:6.11.4 + steps: + - checkout + - run: + name: install yarn + command: 'sudo npm install -g yarn --quiet' + - restore_cache: + key: dependency-cache-{{ checksum "yarn.lock" }} + # Could we skip this step if the cache has been restored? 
`[ -d node_modules ] || yarn install ...` should be able to apply to build step as well + - run: + name: yarn install + command: 'yarn install --pure-lockfile --no-progress' + - save_cache: + key: dependency-cache-{{ checksum "yarn.lock" }} + paths: + - node_modules + - run: + name: frontend tests + command: './scripts/circle-test-frontend.sh' -test: - override: - - bash scripts/circle-test-frontend.sh - - bash scripts/circle-test-backend.sh + test-backend: + docker: + - image: circleci/golang:1.10 + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: + name: build backend and run go tests + command: './scripts/circle-test-backend.sh' -deployment: - gh_branch: - branch: master - commands: - - ./scripts/build/deploy.sh - - ./scripts/build/sign_packages.sh - - go run build.go sha-dist - - aws s3 sync ./dist s3://$BUCKET_NAME/master - - ./scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} master - - ./scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} - - go run ./scripts/build/publish.go -apiKey ${GRAFANA_COM_API_KEY} - gh_tag: - tag: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/ - commands: - - ./scripts/build/deploy.sh - - ./scripts/build/sign_packages.sh - - go run build.go sha-dist - - aws s3 sync ./dist s3://$BUCKET_NAME/release - - ./scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release - - ./scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} ${CIRCLE_TAG} + build: + docker: + - image: grafana/build-container:v0.1 + working_directory: /go/src/github.com/grafana/grafana + steps: + - checkout + - run: + name: build and package grafana + command: './scripts/build/build.sh' + - run: + name: sign packages + command: './scripts/build/sign_packages.sh' + - run: + name: sha-sum packages + command: 'go run build.go sha-dist' + - run: + name: Build Grafana.com publisher + command: 'go build -o scripts/publish scripts/build/publish.go' + - persist_to_workspace: + root: . 
+ paths: + - dist/grafana* + - scripts/*.sh + - scripts/publish + + deploy-master: + docker: + - image: circleci/python:2.7-stretch + steps: + - attach_workspace: + at: . + - run: + name: install awscli + command: 'sudo pip install awscli' + - run: + name: deploy to s3 + command: 'aws s3 sync ./dist s3://$BUCKET_NAME/master' + - run: + name: Trigger Windows build + command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} master' + - run: + name: Trigger Docker build + command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN}' + - run: + name: Publish to Grafana.com + command: './scripts/publish -apiKey ${GRAFANA_COM_API_KEY}' + + deploy-release: + docker: + - image: circleci/python:2.7-stretch + steps: + - attach_workspace: + at: dist + - run: + name: install awscli + command: 'sudo pip install awscli' + - run: + name: deploy to s3 + command: 'aws s3 sync ./dist s3://$BUCKET_NAME/release' + - run: + name: Trigger Windows build + command: './scripts/trigger_windows_build.sh ${APPVEYOR_TOKEN} ${CIRCLE_SHA1} release' + - run: + name: Trigger Docker build + command: './scripts/trigger_docker_build.sh ${TRIGGER_GRAFANA_PACKER_CIRCLECI_TOKEN} ${CIRCLE_TAG}' + +workflows: + version: 2 + test-and-build: + jobs: + - build: + filters: + tags: + only: /.*/ + - test-frontend: + filters: + tags: + only: /.*/ + - test-backend: + filters: + tags: + only: /.*/ + - deploy-master: + requires: + - test-backend + - test-frontend + - build + filters: + branches: + only: master + - deploy-release: + requires: + - test-backend + - test-frontend + - build + filters: + branches: + ignore: /.*/ + tags: + only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/ diff --git a/conf/defaults.ini b/conf/defaults.ini index 4e2929096a6..4a2240f1924 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -174,6 +174,9 @@ disable_gravatar = false # data source proxy whitelist (ip_or_domain:port separated by spaces) data_source_proxy_whitelist = +# disable 
protection against brute force login attempts +disable_brute_force_login_protection = false + #################################### Snapshots ########################### [snapshots] # snapshot sharing options @@ -184,9 +187,6 @@ external_snapshot_name = Publish to snapshot.raintank.io # remove expired snapshot snapshot_remove_expired = true -# remove snapshots after 90 days -snapshot_TTL_days = 90 - #################################### Dashboards ################## [dashboards] @@ -248,7 +248,7 @@ enabled = false allow_sign_up = true client_id = some_id client_secret = some_secret -scopes = user:email +scopes = user:email,read:org auth_url = https://github.com/login/oauth/authorize token_url = https://github.com/login/oauth/access_token api_url = https://api.github.com/user @@ -324,7 +324,7 @@ allow_sign_up = true enabled = false host = localhost:25 user = -# If the password contains # or ; you have to wrap it with trippel quotes. Ex """#password;""" +# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;""" password = cert_file = key_file = @@ -473,7 +473,7 @@ sampler_param = 1 #################################### External Image Storage ############## [external_image_storage] -# You can choose between (s3, webdav, gcs, azure_blob) +# You can choose between (s3, webdav, gcs, azure_blob, local) provider = [external_image_storage.s3] @@ -499,3 +499,6 @@ path = account_name = account_key = container_name = + +[external_image_storage.local] +# does not require any configuration diff --git a/conf/ldap.toml b/conf/ldap.toml index ae217106cb2..166d85eabb1 100644 --- a/conf/ldap.toml +++ b/conf/ldap.toml @@ -19,7 +19,7 @@ ssl_skip_verify = false # Search user bind dn bind_dn = "cn=admin,dc=grafana,dc=org" # Search user bind password -# If the password contains # or ; you have to wrap it with trippel quotes. Ex """#password;""" +# If the password contains # or ; you have to wrap it with triple quotes. 
Ex """#password;""" bind_password = 'grafana' # User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)" diff --git a/conf/provisioning/dashboards/sample.yaml b/conf/provisioning/dashboards/sample.yaml index 40992d1461e..d70bd425634 100644 --- a/conf/provisioning/dashboards/sample.yaml +++ b/conf/provisioning/dashboards/sample.yaml @@ -1,6 +1,10 @@ +# # config file version +apiVersion: 1 + +#providers: # - name: 'default' -# org_id: 1 +# orgId: 1 # folder: '' # type: file # options: -# folder: /var/lib/grafana/dashboards \ No newline at end of file +# path: /var/lib/grafana/dashboards diff --git a/conf/provisioning/datasources/sample.yaml b/conf/provisioning/datasources/sample.yaml index 1bb9cb53b45..877e229183d 100644 --- a/conf/provisioning/datasources/sample.yaml +++ b/conf/provisioning/datasources/sample.yaml @@ -1,10 +1,13 @@ +# # config file version +apiVersion: 1 + # # list of datasources that should be deleted from the database -#delete_datasources: +#deleteDatasources: # - name: Graphite -# org_id: 1 +# orgId: 1 # # list of datasources to insert/update depending -# # whats available in the datbase +# # on what's available in the datbase #datasources: # # name of the datasource. Required # - name: Graphite @@ -12,8 +15,8 @@ # type: graphite # # access mode. direct or proxy. Required # access: proxy -# # org id. will default to org_id 1 if not specified -# org_id: 1 +# # org id. will default to orgId 1 if not specified +# orgId: 1 # # url # url: http://localhost:8080 # # database password, if used @@ -23,22 +26,22 @@ # # database name, if used # database: # # enable/disable basic auth -# basic_auth: +# basicAuth: # # basic auth username -# basic_auth_user: +# basicAuthUser: # # basic auth password -# basic_auth_password: +# basicAuthPassword: # # enable/disable with credentials headers -# with_credentials: +# withCredentials: # # mark as default datasource. 
Max one per org -# is_default: +# isDefault: # # fields that will be converted to json and stored in json_data -# json_data: +# jsonData: # graphiteVersion: "1.1" # tlsAuth: true # tlsAuthWithCACert: true # # json object of data that will be encrypted. -# secure_json_data: +# secureJsonData: # tlsCACert: "..." # tlsClientCert: "..." # tlsClientKey: "..." diff --git a/conf/sample.ini b/conf/sample.ini index d297d2db66a..3e45ac44d61 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -71,7 +71,7 @@ ;host = 127.0.0.1:3306 ;name = grafana ;user = root -# If the password contains # or ; you have to wrap it with trippel quotes. Ex """#password;""" +# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;""" ;password = # Use either URL or the previous fields to configure the database @@ -162,6 +162,9 @@ log_queries = # data source proxy whitelist (ip_or_domain:port separated by spaces) ;data_source_proxy_whitelist = +# disable protection against brute force login attempts +;disable_brute_force_login_protection = false + #################################### Snapshots ########################### [snapshots] # snapshot sharing options @@ -172,9 +175,6 @@ log_queries = # remove expired snapshot ;snapshot_remove_expired = true -# remove snapshots after 90 days -;snapshot_TTL_days = 90 - #################################### Dashboards History ################## [dashboards] # Number dashboard versions to keep (per dashboard). Default: 20, Minimum: 1 @@ -417,7 +417,7 @@ log_queries = #################################### External image storage ########################## [external_image_storage] # Used for uploading images to public servers so they can be included in slack/email messages. 
-# you can choose between (s3, webdav, gcs, azure_blob) +# you can choose between (s3, webdav, gcs, azure_blob, local) ;provider = [external_image_storage.s3] @@ -442,3 +442,6 @@ log_queries = ;account_name = ;account_key = ;container_name = + +[external_image_storage.local] +# does not require any configuration diff --git a/docker/blocks/apache_proxy/Dockerfile b/docker/blocks/apache_proxy/Dockerfile new file mode 100644 index 00000000000..37c2a4e493b --- /dev/null +++ b/docker/blocks/apache_proxy/Dockerfile @@ -0,0 +1,4 @@ +FROM jmferrer/apache2-reverse-proxy:latest + +COPY ports.conf /etc/apache2/sites-enabled +COPY proxy.conf /etc/apache2/sites-enabled \ No newline at end of file diff --git a/docker/blocks/apache_proxy/docker-compose.yaml b/docker/blocks/apache_proxy/docker-compose.yaml new file mode 100644 index 00000000000..2aec3d4bc4f --- /dev/null +++ b/docker/blocks/apache_proxy/docker-compose.yaml @@ -0,0 +1,9 @@ +# This will proxy all requests for http://localhost:10081/grafana/ to +# http://localhost:3000 (Grafana running locally) +# +# Please note that you'll need to change the root_url in the Grafana configuration: +# root_url = %(protocol)s://%(domain)s:/grafana/ + + apacheproxy: + build: blocks/apache_proxy + network_mode: host diff --git a/docker/blocks/apache_proxy/ports.conf b/docker/blocks/apache_proxy/ports.conf new file mode 100644 index 00000000000..e7a5f9712af --- /dev/null +++ b/docker/blocks/apache_proxy/ports.conf @@ -0,0 +1 @@ +Listen 10081 \ No newline at end of file diff --git a/docker/blocks/apache_proxy/proxy.conf b/docker/blocks/apache_proxy/proxy.conf new file mode 100644 index 00000000000..5eb35cbe8b9 --- /dev/null +++ b/docker/blocks/apache_proxy/proxy.conf @@ -0,0 +1,4 @@ + + ProxyPass /grafana/ http://localhost:3000/ + ProxyPassReverse /grafana/ http://localhost:3000/ + \ No newline at end of file diff --git a/docker/blocks/mysql/dashboard.json b/docker/blocks/mysql/dashboard.json new file mode 100644 index 
00000000000..e2b791f82e6 --- /dev/null +++ b/docker/blocks/mysql/dashboard.json @@ -0,0 +1,549 @@ +{ + "__inputs": [ + { + "name": "DS_MYSQL", + "label": "Mysql", + "description": "", + "type": "datasource", + "pluginId": "mysql", + "pluginName": "MySQL" + } + ], + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "5.0.0" + }, + { + "type": "panel", + "id": "graph", + "name": "Graph", + "version": "" + }, + { + "type": "datasource", + "id": "mysql", + "name": "MySQL", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "table", + "name": "Table", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "A dashboard visualizing data generated from grafana/fake-data-gen", + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "iteration": 1518602729468, + "links": [], + "panels": [ + { + "aliasColors": { + "total avg": "#6ed0e0" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_MYSQL}", + "fill": 2, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [ + { + "alias": "total avg", + "fill": 0, + "pointradius": 3, + "points": true + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "", + "format": "time_series", + "hide": false, + "rawSql": "SELECT\n $__timeGroup(createdAt,'$summarize') as time_sec,\n avg(value) as value,\n hostname as metric\nFROM \n grafana_metric\nWHERE\n 
$__timeFilter(createdAt) AND\n measurement = 'logins.count' AND\n hostname IN($host)\nGROUP BY 1, 3\nORDER BY 1", + "refId": "A", + "target": "" + }, + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n $__timeGroup(createdAt,'$summarize') as time_sec,\n min(value) as value,\n 'total avg' as metric\nFROM \n grafana_metric\nWHERE\n $__timeFilter(createdAt) AND\n measurement = 'logins.count'\nGROUP BY 1\nORDER BY 1", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": "1h", + "title": "Average logins / $summarize", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_MYSQL}", + "fill": 2, + "gridPos": { + "h": 18, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 4, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n $__timeGroup(createdAt,'$summarize') as time_sec,\n avg(value) as value,\n 'started' as metric\nFROM \n grafana_metric\nWHERE\n $__timeFilter(createdAt) AND\n measurement = 'payment.started'\nGROUP BY 1, 3\nORDER BY 1", + "refId": "A", + "target": "" + }, + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n 
$__timeGroup(createdAt,'$summarize') as time_sec,\n avg(value) as value,\n 'ended' as \"metric\"\nFROM \n grafana_metric\nWHERE\n $__timeFilter(createdAt) AND\n measurement = 'payment.ended'\nGROUP BY 1, 3\nORDER BY 1", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": "1h", + "title": "Average payments started/ended / $summarize", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_MYSQL}", + "fill": 2, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 9 + }, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n $__timeGroup(createdAt,'$summarize') as time_sec,\n max(value) as value,\n hostname as metric\nFROM \n grafana_metric\nWHERE\n $__timeFilter(createdAt) AND\n measurement = 'cpu' AND\n hostname IN($host)\nGROUP BY 1, 3\nORDER BY 1", + "refId": "A", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": "1h", + "title": "Max CPU / $summarize", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": 
null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "columns": [], + "datasource": "${DS_MYSQL}", + "fontSize": "100%", + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 18 + }, + "id": 6, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "link": false, + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "", + "format": "table", + "rawSql": "SELECT createdAt as Time, source, datacenter, hostname, value FROM grafana_metric WHERE hostname in($host)", + "refId": "A", + "target": "" + } + ], + "timeShift": "1h", + "title": "Values", + "transform": "table", + "type": "table" + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [ + "fake-data-gen", + "mysql" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": {}, + "datasource": "${DS_MYSQL}", + "hide": 0, + "includeAll": false, + "label": "Datacenter", + "multi": false, + "name": "datacenter", + "options": [], + "query": "SELECT DISTINCT datacenter FROM grafana_metric", + "refresh": 1, + "regex": "", + "sort": 1, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": {}, + "datasource": "${DS_MYSQL}", + "hide": 0, + "includeAll": true, + "label": "Hostname", + "multi": true, + "name": "host", + "options": [], + "query": "SELECT DISTINCT hostname FROM grafana_metric 
WHERE datacenter='$datacenter'", + "refresh": 1, + "regex": "", + "sort": 1, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "auto": false, + "auto_count": 5, + "auto_min": "10s", + "current": { + "selected": true, + "text": "1m", + "value": "1m" + }, + "hide": 0, + "label": "Summarize", + "name": "summarize", + "options": [ + { + "selected": false, + "text": "1s", + "value": "1s" + }, + { + "selected": false, + "text": "10s", + "value": "10s" + }, + { + "selected": false, + "text": "30s", + "value": "30s" + }, + { + "selected": true, + "text": "1m", + "value": "1m" + }, + { + "selected": false, + "text": "5m", + "value": "5m" + }, + { + "selected": false, + "text": "10m", + "value": "10m" + }, + { + "selected": false, + "text": "30m", + "value": "30m" + }, + { + "selected": false, + "text": "1h", + "value": "1h" + }, + { + "selected": false, + "text": "6h", + "value": "6h" + }, + { + "selected": false, + "text": "12h", + "value": "12h" + }, + { + "selected": false, + "text": "1d", + "value": "1d" + }, + { + "selected": false, + "text": "7d", + "value": "7d" + }, + { + "selected": false, + "text": "14d", + "value": "14d" + }, + { + "selected": false, + "text": "30d", + "value": "30d" + } + ], + "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", + "refresh": 2, + "type": "interval" + } + ] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + "1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Grafana Fake Data Gen - MySQL", + "uid": "DGsCac3kz", + "version": 6 +} \ No newline at end of file diff --git a/docker/blocks/mysql/docker-compose.yaml b/docker/blocks/mysql/docker-compose.yaml index 6eee158ac43..f7881e66539 100644 --- a/docker/blocks/mysql/docker-compose.yaml +++ 
b/docker/blocks/mysql/docker-compose.yaml @@ -12,3 +12,10 @@ - /etc/timezone:/etc/timezone:ro command: [mysqld, --character-set-server=utf8mb4, --collation-server=utf8mb4_unicode_ci, --innodb_monitor_enable=all] + fake-mysql-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: mysql + FD_PORT: 3306 + diff --git a/docker/blocks/mysql_tests/docker-compose.yaml b/docker/blocks/mysql_tests/docker-compose.yaml index c6c3097d463..3c59b66b5ac 100644 --- a/docker/blocks/mysql_tests/docker-compose.yaml +++ b/docker/blocks/mysql_tests/docker-compose.yaml @@ -7,4 +7,7 @@ MYSQL_PASSWORD: password ports: - "3306:3306" + volumes: + - /etc/localtime:/etc/localtime:ro + - /etc/timezone:/etc/timezone:ro tmpfs: /var/lib/mysql:rw diff --git a/docker/blocks/nginx_proxy/Dockerfile b/docker/blocks/nginx_proxy/Dockerfile new file mode 100644 index 00000000000..9ded20dfdda --- /dev/null +++ b/docker/blocks/nginx_proxy/Dockerfile @@ -0,0 +1,3 @@ +FROM nginx:alpine + +COPY nginx.conf /etc/nginx/nginx.conf \ No newline at end of file diff --git a/docker/blocks/nginx_proxy/docker-compose.yaml b/docker/blocks/nginx_proxy/docker-compose.yaml new file mode 100644 index 00000000000..7c3447ade5c --- /dev/null +++ b/docker/blocks/nginx_proxy/docker-compose.yaml @@ -0,0 +1,9 @@ +# This will proxy all requests for http://localhost:10080/grafana/ to +# http://localhost:3000 (Grafana running locally) +# +# Please note that you'll need to change the root_url in the Grafana configuration: +# root_url = %(protocol)s://%(domain)s:/grafana/ + + nginxproxy: + build: blocks/nginx_proxy + network_mode: host diff --git a/docker/blocks/nginx_proxy/nginx.conf b/docker/blocks/nginx_proxy/nginx.conf new file mode 100644 index 00000000000..18e27b3fb01 --- /dev/null +++ b/docker/blocks/nginx_proxy/nginx.conf @@ -0,0 +1,19 @@ +events { worker_connections 1024; } + +http { + sendfile on; + + proxy_redirect off; + proxy_set_header Host $host; + proxy_set_header X-Real-IP 
$remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Host $server_name; + + server { + listen 10080; + + location /grafana/ { + proxy_pass http://localhost:3000/; + } + } +} \ No newline at end of file diff --git a/docker/blocks/postgres/dashboard.json b/docker/blocks/postgres/dashboard.json new file mode 100644 index 00000000000..77b0ceac624 --- /dev/null +++ b/docker/blocks/postgres/dashboard.json @@ -0,0 +1,547 @@ +{ + "__inputs": [ + { + "name": "DS_POSTGRESQL", + "label": "PostgreSQL", + "description": "", + "type": "datasource", + "pluginId": "postgres", + "pluginName": "PostgreSQL" + } + ], + "__requires": [ + { + "type": "grafana", + "id": "grafana", + "name": "Grafana", + "version": "5.0.0" + }, + { + "type": "panel", + "id": "graph", + "name": "Graph", + "version": "" + }, + { + "type": "datasource", + "id": "postgres", + "name": "PostgreSQL", + "version": "1.0.0" + }, + { + "type": "panel", + "id": "table", + "name": "Table", + "version": "" + } + ], + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": "-- Grafana --", + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "A dashboard visualizing data generated from grafana/fake-data-gen", + "editable": true, + "gnetId": null, + "graphTooltip": 0, + "id": null, + "iteration": 1518601837383, + "links": [], + "panels": [ + { + "aliasColors": { + "total avg": "#6ed0e0" + }, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_POSTGRESQL}", + "fill": 2, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 0 + }, + "id": 2, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": 
"flot", + "seriesOverrides": [ + { + "alias": "total avg", + "fill": 0, + "pointradius": 3, + "points": true + } + ], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "", + "format": "time_series", + "hide": false, + "rawSql": "SELECT\n $__timeGroup(\"createdAt\",'$summarize'),\n avg(value) as \"value\",\n hostname as \"metric\"\nFROM \n grafana_metric\nWHERE\n $__timeFilter(\"createdAt\") AND\n measurement = 'logins.count' AND\n hostname IN($host)\nGROUP BY time, metric\nORDER BY time", + "refId": "A", + "target": "" + }, + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n $__timeGroup(\"createdAt\",'$summarize'),\n min(value) as \"value\",\n 'total avg' as \"metric\"\nFROM \n grafana_metric\nWHERE\n $__timeFilter(\"createdAt\") AND\n measurement = 'logins.count'\nGROUP BY time", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Average logins / $summarize", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_POSTGRESQL}", + "fill": 2, + "gridPos": { + "h": 18, + "w": 12, + "x": 12, + "y": 0 + }, + "id": 4, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + 
"steppedLine": false, + "targets": [ + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n $__timeGroup(\"createdAt\",'$summarize'),\n avg(value) as \"value\",\n 'started' as \"metric\"\nFROM \n grafana_metric\nWHERE\n $__timeFilter(\"createdAt\") AND\n measurement = 'payment.started'\nGROUP BY time, metric\nORDER BY time", + "refId": "A", + "target": "" + }, + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n $__timeGroup(\"createdAt\",'$summarize'),\n avg(value) as \"value\",\n 'ended' as \"metric\"\nFROM \n grafana_metric\nWHERE\n $__timeFilter(\"createdAt\") AND\n measurement = 'payment.ended'\nGROUP BY time, metric\nORDER BY time", + "refId": "B" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Average payments started/ended / $summarize", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "aliasColors": {}, + "bars": false, + "dashLength": 10, + "dashes": false, + "datasource": "${DS_POSTGRESQL}", + "fill": 2, + "gridPos": { + "h": 9, + "w": 12, + "x": 0, + "y": 9 + }, + "id": 3, + "legend": { + "avg": false, + "current": false, + "max": false, + "min": false, + "show": true, + "total": false, + "values": false + }, + "lines": true, + "linewidth": 2, + "links": [], + "nullPointMode": "null", + "percentage": false, + "pointradius": 5, + "points": false, + "renderer": "flot", + "seriesOverrides": [], + "spaceLength": 10, + "stack": false, + "steppedLine": false, + "targets": [ + { + "alias": "", + "format": "time_series", + "rawSql": "SELECT\n $__timeGroup(\"createdAt\",'$summarize'),\n max(value) as \"value\",\n 
hostname as \"metric\"\nFROM \n grafana_metric\nWHERE\n $__timeFilter(\"createdAt\") AND\n measurement = 'cpu' AND\n hostname IN($host)\nGROUP BY time, metric\nORDER BY time", + "refId": "A", + "target": "" + } + ], + "thresholds": [], + "timeFrom": null, + "timeShift": null, + "title": "Max CPU / $summarize", + "tooltip": { + "shared": true, + "sort": 0, + "value_type": "individual" + }, + "type": "graph", + "xaxis": { + "buckets": null, + "mode": "time", + "name": null, + "show": true, + "values": [] + }, + "yaxes": [ + { + "format": "percent", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + }, + { + "format": "short", + "label": null, + "logBase": 1, + "max": null, + "min": null, + "show": true + } + ] + }, + { + "columns": [], + "datasource": "${DS_POSTGRESQL}", + "fontSize": "100%", + "gridPos": { + "h": 9, + "w": 24, + "x": 0, + "y": 18 + }, + "id": 6, + "links": [], + "pageSize": null, + "scroll": true, + "showHeader": true, + "sort": { + "col": 0, + "desc": true + }, + "styles": [ + { + "alias": "Time", + "dateFormat": "YYYY-MM-DD HH:mm:ss", + "link": false, + "pattern": "Time", + "type": "date" + }, + { + "alias": "", + "colorMode": null, + "colors": [ + "rgba(245, 54, 54, 0.9)", + "rgba(237, 129, 40, 0.89)", + "rgba(50, 172, 45, 0.97)" + ], + "decimals": 2, + "pattern": "/.*/", + "thresholds": [], + "type": "number", + "unit": "short" + } + ], + "targets": [ + { + "alias": "", + "format": "table", + "rawSql": "SELECT \"createdAt\" as \"Time\", source, datacenter, hostname, value FROM grafana_metric WHERE hostname in($host)", + "refId": "A", + "target": "" + } + ], + "title": "Values", + "transform": "table", + "type": "table" + } + ], + "schemaVersion": 16, + "style": "dark", + "tags": [ + "fake-data-gen", + "postgres" + ], + "templating": { + "list": [ + { + "allValue": null, + "current": {}, + "datasource": "${DS_POSTGRESQL}", + "hide": 0, + "includeAll": false, + "label": "Datacenter", + "multi": false, + "name": 
"datacenter", + "options": [], + "query": "SELECT DISTINCT datacenter FROM grafana_metric", + "refresh": 1, + "regex": "", + "sort": 1, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "allValue": null, + "current": {}, + "datasource": "${DS_POSTGRESQL}", + "hide": 0, + "includeAll": true, + "label": "Hostname", + "multi": true, + "name": "host", + "options": [], + "query": "SELECT DISTINCT hostname FROM grafana_metric WHERE datacenter='$datacenter'", + "refresh": 1, + "regex": "", + "sort": 1, + "tagValuesQuery": "", + "tags": [], + "tagsQuery": "", + "type": "query", + "useTags": false + }, + { + "auto": false, + "auto_count": 5, + "auto_min": "10s", + "current": { + "text": "1m", + "value": "1m" + }, + "hide": 0, + "label": "Summarize", + "name": "summarize", + "options": [ + { + "selected": false, + "text": "1s", + "value": "1s" + }, + { + "selected": false, + "text": "10s", + "value": "10s" + }, + { + "selected": false, + "text": "30s", + "value": "30s" + }, + { + "selected": true, + "text": "1m", + "value": "1m" + }, + { + "selected": false, + "text": "5m", + "value": "5m" + }, + { + "selected": false, + "text": "10m", + "value": "10m" + }, + { + "selected": false, + "text": "30m", + "value": "30m" + }, + { + "selected": false, + "text": "1h", + "value": "1h" + }, + { + "selected": false, + "text": "6h", + "value": "6h" + }, + { + "selected": false, + "text": "12h", + "value": "12h" + }, + { + "selected": false, + "text": "1d", + "value": "1d" + }, + { + "selected": false, + "text": "7d", + "value": "7d" + }, + { + "selected": false, + "text": "14d", + "value": "14d" + }, + { + "selected": false, + "text": "30d", + "value": "30d" + } + ], + "query": "1s,10s,30s,1m,5m,10m,30m,1h,6h,12h,1d,7d,14d,30d", + "refresh": 2, + "type": "interval" + } + ] + }, + "time": { + "from": "now-1h", + "to": "now" + }, + "timepicker": { + "refresh_intervals": [ + "5s", + "10s", + "30s", + "1m", + "5m", + "15m", + "30m", + 
"1h", + "2h", + "1d" + ], + "time_options": [ + "5m", + "15m", + "1h", + "6h", + "12h", + "24h", + "2d", + "7d", + "30d" + ] + }, + "timezone": "", + "title": "Grafana Fake Data Gen - PostgreSQL", + "uid": "JYola5qzz", + "version": 1 +} \ No newline at end of file diff --git a/docker/blocks/postgres/docker-compose.yaml b/docker/blocks/postgres/docker-compose.yaml index eced00aafeb..566df7b8877 100644 --- a/docker/blocks/postgres/docker-compose.yaml +++ b/docker/blocks/postgres/docker-compose.yaml @@ -7,3 +7,10 @@ ports: - "5432:5432" command: postgres -c log_connections=on -c logging_collector=on -c log_destination=stderr -c log_directory=/var/log/postgresql + + fake-postgres-data: + image: grafana/fake-data-gen + network_mode: bridge + environment: + FD_DATASOURCE: postgres + FD_PORT: 5432 \ No newline at end of file diff --git a/docker/blocks/prometheus/Dockerfile b/docker/blocks/prometheus/Dockerfile index 1ad28f524ff..2098e6527d3 100644 --- a/docker/blocks/prometheus/Dockerfile +++ b/docker/blocks/prometheus/Dockerfile @@ -1,3 +1,3 @@ -FROM prom/prometheus +FROM prom/prometheus:v1.8.2 ADD prometheus.yml /etc/prometheus/ ADD alert.rules /etc/prometheus/ diff --git a/docker/blocks/prometheus/docker-compose.yaml b/docker/blocks/prometheus/docker-compose.yaml index ccb1238a179..a65bb9a9e4f 100644 --- a/docker/blocks/prometheus/docker-compose.yaml +++ b/docker/blocks/prometheus/docker-compose.yaml @@ -23,3 +23,9 @@ network_mode: host ports: - "9093:9093" + + prometheus-random-data: + build: blocks/prometheus_random_data + network_mode: host + ports: + - "8080:8080" diff --git a/docker/blocks/prometheus/prometheus.yml b/docker/blocks/prometheus/prometheus.yml index ae40dfdf067..2a6579e691e 100644 --- a/docker/blocks/prometheus/prometheus.yml +++ b/docker/blocks/prometheus/prometheus.yml @@ -25,11 +25,15 @@ scrape_configs: - job_name: 'node_exporter' static_configs: - targets: ['127.0.0.1:9100'] - + - job_name: 'fake-data-gen' static_configs: - targets: 
['127.0.0.1:9091'] - + - job_name: 'grafana' static_configs: - targets: ['127.0.0.1:3000'] + + - job_name: 'prometheus-random-data' + static_configs: + - targets: ['127.0.0.1:8080'] diff --git a/docker/blocks/prometheus2/Dockerfile b/docker/blocks/prometheus2/Dockerfile index d4a9eb2d75d..03edf4c9ee2 100644 --- a/docker/blocks/prometheus2/Dockerfile +++ b/docker/blocks/prometheus2/Dockerfile @@ -1,3 +1,3 @@ -FROM prom/prometheus:v2.0.0 +FROM prom/prometheus:v2.2.0 ADD prometheus.yml /etc/prometheus/ ADD alert.rules /etc/prometheus/ diff --git a/docker/blocks/prometheus2/docker-compose.yaml b/docker/blocks/prometheus2/docker-compose.yaml new file mode 100644 index 00000000000..68c0358b7d0 --- /dev/null +++ b/docker/blocks/prometheus2/docker-compose.yaml @@ -0,0 +1,31 @@ + prometheus: + build: blocks/prometheus2 + network_mode: host + ports: + - "9090:9090" + + node_exporter: + image: prom/node-exporter + network_mode: host + ports: + - "9100:9100" + + fake-prometheus-data: + image: grafana/fake-data-gen + network_mode: host + ports: + - "9091:9091" + environment: + FD_DATASOURCE: prom + + alertmanager: + image: quay.io/prometheus/alertmanager + network_mode: host + ports: + - "9093:9093" + + prometheus-random-data: + build: blocks/prometheus_random_data + network_mode: host + ports: + - "8080:8080" diff --git a/docker/blocks/prometheus2/prometheus.yml b/docker/blocks/prometheus2/prometheus.yml index 83dda78bb3c..57232aaa439 100644 --- a/docker/blocks/prometheus2/prometheus.yml +++ b/docker/blocks/prometheus2/prometheus.yml @@ -25,11 +25,15 @@ scrape_configs: - job_name: 'node_exporter' static_configs: - targets: ['127.0.0.1:9100'] - + - job_name: 'fake-data-gen' static_configs: - targets: ['127.0.0.1:9091'] - + - job_name: 'grafana' static_configs: - targets: ['127.0.0.1:3000'] + + - job_name: 'prometheus-random-data' + static_configs: + - targets: ['127.0.0.1:8080'] diff --git a/docker/blocks/prometheus_random_data/Dockerfile 
b/docker/blocks/prometheus_random_data/Dockerfile new file mode 100644 index 00000000000..3aad497c94d --- /dev/null +++ b/docker/blocks/prometheus_random_data/Dockerfile @@ -0,0 +1,18 @@ +# This Dockerfile builds an image for a client_golang example. + +# Builder image, where we build the example. +FROM golang:1.9.0 AS builder +# Download prometheus/client_golang/examples/random first +RUN go get github.com/prometheus/client_golang/examples/random +WORKDIR /go/src/github.com/prometheus/client_golang +WORKDIR /go/src/github.com/prometheus/client_golang/prometheus +RUN go get -d +WORKDIR /go/src/github.com/prometheus/client_golang/examples/random +RUN CGO_ENABLED=0 GOOS=linux go build -a -tags netgo -ldflags '-w' + +# Final image. +FROM scratch +LABEL maintainer "The Prometheus Authors " +COPY --from=builder /go/src/github.com/prometheus/client_golang/examples/random . +EXPOSE 8080 +ENTRYPOINT ["/random"] diff --git a/docs/Dockerfile b/docs/Dockerfile index 7679f2b7e4b..faf90ea0ecd 100644 --- a/docs/Dockerfile +++ b/docs/Dockerfile @@ -9,5 +9,6 @@ FROM grafana/docs-base:latest COPY config.toml /site COPY awsconfig /site +COPY versions.json /site/static/js VOLUME ["/site/content"] diff --git a/docs/VERSION b/docs/VERSION index b570a0ac21b..5e0a0f1d665 100644 --- a/docs/VERSION +++ b/docs/VERSION @@ -1 +1 @@ -v4.3 +v5.0 diff --git a/docs/sources/administration/cli.md b/docs/sources/administration/cli.md index 2be1881cfab..3dc9b0cf3b4 100644 --- a/docs/sources/administration/cli.md +++ b/docs/sources/administration/cli.md @@ -10,17 +10,17 @@ weight = 8 # Grafana CLI -Grafana cli is a small executable that is bundled with grafana server and is suppose to be executed on the same machine as grafana runs. +Grafana cli is a small executable that is bundled with Grafana-server and is supposed to be executed on the same machine Grafana-server is running on. ## Plugins -The CLI helps you install, upgrade and manage your plugins on the same machine it CLI is running. 
-You can find more information about how to install and manage your plugins at the -[plugin page]({{< relref "plugins/installation.md" >}}). +The CLI allows you to install, upgrade and manage your plugins on the machine it is running on. +You can find more information about how to install and manage your plugins in the +[plugins page]({{< relref "plugins/installation.md" >}}). ## Admin -> This feature is only available in grafana 4.1 and above. +> This feature is only available in Grafana 4.1 and above. To show all admin commands: `grafana-cli admin` @@ -39,7 +39,7 @@ then there are two flags that can be used to set homepath and the config file pa `grafana-cli admin reset-admin-password --homepath "/usr/share/grafana" newpass` -If you have not lost the admin password then it is better to set in the Grafana UI. If you need to set the password in a script then the [Grafana API](http://docs.grafana.org/http_api/user/#change-password) can be used. Here is an example with curl using basic auth: +If you have not lost the admin password then it is better to set in the Grafana UI. If you need to set the password in a script then the [Grafana API](http://docs.grafana.org/http_api/user/#change-password) can be used. Here is an example using curl with basic auth: ```bash curl -X PUT -H "Content-Type: application/json" -d '{ diff --git a/docs/sources/administration/metrics.md b/docs/sources/administration/metrics.md index 56d0290bc82..5eed2b7ce15 100644 --- a/docs/sources/administration/metrics.md +++ b/docs/sources/administration/metrics.md @@ -10,6 +10,6 @@ weight = 8 # Internal metrics -Grafana collects some metrics about it self internally. Currently Grafana supports pushing metrics to graphite and exposing them to be scraped by Prometheus. +Grafana collects some metrics about itself internally. Currently, Grafana supports pushing metrics to Graphite or exposing them to be scraped by Prometheus. 
-To enabled internal metrics you have to enable it under the [metrics] section in your [grafana.ini](http://docs.grafana.org/installation/configuration/#enabled-6) config file.If you want to push metrics to graphite you have also have to configure the [metrics.graphite](http://docs.grafana.org/installation/configuration/#metrics-graphite) section. +To emit internal metrics you have to enable the option under the [metrics] section in your [grafana.ini](http://docs.grafana.org/installation/configuration/#enabled-6) config file. If you want to push metrics to Graphite, you must also configure the [metrics.graphite](http://docs.grafana.org/installation/configuration/#metrics-graphite) section. diff --git a/docs/sources/administration/permissions.md b/docs/sources/administration/permissions.md new file mode 100644 index 00000000000..e7b84a417c0 --- /dev/null +++ b/docs/sources/administration/permissions.md @@ -0,0 +1,118 @@ ++++ +title = "Permissions" +description = "Grafana user permissions" +keywords = ["grafana", "configuration", "documentation", "admin", "users", "permissions"] +type = "docs" +aliases = ["/reference/admin"] +[menu.docs] +name = "Permissions" +parent = "admin" +weight = 3 ++++ + +# Permissions + +Grafana users have permissions that are determined by their: + +- **Organization Role** (Admin, Editor, Viewer) +- Via **Team** memberships where the **Team** has been assigned specific permissions. +- Via permissions assigned directly to user (on folders or dashboards) +- The Grafana Admin (i.e. Super Admin) user flag. + +## Organization Roles + +Users can belong to one or more organizations. A user's organization membership is tied to a role that defines what the user is allowed to do +in that organization. + +### Admin Role + +Can do everything scoped to the organization. For example: + +- Add & Edit data sources. +- Add & Edit organization users & teams. +- Configure App plugins & set org settings. 
+ +### Editor Role + +- Can create and modify dashboards & alert rules. This can be disabled on specific folders and dashboards. +- **Cannot** create or edit data sources nor invite new users. + +### Viewer Role + +- View any dashboard. This can be disabled on specific folders and dashboards. +- **Cannot** create or edit dashboards nor data sources. + +This role can be tweaked via Grafana server setting [viewers_can_edit]({{< relref "installation/configuration.md#viewers-can-edit" >}}). If you set this to true users +with **Viewer** can also make transient dashboard edits, meaning they can modify panels & queries but not save the changes (nor create new dashboards). +Useful for public Grafana installations where you want anonymous users to be able to edit panels & queries but not save or create new dashboards. + +## Grafana Admin + +This admin flag makes a user a `Super Admin`. This means they can access the `Server Admin` views where all users and organizations can be administrated. + +### Dashboard & Folder Permissions + +> Introduced in Grafana v5.0 + +{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="500px" class="docs-image--right" >}} + +For dashboards and dashboard folders there is a **Permissions** page that makes it possible to +remove the default role based permissions for Editors and Viewers. It's here you can add and assign permissions to specific **Users** and **Teams**. + +You can assign & remove permissions for **Organization Roles**, **Users** and **Teams**. + +Permission levels: + +- **Admin**: Can edit & create dashboards and edit permissions. +- **Edit**: Can edit & create dashboards. **Cannot** edit folder/dashboard permissions. +- **View**: Can only view existing dashboards/folders. + +#### Restricting Access + +The highest permission always wins so if you for example want to hide a folder or dashboard from others you need to remove the **Organization Role** based permission from the Access Control List (ACL). 
+ +- You cannot override permissions for users with the **Org Admin Role**. Admins always have access to everything. +- A more specific permission with a lower permission level will not have any effect if a more general rule exists with higher permission level. You need to remove or lower the permission level of the more general rule. + +#### How Grafana Resolves Multiple Permissions - Examples + +##### Example 1 (`user1` has the Editor Role) + +Permissions for a dashboard: + +- `Everyone with Editor Role Can Edit` +- `user1 Can View` + +Result: `user1` has Edit permission as the highest permission always wins. + +##### Example 2 (`user1` has the Viewer Role and is a member of `team1`) + +Permissions for a dashboard: + +- `Everyone with Viewer Role Can View` +- `user1 Can Edit` +- `team1 Can Admin` + +Result: `user1` has Admin permission as the highest permission always wins. + +##### Example 3 + +Permissions for a dashboard: + +- `user1 Can Admin (inherited from parent folder)` +- `user1 Can Edit` + +Result: You cannot override to a lower permission. `user1` has Admin permission as the highest permission always wins. + +- **View**: Can only view existing dashboards/folders. +- You cannot override permissions for users with **Org Admin Role** +- A more specific permission with lower permission level will not have any effect if a more general rule exists with higher permission level. For example if "Everyone with Editor Role Can Edit" exists in the ACL list then **John Doe** will still have Edit permission even after you have specifically added a permission for this user with the permission set to **View**. You need to remove or lower the permission level of the more general rule. + +### Data source permissions + +Permissions on dashboards and folders **do not** include permissions on data sources. A user with `Viewer` role +can still issue any possible query to a data source, not just those queries that exist on dashboards he/she has access to. 
+We hope to add permissions on data sources in a future release. Until then **do not** view dashboard permissions as a secure +way to restrict user data access. Dashboard permissions only limits what dashboards & folders a user can view & edit not which +data sources a user can access nor what queries a user can issue. + diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index 3dd5c5fd1d3..135973df52a 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -3,6 +3,7 @@ title = "Provisioning" description = "" keywords = ["grafana", "provisioning"] type = "docs" +aliases = ["/installation/provisioning"] [menu.docs] parent = "admin" weight = 8 @@ -12,7 +13,7 @@ weight = 8 ## Config file -Checkout the [configuration](/installation/configuration) page for more information about what you can configure in `grafana.ini` +Checkout the [configuration](/installation/configuration) page for more information on what you can configure in `grafana.ini` ### Config file locations @@ -35,7 +36,7 @@ GF__ ``` Where the section name is the text within the brackets. Everything -should be upper case, `.` should be replaced by `_`. For example, given these configuration settings: +should be upper case and `.` should be replaced by `_`. For example, given these configuration settings: ```bash # default section @@ -48,7 +49,7 @@ admin_user = admin client_secret = 0ldS3cretKey ``` -Then you can override them using: +Overriding will be done like so: ```bash export GF_DEFAULT_INSTANCE_NAME=my-instance @@ -60,34 +61,36 @@ export GF_AUTH_GOOGLE_CLIENT_SECRET=newS3cretKey ## Configuration management tools -Currently we do not provide any scripts/manifests for configuring Grafana. Rather then spending time learning and creating scripts/manifests for each tool, we think our time is better spent making Grafana easier to provision. Therefor, we heavily relay on the expertise of he community. 
+Currently we do not provide any scripts/manifests for configuring Grafana. Rather than spending time learning and creating scripts/manifests for each tool, we think our time is better spent making Grafana easier to provision. Therefore, we heavily rely on the expertise of the community. Tool | Project -----|------------ Puppet | [https://forge.puppet.com/puppet/grafana](https://forge.puppet.com/puppet/grafana) Ansible | [https://github.com/cloudalchemy/ansible-grafana](https://github.com/cloudalchemy/ansible-grafana) -Ansible | [https://github.com/picotrading/ansible-grafana](https://github.com/picotrading/ansible-grafana) Chef | [https://github.com/JonathanTron/chef-grafana](https://github.com/JonathanTron/chef-grafana) Saltstack | [https://github.com/salt-formulas/salt-formula-grafana](https://github.com/salt-formulas/salt-formula-grafana) -## Datasources +## Datasources > This feature is available from v5.0 It's possible to manage datasources in Grafana by adding one or more yaml config files in the [`provisioning/datasources`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `datasources` that will be added or updated during start up. If the datasource already exists, Grafana will update it to match the configuration file. The config file can also contain a list of datasources that should be deleted. That list is called `delete_datasources`. Grafana will delete datasources listed in `delete_datasources` before inserting/updating those in the `datasource` list. -### Running multiple grafana instances. -If you are running multiple instances of Grafana you might run into problems if they have different versions of the datasource.yaml configuration file. The best way to solve this problem is to add a version number to each datasource in the configuration and increase it when you update the config. Grafana will only update datasources with the same or lower version number than specified in the config. 
That way old configs cannot overwrite newer configs if they restart at the same time. +### Running multiple Grafana instances. +If you are running multiple instances of Grafana you might run into problems if they have different versions of the `datasource.yaml` configuration file. The best way to solve this problem is to add a version number to each datasource in the configuration and increase it when you update the config. Grafana will only update datasources with the same or lower version number than specified in the config. That way, old configs cannot overwrite newer configs if they restart at the same time. ### Example datasource config file ```yaml -# list of datasources that should be deleted from the database -delete_datasources: - - name: Graphite - org_id: 1 +# config file version +apiVersion: 1 -# list of datasources to insert/update depending -# whats available in the datbase +# list of datasources that should be deleted from the database +deleteDatasources: + - name: Graphite + orgId: 1 + +# list of datasources to insert/update depending +# whats available in the database datasources: # name of the datasource. Required - name: Graphite @@ -95,8 +98,8 @@ datasources: type: graphite # access mode. direct or proxy. Required access: proxy - # org id. will default to org_id 1 if not specified - org_id: 1 + # org id. will default to orgId 1 if not specified + orgId: 1 # url url: http://localhost:8080 # database password, if used @@ -106,22 +109,22 @@ datasources: # database name, if used database: # enable/disable basic auth - basic_auth: + basicAuth: # basic auth username - basic_auth_user: + basicAuthUser: # basic auth password - basic_auth_password: + basicAuthPassword: # enable/disable with credentials headers - with_credentials: + withCredentials: # mark as default datasource. 
Max one per org - is_default: + isDefault: # fields that will be converted to json and stored in json_data - json_data: + jsonData: graphiteVersion: "1.1" tlsAuth: true tlsAuthWithCACert: true # json object of data that will be encrypted. - secure_json_data: + secureJsonData: tlsCACert: "..." tlsClientCert: "..." tlsClientKey: "..." @@ -132,53 +135,72 @@ datasources: #### Json data -Since all datasources dont have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use. +Since not all datasources have the same configuration settings we only have the most common ones as fields. The rest should be stored as a json blob in the `json_data` field. Here are the most common settings that the core datasources use. | Name | Type | Datasource |Description | | ----| ---- | ---- | --- | | tlsAuth | boolean | *All* | Enable TLS authentication using client cert configured in secure json data | | tlsAuthWithCACert | boolean | *All* | Enable TLS authtication using CA cert | +| tlsSkipVerify | boolean | *All* | Controls whether a client verifies the server's certificate chain and host name. | | graphiteVersion | string | Graphite | Graphite version | | timeInterval | string | Elastic, Influxdb & Prometheus | Lowest interval/step value that should be used for this data source | -| esVersion | string | Elastic | Elasticsearch version | -| timeField | string | Elastic | Which field that should be used as timestamp | +| esVersion | string | Elastic | Elasticsearch version as a number (2/5/56) | +| timeField | string | Elastic | Which field that should be used as timestamp | | interval | string | Elastic | Index date time format | | authType | string | Cloudwatch | Auth provider. 
keys/credentials/arn | -| assumeRoleArn | string | Cloudwatch | ARN of Assume Role | +| assumeRoleArn | string | Cloudwatch | ARN of Assume Role | | defaultRegion | string | Cloudwatch | AWS region | -| customMetricsNamespaces | string | Cloudwatch | Namespaces of Custom Metrics | +| customMetricsNamespaces | string | Cloudwatch | Namespaces of Custom Metrics | | tsdbVersion | string | OpenTsdb | Version | | tsdbResolution | string | OpenTsdb | Resolution | -| sslmode | string | Postgre | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' | +| sslmode | string | Postgre | SSLmode. 'disable', 'require', 'verify-ca' or 'verify-full' | #### Secure Json data -{"authType":"keys","defaultRegion":"us-west-2","timeField":"@timestamp"} +`{"authType":"keys","defaultRegion":"us-west-2","timeField":"@timestamp"}` -Secure json data is a map of settings that will be encrypted with [secret key](/installation/configuration/#secret-key) from the grafana config. The purpose of this is only to hide content from the users of the application. This should be used for storing TLS Cert and password that Grafana will append to request on the server side. All these settings are optional. +Secure json data is a map of settings that will be encrypted with [secret key](/installation/configuration/#secret-key) from the Grafana config. The purpose of this is only to hide content from the users of the application. This should be used for storing TLS Cert and password that Grafana will append to the request on the server side. All of these settings are optional. 
| Name | Type | Datasource | Description | | ----| ---- | ---- | --- | | tlsCACert | string | *All* |CA cert for out going requests | | tlsClientCert | string | *All* |TLS Client cert for outgoing requests | | tlsClientKey | string | *All* |TLS Client key for outgoing requests | -| password | string | Postgre | password | -| user | string | Postgre | user | +| password | string | Postgre | password | +| user | string | Postgre | user | +| accessKey | string | Cloudwatch | Access key for connecting to Cloudwatch | +| secretKey | string | Cloudwatch | Secret key for connecting to Cloudwatch | ### Dashboards -It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into grafana. Currently we only support reading dashboards from file but we will add more providers in the future. +It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into Grafana from the local filesystem. -The dashboard provider config file looks like this +The dashboard provider config file looks somewhat like this: ```yaml +apiVersion: 1 + +providers: - name: 'default' - org_id: 1 + orgId: 1 folder: '' type: file + disableDeletion: false + editable: false options: - folder: /var/lib/grafana/dashboards + path: /var/lib/grafana/dashboards ``` -When grafana starts it will update/insert all dashboards available in the configured folders. If you modify the file the dashboard will also be updated. \ No newline at end of file +When Grafana starts, it will update/insert all dashboards available in the configured path. 
It will then poll that path, looking for updated json files, and update/insert those into the database. +
+### Reusable dashboard urls
+
+If the dashboard in the json file contains an [uid](/reference/dashboard/#json-fields), Grafana will force insert/update on that uid. This allows you to migrate dashboards between Grafana instances and provision Grafana from configuration without breaking the urls given since the new dashboard url uses the uid as identifier.
+When Grafana starts, it will update/insert all dashboards available in the configured folders. If you modify the file, the dashboard will also be updated.
+By default Grafana will delete dashboards in the database if the file is removed. You can disable this behavior using the `disableDeletion` setting.
+
+> **Note.** Provisioning allows you to overwrite existing dashboards
+> which leads to problems if you re-use settings that are supposed to be unique.
+> Be careful not to re-use the same `title` multiple times within a folder
+> or `uid` within the same installation as this will cause weird behaviours.
diff --git a/docs/sources/alerting/metrics.md b/docs/sources/alerting/metrics.md index 02fee6a718a..271e43f4ecf 100644 --- a/docs/sources/alerting/metrics.md +++ b/docs/sources/alerting/metrics.md @@ -13,7 +13,7 @@ weight = 2 > Alerting is only available in Grafana v4.0 and above. -The alert engine publishes some internal metrics about itself. You can read more about how Grafana published [internal metrics](/installation/configuration/#metrics). +The alert engine publishes some internal metrics about itself. You can read more about how Grafana publishes [internal metrics](/installation/configuration/#metrics). 
Description | Type | Metric name ---------- | ----------- | ---------- diff --git a/docs/sources/alerting/notifications.md b/docs/sources/alerting/notifications.md index a0673aaea98..453d169457b 100644 --- a/docs/sources/alerting/notifications.md +++ b/docs/sources/alerting/notifications.md @@ -14,23 +14,23 @@ weight = 2 > Alerting is only available in Grafana v4.0 and above. -When an alert changes state it sends out notifications. Each alert rule can have -multiple notifications. But in order to add a notification to an alert rule you first need -to add and configure a `notification` channel (can be email, Pagerduty or other integration). This is done from the Notification Channels page. +When an alert changes state, it sends out notifications. Each alert rule can have +multiple notifications. In order to add a notification to an alert rule you first need +to add and configure a `notification` channel (can be email, PagerDuty or other integration). This is done from the Notification Channels page. ## Notification Channel Setup -{{< imgbox max-width="40%" img="/img/docs/v43/alert_notifications_menu.png" caption="Alerting Notification Channels" >}} +{{< imgbox max-width="30%" img="/img/docs/v50/alerts_notifications_menu.png" caption="Alerting Notification Channels" >}} On the Notification Channels page hit the `New Channel` button to go the page where you can configure and setup a new Notification Channel. -You specify name and type, and type specific options. You can also test the notification to make -sure it's working and setup correctly. +You specify a name and a type, and type specific options. You can also test the notification to make +sure it's setup correctly. ### Send on all alerts -When checked this option will make this notification used for all alert rules, existing and new. +When checked, this option will notify for all alert rules - existing and new. 
## Supported Notification Types @@ -38,39 +38,39 @@ Grafana ships with the following set of notification types: ### Email -To enable email notification you have to setup [SMTP settings](/installation/configuration/#smtp) -in the Grafana config. Email notification will upload an image of the alert graph to an -external image destination if available or fallback to attaching the image in the email. +To enable email notifications you have to setup [SMTP settings](/installation/configuration/#smtp) +in the Grafana config. Email notifications will upload an image of the alert graph to an +external image destination if available or fallback to attaching the image to the email. ### Slack {{< imgbox max-width="40%" img="/img/docs/v4/slack_notification.png" caption="Alerting Slack Notification" >}} -To set up slack you need to configure an incoming webhook url at slack. You can follow their guide for how -to do that https://api.slack.com/incoming-webhooks If you want to include screenshots of the firing alerts -in the slack messages you have to configure either the [external image destination](#external-image-store) in Grafana, +To set up slack you need to configure an incoming webhook url at slack. You can follow their guide on how +to do that [here](https://api.slack.com/incoming-webhooks). If you want to include screenshots of the firing alerts +in the Slack messages you have to configure either the [external image destination](#external-image-store) in Grafana, or a bot integration via Slack Apps. Follow Slack's guide to set up a bot integration and use the token provided -https://api.slack.com/bot-users, which starts with "xoxb". +(https://api.slack.com/bot-users), which starts with "xoxb". Setting | Description ---------- | ----------- -Recipient | allows you to override the slack recipient. -Mention | make it possible to include a mention in the slack notification sent by Grafana. Ex @here or @channel +Recipient | allows you to override the Slack recipient. 
+Mention | make it possible to include a mention in the Slack notification sent by Grafana. Ex @here or @channel Token | If provided, Grafana will upload the generated image via Slack's file.upload API method, not the external image destination. ### PagerDuty -To set up PagerDuty, all you have to do is to provide an api key. +To set up PagerDuty, all you have to do is to provide an API key. Setting | Description ---------- | ----------- -Integration Key | Integration key for pagerduty. -Auto resolve incidents | Resolve incidents in pagerduty once the alert goes back to ok +Integration Key | Integration key for PagerDuty. +Auto resolve incidents | Resolve incidents in PagerDuty once the alert goes back to ok ### Webhook -The webhook notification is a simple way to send information about an state change over HTTP to a custom endpoint. -Using this notification you could integrate Grafana into any system you choose, by yourself. +The webhook notification is a simple way to send information about a state change over HTTP to a custom endpoint. +Using this notification you could integrate Grafana into a system of your choosing. Example json body: @@ -117,19 +117,19 @@ Dingtalk supports the following "message type": `text`, `link` and `markdown`. O ### Kafka -Notifications can be sent to a Kafka topic from Grafana using [Kafka REST Proxy](https://docs.confluent.io/1.0/kafka-rest/docs/index.html). -There are couple of configurations options which need to be set in Grafana UI under Kafka Settings: +Notifications can be sent to a Kafka topic from Grafana using the [Kafka REST Proxy](https://docs.confluent.io/1.0/kafka-rest/docs/index.html). +There are a couple of configuration options which need to be set up in Grafana UI under Kafka Settings: 1. Kafka REST Proxy endpoint. 2. Kafka Topic. -Once these two properties are set, you can send the alerts to Kafka for further processing or throttling them. 
+Once these two properties are set, you can send the alerts to Kafka for further processing or throttling. ### All supported notifier -Name | Type |Support images -----|------------ | ------ +Name | Type |Support images +-----|------------ | ------ Slack | `slack` | yes Pagerduty | `pagerduty` | yes Email | `email` | yes @@ -149,14 +149,16 @@ Prometheus Alertmanager | `prometheus-alertmanager` | no # Enable images in notifications {#external-image-store} -Grafana can render the panel associated with the alert rule and include that in the notification. Most Notification Channels require that this image be publicly accessible (Slack and PagerDuty for example). In order to include images in alert notifications, Grafana can upload the image to an image store. It currently supports -Amazon S3, Webdav, and Azure Blob Storage for this. So to set that up you need to configure the [external image uploader](/installation/configuration/#external-image-storage) in your grafana-server ini config file. +Grafana can render the panel associated with the alert rule and include that in the notification. Most Notification Channels require that this image be publicly accessible (Slack and PagerDuty for example). In order to include images in alert notifications, Grafana can upload the image to an image store. It currently supports +Amazon S3, Webdav, Google Cloud Storage and Azure Blob Storage. So to set that up you need to configure the [external image uploader](/installation/configuration/#external-image-storage) in your grafana-server ini config file. + +Be aware that some notifiers require public access to the image to be able to include it in the notification. So make sure to enable public access to the images. If you're using the local image uploader, your Grafana instance needs to be accessible from the internet. Currently only the Email Channels attaches images if no external image store is specified. 
To include images in alert notifications for other channels then you need to set up an external image store. -This is an optional requirement, you can get Slack and email notifications without setting this up. +This is an optional requirement. You can get Slack and email notifications without setting this up. # Configure the link back to Grafana from alert notifications -All alert notifications contains a link back to the triggered alert in the Grafana instance. +All alert notifications contain a link back to the triggered alert in the Grafana instance. This url is based on the [domain](/installation/configuration/#domain) setting in Grafana. diff --git a/docs/sources/alerting/rules.md b/docs/sources/alerting/rules.md index bd5b95da856..9bbbd70641d 100644 --- a/docs/sources/alerting/rules.md +++ b/docs/sources/alerting/rules.md @@ -59,7 +59,7 @@ avg() OF query(A, 5m, now) IS BELOW 14 ``` - `avg()` Controls how the values for **each** series should be reduced to a value that can be compared against the threshold. Click on the function to change it to another aggregation function. -- `query(A, 5m, now)` The letter defines what query to execute from the **Metrics** tab. The second two parameters define the time range, `5m, now` means 5 minutes from now to now. You can also do `10m, now-2m` to define a time range that will be 10 minutes from now to 2 minutes from now. This is useful if you want to ignore the last 2 minutes of data. +- `query(A, 5m, now)` The letter defines what query to execute from the **Metrics** tab. The second two parameters define the time range, `5m, now` means 5 minutes ago to now. You can also do `10m, now-2m` to define a time range that will be 10 minutes ago to 2 minutes ago. This is useful if you want to ignore the last 2 minutes of data. - `IS BELOW 14` Defines the type of threshold and the threshold value. You can click on `IS BELOW` to change the type of threshold. The query used in an alert rule cannot contain any template variables. 
Currently we only support `AND` and `OR` operators between conditions and they are executed serially. diff --git a/docs/sources/features/datasources/cloudwatch.md b/docs/sources/features/datasources/cloudwatch.md index 648957ed96e..f7f8138b5e9 100644 --- a/docs/sources/features/datasources/cloudwatch.md +++ b/docs/sources/features/datasources/cloudwatch.md @@ -13,7 +13,7 @@ weight = 10 # Using AWS CloudWatch in Grafana -Grafana ships with built in support for CloudWatch. You just have to add it as a data source and you will be ready to build dashboards for you CloudWatch metrics. +Grafana ships with built in support for CloudWatch. You just have to add it as a data source and you will be ready to build dashboards for your CloudWatch metrics. ## Adding the data source to Grafana @@ -87,7 +87,7 @@ Name | Description *namespaces()* | Returns a list of namespaces CloudWatch support. *metrics(namespace, [region])* | Returns a list of metrics in the namespace. (specify region or use "default" for custom metrics) *dimension_keys(namespace)* | Returns a list of dimension keys in the namespace. -*dimension_values(region, namespace, metric, dimension_key)* | Returns a list of dimension values matching the specified `region`, `namespace`, `metric` and `dimension_key`. +*dimension_values(region, namespace, metric, dimension_key, [filters])* | Returns a list of dimension values matching the specified `region`, `namespace`, `metric`, `dimension_key` or you can use dimension `filters` to get more specific result as well. *ebs_volume_ids(region, instance_id)* | Returns a list of volume ids matching the specified `region`, `instance_id`. *ec2_instance_attribute(region, attribute_name, filters)* | Returns a list of attributes matching the specified `region`, `attribute_name`, `filters`. 
@@ -104,6 +104,7 @@ Query | Service *dimension_values(us-east-1,AWS/Redshift,CPUUtilization,ClusterIdentifier)* | RedShift *dimension_values(us-east-1,AWS/RDS,CPUUtilization,DBInstanceIdentifier)* | RDS *dimension_values(us-east-1,AWS/S3,BucketSizeBytes,BucketName)* | S3 +*dimension_values(us-east-1,CWAgent,disk_used_percent,device,{"InstanceId":"$instance_id"})* | CloudWatch Agent ## ec2_instance_attribute examples diff --git a/docs/sources/features/datasources/prometheus.md b/docs/sources/features/datasources/prometheus.md index 15247ba5ebd..c9bb16441ca 100644 --- a/docs/sources/features/datasources/prometheus.md +++ b/docs/sources/features/datasources/prometheus.md @@ -93,7 +93,7 @@ queries via the Dashboard menu / Annotations view. Prometheus supports two ways to query annotations. - A regular metric query -- A Prometheus query for pending and firing alerts (for details see [Inspecting alerts during runtime](https://prometheus.io/docs/alerting/rules/#inspecting-alerts-during-runtime)) +- A Prometheus query for pending and firing alerts (for details see [Inspecting alerts during runtime](https://prometheus.io/docs/prometheus/latest/configuration/alerting_rules/#inspecting-alerts-during-runtime)) The step option is useful to limit the number of events returned from your query. 
diff --git a/docs/sources/features/index.md b/docs/sources/features/index.md index 1352e9f0576..d7fae41682b 100644 --- a/docs/sources/features/index.md +++ b/docs/sources/features/index.md @@ -5,7 +5,7 @@ type = "docs" [menu.docs] name = "Features" identifier = "features" -weight = 3 +weight = 4 +++ diff --git a/docs/sources/features/shortcuts.md b/docs/sources/features/shortcuts.md index caad521446e..cbcf3670c83 100644 --- a/docs/sources/features/shortcuts.md +++ b/docs/sources/features/shortcuts.md @@ -8,7 +8,7 @@ weight = 7 # Keyboard shortcuts -{{< docs-imagebox img="/img/docs/v4/shortcuts.png" max-width="20rem" class="docs-image--right" >}} +{{< docs-imagebox img="/img/docs/v50/shortcuts.png" max-width="20rem" class="docs-image--right" >}} Grafana v4 introduces a number of really powerful keyboard shortcuts. You can now focus a panel by hovering over it with your mouse. With a panel focused you can simple hit `e` to toggle panel @@ -34,6 +34,8 @@ Hit `?` on your keyboard to open the shortcuts help modal. - `d` `s` Dashboard settings - `d` `v` Toggle in-active / view mode - `d` `k` Toggle kiosk mode (hides top nav) +- `d` `E` Expand all rows +- `d` `C` Collapse all rows - `mod+o` Toggle shared graph crosshair ### Focused Panel @@ -42,12 +44,9 @@ Hit `?` on your keyboard to open the shortcuts help modal. 
- `p` `s` Open Panel Share Modal - `p` `r` Remove Panel -### Focused Row -- `r` `c` Collapse Row -- `r` `r` Remove Row - ### Time Range - `t` `z` Zoom out time range - `t` Move time range back - `t` Move time range forward +mod = CTRL on windows or linux and CMD key on Mac diff --git a/docs/sources/guides/basic_concepts.md b/docs/sources/guides/basic_concepts.md index cb64b105349..b710a227a79 100644 --- a/docs/sources/guides/basic_concepts.md +++ b/docs/sources/guides/basic_concepts.md @@ -7,6 +7,7 @@ type = "docs" name = "Basic Concepts" identifier = "basic_concepts" parent = "guides" +weight = 2 +++ # Basic Concepts diff --git a/docs/sources/guides/getting_started.md b/docs/sources/guides/getting_started.md index 5c02042481f..f724504156f 100644 --- a/docs/sources/guides/getting_started.md +++ b/docs/sources/guides/getting_started.md @@ -8,6 +8,7 @@ aliases = ["/guides/gettingstarted"] name = "Getting Started" identifier = "getting_started_guide" parent = "guides" +weight = 1 +++ # Getting started @@ -24,38 +25,38 @@ Read the [Basic Concepts](/guides/basic_concepts) document to get a crash course ### Top header -Let's start with creating a new Dashboard. You can find the new Dashboard link on the right side of the Dashboard picker. You now have a blank Dashboard. +Let's start with creating a new Dashboard. You can find the new Dashboard link on the right side of the Dashboard picker. You now have a blank Dashboard. - + The image above shows you the top header for a Dashboard. 1. Side menubar toggle: This toggles the side menu, allowing you to focus on the data presented in the dashboard. The side menu provides access to features unrelated to a Dashboard such as Users, Organizations, and Data Sources. -2. Dashboard dropdown: This dropdown shows you which Dashboard you are currently viewing, and allows you to easily switch to a new Dashboard. From here you can also create a new Dashboard, Import existing Dashboards, and manage Dashboard playlists. -3. 
Star Dashboard: Star (or unstar) the current Dashboard. Starred Dashboards will show up on your own Home Dashboard by default, and are a convenient way to mark Dashboards that you're interested in. -4. Share Dashboard: Share the current dashboard by creating a link or create a static Snapshot of it. Make sure the Dashboard is saved before sharing. -5. Save dashboard: The current Dashboard will be saved with the current Dashboard name. -6. Settings: Manage Dashboard settings and features such as Templating and Annotations. +2. Dashboard dropdown: This dropdown shows you which Dashboard you are currently viewing, and allows you to easily switch to a new Dashboard. From here you can also create a new Dashboard or folder, Import existing Dashboards, and manage Dashboard playlists. +3. Add Panel: Adds a new panel to the current Dashboard +4. Star Dashboard: Star (or unstar) the current Dashboard. Starred Dashboards will show up on your own Home Dashboard by default, and are a convenient way to mark Dashboards that you're interested in. +5. Share Dashboard: Share the current dashboard by creating a link or create a static Snapshot of it. Make sure the Dashboard is saved before sharing. +6. Save dashboard: The current Dashboard will be saved with the current Dashboard name. +7. Settings: Manage Dashboard settings and features such as Templating and Annotations. -## Dashboards, Panels, Rows, the building blocks of Grafana... +## Dashboards, Panels, the building blocks of Grafana... -Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a number of Rows. Grafana ships with a variety of Panels. Grafana makes it easy to construct the right queries, and customize the display properties so that you can create the perfect Dashboard for your need. Each Panel can interact with data from any configured Grafana Data Source (currently InfluxDB, Graphite, OpenTSDB, Prometheus and Cloudwatch). 
The [Basic Concepts](/guides/basic_concepts) guide explores these key ideas in detail. +Dashboards are at the core of what Grafana is all about. Dashboards are composed of individual Panels arranged on a grid. Grafana ships with a variety of Panels. Grafana makes it easy to construct the right queries, and customize the display properties so that you can create the perfect Dashboard for your need. Each Panel can interact with data from any configured Grafana Data Source (currently InfluxDB, Graphite, OpenTSDB, Prometheus and Cloudwatch). The [Basic Concepts](/guides/basic_concepts) guide explores these key ideas in detail. - + 1. Zoom out time range 2. Time picker dropdown. Here you can access relative time range options, auto refresh options and set custom absolute time ranges. 3. Manual refresh button. Will cause all panels to refresh (fetch new data). -4. Row controls menu. Via this menu you can add panels to the row, set row height and more. -5. Dashboard panel. You edit panels by clicking the panel title. -6. Graph legend. You can change series colors, y-axis and series visibility directly from the legend. +4. Dashboard panel. You edit panels by clicking the panel title. +5. Graph legend. You can change series colors, y-axis and series visibility directly from the legend. ## Adding & Editing Graphs and Panels ![](/img/docs/v45/metrics_tab.png) -1. You add panels via row menu. The row menu is the icon to the left of each row. +1. You add panels by clicking the Add panel icon on the top menu. 2. To edit the graph you click on the graph title to open the panel menu, then `Edit`. 3. This should take you to the `Metrics` tab. In this tab you should see the editor for your default data source. @@ -63,7 +64,7 @@ When you click the `Metrics` tab, you are presented with a Query Editor that is ## Drag-and-Drop panels -You can Drag-and-Drop Panels within and between Rows. Click and hold the Panel title, and drag it to its new location. 
You can also easily resize panels by clicking the (-) and (+) icons. +You can Drag-and-Drop Panels by simply clicking and holding the Panel title, and drag it to its new location. You can also easily resize panels by clicking the (-) and (+) icons. ![](/img/docs/animated_gifs/drag_drop.gif) diff --git a/docs/sources/guides/index.md b/docs/sources/guides/index.md index c80dd624624..2ea09142880 100644 --- a/docs/sources/guides/index.md +++ b/docs/sources/guides/index.md @@ -4,6 +4,6 @@ type = "docs" [menu.docs] name = "Getting Started" identifier = "guides" -weight = 2 +weight = 3 +++ diff --git a/docs/sources/guides/whats-new-in-v2-1.md b/docs/sources/guides/whats-new-in-v2-1.md index 68da4f60226..2ad0e3356f0 100644 --- a/docs/sources/guides/whats-new-in-v2-1.md +++ b/docs/sources/guides/whats-new-in-v2-1.md @@ -3,11 +3,6 @@ title = "What's New in Grafana v2.1" description = "Feature & improvement highlights for Grafana v2.1" keywords = ["grafana", "new", "documentation", "2.1"] type = "docs" -[menu.docs] -name = "Version 2.1" -identifier = "v2.1" -parent = "whatsnew" -weight = 10 +++ # What's new in Grafana v2.1 diff --git a/docs/sources/guides/whats-new-in-v2-5.md b/docs/sources/guides/whats-new-in-v2-5.md index ff80ec1f4f4..90270ea1121 100644 --- a/docs/sources/guides/whats-new-in-v2-5.md +++ b/docs/sources/guides/whats-new-in-v2-5.md @@ -3,11 +3,6 @@ title = "What's New in Grafana v2.5" description = "Feature & improvement highlights for Grafana v2.5" keywords = ["grafana", "new", "documentation", "2.5"] type = "docs" -[menu.docs] -name = "Version 2.5" -identifier = "v2.5" -parent = "whatsnew" -weight = 9 +++ # What's new in Grafana v2.5 diff --git a/docs/sources/guides/whats-new-in-v2-6.md b/docs/sources/guides/whats-new-in-v2-6.md index 0b1e6688e60..b8996680ce6 100644 --- a/docs/sources/guides/whats-new-in-v2-6.md +++ b/docs/sources/guides/whats-new-in-v2-6.md @@ -3,11 +3,6 @@ title = "What's New in Grafana v2.6" description = "Feature & improvement highlights 
for Grafana v2.6" keywords = ["grafana", "new", "documentation", "2.6"] type = "docs" -[menu.docs] -name = "Version 2.6" -identifier = "v2.6" -parent = "whatsnew" -weight = 7 +++ # What's new in Grafana v2.6 diff --git a/docs/sources/guides/whats-new-in-v2.md b/docs/sources/guides/whats-new-in-v2.md index bd92128a12e..499849c8d83 100644 --- a/docs/sources/guides/whats-new-in-v2.md +++ b/docs/sources/guides/whats-new-in-v2.md @@ -3,11 +3,6 @@ title = "What's New in Grafana v2.0" description = "Feature & improvement highlights for Grafana v2.0" keywords = ["grafana", "new", "documentation", "2.0"] type = "docs" -[menu.docs] -name = "Version 2.0" -identifier = "v2.0" -parent = "whatsnew" -weight = 11 +++ # What's New in Grafana v2.0 diff --git a/docs/sources/guides/whats-new-in-v5.md b/docs/sources/guides/whats-new-in-v5.md new file mode 100644 index 00000000000..678f4cba22a --- /dev/null +++ b/docs/sources/guides/whats-new-in-v5.md @@ -0,0 +1,152 @@ ++++ +title = "What's New in Grafana v5.0" +description = "Feature & improvement highlights for Grafana v5.0" +keywords = ["grafana", "new", "documentation", "5.0"] +type = "docs" +[menu.docs] +name = "Version 5.0" +identifier = "v5.0" +parent = "whatsnew" +weight = -6 ++++ + +# What's New in Grafana v5.0 + +This is the most substantial update that Grafana has ever seen. This article will detail the major new features and enhancements. + +- [New Dashboard Layout Engine]({{< relref "#new-dashboard-layout-engine" >}}) enables a much easier drag, drop and resize experience and new types of layouts. +- [New UX]({{< relref "#new-ux-layout-engine" >}}). The UI has big improvements in both look and function. +- [New Light Theme]({{< relref "#new-light-theme" >}}) is now looking really nice. +- [Dashboard Folders]({{< relref "#dashboard-folders" >}}) helps you keep your dashboards organized. +- [Permissions]({{< relref "#dashboard-folders" >}}) on folders and dashboards helps manage larger Grafana installations. 
+- [Group users into teams]({{< relref "#teams" >}}) and use them in the new permission system. +- [Datasource provisioning]({{< relref "#data-sources" >}}) makes it possible to setup datasources via config files. +- [Dashboard provisioning]({{< relref "#dashboards" >}}) makes it possible to setup dashboards via config files. +- [Persistent dashboard url's]({{< relref "#dashboard-model-persistent-url-s-and-api-changes" >}}) makes it possible to rename dashboards without breaking links. +- [Graphite Tags & Integrated Function Docs]({{< relref "#graphite-tags-integrated-function-docs" >}}). + +### Video showing new features + + +
+ +## New Dashboard Layout Engine + +{{< docs-imagebox img="/img/docs/v50/new_grid.png" max-width="1000px" class="docs-image--right">}} + +The new dashboard layout engine allows for much easier movement and sizing of panels, as other panels now move out of the way in +a very intuitive way. Panels are sized independently, so rows are no longer necessary to create layouts. This opens +up many new types of layouts where panels of different heights can be aligned easily. Checkout the new grid in the video +above or on the [play site](http://play.grafana.org). All your existing dashboards will automatically migrate to the +new position system and look close to identical. The new panel position makes dashboards saved in v5.0 incompatible +with older versions of Grafana. + +
+ +## New UX + +{{< docs-imagebox img="/img/docs/v50/new_ux_nav.png" max-width="1000px" class="docs-image--right" >}} + +Almost every page has seen significant UX improvements. All pages (except dashboard pages) have a new tab-based layout that improves navigation between pages. The side menu has also changed quite a bit. You can still hide the side menu completely if you click on the Grafana logo. + +
+ +## Dashboard Settings + +{{< docs-imagebox img="/img/docs/v50/dashboard_settings.png" max-width="1000px" class="docs-image--right" >}} +Dashboard pages have a new header toolbar where buttons and actions are now all moved to the right. All the dashboard +settings views have been combined with a side nav which allows you to easily move between different setting categories. + +
+ +## New Light Theme + +{{< docs-imagebox img="/img/docs/v50/new_white_theme.png" max-width="1000px" class="docs-image--right" >}} + +This theme has not seen a lot of love in recent years and we felt it was time to give it a major overhaul. We are very happy with the result. + +
+ +## Dashboard Folders + +{{< docs-imagebox img="/img/docs/v50/new_search.png" max-width="1000px" class="docs-image--right" >}} + +The big new feature that comes with Grafana v5.0 is dashboard folders. Now you can organize your dashboards in folders, +which is very useful if you have a lot of dashboards or multiple teams. + +- New search design adds expandable sections for each folder, starred and recently viewed dashboards. +- New manage dashboard pages enable batch actions and views for folder settings and permissions. +- Set permissions on folders and have dashboards inherit the permissions. + +## Teams + +A team is a new concept in Grafana v5. They are simply a group of users that can be used in the new permission system for dashboards and folders. Only an admin can create teams. +We hope to do more with teams in future releases like integration with LDAP and a team landing page. + +## Permissions + +{{< docs-imagebox img="/img/docs/v50/folder_permissions.png" max-width="1000px" class="docs-image--right" >}} + +You can assign permissions to folders and dashboards. The default user role-based permissions can be removed and +replaced with specific teams or users enabling more control over what a user can see and edit. + +Dashboard permissions only limits what dashboards & folders a user can view & edit not which +data sources a user can access nor what queries a user can issue. + +
+ +## Provisioning from configuration + +In previous versions of Grafana, you could only use the API for provisioning data sources and dashboards. +But that required the service to be running before you started creating dashboards and you also needed to +set up credentials for the HTTP API. In v5.0 we decided to improve this experience by adding a new active +provisioning system that uses config files. This will make GitOps more natural as data sources and dashboards can +be defined via files that can be version controlled. We hope to extend this system to later add support for users, orgs +and alerts as well. + +### Data sources + +Data sources can now be set up using config files. These data sources are by default not editable from the Grafana GUI. +It's also possible to update and delete data sources from the config file. More info in the [data source provisioning docs](/administration/provisioning/#datasources). + +### Dashboards + +We also deprecated the `[dashboard.json]` in favor of our new dashboard provisioner that keeps dashboards on disk +in sync with dashboards in Grafana's database. The dashboard provisioner has multiple advantages over the old +`[dashboard.json]` feature. Instead of storing the dashboard in memory we now insert the dashboard into the database, +which makes it possible to star them, use one as the home dashboard, set permissions and other features in Grafana that +expect the dashboards to exist in the database. More info in the [dashboard provisioning docs](/administration/provisioning/#dashboards). + + +## Graphite Tags & Integrated Function Docs + +{{< docs-imagebox img="/img/docs/v50/graphite_tags.png" max-width="1000px" class="docs-image--right" >}} + +The Graphite query editor has been updated to support the latest Graphite version (v1.2) that adds +many new functions and support for querying by tags. You can now also view function documentation right in the query editor! 
+ +Read more on [Graphite Tag Support](http://graphite.readthedocs.io/en/latest/tags.html?highlight=tags). + +
+ +## Dashboard model, persistent url's and API changes + +We are introducing a new unique identifier (`uid`) in the dashboard JSON model. It's automatically +generated if not provided when creating a dashboard and will have a length of 9-12 characters. + +The unique identifier allows having persistent URL's for accessing dashboards, sharing them +between instances and when using [dashboard provisioning](#dashboards). This means that dashboard can +be renamed without breaking any links. We're changing the url format for dashboards +from `/dashboard/db/:slug` to `/d/:uid/:slug`. We'll keep supporting the old slug-based url's for dashboards +and redirects to the new one for backward compatibility. Please note that the old slug-based url's +have been deprecated and will be removed in a future release. + +Sharing dashboards between instances becomes much easier since the `uid` is unique (unique enough). +This might seem like a small change, but we are incredibly excited about it since it will make it +much easier to manage, collaborate and navigate between dashboards. + +### API changes +New uid-based routes in the dashboard API have been introduced to retrieve and delete dashboards. +The corresponding slug-based routes have been deprecated and will be removed in a future release. 
+ + diff --git a/docs/sources/http_api/admin.md b/docs/sources/http_api/admin.md index 716246102bc..0194c69caac 100644 --- a/docs/sources/http_api/admin.md +++ b/docs/sources/http_api/admin.md @@ -61,7 +61,7 @@ Content-Type: application/json "client_id":"some_id", "client_secret":"************", "enabled":"false", - "scopes":"user:email", + "scopes":"user:email,read:org", "team_ids":"", "token_url":"https://github.com/login/oauth/access_token" }, diff --git a/docs/sources/http_api/alerting.md b/docs/sources/http_api/alerting.md index 221552414e9..3860ae490b1 100644 --- a/docs/sources/http_api/alerting.md +++ b/docs/sources/http_api/alerting.md @@ -62,7 +62,7 @@ Content-Type: application/json } "newStateDate": "2016-12-25", "executionError": "", - "dashboardUri": "http://grafana.com/dashboard/db/sensors" + "url": "http://grafana.com/dashboard/db/sensors" } ] ``` @@ -94,7 +94,7 @@ Content-Type: application/json "state": "alerting", "newStateDate": "2016-12-25", "executionError": "", - "dashboardUri": "http://grafana.com/dashboard/db/sensors" + "url": "http://grafana.com/dashboard/db/sensors" } ``` @@ -196,7 +196,7 @@ Content-Type: application/json ## Create alert notification -You can find the full list of [supported notifers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page. +You can find the full list of [supported notifers](/alerting/notifications/#all-supported-notifier) at the alert notifiers page. 
`POST /api/alert-notifications` @@ -294,4 +294,4 @@ Content-Type: application/json { "message": "Notification deleted" } -``` \ No newline at end of file +``` diff --git a/docs/sources/http_api/dashboard.md b/docs/sources/http_api/dashboard.md index 0538754bd96..ea1bd7f2ef7 100644 --- a/docs/sources/http_api/dashboard.md +++ b/docs/sources/http_api/dashboard.md @@ -11,6 +11,17 @@ parent = "http_api" # Dashboard API +## Identifier (id) vs unique identifier (uid) + +The identifier (id) of a dashboard is an auto-incrementing numeric value and is only unique per Grafana install. + +The unique identifier (uid) of a dashboard can be used for uniquely identify a dashboard between multiple Grafana installs. +It's automatically generated if not provided when creating a dashboard. The uid allows having consistent URL's for accessing +dashboards and when syncing dashboards between multiple Grafana installs, see [dashboard provisioning](/administration/provisioning/#dashboards) +for more information. This means that changing the title of a dashboard will not break any bookmarked links to that dashboard. + +The uid can have a maximum length of 40 characters. + ## Create / Update dashboard `POST /api/dashboards/db` @@ -28,24 +39,25 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "dashboard": { "id": null, + "uid": null, "title": "Production Overview", "tags": [ "templated" ], "timezone": "browser", - "rows": [ - { - } - ], - "schemaVersion": 6, + "schemaVersion": 16, "version": 0 }, + "folderId": 0, "overwrite": false } ``` JSON Body schema: -- **dashboard** – The complete dashboard model, id = null to create a new dashboard -- **overwrite** – Set to true if you want to overwrite existing dashboard with newer version or with same dashboard title. +- **dashboard** – The complete dashboard model, id = null to create a new dashboard. +- **dashboard.id** – id = null to create a new dashboard. 
+- **dashboard.uid** – Optional [unique identifier](/http_api/dashboard/#identifier-id-vs-unique-identifier-uid) when creating a dashboard. uid = null will generate a new uid. +- **folderId** – The id of the folder to save the dashboard in. +- **overwrite** – Set to true if you want to overwrite existing dashboard with newer version, same dashboard title in folder or same dashboard uid. - **message** - Set a commit message for the version history. **Example Response**: @@ -56,9 +68,12 @@ Content-Type: application/json; charset=UTF-8 Content-Length: 78 { - "slug": "production-overview", - "status": "success", - "version": 1 + "id": 1, + "uid": "cIBgcSjkk", + "url": "/d/cIBgcSjkk/production-overview", + "status": "success", + "version": 1, + "slug": "production-overview" //deprecated in Grafana v5.0 } ``` @@ -67,10 +82,18 @@ Status Codes: - **200** – Created - **400** – Errors (invalid json, missing or invalid fields, etc) - **401** – Unauthorized +- **403** – Access denied - **412** – Precondition failed -The **412** status code is used when a newer dashboard already exists (newer, its version is greater than the version that was sent). The -same status code is also used if another dashboard exists with the same title. The response body will look like this: +The **412** status code is used for explaing that you cannot create the dashboard and why. +There can be different reasons for this: + +- The dashboard has been changed by someone else, `status=version-mismatch` +- A dashboard with the same name in the folder already exists, `status=name-exists` +- A dashboard with the same uid already exists, `status=name-exists` +- The dashboard belongs to plugin ``, `status=plugin-dashboard` + + The response body will have the following properties: ```http HTTP/1.1 412 Precondition Failed @@ -83,18 +106,18 @@ Content-Length: 97 } ``` -In in case of title already exists the `status` property will be `name-exists`. 
+In case of title already exists the `status` property will be `name-exists`. -## Get dashboard +## Get dashboard by uid -`GET /api/dashboards/db/:slug` +`GET /api/dashboards/uid/:uid` -Will return the dashboard given the dashboard slug. Slug is the url friendly version of the dashboard title. +Will return the dashboard given the dashboard unique identifier (uid). **Example Request**: ```http -GET /api/dashboards/db/production-overview HTTP/1.1 +GET /api/dashboards/uid/cIBgcSjkk HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk @@ -107,35 +130,40 @@ HTTP/1.1 200 Content-Type: application/json { - "meta": { - "isStarred": false, - "slug": "production-overview" - }, "dashboard": { - "id": null, + "id": 1, + "uid": "cIBgcSjkk", "title": "Production Overview", "tags": [ "templated" ], "timezone": "browser", - "rows": [ - { - } - ], - "schemaVersion": 6, + "schemaVersion": 16, "version": 0 + }, + "meta": { + "isStarred": false, + "url": "/d/cIBgcSjkk/production-overview", + "slug": "production-overview" //deprecated in Grafana v5.0 } } ``` -## Delete dashboard +Status Codes: -`DELETE /api/dashboards/db/:slug` +- **200** – Found +- **401** – Unauthorized +- **403** – Access denied +- **404** – Not found -The above will delete the dashboard with the specified slug. The slug is the url friendly (unique) version of the dashboard title. +## Delete dashboard by uid + +`DELETE /api/dashboards/uid/:uid` + +Will delete the dashboard given the specified unique identifier (uid). 
**Example Request**: ```http -DELETE /api/dashboards/db/test HTTP/1.1 +DELETE /api/dashboards/uid/cIBgcSjkk HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk @@ -147,9 +175,16 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk HTTP/1.1 200 Content-Type: application/json -{"title": "Test"} +{"title": "Production Overview"} ``` +Status Codes: + +- **200** – Deleted +- **401** – Unauthorized +- **403** – Access denied +- **404** – Not found + ## Gets the home dashboard `GET /api/dashboards/home` @@ -172,27 +207,13 @@ HTTP/1.1 200 Content-Type: application/json { - "meta": { - "isHome":true, - "canSave":false, - "canEdit":false, - "canStar":false, - "slug":"", - "expires":"0001-01-01T00:00:00Z", - "created":"0001-01-01T00:00:00Z" - }, "dashboard": { "editable":false, "hideControls":true, "nav":[ - { - "enable":false, - "type":"timepicker" - } - ], - "rows": [ { - + "enable":false, + "type":"timepicker" } ], "style":"dark", @@ -206,13 +227,21 @@ Content-Type: application/json "timezone":"browser", "title":"Home", "version":5 + }, + "meta": { + "isHome":true, + "canSave":false, + "canEdit":false, + "canStar":false, + "url":"", + "expires":"0001-01-01T00:00:00Z", + "created":"0001-01-01T00:00:00Z" } } ``` ## Tags for Dashboard - `GET /api/dashboards/tags` Get all tags of dashboards @@ -244,21 +273,24 @@ Content-Type: application/json ] ``` -## Search Dashboards +## Dashboard Search +See [Folder/Dashboard Search API](/http_api/folder_dashboard_search). -`GET /api/search/` +## Deprecated resources +Please note that these resource have been deprecated and will be removed in a future release. -Query parameters: +### Get dashboard by slug +**Deprecated starting from Grafana v5.0. 
Please update to use the new *Get dashboard by uid* resource instead** -- **query** – Search Query -- **tag** – Tag to use -- **starred** – Flag indicating if only starred Dashboards should be returned -- **tagcloud** - Flag indicating if a tagcloud should be returned +`GET /api/dashboards/db/:slug` + +Will return the dashboard given the dashboard slug. Slug is the url friendly version of the dashboard title. +If there exists multiple dashboards with the same slug, one of them will be returned in the response. **Example Request**: ```http -GET /api/search?query=Production%20Overview&starred=true&tag=prod HTTP/1.1 +GET /api/dashboards/db/production-overview HTTP/1.1 Accept: application/json Content-Type: application/json Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk @@ -270,14 +302,74 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk HTTP/1.1 200 Content-Type: application/json -[ - { - "id":1, - "title":"Production Overview", - "uri":"db/production-overview", - "type":"dash-db", - "tags":[prod], - "isStarred":true +{ + "dashboard": { + "id": 1, + "uid": "cIBgcSjkk", + "title": "Production Overview", + "tags": [ "templated" ], + "timezone": "browser", + "schemaVersion": 16, + "version": 0 + }, + "meta": { + "isStarred": false, + "url": "/d/cIBgcSjkk/production-overview", + "slug": "production-overview" // deprecated in Grafana v5.0 } -] +} +``` + +Status Codes: + +- **200** – Found +- **401** – Unauthorized +- **403** – Access denied +- **404** – Not found + +### Delete dashboard by slug +**Deprecated starting from Grafana v5.0. Please update to use the *Delete dashboard by uid* resource instead.** + +`DELETE /api/dashboards/db/:slug` + +Will delete the dashboard given the specified slug. Slug is the url friendly version of the dashboard title. 
+ +**Example Request**: + +```http +DELETE /api/dashboards/db/test HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"title": "Production Overview"} +``` + +Status Codes: + +- **200** – Deleted +- **401** – Unauthorized +- **403** – Access denied +- **404** – Not found +- **412** – Precondition failed + +The **412** status code is used when there exists multiple dashboards with the same slug. +The response body will look like this: + +```http +HTTP/1.1 412 Precondition Failed +Content-Type: application/json; charset=UTF-8 +Content-Length: 97 + +{ + "message": "Multiple dashboards with the same slug exists", + "status": "multiple-slugs-exists" +} ``` diff --git a/docs/sources/http_api/dashboard_permissions.md b/docs/sources/http_api/dashboard_permissions.md new file mode 100644 index 00000000000..26aa1550d7c --- /dev/null +++ b/docs/sources/http_api/dashboard_permissions.md @@ -0,0 +1,149 @@ ++++ +title = "Dashboard Permissions HTTP API " +description = "Grafana Dashboard Permissions HTTP API" +keywords = ["grafana", "http", "documentation", "api", "dashboard", "permission", "permissions", "acl"] +aliases = ["/http_api/dashboardpermissions/"] +type = "docs" +[menu.docs] +name = "Dashboard Permissions" +parent = "http_api" ++++ + +# Dashboard Permissions API + +This API can be used to update/get the permissions for a dashboard. + +Permissions with `dashboardId=-1` are the default permissions for users with the Viewer and Editor roles. Permissions can be set for a user, a team or a role (Viewer or Editor). Permissions cannot be set for Admins - they always have access to everything. 
+ +The permission levels for the permission field: + +- 1 = View +- 2 = Edit +- 4 = Admin + +## Get permissions for a dashboard + +`GET /api/dashboards/id/:dashboardId/permissions` + +Gets all existing permissions for the dashboard with the given `dashboardId`. + +**Example request**: + +```http +GET /api/dashboards/id/1/permissions HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response** + +```http +HTTP/1.1 200 OK +Content-Type: application/json; charset=UTF-8 +Content-Length: 551 + +[ + { + "id": 1, + "dashboardId": -1, + "created": "2017-06-20T02:00:00+02:00", + "updated": "2017-06-20T02:00:00+02:00", + "userId": 0, + "userLogin": "", + "userEmail": "", + "teamId": 0, + "team": "", + "role": "Viewer", + "permission": 1, + "permissionName": "View", + "uid": "", + "title": "", + "slug": "", + "isFolder": false, + "url": "" + }, + { + "id": 2, + "dashboardId": -1, + "created": "2017-06-20T02:00:00+02:00", + "updated": "2017-06-20T02:00:00+02:00", + "userId": 0, + "userLogin": "", + "userEmail": "", + "teamId": 0, + "team": "", + "role": "Editor", + "permission": 2, + "permissionName": "Edit", + "uid": "", + "title": "", + "slug": "", + "isFolder": false, + "url": "" + } +] +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Access denied +- **404** - Dashboard not found + +## Update permissions for a dashboard + +`POST /api/dashboards/id/:dashboardId/permissions` + +Updates permissions for a dashboard. This operation will remove existing permissions if they're not included in the request. 
+ +**Example request**: + +```http +POST /api/dashboards/id/1/permissions +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + + "items": [ + { + "role": "Viewer", + "permission": 1 + }, + { + "role": "Editor", + "permission": 2 + }, + { + "teamId": 1, + "permission": 1 + }, + { + "userId": 11, + "permission": 4 + } + ] +} +``` + +JSON body schema: + +- **items** - The permission items to add/update. Items that are omitted from the list will be removed. + +**Example response**: + +```http +HTTP/1.1 200 OK +Content-Type: application/json; charset=UTF-8 +Content-Length: 35 + +{"message":"Dashboard permissions updated"} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Access denied +- **404** - Dashboard not found diff --git a/docs/sources/http_api/folder.md b/docs/sources/http_api/folder.md new file mode 100644 index 00000000000..7ee1f737799 --- /dev/null +++ b/docs/sources/http_api/folder.md @@ -0,0 +1,317 @@ ++++ +title = "Folder HTTP API " +description = "Grafana Folder HTTP API" +keywords = ["grafana", "http", "documentation", "api", "folder"] +aliases = ["/http_api/folder/"] +type = "docs" +[menu.docs] +name = "Folder" +parent = "http_api" ++++ + +# Folder API + +## Identifier (id) vs unique identifier (uid) + +The identifier (id) of a folder is an auto-incrementing numeric value and is only unique per Grafana install. + +The unique identifier (uid) of a folder can be used for uniquely identify folders between multiple Grafana installs. It's automatically generated if not provided when creating a folder. The uid allows having consistent URL's for accessing folders and when syncing folders between multiple Grafana installs. This means that changing the title of a folder will not break any bookmarked links to that folder. + +The uid can have a maximum length of 40 characters. 
+ + +## Get all folders + +`GET /api/folders` + +Returns all folders that the authenticated user has permission to view. + +**Example Request**: + +```http +GET /api/folders?limit=10 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +[ + { + "id":1, + "uid": "nErXDvCkzz", + "title": "Department ABC", + "url": "/dashboards/f/nErXDvCkzz/department-abc", + "hasAcl": false, + "canSave": true, + "canEdit": true, + "canAdmin": true, + "createdBy": "admin", + "created": "2018-01-31T17:43:12+01:00", + "updatedBy": "admin", + "updated": "2018-01-31T17:43:12+01:00", + "version": 1 + } +] +``` + +## Get folder by uid + +`GET /api/folders/:uid` + +Will return the folder given the folder uid. + +**Example Request**: + +```http +GET /api/folders/nErXDvCkzz HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "id":1, + "uid": "nErXDvCkzz", + "title": "Department ABC", + "url": "/dashboards/f/nErXDvCkzz/department-abc", + "hasAcl": false, + "canSave": true, + "canEdit": true, + "canAdmin": true, + "createdBy": "admin", + "created": "2018-01-31T17:43:12+01:00", + "updatedBy": "admin", + "updated": "2018-01-31T17:43:12+01:00", + "version": 1 +} +``` + +Status Codes: + +- **200** – Found +- **401** – Unauthorized +- **403** – Access Denied +- **404** – Folder not found + +## Create folder + +`POST /api/folders` + +Creates a new folder. 
+ +**Example Request**: + +```http +POST /api/folders HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "uid": "nErXDvCkzz", + "title": "Department ABC" +} +``` + +JSON Body schema: + +- **uid** – Optional [unique identifier](/http_api/folder/#identifier-id-vs-unique-identifier-uid). +- **title** – The title of the folder. + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "id":1, + "uid": "nErXDvCkzz", + "title": "Departmenet ABC", + "url": "/dashboards/f/nErXDvCkzz/department-abc", + "hasAcl": false, + "canSave": true, + "canEdit": true, + "canAdmin": true, + "createdBy": "admin", + "created": "2018-01-31T17:43:12+01:00", + "updatedBy": "admin", + "updated": "2018-01-31T17:43:12+01:00", + "version": 1 +} +``` + +Status Codes: + +- **200** – Created +- **400** – Errors (invalid json, missing or invalid fields, etc) +- **401** – Unauthorized +- **403** – Access Denied + +## Update folder + +`PUT /api/folders/:uid` + +Updates an existing folder identified by uid. + +**Example Request**: + +```http +PUT /api/folders/nErXDvCkzz HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "title":"Department DEF", + "version": 1 +} +``` + +JSON Body schema: + +- **uid** – Provide another [unique identifier](/http_api/folder/#identifier-id-vs-unique-identifier-uid) than stored to change the unique identifier. +- **title** – The title of the folder. +- **version** – Provide the current version to be able to update the folder. Not needed if `overwrite=true`. +- **overwrite** – Set to true if you want to overwrite existing folder with newer version. 
+ +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "id":1, + "uid": "nErXDvCkzz", + "title": "Department DEF", + "url": "/dashboards/f/nErXDvCkzz/department-def", + "hasAcl": false, + "canSave": true, + "canEdit": true, + "canAdmin": true, + "createdBy": "admin", + "created": "2018-01-31T17:43:12+01:00", + "updatedBy": "admin", + "updated": "2018-01-31T17:43:12+01:00", + "version": 1 +} +``` + +Status Codes: + +- **200** – Updated +- **400** – Errors (invalid json, missing or invalid fields, etc) +- **401** – Unauthorized +- **403** – Access Denied +- **404** – Folder not found +- **412** – Precondition failed + +The **412** status code is used for explaining that you cannot update the folder and why. +There can be different reasons for this: + +- The folder has been changed by someone else, `status=version-mismatch` + + The response body will have the following properties: + +```http +HTTP/1.1 412 Precondition Failed +Content-Type: application/json; charset=UTF-8 +Content-Length: 97 + +{ + "message": "The folder has been changed by someone else", + "status": "version-mismatch" +} +``` + +## Delete folder + +`DELETE /api/folders/:uid` + +Deletes an existing folder identified by uid together with all dashboards stored in the folder, if any. This operation cannot be reverted. + +**Example Request**: + +```http +DELETE /api/folders/nErXDvCkzz HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "message":"Folder deleted" +} +``` + +Status Codes: + +- **200** – Deleted +- **401** – Unauthorized +- **403** – Access Denied +- **404** – Folder not found + +## Get folder by id + +`GET /api/folders/:id` + +Will return the folder identified by id. 
+ +**Example Request**: + +```http +GET /api/folders/1 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "id":1, + "uid": "nErXDvCkzz", + "title": "Department ABC", + "url": "/dashboards/f/nErXDvCkzz/department-abc", + "hasAcl": false, + "canSave": true, + "canEdit": true, + "canAdmin": true, + "createdBy": "admin", + "created": "2018-01-31T17:43:12+01:00", + "updatedBy": "admin", + "updated": "2018-01-31T17:43:12+01:00", + "version": 1 +} +``` + +Status Codes: + +- **200** – Found +- **401** – Unauthorized +- **403** – Access Denied +- **404** – Folder not found diff --git a/docs/sources/http_api/folder_dashboard_search.md b/docs/sources/http_api/folder_dashboard_search.md new file mode 100644 index 00000000000..73b5dd90b87 --- /dev/null +++ b/docs/sources/http_api/folder_dashboard_search.md @@ -0,0 +1,98 @@ ++++ +title = "Folder/Dashboard Search HTTP API " +description = "Grafana Folder/Dashboard Search HTTP API" +keywords = ["grafana", "http", "documentation", "api", "search", "folder", "dashboard"] +aliases = ["/http_api/folder_dashboard_search/"] +type = "docs" +[menu.docs] +name = "Folder/dashboard search" +parent = "http_api" ++++ + +# Folder/Dashboard Search API + +## Search folders and dashboards + +`GET /api/search/` + +Query parameters: + +- **query** – Search Query +- **tag** – List of tags to search for +- **type** – Type to search for, `dash-folder` or `dash-db` +- **dashboardIds** – List of dashboard id's to search for +- **folderIds** – List of folder id's to search in for dashboards +- **starred** – Flag indicating if only starred Dashboards should be returned +- **limit** – Limit the number of returned results + +**Example request for retrieving folders and dashboards of the general folder**: + +```http +GET /api/search?folderIds=0&query=&starred=false HTTP/1.1 
+Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example response for retrieving folders and dashboards of the general folder**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +[ + { + "id": 163, + "uid": "000000163", + "title": "Folder", + "url": "/dashboards/f/000000163/folder", + "type": "dash-folder", + "tags": [], + "isStarred": false, + "uri":"db/folder" // deprecated in Grafana v5.0 + }, + { + "id":1, + "uid": "cIBgcSjkk", + "title":"Production Overview", + "url": "/d/cIBgcSjkk/production-overview", + "type":"dash-db", + "tags":[prod], + "isStarred":true, + "uri":"db/production-overview" // deprecated in Grafana v5.0 + } +] +``` + +**Example request searching for dashboards**: + +```http +GET /api/search?query=Production%20Overview&starred=true&tag=prod HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example response searching for dashboards**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +[ + { + "id":1, + "uid": "cIBgcSjkk", + "title":"Production Overview", + "url": "/d/cIBgcSjkk/production-overview", + "type":"dash-db", + "tags":[prod], + "isStarred":true, + "folderId": 2, + "folderUid": "000000163", + "folderTitle": "Folder", + "folderUrl": "/dashboards/f/000000163/folder", + "uri":"db/production-overview" // deprecated in Grafana v5.0 + } +] +``` \ No newline at end of file diff --git a/docs/sources/http_api/folder_permissions.md b/docs/sources/http_api/folder_permissions.md new file mode 100644 index 00000000000..284ab70866f --- /dev/null +++ b/docs/sources/http_api/folder_permissions.md @@ -0,0 +1,149 @@ ++++ +title = "Folder Permissions HTTP API " +description = "Grafana Folder Permissions HTTP API" +keywords = ["grafana", "http", "documentation", "api", "folder", "permission", "permissions", "acl"] +aliases = 
["/http_api/dashboardpermissions/"] +type = "docs" +[menu.docs] +name = "Folder Permissions" +parent = "http_api" ++++ + +# Folder Permissions API + +This API can be used to update/get the permissions for a folder. + +Permissions with `folderId=-1` are the default permissions for users with the Viewer and Editor roles. Permissions can be set for a user, a team or a role (Viewer or Editor). Permissions cannot be set for Admins - they always have access to everything. + +The permission levels for the permission field: + +- 1 = View +- 2 = Edit +- 4 = Admin + +## Get permissions for a folder + +`GET /api/folders/:uid/permissions` + +Gets all existing permissions for the folder with the given `uid`. + +**Example request**: + +```http +GET /api/folders/nErXDvCkzz/permissions HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk +``` + +**Example Response** + +```http +HTTP/1.1 200 OK +Content-Type: application/json; charset=UTF-8 +Content-Length: 551 + +[ + { + "id": 1, + "folderId": -1, + "created": "2017-06-20T02:00:00+02:00", + "updated": "2017-06-20T02:00:00+02:00", + "userId": 0, + "userLogin": "", + "userEmail": "", + "teamId": 0, + "team": "", + "role": "Viewer", + "permission": 1, + "permissionName": "View", + "uid": "nErXDvCkzz", + "title": "", + "slug": "", + "isFolder": false, + "url": "" + }, + { + "id": 2, + "dashboardId": -1, + "created": "2017-06-20T02:00:00+02:00", + "updated": "2017-06-20T02:00:00+02:00", + "userId": 0, + "userLogin": "", + "userEmail": "", + "teamId": 0, + "team": "", + "role": "Editor", + "permission": 2, + "permissionName": "Edit", + "uid": "", + "title": "", + "slug": "", + "isFolder": false, + "url": "" + } +] +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Access denied +- **404** - Folder not found + +## Update permissions for a folder + +`POST /api/folders/:uid/permissions` + +Updates permissions for a folder. 
This operation will remove existing permissions if they're not included in the request. + +**Example request**: + +```http +POST /api/folders/nErXDvCkzz/permissions +Accept: application/json +Content-Type: application/json +Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk + +{ + "items": [ + { + "role": "Viewer", + "permission": 1 + }, + { + "role": "Editor", + "permission": 2 + }, + { + "teamId": 1, + "permission": 1 + }, + { + "userId": 11, + "permission": 4 + } + ] +} +``` + +JSON body schema: + +- **items** - The permission items to add/update. Items that are omitted from the list will be removed. + +**Example response**: + +```http +HTTP/1.1 200 OK +Content-Type: application/json; charset=UTF-8 +Content-Length: 35 + +{"message":"Folder permissions updated"} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Access denied +- **404** - Folder not found diff --git a/docs/sources/http_api/index.md b/docs/sources/http_api/index.md index 9c5d0e7d49a..2a74917a9fd 100644 --- a/docs/sources/http_api/index.md +++ b/docs/sources/http_api/index.md @@ -18,12 +18,20 @@ dashboards, creating users and updating data sources. 
## Supported HTTP APIs: -* [Authentication API]({{< relref "auth.md" >}}) -* [Dashboard API]({{< relref "dashboard.md" >}}) -* [Data Source API]({{< relref "data_source.md" >}}) -* [Organisation API]({{< relref "org.md" >}}) -* [User API]({{< relref "user.md" >}}) -* [Admin API]({{< relref "admin.md" >}}) -* [Snapshot API]({{< relref "snapshot.md" >}}) -* [Preferences API]({{< relref "preferences.md" >}}) -* [Other API]({{< relref "other.md" >}}) +* [Authentication API]({{< relref "/http_api/auth.md" >}}) +* [Dashboard API]({{< relref "/http_api/dashboard.md" >}}) +* [Dashboard Versions API]({{< relref "http_api/dashboard_versions.md" >}}) +* [Dashboard Permissions API]({{< relref "http_api/dashboard_permissions.md" >}}) +* [Folder API]({{< relref "/http_api/folder.md" >}}) +* [Folder Permissions API]({{< relref "http_api/folder_permissions.md" >}}) +* [Folder/dashboard search API]({{< relref "/http_api/folder_dashboard_search.md" >}}) +* [Data Source API]({{< relref "http_api/data_source.md" >}}) +* [Organisation API]({{< relref "http_api/org.md" >}}) +* [Snapshot API]({{< relref "http_api/snapshot.md" >}}) +* [Annotations API]({{< relref "http_api/annotations.md" >}}) +* [Alerting API]({{< relref "http_api/alerting.md" >}}) +* [User API]({{< relref "http_api/user.md" >}}) +* [Team API]({{< relref "http_api/team.md" >}}) +* [Admin API]({{< relref "http_api/admin.md" >}}) +* [Preferences API]({{< relref "http_api/preferences.md" >}}) +* [Other API]({{< relref "http_api/other.md" >}}) diff --git a/docs/sources/http_api/team.md b/docs/sources/http_api/team.md new file mode 100644 index 00000000000..94ea4108481 --- /dev/null +++ b/docs/sources/http_api/team.md @@ -0,0 +1,316 @@ ++++ +title = "Team HTTP API " +description = "Grafana Team HTTP API" +keywords = ["grafana", "http", "documentation", "api", "team", "teams", "group"] +aliases = ["/http_api/team/"] +type = "docs" +[menu.docs] +name = "Teams" +parent = "http_api" ++++ + +# Team API + +This API can be used to 
create/update/delete Teams and to add/remove users to Teams. All actions require that the user has the Admin role for the organization. + +## Team Search With Paging + +`GET /api/teams/search?perpage=50&page=1&query=mytea` + +or + +`GET /api/teams/search?name=myteam` + +```http +GET /api/teams/search?perpage=10&page=1&query=myteam HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= +``` + +### Using the query parameter + +Default value for the `perpage` parameter is `1000` and for the `page` parameter is `1`. + +The `totalCount` field in the response can be used for pagination of the teams list E.g. if `totalCount` is equal to 100 teams and the `perpage` parameter is set to 10 then there are 10 pages of teams. + +The `query` parameter is optional and it will return results where the query value is contained in the `name` field. Query values with spaces need to be url encoded e.g. `query=my%20team`. + +### Using the name parameter + +The `name` parameter returns a single team if the parameter matches the `name` field. 
+ +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + + "totalCount": 1, + "teams": [ + { + "id": 1, + "orgId": 1, + "name": "MyTestTeam", + "email": "", + "avatarUrl": "\/avatar\/3f49c15916554246daa714b9bd0ee398", + "memberCount": 1 + } + ], + "page": 1, + "perPage": 1000 +} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Permission denied +- **404** - Team not found (if searching by name) + +## Get Team By Id + +`GET /api/teams/:id` + +**Example Request**: + +```http +GET /api/teams/1 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{ + "id": 1, + "orgId": 1, + "name": "MyTestTeam", + "email": "", + "created": "2017-12-15T10:40:45+01:00", + "updated": "2017-12-15T10:40:45+01:00" +} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Permission denied +- **404** - Team not found + +## Add Team + +The Team `name` needs to be unique. `name` is required and `email` is optional. + +`POST /api/teams` + +**Example Request**: + +```http +POST /api/teams HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= + +{ + "name": "MyTestTeam", + "email": "email@test.com" +} +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Team created","teamId":2} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Permission denied +- **409** - Team name is taken + +## Update Team + +There are two fields that can be updated for a team: `name` and `email`. 
+ +`PUT /api/teams/:id` + +**Example Request**: + +```http +PUT /api/teams/2 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= + +{ + "name": "MyTestTeam", + "email": "email@test.com" +} +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Team updated"} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Permission denied +- **404** - Team not found +- **409** - Team name is taken + +## Delete Team By Id + +`DELETE /api/teams/:id` + +**Example Request**: + +```http +DELETE /api/teams/2 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Team deleted"} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Permission denied +- **404** - Failed to delete Team. ID not found + +## Get Team Members + +`GET /api/teams/:teamId/members` + +**Example Request**: + +```http +GET /api/teams/1/members HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +[ + { + "orgId": 1, + "teamId": 1, + "userId": 3, + "email": "user1@email.com", + "login": "user1", + "avatarUrl": "\/avatar\/1b3c32f6386b0185c40d359cdc733a79" + }, + { + "orgId": 1, + "teamId": 1, + "userId": 2, + "email": "user2@email.com", + "login": "user2", + "avatarUrl": "\/avatar\/cad3c68da76e45d10269e8ef02f8e73e" + } +] +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Permission denied + +## Add Team Member + +`POST /api/teams/:teamId/members` + +**Example Request**: + +```http +POST /api/teams/1/members HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= + +{ + "userId": 2 +} +``` + 
+**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Member added to Team"} +``` + +Status Codes: + +- **200** - Ok +- **400** - User is already added to this team +- **401** - Unauthorized +- **403** - Permission denied +- **404** - Team not found + +## Remove Member From Team + +`DELETE /api/teams/:teamId/members/:userId` + +**Example Request**: + +```http +DELETE /api/teams/2/members/3 HTTP/1.1 +Accept: application/json +Content-Type: application/json +Authorization: Basic YWRtaW46YWRtaW4= +``` + +**Example Response**: + +```http +HTTP/1.1 200 +Content-Type: application/json + +{"message":"Team Member removed"} +``` + +Status Codes: + +- **200** - Ok +- **401** - Unauthorized +- **403** - Permission denied +- **404** - Team not found/Team member not found diff --git a/docs/sources/index.md b/docs/sources/index.md index 7a431e29692..3c59b9baba0 100644 --- a/docs/sources/index.md +++ b/docs/sources/index.md @@ -1,53 +1,107 @@ +++ -title = "Docs Home" -description = "Install guide for Grafana" +title = "Grafana documentation" +description = "Guides, Installation & Feature Documentation" keywords = ["grafana", "installation", "documentation"] type = "docs" aliases = ["v1.1", "guides/reference/admin"] -[menu.docs] -name = "Welcome to the Docs" -identifier = "root" -weight = -1 +++ -# Welcome to the Grafana Documentation +# Grafana Documentation -Grafana is an open source metric analytics & visualization suite. It is most commonly used for -visualizing time series data for infrastructure and application analytics but many use it in -other domains including industrial sensors, home automation, weather, and process control. +

Installing Grafana

+ -## Installing Grafana -- [Installing on Debian / Ubuntu](installation/debian) -- [Installing on RPM-based Linux (CentOS, Fedora, OpenSuse, RedHat)](installation/rpm) -- [Installing on Mac OS X](installation/mac) -- [Installing on Windows](installation/windows) -- [Installing on Docker](installation/docker) -- [Installing using Provisioning (Chef, Puppet, Salt, Ansible, etc)](installation/provisioning) -- [Nightly Builds](https://grafana.com/grafana/download) +

Guides

-For other platforms Read the [build from source]({{< relref "project/building_from_source.md" >}}) -instructions for more information. + -## Configuring Grafana - -The back-end web server has a number of configuration options. Go the -[Configuration]({{< relref "installation/configuration.md" >}}) page for details on all -those options. - - -## Getting Started - -- [Getting Started]({{< relref "guides/getting_started.md" >}}) -- [Basic Concepts]({{< relref "guides/basic_concepts.md" >}}) -- [Screencasts]({{< relref "tutorials/screencasts.md" >}}) - -## Data Source Guides - -- [Graphite]({{< relref "features/datasources/graphite.md" >}}) -- [Elasticsearch]({{< relref "features/datasources/elasticsearch.md" >}}) -- [InfluxDB]({{< relref "features/datasources/influxdb.md" >}}) -- [Prometheus]({{< relref "features/datasources/prometheus.md" >}}) -- [OpenTSDB]({{< relref "features/datasources/opentsdb.md" >}}) -- [MySQL]({{< relref "features/datasources/mysql.md" >}}) -- [Postgres]({{< relref "features/datasources/postgres.md" >}}) -- [Cloudwatch]({{< relref "features/datasources/cloudwatch.md" >}}) +

Data Source Guides

+ diff --git a/docs/sources/installation/behind_proxy.md b/docs/sources/installation/behind_proxy.md index 3d89d8b3c2c..f1a00a5b1cc 100644 --- a/docs/sources/installation/behind_proxy.md +++ b/docs/sources/installation/behind_proxy.md @@ -69,4 +69,57 @@ server { } ``` +#### HAProxy configuration with sub path +```bash +frontend http-in + bind *:80 + use_backend grafana_backend if { path /grafana } or { path_beg /grafana/ } +backend grafana_backend + # Requires haproxy >= 1.6 + http-request set-path %[path,regsub(^/grafana/?,/)] + + # Works for haproxy < 1.6 + # reqrep ^([^\ ]*\ /)grafana[/]?(.*) \1\2 + + server grafana localhost:3000 +``` + +### IIS URL Rewrite Rule (Windows) with Subpath + +IIS requires that the URL Rewrite module is installed. + +Given: + +- subpath `grafana` +- Grafana installed on `http://localhost:3000` +- server config: + + ```bash + [server] + domain = localhost:8080 + root_url = %(protocol)s://%(domain)s:/grafana + ``` + +Create an Inbound Rule for the parent website (localhost:8080 in this example) in IIS Manager with the following settings: + +- pattern: `grafana(/)?(.*)` +- check the `Ignore case` checkbox +- rewrite url set to `http://localhost:3000/{R:2}` +- check the `Append query string` checkbox +- check the `Stop processing of subsequent rules` checkbox + +This is the rewrite rule that is generated in the `web.config`: + +```xml + + + + + + + + +``` + +See the [tutorial on IIS Url Rewrites](http://docs.grafana.org/tutorials/iis/) for more in-depth instructions. diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index 1e99d487c2b..66072a98f84 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -296,7 +296,7 @@ options are `Admin` and `Editor`. e.g. : `auto_assign_org_role = Viewer` -### viewers can edit +### viewers_can_edit Viewers can edit/inspect dashboard settings in the browser. But not save the dashboard. 
Defaults to `false`. @@ -354,7 +354,7 @@ enabled = true allow_sign_up = true client_id = YOUR_GITHUB_APP_CLIENT_ID client_secret = YOUR_GITHUB_APP_CLIENT_SECRET -scopes = user:email +scopes = user:email,read:org auth_url = https://github.com/login/oauth/authorize token_url = https://github.com/login/oauth/access_token api_url = https://api.github.com/user @@ -387,6 +387,7 @@ scopes = user:email,read:org team_ids = 150,300 auth_url = https://github.com/login/oauth/authorize token_url = https://github.com/login/oauth/access_token +api_url = https://api.github.com/user allow_sign_up = true ``` @@ -405,6 +406,7 @@ client_secret = YOUR_GITHUB_APP_CLIENT_SECRET scopes = user:email,read:org auth_url = https://github.com/login/oauth/authorize token_url = https://github.com/login/oauth/access_token +api_url = https://api.github.com/user allow_sign_up = true # space-delimited organization names allowed_organizations = github google @@ -496,7 +498,7 @@ name = BitBucket enabled = true allow_sign_up = true client_id = -client_secret = +client_secret = scopes = account email auth_url = https://bitbucket.org/site/oauth2/authorize token_url = https://bitbucket.org/site/oauth2/access_token @@ -505,6 +507,105 @@ team_ids = allowed_organizations = ``` +### Set up oauth2 with OneLogin + +1. Create a new Custom Connector with the following settings: + - Name: Grafana + - Sign On Method: OpenID Connect + - Redirect URI: `https:///login/generic_oauth` + - Signing Algorithm: RS256 + - Login URL: `https:///login/generic_oauth` + + then: +2. Add an App to the Grafana Connector: + - Display Name: Grafana + + then: +3. Under the SSO tab on the Grafana App details page you'll find the Client ID and Client Secret. + + Your OneLogin Domain will match the url you use to access OneLogin. 
+ + Configure Grafana as follows: + + ```bash + [auth.generic_oauth] + name = OneLogin + enabled = true + allow_sign_up = true + client_id = + client_secret = + scopes = openid email name + auth_url = https://.onelogin.com/oidc/auth + token_url = https://.onelogin.com/oidc/token + api_url = https://.onelogin.com/oidc/me + team_ids = + allowed_organizations = + ``` + +### Set up oauth2 with Auth0 + +1. Create a new Client in Auth0 + - Name: Grafana + - Type: Regular Web Application + +2. Go to the Settings tab and set: + - Allowed Callback URLs: `https:///login/generic_oauth` + +3. Click Save Changes, then use the values at the top of the page to configure Grafana: + + ```bash + [auth.generic_oauth] + enabled = true + allow_sign_up = true + team_ids = + allowed_organizations = + name = Auth0 + client_id = + client_secret = + scopes = openid profile email + auth_url = https:///authorize + token_url = https:///oauth/token + api_url = https:///userinfo + ``` + +### Set up oauth2 with Azure Active Directory + +1. Log in to portal.azure.com and click "Azure Active Directory" in the side menu, then click the "Properties" sub-menu item. + +2. Copy the "Directory ID", this is needed for setting URLs later + +3. Click "App Registrations" and add a new application registration: + - Name: Grafana + - Application type: Web app / API + - Sign-on URL: `https:///login/generic_oauth` + +4. Click the name of the new application to open the application details page. + +5. Note down the "Application ID", this will be the OAuth client id. + +6. Click "Settings", then click "Keys" and add a new entry under Passwords + - Key Description: Grafana OAuth + - Duration: Never Expires + +7. Click Save then copy the key value, this will be the OAuth client secret. + +8. 
Configure Grafana as follows: + + ```bash + [auth.generic_oauth] + name = Azure AD + enabled = true + allow_sign_up = true + client_id = + client_secret = + scopes = openid email name + auth_url = https://login.microsoftonline.com//oauth2/authorize + token_url = https://login.microsoftonline.com//oauth2/token + api_url = + team_ids = + allowed_organizations = + ``` +
## [auth.basic] @@ -572,31 +673,6 @@ session provider you have configured. - **memcache:** ex: 127.0.0.1:11211 - **redis:** ex: `addr=127.0.0.1:6379,pool_size=100,prefix=grafana` -If you use MySQL or Postgres as the session store you need to create the -session table manually. - -Mysql Example: - -```bash -CREATE TABLE `session` ( - `key` CHAR(16) NOT NULL, - `data` BLOB, - `expiry` INT(11) UNSIGNED NOT NULL, - PRIMARY KEY (`key`) -) ENGINE=MyISAM DEFAULT CHARSET=utf8; -``` - -Postgres Example: - -```bash -CREATE TABLE session ( - key CHAR(16) NOT NULL, - data BYTEA, - expiry INTEGER NOT NULL, - PRIMARY KEY (key) -); -``` - Postgres valid `sslmode` are `disable`, `require`, `verify-ca`, and `verify-full` (default). ### cookie_name @@ -721,17 +797,14 @@ Set root url to a Grafana instance where you want to publish external snapshots ### external_snapshot_name Set name for external snapshot button. Defaults to `Publish to snapshot.raintank.io` -### remove expired snapshot +### snapshot_remove_expired Enabled to automatically remove expired snapshots -### remove snapshots after 90 days -Time to live for snapshots. - ## [external_image_storage] These options control how images should be made public so they can be shared on services like slack. ### provider -You can choose between (s3, webdav, gcs, azure_blob). If left empty Grafana will ignore the upload action. +You can choose between (s3, webdav, gcs, azure_blob, local). If left empty Grafana will ignore the upload action. ## [external_image_storage.s3] @@ -804,6 +877,4 @@ Defaults to true. Set to false to disable alerting engine and hide Alerting from ### execute_alerts -### execute_alerts = true - Makes it possible to turn off alert rule execution. 
diff --git a/docs/sources/installation/debian.md b/docs/sources/installation/debian.md index b742e96c869..30b6824c751 100644 --- a/docs/sources/installation/debian.md +++ b/docs/sources/installation/debian.md @@ -15,7 +15,7 @@ weight = 1 Description | Download ------------ | ------------- -Stable for Debian-based Linux | [grafana_4.6.3_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.6.3_amd64.deb) +Stable for Debian-based Linux | [grafana_5.0.1_amd64.deb](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.0.1_amd64.deb) Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation. @@ -24,10 +24,11 @@ installation. ```bash -wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_4.6.3_amd64.deb +wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_5.0.1_amd64.deb sudo apt-get install -y adduser libfontconfig -sudo dpkg -i grafana_4.6.3_amd64.deb +sudo dpkg -i grafana_5.0.1_amd64.deb ``` + ## APT Repository Add the following line to your `/etc/apt/sources.list` file. diff --git a/docs/sources/installation/index.md b/docs/sources/installation/index.md index 47d52817b64..a86f426dc13 100644 --- a/docs/sources/installation/index.md +++ b/docs/sources/installation/index.md @@ -7,6 +7,7 @@ aliases = ["installation/installation/", "v2.1/installation/install/"] [menu.docs] name = "Installation" identifier = "installation" +weight = 1 +++ ## Installing Grafana diff --git a/docs/sources/installation/ldap.md b/docs/sources/installation/ldap.md index 8f6be6e1d8c..85501e51d85 100644 --- a/docs/sources/installation/ldap.md +++ b/docs/sources/installation/ldap.md @@ -43,7 +43,7 @@ ssl_skip_verify = false # Search user bind dn bind_dn = "cn=admin,dc=grafana,dc=org" # Search user bind password -# If the password contains # or ; you have to wrap it with trippel quotes. 
Ex """#password;""" +# If the password contains # or ; you have to wrap it with triple quotes. Ex """#password;""" bind_password = 'grafana' # User search filter, for example "(cn=%s)" or "(sAMAccountName=%s)" or "(uid=%s)" diff --git a/docs/sources/installation/rpm.md b/docs/sources/installation/rpm.md index d3e796a78c8..da9ba1ebbe7 100644 --- a/docs/sources/installation/rpm.md +++ b/docs/sources/installation/rpm.md @@ -15,7 +15,8 @@ weight = 2 Description | Download ------------ | ------------- -Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [4.6.3 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.3-1.x86_64.rpm) +Stable for CentOS / Fedora / OpenSuse / Redhat Linux | [5.0.1 (x86-64 rpm)](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.1-1.x86_64.rpm) + Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation. @@ -25,7 +26,7 @@ installation. You can install Grafana using Yum directly. ```bash -$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.3-1.x86_64.rpm +$ sudo yum install https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.1-1.x86_64.rpm ``` Or install manually using `rpm`. @@ -33,15 +34,15 @@ Or install manually using `rpm`. 
#### On CentOS / Fedora / Redhat: ```bash -$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.3-1.x86_64.rpm +$ wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.1-1.x86_64.rpm $ sudo yum install initscripts fontconfig -$ sudo rpm -Uvh grafana-4.6.3-1.x86_64.rpm +$ sudo rpm -Uvh grafana-5.0.1-1.x86_64.rpm ``` #### On OpenSuse: ```bash -$ sudo rpm -i --nodeps grafana-4.6.3-1.x86_64.rpm +$ sudo rpm -i --nodeps grafana-5.0.1-1.x86_64.rpm ``` ## Install via YUM Repository diff --git a/docs/sources/installation/upgrading.md b/docs/sources/installation/upgrading.md index 6a4b4e8f047..5b00fd92924 100644 --- a/docs/sources/installation/upgrading.md +++ b/docs/sources/installation/upgrading.md @@ -101,3 +101,11 @@ as this will make upgrades easier without risking losing your config changes. ## Upgrading from 2.x We are not aware of any issues upgrading directly from 2.x to 4.x but to be on the safe side go via 3.x => 4.x. + +## Upgrading to v5.0 + +The dashboard grid layout engine has changed. All dashboards will be automatically upgraded to new +positioning system when you load them in v5. Dashboards saved in v5 will not work in older versions of Grafana. Some +external panel plugins might need to be updated to work properly. 
+ +For more details on the new panel positioning system, [click here]({{< relref "reference/dashboard.md#panel-size-position" >}}) diff --git a/docs/sources/installation/windows.md b/docs/sources/installation/windows.md index 7c6a97085df..1a8c55aa056 100644 --- a/docs/sources/installation/windows.md +++ b/docs/sources/installation/windows.md @@ -13,7 +13,7 @@ weight = 3 Description | Download ------------ | ------------- -Latest stable package for Windows | [grafana.4.6.3.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-4.6.3.windows-x64.zip) +Latest stable package for Windows | [grafana-5.0.1.windows-x64.zip](https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana-5.0.1.windows-x64.zip) Read [Upgrading Grafana]({{< relref "installation/upgrading.md" >}}) for tips and guidance on updating an existing installation. @@ -30,9 +30,9 @@ on windows. Edit `custom.ini` and uncomment the `http_port` configuration option (`;` is the comment character in ini files) and change it to something like `8080` or similar. That port should not require extra Windows privileges. -Start Grafana by executing `grafana-server.exe`, preferably from the +Start Grafana by executing `grafana-server.exe`, located in the `bin` directory, preferably from the command line. If you want to run Grafana as windows service, download -[NSSM](https://nssm.cc/). It is very easy add Grafana as a Windows +[NSSM](https://nssm.cc/). It is very easy to add Grafana as a Windows service using that tool. Read more about the [configuration options]({{< relref "configuration.md" >}}). @@ -42,7 +42,3 @@ Read more about the [configuration options]({{< relref "configuration.md" >}}). The Grafana backend includes Sqlite3 which requires GCC to compile. So in order to compile Grafana on Windows you need to install GCC. We recommend [TDM-GCC](http://tdm-gcc.tdragon.net/download). 
- -Copy `conf/sample.ini` to a file named `conf/custom.ini` and change the -web server port to something like 8080. The default Grafana port, 3000, -requires special privileges on Windows. diff --git a/docs/sources/plugins/developing/datasources.md b/docs/sources/plugins/developing/datasources.md index 0149f06e1aa..09a005ba714 100644 --- a/docs/sources/plugins/developing/datasources.md +++ b/docs/sources/plugins/developing/datasources.md @@ -84,15 +84,15 @@ An array of: { "target":"upper_75", "datapoints":[ - [622,1450754160000], - [365,1450754220000] + [622, 1450754160000], + [365, 1450754220000] ] }, { "target":"upper_90", "datapoints":[ - [861,1450754160000], - [767,1450754220000] + [861, 1450754160000], + [767, 1450754220000] ] } ] diff --git a/docs/sources/project/building_from_source.md b/docs/sources/project/building_from_source.md index dba04164d00..13d71e8dcf4 100644 --- a/docs/sources/project/building_from_source.md +++ b/docs/sources/project/building_from_source.md @@ -57,7 +57,7 @@ For this you need nodejs (v.6+). ```bash npm install -g yarn yarn install --pure-lockfile -npm run build +npm run watch ``` ## Running Grafana Locally diff --git a/docs/sources/reference/admin.md b/docs/sources/reference/admin.md deleted file mode 100644 index a6863b4ea71..00000000000 --- a/docs/sources/reference/admin.md +++ /dev/null @@ -1,42 +0,0 @@ -+++ -title = "Admin Roles" -description = "Users & Organization permission and administration" -keywords = ["grafana", "configuration", "documentation", "admin", "users", "permissions"] -type = "docs" -[menu.docs] -name = "Admin Roles" -parent = "admin" -weight = 3 -+++ - -# Administration - -Grafana has two levels of administrators: - -* Organizational administrators: These admins can manage users within specific organizations in a particular Grafana installation -* Grafana administrators: These super admins can manage users across all organizations in a Grafana installation. 
They can also change and access system-wide settings. - -## Organizational Administrators - -As an Organizational administrator, you can add `Data Sources`, add Users to your Organization and -modify Organization details and options. - -> *Note*: If Grafana is configured with `users.allow_org_create = true`, any User of any Organization will be able to -> start their own Organization and become the administrator of that Organization. - - -## Grafana Administrators - - -As a Grafana Administrator, you have complete access to any Organization or User in that instance of Grafana. -When performing actions as a Grafana admin, the sidebar will change it's appearance as below to indicate you are performing global server administration. - -From the Grafana Server Admin page, you can access the System Info page which summarizes all of the backend configuration settings of the Grafana server. - -## Why would I have multiple Organizations? - -Organizations in Grafana are best suited for a **multi-tenant deployment**. In a multi-tenant deployment, -Organizations can be used to provide a full Grafana experience to different sets of users from a single Grafana instance, -at the convenience of the Grafana Administrator. - -In most cases, a Grafana installation will only have **one** Organization. Since dashboards, data sources and other configuration items are not shared between organizations, there's no need to create multiple Organizations if you want all your users to have access to the same set of dashboards and data. diff --git a/docs/sources/reference/annotations.md b/docs/sources/reference/annotations.md index de118f37d46..bfc104ef522 100644 --- a/docs/sources/reference/annotations.md +++ b/docs/sources/reference/annotations.md @@ -54,7 +54,8 @@ Annotation events are fetched via annotation queries. To add a new annotation qu open the dashboard settings menu, then select `Annotations`. This will open the dashboard annotations settings view. 
To create a new annotation query hit the `New` button. -![](/img/docs/annotations/new_query.png) + +{{< docs-imagebox img="/img/docs/v50/annotation_new_query.png" max-width="600px" >}} Specify a name for the annotation query. This name is given to the toggle (checkbox) that will allow you to enable/disable showing annotation events from this query. For example you might have two diff --git a/docs/sources/reference/dashboard.md b/docs/sources/reference/dashboard.md index 13f08a1ddaf..dbc3ed8635c 100644 --- a/docs/sources/reference/dashboard.md +++ b/docs/sources/reference/dashboard.md @@ -10,7 +10,7 @@ weight = 100 # Dashboard JSON -A dashboard in Grafana is represented by a JSON object, which stores metadata of its dashboard. Dashboard metadata includes dashboard properties, metadata from rows, panels, template variables, panel queries, etc. +A dashboard in Grafana is represented by a JSON object, which stores metadata of its dashboard. Dashboard metadata includes dashboard properties, metadata from panels, template variables, panel queries, etc. To view the JSON of a dashboard, follow the steps mentioned below: @@ -27,6 +27,7 @@ When a user creates a new dashboard, a new dashboard JSON object is initialized ```json { "id": null, + "uid": "cLV5GDCkz", "title": "New dashboard", "tags": [], "style": "dark", @@ -34,7 +35,7 @@ When a user creates a new dashboard, a new dashboard JSON object is initialized "editable": true, "hideControls": false, "graphTooltip": 1, - "rows": [], + "panels": [], "time": { "from": "now-6h", "to": "now" @@ -49,7 +50,7 @@ When a user creates a new dashboard, a new dashboard JSON object is initialized "annotations": { "list": [] }, - "schemaVersion": 7, + "schemaVersion": 16, "version": 0, "links": [] } @@ -58,224 +59,56 @@ Each field in the dashboard JSON is explained below with its usage: | Name | Usage | | ---- | ----- | -| **id** | unique dashboard id, an integer | +| **id** | unique numeric identifier for the dashboard. 
(generated by the db) | +| **uid** | unique dashboard identifier that can be generated by anyone. string (8-40) | | **title** | current title of dashboard | | **tags** | tags associated with dashboard, an array of strings | | **style** | theme of dashboard, i.e. `dark` or `light` | | **timezone** | timezone of dashboard, i.e. `utc` or `browser` | | **editable** | whether a dashboard is editable or not | -| **hideControls** | whether row controls on the left in green are hidden or not | | **graphTooltip** | 0 for no shared crosshair or tooltip (default), 1 for shared crosshair, 2 for shared crosshair AND shared tooltip | -| **rows** | row metadata, see [rows section](#rows) for details | | **time** | time range for dashboard, i.e. last 6 hours, last 7 days, etc | | **timepicker** | timepicker metadata, see [timepicker section](#timepicker) for details | | **templating** | templating metadata, see [templating section](#templating) for details | | **annotations** | annotations metadata, see [annotations section](#annotations) for details | | **schemaVersion** | version of the JSON schema (integer), incremented each time a Grafana update brings changes to the said schema | | **version** | version of the dashboard (integer), incremented each time the dashboard is updated | -| **links** | TODO | +| **panels** | panels array, see below for detail. 
| 
-### rows
+## Panels

-`rows` field consists of an array of JSON object representing each row in a dashboard, such as shown below:
-
-```json
-  "rows": [
-    {
-      "collapse": false,
-      "editable": true,
-      "height": "200px",
-      "panels": [],
-      "title": "New row"
-    },
-    {
-      "collapse": true,
-      "editable": true,
-      "height": "300px",
-      "panels": [],
-      "title": "New row"
-    }
-  ]
-```
-
-Usage of the fields is explained below:
-
-| Name | Usage |
-| ---- | ----- |
-| **collapse** | whether row is collapsed or not |
-| **editable** | whether a row is editable or not |
-| **height** | height of the row in pixels |
-| **panels** | panels metadata, see [panels section](#panels) for details |
-| **title** | title of row |
-
-#### panels
-
-Panels are the building blocks a dashboard. It consists of datasource queries, type of graphs, aliases, etc. Panel JSON consists of an array of JSON objects, each representing a different panel in a row. Most of the fields are common for all panels but some fields depends on the panel type. Following is an example of panel JSON representing a `graph` panel type:
+Panels are the building blocks of a dashboard. A panel consists of datasource queries, type of graphs, aliases, etc. Panel JSON consists of an array of JSON objects, each representing a different panel. Most of the fields are common for all panels but some fields depend on the panel type. Following is an example of panel JSON of a text panel.
```json "panels": [ - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "editable": true, - "error": false, - "fill": 0, - "grid": { - "leftLogBase": 1, - "leftMax": null, - "leftMin": null, - "rightLogBase": 1, - "rightMax": null, - "rightMin": null, - "threshold1": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2": null, - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "id": 1, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - "links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "max", - "alias": "$tag_instance_id", - "currentTagKey": "", - "currentTagValue": "", - "downsampleAggregator": "avg", - "downsampleInterval": "", - "errors": {}, - "metric": "memory.percent-used", - "refId": "A", - "shouldComputeRate": false, - "tags": { - "app": "$app", - "env": "stage", - "instance_id": "*" - } - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Memory Utilization", - "tooltip": { - "shared": true, - "value_type": "cumulative" - }, - "type": "graph", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "percent", - "short" - ] - }, - { - "aliasColors": {}, - "bars": false, - "datasource": null, - "editable": true, - "error": false, - "fill": 0, - "grid": { - "leftLogBase": 1, - "leftMax": null, - "leftMin": null, - "rightLogBase": 1, - "rightMax": null, - "rightMin": null, - "threshold1": null, - "threshold1Color": "rgba(216, 200, 27, 0.27)", - "threshold2": null, - "threshold2Color": "rgba(234, 112, 112, 0.22)" - }, - "id": 2, - "legend": { - "avg": false, - "current": false, - "max": false, - "min": false, - "show": true, - "total": false, - "values": false - }, - "lines": true, - "linewidth": 1, - 
"links": [], - "nullPointMode": "connected", - "percentage": false, - "pointradius": 5, - "points": false, - "renderer": "flot", - "seriesOverrides": [], - "span": 4, - "stack": false, - "steppedLine": false, - "targets": [ - { - "aggregator": "avg", - "alias": "$tag_instance_id", - "currentTagKey": "", - "currentTagValue": "", - "downsampleAggregator": "avg", - "downsampleInterval": "", - "errors": {}, - "metric": "memory.percent-cached", - "refId": "A", - "shouldComputeRate": false, - "tags": { - "app": "$app", - "env": "prod", - "instance_id": "*" - } - } - ], - "timeFrom": null, - "timeShift": null, - "title": "Memory Cached", - "tooltip": { - "shared": true, - "value_type": "cumulative" - }, - "type": "graph", - "x-axis": true, - "y-axis": true, - "y_formats": [ - "short", - "short" - ] - }, + { + "type": "text", + "title": "Panel Title", + "gridPos": { + "x": 0, + "y": 0, + "w": 12, + "h": 9 + }, + "id": 4, + "mode": "markdown", + "content": "# title" + } ``` -Usage of each field is explained below: +### Panel size & position -| Name | Usage | -| ---- | ----- | -| TODO | TODO | +The gridPos property describes the panel size and position in grid coordinates. + +- `w` 1-24 (the width of the dashboard is divided into 24 columns) +- `h` In grid height units, each represents 30 pixels. +- `x` The x position, in same unit as `w`. +- `y` The y position, in same unit as `h`. + +The grid has a negative gravity that moves panels up if there is empty space above a panel. ### timepicker -Description: TODO - ```json "timepicker": { "collapse": false, @@ -416,7 +249,3 @@ Usage of the above mentioned fields in the templating section is explained below | **refresh** | TODO | | **regex** | TODO | | **type** | type of variable, i.e.
`custom`, `query` or `interval` | - -### annotations - -TODO diff --git a/docs/sources/reference/dashboard_folders.md b/docs/sources/reference/dashboard_folders.md new file mode 100644 index 00000000000..2c287c6891b --- /dev/null +++ b/docs/sources/reference/dashboard_folders.md @@ -0,0 +1,52 @@ ++++ +title = "Dashboard Folders" +keywords = ["grafana", "dashboard", "dashboard folders", "folder", "folders", "documentation", "guide"] +type = "docs" +[menu.docs] +name = "Folders" +parent = "dashboard_features" +weight = 3 ++++ + +# Dashboard Folders + +Folders are a way to organize and group dashboards - very useful if you have a lot of dashboards or multiple teams using the same Grafana instance. + +## How To Create A Folder + +- Create a folder by using the Create Folder link in the side menu (under the create menu (+ icon)) +- Use the create Folder button on the Manage Dashboards page. +- When saving a dashboard, you can either choose a folder for the dashboard to be saved in or create a new folder + +On the Create Folder page, fill in a unique name for the folder and press Create. + +## Manage Dashboards + +{{< docs-imagebox img="/img/docs/v50/manage_dashboard_menu.png" max-width="300px" class="docs-image--right" >}} + +There is a new Manage Dashboards page where you can carry out a variety of tasks: + +- create a folder +- create a dashboard +- move dashboards into folders +- delete multiple dashboards +- navigate to a folder page (where you can set permissions for a folder and/or its dashboards) + +## Dashboard Folder Page + +You reach the dashboard folder page by clicking on the cog icon that appears when you hover +over a folder in the dashboard list in the search result or on the Manage dashboards page. + +The Dashboard Folder Page is similar to the Manage Dashboards page and is where you can carry out the following tasks: + +- Allows you to move or delete dashboards in a folder. +- Rename a folder (under the Settings tab). 
+- Set permissions for the folder (inherited by dashboards in the folder). + +## Permissions + +Permissions can be assigned to a folder and inherited by the containing dashboards. An Access Control List (ACL) is used where +**Organization Role**, **Team** and Individual **User** can be assigned permissions. Read the + [Dashboard & Folder Permissions]({{< relref "administration/permissions.md#dashboard-folder-permissions" >}}) docs for more detail + on the permission system. + diff --git a/docs/sources/reference/export_import.md b/docs/sources/reference/export_import.md index 4c2d5faa3d3..31f32d890f6 100644 --- a/docs/sources/reference/export_import.md +++ b/docs/sources/reference/export_import.md @@ -15,9 +15,9 @@ Grafana Dashboards can easily be exported and imported, either from the UI or fr Dashboards are exported in Grafana JSON format, and contain everything you need (layout, variables, styles, data sources, queries, etc)to import the dashboard at a later time. -The export feature is accessed from the share menu. +The export feature is accessed in the share window which you open by clicking the share button in the dashboard menu. - +{{< docs-imagebox img="/img/docs/v50/export_modal.png" max-width="700px" >}} ### Making a dashboard portable @@ -31,12 +31,12 @@ the dashboard, and will also be added as an required input when the dashboard is To import a dashboard open dashboard search and then hit the import button. - +{{< docs-imagebox img="/img/docs/v50/import_step1.png" max-width="700px" >}} From here you can upload a dashboard json file, paste a [Grafana.com](https://grafana.com) dashboard url or paste dashboard json text directly into the text area. - +{{< docs-imagebox img="/img/docs/v50/import_step2.png" max-width="700px" >}} In step 2 of the import process Grafana will let you change the name of the dashboard, pick what data source you want the dashboard to use and specify any metric prefixes (if the dashboard use any).
@@ -45,7 +45,7 @@ data source you want the dashboard to use and specify any metric prefixes (if th Find dashboards for common server applications at [Grafana.com/dashboards](https://grafana.com/dashboards). - +{{< docs-imagebox img="/img/docs/v50/gcom_dashboard_list.png" max-width="700px" >}} ## Import & Sharing with Grafana 2.x or 3.0 diff --git a/docs/sources/reference/playlist.md b/docs/sources/reference/playlist.md index f509ae4dc0d..5a6bf921334 100644 --- a/docs/sources/reference/playlist.md +++ b/docs/sources/reference/playlist.md @@ -16,7 +16,7 @@ Since Grafana automatically scales Dashboards to any resolution they're perfect ## Creating a Playlist -{{< docs-imagebox img="/img/docs/v3/playlist.png" max-width="25rem" class="docs-image--right">}} +{{< docs-imagebox img="/img/docs/v50/playlist.png" max-width="25rem" class="docs-image--right">}} The Playlist feature can be accessed from Grafana's sidemenu, in the Dashboard submenu. diff --git a/docs/sources/reference/search.md b/docs/sources/reference/search.md index 9fc4d47893c..1bf6fd53e52 100644 --- a/docs/sources/reference/search.md +++ b/docs/sources/reference/search.md @@ -10,22 +10,22 @@ weight = 5 # Dashboard Search -Dashboards can be searched by the dashboard name, filtered by one (or many) tags or filtered by starred status. The dashboard search is accessed through the dashboard picker, available in the dashboard top nav area. +Dashboards can be searched by the dashboard name, filtered by one (or many) tags or filtered by starred status. The dashboard search is accessed through the dashboard picker, available in the dashboard top nav area. The dashboard search can also be opened by using the shortcut `F`. - + -1. `Dashboard Picker`: The Dashboard Picker is your primary navigation tool to move between dashboards. It is present on all dashboards, and open the Dashboard Search. The dashboard picker also doubles as the title of the current dashboard. -2. 
`Search Bar`: The search bar allows you to enter any string and search both database and file based dashboards in real-time. -3. `Starred`: The starred link allows you to filter the list to display only starred dashboards. -4. `Tags`: The tags filter allows you to filter the list by dashboard tags. +1. `Search Bar`: The search bar allows you to enter any string and search both database and file based dashboards in real-time. +2. `Starred`: Here you find all your starred dashboards. +3. `Recent`: Here you find the latest created dashboards. +4. `Folders`: Here you find your dashboard folders and the dashboards they contain. +5. `Root`: The root contains all dashboards that are not placed in a folder. +6. `Tags`: The tags filter allows you to filter the list by dashboard tags. When using only a keyboard, you can use your keyboard arrow keys to navigate the results, hit enter to open the selected dashboard. ## Find by dashboard name - - -To search and load dashboards click the open folder icon in the header or use the shortcut `CTRL`+`F`. Begin typing any part of the desired dashboard names. Search will return results for for any partial string match in real-time, as you type. +Begin typing any part of the desired dashboard names in the search bar. Search will return results for any partial string match in real-time, as you type. Dashboard search is: - Real-time @@ -38,21 +38,8 @@ Tags are a great way to organize your dashboards, especially as the number of da To filter the dashboard list by tag, click on any tag appearing in the right column. The list may be further filtered by clicking on additional tags: - - -Alternately, to see a list of all available tags, click the tags link in the search bar. All tags will be shown, and when a tag is selected, the dashboard search will be instantly filtered: - - +Alternately, to see a list of all available tags, click the tags dropdown menu.
All tags will be shown, and when a tag is selected, the dashboard search will be instantly filtered: When using only a keyboard: `tab` to focus on the *tags* link, `▼` down arrow key to find a tag and select with the `Enter` key. -**Note**: When multiple tags are selected, Grafana will show dashboards that include **all**. - - -## Filter by Starred - -Starring is a great way to organize and find commonly used dashboards. To show only starred dashboards in the list, click the *starred* link in the search bar: - - - -When using only a keyboard: `tab` to focus on the *stars* link, `▼` down arrow key to find a tag and select with the `Enter` key. +**Note**: When multiple tags are selected, Grafana will show dashboards that include **all**. \ No newline at end of file diff --git a/docs/sources/reference/sharing.md b/docs/sources/reference/sharing.md index badd3b5712a..59c2e0345ea 100644 --- a/docs/sources/reference/sharing.md +++ b/docs/sources/reference/sharing.md @@ -24,7 +24,7 @@ A dashboard snapshot is an instant way to share an interactive dashboard publicl (metric, template and annotation) and panel links, leaving only the visible metric data and series names embedded into your dashboard. Dashboard snapshots can be accessed by anyone who has the link and can reach the URL. -![](/img/docs/v4/share_panel_modal.png) +{{< docs-imagebox img="/img/docs/v50/share_panel_modal.png" max-width="700px" >}} ### Publish snapshots @@ -39,7 +39,7 @@ Click a panel title to open the panel menu, then click share in the panel menu t ### Direct Link Rendered Image -You also get a link to service side rendered PNG of the panel. Useful if you want to share an image of the panel. Please note that for OSX and Windows, you will need to ensure that a `phantomjs` binary is available under `vendor/phantomjs/phantomjs`. For Linux, a `phantomjs` binary is included - however, you should ensure that any requisite libraries (e.g. libfontconfig) are available. 
+You also get a link to service side rendered PNG of the panel. Useful if you want to share an image of the panel. Please note that for OSX and Windows, you will need to ensure that a `phantomjs` binary is available under `tools/phantomjs/phantomjs`. For Linux, a `phantomjs` binary is included - however, you should ensure that any requisite libraries (e.g. libfontconfig) are available. Example of a link to a server-side rendered PNG: @@ -70,9 +70,9 @@ Below there should be an interactive Grafana graph embedded in an iframe: ### Export Panel Data -![](/img/docs/v4/export_panel_data.png) +{{< docs-imagebox img="/img/docs/v50/export_panel_data.png" max-width="500px" >}} -The submenu for a panel can be found by clicking on the title of a panel and then on the hamburger (three horizontal lines) submenu on the left of the context menu. +The submenu for a panel can be found by clicking on the title of a panel and then on the More submenu. This menu contains two options for exporting data: diff --git a/docs/sources/reference/templating.md b/docs/sources/reference/templating.md index 36308adf52f..3a15b4ed7d1 100644 --- a/docs/sources/reference/templating.md +++ b/docs/sources/reference/templating.md @@ -1,20 +1,20 @@ +++ -title = "Templating" +title = "Variables" keywords = ["grafana", "templating", "documentation", "guide"] type = "docs" [menu.docs] -name = "Templating" +name = "Variables" parent = "dashboard_features" weight = 1 +++ -# Templating +# Variables -Templating allows for more interactive and dynamic dashboards. Instead of hard-coding things like server, application +Variables allows for more interactive and dynamic dashboards. Instead of hard-coding things like server, application and sensor name in you metric queries you can use variables in their place. Variables are shown as dropdown select boxes at the top of the dashboard. These dropdowns make it easy to change the data being displayed in your dashboard. 
- +{{< docs-imagebox img="/img/docs/v50/variables_dashboard.png" >}} ## What is a variable? @@ -43,7 +43,7 @@ is the set of values you can choose from. ## Adding a variable - +{{< docs-imagebox img="/img/docs/v50/variables_var_list.png" max-width="800px" >}} You add variables via Dashboard cogs menu > Templating. This opens up a list of variables and a `New` button to create a new variable. @@ -133,7 +133,7 @@ Option | Description *Tags query* | Data source query that should return a list of tags *Tag values query* | Data source query that should return a list of values for a specified tag key. Use `$tag` in the query to refer the currently selected tag. -![](/img/docs/v4/variable_dropdown_tags.png) +{{< docs-imagebox img="/img/docs/v50/variable_dropdown_tags.png" max-width="300px" >}} ### Interval variables diff --git a/docs/sources/reference/timerange.md b/docs/sources/reference/timerange.md index a4d6fc62336..4121ed87931 100644 --- a/docs/sources/reference/timerange.md +++ b/docs/sources/reference/timerange.md @@ -13,7 +13,7 @@ weight = 7 Grafana provides numerous ways to manage the time ranges of the data being visualized, both at the Dashboard-level and the Panel-level. - + In the top right, you have the master Dashboard time picker (it's in between the 'Zoom out' and the 'Refresh' links). @@ -39,11 +39,11 @@ Week to date | `now/w` | `now` Previous Month | `now-1M/M` | `now-1M/M` -## Dashboard-Level Time Picker Settings +## Dashboard Time Options -There are two settings available from the Dashboard Settings area, allowing customization of the auto-refresh intervals and the definition of `now`. +There are two settings available in the Dashboard Settings General tab, allowing customization of the auto-refresh intervals and the definition of `now`. 
- + ### Auto-Refresh Options @@ -59,11 +59,11 @@ Users often ask, [when will then be now](https://www.youtube.com/watch?v=VeZ9HhH You can override the relative time range for individual panels, causing them to be different than what is selected in the Dashboard time picker in the upper right. This allows you to show metrics from different time periods or days at the same time. - +{{< docs-imagebox img="/img/docs/v50/panel_time_override.png" max-width="500px" >}} You control these overrides in panel editor mode and the tab `Time Range`. - +{{< docs-imagebox img="/img/docs/v50/time_range_tab.png" max-width="500px" >}} When you zoom or change the Dashboard time to a custom absolute time range, all panel overrides will be disabled. The panel relative time override is only active when the dashboard time is also relative. The panel timeshift override is always active, even when the dashboard time is absolute. diff --git a/docs/sources/tutorials/iis.md b/docs/sources/tutorials/iis.md new file mode 100644 index 00000000000..63a41d67c16 --- /dev/null +++ b/docs/sources/tutorials/iis.md @@ -0,0 +1,89 @@ ++++ +title = "Grafana with IIS Reverse Proxy on Windows" +type = "docs" +keywords = ["grafana", "tutorials", "proxy", "IIS", "windows"] +[menu.docs] +parent = "tutorials" +weight = 10 ++++ + +# How to Use IIS with URL Rewrite as a Reverse Proxy for Grafana on Windows + +If you want Grafana to be a subpath or subfolder under a website in IIS then the URL Rewrite module for ISS can be used to support this. + +Example: + +- Parent site: http://localhost:8080 +- Grafana: http://localhost:3000 + +Grafana as a subpath: http://localhost:8080/grafana + +## Setup + +If you have not already done it, then a requirement is to install URL Rewrite module for IIS. 
+ +Download and install the URL Rewrite module for IIS: https://www.iis.net/downloads/microsoft/url-rewrite + +## Grafana Config + +The Grafana config can be set by creating a file named `custom.ini` in the `conf` subdirectory of your Grafana installation. See the [installation instructions](http://docs.grafana.org/installation/windows/#configure) for more details. + +Given that the subpath should be `grafana` and the parent site is `localhost:8080` then add this to the `custom.ini` config file: + + ```bash +[server] +domain = localhost:8080 +root_url = %(protocol)s://%(domain)s:/grafana +``` + +Restart the Grafana server after changing the config file. + +## IIS Config + +1. Open the IIS Manager and click on the parent website +2. In the admin console for this website, double click on the Url Rewrite option: + {{< docs-imagebox img="/img/docs/tutorials/IIS_admin_console.png" max-width= "800px" >}} + +3. Click on the `Add Rule(s)...` action +4. Choose the Blank Rule template for an Inbound Rule + {{< docs-imagebox img="/img/docs/tutorials/IIS_add_inbound_rule.png" max-width= "800px" >}} + +5. Create an Inbound Rule for the parent website (localhost:8080 in this example) with the following settings: + - pattern: `grafana(/)?(.*)` + - check the `Ignore case` checkbox + - rewrite url set to `http://localhost:3000/{R:2}` + - check the `Append query string` checkbox + - check the `Stop processing of subsequent rules` checkbox + + {{< docs-imagebox img="/img/docs/tutorials/IIS_url_rewrite.png" max-width= "800px" >}} + +Finally, navigate to `http://localhost:8080/grafana` (replace `http://localhost:8080` with your parent domain) and you should come to the Grafana login page. + +## Troubleshooting + +### 404 error + +When navigating to the grafana url (`http://localhost:8080/grafana` in the example above) and a `HTTP Error 404.0 - Not Found` error is returned then either: + +- the pattern for the Inbound Rule is incorrect. 
Edit the rule, click on the `Test pattern...` button, test the part of the url after `http://localhost:8080/` and make sure it matches. For `grafana/login` the test should return 3 capture groups: {R:0}: `grafana` {R:1}: `/` and {R:2}: `login`. +- The `root_url` setting in the Grafana config file does not match the parent url with subpath. + +### Grafana Website only shows text with no images or css + +{{< docs-imagebox img="/img/docs/tutorials/IIS_proxy_error.png" max-width= "800px" >}} + +1. The `root_url` setting in the Grafana config file does not match the parent url with subpath. This could happen if the root_url is commented out by mistake (`;` is used for commenting out a line in .ini files): + + `; root_url = %(protocol)s://%(domain)s:/grafana` + +2. or if the subpath in the `root_url` setting does not match the subpath used in the pattern in the Inbound Rule in IIS: + + `root_url = %(protocol)s://%(domain)s:/grafana` + + pattern in Inbound Rule: `wrongsubpath(/)?(.*)` + +3. or if the Rewrite Url in the Inbound Rule is incorrect. + + The Rewrite Url should not include the subpath. + + The Rewrite Url should contain the capture group from the pattern matching that returns the part of the url after the subpath. The pattern used above returns 3 capture groups and the third one {R:2} returns the part of the url after `http://localhost:8080/grafana/`. 
diff --git a/docs/sources/features/whatsnew/index.md b/docs/sources/whatsnew/index.md similarity index 90% rename from docs/sources/features/whatsnew/index.md rename to docs/sources/whatsnew/index.md index 30357af4668..df472f07093 100644 --- a/docs/sources/features/whatsnew/index.md +++ b/docs/sources/whatsnew/index.md @@ -3,7 +3,7 @@ title = "What's New in Grafana" [menu.docs] name = "What's New In Grafana" identifier = "whatsnew" -weight = 2 +weight = 3 +++ diff --git a/docs/versions.json b/docs/versions.json new file mode 100644 index 00000000000..2dcc7ebe776 --- /dev/null +++ b/docs/versions.json @@ -0,0 +1,10 @@ +[ + { "version": "v5.1", "path": "/v5.1", "archived": false }, + { "version": "v5.0", "path": "/", "archived": false, "current": true }, + { "version": "v4.6", "path": "/v4.6", "archived": true }, + { "version": "v4.5", "path": "/v4.5", "archived": true }, + { "version": "v4.4", "path": "/v4.4", "archived": true }, + { "version": "v4.3", "path": "/v4.3", "archived": true }, + { "version": "v4.1", "path": "/v4.1", "archived": true }, + { "version": "v3.1", "path": "/v3.1", "archived": true } +] diff --git a/jest.config.js b/jest.config.js index ead97e39dad..606465c9840 100644 --- a/jest.config.js +++ b/jest.config.js @@ -24,5 +24,6 @@ module.exports = { "setupFiles": [ "./public/test/jest-shim.ts", "./public/test/jest-setup.ts" - ] + ], + "snapshotSerializers": ["enzyme-to-json/serializer"], }; diff --git a/latest.json b/latest.json index a746e92c3b3..b476f44a00a 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "4.6.2", - "testing": "4.6.2" + "stable": "5.0.0", + "testing": "5.0.0" } diff --git a/package.json b/package.json index 509b9bb106d..9493965f2fd 100644 --- a/package.json +++ b/package.json @@ -4,7 +4,7 @@ "company": "Grafana Labs" }, "name": "grafana", - "version": "5.0.0-pre1", + "version": "5.1.0-pre1", "repository": { "type": "git", "url": "http://github.com/grafana/grafana.git" @@ -19,12 +19,14 @@ "angular-mocks": 
"^1.6.6", "autoprefixer": "^6.4.0", "awesome-typescript-loader": "^3.2.3", + "axios": "^0.17.1", "babel-core": "^6.26.0", "babel-loader": "^7.1.2", "babel-preset-es2015": "^6.24.1", "css-loader": "^0.28.7", "enzyme": "^3.1.0", "enzyme-adapter-react-16": "^1.0.1", + "enzyme-to-json": "^3.3.0", "es6-promise": "^3.0.2", "es6-shim": "^0.35.3", "expect.js": "~0.2.0", @@ -54,7 +56,7 @@ "html-loader": "^0.5.1", "html-webpack-plugin": "^2.30.1", "husky": "^0.14.3", - "jest": "^21.2.1", + "jest": "^22.0.4", "jshint-stylish": "~2.2.1", "json-loader": "^0.5.7", "karma": "1.7.0", @@ -67,6 +69,7 @@ "karma-webpack": "^2.0.4", "lint-staged": "^6.0.0", "load-grunt-tasks": "3.5.2", + "mobx-react-devtools": "^4.2.15", "mocha": "^4.0.1", "ng-annotate-loader": "^0.6.1", "ng-annotate-webpack-plugin": "^0.2.1-pre", @@ -83,12 +86,12 @@ "sinon": "1.17.6", "systemjs": "0.20.19", "systemjs-plugin-css": "^0.1.36", - "ts-jest": "^21.1.3", - "ts-loader": "^2.3.7", - "tslint": "^5.7.0", + "ts-jest": "^22.0.0", + "ts-loader": "^3.2.0", + "tslint": "^5.8.0", "tslint-loader": "^3.5.3", - "typescript": "^2.5.2", - "webpack": "^3.6.0", + "typescript": "^2.6.2", + "webpack": "^3.10.0", "webpack-bundle-analyzer": "^2.9.0", "webpack-cleanup-plugin": "^0.5.1", "webpack-merge": "^4.1.0", @@ -103,6 +106,7 @@ "lint": "tslint -c tslint.json --project tsconfig.json --type-check", "karma": "node ./node_modules/grunt-cli/bin/grunt karma:dev", "jest": "node ./node_modules/jest-cli/bin/jest.js --notify --watch", + "api-tests": "node ./node_modules/jest-cli/bin/jest.js --notify --watch --config=tests/api/jest.js", "precommit": "lint-staged && node ./node_modules/grunt-cli/bin/grunt precommit" }, "lint-staged": { @@ -113,6 +117,10 @@ "*.scss": [ "prettier --write", "git add" + ], + "*.go": [ + "gofmt -w -s", + "git add" ] }, "prettier": { @@ -124,7 +132,6 @@ "dependencies": { "angular": "^1.6.6", "angular-bindonce": "^0.3.1", - "angular-mocks": "^1.6.6", "angular-native-dragdrop": "^1.2.2", "angular-route": 
"^1.6.6", "angular-sanitize": "^1.6.6", @@ -134,22 +141,31 @@ "clipboard": "^1.7.1", "d3": "^4.11.0", "d3-scale-chromatic": "^1.1.1", - "eventemitter3": "^2.0.2", + "eventemitter3": "^2.0.3", "file-saver": "^1.3.3", "jquery": "^3.2.1", "lodash": "^4.17.4", + "mobx": "^3.4.1", + "mobx-react": "^4.3.5", + "mobx-state-tree": "^1.3.1", "moment": "^2.18.1", "mousetrap": "^1.6.0", + "mousetrap-global-bind": "^1.1.0", "perfect-scrollbar": "^1.2.0", "prop-types": "^15.6.0", - "react": "^16.1.1", - "react-dom": "^16.1.1", - "react-grid-layout": "^0.16.1", + "react": "^16.2.0", + "react-dom": "^16.2.0", + "react-grid-layout-grafana": "0.16.0", + "react-highlight-words": "^0.10.0", + "react-popper": "^0.7.5", + "react-select": "^1.1.0", "react-sizeme": "^2.3.6", + "react-transition-group": "^2.2.1", "remarkable": "^1.7.1", + "rst2html": "github:thoward/rst2html#990cb89", "rxjs": "^5.4.3", "tether": "^1.4.0", - "tether-drop": "https://github.com/torkelo/drop", + "tether-drop": "https://github.com/torkelo/drop/tarball/master", "tinycolor2": "^1.4.1" } } diff --git a/packaging/publish/publish_both.sh b/packaging/publish/publish_both.sh index 9736cbddd6c..597d113f96a 100755 --- a/packaging/publish/publish_both.sh +++ b/packaging/publish/publish_both.sh @@ -1,5 +1,5 @@ #! /usr/bin/env bash -version=4.6.3 +version=5.0.1 wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_${version}_amd64.deb diff --git a/packaging/publish/publish_testing.sh b/packaging/publish/publish_testing.sh index 276193ad63f..08ba2a89dd9 100755 --- a/packaging/publish/publish_testing.sh +++ b/packaging/publish/publish_testing.sh @@ -1,6 +1,6 @@ #! 
/usr/bin/env bash -deb_ver=4.6.0-beta1 -rpm_ver=4.6.0-beta1 +deb_ver=5.0.0-beta5 +rpm_ver=5.0.0-beta5 wget https://s3-us-west-2.amazonaws.com/grafana-releases/release/grafana_${deb_ver}_amd64.deb diff --git a/pkg/api/admin.go b/pkg/api/admin.go index d7f5a240416..286f23356ea 100644 --- a/pkg/api/admin.go +++ b/pkg/api/admin.go @@ -4,12 +4,11 @@ import ( "strings" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" ) -func AdminGetSettings(c *middleware.Context) { +func AdminGetSettings(c *m.ReqContext) { settings := make(map[string]interface{}) for _, section := range setting.Cfg.Sections() { @@ -30,7 +29,7 @@ func AdminGetSettings(c *middleware.Context) { c.JSON(200, settings) } -func AdminGetStats(c *middleware.Context) { +func AdminGetStats(c *m.ReqContext) { statsQuery := m.GetAdminStatsQuery{} diff --git a/pkg/api/admin_users.go b/pkg/api/admin_users.go index 1868c589673..4cf7f4db4ec 100644 --- a/pkg/api/admin_users.go +++ b/pkg/api/admin_users.go @@ -4,12 +4,11 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/util" ) -func AdminCreateUser(c *middleware.Context, form dtos.AdminCreateUserForm) { +func AdminCreateUser(c *m.ReqContext, form dtos.AdminCreateUserForm) { cmd := m.CreateUserCommand{ Login: form.Login, Email: form.Email, @@ -47,7 +46,7 @@ func AdminCreateUser(c *middleware.Context, form dtos.AdminCreateUserForm) { c.JSON(200, result) } -func AdminUpdateUserPassword(c *middleware.Context, form dtos.AdminUpdateUserPasswordForm) { +func AdminUpdateUserPassword(c *m.ReqContext, form dtos.AdminUpdateUserPasswordForm) { userId := c.ParamsInt64(":id") if len(form.Password) < 4 { @@ -77,7 +76,7 @@ func AdminUpdateUserPassword(c *middleware.Context, 
form dtos.AdminUpdateUserPas c.JsonOK("User password updated") } -func AdminUpdateUserPermissions(c *middleware.Context, form dtos.AdminUpdateUserPermissionsForm) { +func AdminUpdateUserPermissions(c *m.ReqContext, form dtos.AdminUpdateUserPermissionsForm) { userId := c.ParamsInt64(":id") cmd := m.UpdateUserPermissionsCommand{ @@ -93,7 +92,7 @@ func AdminUpdateUserPermissions(c *middleware.Context, form dtos.AdminUpdateUser c.JsonOK("User permissions updated") } -func AdminDeleteUser(c *middleware.Context) { +func AdminDeleteUser(c *m.ReqContext) { userId := c.ParamsInt64(":id") cmd := m.DeleteUserCommand{UserId: userId} diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go index 58ea56cc8a7..eea4ef90c05 100644 --- a/pkg/api/alerting.go +++ b/pkg/api/alerting.go @@ -5,14 +5,14 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" - "github.com/grafana/grafana/pkg/models" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/alerting" + "github.com/grafana/grafana/pkg/services/guardian" ) -func ValidateOrgAlert(c *middleware.Context) { +func ValidateOrgAlert(c *m.ReqContext) { id := c.ParamsInt64(":alertId") - query := models.GetAlertByIdQuery{Id: id} + query := m.GetAlertByIdQuery{Id: id} if err := bus.Dispatch(&query); err != nil { c.JsonApiErr(404, "Alert not found", nil) @@ -25,14 +25,14 @@ func ValidateOrgAlert(c *middleware.Context) { } } -func GetAlertStatesForDashboard(c *middleware.Context) Response { +func GetAlertStatesForDashboard(c *m.ReqContext) Response { dashboardId := c.QueryInt64("dashboardId") if dashboardId == 0 { return ApiError(400, "Missing query parameter dashboardId", nil) } - query := models.GetAlertStatesForDashboardQuery{ + query := m.GetAlertStatesForDashboardQuery{ OrgId: c.OrgId, DashboardId: c.QueryInt64("dashboardId"), } @@ -45,12 +45,13 @@ func GetAlertStatesForDashboard(c *middleware.Context) Response { } // GET 
/api/alerts -func GetAlerts(c *middleware.Context) Response { - query := models.GetAlertsQuery{ +func GetAlerts(c *m.ReqContext) Response { + query := m.GetAlertsQuery{ OrgId: c.OrgId, DashboardId: c.QueryInt64("dashboardId"), PanelId: c.QueryInt64("panelId"), Limit: c.QueryInt64("limit"), + User: c.SignedInUser, } states := c.QueryStrings("state") @@ -62,47 +63,15 @@ func GetAlerts(c *middleware.Context) Response { return ApiError(500, "List alerts failed", err) } - dashboardIds := make([]int64, 0) - alertDTOs := make([]*dtos.AlertRule, 0) for _, alert := range query.Result { - dashboardIds = append(dashboardIds, alert.DashboardId) - alertDTOs = append(alertDTOs, &dtos.AlertRule{ - Id: alert.Id, - DashboardId: alert.DashboardId, - PanelId: alert.PanelId, - Name: alert.Name, - Message: alert.Message, - State: alert.State, - NewStateDate: alert.NewStateDate, - ExecutionError: alert.ExecutionError, - EvalData: alert.EvalData, - }) + alert.Url = m.GetDashboardUrl(alert.DashboardUid, alert.DashboardSlug) } - dashboardsQuery := models.GetDashboardsQuery{ - DashboardIds: dashboardIds, - } - - if len(alertDTOs) > 0 { - if err := bus.Dispatch(&dashboardsQuery); err != nil { - return ApiError(500, "List alerts failed", err) - } - } - - //TODO: should be possible to speed this up with lookup table - for _, alert := range alertDTOs { - for _, dash := range dashboardsQuery.Result { - if alert.DashboardId == dash.Id { - alert.DashbboardUri = "db/" + dash.Slug - } - } - } - - return Json(200, alertDTOs) + return Json(200, query.Result) } // POST /api/alerts/test -func AlertTest(c *middleware.Context, dto dtos.AlertTestCommand) Response { +func AlertTest(c *m.ReqContext, dto dtos.AlertTestCommand) Response { if _, idErr := dto.Dashboard.Get("id").Int64(); idErr != nil { return ApiError(400, "The dashboard needs to be saved at least once before you can test an alert rule", nil) } @@ -144,9 +113,9 @@ func AlertTest(c *middleware.Context, dto dtos.AlertTestCommand) Response { } // 
GET /api/alerts/:id -func GetAlert(c *middleware.Context) Response { +func GetAlert(c *m.ReqContext) Response { id := c.ParamsInt64(":alertId") - query := models.GetAlertByIdQuery{Id: id} + query := m.GetAlertByIdQuery{Id: id} if err := bus.Dispatch(&query); err != nil { return ApiError(500, "List alerts failed", err) @@ -155,30 +124,12 @@ func GetAlert(c *middleware.Context) Response { return Json(200, &query.Result) } -// DEL /api/alerts/:id -func DelAlert(c *middleware.Context) Response { - alertId := c.ParamsInt64(":alertId") - - if alertId == 0 { - return ApiError(401, "Failed to parse alertid", nil) - } - - cmd := models.DeleteAlertCommand{AlertId: alertId} - - if err := bus.Dispatch(&cmd); err != nil { - return ApiError(500, "Failed to delete alert", err) - } - - var resp = map[string]interface{}{"alertId": alertId} - return Json(200, resp) -} - -func GetAlertNotifiers(c *middleware.Context) Response { +func GetAlertNotifiers(c *m.ReqContext) Response { return Json(200, alerting.GetNotifiers()) } -func GetAlertNotifications(c *middleware.Context) Response { - query := &models.GetAllAlertNotificationsQuery{OrgId: c.OrgId} +func GetAlertNotifications(c *m.ReqContext) Response { + query := &m.GetAllAlertNotificationsQuery{OrgId: c.OrgId} if err := bus.Dispatch(query); err != nil { return ApiError(500, "Failed to get alert notifications", err) @@ -200,8 +151,8 @@ func GetAlertNotifications(c *middleware.Context) Response { return Json(200, result) } -func GetAlertNotificationById(c *middleware.Context) Response { - query := &models.GetAlertNotificationsQuery{ +func GetAlertNotificationById(c *m.ReqContext) Response { + query := &m.GetAlertNotificationsQuery{ OrgId: c.OrgId, Id: c.ParamsInt64("notificationId"), } @@ -213,7 +164,7 @@ func GetAlertNotificationById(c *middleware.Context) Response { return Json(200, query.Result) } -func CreateAlertNotification(c *middleware.Context, cmd models.CreateAlertNotificationCommand) Response { +func 
CreateAlertNotification(c *m.ReqContext, cmd m.CreateAlertNotificationCommand) Response { cmd.OrgId = c.OrgId if err := bus.Dispatch(&cmd); err != nil { @@ -223,7 +174,7 @@ func CreateAlertNotification(c *middleware.Context, cmd models.CreateAlertNotifi return Json(200, cmd.Result) } -func UpdateAlertNotification(c *middleware.Context, cmd models.UpdateAlertNotificationCommand) Response { +func UpdateAlertNotification(c *m.ReqContext, cmd m.UpdateAlertNotificationCommand) Response { cmd.OrgId = c.OrgId if err := bus.Dispatch(&cmd); err != nil { @@ -233,8 +184,8 @@ func UpdateAlertNotification(c *middleware.Context, cmd models.UpdateAlertNotifi return Json(200, cmd.Result) } -func DeleteAlertNotification(c *middleware.Context) Response { - cmd := models.DeleteAlertNotificationCommand{ +func DeleteAlertNotification(c *m.ReqContext) Response { + cmd := m.DeleteAlertNotificationCommand{ OrgId: c.OrgId, Id: c.ParamsInt64("notificationId"), } @@ -247,7 +198,7 @@ func DeleteAlertNotification(c *middleware.Context) Response { } //POST /api/alert-notifications/test -func NotificationTest(c *middleware.Context, dto dtos.NotificationTestCommand) Response { +func NotificationTest(c *m.ReqContext, dto dtos.NotificationTestCommand) Response { cmd := &alerting.NotificationTestCommand{ Name: dto.Name, Type: dto.Type, @@ -255,7 +206,7 @@ func NotificationTest(c *middleware.Context, dto dtos.NotificationTestCommand) R } if err := bus.Dispatch(cmd); err != nil { - if err == models.ErrSmtpNotEnabled { + if err == m.ErrSmtpNotEnabled { return ApiError(412, err.Error(), err) } return ApiError(500, "Failed to send alert notifications", err) @@ -265,9 +216,25 @@ func NotificationTest(c *middleware.Context, dto dtos.NotificationTestCommand) R } //POST /api/alerts/:alertId/pause -func PauseAlert(c *middleware.Context, dto dtos.PauseAlertCommand) Response { +func PauseAlert(c *m.ReqContext, dto dtos.PauseAlertCommand) Response { alertId := c.ParamsInt64("alertId") - cmd := 
models.PauseAlertCommand{ + + query := m.GetAlertByIdQuery{Id: alertId} + + if err := bus.Dispatch(&query); err != nil { + return ApiError(500, "Get Alert failed", err) + } + + guardian := guardian.New(query.Result.DashboardId, c.OrgId, c.SignedInUser) + if canEdit, err := guardian.CanEdit(); err != nil || !canEdit { + if err != nil { + return ApiError(500, "Error while checking permissions for Alert", err) + } + + return ApiError(403, "Access denied to this dashboard and alert", nil) + } + + cmd := m.PauseAlertCommand{ OrgId: c.OrgId, AlertIds: []int64{alertId}, Paused: dto.Paused, @@ -277,25 +244,25 @@ func PauseAlert(c *middleware.Context, dto dtos.PauseAlertCommand) Response { return ApiError(500, "", err) } - var response models.AlertStateType = models.AlertStatePending - pausedState := "un paused" + var response m.AlertStateType = m.AlertStatePending + pausedState := "un-paused" if cmd.Paused { - response = models.AlertStatePaused + response = m.AlertStatePaused pausedState = "paused" } result := map[string]interface{}{ "alertId": alertId, "state": response, - "message": "alert " + pausedState, + "message": "Alert " + pausedState, } return Json(200, result) } //POST /api/admin/pause-all-alerts -func PauseAllAlerts(c *middleware.Context, dto dtos.PauseAllAlertsCommand) Response { - updateCmd := models.PauseAllAlertCommand{ +func PauseAllAlerts(c *m.ReqContext, dto dtos.PauseAllAlertsCommand) Response { + updateCmd := m.PauseAllAlertCommand{ Paused: dto.Paused, } @@ -303,10 +270,10 @@ func PauseAllAlerts(c *middleware.Context, dto dtos.PauseAllAlertsCommand) Respo return ApiError(500, "Failed to pause alerts", err) } - var response models.AlertStateType = models.AlertStatePending + var response m.AlertStateType = m.AlertStatePending pausedState := "un paused" if updateCmd.Paused { - response = models.AlertStatePaused + response = m.AlertStatePaused pausedState = "paused" } diff --git a/pkg/api/alerting_test.go b/pkg/api/alerting_test.go new file mode 100644 
index 00000000000..9302ef7beca --- /dev/null +++ b/pkg/api/alerting_test.go @@ -0,0 +1,96 @@ +package api + +import ( + "testing" + + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestAlertingApiEndpoint(t *testing.T) { + Convey("Given an alert in a dashboard with an acl", t, func() { + + singleAlert := &m.Alert{Id: 1, DashboardId: 1, Name: "singlealert"} + + bus.AddHandler("test", func(query *m.GetAlertByIdQuery) error { + query.Result = singleAlert + return nil + }) + + viewerRole := m.ROLE_VIEWER + editorRole := m.ROLE_EDITOR + + aclMockResp := []*m.DashboardAclInfoDTO{} + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = aclMockResp + return nil + }) + + bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { + query.Result = []*m.Team{} + return nil + }) + + Convey("When user is editor and not in the ACL", func() { + Convey("Should not be able to pause the alert", func() { + cmd := dtos.PauseAlertCommand{ + AlertId: 1, + Paused: true, + } + postAlertScenario("When calling POST on", "/api/alerts/1/pause", "/api/alerts/:alertId/pause", m.ROLE_EDITOR, cmd, func(sc *scenarioContext) { + CallPauseAlert(sc) + So(sc.resp.Code, ShouldEqual, 403) + }) + }) + }) + + Convey("When user is editor and dashboard has default ACL", func() { + aclMockResp = []*m.DashboardAclInfoDTO{ + {Role: &viewerRole, Permission: m.PERMISSION_VIEW}, + {Role: &editorRole, Permission: m.PERMISSION_EDIT}, + } + + Convey("Should be able to pause the alert", func() { + cmd := dtos.PauseAlertCommand{ + AlertId: 1, + Paused: true, + } + postAlertScenario("When calling POST on", "/api/alerts/1/pause", "/api/alerts/:alertId/pause", m.ROLE_EDITOR, cmd, func(sc *scenarioContext) { + CallPauseAlert(sc) + So(sc.resp.Code, ShouldEqual, 200) + }) + }) + }) + }) +} + +func CallPauseAlert(sc *scenarioContext) { + 
bus.AddHandler("test", func(cmd *m.PauseAlertCommand) error { + return nil + }) + + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() +} + +func postAlertScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.PauseAlertCommand, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.UserId = TestUserID + sc.context.OrgId = TestOrgID + sc.context.OrgRole = role + + return PauseAlert(c, cmd) + }) + + sc.m.Post(routePattern, sc.defaultHandler) + + fn(sc) + }) +} diff --git a/pkg/api/annotations.go b/pkg/api/annotations.go index 0bf95557abc..fb75e0bf129 100644 --- a/pkg/api/annotations.go +++ b/pkg/api/annotations.go @@ -6,12 +6,13 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/middleware" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/annotations" + "github.com/grafana/grafana/pkg/services/guardian" "github.com/grafana/grafana/pkg/util" ) -func GetAnnotations(c *middleware.Context) Response { +func GetAnnotations(c *m.ReqContext) Response { query := &annotations.ItemQuery{ From: c.QueryInt64("from") / 1000, @@ -50,7 +51,11 @@ func (e *CreateAnnotationError) Error() string { return e.message } -func PostAnnotation(c *middleware.Context, cmd dtos.PostAnnotationsCmd) Response { +func PostAnnotation(c *m.ReqContext, cmd dtos.PostAnnotationsCmd) Response { + if canSave, err := canSaveByDashboardId(c, cmd.DashboardId); err != nil || !canSave { + return dashboardGuardianResponse(err) + } + repo := annotations.GetRepository() if cmd.Text == "" { @@ -119,7 +124,7 @@ func formatGraphiteAnnotation(what string, data string) string { return text } -func PostGraphiteAnnotation(c *middleware.Context, cmd dtos.PostGraphiteAnnotationsCmd) Response { +func 
PostGraphiteAnnotation(c *m.ReqContext, cmd dtos.PostGraphiteAnnotationsCmd) Response { repo := annotations.GetRepository() if cmd.What == "" { @@ -173,11 +178,15 @@ func PostGraphiteAnnotation(c *middleware.Context, cmd dtos.PostGraphiteAnnotati }) } -func UpdateAnnotation(c *middleware.Context, cmd dtos.UpdateAnnotationsCmd) Response { +func UpdateAnnotation(c *m.ReqContext, cmd dtos.UpdateAnnotationsCmd) Response { annotationId := c.ParamsInt64(":annotationId") repo := annotations.GetRepository() + if resp := canSave(c, repo, annotationId); resp != nil { + return resp + } + item := annotations.Item{ OrgId: c.OrgId, UserId: c.UserId, @@ -208,7 +217,7 @@ func UpdateAnnotation(c *middleware.Context, cmd dtos.UpdateAnnotationsCmd) Resp return ApiSuccess("Annotation updated") } -func DeleteAnnotations(c *middleware.Context, cmd dtos.DeleteAnnotationsCmd) Response { +func DeleteAnnotations(c *m.ReqContext, cmd dtos.DeleteAnnotationsCmd) Response { repo := annotations.GetRepository() err := repo.Delete(&annotations.DeleteParams{ @@ -224,10 +233,14 @@ func DeleteAnnotations(c *middleware.Context, cmd dtos.DeleteAnnotationsCmd) Res return ApiSuccess("Annotations deleted") } -func DeleteAnnotationById(c *middleware.Context) Response { +func DeleteAnnotationById(c *m.ReqContext) Response { repo := annotations.GetRepository() annotationId := c.ParamsInt64(":annotationId") + if resp := canSave(c, repo, annotationId); resp != nil { + return resp + } + err := repo.Delete(&annotations.DeleteParams{ Id: annotationId, }) @@ -239,10 +252,14 @@ func DeleteAnnotationById(c *middleware.Context) Response { return ApiSuccess("Annotation deleted") } -func DeleteAnnotationRegion(c *middleware.Context) Response { +func DeleteAnnotationRegion(c *m.ReqContext) Response { repo := annotations.GetRepository() regionId := c.ParamsInt64(":regionId") + if resp := canSave(c, repo, regionId); resp != nil { + return resp + } + err := repo.Delete(&annotations.DeleteParams{ RegionId: regionId, }) @@ 
-253,3 +270,50 @@ func DeleteAnnotationRegion(c *middleware.Context) Response { return ApiSuccess("Annotation region deleted") } + +func canSaveByDashboardId(c *m.ReqContext, dashboardId int64) (bool, error) { + if dashboardId == 0 && !c.SignedInUser.HasRole(m.ROLE_EDITOR) { + return false, nil + } + + if dashboardId > 0 { + guardian := guardian.New(dashboardId, c.OrgId, c.SignedInUser) + if canEdit, err := guardian.CanEdit(); err != nil || !canEdit { + return false, err + } + } + + return true, nil +} + +func canSave(c *m.ReqContext, repo annotations.Repository, annotationId int64) Response { + items, err := repo.Find(&annotations.ItemQuery{AnnotationId: annotationId, OrgId: c.OrgId}) + + if err != nil || len(items) == 0 { + return ApiError(500, "Could not find annotation to update", err) + } + + dashboardId := items[0].DashboardId + + if canSave, err := canSaveByDashboardId(c, dashboardId); err != nil || !canSave { + return dashboardGuardianResponse(err) + } + + return nil +} + +func canSaveByRegionId(c *m.ReqContext, repo annotations.Repository, regionId int64) Response { + items, err := repo.Find(&annotations.ItemQuery{RegionId: regionId, OrgId: c.OrgId}) + + if err != nil || len(items) == 0 { + return ApiError(500, "Could not find annotation to update", err) + } + + dashboardId := items[0].DashboardId + + if canSave, err := canSaveByDashboardId(c, dashboardId); err != nil || !canSave { + return dashboardGuardianResponse(err) + } + + return nil +} diff --git a/pkg/api/annotations_test.go b/pkg/api/annotations_test.go new file mode 100644 index 00000000000..7c298550673 --- /dev/null +++ b/pkg/api/annotations_test.go @@ -0,0 +1,241 @@ +package api + +import ( + "testing" + + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/annotations" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestAnnotationsApiEndpoint(t *testing.T) { + Convey("Given an annotation without a dashboard id", t, func() { + cmd := dtos.PostAnnotationsCmd{ + Time: 1000, + Text: "annotation text", + Tags: []string{"tag1", "tag2"}, + IsRegion: false, + } + + updateCmd := dtos.UpdateAnnotationsCmd{ + Time: 1000, + Text: "annotation text", + Tags: []string{"tag1", "tag2"}, + IsRegion: false, + } + + Convey("When user is an Org Viewer", func() { + role := m.ROLE_VIEWER + Convey("Should not be allowed to save an annotation", func() { + postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + + putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationById + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationRegion + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + }) + }) + + Convey("When user is an Org Editor", func() { + role := m.ROLE_EDITOR + Convey("Should be able to save an annotation", func() { + postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) { + 
sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + + putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationById + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationRegion + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + }) + }) + }) + + Convey("Given an annotation with a dashboard id and the dashboard does not have an acl", t, func() { + cmd := dtos.PostAnnotationsCmd{ + Time: 1000, + Text: "annotation text", + Tags: []string{"tag1", "tag2"}, + IsRegion: false, + DashboardId: 1, + PanelId: 1, + } + + updateCmd := dtos.UpdateAnnotationsCmd{ + Time: 1000, + Text: "annotation text", + Tags: []string{"tag1", "tag2"}, + IsRegion: false, + Id: 1, + } + + viewerRole := m.ROLE_VIEWER + editorRole := m.ROLE_EDITOR + + aclMockResp := []*m.DashboardAclInfoDTO{ + {Role: &viewerRole, Permission: m.PERMISSION_VIEW}, + {Role: &editorRole, Permission: m.PERMISSION_EDIT}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = aclMockResp + return nil + }) + + bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { + query.Result = []*m.Team{} + return nil + }) + + Convey("When user is an Org Viewer", func() { + role := m.ROLE_VIEWER + Convey("Should not be allowed 
to save an annotation", func() { + postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + + putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationById + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationRegion + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 403) + }) + }) + }) + + Convey("When user is an Org Editor", func() { + role := m.ROLE_EDITOR + Convey("Should be able to save an annotation", func() { + postAnnotationScenario("When calling POST on", "/api/annotations", "/api/annotations", role, cmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + + putAnnotationScenario("When calling PUT on", "/api/annotations/1", "/api/annotations/:annotationId", role, updateCmd, func(sc *scenarioContext) { + sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/1", "/api/annotations/:annotationId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationById + 
sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/annotations/region/1", "/api/annotations/region/:regionId", role, func(sc *scenarioContext) { + sc.handlerFunc = DeleteAnnotationRegion + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() + So(sc.resp.Code, ShouldEqual, 200) + }) + }) + }) + }) +} + +type fakeAnnotationsRepo struct { +} + +func (repo *fakeAnnotationsRepo) Delete(params *annotations.DeleteParams) error { + return nil +} +func (repo *fakeAnnotationsRepo) Save(item *annotations.Item) error { + item.Id = 1 + return nil +} +func (repo *fakeAnnotationsRepo) Update(item *annotations.Item) error { + return nil +} +func (repo *fakeAnnotationsRepo) Find(query *annotations.ItemQuery) ([]*annotations.ItemDTO, error) { + annotations := []*annotations.ItemDTO{{Id: 1}} + return annotations, nil +} + +var fakeAnnoRepo *fakeAnnotationsRepo + +func postAnnotationScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.PostAnnotationsCmd, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.UserId = TestUserID + sc.context.OrgId = TestOrgID + sc.context.OrgRole = role + + return PostAnnotation(c, cmd) + }) + + fakeAnnoRepo = &fakeAnnotationsRepo{} + annotations.SetRepository(fakeAnnoRepo) + + sc.m.Post(routePattern, sc.defaultHandler) + + fn(sc) + }) +} + +func putAnnotationScenario(desc string, url string, routePattern string, role m.RoleType, cmd dtos.UpdateAnnotationsCmd, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.UserId = TestUserID + sc.context.OrgId = TestOrgID + 
sc.context.OrgRole = role + + return UpdateAnnotation(c, cmd) + }) + + fakeAnnoRepo = &fakeAnnotationsRepo{} + annotations.SetRepository(fakeAnnoRepo) + + sc.m.Put(routePattern, sc.defaultHandler) + + fn(sc) + }) +} diff --git a/pkg/api/api.go b/pkg/api/api.go index ea082ff4741..84f0eae79c7 100644 --- a/pkg/api/api.go +++ b/pkg/api/api.go @@ -15,6 +15,8 @@ func (hs *HttpServer) registerRoutes() { reqGrafanaAdmin := middleware.Auth(&middleware.AuthOptions{ReqSignedIn: true, ReqGrafanaAdmin: true}) reqEditorRole := middleware.RoleAuth(m.ROLE_EDITOR, m.ROLE_ADMIN) reqOrgAdmin := middleware.RoleAuth(m.ROLE_ADMIN) + redirectFromLegacyDashboardUrl := middleware.RedirectFromLegacyDashboardUrl() + redirectFromLegacyDashboardSoloUrl := middleware.RedirectFromLegacyDashboardSoloUrl() quota := middleware.Quota bind := binding.Bind @@ -63,9 +65,13 @@ func (hs *HttpServer) registerRoutes() { r.Get("/plugins/:id/edit", reqSignedIn, Index) r.Get("/plugins/:id/page/:page", reqSignedIn, Index) - r.Get("/dashboard/*", reqSignedIn, Index) + r.Get("/d/:uid/:slug", reqSignedIn, Index) + r.Get("/dashboard/db/:slug", reqSignedIn, redirectFromLegacyDashboardUrl, Index) + r.Get("/dashboard/script/*", reqSignedIn, Index) r.Get("/dashboard-solo/snapshot/*", Index) - r.Get("/dashboard-solo/*", reqSignedIn, Index) + r.Get("/d-solo/:uid/:slug", reqSignedIn, Index) + r.Get("/dashboard-solo/db/:slug", reqSignedIn, redirectFromLegacyDashboardSoloUrl, Index) + r.Get("/dashboard-solo/script/*", reqSignedIn, Index) r.Get("/import/dashboard", reqSignedIn, Index) r.Get("/dashboards/", reqSignedIn, Index) r.Get("/dashboards/*", reqSignedIn, Index) @@ -100,7 +106,7 @@ func (hs *HttpServer) registerRoutes() { r.Post("/api/snapshots/", bind(m.CreateDashboardSnapshotCommand{}), CreateDashboardSnapshot) r.Get("/api/snapshot/shared-options/", GetSharingOptions) r.Get("/api/snapshots/:key", GetDashboardSnapshot) - r.Get("/api/snapshots-delete/:key", reqEditorRole, DeleteDashboardSnapshot) + 
r.Get("/api/snapshots-delete/:key", reqEditorRole, wrap(DeleteDashboardSnapshot)) // api renew session based on remember cookie r.Get("/api/login/ping", quota("session"), LoginApiPing) @@ -144,11 +150,11 @@ func (hs *HttpServer) registerRoutes() { apiRoute.Group("/teams", func(teamsRoute RouteRegister) { teamsRoute.Get("/:teamId", wrap(GetTeamById)) teamsRoute.Get("/search", wrap(SearchTeams)) - teamsRoute.Post("/", quota("teams"), bind(m.CreateTeamCommand{}), wrap(CreateTeam)) + teamsRoute.Post("/", bind(m.CreateTeamCommand{}), wrap(CreateTeam)) teamsRoute.Put("/:teamId", bind(m.UpdateTeamCommand{}), wrap(UpdateTeam)) teamsRoute.Delete("/:teamId", wrap(DeleteTeamById)) teamsRoute.Get("/:teamId/members", wrap(GetTeamMembers)) - teamsRoute.Post("/:teamId/members", quota("teams"), bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember)) + teamsRoute.Post("/:teamId/members", bind(m.AddTeamMemberCommand{}), wrap(AddTeamMember)) teamsRoute.Delete("/:teamId/members/:userId", wrap(RemoveTeamMember)) }, reqOrgAdmin) @@ -240,14 +246,35 @@ func (hs *HttpServer) registerRoutes() { apiRoute.Any("/datasources/proxy/:id/*", reqSignedIn, hs.ProxyDataSourceRequest) apiRoute.Any("/datasources/proxy/:id", reqSignedIn, hs.ProxyDataSourceRequest) + // Folders + apiRoute.Group("/folders", func(folderRoute RouteRegister) { + folderRoute.Get("/", wrap(GetFolders)) + folderRoute.Get("/id/:id", wrap(GetFolderById)) + folderRoute.Post("/", bind(m.CreateFolderCommand{}), wrap(CreateFolder)) + + folderRoute.Group("/:uid", func(folderUidRoute RouteRegister) { + folderUidRoute.Get("/", wrap(GetFolderByUid)) + folderUidRoute.Put("/", bind(m.UpdateFolderCommand{}), wrap(UpdateFolder)) + folderUidRoute.Delete("/", wrap(DeleteFolder)) + + folderUidRoute.Group("/permissions", func(folderPermissionRoute RouteRegister) { + folderPermissionRoute.Get("/", wrap(GetFolderPermissionList)) + folderPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateFolderPermissions)) + }) + }) + }) + 
// Dashboard apiRoute.Group("/dashboards", func(dashboardRoute RouteRegister) { + dashboardRoute.Get("/uid/:uid", wrap(GetDashboard)) + dashboardRoute.Delete("/uid/:uid", wrap(DeleteDashboardByUid)) + dashboardRoute.Get("/db/:slug", wrap(GetDashboard)) - dashboardRoute.Delete("/db/:slug", reqEditorRole, wrap(DeleteDashboard)) + dashboardRoute.Delete("/db/:slug", wrap(DeleteDashboard)) dashboardRoute.Post("/calculate-diff", bind(dtos.CalculateDiffOptions{}), wrap(CalculateDashboardDiff)) - dashboardRoute.Post("/db", reqEditorRole, bind(m.SaveDashboardCommand{}), wrap(PostDashboard)) + dashboardRoute.Post("/db", bind(m.SaveDashboardCommand{}), wrap(PostDashboard)) dashboardRoute.Get("/home", wrap(GetHomeDashboard)) dashboardRoute.Get("/tags", GetDashboardTags) dashboardRoute.Post("/import", bind(dtos.ImportDashboardCommand{}), wrap(ImportDashboard)) @@ -255,12 +282,11 @@ func (hs *HttpServer) registerRoutes() { dashboardRoute.Group("/id/:dashboardId", func(dashIdRoute RouteRegister) { dashIdRoute.Get("/versions", wrap(GetDashboardVersions)) dashIdRoute.Get("/versions/:id", wrap(GetDashboardVersion)) - dashIdRoute.Post("/restore", reqEditorRole, bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion)) + dashIdRoute.Post("/restore", bind(dtos.RestoreDashboardVersionCommand{}), wrap(RestoreDashboardVersion)) - dashIdRoute.Group("/acl", func(aclRoute RouteRegister) { - aclRoute.Get("/", wrap(GetDashboardAclList)) - aclRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardAcl)) - aclRoute.Delete("/:aclId", wrap(DeleteDashboardAcl)) + dashIdRoute.Group("/permissions", func(dashboardPermissionRoute RouteRegister) { + dashboardPermissionRoute.Get("/", wrap(GetDashboardPermissionList)) + dashboardPermissionRoute.Post("/", bind(dtos.UpdateDashboardAclCommand{}), wrap(UpdateDashboardPermissions)) }) }) }) @@ -317,8 +343,8 @@ func (hs *HttpServer) registerRoutes() { annotationsRoute.Delete("/:annotationId", wrap(DeleteAnnotationById)) 
annotationsRoute.Put("/:annotationId", bind(dtos.UpdateAnnotationsCmd{}), wrap(UpdateAnnotation)) annotationsRoute.Delete("/region/:regionId", wrap(DeleteAnnotationRegion)) - annotationsRoute.Post("/graphite", bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation)) - }, reqEditorRole) + annotationsRoute.Post("/graphite", reqEditorRole, bind(dtos.PostGraphiteAnnotationsCmd{}), wrap(PostGraphiteAnnotation)) + }) // error test r.Get("/metrics/error", wrap(GenerateError)) diff --git a/pkg/api/apikey.go b/pkg/api/apikey.go index b2097104aba..24ed69ec691 100644 --- a/pkg/api/apikey.go +++ b/pkg/api/apikey.go @@ -4,11 +4,10 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/apikeygen" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" ) -func GetApiKeys(c *middleware.Context) Response { +func GetApiKeys(c *m.ReqContext) Response { query := m.GetApiKeysQuery{OrgId: c.OrgId} if err := bus.Dispatch(&query); err != nil { @@ -27,7 +26,7 @@ func GetApiKeys(c *middleware.Context) Response { return Json(200, result) } -func DeleteApiKey(c *middleware.Context) Response { +func DeleteApiKey(c *m.ReqContext) Response { id := c.ParamsInt64(":id") cmd := &m.DeleteApiKeyCommand{Id: id, OrgId: c.OrgId} @@ -40,7 +39,7 @@ func DeleteApiKey(c *middleware.Context) Response { return ApiSuccess("API key deleted") } -func AddApiKey(c *middleware.Context, cmd m.AddApiKeyCommand) Response { +func AddApiKey(c *m.ReqContext, cmd m.AddApiKeyCommand) Response { if !cmd.Role.IsValid() { return ApiError(400, "Invalid role specified", nil) } diff --git a/pkg/api/app_routes.go b/pkg/api/app_routes.go index 0440c880979..8d74d96396b 100644 --- a/pkg/api/app_routes.go +++ b/pkg/api/app_routes.go @@ -56,7 +56,7 @@ func InitAppPluginRoutes(r *macaron.Macaron) { } func AppPluginRoute(route *plugins.AppPluginRoute, appId string) macaron.Handler { - return func(c 
*middleware.Context) { + return func(c *m.ReqContext) { path := c.Params("*") proxy := pluginproxy.NewApiPluginProxy(c, path, route, appId) diff --git a/pkg/api/avatar/avatar.go b/pkg/api/avatar/avatar.go index 6824e330f00..ce9da1e8790 100644 --- a/pkg/api/avatar/avatar.go +++ b/pkg/api/avatar/avatar.go @@ -157,11 +157,11 @@ func NewCacheServer() *CacheServer { func newNotFound() *Avatar { avatar := &Avatar{notFound: true} - // load transparent png into buffer - path := filepath.Join(setting.StaticRootPath, "img", "transparent.png") + // load user_profile png into buffer + path := filepath.Join(setting.StaticRootPath, "img", "user_profile.png") if data, err := ioutil.ReadFile(path); err != nil { - log.Error(3, "Failed to read transparent.png, %v", path) + log.Error(3, "Failed to read user_profile.png, %v", path) } else { avatar.data = bytes.NewBuffer(data) } diff --git a/pkg/api/common.go b/pkg/api/common.go index bd1c8be477d..370f78f8b1d 100644 --- a/pkg/api/common.go +++ b/pkg/api/common.go @@ -4,7 +4,7 @@ import ( "encoding/json" "net/http" - "github.com/grafana/grafana/pkg/middleware" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "gopkg.in/macaron.v1" ) @@ -19,7 +19,7 @@ var ( ) type Response interface { - WriteTo(ctx *middleware.Context) + WriteTo(ctx *m.ReqContext) } type NormalResponse struct { @@ -32,7 +32,7 @@ type NormalResponse struct { func wrap(action interface{}) macaron.Handler { - return func(c *middleware.Context) { + return func(c *m.ReqContext) { var res Response val, err := c.Invoke(action) if err == nil && val != nil && len(val) > 0 { @@ -45,7 +45,7 @@ func wrap(action interface{}) macaron.Handler { } } -func (r *NormalResponse) WriteTo(ctx *middleware.Context) { +func (r *NormalResponse) WriteTo(ctx *m.ReqContext) { if r.err != nil { ctx.Logger.Error(r.errMessage, "error", r.err) } diff --git a/pkg/api/common_test.go b/pkg/api/common_test.go new file mode 100644 index 00000000000..e1cbd20edb3 --- 
/dev/null +++ b/pkg/api/common_test.go @@ -0,0 +1,105 @@ +package api + +import ( + "net/http" + "net/http/httptest" + "path/filepath" + + "github.com/go-macaron/session" + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/middleware" + m "github.com/grafana/grafana/pkg/models" + "gopkg.in/macaron.v1" + + . "github.com/smartystreets/goconvey/convey" +) + +func loggedInUserScenario(desc string, url string, fn scenarioFunc) { + loggedInUserScenarioWithRole(desc, "GET", url, url, m.ROLE_EDITOR, fn) +} + +func loggedInUserScenarioWithRole(desc string, method string, url string, routePattern string, role m.RoleType, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.UserId = TestUserID + sc.context.OrgId = TestOrgID + sc.context.OrgRole = role + if sc.handlerFunc != nil { + return sc.handlerFunc(sc.context) + } + + return nil + }) + + switch method { + case "GET": + sc.m.Get(routePattern, sc.defaultHandler) + case "DELETE": + sc.m.Delete(routePattern, sc.defaultHandler) + } + + fn(sc) + }) +} + +func (sc *scenarioContext) fakeReq(method, url string) *scenarioContext { + sc.resp = httptest.NewRecorder() + req, err := http.NewRequest(method, url, nil) + So(err, ShouldBeNil) + sc.req = req + + return sc +} + +func (sc *scenarioContext) fakeReqWithParams(method, url string, queryParams map[string]string) *scenarioContext { + sc.resp = httptest.NewRecorder() + req, err := http.NewRequest(method, url, nil) + q := req.URL.Query() + for k, v := range queryParams { + q.Add(k, v) + } + req.URL.RawQuery = q.Encode() + So(err, ShouldBeNil) + sc.req = req + + return sc +} + +type scenarioContext struct { + m *macaron.Macaron + context *m.ReqContext + resp *httptest.ResponseRecorder + handlerFunc handlerFunc + defaultHandler macaron.Handler + req *http.Request + url string +} + +func (sc *scenarioContext) 
exec() { + sc.m.ServeHTTP(sc.resp, sc.req) +} + +type scenarioFunc func(c *scenarioContext) +type handlerFunc func(c *m.ReqContext) Response + +func setupScenarioContext(url string) *scenarioContext { + sc := &scenarioContext{ + url: url, + } + viewsPath, _ := filepath.Abs("../../public/views") + + sc.m = macaron.New() + sc.m.Use(macaron.Renderer(macaron.RenderOptions{ + Directory: viewsPath, + Delims: macaron.Delims{Left: "[[", Right: "]]"}, + })) + + sc.m.Use(middleware.GetContextHandler()) + sc.m.Use(middleware.Sessioner(&session.Options{})) + + return sc +} diff --git a/pkg/api/dashboard.go b/pkg/api/dashboard.go index 87c42884e31..877524ad5dd 100644 --- a/pkg/api/dashboard.go +++ b/pkg/api/dashboard.go @@ -14,15 +14,15 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/services/guardian" + "github.com/grafana/grafana/pkg/services/quota" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) -func isDashboardStarredByUser(c *middleware.Context, dashId int64) (bool, error) { +func isDashboardStarredByUser(c *m.ReqContext, dashId int64) (bool, error) { if !c.IsSignedIn { return false, nil } @@ -38,20 +38,19 @@ func isDashboardStarredByUser(c *middleware.Context, dashId int64) (bool, error) func dashboardGuardianResponse(err error) Response { if err != nil { return ApiError(500, "Error while checking dashboard permissions", err) - } else { - return ApiError(403, "Access denied to this dashboard", nil) } + + return ApiError(403, "Access denied to this dashboard", nil) } -func GetDashboard(c *middleware.Context) Response { - dash, rsp := getDashboardHelper(c.OrgId, c.Params(":slug"), 0) +func GetDashboard(c *m.ReqContext) Response { + dash, rsp := getDashboardHelper(c.OrgId, 
c.Params(":slug"), 0, c.Params(":uid")) if rsp != nil { return rsp } - guardian := guardian.NewDashboardGuardian(dash.Id, c.OrgId, c.SignedInUser) + guardian := guardian.New(dash.Id, c.OrgId, c.SignedInUser) if canView, err := guardian.CanView(); err != nil || !canView { - fmt.Printf("%v", err) return dashboardGuardianResponse(err) } @@ -89,7 +88,8 @@ func GetDashboard(c *middleware.Context) Response { HasAcl: dash.HasAcl, IsFolder: dash.IsFolder, FolderId: dash.FolderId, - FolderTitle: "Root", + Url: dash.GetUrl(), + FolderTitle: "General", } // lookup folder title @@ -99,6 +99,7 @@ func GetDashboard(c *middleware.Context) Response { return ApiError(500, "Dashboard folder could not be read", err) } meta.FolderTitle = query.Result.Title + meta.FolderUrl = query.Result.GetUrl() } // make sure db version is in sync with json model version @@ -124,21 +125,39 @@ func getUserLogin(userId int64) string { } } -func getDashboardHelper(orgId int64, slug string, id int64) (*m.Dashboard, Response) { - query := m.GetDashboardQuery{Slug: slug, Id: id, OrgId: orgId} +func getDashboardHelper(orgId int64, slug string, id int64, uid string) (*m.Dashboard, Response) { + var query m.GetDashboardQuery + + if len(uid) > 0 { + query = m.GetDashboardQuery{Uid: uid, Id: id, OrgId: orgId} + } else { + query = m.GetDashboardQuery{Slug: slug, Id: id, OrgId: orgId} + } + if err := bus.Dispatch(&query); err != nil { return nil, ApiError(404, "Dashboard not found", err) } + return query.Result, nil } -func DeleteDashboard(c *middleware.Context) Response { - dash, rsp := getDashboardHelper(c.OrgId, c.Params(":slug"), 0) +func DeleteDashboard(c *m.ReqContext) Response { + query := m.GetDashboardsBySlugQuery{OrgId: c.OrgId, Slug: c.Params(":slug")} + + if err := bus.Dispatch(&query); err != nil { + return ApiError(500, "Failed to retrieve dashboards by slug", err) + } + + if len(query.Result) > 1 { + return Json(412, util.DynMap{"status": "multiple-slugs-exists", "message": 
m.ErrDashboardsWithSameSlugExists.Error()}) + } + + dash, rsp := getDashboardHelper(c.OrgId, c.Params(":slug"), 0, "") if rsp != nil { return rsp } - guardian := guardian.NewDashboardGuardian(dash.Id, c.OrgId, c.SignedInUser) + guardian := guardian.New(dash.Id, c.OrgId, c.SignedInUser) if canSave, err := guardian.CanSave(); err != nil || !canSave { return dashboardGuardianResponse(err) } @@ -148,32 +167,42 @@ func DeleteDashboard(c *middleware.Context) Response { return ApiError(500, "Failed to delete dashboard", err) } - var resp = map[string]interface{}{"title": dash.Title} - return Json(200, resp) + return Json(200, util.DynMap{ + "title": dash.Title, + "message": fmt.Sprintf("Dashboard %s deleted", dash.Title), + }) } -func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response { +func DeleteDashboardByUid(c *m.ReqContext) Response { + dash, rsp := getDashboardHelper(c.OrgId, "", 0, c.Params(":uid")) + if rsp != nil { + return rsp + } + + guardian := guardian.New(dash.Id, c.OrgId, c.SignedInUser) + if canSave, err := guardian.CanSave(); err != nil || !canSave { + return dashboardGuardianResponse(err) + } + + cmd := m.DeleteDashboardCommand{OrgId: c.OrgId, Id: dash.Id} + if err := bus.Dispatch(&cmd); err != nil { + return ApiError(500, "Failed to delete dashboard", err) + } + + return Json(200, util.DynMap{ + "title": dash.Title, + "message": fmt.Sprintf("Dashboard %s deleted", dash.Title), + }) +} + +func PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand) Response { cmd.OrgId = c.OrgId cmd.UserId = c.UserId dash := cmd.GetDashboardModel() - guardian := guardian.NewDashboardGuardian(dash.Id, c.OrgId, c.SignedInUser) - if canSave, err := guardian.CanSave(); err != nil || !canSave { - return dashboardGuardianResponse(err) - } - - if dash.IsFolder && dash.FolderId > 0 { - return ApiError(400, m.ErrDashboardFolderCannotHaveParent.Error(), nil) - } - - // Check if Title is empty - if dash.Title == "" { - return ApiError(400, 
m.ErrDashboardTitleEmpty.Error(), nil) - } - - if dash.Id == 0 { - limitReached, err := middleware.QuotaReached(c, "dashboard") + if dash.Id == 0 && dash.Uid == "" { + limitReached, err := quota.QuotaReached(c, "dashboard") if err != nil { return ApiError(500, "failed to get quota", err) } @@ -182,18 +211,31 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response { } } - dashItem := &dashboards.SaveDashboardItem{ + dashItem := &dashboards.SaveDashboardDTO{ Dashboard: dash, Message: cmd.Message, OrgId: c.OrgId, - UserId: c.UserId, + User: c.SignedInUser, Overwrite: cmd.Overwrite, } - dashboard, err := dashboards.GetRepository().SaveDashboard(dashItem) + dashboard, err := dashboards.NewService().SaveDashboard(dashItem) - if err == m.ErrDashboardTitleEmpty { - return ApiError(400, m.ErrDashboardTitleEmpty.Error(), nil) + if err == m.ErrDashboardTitleEmpty || + err == m.ErrDashboardWithSameNameAsFolder || + err == m.ErrDashboardFolderWithSameNameAsDashboard || + err == m.ErrDashboardTypeMismatch || + err == m.ErrDashboardInvalidUid || + err == m.ErrDashboardUidToLong || + err == m.ErrDashboardWithSameUIDExists || + err == m.ErrFolderNotFound || + err == m.ErrDashboardFolderCannotHaveParent || + err == m.ErrDashboardFolderNameExists { + return ApiError(400, err.Error(), nil) + } + + if err == m.ErrDashboardUpdateAccessDenied { + return ApiError(403, err.Error(), err) } if err == m.ErrDashboardContainsInvalidAlertData { @@ -201,7 +243,7 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response { } if err != nil { - if err == m.ErrDashboardWithSameNameExists { + if err == m.ErrDashboardWithSameNameInFolderExists { return Json(412, util.DynMap{"status": "name-exists", "message": err.Error()}) } if err == m.ErrDashboardVersionMismatch { @@ -226,20 +268,28 @@ func PostDashboard(c *middleware.Context, cmd m.SaveDashboardCommand) Response { } c.TimeRequest(metrics.M_Api_Dashboard_Save) - return Json(200, util.DynMap{"status": 
"success", "slug": dashboard.Slug, "version": dashboard.Version, "id": dashboard.Id}) + return Json(200, util.DynMap{ + "status": "success", + "slug": dashboard.Slug, + "version": dashboard.Version, + "id": dashboard.Id, + "uid": dashboard.Uid, + "url": dashboard.GetUrl(), + }) } -func GetHomeDashboard(c *middleware.Context) Response { +func GetHomeDashboard(c *m.ReqContext) Response { prefsQuery := m.GetPreferencesWithDefaultsQuery{OrgId: c.OrgId, UserId: c.UserId} if err := bus.Dispatch(&prefsQuery); err != nil { return ApiError(500, "Failed to get preferences", err) } if prefsQuery.Result.HomeDashboardId != 0 { - slugQuery := m.GetDashboardSlugByIdQuery{Id: prefsQuery.Result.HomeDashboardId} + slugQuery := m.GetDashboardRefByIdQuery{Id: prefsQuery.Result.HomeDashboardId} err := bus.Dispatch(&slugQuery) if err == nil { - dashRedirect := dtos.DashboardRedirect{RedirectUri: "db/" + slugQuery.Result} + url := m.GetDashboardUrl(slugQuery.Result.Uid, slugQuery.Result.Slug) + dashRedirect := dtos.DashboardRedirect{RedirectUri: url} return Json(200, &dashRedirect) } else { log.Warn("Failed to get slug from database, %s", err.Error()) @@ -255,7 +305,7 @@ func GetHomeDashboard(c *middleware.Context) Response { dash := dtos.DashboardFullWithMeta{} dash.Meta.IsHome = true dash.Meta.CanEdit = c.SignedInUser.HasRole(m.ROLE_EDITOR) - dash.Meta.FolderTitle = "Root" + dash.Meta.FolderTitle = "General" jsonParser := json.NewDecoder(file) if err := jsonParser.Decode(&dash.Dashboard); err != nil { @@ -288,10 +338,10 @@ func addGettingStartedPanelToHomeDashboard(dash *simplejson.Json) { } // GetDashboardVersions returns all dashboard versions as JSON -func GetDashboardVersions(c *middleware.Context) Response { +func GetDashboardVersions(c *m.ReqContext) Response { dashId := c.ParamsInt64(":dashboardId") - guardian := guardian.NewDashboardGuardian(dashId, c.OrgId, c.SignedInUser) + guardian := guardian.New(dashId, c.OrgId, c.SignedInUser) if canSave, err := guardian.CanSave(); err != 
nil || !canSave { return dashboardGuardianResponse(err) } @@ -327,10 +377,10 @@ func GetDashboardVersions(c *middleware.Context) Response { } // GetDashboardVersion returns the dashboard version with the given ID. -func GetDashboardVersion(c *middleware.Context) Response { +func GetDashboardVersion(c *m.ReqContext) Response { dashId := c.ParamsInt64(":dashboardId") - guardian := guardian.NewDashboardGuardian(dashId, c.OrgId, c.SignedInUser) + guardian := guardian.New(dashId, c.OrgId, c.SignedInUser) if canSave, err := guardian.CanSave(); err != nil || !canSave { return dashboardGuardianResponse(err) } @@ -359,7 +409,19 @@ func GetDashboardVersion(c *middleware.Context) Response { } // POST /api/dashboards/calculate-diff performs diffs on two dashboards -func CalculateDashboardDiff(c *middleware.Context, apiOptions dtos.CalculateDiffOptions) Response { +func CalculateDashboardDiff(c *m.ReqContext, apiOptions dtos.CalculateDiffOptions) Response { + + guardianBase := guardian.New(apiOptions.Base.DashboardId, c.OrgId, c.SignedInUser) + if canSave, err := guardianBase.CanSave(); err != nil || !canSave { + return dashboardGuardianResponse(err) + } + + if apiOptions.Base.DashboardId != apiOptions.New.DashboardId { + guardianNew := guardian.New(apiOptions.New.DashboardId, c.OrgId, c.SignedInUser) + if canSave, err := guardianNew.CanSave(); err != nil || !canSave { + return dashboardGuardianResponse(err) + } + } options := dashdiffs.Options{ OrgId: c.OrgId, @@ -386,19 +448,19 @@ func CalculateDashboardDiff(c *middleware.Context, apiOptions dtos.CalculateDiff if options.DiffType == dashdiffs.DiffDelta { return Respond(200, result.Delta).Header("Content-Type", "application/json") - } else { - return Respond(200, result.Delta).Header("Content-Type", "text/html") } + + return Respond(200, result.Delta).Header("Content-Type", "text/html") } // RestoreDashboardVersion restores a dashboard to the given version. 
-func RestoreDashboardVersion(c *middleware.Context, apiCmd dtos.RestoreDashboardVersionCommand) Response { - dash, rsp := getDashboardHelper(c.OrgId, "", c.ParamsInt64(":dashboardId")) +func RestoreDashboardVersion(c *m.ReqContext, apiCmd dtos.RestoreDashboardVersionCommand) Response { + dash, rsp := getDashboardHelper(c.OrgId, "", c.ParamsInt64(":dashboardId"), "") if rsp != nil { return rsp } - guardian := guardian.NewDashboardGuardian(dash.Id, c.OrgId, c.SignedInUser) + guardian := guardian.New(dash.Id, c.OrgId, c.SignedInUser) if canSave, err := guardian.CanSave(); err != nil || !canSave { return dashboardGuardianResponse(err) } @@ -416,12 +478,13 @@ func RestoreDashboardVersion(c *middleware.Context, apiCmd dtos.RestoreDashboard saveCmd.UserId = c.UserId saveCmd.Dashboard = version.Data saveCmd.Dashboard.Set("version", dash.Version) + saveCmd.Dashboard.Set("uid", dash.Uid) saveCmd.Message = fmt.Sprintf("Restored from version %d", version.Version) return PostDashboard(c, saveCmd) } -func GetDashboardTags(c *middleware.Context) { +func GetDashboardTags(c *m.ReqContext) { query := m.GetDashboardTagsQuery{OrgId: c.OrgId} err := bus.Dispatch(&query) if err != nil { diff --git a/pkg/api/dashboard_acl.go b/pkg/api/dashboard_acl.go deleted file mode 100644 index 88cc74b9d1c..00000000000 --- a/pkg/api/dashboard_acl.go +++ /dev/null @@ -1,79 +0,0 @@ -package api - -import ( - "time" - - "github.com/grafana/grafana/pkg/api/dtos" - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" - m "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/services/guardian" -) - -func GetDashboardAclList(c *middleware.Context) Response { - dashId := c.ParamsInt64(":dashboardId") - - guardian := guardian.NewDashboardGuardian(dashId, c.OrgId, c.SignedInUser) - - if canAdmin, err := guardian.CanAdmin(); err != nil || !canAdmin { - return dashboardGuardianResponse(err) - } - - acl, err := guardian.GetAcl() - if err != nil { - return 
ApiError(500, "Failed to get dashboard acl", err) - } - - return Json(200, acl) -} - -func UpdateDashboardAcl(c *middleware.Context, apiCmd dtos.UpdateDashboardAclCommand) Response { - dashId := c.ParamsInt64(":dashboardId") - - guardian := guardian.NewDashboardGuardian(dashId, c.OrgId, c.SignedInUser) - if canAdmin, err := guardian.CanAdmin(); err != nil || !canAdmin { - return dashboardGuardianResponse(err) - } - - cmd := m.UpdateDashboardAclCommand{} - cmd.DashboardId = dashId - - for _, item := range apiCmd.Items { - cmd.Items = append(cmd.Items, &m.DashboardAcl{ - OrgId: c.OrgId, - DashboardId: dashId, - UserId: item.UserId, - TeamId: item.TeamId, - Role: item.Role, - Permission: item.Permission, - Created: time.Now(), - Updated: time.Now(), - }) - } - - if err := bus.Dispatch(&cmd); err != nil { - if err == m.ErrDashboardAclInfoMissing || err == m.ErrDashboardPermissionDashboardEmpty { - return ApiError(409, err.Error(), err) - } - return ApiError(500, "Failed to create permission", err) - } - - return ApiSuccess("Dashboard acl updated") -} - -func DeleteDashboardAcl(c *middleware.Context) Response { - dashId := c.ParamsInt64(":dashboardId") - aclId := c.ParamsInt64(":aclId") - - guardian := guardian.NewDashboardGuardian(dashId, c.OrgId, c.SignedInUser) - if canAdmin, err := guardian.CanAdmin(); err != nil || !canAdmin { - return dashboardGuardianResponse(err) - } - - cmd := m.RemoveDashboardAclCommand{OrgId: c.OrgId, AclId: aclId} - if err := bus.Dispatch(&cmd); err != nil { - return ApiError(500, "Failed to delete permission for user", err) - } - - return Json(200, "") -} diff --git a/pkg/api/dashboard_acl_test.go b/pkg/api/dashboard_acl_test.go deleted file mode 100644 index e22e625dcf9..00000000000 --- a/pkg/api/dashboard_acl_test.go +++ /dev/null @@ -1,174 +0,0 @@ -package api - -import ( - "testing" - - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/components/simplejson" - m "github.com/grafana/grafana/pkg/models" - - . 
"github.com/smartystreets/goconvey/convey" -) - -func TestDashboardAclApiEndpoint(t *testing.T) { - Convey("Given a dashboard acl", t, func() { - mockResult := []*m.DashboardAclInfoDTO{ - {Id: 1, OrgId: 1, DashboardId: 1, UserId: 2, Permission: m.PERMISSION_VIEW}, - {Id: 2, OrgId: 1, DashboardId: 1, UserId: 3, Permission: m.PERMISSION_EDIT}, - {Id: 3, OrgId: 1, DashboardId: 1, UserId: 4, Permission: m.PERMISSION_ADMIN}, - {Id: 4, OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_VIEW}, - {Id: 5, OrgId: 1, DashboardId: 1, TeamId: 2, Permission: m.PERMISSION_ADMIN}, - } - dtoRes := transformDashboardAclsToDTOs(mockResult) - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = dtoRes - return nil - }) - - bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { - query.Result = mockResult - return nil - }) - - teamResp := []*m.Team{} - bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { - query.Result = teamResp - return nil - }) - - Convey("When user is org admin", func() { - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardsId/acl", m.ROLE_ADMIN, func(sc *scenarioContext) { - Convey("Should be able to access ACL", func() { - sc.handlerFunc = GetDashboardAclList - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 200) - - respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) - So(err, ShouldBeNil) - So(len(respJSON.MustArray()), ShouldEqual, 5) - So(respJSON.GetIndex(0).Get("userId").MustInt(), ShouldEqual, 2) - So(respJSON.GetIndex(0).Get("permission").MustInt(), ShouldEqual, m.PERMISSION_VIEW) - }) - }) - }) - - Convey("When user is editor and has admin permission in the ACL", func() { - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardId/acl", m.ROLE_EDITOR, func(sc *scenarioContext) { - 
mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN}) - - Convey("Should be able to access ACL", func() { - sc.handlerFunc = GetDashboardAclList - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 200) - }) - }) - - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/1", "/api/dashboards/id/:dashboardId/acl/:aclId", m.ROLE_EDITOR, func(sc *scenarioContext) { - mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN}) - - bus.AddHandler("test3", func(cmd *m.RemoveDashboardAclCommand) error { - return nil - }) - - Convey("Should be able to delete permission", func() { - sc.handlerFunc = DeleteDashboardAcl - sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 200) - }) - }) - - Convey("When user is a member of a team in the ACL with admin permission", func() { - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/1", "/api/dashboards/id/:dashboardsId/acl/:aclId", m.ROLE_EDITOR, func(sc *scenarioContext) { - teamResp = append(teamResp, &m.Team{Id: 2, OrgId: 1, Name: "UG2"}) - - bus.AddHandler("test3", func(cmd *m.RemoveDashboardAclCommand) error { - return nil - }) - - Convey("Should be able to delete permission", func() { - sc.handlerFunc = DeleteDashboardAcl - sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 200) - }) - }) - }) - }) - - Convey("When user is editor and has edit permission in the ACL", func() { - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardId/acl", m.ROLE_EDITOR, func(sc *scenarioContext) { - mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: 
m.PERMISSION_EDIT}) - - Convey("Should not be able to access ACL", func() { - sc.handlerFunc = GetDashboardAclList - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 403) - }) - }) - - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/1", "/api/dashboards/id/:dashboardId/acl/:aclId", m.ROLE_EDITOR, func(sc *scenarioContext) { - mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_EDIT}) - - bus.AddHandler("test3", func(cmd *m.RemoveDashboardAclCommand) error { - return nil - }) - - Convey("Should be not be able to delete permission", func() { - sc.handlerFunc = DeleteDashboardAcl - sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 403) - }) - }) - }) - - Convey("When user is editor and not in the ACL", func() { - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/acl", "/api/dashboards/id/:dashboardsId/acl", m.ROLE_EDITOR, func(sc *scenarioContext) { - - Convey("Should not be able to access ACL", func() { - sc.handlerFunc = GetDashboardAclList - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 403) - }) - }) - - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/id/1/acl/user/1", "/api/dashboards/id/:dashboardsId/acl/user/:userId", m.ROLE_EDITOR, func(sc *scenarioContext) { - mockResult = append(mockResult, &m.DashboardAclInfoDTO{Id: 1, OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_VIEW}) - bus.AddHandler("test3", func(cmd *m.RemoveDashboardAclCommand) error { - return nil - }) - - Convey("Should be not be able to delete permission", func() { - sc.handlerFunc = DeleteDashboardAcl - sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() - - So(sc.resp.Code, ShouldEqual, 403) - }) - }) - }) - }) -} - -func 
transformDashboardAclsToDTOs(acls []*m.DashboardAclInfoDTO) []*m.DashboardAclInfoDTO { - dtos := make([]*m.DashboardAclInfoDTO, 0) - - for _, acl := range acls { - dto := &m.DashboardAclInfoDTO{ - Id: acl.Id, - OrgId: acl.OrgId, - DashboardId: acl.DashboardId, - Permission: acl.Permission, - UserId: acl.UserId, - TeamId: acl.TeamId, - } - dtos = append(dtos, dto) - } - - return dtos -} diff --git a/pkg/api/dashboard_permission.go b/pkg/api/dashboard_permission.go new file mode 100644 index 00000000000..a62c27ab320 --- /dev/null +++ b/pkg/api/dashboard_permission.go @@ -0,0 +1,90 @@ +package api + +import ( + "time" + + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/guardian" +) + +func GetDashboardPermissionList(c *m.ReqContext) Response { + dashId := c.ParamsInt64(":dashboardId") + + _, rsp := getDashboardHelper(c.OrgId, "", dashId, "") + if rsp != nil { + return rsp + } + + g := guardian.New(dashId, c.OrgId, c.SignedInUser) + + if canAdmin, err := g.CanAdmin(); err != nil || !canAdmin { + return dashboardGuardianResponse(err) + } + + acl, err := g.GetAcl() + if err != nil { + return ApiError(500, "Failed to get dashboard permissions", err) + } + + for _, perm := range acl { + if perm.Slug != "" { + perm.Url = m.GetDashboardFolderUrl(perm.IsFolder, perm.Uid, perm.Slug) + } + } + + return Json(200, acl) +} + +func UpdateDashboardPermissions(c *m.ReqContext, apiCmd dtos.UpdateDashboardAclCommand) Response { + dashId := c.ParamsInt64(":dashboardId") + + _, rsp := getDashboardHelper(c.OrgId, "", dashId, "") + if rsp != nil { + return rsp + } + + g := guardian.New(dashId, c.OrgId, c.SignedInUser) + if canAdmin, err := g.CanAdmin(); err != nil || !canAdmin { + return dashboardGuardianResponse(err) + } + + cmd := m.UpdateDashboardAclCommand{} + cmd.DashboardId = dashId + + for _, item := range apiCmd.Items { + cmd.Items = append(cmd.Items, 
&m.DashboardAcl{ + OrgId: c.OrgId, + DashboardId: dashId, + UserId: item.UserId, + TeamId: item.TeamId, + Role: item.Role, + Permission: item.Permission, + Created: time.Now(), + Updated: time.Now(), + }) + } + + if okToUpdate, err := g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, cmd.Items); err != nil || !okToUpdate { + if err != nil { + if err == guardian.ErrGuardianPermissionExists || + err == guardian.ErrGuardianOverride { + return ApiError(400, err.Error(), err) + } + + return ApiError(500, "Error while checking dashboard permissions", err) + } + + return ApiError(403, "Cannot remove own admin permission for a folder", nil) + } + + if err := bus.Dispatch(&cmd); err != nil { + if err == m.ErrDashboardAclInfoMissing || err == m.ErrDashboardPermissionDashboardEmpty { + return ApiError(409, err.Error(), err) + } + return ApiError(500, "Failed to create permission", err) + } + + return ApiSuccess("Dashboard permissions updated") +} diff --git a/pkg/api/dashboard_permission_test.go b/pkg/api/dashboard_permission_test.go new file mode 100644 index 00000000000..bdf80ef5241 --- /dev/null +++ b/pkg/api/dashboard_permission_test.go @@ -0,0 +1,209 @@ +package api + +import ( + "testing" + + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/components/simplejson" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/guardian" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestDashboardPermissionApiEndpoint(t *testing.T) { + Convey("Dashboard permissions test", t, func() { + Convey("Given dashboard not exists", func() { + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { + return m.ErrDashboardNotFound + }) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", m.ROLE_EDITOR, func(sc *scenarioContext) { + callGetDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 404) + }) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateDashboardPermissionScenario("When calling POST on", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", cmd, func(sc *scenarioContext) { + callUpdateDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 404) + }) + }) + + Convey("Given user has no admin permissions", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{CanAdminValue: false}) + + getDashboardQueryResult := m.NewDashboard("Dash") + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { + query.Result = getDashboardQueryResult + return nil + }) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", m.ROLE_EDITOR, func(sc *scenarioContext) { + callGetDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 403) + }) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateDashboardPermissionScenario("When calling POST on", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", cmd, func(sc *scenarioContext) { + callUpdateDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 403) + }) + + Reset(func() { + guardian.New = 
origNewGuardian + }) + }) + + Convey("Given user has admin permissions and permissions to update", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ + CanAdminValue: true, + CheckPermissionBeforeUpdateValue: true, + GetAclValue: []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 1, UserId: 2, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, UserId: 3, Permission: m.PERMISSION_EDIT}, + {OrgId: 1, DashboardId: 1, UserId: 4, Permission: m.PERMISSION_ADMIN}, + {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, TeamId: 2, Permission: m.PERMISSION_ADMIN}, + }, + }) + + getDashboardQueryResult := m.NewDashboard("Dash") + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { + query.Result = getDashboardQueryResult + return nil + }) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", m.ROLE_ADMIN, func(sc *scenarioContext) { + callGetDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 200) + respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) + So(err, ShouldBeNil) + So(len(respJSON.MustArray()), ShouldEqual, 5) + So(respJSON.GetIndex(0).Get("userId").MustInt(), ShouldEqual, 2) + So(respJSON.GetIndex(0).Get("permission").MustInt(), ShouldEqual, m.PERMISSION_VIEW) + }) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateDashboardPermissionScenario("When calling POST on", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", cmd, func(sc *scenarioContext) { + callUpdateDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 200) + }) + + Reset(func() { + guardian.New = origNewGuardian + }) + }) + + Convey("When trying to update permissions with duplicate permissions", func() { + origNewGuardian := guardian.New + 
guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ + CanAdminValue: true, + CheckPermissionBeforeUpdateValue: false, + CheckPermissionBeforeUpdateError: guardian.ErrGuardianPermissionExists, + }) + + getDashboardQueryResult := m.NewDashboard("Dash") + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { + query.Result = getDashboardQueryResult + return nil + }) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateDashboardPermissionScenario("When calling POST on", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", cmd, func(sc *scenarioContext) { + callUpdateDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 400) + }) + + Reset(func() { + guardian.New = origNewGuardian + }) + }) + + Convey("When trying to override inherited permissions with lower presedence", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ + CanAdminValue: true, + CheckPermissionBeforeUpdateValue: false, + CheckPermissionBeforeUpdateError: guardian.ErrGuardianOverride}, + ) + + getDashboardQueryResult := m.NewDashboard("Dash") + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { + query.Result = getDashboardQueryResult + return nil + }) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateDashboardPermissionScenario("When calling POST on", "/api/dashboards/id/1/permissions", "/api/dashboards/id/:id/permissions", cmd, func(sc *scenarioContext) { + callUpdateDashboardPermissions(sc) + So(sc.resp.Code, ShouldEqual, 400) + }) + + Reset(func() { + guardian.New = origNewGuardian + }) + }) + }) +} + +func callGetDashboardPermissions(sc *scenarioContext) { + sc.handlerFunc = GetDashboardPermissionList + sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() +} + +func 
callUpdateDashboardPermissions(sc *scenarioContext) { + bus.AddHandler("test", func(cmd *m.UpdateDashboardAclCommand) error { + return nil + }) + + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() +} + +func updateDashboardPermissionScenario(desc string, url string, routePattern string, cmd dtos.UpdateDashboardAclCommand, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.OrgId = TestOrgID + sc.context.UserId = TestUserID + + return UpdateDashboardPermissions(c, cmd) + }) + + sc.m.Post(routePattern, sc.defaultHandler) + + fn(sc) + }) +} diff --git a/pkg/api/dashboard_snapshot.go b/pkg/api/dashboard_snapshot.go index a834bd4717d..4656940d2bb 100644 --- a/pkg/api/dashboard_snapshot.go +++ b/pkg/api/dashboard_snapshot.go @@ -6,13 +6,13 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/guardian" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) -func GetSharingOptions(c *middleware.Context) { +func GetSharingOptions(c *m.ReqContext) { c.JSON(200, util.DynMap{ "externalSnapshotURL": setting.ExternalSnapshotUrl, "externalSnapshotName": setting.ExternalSnapshotName, @@ -20,7 +20,7 @@ func GetSharingOptions(c *middleware.Context) { }) } -func CreateDashboardSnapshot(c *middleware.Context, cmd m.CreateDashboardSnapshotCommand) { +func CreateDashboardSnapshot(c *m.ReqContext, cmd m.CreateDashboardSnapshotCommand) { if cmd.Name == "" { cmd.Name = "Unnamed snapshot" } @@ -56,7 +56,8 @@ func CreateDashboardSnapshot(c *middleware.Context, cmd m.CreateDashboardSnapsho }) } -func GetDashboardSnapshot(c *middleware.Context) { +// GET /api/snapshots/:key +func 
GetDashboardSnapshot(c *m.ReqContext) { key := c.Params(":key") query := &m.GetDashboardSnapshotQuery{Key: key} @@ -90,19 +91,44 @@ func GetDashboardSnapshot(c *middleware.Context) { c.JSON(200, dto) } -func DeleteDashboardSnapshot(c *middleware.Context) { +// GET /api/snapshots-delete/:key +func DeleteDashboardSnapshot(c *m.ReqContext) Response { key := c.Params(":key") + + query := &m.GetDashboardSnapshotQuery{DeleteKey: key} + + err := bus.Dispatch(query) + if err != nil { + return ApiError(500, "Failed to get dashboard snapshot", err) + } + + if query.Result == nil { + return ApiError(404, "Failed to get dashboard snapshot", nil) + } + dashboard := query.Result.Dashboard + dashboardId := dashboard.Get("id").MustInt64() + + guardian := guardian.New(dashboardId, c.OrgId, c.SignedInUser) + canEdit, err := guardian.CanEdit() + if err != nil { + return ApiError(500, "Error while checking permissions for snapshot", err) + } + + if !canEdit && query.Result.UserId != c.SignedInUser.UserId { + return ApiError(403, "Access denied to this snapshot", nil) + } + cmd := &m.DeleteDashboardSnapshotCommand{DeleteKey: key} if err := bus.Dispatch(cmd); err != nil { - c.JsonApiErr(500, "Failed to delete dashboard snapshot", err) - return + return ApiError(500, "Failed to delete dashboard snapshot", err) } - c.JSON(200, util.DynMap{"message": "Snapshot deleted. It might take an hour before it's cleared from a CDN cache."}) + return Json(200, util.DynMap{"message": "Snapshot deleted. 
It might take an hour before it's cleared from a CDN cache."}) } -func SearchDashboardSnapshots(c *middleware.Context) Response { +// GET /api/dashboard/snapshots +func SearchDashboardSnapshots(c *m.ReqContext) Response { query := c.Query("query") limit := c.QueryInt("limit") @@ -111,9 +137,10 @@ func SearchDashboardSnapshots(c *middleware.Context) Response { } searchQuery := m.GetDashboardSnapshotsQuery{ - Name: query, - Limit: limit, - OrgId: c.OrgId, + Name: query, + Limit: limit, + OrgId: c.OrgId, + SignedInUser: c.SignedInUser, } err := bus.Dispatch(&searchQuery) diff --git a/pkg/api/dashboard_snapshot_test.go b/pkg/api/dashboard_snapshot_test.go new file mode 100644 index 00000000000..87c2b9e99d4 --- /dev/null +++ b/pkg/api/dashboard_snapshot_test.go @@ -0,0 +1,97 @@ +package api + +import ( + "testing" + "time" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/components/simplejson" + m "github.com/grafana/grafana/pkg/models" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestDashboardSnapshotApiEndpoint(t *testing.T) { + Convey("Given a single snapshot", t, func() { + jsonModel, _ := simplejson.NewJson([]byte(`{"id":100}`)) + + mockSnapshotResult := &m.DashboardSnapshot{ + Id: 1, + Dashboard: jsonModel, + Expires: time.Now().Add(time.Duration(1000) * time.Second), + UserId: 999999, + } + + bus.AddHandler("test", func(query *m.GetDashboardSnapshotQuery) error { + query.Result = mockSnapshotResult + return nil + }) + + bus.AddHandler("test", func(cmd *m.DeleteDashboardSnapshotCommand) error { + return nil + }) + + viewerRole := m.ROLE_VIEWER + editorRole := m.ROLE_EDITOR + aclMockResp := []*m.DashboardAclInfoDTO{} + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = aclMockResp + return nil + }) + + teamResp := []*m.Team{} + bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { + query.Result = teamResp + return nil + }) + + Convey("When user has editor role and is not 
in the ACL", func() { + Convey("Should not be able to delete snapshot", func() { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { + sc.handlerFunc = DeleteDashboardSnapshot + sc.fakeReqWithParams("GET", sc.url, map[string]string{"key": "12345"}).exec() + + So(sc.resp.Code, ShouldEqual, 403) + }) + }) + }) + + Convey("When user is editor and dashboard has default ACL", func() { + aclMockResp = []*m.DashboardAclInfoDTO{ + {Role: &viewerRole, Permission: m.PERMISSION_VIEW}, + {Role: &editorRole, Permission: m.PERMISSION_EDIT}, + } + + Convey("Should be able to delete a snapshot", func() { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { + sc.handlerFunc = DeleteDashboardSnapshot + sc.fakeReqWithParams("GET", sc.url, map[string]string{"key": "12345"}).exec() + + So(sc.resp.Code, ShouldEqual, 200) + respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) + So(err, ShouldBeNil) + + So(respJSON.Get("message").MustString(), ShouldStartWith, "Snapshot deleted") + }) + }) + }) + + Convey("When user is editor and is the creator of the snapshot", func() { + aclMockResp = []*m.DashboardAclInfoDTO{} + mockSnapshotResult.UserId = TestUserID + + Convey("Should be able to delete a snapshot", func() { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/snapshots-delete/12345", "/api/snapshots-delete/:key", m.ROLE_EDITOR, func(sc *scenarioContext) { + sc.handlerFunc = DeleteDashboardSnapshot + sc.fakeReqWithParams("GET", sc.url, map[string]string{"key": "12345"}).exec() + + So(sc.resp.Code, ShouldEqual, 200) + respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) + So(err, ShouldBeNil) + + So(respJSON.Get("message").MustString(), ShouldStartWith, "Snapshot deleted") + }) + }) + }) + }) +} diff --git a/pkg/api/dashboard_test.go 
b/pkg/api/dashboard_test.go index e6228878625..6c5b4e4c102 100644 --- a/pkg/api/dashboard_test.go +++ b/pkg/api/dashboard_test.go @@ -2,35 +2,24 @@ package api import ( "encoding/json" - "path/filepath" + "fmt" "testing" - macaron "gopkg.in/macaron.v1" - - "github.com/go-macaron/session" "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/services/alerting" "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/setting" . "github.com/smartystreets/goconvey/convey" ) -type fakeDashboardRepo struct { - inserted []*dashboards.SaveDashboardItem - getDashboard []*m.Dashboard -} - -func (repo *fakeDashboardRepo) SaveDashboard(json *dashboards.SaveDashboardItem) (*m.Dashboard, error) { - repo.inserted = append(repo.inserted, json) - return json.Dashboard, nil -} - -var fakeRepo *fakeDashboardRepo +// This tests three main scenarios. +// If a user has access to execute an action on a dashboard: +// 1. and the dashboard is in a folder which does not have an acl +// 2. and the dashboard is in a folder which does have an acl +// 3. 
Post dashboard response tests func TestDashboardApiEndpoint(t *testing.T) { Convey("Given a dashboard with a parent folder which does not have an acl", t, func() { @@ -39,8 +28,17 @@ func TestDashboardApiEndpoint(t *testing.T) { fakeDash.FolderId = 1 fakeDash.HasAcl = false + bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error { + dashboards := []*m.Dashboard{fakeDash} + query.Result = dashboards + return nil + }) + + var getDashboardQueries []*m.GetDashboardQuery + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { query.Result = fakeDash + getDashboardQueries = append(getDashboardQueries, query) return nil }) @@ -62,20 +60,20 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) - cmd := m.SaveDashboardCommand{ - Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "folderId": fakeDash.FolderId, - "title": fakeDash.Title, - "id": fakeDash.Id, - }), - } + // This tests two scenarios: + // 1. user is an org viewer + // 2. user is an org editor Convey("When user is an Org Viewer", func() { role := m.ROLE_VIEWER - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { dash := GetDashboardShouldReturn200(sc) + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should not be able to edit or save dashboard", func() { So(dash.Meta.CanEdit, ShouldBeFalse) So(dash.Meta.CanSave, ShouldBeFalse) @@ -83,9 +81,36 @@ func TestDashboardApiEndpoint(t *testing.T) { }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", 
"/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + dash := GetDashboardShouldReturn200(sc) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should not be able to edit or save dashboard", func() { + So(dash.Meta.CanEdit, ShouldBeFalse) + So(dash.Meta.CanSave, ShouldBeFalse) + So(dash.Meta.CanAdmin, ShouldBeFalse) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) { @@ -97,19 +122,18 @@ func TestDashboardApiEndpoint(t *testing.T) { CallGetDashboardVersions(sc) So(sc.resp.Code, ShouldEqual, 403) }) - - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) { - CallPostDashboard(sc) - So(sc.resp.Code, ShouldEqual, 403) - }) }) Convey("When user is an Org Editor", func() { role := m.ROLE_EDITOR - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { dash := 
GetDashboardShouldReturn200(sc) + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should be able to edit or save dashboard", func() { So(dash.Meta.CanEdit, ShouldBeTrue) So(dash.Meta.CanSave, ShouldBeTrue) @@ -117,9 +141,36 @@ func TestDashboardApiEndpoint(t *testing.T) { }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + dash := GetDashboardShouldReturn200(sc) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should be able to edit or save dashboard", func() { + So(dash.Meta.CanEdit, ShouldBeTrue) + So(dash.Meta.CanSave, ShouldBeTrue) + So(dash.Meta.CanAdmin, ShouldBeFalse) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 200) + + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 200) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) { @@ -131,33 +182,6 @@ func TestDashboardApiEndpoint(t *testing.T) { CallGetDashboardVersions(sc) So(sc.resp.Code, 
ShouldEqual, 200) }) - - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) { - CallPostDashboard(sc) - So(sc.resp.Code, ShouldEqual, 200) - }) - - Convey("When saving a dashboard folder in another folder", func() { - bus.AddHandler("test", func(query *m.GetDashboardQuery) error { - query.Result = fakeDash - query.Result.IsFolder = true - return nil - }) - invalidCmd := m.SaveDashboardCommand{ - FolderId: fakeDash.FolderId, - IsFolder: true, - Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "folderId": fakeDash.FolderId, - "title": fakeDash.Title, - }), - } - Convey("Should return an error", func() { - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, invalidCmd, func(sc *scenarioContext) { - CallPostDashboard(sc) - So(sc.resp.Code, ShouldEqual, 400) - }) - }) - }) }) }) @@ -168,6 +192,12 @@ func TestDashboardApiEndpoint(t *testing.T) { fakeDash.HasAcl = true setting.ViewersCanEdit = false + bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error { + dashboards := []*m.Dashboard{fakeDash} + query.Result = dashboards + return nil + }) + aclMockResp := []*m.DashboardAclInfoDTO{ { DashboardId: 1, @@ -181,8 +211,11 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) + var getDashboardQueries []*m.GetDashboardQuery + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { query.Result = fakeDash + getDashboardQueries = append(getDashboardQueries, query) return nil }) @@ -191,30 +224,59 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) - cmd := m.SaveDashboardCommand{ - FolderId: fakeDash.FolderId, - Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "id": fakeDash.Id, - "folderId": fakeDash.FolderId, - "title": fakeDash.Title, - }), - } + // This tests six scenarios: + // 1. user is an org viewer AND has no permissions for this dashboard + // 2. 
user is an org editor AND has no permissions for this dashboard + // 3. user is an org viewer AND has been granted edit permission for the dashboard + // 4. user is an org viewer AND all viewers have edit permission for this dashboard + // 5. user is an org viewer AND has been granted an admin permission + // 6. user is an org editor AND has been granted a view permission Convey("When user is an Org Viewer and has no permissions for this dashboard", func() { role := m.ROLE_VIEWER - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { sc.handlerFunc = GetDashboard sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should be denied access", func() { So(sc.resp.Code, ShouldEqual, 403) }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + sc.handlerFunc = GetDashboard + sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should be denied access", func() { + So(sc.resp.Code, ShouldEqual, 403) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by slug", func() { + 
So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) { @@ -226,28 +288,53 @@ func TestDashboardApiEndpoint(t *testing.T) { CallGetDashboardVersions(sc) So(sc.resp.Code, ShouldEqual, 403) }) - - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) { - CallPostDashboard(sc) - So(sc.resp.Code, ShouldEqual, 403) - }) }) Convey("When user is an Org Editor and has no permissions for this dashboard", func() { role := m.ROLE_EDITOR - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { sc.handlerFunc = GetDashboard sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should be denied access", func() { So(sc.resp.Code, ShouldEqual, 403) }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + sc.handlerFunc = GetDashboard + sc.fakeReqWithParams("GET", 
sc.url, map[string]string{}).exec() + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should be denied access", func() { + So(sc.resp.Code, ShouldEqual, 403) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) { @@ -259,18 +346,13 @@ func TestDashboardApiEndpoint(t *testing.T) { CallGetDashboardVersions(sc) So(sc.resp.Code, ShouldEqual, 403) }) - - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) { - CallPostDashboard(sc) - So(sc.resp.Code, ShouldEqual, 403) - }) }) Convey("When user is an Org Viewer but has an edit permission", func() { role := m.ROLE_VIEWER mockResult := []*m.DashboardAclInfoDTO{ - {Id: 1, OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_EDIT}, + {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_EDIT}, } bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { @@ -278,9 +360,13 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", 
"/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { dash := GetDashboardShouldReturn200(sc) + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should be able to get dashboard with edit rights", func() { So(dash.Meta.CanEdit, ShouldBeTrue) So(dash.Meta.CanSave, ShouldBeTrue) @@ -288,9 +374,36 @@ func TestDashboardApiEndpoint(t *testing.T) { }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + dash := GetDashboardShouldReturn200(sc) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should be able to get dashboard with edit rights", func() { + So(dash.Meta.CanEdit, ShouldBeTrue) + So(dash.Meta.CanSave, ShouldBeTrue) + So(dash.Meta.CanAdmin, ShouldBeFalse) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 200) + + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 200) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) loggedInUserScenarioWithRole("When calling GET on", 
"GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) { @@ -302,11 +415,6 @@ func TestDashboardApiEndpoint(t *testing.T) { CallGetDashboardVersions(sc) So(sc.resp.Code, ShouldEqual, 200) }) - - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) { - CallPostDashboard(sc) - So(sc.resp.Code, ShouldEqual, 200) - }) }) Convey("When user is an Org Viewer and viewers can edit", func() { @@ -314,7 +422,7 @@ func TestDashboardApiEndpoint(t *testing.T) { setting.ViewersCanEdit = true mockResult := []*m.DashboardAclInfoDTO{ - {Id: 1, OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_VIEW}, } bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { @@ -322,9 +430,13 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { dash := GetDashboardShouldReturn200(sc) + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should be able to get dashboard with edit rights but can save should be false", func() { So(dash.Meta.CanEdit, ShouldBeTrue) So(dash.Meta.CanSave, ShouldBeFalse) @@ -332,9 +444,36 @@ func TestDashboardApiEndpoint(t *testing.T) { }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + dash := 
GetDashboardShouldReturn200(sc) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should be able to get dashboard with edit rights but can save should be false", func() { + So(dash.Meta.CanEdit, ShouldBeTrue) + So(dash.Meta.CanSave, ShouldBeFalse) + So(dash.Meta.CanAdmin, ShouldBeFalse) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) }) @@ -342,7 +481,7 @@ func TestDashboardApiEndpoint(t *testing.T) { role := m.ROLE_VIEWER mockResult := []*m.DashboardAclInfoDTO{ - {Id: 1, OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_ADMIN}, + {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_ADMIN}, } bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { @@ -350,9 +489,13 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { dash := GetDashboardShouldReturn200(sc) + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should be 
able to get dashboard with edit rights", func() { So(dash.Meta.CanEdit, ShouldBeTrue) So(dash.Meta.CanSave, ShouldBeTrue) @@ -360,9 +503,36 @@ func TestDashboardApiEndpoint(t *testing.T) { }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + dash := GetDashboardShouldReturn200(sc) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should be able to get dashboard with edit rights", func() { + So(dash.Meta.CanEdit, ShouldBeTrue) + So(dash.Meta.CanSave, ShouldBeTrue) + So(dash.Meta.CanAdmin, ShouldBeTrue) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 200) + + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 200) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) { @@ -374,18 +544,13 @@ func TestDashboardApiEndpoint(t *testing.T) { CallGetDashboardVersions(sc) So(sc.resp.Code, ShouldEqual, 200) }) - - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) { - 
CallPostDashboard(sc) - So(sc.resp.Code, ShouldEqual, 200) - }) }) Convey("When user is an Org Editor but has a view permission", func() { role := m.ROLE_EDITOR mockResult := []*m.DashboardAclInfoDTO{ - {Id: 1, OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_VIEW}, } bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { @@ -393,18 +558,48 @@ func TestDashboardApiEndpoint(t *testing.T) { return nil }) - loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { dash := GetDashboardShouldReturn200(sc) + Convey("Should lookup dashboard by slug", func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + Convey("Should not be able to edit or save dashboard", func() { So(dash.Meta.CanEdit, ShouldBeFalse) So(dash.Meta.CanSave, ShouldBeFalse) }) }) - loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/2", "/api/dashboards/:id", role, func(sc *scenarioContext) { + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + dash := GetDashboardShouldReturn200(sc) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) + + Convey("Should not be able to edit or save dashboard", func() { + So(dash.Meta.CanEdit, ShouldBeFalse) + So(dash.Meta.CanSave, ShouldBeFalse) + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/child-dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { CallDeleteDashboard(sc) So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by slug", 
func() { + So(getDashboardQueries[0].Slug, ShouldEqual, "child-dash") + }) + }) + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/uid/abcdefghi", "/api/dashboards/uid/:uid", role, func(sc *scenarioContext) { + CallDeleteDashboardByUid(sc) + So(sc.resp.Code, ShouldEqual, 403) + + Convey("Should lookup dashboard by uid", func() { + So(getDashboardQueries[0].Uid, ShouldEqual, "abcdefghi") + }) }) loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/dashboards/id/2/versions/1", "/api/dashboards/id/:dashboardId/versions/:id", role, func(sc *scenarioContext) { @@ -416,18 +611,188 @@ func TestDashboardApiEndpoint(t *testing.T) { CallGetDashboardVersions(sc) So(sc.resp.Code, ShouldEqual, 403) }) + }) + }) - postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", role, cmd, func(sc *scenarioContext) { + Convey("Given two dashboards with the same title in different folders", t, func() { + dashOne := m.NewDashboard("dash") + dashOne.Id = 2 + dashOne.FolderId = 1 + dashOne.HasAcl = false + + dashTwo := m.NewDashboard("dash") + dashTwo.Id = 4 + dashTwo.FolderId = 3 + dashTwo.HasAcl = false + + bus.AddHandler("test", func(query *m.GetDashboardsBySlugQuery) error { + dashboards := []*m.Dashboard{dashOne, dashTwo} + query.Result = dashboards + return nil + }) + + role := m.ROLE_EDITOR + + loggedInUserScenarioWithRole("When calling DELETE on", "DELETE", "/api/dashboards/db/dash", "/api/dashboards/db/:slug", role, func(sc *scenarioContext) { + CallDeleteDashboard(sc) + + Convey("Should result in 412 Precondition failed", func() { + So(sc.resp.Code, ShouldEqual, 412) + result := sc.ToJson() + So(result.Get("status").MustString(), ShouldEqual, "multiple-slugs-exists") + So(result.Get("message").MustString(), ShouldEqual, m.ErrDashboardsWithSameSlugExists.Error()) + }) + }) + }) + + Convey("Post dashboard response tests", t, func() { + + // This tests that a valid request returns correct response + + 
Convey("Given a correct request for creating a dashboard", func() { + cmd := m.SaveDashboardCommand{ + OrgId: 1, + UserId: 5, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": "Dash", + }), + Overwrite: true, + FolderId: 3, + IsFolder: false, + Message: "msg", + } + + mock := &dashboards.FakeDashboardService{ + SaveDashboardResult: &m.Dashboard{ + Id: 2, + Uid: "uid", + Title: "Dash", + Slug: "dash", + Version: 2, + }, + } + + postDashboardScenario("When calling POST on", "/api/dashboards", "/api/dashboards", mock, cmd, func(sc *scenarioContext) { + CallPostDashboardShouldReturnSuccess(sc) + + Convey("It should call dashboard service with correct data", func() { + dto := mock.SavedDashboards[0] + So(dto.OrgId, ShouldEqual, cmd.OrgId) + So(dto.User.UserId, ShouldEqual, cmd.UserId) + So(dto.Dashboard.FolderId, ShouldEqual, 3) + So(dto.Dashboard.Title, ShouldEqual, "Dash") + So(dto.Overwrite, ShouldBeTrue) + So(dto.Message, ShouldEqual, "msg") + }) + + Convey("It should return correct response data", func() { + result := sc.ToJson() + So(result.Get("status").MustString(), ShouldEqual, "success") + So(result.Get("id").MustInt64(), ShouldEqual, 2) + So(result.Get("uid").MustString(), ShouldEqual, "uid") + So(result.Get("slug").MustString(), ShouldEqual, "dash") + So(result.Get("url").MustString(), ShouldEqual, "/d/uid/dash") + }) + }) + }) + + // This tests that invalid requests returns expected error responses + + Convey("Given incorrect requests for creating a dashboard", func() { + testCases := []struct { + SaveError error + ExpectedStatusCode int + }{ + {SaveError: m.ErrDashboardNotFound, ExpectedStatusCode: 404}, + {SaveError: m.ErrFolderNotFound, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardWithSameUIDExists, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardWithSameNameInFolderExists, ExpectedStatusCode: 412}, + {SaveError: m.ErrDashboardVersionMismatch, ExpectedStatusCode: 412}, + {SaveError: m.ErrDashboardTitleEmpty, 
ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardFolderCannotHaveParent, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardContainsInvalidAlertData, ExpectedStatusCode: 500}, + {SaveError: m.ErrDashboardFailedToUpdateAlertData, ExpectedStatusCode: 500}, + {SaveError: m.ErrDashboardFailedGenerateUniqueUid, ExpectedStatusCode: 500}, + {SaveError: m.ErrDashboardTypeMismatch, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardFolderWithSameNameAsDashboard, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardWithSameNameAsFolder, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardFolderNameExists, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardUpdateAccessDenied, ExpectedStatusCode: 403}, + {SaveError: m.ErrDashboardInvalidUid, ExpectedStatusCode: 400}, + {SaveError: m.ErrDashboardUidToLong, ExpectedStatusCode: 400}, + {SaveError: m.UpdatePluginDashboardError{PluginId: "plug"}, ExpectedStatusCode: 412}, + } + + cmd := m.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": "", + }), + } + + for _, tc := range testCases { + mock := &dashboards.FakeDashboardService{ + SaveDashboardError: tc.SaveError, + } + + postDashboardScenario(fmt.Sprintf("Expect '%s' error when calling POST on", tc.SaveError.Error()), "/api/dashboards", "/api/dashboards", mock, cmd, func(sc *scenarioContext) { + CallPostDashboard(sc) + So(sc.resp.Code, ShouldEqual, tc.ExpectedStatusCode) + }) + } + }) + }) + + Convey("Given two dashboards being compared", t, func() { + mockResult := []*m.DashboardAclInfoDTO{} + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = mockResult + return nil + }) + + bus.AddHandler("test", func(query *m.GetDashboardVersionQuery) error { + query.Result = &m.DashboardVersion{ + Data: simplejson.NewFromAny(map[string]interface{}{ + "title": "Dash" + string(query.DashboardId), + }), + } + return nil + }) + + cmd := dtos.CalculateDiffOptions{ + Base: 
dtos.CalculateDiffTarget{ + DashboardId: 1, + Version: 1, + }, + New: dtos.CalculateDiffTarget{ + DashboardId: 2, + Version: 2, + }, + DiffType: "basic", + } + + Convey("when user does not have permission", func() { + role := m.ROLE_VIEWER + + postDiffScenario("When calling POST on", "/api/dashboards/calculate-diff", "/api/dashboards/calculate-diff", cmd, role, func(sc *scenarioContext) { CallPostDashboard(sc) So(sc.resp.Code, ShouldEqual, 403) }) }) + + Convey("when user does have permission", func() { + role := m.ROLE_ADMIN + + postDiffScenario("When calling POST on", "/api/dashboards/calculate-diff", "/api/dashboards/calculate-diff", cmd, role, func(sc *scenarioContext) { + CallPostDashboard(sc) + So(sc.resp.Code, ShouldEqual, 200) + }) + }) }) } func GetDashboardShouldReturn200(sc *scenarioContext) dtos.DashboardFullWithMeta { - sc.handlerFunc = GetDashboard - sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() + CallGetDashboard(sc) So(sc.resp.Code, ShouldEqual, 200) @@ -438,6 +803,11 @@ func GetDashboardShouldReturn200(sc *scenarioContext) dtos.DashboardFullWithMeta return dash } +func CallGetDashboard(sc *scenarioContext) { + sc.handlerFunc = GetDashboard + sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() +} + func CallGetDashboardVersion(sc *scenarioContext) { bus.AddHandler("test", func(query *m.GetDashboardVersionQuery) error { query.Result = &m.DashboardVersion{} @@ -467,55 +837,75 @@ func CallDeleteDashboard(sc *scenarioContext) { sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() } +func CallDeleteDashboardByUid(sc *scenarioContext) { + bus.AddHandler("test", func(cmd *m.DeleteDashboardCommand) error { + return nil + }) + + sc.handlerFunc = DeleteDashboardByUid + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() +} + func CallPostDashboard(sc *scenarioContext) { - bus.AddHandler("test", func(cmd *alerting.ValidateDashboardAlertsCommand) error { - return nil - }) - - bus.AddHandler("test", 
func(cmd *m.SaveDashboardCommand) error { - cmd.Result = &m.Dashboard{Id: 2, Slug: "Dash", Version: 2} - return nil - }) - - bus.AddHandler("test", func(cmd *alerting.UpdateDashboardAlertsCommand) error { - return nil - }) - sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() } -func postDashboardScenario(desc string, url string, routePattern string, role m.RoleType, cmd m.SaveDashboardCommand, fn scenarioFunc) { +func CallPostDashboardShouldReturnSuccess(sc *scenarioContext) { + CallPostDashboard(sc) + + So(sc.resp.Code, ShouldEqual, 200) +} + +func postDashboardScenario(desc string, url string, routePattern string, mock *dashboards.FakeDashboardService, cmd m.SaveDashboardCommand, fn scenarioFunc) { Convey(desc+" "+url, func() { defer bus.ClearBusHandlers() - sc := &scenarioContext{ - url: url, - } - viewsPath, _ := filepath.Abs("../../public/views") - - sc.m = macaron.New() - sc.m.Use(macaron.Renderer(macaron.RenderOptions{ - Directory: viewsPath, - Delims: macaron.Delims{Left: "[[", Right: "]]"}, - })) - - sc.m.Use(middleware.GetContextHandler()) - sc.m.Use(middleware.Sessioner(&session.Options{})) - - sc.defaultHandler = wrap(func(c *middleware.Context) Response { + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { sc.context = c - sc.context.UserId = TestUserID - sc.context.OrgId = TestOrgID - sc.context.OrgRole = role + sc.context.SignedInUser = &m.SignedInUser{OrgId: cmd.OrgId, UserId: cmd.UserId} return PostDashboard(c, cmd) }) - fakeRepo = &fakeDashboardRepo{} - dashboards.SetRepository(fakeRepo) + origNewDashboardService := dashboards.NewService + dashboards.MockDashboardService(mock) + + sc.m.Post(routePattern, sc.defaultHandler) + + defer func() { + dashboards.NewService = origNewDashboardService + }() + + fn(sc) + }) +} + +func postDiffScenario(desc string, url string, routePattern string, cmd dtos.CalculateDiffOptions, role m.RoleType, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer 
bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.SignedInUser = &m.SignedInUser{ + OrgId: TestOrgID, + UserId: TestUserID, + } + sc.context.OrgRole = role + + return CalculateDashboardDiff(c, cmd) + }) sc.m.Post(routePattern, sc.defaultHandler) fn(sc) }) } + +func (sc *scenarioContext) ToJson() *simplejson.Json { + var result *simplejson.Json + err := json.NewDecoder(sc.resp.Body).Decode(&result) + So(err, ShouldBeNil) + return result +} diff --git a/pkg/api/dataproxy.go b/pkg/api/dataproxy.go index 8a712f99804..c6fe8b6cd8c 100644 --- a/pkg/api/dataproxy.go +++ b/pkg/api/dataproxy.go @@ -7,7 +7,6 @@ import ( "github.com/grafana/grafana/pkg/api/pluginproxy" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" ) @@ -35,7 +34,7 @@ func (hs *HttpServer) getDatasourceById(id int64, orgId int64, nocache bool) (*m return query.Result, nil } -func (hs *HttpServer) ProxyDataSourceRequest(c *middleware.Context) { +func (hs *HttpServer) ProxyDataSourceRequest(c *m.ReqContext) { c.TimeRequest(metrics.M_DataSource_ProxyReq_Timer) nocache := c.Req.Header.Get(HeaderNameNoBackendCache) == "true" diff --git a/pkg/api/datasources.go b/pkg/api/datasources.go index b5c5f9cb834..ed8fc5d2a66 100644 --- a/pkg/api/datasources.go +++ b/pkg/api/datasources.go @@ -5,13 +5,12 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/util" ) -func GetDataSources(c *middleware.Context) Response { +func GetDataSources(c *m.ReqContext) Response { query := m.GetDataSourcesQuery{OrgId: c.OrgId} if err := bus.Dispatch(&query); err != nil { @@ -50,7 +49,7 @@ 
func GetDataSources(c *middleware.Context) Response { return Json(200, &result) } -func GetDataSourceById(c *middleware.Context) Response { +func GetDataSourceById(c *m.ReqContext) Response { query := m.GetDataSourceByIdQuery{ Id: c.ParamsInt64(":id"), OrgId: c.OrgId, @@ -69,7 +68,7 @@ func GetDataSourceById(c *middleware.Context) Response { return Json(200, &dtos) } -func DeleteDataSourceById(c *middleware.Context) Response { +func DeleteDataSourceById(c *m.ReqContext) Response { id := c.ParamsInt64(":id") if id <= 0 { @@ -95,7 +94,7 @@ func DeleteDataSourceById(c *middleware.Context) Response { return ApiSuccess("Data source deleted") } -func DeleteDataSourceByName(c *middleware.Context) Response { +func DeleteDataSourceByName(c *m.ReqContext) Response { name := c.Params(":name") if name == "" { @@ -120,7 +119,7 @@ func DeleteDataSourceByName(c *middleware.Context) Response { return ApiSuccess("Data source deleted") } -func AddDataSource(c *middleware.Context, cmd m.AddDataSourceCommand) Response { +func AddDataSource(c *m.ReqContext, cmd m.AddDataSourceCommand) Response { cmd.OrgId = c.OrgId if err := bus.Dispatch(&cmd); err != nil { @@ -140,7 +139,7 @@ func AddDataSource(c *middleware.Context, cmd m.AddDataSourceCommand) Response { }) } -func UpdateDataSource(c *middleware.Context, cmd m.UpdateDataSourceCommand) Response { +func UpdateDataSource(c *m.ReqContext, cmd m.UpdateDataSourceCommand) Response { cmd.OrgId = c.OrgId cmd.Id = c.ParamsInt64(":id") @@ -205,7 +204,7 @@ func getRawDataSourceById(id int64, orgId int64) (*m.DataSource, error) { } // Get /api/datasources/name/:name -func GetDataSourceByName(c *middleware.Context) Response { +func GetDataSourceByName(c *m.ReqContext) Response { query := m.GetDataSourceByNameQuery{Name: c.Params(":name"), OrgId: c.OrgId} if err := bus.Dispatch(&query); err != nil { @@ -221,7 +220,7 @@ func GetDataSourceByName(c *middleware.Context) Response { } // Get /api/datasources/id/:name -func GetDataSourceIdByName(c 
*middleware.Context) Response { +func GetDataSourceIdByName(c *m.ReqContext) Response { query := m.GetDataSourceByNameQuery{Name: c.Params(":name"), OrgId: c.OrgId} if err := bus.Dispatch(&query); err != nil { diff --git a/pkg/api/datasources_test.go b/pkg/api/datasources_test.go index 72336693363..490393727d6 100644 --- a/pkg/api/datasources_test.go +++ b/pkg/api/datasources_test.go @@ -2,17 +2,11 @@ package api import ( "encoding/json" - "net/http" - "net/http/httptest" - "path/filepath" "testing" "github.com/grafana/grafana/pkg/models" - macaron "gopkg.in/macaron.v1" - "github.com/go-macaron/session" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" . "github.com/smartystreets/goconvey/convey" ) @@ -54,88 +48,3 @@ func TestDataSourcesProxy(t *testing.T) { }) }) } - -func loggedInUserScenario(desc string, url string, fn scenarioFunc) { - loggedInUserScenarioWithRole(desc, "GET", url, url, models.ROLE_EDITOR, fn) -} - -func loggedInUserScenarioWithRole(desc string, method string, url string, routePattern string, role models.RoleType, fn scenarioFunc) { - Convey(desc+" "+url, func() { - defer bus.ClearBusHandlers() - - sc := &scenarioContext{ - url: url, - } - viewsPath, _ := filepath.Abs("../../public/views") - - sc.m = macaron.New() - sc.m.Use(macaron.Renderer(macaron.RenderOptions{ - Directory: viewsPath, - Delims: macaron.Delims{Left: "[[", Right: "]]"}, - })) - - sc.m.Use(middleware.GetContextHandler()) - sc.m.Use(middleware.Sessioner(&session.Options{})) - - sc.defaultHandler = wrap(func(c *middleware.Context) Response { - sc.context = c - sc.context.UserId = TestUserID - sc.context.OrgId = TestOrgID - sc.context.OrgRole = role - if sc.handlerFunc != nil { - return sc.handlerFunc(sc.context) - } - - return nil - }) - - switch method { - case "GET": - sc.m.Get(routePattern, sc.defaultHandler) - case "DELETE": - sc.m.Delete(routePattern, sc.defaultHandler) - } - - fn(sc) - }) -} - -func (sc *scenarioContext) fakeReq(method, url 
string) *scenarioContext { - sc.resp = httptest.NewRecorder() - req, err := http.NewRequest(method, url, nil) - So(err, ShouldBeNil) - sc.req = req - - return sc -} - -func (sc *scenarioContext) fakeReqWithParams(method, url string, queryParams map[string]string) *scenarioContext { - sc.resp = httptest.NewRecorder() - req, err := http.NewRequest(method, url, nil) - q := req.URL.Query() - for k, v := range queryParams { - q.Add(k, v) - } - req.URL.RawQuery = q.Encode() - So(err, ShouldBeNil) - sc.req = req - - return sc -} - -type scenarioContext struct { - m *macaron.Macaron - context *middleware.Context - resp *httptest.ResponseRecorder - handlerFunc handlerFunc - defaultHandler macaron.Handler - req *http.Request - url string -} - -func (sc *scenarioContext) exec() { - sc.m.ServeHTTP(sc.resp, sc.req) -} - -type scenarioFunc func(c *scenarioContext) -type handlerFunc func(c *middleware.Context) Response diff --git a/pkg/api/dtos/alerting.go b/pkg/api/dtos/alerting.go index 4285ebc89cc..d30f2697f3f 100644 --- a/pkg/api/dtos/alerting.go +++ b/pkg/api/dtos/alerting.go @@ -19,7 +19,8 @@ type AlertRule struct { EvalDate time.Time `json:"evalDate"` EvalData *simplejson.Json `json:"evalData"` ExecutionError string `json:"executionError"` - DashbboardUri string `json:"dashboardUri"` + Url string `json:"url"` + CanEdit bool `json:"canEdit"` } type AlertNotification struct { diff --git a/pkg/api/dtos/dashboard.go b/pkg/api/dtos/dashboard.go index 0be0537527b..e4c66aebbda 100644 --- a/pkg/api/dtos/dashboard.go +++ b/pkg/api/dtos/dashboard.go @@ -16,6 +16,7 @@ type DashboardMeta struct { CanAdmin bool `json:"canAdmin"` CanStar bool `json:"canStar"` Slug string `json:"slug"` + Url string `json:"url"` Expires time.Time `json:"expires"` Created time.Time `json:"created"` Updated time.Time `json:"updated"` @@ -26,6 +27,7 @@ type DashboardMeta struct { IsFolder bool `json:"isFolder"` FolderId int64 `json:"folderId"` FolderTitle string `json:"folderTitle"` + FolderUrl string 
`json:"folderUrl"` } type DashboardFullWithMeta struct { diff --git a/pkg/api/dtos/folder.go b/pkg/api/dtos/folder.go new file mode 100644 index 00000000000..469656c6f8f --- /dev/null +++ b/pkg/api/dtos/folder.go @@ -0,0 +1,25 @@ +package dtos + +import "time" + +type Folder struct { + Id int64 `json:"id"` + Uid string `json:"uid"` + Title string `json:"title"` + Url string `json:"url"` + HasAcl bool `json:"hasAcl"` + CanSave bool `json:"canSave"` + CanEdit bool `json:"canEdit"` + CanAdmin bool `json:"canAdmin"` + CreatedBy string `json:"createdBy"` + Created time.Time `json:"created"` + UpdatedBy string `json:"updatedBy"` + Updated time.Time `json:"updated"` + Version int `json:"version"` +} + +type FolderSearchHit struct { + Id int64 `json:"id"` + Uid string `json:"uid"` + Title string `json:"title"` +} diff --git a/pkg/api/folder.go b/pkg/api/folder.go new file mode 100644 index 00000000000..143892fa6e8 --- /dev/null +++ b/pkg/api/folder.go @@ -0,0 +1,146 @@ +package api + +import ( + "fmt" + + "github.com/grafana/grafana/pkg/api/dtos" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/services/guardian" + "github.com/grafana/grafana/pkg/util" +) + +func GetFolders(c *m.ReqContext) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + folders, err := s.GetFolders(c.QueryInt("limit")) + + if err != nil { + return toFolderError(err) + } + + result := make([]dtos.FolderSearchHit, 0) + + for _, f := range folders { + result = append(result, dtos.FolderSearchHit{ + Id: f.Id, + Uid: f.Uid, + Title: f.Title, + }) + } + + return Json(200, result) +} + +func GetFolderByUid(c *m.ReqContext) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + folder, err := s.GetFolderByUid(c.Params(":uid")) + + if err != nil { + return toFolderError(err) + } + + g := guardian.New(folder.Id, c.OrgId, c.SignedInUser) + return Json(200, toFolderDto(g, folder)) +} + +func 
GetFolderById(c *m.ReqContext) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + folder, err := s.GetFolderById(c.ParamsInt64(":id")) + if err != nil { + return toFolderError(err) + } + + g := guardian.New(folder.Id, c.OrgId, c.SignedInUser) + return Json(200, toFolderDto(g, folder)) +} + +func CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + err := s.CreateFolder(&cmd) + if err != nil { + return toFolderError(err) + } + + g := guardian.New(cmd.Result.Id, c.OrgId, c.SignedInUser) + return Json(200, toFolderDto(g, cmd.Result)) +} + +func UpdateFolder(c *m.ReqContext, cmd m.UpdateFolderCommand) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + err := s.UpdateFolder(c.Params(":uid"), &cmd) + if err != nil { + return toFolderError(err) + } + + g := guardian.New(cmd.Result.Id, c.OrgId, c.SignedInUser) + return Json(200, toFolderDto(g, cmd.Result)) +} + +func DeleteFolder(c *m.ReqContext) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + f, err := s.DeleteFolder(c.Params(":uid")) + if err != nil { + return toFolderError(err) + } + + return Json(200, util.DynMap{ + "title": f.Title, + "message": fmt.Sprintf("Folder %s deleted", f.Title), + }) +} + +func toFolderDto(g guardian.DashboardGuardian, folder *m.Folder) dtos.Folder { + canEdit, _ := g.CanEdit() + canSave, _ := g.CanSave() + canAdmin, _ := g.CanAdmin() + + // Finding creator and last updater of the folder + updater, creator := "Anonymous", "Anonymous" + if folder.CreatedBy > 0 { + creator = getUserLogin(folder.CreatedBy) + } + if folder.UpdatedBy > 0 { + updater = getUserLogin(folder.UpdatedBy) + } + + return dtos.Folder{ + Id: folder.Id, + Uid: folder.Uid, + Title: folder.Title, + Url: folder.Url, + HasAcl: folder.HasAcl, + CanSave: canSave, + CanEdit: canEdit, + CanAdmin: canAdmin, + CreatedBy: creator, + Created: folder.Created, + UpdatedBy: updater, + Updated: 
folder.Updated, + Version: folder.Version, + } +} + +func toFolderError(err error) Response { + if err == m.ErrFolderTitleEmpty || + err == m.ErrFolderSameNameExists || + err == m.ErrFolderWithSameUIDExists || + err == m.ErrDashboardTypeMismatch || + err == m.ErrDashboardInvalidUid || + err == m.ErrDashboardUidToLong { + return ApiError(400, err.Error(), nil) + } + + if err == m.ErrFolderAccessDenied { + return ApiError(403, "Access denied", err) + } + + if err == m.ErrFolderNotFound { + return Json(404, util.DynMap{"status": "not-found", "message": m.ErrFolderNotFound.Error()}) + } + + if err == m.ErrFolderVersionMismatch { + return Json(412, util.DynMap{"status": "version-mismatch", "message": m.ErrFolderVersionMismatch.Error()}) + } + + return ApiError(500, "Folder API error", err) +} diff --git a/pkg/api/folder_permission.go b/pkg/api/folder_permission.go new file mode 100644 index 00000000000..1b04eb20e53 --- /dev/null +++ b/pkg/api/folder_permission.go @@ -0,0 +1,107 @@ +package api + +import ( + "time" + + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/services/guardian" +) + +func GetFolderPermissionList(c *m.ReqContext) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + folder, err := s.GetFolderByUid(c.Params(":uid")) + + if err != nil { + return toFolderError(err) + } + + g := guardian.New(folder.Id, c.OrgId, c.SignedInUser) + + if canAdmin, err := g.CanAdmin(); err != nil || !canAdmin { + return toFolderError(m.ErrFolderAccessDenied) + } + + acl, err := g.GetAcl() + if err != nil { + return ApiError(500, "Failed to get folder permissions", err) + } + + for _, perm := range acl { + perm.FolderId = folder.Id + perm.DashboardId = 0 + + if perm.Slug != "" { + perm.Url = m.GetDashboardFolderUrl(perm.IsFolder, perm.Uid, perm.Slug) + } + } + + return Json(200, acl) +} + 
+func UpdateFolderPermissions(c *m.ReqContext, apiCmd dtos.UpdateDashboardAclCommand) Response { + s := dashboards.NewFolderService(c.OrgId, c.SignedInUser) + folder, err := s.GetFolderByUid(c.Params(":uid")) + + if err != nil { + return toFolderError(err) + } + + g := guardian.New(folder.Id, c.OrgId, c.SignedInUser) + canAdmin, err := g.CanAdmin() + if err != nil { + return toFolderError(err) + } + + if !canAdmin { + return toFolderError(m.ErrFolderAccessDenied) + } + + cmd := m.UpdateDashboardAclCommand{} + cmd.DashboardId = folder.Id + + for _, item := range apiCmd.Items { + cmd.Items = append(cmd.Items, &m.DashboardAcl{ + OrgId: c.OrgId, + DashboardId: folder.Id, + UserId: item.UserId, + TeamId: item.TeamId, + Role: item.Role, + Permission: item.Permission, + Created: time.Now(), + Updated: time.Now(), + }) + } + + if okToUpdate, err := g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, cmd.Items); err != nil || !okToUpdate { + if err != nil { + if err == guardian.ErrGuardianPermissionExists || + err == guardian.ErrGuardianOverride { + return ApiError(400, err.Error(), err) + } + + return ApiError(500, "Error while checking folder permissions", err) + } + + return ApiError(403, "Cannot remove own admin permission for a folder", nil) + } + + if err := bus.Dispatch(&cmd); err != nil { + if err == m.ErrDashboardAclInfoMissing { + err = m.ErrFolderAclInfoMissing + } + if err == m.ErrDashboardPermissionDashboardEmpty { + err = m.ErrFolderPermissionFolderEmpty + } + + if err == m.ErrFolderAclInfoMissing || err == m.ErrFolderPermissionFolderEmpty { + return ApiError(409, err.Error(), err) + } + + return ApiError(500, "Failed to create permission", err) + } + + return ApiSuccess("Folder permissions updated") +} diff --git a/pkg/api/folder_permission_test.go b/pkg/api/folder_permission_test.go new file mode 100644 index 00000000000..00d025fdce2 --- /dev/null +++ b/pkg/api/folder_permission_test.go @@ -0,0 +1,241 @@ +package api + +import ( + "testing" + + 
"github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/components/simplejson" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/services/guardian" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestFolderPermissionApiEndpoint(t *testing.T) { + Convey("Folder permissions test", t, func() { + Convey("Given folder not exists", func() { + mock := &fakeFolderService{ + GetFolderByUidError: m.ErrFolderNotFound, + } + + origNewFolderService := dashboards.NewFolderService + mockFolderService(mock) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", m.ROLE_EDITOR, func(sc *scenarioContext) { + callGetFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 404) + }) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { + callUpdateFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 404) + }) + + Reset(func() { + dashboards.NewFolderService = origNewFolderService + }) + }) + + Convey("Given user has no admin permissions", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{CanAdminValue: false}) + + mock := &fakeFolderService{ + GetFolderByUidResult: &m.Folder{ + Id: 1, + Uid: "uid", + Title: "Folder", + }, + } + + origNewFolderService := dashboards.NewFolderService + mockFolderService(mock) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", m.ROLE_EDITOR, func(sc *scenarioContext) { + callGetFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 403) + }) + + cmd := 
dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { + callUpdateFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 403) + }) + + Reset(func() { + guardian.New = origNewGuardian + dashboards.NewFolderService = origNewFolderService + }) + }) + + Convey("Given user has admin permissions and permissions to update", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ + CanAdminValue: true, + CheckPermissionBeforeUpdateValue: true, + GetAclValue: []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 1, UserId: 2, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, UserId: 3, Permission: m.PERMISSION_EDIT}, + {OrgId: 1, DashboardId: 1, UserId: 4, Permission: m.PERMISSION_ADMIN}, + {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, TeamId: 2, Permission: m.PERMISSION_ADMIN}, + }, + }) + + mock := &fakeFolderService{ + GetFolderByUidResult: &m.Folder{ + Id: 1, + Uid: "uid", + Title: "Folder", + }, + } + + origNewFolderService := dashboards.NewFolderService + mockFolderService(mock) + + loggedInUserScenarioWithRole("When calling GET on", "GET", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", m.ROLE_ADMIN, func(sc *scenarioContext) { + callGetFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 200) + respJSON, err := simplejson.NewJson(sc.resp.Body.Bytes()) + So(err, ShouldBeNil) + So(len(respJSON.MustArray()), ShouldEqual, 5) + So(respJSON.GetIndex(0).Get("userId").MustInt(), ShouldEqual, 2) + So(respJSON.GetIndex(0).Get("permission").MustInt(), ShouldEqual, m.PERMISSION_VIEW) + }) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, 
+ }, + } + + updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { + callUpdateFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 200) + }) + + Reset(func() { + guardian.New = origNewGuardian + dashboards.NewFolderService = origNewFolderService + }) + }) + + Convey("When trying to update permissions with duplicate permissions", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ + CanAdminValue: true, + CheckPermissionBeforeUpdateValue: false, + CheckPermissionBeforeUpdateError: guardian.ErrGuardianPermissionExists, + }) + + mock := &fakeFolderService{ + GetFolderByUidResult: &m.Folder{ + Id: 1, + Uid: "uid", + Title: "Folder", + }, + } + + origNewFolderService := dashboards.NewFolderService + mockFolderService(mock) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { + callUpdateFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 400) + }) + + Reset(func() { + guardian.New = origNewGuardian + dashboards.NewFolderService = origNewFolderService + }) + }) + + Convey("When trying to override inherited permissions with lower presedence", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{ + CanAdminValue: true, + CheckPermissionBeforeUpdateValue: false, + CheckPermissionBeforeUpdateError: guardian.ErrGuardianOverride}, + ) + + mock := &fakeFolderService{ + GetFolderByUidResult: &m.Folder{ + Id: 1, + Uid: "uid", + Title: "Folder", + }, + } + + origNewFolderService := dashboards.NewFolderService + mockFolderService(mock) + + cmd := dtos.UpdateDashboardAclCommand{ + Items: []dtos.DashboardAclUpdateItem{ + {UserId: 
1000, Permission: m.PERMISSION_ADMIN}, + }, + } + + updateFolderPermissionScenario("When calling POST on", "/api/folders/uid/permissions", "/api/folders/:uid/permissions", cmd, func(sc *scenarioContext) { + callUpdateFolderPermissions(sc) + So(sc.resp.Code, ShouldEqual, 400) + }) + + Reset(func() { + guardian.New = origNewGuardian + dashboards.NewFolderService = origNewFolderService + }) + }) + }) +} + +func callGetFolderPermissions(sc *scenarioContext) { + sc.handlerFunc = GetFolderPermissionList + sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() +} + +func callUpdateFolderPermissions(sc *scenarioContext) { + bus.AddHandler("test", func(cmd *m.UpdateDashboardAclCommand) error { + return nil + }) + + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() +} + +func updateFolderPermissionScenario(desc string, url string, routePattern string, cmd dtos.UpdateDashboardAclCommand, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.OrgId = TestOrgID + sc.context.UserId = TestUserID + + return UpdateFolderPermissions(c, cmd) + }) + + sc.m.Post(routePattern, sc.defaultHandler) + + fn(sc) + }) +} diff --git a/pkg/api/folder_test.go b/pkg/api/folder_test.go new file mode 100644 index 00000000000..7cefdcf8544 --- /dev/null +++ b/pkg/api/folder_test.go @@ -0,0 +1,251 @@ +package api + +import ( + "encoding/json" + "fmt" + "testing" + + "github.com/grafana/grafana/pkg/api/dtos" + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/dashboards" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestFoldersApiEndpoint(t *testing.T) { + Convey("Create/update folder response tests", t, func() { + Convey("Given a correct request for creating a folder", func() { + cmd := m.CreateFolderCommand{ + Uid: "uid", + Title: "Folder", + } + + mock := &fakeFolderService{ + CreateFolderResult: &m.Folder{Id: 1, Uid: "uid", Title: "Folder"}, + } + + createFolderScenario("When calling POST on", "/api/folders", "/api/folders", mock, cmd, func(sc *scenarioContext) { + callCreateFolder(sc) + + Convey("It should return correct response data", func() { + folder := dtos.Folder{} + err := json.NewDecoder(sc.resp.Body).Decode(&folder) + So(err, ShouldBeNil) + So(folder.Id, ShouldEqual, 1) + So(folder.Uid, ShouldEqual, "uid") + So(folder.Title, ShouldEqual, "Folder") + }) + }) + }) + + Convey("Given incorrect requests for creating a folder", func() { + testCases := []struct { + Error error + ExpectedStatusCode int + }{ + {Error: m.ErrFolderWithSameUIDExists, ExpectedStatusCode: 400}, + {Error: m.ErrFolderTitleEmpty, ExpectedStatusCode: 400}, + {Error: m.ErrFolderSameNameExists, ExpectedStatusCode: 400}, + {Error: m.ErrDashboardInvalidUid, ExpectedStatusCode: 400}, + {Error: m.ErrDashboardUidToLong, ExpectedStatusCode: 400}, + {Error: m.ErrFolderAccessDenied, ExpectedStatusCode: 403}, + {Error: m.ErrFolderNotFound, ExpectedStatusCode: 404}, + {Error: m.ErrFolderVersionMismatch, ExpectedStatusCode: 412}, + {Error: m.ErrFolderFailedGenerateUniqueUid, ExpectedStatusCode: 500}, + } + + cmd := m.CreateFolderCommand{ + Uid: "uid", + Title: "Folder", + } + + for _, tc := range testCases { + mock := &fakeFolderService{ + CreateFolderError: tc.Error, + } + + createFolderScenario(fmt.Sprintf("Expect '%s' error when calling POST on", tc.Error.Error()), "/api/folders", "/api/folders", mock, cmd, func(sc *scenarioContext) { + callCreateFolder(sc) + if sc.resp.Code != tc.ExpectedStatusCode { + t.Errorf("For error '%s' expected status code %d, 
actual %d", tc.Error, tc.ExpectedStatusCode, sc.resp.Code) + } + }) + } + }) + + Convey("Given a correct request for updating a folder", func() { + cmd := m.UpdateFolderCommand{ + Title: "Folder upd", + } + + mock := &fakeFolderService{ + UpdateFolderResult: &m.Folder{Id: 1, Uid: "uid", Title: "Folder upd"}, + } + + updateFolderScenario("When calling PUT on", "/api/folders/uid", "/api/folders/:uid", mock, cmd, func(sc *scenarioContext) { + callUpdateFolder(sc) + + Convey("It should return correct response data", func() { + folder := dtos.Folder{} + err := json.NewDecoder(sc.resp.Body).Decode(&folder) + So(err, ShouldBeNil) + So(folder.Id, ShouldEqual, 1) + So(folder.Uid, ShouldEqual, "uid") + So(folder.Title, ShouldEqual, "Folder upd") + }) + }) + }) + + Convey("Given incorrect requests for updating a folder", func() { + testCases := []struct { + Error error + ExpectedStatusCode int + }{ + {Error: m.ErrFolderWithSameUIDExists, ExpectedStatusCode: 400}, + {Error: m.ErrFolderTitleEmpty, ExpectedStatusCode: 400}, + {Error: m.ErrFolderSameNameExists, ExpectedStatusCode: 400}, + {Error: m.ErrDashboardInvalidUid, ExpectedStatusCode: 400}, + {Error: m.ErrDashboardUidToLong, ExpectedStatusCode: 400}, + {Error: m.ErrFolderAccessDenied, ExpectedStatusCode: 403}, + {Error: m.ErrFolderNotFound, ExpectedStatusCode: 404}, + {Error: m.ErrFolderVersionMismatch, ExpectedStatusCode: 412}, + {Error: m.ErrFolderFailedGenerateUniqueUid, ExpectedStatusCode: 500}, + } + + cmd := m.UpdateFolderCommand{ + Title: "Folder upd", + } + + for _, tc := range testCases { + mock := &fakeFolderService{ + UpdateFolderError: tc.Error, + } + + updateFolderScenario(fmt.Sprintf("Expect '%s' error when calling PUT on", tc.Error.Error()), "/api/folders/uid", "/api/folders/:uid", mock, cmd, func(sc *scenarioContext) { + callUpdateFolder(sc) + if sc.resp.Code != tc.ExpectedStatusCode { + t.Errorf("For error '%s' expected status code %d, actual %d", tc.Error, tc.ExpectedStatusCode, sc.resp.Code) + } + }) + } 
+ }) + }) +} + +func callGetFolderByUid(sc *scenarioContext) { + sc.handlerFunc = GetFolderByUid + sc.fakeReqWithParams("GET", sc.url, map[string]string{}).exec() +} + +func callDeleteFolder(sc *scenarioContext) { + sc.handlerFunc = DeleteFolder + sc.fakeReqWithParams("DELETE", sc.url, map[string]string{}).exec() +} + +func callCreateFolder(sc *scenarioContext) { + sc.fakeReqWithParams("POST", sc.url, map[string]string{}).exec() +} + +func createFolderScenario(desc string, url string, routePattern string, mock *fakeFolderService, cmd m.CreateFolderCommand, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID} + + return CreateFolder(c, cmd) + }) + + origNewFolderService := dashboards.NewFolderService + mockFolderService(mock) + + sc.m.Post(routePattern, sc.defaultHandler) + + defer func() { + dashboards.NewFolderService = origNewFolderService + }() + + fn(sc) + }) +} + +func callUpdateFolder(sc *scenarioContext) { + sc.fakeReqWithParams("PUT", sc.url, map[string]string{}).exec() +} + +func updateFolderScenario(desc string, url string, routePattern string, mock *fakeFolderService, cmd m.UpdateFolderCommand, fn scenarioFunc) { + Convey(desc+" "+url, func() { + defer bus.ClearBusHandlers() + + sc := setupScenarioContext(url) + sc.defaultHandler = wrap(func(c *m.ReqContext) Response { + sc.context = c + sc.context.SignedInUser = &m.SignedInUser{OrgId: TestOrgID, UserId: TestUserID} + + return UpdateFolder(c, cmd) + }) + + origNewFolderService := dashboards.NewFolderService + mockFolderService(mock) + + sc.m.Put(routePattern, sc.defaultHandler) + + defer func() { + dashboards.NewFolderService = origNewFolderService + }() + + fn(sc) + }) +} + +type fakeFolderService struct { + GetFoldersResult []*m.Folder + GetFoldersError error + GetFolderByUidResult 
*m.Folder + GetFolderByUidError error + GetFolderByIdResult *m.Folder + GetFolderByIdError error + CreateFolderResult *m.Folder + CreateFolderError error + UpdateFolderResult *m.Folder + UpdateFolderError error + DeleteFolderResult *m.Folder + DeleteFolderError error + DeletedFolderUids []string +} + +func (s *fakeFolderService) GetFolders(limit int) ([]*m.Folder, error) { + return s.GetFoldersResult, s.GetFoldersError +} + +func (s *fakeFolderService) GetFolderById(id int64) (*m.Folder, error) { + return s.GetFolderByIdResult, s.GetFolderByIdError +} + +func (s *fakeFolderService) GetFolderByUid(uid string) (*m.Folder, error) { + return s.GetFolderByUidResult, s.GetFolderByUidError +} + +func (s *fakeFolderService) CreateFolder(cmd *m.CreateFolderCommand) error { + cmd.Result = s.CreateFolderResult + return s.CreateFolderError +} + +func (s *fakeFolderService) UpdateFolder(existingUid string, cmd *m.UpdateFolderCommand) error { + cmd.Result = s.UpdateFolderResult + return s.UpdateFolderError +} + +func (s *fakeFolderService) DeleteFolder(uid string) (*m.Folder, error) { + s.DeletedFolderUids = append(s.DeletedFolderUids, uid) + return s.DeleteFolderResult, s.DeleteFolderError +} + +func mockFolderService(mock *fakeFolderService) { + dashboards.NewFolderService = func(orgId int64, user *m.SignedInUser) dashboards.FolderService { + return mock + } +} diff --git a/pkg/api/frontendsettings.go b/pkg/api/frontendsettings.go index 591dcc62344..5cd52122c3f 100644 --- a/pkg/api/frontendsettings.go +++ b/pkg/api/frontendsettings.go @@ -5,14 +5,13 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) -func getFrontendSettingsMap(c *middleware.Context) (map[string]interface{}, error) { +func getFrontendSettingsMap(c *m.ReqContext) 
(map[string]interface{}, error) { orgDataSources := make([]*m.DataSource, 0) if c.OrgId != 0 { @@ -180,7 +179,7 @@ func getPanelSort(id string) int { return sort } -func GetFrontendSettings(c *middleware.Context) { +func GetFrontendSettings(c *m.ReqContext) { settings, err := getFrontendSettingsMap(c) if err != nil { c.JsonApiErr(400, "Failed to get frontend settings", err) diff --git a/pkg/api/grafana_com_proxy.go b/pkg/api/grafana_com_proxy.go index a2a446b48eb..afd3bb9bf8e 100644 --- a/pkg/api/grafana_com_proxy.go +++ b/pkg/api/grafana_com_proxy.go @@ -7,7 +7,7 @@ import ( "net/url" "time" - "github.com/grafana/grafana/pkg/middleware" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) @@ -41,7 +41,7 @@ func ReverseProxyGnetReq(proxyPath string) *httputil.ReverseProxy { return &httputil.ReverseProxy{Director: director} } -func ProxyGnetRequest(c *middleware.Context) { +func ProxyGnetRequest(c *m.ReqContext) { proxyPath := c.Params("*") proxy := ReverseProxyGnetReq(proxyPath) proxy.Transport = grafanaComProxyTransport diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 0366b9aedad..b911780913d 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -162,6 +162,10 @@ func (hs *HttpServer) newMacaron() *macaron.Macaron { hs.mapStatic(m, setting.StaticRootPath, "", "public") hs.mapStatic(m, setting.StaticRootPath, "robots.txt", "robots.txt") + if setting.ImageUploadProvider == "local" { + hs.mapStatic(m, setting.ImagesDir, "", "/public/img/attachments") + } + m.Use(macaron.Renderer(macaron.RenderOptions{ Directory: path.Join(setting.StaticRootPath, "views"), IndentJSON: macaron.Env != macaron.PROD, diff --git a/pkg/api/index.go b/pkg/api/index.go index 1b836356189..e50c59e082a 100644 --- a/pkg/api/index.go +++ b/pkg/api/index.go @@ -6,13 +6,12 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - 
"github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" ) -func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) { +func setIndexViewData(c *m.ReqContext) (*dtos.IndexViewData, error) { settings, err := getFrontendSettingsMap(c) if err != nil { return nil, err @@ -74,7 +73,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) { } if setting.DisableGravatar { - data.User.GravatarUrl = setting.AppSubUrl + "/public/img/transparent.png" + data.User.GravatarUrl = setting.AppSubUrl + "/public/img/user_profile.png" } if len(data.User.Name) == 0 { @@ -102,8 +101,8 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) { } dashboardChildNavs := []*dtos.NavLink{ - {Text: "Home", Url: setting.AppSubUrl + "/", Icon: "gicon gicon-home", HideFromTabs: true}, - {Divider: true, HideFromTabs: true}, + {Text: "Home", Id: "home", Url: setting.AppSubUrl + "/", Icon: "gicon gicon-home", HideFromTabs: true}, + {Text: "Divider", Divider: true, Id: "divider", HideFromTabs: true}, {Text: "Manage", Id: "manage-dashboards", Url: setting.AppSubUrl + "/dashboards", Icon: "gicon gicon-manage"}, {Text: "Playlists", Id: "playlists", Url: setting.AppSubUrl + "/playlists", Icon: "gicon gicon-playlists"}, {Text: "Snapshots", Id: "snapshots", Url: setting.AppSubUrl + "/dashboard/snapshots", Icon: "gicon gicon-snapshots"}, @@ -261,7 +260,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) { if c.IsGrafanaAdmin { cfgNode.Children = append(cfgNode.Children, &dtos.NavLink{ - Divider: true, HideFromTabs: true, + Divider: true, HideFromTabs: true, Id: "admin-divider", Text: "Text", }) cfgNode.Children = append(cfgNode.Children, &dtos.NavLink{ Text: "Server Admin", @@ -299,7 +298,7 @@ func setIndexViewData(c *middleware.Context) (*dtos.IndexViewData, error) { return &data, nil } -func Index(c 
*middleware.Context) { +func Index(c *m.ReqContext) { if data, err := setIndexViewData(c); err != nil { c.Handle(500, "Failed to get settings", err) return @@ -308,7 +307,7 @@ func Index(c *middleware.Context) { } } -func NotFoundHandler(c *middleware.Context) { +func NotFoundHandler(c *m.ReqContext) { if c.IsApiRequest() { c.JsonApiErr(404, "Not found", nil) return diff --git a/pkg/api/login.go b/pkg/api/login.go index ebfe672f825..2ca2ce5a3e2 100644 --- a/pkg/api/login.go +++ b/pkg/api/login.go @@ -8,8 +8,8 @@ import ( "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/login" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" ) @@ -17,7 +17,7 @@ const ( VIEW_INDEX = "index" ) -func LoginView(c *middleware.Context) { +func LoginView(c *m.ReqContext) { viewData, err := setIndexViewData(c) if err != nil { c.Handle(500, "Failed to get settings", err) @@ -53,7 +53,7 @@ func LoginView(c *middleware.Context) { c.Redirect(setting.AppSubUrl + "/") } -func tryLoginUsingRememberCookie(c *middleware.Context) bool { +func tryLoginUsingRememberCookie(c *m.ReqContext) bool { // Check auto-login. 
uname := c.GetCookie(setting.CookieUserName) if len(uname) == 0 { @@ -87,7 +87,7 @@ func tryLoginUsingRememberCookie(c *middleware.Context) bool { return true } -func LoginApiPing(c *middleware.Context) { +func LoginApiPing(c *m.ReqContext) { if !tryLoginUsingRememberCookie(c) { c.JsonApiErr(401, "Unauthorized", nil) return @@ -96,18 +96,19 @@ func LoginApiPing(c *middleware.Context) { c.JsonOK("Logged in") } -func LoginPost(c *middleware.Context, cmd dtos.LoginCommand) Response { +func LoginPost(c *m.ReqContext, cmd dtos.LoginCommand) Response { if setting.DisableLoginForm { return ApiError(401, "Login is disabled", nil) } authQuery := login.LoginUserQuery{ - Username: cmd.User, - Password: cmd.Password, + Username: cmd.User, + Password: cmd.Password, + IpAddress: c.Req.RemoteAddr, } if err := bus.Dispatch(&authQuery); err != nil { - if err == login.ErrInvalidCredentials { + if err == login.ErrInvalidCredentials || err == login.ErrTooManyLoginAttempts { return ApiError(401, "Invalid username or password", err) } @@ -132,7 +133,7 @@ func LoginPost(c *middleware.Context, cmd dtos.LoginCommand) Response { return Json(200, result) } -func loginUserWithUser(user *m.User, c *middleware.Context) { +func loginUserWithUser(user *m.User, c *m.ReqContext) { if user == nil { log.Error(3, "User login with nil user") } @@ -145,13 +146,13 @@ func loginUserWithUser(user *m.User, c *middleware.Context) { c.SetSuperSecureCookie(user.Rands+user.Password, setting.CookieRememberName, user.Login, days, setting.AppSubUrl+"/") } - c.Session.RegenerateId(c) - c.Session.Set(middleware.SESS_KEY_USERID, user.Id) + c.Session.RegenerateId(c.Context) + c.Session.Set(session.SESS_KEY_USERID, user.Id) } -func Logout(c *middleware.Context) { +func Logout(c *m.ReqContext) { c.SetCookie(setting.CookieUserName, "", -1, setting.AppSubUrl+"/") c.SetCookie(setting.CookieRememberName, "", -1, setting.AppSubUrl+"/") - c.Session.Destory(c) + c.Session.Destory(c.Context) c.Redirect(setting.AppSubUrl + 
"/login") } diff --git a/pkg/api/login_oauth.go b/pkg/api/login_oauth.go index 847f09f0eb8..1dba38e9cbd 100644 --- a/pkg/api/login_oauth.go +++ b/pkg/api/login_oauth.go @@ -1,6 +1,7 @@ package api import ( + "context" "crypto/rand" "crypto/tls" "crypto/x509" @@ -11,14 +12,14 @@ import ( "net/http" "net/url" - "golang.org/x/net/context" "golang.org/x/oauth2" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/quota" + "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/social" ) @@ -29,25 +30,25 @@ var ( ErrSignUpNotAllowed = errors.New("Signup is not allowed for this adapter") ErrUsersQuotaReached = errors.New("Users quota reached") ErrNoEmail = errors.New("Login provider didn't return an email address") - oauthLogger = log.New("oauth.login") + oauthLogger = log.New("oauth") ) func GenStateString() string { rnd := make([]byte, 32) rand.Read(rnd) - return base64.StdEncoding.EncodeToString(rnd) + return base64.URLEncoding.EncodeToString(rnd) } -func OAuthLogin(ctx *middleware.Context) { +func OAuthLogin(ctx *m.ReqContext) { if setting.OAuthService == nil { - ctx.Handle(404, "login.OAuthLogin(oauth service not enabled)", nil) + ctx.Handle(404, "OAuth not enabled", nil) return } name := ctx.Params(":name") connect, ok := social.SocialMap[name] if !ok { - ctx.Handle(404, "login.OAuthLogin(social login not enabled)", errors.New(name)) + ctx.Handle(404, fmt.Sprintf("No OAuth with name %s configured", name), nil) return } @@ -62,7 +63,7 @@ func OAuthLogin(ctx *middleware.Context) { code := ctx.Query("code") if code == "" { state := GenStateString() - ctx.Session.Set(middleware.SESS_KEY_OAUTH_STATE, state) + ctx.Session.Set(session.SESS_KEY_OAUTH_STATE, state) if setting.OAuthService.OAuthInfos[name].HostedDomain 
== "" { ctx.Redirect(connect.AuthCodeURL(state, oauth2.AccessTypeOnline)) } else { @@ -71,7 +72,7 @@ func OAuthLogin(ctx *middleware.Context) { return } - savedState, ok := ctx.Session.Get(middleware.SESS_KEY_OAUTH_STATE).(string) + savedState, ok := ctx.Session.Get(session.SESS_KEY_OAUTH_STATE).(string) if !ok { ctx.Handle(500, "login.OAuthLogin(missing saved state)", nil) return @@ -96,7 +97,9 @@ func OAuthLogin(ctx *middleware.Context) { if setting.OAuthService.OAuthInfos[name].TlsClientCert != "" || setting.OAuthService.OAuthInfos[name].TlsClientKey != "" { cert, err := tls.LoadX509KeyPair(setting.OAuthService.OAuthInfos[name].TlsClientCert, setting.OAuthService.OAuthInfos[name].TlsClientKey) if err != nil { - log.Fatal(1, "Failed to setup TlsClientCert", "oauth provider", name, "error", err) + ctx.Logger.Error("Failed to setup TlsClientCert", "oauth", name, "error", err) + ctx.Handle(500, "login.OAuthLogin(Failed to setup TlsClientCert)", nil) + return } tr.TLSClientConfig.Certificates = append(tr.TLSClientConfig.Certificates, cert) @@ -105,7 +108,9 @@ func OAuthLogin(ctx *middleware.Context) { if setting.OAuthService.OAuthInfos[name].TlsClientCa != "" { caCert, err := ioutil.ReadFile(setting.OAuthService.OAuthInfos[name].TlsClientCa) if err != nil { - log.Fatal(1, "Failed to setup TlsClientCa", "oauth provider", name, "error", err) + ctx.Logger.Error("Failed to setup TlsClientCa", "oauth", name, "error", err) + ctx.Handle(500, "login.OAuthLogin(Failed to setup TlsClientCa)", nil) + return } caCertPool := x509.NewCertPool() caCertPool.AppendCertsFromPEM(caCert) @@ -124,13 +129,13 @@ func OAuthLogin(ctx *middleware.Context) { // token.TokenType was defaulting to "bearer", which is out of spec, so we explicitly set to "Bearer" token.TokenType = "Bearer" - ctx.Logger.Debug("OAuthLogin Got token") + oauthLogger.Debug("OAuthLogin Got token", "token", token) // set up oauth2 client client := connect.Client(oauthCtx, token) // get user info - userInfo, err := 
connect.UserInfo(client) + userInfo, err := connect.UserInfo(client, token) if err != nil { if sErr, ok := err.(*social.Error); ok { redirectWithError(ctx, sErr) @@ -140,7 +145,7 @@ func OAuthLogin(ctx *middleware.Context) { return } - ctx.Logger.Debug("OAuthLogin got user info", "userInfo", userInfo) + oauthLogger.Debug("OAuthLogin got user info", "userInfo", userInfo) // validate that we got at least an email address if userInfo.Email == "" { @@ -163,7 +168,7 @@ func OAuthLogin(ctx *middleware.Context) { redirectWithError(ctx, ErrSignUpNotAllowed) return } - limitReached, err := middleware.QuotaReached(ctx, "user") + limitReached, err := quota.QuotaReached(ctx, "user") if err != nil { ctx.Handle(500, "Failed to get user quota", err) return @@ -204,9 +209,8 @@ func OAuthLogin(ctx *middleware.Context) { ctx.Redirect(setting.AppSubUrl + "/") } -func redirectWithError(ctx *middleware.Context, err error, v ...interface{}) { - ctx.Logger.Info(err.Error(), v...) - // TODO: we can use the flash storage here once it's implemented +func redirectWithError(ctx *m.ReqContext, err error, v ...interface{}) { + ctx.Logger.Error(err.Error(), v...) 
ctx.Session.Set("loginError", err.Error()) ctx.Redirect(setting.AppSubUrl + "/login") } diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go index 717565cd4a1..5d395d655a9 100644 --- a/pkg/api/metrics.go +++ b/pkg/api/metrics.go @@ -6,15 +6,14 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/middleware" - "github.com/grafana/grafana/pkg/models" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb/testdata" "github.com/grafana/grafana/pkg/util" ) // POST /api/tsdb/query -func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response { +func QueryMetrics(c *m.ReqContext, reqDto dtos.MetricRequest) Response { timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To) if len(reqDto.Queries) == 0 { @@ -26,7 +25,7 @@ func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response { return ApiError(400, "Query missing datasourceId", nil) } - dsQuery := models.GetDataSourceByIdQuery{Id: dsId, OrgId: c.OrgId} + dsQuery := m.GetDataSourceByIdQuery{Id: dsId, OrgId: c.OrgId} if err := bus.Dispatch(&dsQuery); err != nil { return ApiError(500, "failed to fetch data source", err) } @@ -61,7 +60,7 @@ func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response { } // GET /api/tsdb/testdata/scenarios -func GetTestDataScenarios(c *middleware.Context) Response { +func GetTestDataScenarios(c *m.ReqContext) Response { result := make([]interface{}, 0) for _, scenario := range testdata.ScenarioRegistry { @@ -77,14 +76,14 @@ func GetTestDataScenarios(c *middleware.Context) Response { } // Genereates a index out of range error -func GenerateError(c *middleware.Context) Response { +func GenerateError(c *m.ReqContext) Response { var array []string return Json(200, array[20]) } // GET /api/tsdb/testdata/gensql -func GenerateSqlTestData(c *middleware.Context) 
Response { - if err := bus.Dispatch(&models.InsertSqlTestDataCommand{}); err != nil { +func GenerateSqlTestData(c *m.ReqContext) Response { + if err := bus.Dispatch(&m.InsertSqlTestDataCommand{}); err != nil { return ApiError(500, "Failed to insert test data", err) } @@ -92,7 +91,7 @@ func GenerateSqlTestData(c *middleware.Context) Response { } // GET /api/tsdb/testdata/random-walk -func GetTestDataRandomWalk(c *middleware.Context) Response { +func GetTestDataRandomWalk(c *m.ReqContext) Response { from := c.Query("from") to := c.Query("to") intervalMs := c.QueryInt64("intervalMs") @@ -100,7 +99,7 @@ func GetTestDataRandomWalk(c *middleware.Context) Response { timeRange := tsdb.NewTimeRange(from, to) request := &tsdb.TsdbQuery{TimeRange: timeRange} - dsInfo := &models.DataSource{Type: "grafana-testdata-datasource"} + dsInfo := &m.DataSource{Type: "grafana-testdata-datasource"} request.Queries = append(request.Queries, &tsdb.Query{ RefId: "A", IntervalMs: intervalMs, diff --git a/pkg/api/org.go b/pkg/api/org.go index bddfebf80ce..5f20559dbbe 100644 --- a/pkg/api/org.go +++ b/pkg/api/org.go @@ -4,24 +4,23 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) // GET /api/org -func GetOrgCurrent(c *middleware.Context) Response { +func GetOrgCurrent(c *m.ReqContext) Response { return getOrgHelper(c.OrgId) } // GET /api/orgs/:orgId -func GetOrgById(c *middleware.Context) Response { +func GetOrgById(c *m.ReqContext) Response { return getOrgHelper(c.ParamsInt64(":orgId")) } // Get /api/orgs/name/:name -func GetOrgByName(c *middleware.Context) Response { +func GetOrgByName(c *m.ReqContext) Response { query := m.GetOrgByNameQuery{Name: c.Params(":name")} if err := bus.Dispatch(&query); err != nil { if err == m.ErrOrgNotFound { @@ 
-76,7 +75,7 @@ func getOrgHelper(orgId int64) Response { } // POST /api/orgs -func CreateOrg(c *middleware.Context, cmd m.CreateOrgCommand) Response { +func CreateOrg(c *m.ReqContext, cmd m.CreateOrgCommand) Response { if !c.IsSignedIn || (!setting.AllowUserOrgCreate && !c.IsGrafanaAdmin) { return ApiError(403, "Access denied", nil) } @@ -98,12 +97,12 @@ func CreateOrg(c *middleware.Context, cmd m.CreateOrgCommand) Response { } // PUT /api/org -func UpdateOrgCurrent(c *middleware.Context, form dtos.UpdateOrgForm) Response { +func UpdateOrgCurrent(c *m.ReqContext, form dtos.UpdateOrgForm) Response { return updateOrgHelper(form, c.OrgId) } // PUT /api/orgs/:orgId -func UpdateOrg(c *middleware.Context, form dtos.UpdateOrgForm) Response { +func UpdateOrg(c *m.ReqContext, form dtos.UpdateOrgForm) Response { return updateOrgHelper(form, c.ParamsInt64(":orgId")) } @@ -120,12 +119,12 @@ func updateOrgHelper(form dtos.UpdateOrgForm, orgId int64) Response { } // PUT /api/org/address -func UpdateOrgAddressCurrent(c *middleware.Context, form dtos.UpdateOrgAddressForm) Response { +func UpdateOrgAddressCurrent(c *m.ReqContext, form dtos.UpdateOrgAddressForm) Response { return updateOrgAddressHelper(form, c.OrgId) } // PUT /api/orgs/:orgId/address -func UpdateOrgAddress(c *middleware.Context, form dtos.UpdateOrgAddressForm) Response { +func UpdateOrgAddress(c *m.ReqContext, form dtos.UpdateOrgAddressForm) Response { return updateOrgAddressHelper(form, c.ParamsInt64(":orgId")) } @@ -150,7 +149,7 @@ func updateOrgAddressHelper(form dtos.UpdateOrgAddressForm, orgId int64) Respons } // GET /api/orgs/:orgId -func DeleteOrgById(c *middleware.Context) Response { +func DeleteOrgById(c *m.ReqContext) Response { if err := bus.Dispatch(&m.DeleteOrgCommand{Id: c.ParamsInt64(":orgId")}); err != nil { if err == m.ErrOrgNotFound { return ApiError(404, "Failed to delete organization. 
ID not found", nil) @@ -160,7 +159,7 @@ func DeleteOrgById(c *middleware.Context) Response { return ApiSuccess("Organization deleted") } -func SearchOrgs(c *middleware.Context) Response { +func SearchOrgs(c *m.ReqContext) Response { query := m.SearchOrgsQuery{ Query: c.Query("query"), Name: c.Query("name"), diff --git a/pkg/api/org_invite.go b/pkg/api/org_invite.go index 57d9913d2eb..6a727dd95cc 100644 --- a/pkg/api/org_invite.go +++ b/pkg/api/org_invite.go @@ -7,13 +7,12 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/events" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) -func GetPendingOrgInvites(c *middleware.Context) Response { +func GetPendingOrgInvites(c *m.ReqContext) Response { query := m.GetTempUsersQuery{OrgId: c.OrgId, Status: m.TmpUserInvitePending} if err := bus.Dispatch(&query); err != nil { @@ -27,7 +26,7 @@ func GetPendingOrgInvites(c *middleware.Context) Response { return Json(200, query.Result) } -func AddOrgInvite(c *middleware.Context, inviteDto dtos.AddInviteForm) Response { +func AddOrgInvite(c *m.ReqContext, inviteDto dtos.AddInviteForm) Response { if !inviteDto.Role.IsValid() { return ApiError(400, "Invalid role specified", nil) } @@ -89,7 +88,7 @@ func AddOrgInvite(c *middleware.Context, inviteDto dtos.AddInviteForm) Response return ApiSuccess(fmt.Sprintf("Created invite for %s", inviteDto.LoginOrEmail)) } -func inviteExistingUserToOrg(c *middleware.Context, user *m.User, inviteDto *dtos.AddInviteForm) Response { +func inviteExistingUserToOrg(c *m.ReqContext, user *m.User, inviteDto *dtos.AddInviteForm) Response { // user exists, add org role createOrgUserCmd := m.AddOrgUserCommand{OrgId: c.OrgId, UserId: user.Id, Role: inviteDto.Role} if err := bus.Dispatch(&createOrgUserCmd); err != nil { @@ -119,7 +118,7 @@ func inviteExistingUserToOrg(c 
*middleware.Context, user *m.User, inviteDto *dto } } -func RevokeInvite(c *middleware.Context) Response { +func RevokeInvite(c *m.ReqContext) Response { if ok, rsp := updateTempUserStatus(c.Params(":code"), m.TmpUserRevoked); !ok { return rsp } @@ -127,7 +126,7 @@ func RevokeInvite(c *middleware.Context) Response { return ApiSuccess("Invite revoked") } -func GetInviteInfoByCode(c *middleware.Context) Response { +func GetInviteInfoByCode(c *m.ReqContext) Response { query := m.GetTempUserByCodeQuery{Code: c.Params(":code")} if err := bus.Dispatch(&query); err != nil { @@ -147,7 +146,7 @@ func GetInviteInfoByCode(c *middleware.Context) Response { }) } -func CompleteInvite(c *middleware.Context, completeInvite dtos.CompleteInviteForm) Response { +func CompleteInvite(c *m.ReqContext, completeInvite dtos.CompleteInviteForm) Response { query := m.GetTempUserByCodeQuery{Code: completeInvite.InviteCode} if err := bus.Dispatch(&query); err != nil { diff --git a/pkg/api/org_users.go b/pkg/api/org_users.go index 57a15bd8db5..6d7c2bb94bd 100644 --- a/pkg/api/org_users.go +++ b/pkg/api/org_users.go @@ -3,18 +3,17 @@ package api import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" ) // POST /api/org/users -func AddOrgUserToCurrentOrg(c *middleware.Context, cmd m.AddOrgUserCommand) Response { +func AddOrgUserToCurrentOrg(c *m.ReqContext, cmd m.AddOrgUserCommand) Response { cmd.OrgId = c.OrgId return addOrgUserHelper(cmd) } // POST /api/orgs/:orgId/users -func AddOrgUser(c *middleware.Context, cmd m.AddOrgUserCommand) Response { +func AddOrgUser(c *m.ReqContext, cmd m.AddOrgUserCommand) Response { cmd.OrgId = c.ParamsInt64(":orgId") return addOrgUserHelper(cmd) } @@ -45,38 +44,42 @@ func addOrgUserHelper(cmd m.AddOrgUserCommand) Response { } // GET /api/org/users -func GetOrgUsersForCurrentOrg(c *middleware.Context) Response { - return 
getOrgUsersHelper(c.OrgId) +func GetOrgUsersForCurrentOrg(c *m.ReqContext) Response { + return getOrgUsersHelper(c.OrgId, c.Params("query"), c.ParamsInt("limit")) } // GET /api/orgs/:orgId/users -func GetOrgUsers(c *middleware.Context) Response { - return getOrgUsersHelper(c.ParamsInt64(":orgId")) +func GetOrgUsers(c *m.ReqContext) Response { + return getOrgUsersHelper(c.ParamsInt64(":orgId"), "", 0) } -func getOrgUsersHelper(orgId int64) Response { - query := m.GetOrgUsersQuery{OrgId: orgId} +func getOrgUsersHelper(orgId int64, query string, limit int) Response { + q := m.GetOrgUsersQuery{ + OrgId: orgId, + Query: query, + Limit: limit, + } - if err := bus.Dispatch(&query); err != nil { + if err := bus.Dispatch(&q); err != nil { return ApiError(500, "Failed to get account user", err) } - for _, user := range query.Result { + for _, user := range q.Result { user.AvatarUrl = dtos.GetGravatarUrl(user.Email) } - return Json(200, query.Result) + return Json(200, q.Result) } // PATCH /api/org/users/:userId -func UpdateOrgUserForCurrentOrg(c *middleware.Context, cmd m.UpdateOrgUserCommand) Response { +func UpdateOrgUserForCurrentOrg(c *m.ReqContext, cmd m.UpdateOrgUserCommand) Response { cmd.OrgId = c.OrgId cmd.UserId = c.ParamsInt64(":userId") return updateOrgUserHelper(cmd) } // PATCH /api/orgs/:orgId/users/:userId -func UpdateOrgUser(c *middleware.Context, cmd m.UpdateOrgUserCommand) Response { +func UpdateOrgUser(c *m.ReqContext, cmd m.UpdateOrgUserCommand) Response { cmd.OrgId = c.ParamsInt64(":orgId") cmd.UserId = c.ParamsInt64(":userId") return updateOrgUserHelper(cmd) @@ -98,13 +101,13 @@ func updateOrgUserHelper(cmd m.UpdateOrgUserCommand) Response { } // DELETE /api/org/users/:userId -func RemoveOrgUserForCurrentOrg(c *middleware.Context) Response { +func RemoveOrgUserForCurrentOrg(c *m.ReqContext) Response { userId := c.ParamsInt64(":userId") return removeOrgUserHelper(c.OrgId, userId) } // DELETE /api/orgs/:orgId/users/:userId -func RemoveOrgUser(c 
*middleware.Context) Response { +func RemoveOrgUser(c *m.ReqContext) Response { userId := c.ParamsInt64(":userId") orgId := c.ParamsInt64(":orgId") return removeOrgUserHelper(orgId, userId) diff --git a/pkg/api/password.go b/pkg/api/password.go index e71f1317ee4..31ea5d91b34 100644 --- a/pkg/api/password.go +++ b/pkg/api/password.go @@ -3,12 +3,11 @@ package api import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/util" ) -func SendResetPasswordEmail(c *middleware.Context, form dtos.SendResetPasswordEmailForm) Response { +func SendResetPasswordEmail(c *m.ReqContext, form dtos.SendResetPasswordEmailForm) Response { userQuery := m.GetUserByLoginQuery{LoginOrEmail: form.UserOrEmail} if err := bus.Dispatch(&userQuery); err != nil { @@ -24,7 +23,7 @@ func SendResetPasswordEmail(c *middleware.Context, form dtos.SendResetPasswordEm return ApiSuccess("Email sent") } -func ResetPassword(c *middleware.Context, form dtos.ResetUserPasswordForm) Response { +func ResetPassword(c *m.ReqContext, form dtos.ResetUserPasswordForm) Response { query := m.ValidateResetPasswordCodeQuery{Code: form.Code} if err := bus.Dispatch(&query); err != nil { diff --git a/pkg/api/playlist.go b/pkg/api/playlist.go index 040aef0474e..45de40ce337 100644 --- a/pkg/api/playlist.go +++ b/pkg/api/playlist.go @@ -3,11 +3,10 @@ package api import ( "github.com/grafana/grafana/pkg/bus" _ "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" ) -func ValidateOrgPlaylist(c *middleware.Context) { +func ValidateOrgPlaylist(c *m.ReqContext) { id := c.ParamsInt64(":id") query := m.GetPlaylistByIdQuery{Id: id} err := bus.Dispatch(&query) @@ -40,7 +39,7 @@ func ValidateOrgPlaylist(c *middleware.Context) { } } -func SearchPlaylists(c *middleware.Context) Response { +func SearchPlaylists(c 
*m.ReqContext) Response { query := c.Query("query") limit := c.QueryInt("limit") @@ -62,7 +61,7 @@ func SearchPlaylists(c *middleware.Context) Response { return Json(200, searchQuery.Result) } -func GetPlaylist(c *middleware.Context) Response { +func GetPlaylist(c *m.ReqContext) Response { id := c.ParamsInt64(":id") cmd := m.GetPlaylistByIdQuery{Id: id} @@ -115,7 +114,7 @@ func LoadPlaylistItems(id int64) ([]m.PlaylistItem, error) { return *itemQuery.Result, nil } -func GetPlaylistItems(c *middleware.Context) Response { +func GetPlaylistItems(c *m.ReqContext) Response { id := c.ParamsInt64(":id") playlistDTOs, err := LoadPlaylistItemDTOs(id) @@ -127,7 +126,7 @@ func GetPlaylistItems(c *middleware.Context) Response { return Json(200, playlistDTOs) } -func GetPlaylistDashboards(c *middleware.Context) Response { +func GetPlaylistDashboards(c *m.ReqContext) Response { playlistId := c.ParamsInt64(":id") playlists, err := LoadPlaylistDashboards(c.OrgId, c.SignedInUser, playlistId) @@ -138,7 +137,7 @@ func GetPlaylistDashboards(c *middleware.Context) Response { return Json(200, playlists) } -func DeletePlaylist(c *middleware.Context) Response { +func DeletePlaylist(c *m.ReqContext) Response { id := c.ParamsInt64(":id") cmd := m.DeletePlaylistCommand{Id: id, OrgId: c.OrgId} @@ -149,7 +148,7 @@ func DeletePlaylist(c *middleware.Context) Response { return Json(200, "") } -func CreatePlaylist(c *middleware.Context, cmd m.CreatePlaylistCommand) Response { +func CreatePlaylist(c *m.ReqContext, cmd m.CreatePlaylistCommand) Response { cmd.OrgId = c.OrgId if err := bus.Dispatch(&cmd); err != nil { @@ -159,7 +158,7 @@ func CreatePlaylist(c *middleware.Context, cmd m.CreatePlaylistCommand) Response return Json(200, cmd.Result) } -func UpdatePlaylist(c *middleware.Context, cmd m.UpdatePlaylistCommand) Response { +func UpdatePlaylist(c *m.ReqContext, cmd m.UpdatePlaylistCommand) Response { cmd.OrgId = c.OrgId if err := bus.Dispatch(&cmd); err != nil { diff --git 
a/pkg/api/pluginproxy/ds_proxy.go b/pkg/api/pluginproxy/ds_proxy.go index 5f4ec632c4d..b861a344c75 100644 --- a/pkg/api/pluginproxy/ds_proxy.go +++ b/pkg/api/pluginproxy/ds_proxy.go @@ -18,7 +18,6 @@ import ( "github.com/opentracing/opentracing-go" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" @@ -42,14 +41,14 @@ type jwtToken struct { type DataSourceProxy struct { ds *m.DataSource - ctx *middleware.Context + ctx *m.ReqContext targetUrl *url.URL proxyPath string route *plugins.AppPluginRoute plugin *plugins.DataSourcePlugin } -func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx *middleware.Context, proxyPath string) *DataSourceProxy { +func NewDataSourceProxy(ds *m.DataSource, plugin *plugins.DataSourcePlugin, ctx *m.ReqContext, proxyPath string) *DataSourceProxy { targetUrl, _ := url.Parse(ds.Url) return &DataSourceProxy{ @@ -190,8 +189,14 @@ func (proxy *DataSourceProxy) validateRequest() error { } if proxy.ds.Type == m.DS_PROMETHEUS { - if proxy.ctx.Req.Request.Method != http.MethodGet || !strings.HasPrefix(proxy.proxyPath, "api/") { - return errors.New("GET is only allowed on proxied Prometheus datasource") + if proxy.ctx.Req.Request.Method == "DELETE" { + return errors.New("Deletes not allowed on proxied Prometheus datasource") + } + if proxy.ctx.Req.Request.Method == "PUT" { + return errors.New("Puts not allowed on proxied Prometheus datasource") + } + if proxy.ctx.Req.Request.Method == "POST" && !(proxy.proxyPath == "api/v1/query" || proxy.proxyPath == "api/v1/query_range") { + return errors.New("Posts not allowed on proxied Prometheus datasource except on /query and /query_range") } } @@ -255,7 +260,7 @@ func (proxy *DataSourceProxy) logRequest() { "body", body) } -func checkWhiteList(c *middleware.Context, host string) bool { +func checkWhiteList(c *m.ReqContext, host 
string) bool { if host != "" && len(setting.DataProxyWhiteList) > 0 { if _, exists := setting.DataProxyWhiteList[host]; !exists { c.JsonApiErr(403, "Data proxy hostname and ip are not included in whitelist", nil) diff --git a/pkg/api/pluginproxy/ds_proxy_test.go b/pkg/api/pluginproxy/ds_proxy_test.go index a7a869b2a9f..3cf67d9178a 100644 --- a/pkg/api/pluginproxy/ds_proxy_test.go +++ b/pkg/api/pluginproxy/ds_proxy_test.go @@ -8,7 +8,6 @@ import ( macaron "gopkg.in/macaron.v1" "github.com/grafana/grafana/pkg/components/simplejson" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" @@ -61,7 +60,7 @@ func TestDSRouteRule(t *testing.T) { } req, _ := http.NewRequest("GET", "http://localhost/asd", nil) - ctx := &middleware.Context{ + ctx := &m.ReqContext{ Context: &macaron.Context{ Req: macaron.Request{Request: req}, }, @@ -104,7 +103,7 @@ func TestDSRouteRule(t *testing.T) { Convey("When proxying graphite", func() { plugin := &plugins.DataSourcePlugin{} ds := &m.DataSource{Url: "htttp://graphite:8080", Type: m.DS_GRAPHITE} - ctx := &middleware.Context{} + ctx := &m.ReqContext{} proxy := NewDataSourceProxy(ds, plugin, ctx, "/render") @@ -130,7 +129,7 @@ func TestDSRouteRule(t *testing.T) { Password: "password", } - ctx := &middleware.Context{} + ctx := &m.ReqContext{} proxy := NewDataSourceProxy(ds, plugin, ctx, "") requestUrl, _ := url.Parse("http://grafana.com/sub") @@ -160,7 +159,7 @@ func TestDSRouteRule(t *testing.T) { JsonData: json, } - ctx := &middleware.Context{} + ctx := &m.ReqContext{} proxy := NewDataSourceProxy(ds, plugin, ctx, "") requestUrl, _ := url.Parse("http://grafana.com/sub") @@ -186,7 +185,7 @@ func TestDSRouteRule(t *testing.T) { JsonData: json, } - ctx := &middleware.Context{} + ctx := &m.ReqContext{} proxy := NewDataSourceProxy(ds, plugin, ctx, "") requestUrl, _ := url.Parse("http://grafana.com/sub") diff --git 
a/pkg/api/pluginproxy/pluginproxy.go b/pkg/api/pluginproxy/pluginproxy.go index 59138884228..eb78250838a 100644 --- a/pkg/api/pluginproxy/pluginproxy.go +++ b/pkg/api/pluginproxy/pluginproxy.go @@ -9,7 +9,6 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/util" @@ -38,7 +37,7 @@ func getHeaders(route *plugins.AppPluginRoute, orgId int64, appId string) (http. return result, err } -func NewApiPluginProxy(ctx *middleware.Context, proxyPath string, route *plugins.AppPluginRoute, appId string) *httputil.ReverseProxy { +func NewApiPluginProxy(ctx *m.ReqContext, proxyPath string, route *plugins.AppPluginRoute, appId string) *httputil.ReverseProxy { targetUrl, _ := url.Parse(route.Url) director := func(req *http.Request) { diff --git a/pkg/api/plugins.go b/pkg/api/plugins.go index 0483b624a30..bc38f4a7775 100644 --- a/pkg/api/plugins.go +++ b/pkg/api/plugins.go @@ -5,13 +5,12 @@ import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/plugins" "github.com/grafana/grafana/pkg/setting" ) -func GetPluginList(c *middleware.Context) Response { +func GetPluginList(c *m.ReqContext) Response { typeFilter := c.Query("type") enabledFilter := c.Query("enabled") embeddedFilter := c.Query("embedded") @@ -79,7 +78,7 @@ func GetPluginList(c *middleware.Context) Response { return Json(200, result) } -func GetPluginSettingById(c *middleware.Context) Response { +func GetPluginSettingById(c *m.ReqContext) Response { pluginId := c.Params(":pluginId") if def, exists := plugins.Plugins[pluginId]; !exists { @@ -116,7 +115,7 @@ func GetPluginSettingById(c *middleware.Context) Response { } } -func UpdatePluginSetting(c *middleware.Context, cmd 
m.UpdatePluginSettingCmd) Response { +func UpdatePluginSetting(c *m.ReqContext, cmd m.UpdatePluginSettingCmd) Response { pluginId := c.Params(":pluginId") cmd.OrgId = c.OrgId @@ -133,7 +132,7 @@ func UpdatePluginSetting(c *middleware.Context, cmd m.UpdatePluginSettingCmd) Re return ApiSuccess("Plugin settings updated") } -func GetPluginDashboards(c *middleware.Context) Response { +func GetPluginDashboards(c *m.ReqContext) Response { pluginId := c.Params(":pluginId") if list, err := plugins.GetPluginDashboards(c.OrgId, pluginId); err != nil { @@ -147,7 +146,7 @@ func GetPluginDashboards(c *middleware.Context) Response { } } -func GetPluginMarkdown(c *middleware.Context) Response { +func GetPluginMarkdown(c *m.ReqContext) Response { pluginId := c.Params(":pluginId") name := c.Params(":name") @@ -164,11 +163,11 @@ func GetPluginMarkdown(c *middleware.Context) Response { } } -func ImportDashboard(c *middleware.Context, apiCmd dtos.ImportDashboardCommand) Response { +func ImportDashboard(c *m.ReqContext, apiCmd dtos.ImportDashboardCommand) Response { cmd := plugins.ImportDashboardCommand{ OrgId: c.OrgId, - UserId: c.UserId, + User: c.SignedInUser, PluginId: apiCmd.PluginId, Path: apiCmd.Path, Inputs: apiCmd.Inputs, diff --git a/pkg/api/preferences.go b/pkg/api/preferences.go index 795b8994470..eb0ffa14b39 100644 --- a/pkg/api/preferences.go +++ b/pkg/api/preferences.go @@ -3,12 +3,11 @@ package api import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" ) // POST /api/preferences/set-home-dash -func SetHomeDashboard(c *middleware.Context, cmd m.SavePreferencesCommand) Response { +func SetHomeDashboard(c *m.ReqContext, cmd m.SavePreferencesCommand) Response { cmd.UserId = c.UserId cmd.OrgId = c.OrgId @@ -21,7 +20,7 @@ func SetHomeDashboard(c *middleware.Context, cmd m.SavePreferencesCommand) Respo } // GET /api/user/preferences -func 
GetUserPreferences(c *middleware.Context) Response { +func GetUserPreferences(c *m.ReqContext) Response { return getPreferencesFor(c.OrgId, c.UserId) } @@ -42,7 +41,7 @@ func getPreferencesFor(orgId int64, userId int64) Response { } // PUT /api/user/preferences -func UpdateUserPreferences(c *middleware.Context, dtoCmd dtos.UpdatePrefsCmd) Response { +func UpdateUserPreferences(c *m.ReqContext, dtoCmd dtos.UpdatePrefsCmd) Response { return updatePreferencesFor(c.OrgId, c.UserId, &dtoCmd) } @@ -63,11 +62,11 @@ func updatePreferencesFor(orgId int64, userId int64, dtoCmd *dtos.UpdatePrefsCmd } // GET /api/org/preferences -func GetOrgPreferences(c *middleware.Context) Response { +func GetOrgPreferences(c *m.ReqContext) Response { return getPreferencesFor(c.OrgId, 0) } // PUT /api/org/preferences -func UpdateOrgPreferences(c *middleware.Context, dtoCmd dtos.UpdatePrefsCmd) Response { +func UpdateOrgPreferences(c *m.ReqContext, dtoCmd dtos.UpdatePrefsCmd) Response { return updatePreferencesFor(c.OrgId, 0, &dtoCmd) } diff --git a/pkg/api/quota.go b/pkg/api/quota.go index d8585435430..f92acaf470f 100644 --- a/pkg/api/quota.go +++ b/pkg/api/quota.go @@ -2,12 +2,11 @@ package api import ( "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" ) -func GetOrgQuotas(c *middleware.Context) Response { +func GetOrgQuotas(c *m.ReqContext) Response { if !setting.Quota.Enabled { return ApiError(404, "Quotas not enabled", nil) } @@ -20,7 +19,7 @@ func GetOrgQuotas(c *middleware.Context) Response { return Json(200, query.Result) } -func UpdateOrgQuota(c *middleware.Context, cmd m.UpdateOrgQuotaCmd) Response { +func UpdateOrgQuota(c *m.ReqContext, cmd m.UpdateOrgQuotaCmd) Response { if !setting.Quota.Enabled { return ApiError(404, "Quotas not enabled", nil) } @@ -37,7 +36,7 @@ func UpdateOrgQuota(c *middleware.Context, cmd m.UpdateOrgQuotaCmd) Response { return 
ApiSuccess("Organization quota updated") } -func GetUserQuotas(c *middleware.Context) Response { +func GetUserQuotas(c *m.ReqContext) Response { if !setting.Quota.Enabled { return ApiError(404, "Quotas not enabled", nil) } @@ -50,7 +49,7 @@ func GetUserQuotas(c *middleware.Context) Response { return Json(200, query.Result) } -func UpdateUserQuota(c *middleware.Context, cmd m.UpdateUserQuotaCmd) Response { +func UpdateUserQuota(c *m.ReqContext, cmd m.UpdateUserQuotaCmd) Response { if !setting.Quota.Enabled { return ApiError(404, "Quotas not enabled", nil) } diff --git a/pkg/api/render.go b/pkg/api/render.go index 65733cfab15..6e948ed294c 100644 --- a/pkg/api/render.go +++ b/pkg/api/render.go @@ -5,11 +5,11 @@ import ( "net/http" "github.com/grafana/grafana/pkg/components/renderer" - "github.com/grafana/grafana/pkg/middleware" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/util" ) -func RenderToPng(c *middleware.Context) { +func RenderToPng(c *m.ReqContext) { queryReader, err := util.NewUrlQueryReader(c.Req.URL) if err != nil { c.Handle(400, "Render parameters error", err) diff --git a/pkg/api/search.go b/pkg/api/search.go index fee062a5599..c8a0a5592bb 100644 --- a/pkg/api/search.go +++ b/pkg/api/search.go @@ -5,21 +5,26 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/search" ) -func Search(c *middleware.Context) { +func Search(c *m.ReqContext) { query := c.Query("query") tags := c.QueryStrings("tag") starred := c.Query("starred") limit := c.QueryInt("limit") dashboardType := c.Query("type") + permission := m.PERMISSION_VIEW if limit == 0 { limit = 1000 } + if c.Query("permission") == "Edit" { + permission = m.PERMISSION_EDIT + } + dbids := make([]int64, 0) for _, id := range c.QueryStrings("dashboardIds") { dashboardId, err := strconv.ParseInt(id, 10, 64) @@ -46,6 
+51,7 @@ func Search(c *middleware.Context) { DashboardIds: dbids, Type: dashboardType, FolderIds: folderIds, + Permission: permission, } err := bus.Dispatch(&searchQuery) diff --git a/pkg/api/signup.go b/pkg/api/signup.go index 36ece023087..838d2f9c0af 100644 --- a/pkg/api/signup.go +++ b/pkg/api/signup.go @@ -5,14 +5,13 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/events" "github.com/grafana/grafana/pkg/metrics" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) // GET /api/user/signup/options -func GetSignUpOptions(c *middleware.Context) Response { +func GetSignUpOptions(c *m.ReqContext) Response { return Json(200, util.DynMap{ "verifyEmailEnabled": setting.VerifyEmailEnabled, "autoAssignOrg": setting.AutoAssignOrg, @@ -20,7 +19,7 @@ func GetSignUpOptions(c *middleware.Context) Response { } // POST /api/user/signup -func SignUp(c *middleware.Context, form dtos.SignUpForm) Response { +func SignUp(c *m.ReqContext, form dtos.SignUpForm) Response { if !setting.AllowUserSignUp { return ApiError(401, "User signup is disabled", nil) } @@ -52,7 +51,7 @@ func SignUp(c *middleware.Context, form dtos.SignUpForm) Response { return Json(200, util.DynMap{"status": "SignUpCreated"}) } -func SignUpStep2(c *middleware.Context, form dtos.SignUpStep2Form) Response { +func SignUpStep2(c *m.ReqContext, form dtos.SignUpStep2Form) Response { if !setting.AllowUserSignUp { return ApiError(401, "User signup is disabled", nil) } diff --git a/pkg/api/stars.go b/pkg/api/stars.go index c6f9d037eba..5361f64eea6 100644 --- a/pkg/api/stars.go +++ b/pkg/api/stars.go @@ -2,11 +2,10 @@ package api import ( "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" ) -func StarDashboard(c *middleware.Context) Response { +func StarDashboard(c *m.ReqContext) Response { if 
!c.IsSignedIn { return ApiError(412, "You need to sign in to star dashboards", nil) } @@ -24,7 +23,7 @@ func StarDashboard(c *middleware.Context) Response { return ApiSuccess("Dashboard starred!") } -func UnstarDashboard(c *middleware.Context) Response { +func UnstarDashboard(c *m.ReqContext) Response { cmd := m.UnstarDashboardCommand{UserId: c.UserId, DashboardId: c.ParamsInt64(":id")} diff --git a/pkg/api/team.go b/pkg/api/team.go index af537224d41..316adfc4e7c 100644 --- a/pkg/api/team.go +++ b/pkg/api/team.go @@ -3,13 +3,12 @@ package api import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/util" ) // POST /api/teams -func CreateTeam(c *middleware.Context, cmd m.CreateTeamCommand) Response { +func CreateTeam(c *m.ReqContext, cmd m.CreateTeamCommand) Response { cmd.OrgId = c.OrgId if err := bus.Dispatch(&cmd); err != nil { if err == m.ErrTeamNameTaken { @@ -25,7 +24,8 @@ func CreateTeam(c *middleware.Context, cmd m.CreateTeamCommand) Response { } // PUT /api/teams/:teamId -func UpdateTeam(c *middleware.Context, cmd m.UpdateTeamCommand) Response { +func UpdateTeam(c *m.ReqContext, cmd m.UpdateTeamCommand) Response { + cmd.OrgId = c.OrgId cmd.Id = c.ParamsInt64(":teamId") if err := bus.Dispatch(&cmd); err != nil { if err == m.ErrTeamNameTaken { @@ -38,8 +38,8 @@ func UpdateTeam(c *middleware.Context, cmd m.UpdateTeamCommand) Response { } // DELETE /api/teams/:teamId -func DeleteTeamById(c *middleware.Context) Response { - if err := bus.Dispatch(&m.DeleteTeamCommand{Id: c.ParamsInt64(":teamId")}); err != nil { +func DeleteTeamById(c *m.ReqContext) Response { + if err := bus.Dispatch(&m.DeleteTeamCommand{OrgId: c.OrgId, Id: c.ParamsInt64(":teamId")}); err != nil { if err == m.ErrTeamNotFound { return ApiError(404, "Failed to delete Team. 
ID not found", nil) } @@ -49,7 +49,7 @@ func DeleteTeamById(c *middleware.Context) Response { } // GET /api/teams/search -func SearchTeams(c *middleware.Context) Response { +func SearchTeams(c *m.ReqContext) Response { perPage := c.QueryInt("perpage") if perPage <= 0 { perPage = 1000 @@ -60,11 +60,11 @@ func SearchTeams(c *middleware.Context) Response { } query := m.SearchTeamsQuery{ + OrgId: c.OrgId, Query: c.Query("query"), Name: c.Query("name"), Page: page, Limit: perPage, - OrgId: c.OrgId, } if err := bus.Dispatch(&query); err != nil { @@ -82,8 +82,8 @@ func SearchTeams(c *middleware.Context) Response { } // GET /api/teams/:teamId -func GetTeamById(c *middleware.Context) Response { - query := m.GetTeamByIdQuery{Id: c.ParamsInt64(":teamId")} +func GetTeamById(c *m.ReqContext) Response { + query := m.GetTeamByIdQuery{OrgId: c.OrgId, Id: c.ParamsInt64(":teamId")} if err := bus.Dispatch(&query); err != nil { if err == m.ErrTeamNotFound { diff --git a/pkg/api/team_members.go b/pkg/api/team_members.go index 412e142edb7..4fb05b016e3 100644 --- a/pkg/api/team_members.go +++ b/pkg/api/team_members.go @@ -3,14 +3,13 @@ package api import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/util" ) // GET /api/teams/:teamId/members -func GetTeamMembers(c *middleware.Context) Response { - query := m.GetTeamMembersQuery{TeamId: c.ParamsInt64(":teamId")} +func GetTeamMembers(c *m.ReqContext) Response { + query := m.GetTeamMembersQuery{OrgId: c.OrgId, TeamId: c.ParamsInt64(":teamId")} if err := bus.Dispatch(&query); err != nil { return ApiError(500, "Failed to get Team Members", err) @@ -24,14 +23,19 @@ func GetTeamMembers(c *middleware.Context) Response { } // POST /api/teams/:teamId/members -func AddTeamMember(c *middleware.Context, cmd m.AddTeamMemberCommand) Response { +func AddTeamMember(c *m.ReqContext, cmd 
m.AddTeamMemberCommand) Response { cmd.TeamId = c.ParamsInt64(":teamId") cmd.OrgId = c.OrgId if err := bus.Dispatch(&cmd); err != nil { - if err == m.ErrTeamMemberAlreadyAdded { - return ApiError(400, "User is already added to this team", err) + if err == m.ErrTeamNotFound { + return ApiError(404, "Team not found", nil) } + + if err == m.ErrTeamMemberAlreadyAdded { + return ApiError(400, "User is already added to this team", nil) + } + return ApiError(500, "Failed to add Member to Team", err) } @@ -41,8 +45,16 @@ func AddTeamMember(c *middleware.Context, cmd m.AddTeamMemberCommand) Response { } // DELETE /api/teams/:teamId/members/:userId -func RemoveTeamMember(c *middleware.Context) Response { - if err := bus.Dispatch(&m.RemoveTeamMemberCommand{TeamId: c.ParamsInt64(":teamId"), UserId: c.ParamsInt64(":userId")}); err != nil { +func RemoveTeamMember(c *m.ReqContext) Response { + if err := bus.Dispatch(&m.RemoveTeamMemberCommand{OrgId: c.OrgId, TeamId: c.ParamsInt64(":teamId"), UserId: c.ParamsInt64(":userId")}); err != nil { + if err == m.ErrTeamNotFound { + return ApiError(404, "Team not found", nil) + } + + if err == m.ErrTeamMemberNotFound { + return ApiError(404, "Team member not found", nil) + } + return ApiError(500, "Failed to remove Member from Team", err) } return ApiSuccess("Team Member removed") diff --git a/pkg/api/user.go b/pkg/api/user.go index 9a041d30272..b8483316b9d 100644 --- a/pkg/api/user.go +++ b/pkg/api/user.go @@ -3,19 +3,18 @@ package api import ( "github.com/grafana/grafana/pkg/api/dtos" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/middleware" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) // GET /api/user (current authenticated user) -func GetSignedInUser(c *middleware.Context) Response { +func GetSignedInUser(c *m.ReqContext) Response { return getUserUserProfile(c.UserId) } // GET /api/users/:id -func GetUserById(c *middleware.Context) 
Response { +func GetUserById(c *m.ReqContext) Response { return getUserUserProfile(c.ParamsInt64(":id")) } @@ -33,7 +32,7 @@ func getUserUserProfile(userId int64) Response { } // GET /api/users/lookup -func GetUserByLoginOrEmail(c *middleware.Context) Response { +func GetUserByLoginOrEmail(c *m.ReqContext) Response { query := m.GetUserByLoginQuery{LoginOrEmail: c.Query("loginOrEmail")} if err := bus.Dispatch(&query); err != nil { if err == m.ErrUserNotFound { @@ -55,7 +54,7 @@ func GetUserByLoginOrEmail(c *middleware.Context) Response { } // POST /api/user -func UpdateSignedInUser(c *middleware.Context, cmd m.UpdateUserCommand) Response { +func UpdateSignedInUser(c *m.ReqContext, cmd m.UpdateUserCommand) Response { if setting.AuthProxyEnabled { if setting.AuthProxyHeaderProperty == "email" && cmd.Email != c.Email { return ApiError(400, "Not allowed to change email when auth proxy is using email property", nil) @@ -69,13 +68,13 @@ func UpdateSignedInUser(c *middleware.Context, cmd m.UpdateUserCommand) Response } // POST /api/users/:id -func UpdateUser(c *middleware.Context, cmd m.UpdateUserCommand) Response { +func UpdateUser(c *m.ReqContext, cmd m.UpdateUserCommand) Response { cmd.UserId = c.ParamsInt64(":id") return handleUpdateUser(cmd) } //POST /api/users/:id/using/:orgId -func UpdateUserActiveOrg(c *middleware.Context) Response { +func UpdateUserActiveOrg(c *m.ReqContext) Response { userId := c.ParamsInt64(":id") orgId := c.ParamsInt64(":orgId") @@ -108,12 +107,12 @@ func handleUpdateUser(cmd m.UpdateUserCommand) Response { } // GET /api/user/orgs -func GetSignedInUserOrgList(c *middleware.Context) Response { +func GetSignedInUserOrgList(c *m.ReqContext) Response { return getUserOrgList(c.UserId) } // GET /api/user/:id/orgs -func GetUserOrgList(c *middleware.Context) Response { +func GetUserOrgList(c *m.ReqContext) Response { return getUserOrgList(c.ParamsInt64(":id")) } @@ -146,7 +145,7 @@ func validateUsingOrg(userId int64, orgId int64) bool { } // POST 
/api/user/using/:id -func UserSetUsingOrg(c *middleware.Context) Response { +func UserSetUsingOrg(c *m.ReqContext) Response { orgId := c.ParamsInt64(":id") if !validateUsingOrg(c.UserId, orgId) { @@ -163,7 +162,7 @@ func UserSetUsingOrg(c *middleware.Context) Response { } // GET /profile/switch-org/:id -func ChangeActiveOrgAndRedirectToHome(c *middleware.Context) { +func ChangeActiveOrgAndRedirectToHome(c *m.ReqContext) { orgId := c.ParamsInt64(":id") if !validateUsingOrg(c.UserId, orgId) { @@ -179,7 +178,7 @@ func ChangeActiveOrgAndRedirectToHome(c *middleware.Context) { c.Redirect(setting.AppSubUrl + "/") } -func ChangeUserPassword(c *middleware.Context, cmd m.ChangeUserPasswordCommand) Response { +func ChangeUserPassword(c *m.ReqContext, cmd m.ChangeUserPasswordCommand) Response { if setting.LdapEnabled || setting.AuthProxyEnabled { return ApiError(400, "Not allowed to change password when LDAP or Auth Proxy is enabled", nil) } @@ -211,7 +210,7 @@ func ChangeUserPassword(c *middleware.Context, cmd m.ChangeUserPasswordCommand) } // GET /api/users -func SearchUsers(c *middleware.Context) Response { +func SearchUsers(c *m.ReqContext) Response { query, err := searchUser(c) if err != nil { return ApiError(500, "Failed to fetch users", err) @@ -221,7 +220,7 @@ func SearchUsers(c *middleware.Context) Response { } // GET /api/users/search -func SearchUsersWithPaging(c *middleware.Context) Response { +func SearchUsersWithPaging(c *m.ReqContext) Response { query, err := searchUser(c) if err != nil { return ApiError(500, "Failed to fetch users", err) @@ -230,7 +229,7 @@ func SearchUsersWithPaging(c *middleware.Context) Response { return Json(200, query.Result) } -func searchUser(c *middleware.Context) (*m.SearchUsersQuery, error) { +func searchUser(c *m.ReqContext) (*m.SearchUsersQuery, error) { perPage := c.QueryInt("perpage") if perPage <= 0 { perPage = 1000 @@ -258,7 +257,7 @@ func searchUser(c *middleware.Context) (*m.SearchUsersQuery, error) { return query, nil } 
-func SetHelpFlag(c *middleware.Context) Response { +func SetHelpFlag(c *m.ReqContext) Response { flag := c.ParamsInt64(":id") bitmask := &c.HelpFlags1 @@ -276,7 +275,7 @@ func SetHelpFlag(c *middleware.Context) Response { return Json(200, &util.DynMap{"message": "Help flag set", "helpFlags1": cmd.HelpFlags1}) } -func ClearHelpFlags(c *middleware.Context) Response { +func ClearHelpFlags(c *m.ReqContext) Response { cmd := m.SetUserHelpFlagCommand{ UserId: c.UserId, HelpFlags1: m.HelpFlags1(0), diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index a1b249d9c81..f40bc9c081b 100644 --- a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -94,7 +94,7 @@ func InstallPlugin(pluginName, version string, c CommandLine) error { res, _ := s.ReadPlugin(pluginFolder, pluginName) for _, v := range res.Dependencies.Plugins { - InstallPlugin(v.Id, version, c) + InstallPlugin(v.Id, "", c) logger.Infof("Installed dependency: %v ✔\n", v.Id) } diff --git a/pkg/cmd/grafana-server/server.go b/pkg/cmd/grafana-server/server.go index b84c3d4e3d6..8ed3196e4ad 100644 --- a/pkg/cmd/grafana-server/server.go +++ b/pkg/cmd/grafana-server/server.go @@ -62,17 +62,22 @@ func (g *GrafanaServerImpl) Start() error { search.Init() login.Init() social.NewOAuthService() - plugins.Init() + + pluginManager, err := plugins.NewPluginManager(g.context) + if err != nil { + return fmt.Errorf("Failed to start plugins. error: %v", err) + } + g.childRoutines.Go(func() error { return pluginManager.Run(g.context) }) if err := provisioning.Init(g.context, setting.HomePath, setting.Cfg); err != nil { return fmt.Errorf("Failed to provision Grafana from config. error: %v", err) } - closer, err := tracing.Init(setting.Cfg) + tracingCloser, err := tracing.Init(setting.Cfg) if err != nil { return fmt.Errorf("Tracing settings is not valid. 
error: %v", err) } - defer closer.Close() + defer tracingCloser.Close() // init alerting if setting.AlertingEnabled && setting.ExecuteAlerts { diff --git a/pkg/components/imguploader/imguploader.go b/pkg/components/imguploader/imguploader.go index 383d2c6d311..52a31f9f606 100644 --- a/pkg/components/imguploader/imguploader.go +++ b/pkg/components/imguploader/imguploader.go @@ -88,6 +88,8 @@ func NewImageUploader() (ImageUploader, error) { container_name := azureBlobSec.Key("container_name").MustString("") return NewAzureBlobUploader(account_name, account_key, container_name), nil + case "local": + return NewLocalImageUploader() } if setting.ImageUploadProvider != "" { diff --git a/pkg/components/imguploader/imguploader_test.go b/pkg/components/imguploader/imguploader_test.go index d5008c9ae9f..b0311dac975 100644 --- a/pkg/components/imguploader/imguploader_test.go +++ b/pkg/components/imguploader/imguploader_test.go @@ -143,5 +143,23 @@ func TestImageUploaderFactory(t *testing.T) { So(original.container_name, ShouldEqual, "container_name") }) }) + + Convey("Local uploader", func() { + var err error + + setting.NewConfigContext(&setting.CommandLineArgs{ + HomePath: "../../../", + }) + + setting.ImageUploadProvider = "local" + + uploader, err := NewImageUploader() + + So(err, ShouldBeNil) + original, ok := uploader.(*LocalUploader) + + So(ok, ShouldBeTrue) + So(original, ShouldNotBeNil) + }) }) } diff --git a/pkg/components/imguploader/localuploader.go b/pkg/components/imguploader/localuploader.go new file mode 100644 index 00000000000..022d67122a7 --- /dev/null +++ b/pkg/components/imguploader/localuploader.go @@ -0,0 +1,22 @@ +package imguploader + +import ( + "context" + "path" + "path/filepath" + + "github.com/grafana/grafana/pkg/setting" +) + +type LocalUploader struct { +} + +func (u *LocalUploader) Upload(ctx context.Context, imageOnDiskPath string) (string, error) { + filename := filepath.Base(imageOnDiskPath) + image_url := 
setting.ToAbsUrl(path.Join("public/img/attachments", filename)) + return image_url, nil +} + +func NewLocalImageUploader() (*LocalUploader, error) { + return &LocalUploader{}, nil +} diff --git a/pkg/components/imguploader/localuploader_test.go b/pkg/components/imguploader/localuploader_test.go new file mode 100644 index 00000000000..0d8c0df0e57 --- /dev/null +++ b/pkg/components/imguploader/localuploader_test.go @@ -0,0 +1,18 @@ +package imguploader + +import ( + "context" + "testing" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestUploadToLocal(t *testing.T) { + Convey("[Integration test] for external_image_store.local", t, func() { + localUploader, _ := NewLocalImageUploader() + path, err := localUploader.Upload(context.Background(), "../../../public/img/logo_transparent_400x.png") + + So(err, ShouldBeNil) + So(path, ShouldContainSubstring, "/public/img/attachments") + }) +} diff --git a/pkg/components/renderer/renderer.go b/pkg/components/renderer/renderer.go index 25d77557342..313f7892707 100644 --- a/pkg/components/renderer/renderer.go +++ b/pkg/components/renderer/renderer.go @@ -91,9 +91,15 @@ func RenderToPng(params *RenderOpts) (string, error) { timeout = 15 } + phantomDebugArg := "--debug=false" + if log.GetLogLevelFor("png-renderer") >= log.LvlDebug { + phantomDebugArg = "--debug=true" + } + cmdArgs := []string{ "--ignore-ssl-errors=true", "--web-security=false", + phantomDebugArg, scriptPath, "url=" + url, "width=" + params.Width, @@ -109,15 +115,13 @@ func RenderToPng(params *RenderOpts) (string, error) { } cmd := exec.Command(binPath, cmdArgs...) 
- stdout, err := cmd.StdoutPipe() + output, err := cmd.StdoutPipe() if err != nil { + rendererLog.Error("Could not acquire stdout pipe", err) return "", err } - stderr, err := cmd.StderrPipe() - if err != nil { - return "", err - } + cmd.Stderr = cmd.Stdout if params.Timezone != "" { baseEnviron := os.Environ() @@ -126,11 +130,12 @@ func RenderToPng(params *RenderOpts) (string, error) { err = cmd.Start() if err != nil { + rendererLog.Error("Could not start command", err) return "", err } - go io.Copy(os.Stdout, stdout) - go io.Copy(os.Stdout, stderr) + logWriter := log.NewLogWriter(rendererLog, log.LvlDebug, "[phantom] ") + go io.Copy(logWriter, output) done := make(chan error) go func() { diff --git a/pkg/log/log.go b/pkg/log/log.go index 9b3f8b27071..0e6874e1b4b 100644 --- a/pkg/log/log.go +++ b/pkg/log/log.go @@ -21,6 +21,7 @@ import ( var Root log15.Logger var loggersToClose []DisposableHandler +var filters map[string]log15.Lvl func init() { loggersToClose = make([]DisposableHandler, 0) @@ -114,6 +115,25 @@ func Close() { loggersToClose = make([]DisposableHandler, 0) } +func GetLogLevelFor(name string) Lvl { + if level, ok := filters[name]; ok { + switch level { + case log15.LvlWarn: + return LvlWarn + case log15.LvlInfo: + return LvlInfo + case log15.LvlError: + return LvlError + case log15.LvlCrit: + return LvlCrit + default: + return LvlDebug + } + } + + return LvlInfo +} + var logLevels = map[string]log15.Lvl{ "trace": log15.LvlDebug, "debug": log15.LvlDebug, @@ -187,7 +207,7 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) { // Log level. 
_, level := getLogLevelFromConfig("log."+mode, defaultLevelName, cfg) - modeFilters := getFilters(util.SplitString(sec.Key("filters").String())) + filters := getFilters(util.SplitString(sec.Key("filters").String())) format := getLogFormat(sec.Key("format").MustString("")) var handler log15.Handler @@ -219,12 +239,12 @@ func ReadLoggingConfig(modes []string, logsPath string, cfg *ini.File) { } for key, value := range defaultFilters { - if _, exist := modeFilters[key]; !exist { - modeFilters[key] = value + if _, exist := filters[key]; !exist { + filters[key] = value } } - handler = LogFilterHandler(level, modeFilters, handler) + handler = LogFilterHandler(level, filters, handler) handlers = append(handlers, handler) } @@ -236,8 +256,8 @@ func LogFilterHandler(maxLevel log15.Lvl, filters map[string]log15.Lvl, h log15. if len(filters) > 0 { for i := 0; i < len(r.Ctx); i += 2 { - key := r.Ctx[i].(string) - if key == "logger" { + key, ok := r.Ctx[i].(string) + if ok && key == "logger" { loggerName, strOk := r.Ctx[i+1].(string) if strOk { if filterLevel, ok := filters[loggerName]; ok { diff --git a/pkg/log/log_writer.go b/pkg/log/log_writer.go new file mode 100644 index 00000000000..2ff401a7f0a --- /dev/null +++ b/pkg/log/log_writer.go @@ -0,0 +1,39 @@ +package log + +import ( + "io" + "strings" +) + +type logWriterImpl struct { + log Logger + level Lvl + prefix string +} + +func NewLogWriter(log Logger, level Lvl, prefix string) io.Writer { + return &logWriterImpl{ + log: log, + level: level, + prefix: prefix, + } +} + +func (l *logWriterImpl) Write(p []byte) (n int, err error) { + message := l.prefix + strings.TrimSpace(string(p)) + + switch l.level { + case LvlCrit: + l.log.Crit(message) + case LvlError: + l.log.Error(message) + case LvlWarn: + l.log.Warn(message) + case LvlInfo: + l.log.Info(message) + default: + l.log.Debug(message) + } + + return len(p), nil +} diff --git a/pkg/log/log_writer_test.go b/pkg/log/log_writer_test.go new file mode 100644 index 
00000000000..4537b4d6100 --- /dev/null +++ b/pkg/log/log_writer_test.go @@ -0,0 +1,116 @@ +package log + +import ( + "testing" + + "github.com/inconshreveable/log15" + . "github.com/smartystreets/goconvey/convey" +) + +type FakeLogger struct { + debug string + info string + warn string + err string + crit string +} + +func (f *FakeLogger) New(ctx ...interface{}) log15.Logger { + return nil +} + +func (f *FakeLogger) Debug(msg string, ctx ...interface{}) { + f.debug = msg +} + +func (f *FakeLogger) Info(msg string, ctx ...interface{}) { + f.info = msg +} + +func (f *FakeLogger) Warn(msg string, ctx ...interface{}) { + f.warn = msg +} + +func (f *FakeLogger) Error(msg string, ctx ...interface{}) { + f.err = msg +} + +func (f *FakeLogger) Crit(msg string, ctx ...interface{}) { + f.crit = msg +} + +func (f *FakeLogger) GetHandler() log15.Handler { + return nil +} + +func (f *FakeLogger) SetHandler(l log15.Handler) {} + +func TestLogWriter(t *testing.T) { + Convey("When writing to a LogWriter", t, func() { + Convey("Should write using the correct level [crit]", func() { + fake := &FakeLogger{} + + crit := NewLogWriter(fake, LvlCrit, "") + n, err := crit.Write([]byte("crit")) + + So(n, ShouldEqual, 4) + So(err, ShouldBeNil) + So(fake.crit, ShouldEqual, "crit") + }) + + Convey("Should write using the correct level [error]", func() { + fake := &FakeLogger{} + + crit := NewLogWriter(fake, LvlError, "") + n, err := crit.Write([]byte("error")) + + So(n, ShouldEqual, 5) + So(err, ShouldBeNil) + So(fake.err, ShouldEqual, "error") + }) + + Convey("Should write using the correct level [warn]", func() { + fake := &FakeLogger{} + + crit := NewLogWriter(fake, LvlWarn, "") + n, err := crit.Write([]byte("warn")) + + So(n, ShouldEqual, 4) + So(err, ShouldBeNil) + So(fake.warn, ShouldEqual, "warn") + }) + + Convey("Should write using the correct level [info]", func() { + fake := &FakeLogger{} + + crit := NewLogWriter(fake, LvlInfo, "") + n, err := crit.Write([]byte("info")) + + So(n, 
ShouldEqual, 4) + So(err, ShouldBeNil) + So(fake.info, ShouldEqual, "info") + }) + + Convey("Should write using the correct level [debug]", func() { + fake := &FakeLogger{} + + crit := NewLogWriter(fake, LvlDebug, "") + n, err := crit.Write([]byte("debug")) + + So(n, ShouldEqual, 5) + So(err, ShouldBeNil) + So(fake.debug, ShouldEqual, "debug") + }) + + Convey("Should prefix the output with the prefix", func() { + fake := &FakeLogger{} + + crit := NewLogWriter(fake, LvlDebug, "prefix") + n, err := crit.Write([]byte("debug")) + + So(n, ShouldEqual, 5) // n is how much of input consumed + So(err, ShouldBeNil) + So(fake.debug, ShouldEqual, "prefixdebug") + }) + }) +} diff --git a/pkg/login/auth.go b/pkg/login/auth.go index 45561783e43..5527c7271d6 100644 --- a/pkg/login/auth.go +++ b/pkg/login/auth.go @@ -3,21 +3,20 @@ package login import ( "errors" - "crypto/subtle" "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/setting" - "github.com/grafana/grafana/pkg/util" ) var ( - ErrInvalidCredentials = errors.New("Invalid Username or Password") + ErrInvalidCredentials = errors.New("Invalid Username or Password") + ErrTooManyLoginAttempts = errors.New("Too many consecutive incorrect login attempts for user. 
Login for user temporarily blocked") ) type LoginUserQuery struct { - Username string - Password string - User *m.User + Username string + Password string + User *m.User + IpAddress string } func Init() { @@ -26,41 +25,31 @@ func Init() { } func AuthenticateUser(query *LoginUserQuery) error { - err := loginUsingGrafanaDB(query) - if err == nil || err != ErrInvalidCredentials { + if err := validateLoginAttempts(query.Username); err != nil { return err } - if setting.LdapEnabled { - for _, server := range LdapCfg.Servers { - author := NewLdapAuthenticator(server) - err = author.Login(query) - if err == nil || err != ErrInvalidCredentials { - return err - } + err := loginUsingGrafanaDB(query) + if err == nil || (err != m.ErrUserNotFound && err != ErrInvalidCredentials) { + return err + } + + ldapEnabled, ldapErr := loginUsingLdap(query) + if ldapEnabled { + if ldapErr == nil || ldapErr != ErrInvalidCredentials { + return ldapErr } + + err = ldapErr + } + + if err == ErrInvalidCredentials { + saveInvalidLoginAttempt(query) + } + + if err == m.ErrUserNotFound { + return ErrInvalidCredentials } return err } - -func loginUsingGrafanaDB(query *LoginUserQuery) error { - userQuery := m.GetUserByLoginQuery{LoginOrEmail: query.Username} - - if err := bus.Dispatch(&userQuery); err != nil { - if err == m.ErrUserNotFound { - return ErrInvalidCredentials - } - return err - } - - user := userQuery.Result - - passwordHashed := util.EncodePassword(query.Password, user.Salt) - if subtle.ConstantTimeCompare([]byte(passwordHashed), []byte(user.Password)) != 1 { - return ErrInvalidCredentials - } - - query.User = user - return nil -} diff --git a/pkg/login/auth_test.go b/pkg/login/auth_test.go new file mode 100644 index 00000000000..59d3c8f2b33 --- /dev/null +++ b/pkg/login/auth_test.go @@ -0,0 +1,214 @@ +package login + +import ( + "errors" + "testing" + + m "github.com/grafana/grafana/pkg/models" + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestAuthenticateUser(t *testing.T) { + Convey("Authenticate user", t, func() { + authScenario("When a user authenticates having too many login attempts", func(sc *authScenarioContext) { + mockLoginAttemptValidation(ErrTooManyLoginAttempts, sc) + mockLoginUsingGrafanaDB(nil, sc) + mockLoginUsingLdap(true, nil, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldEqual, ErrTooManyLoginAttempts) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeFalse) + So(sc.ldapLoginWasCalled, ShouldBeFalse) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse) + }) + }) + + authScenario("When grafana user authenticate with valid credentials", func(sc *authScenarioContext) { + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(nil, sc) + mockLoginUsingLdap(true, ErrInvalidCredentials, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldEqual, nil) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeTrue) + So(sc.ldapLoginWasCalled, ShouldBeFalse) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse) + }) + }) + + authScenario("When grafana user authenticate and unexpected error occurs", func(sc *authScenarioContext) { + customErr := errors.New("custom") + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(customErr, sc) + mockLoginUsingLdap(true, ErrInvalidCredentials, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldEqual, customErr) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeTrue) + So(sc.ldapLoginWasCalled, ShouldBeFalse) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse) + }) + }) + + 
authScenario("When a non-existing grafana user authenticate and ldap disabled", func(sc *authScenarioContext) { + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) + mockLoginUsingLdap(false, nil, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldEqual, ErrInvalidCredentials) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeTrue) + So(sc.ldapLoginWasCalled, ShouldBeTrue) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse) + }) + }) + + authScenario("When a non-existing grafana user authenticate and invalid ldap credentials", func(sc *authScenarioContext) { + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) + mockLoginUsingLdap(true, ErrInvalidCredentials, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldEqual, ErrInvalidCredentials) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeTrue) + So(sc.ldapLoginWasCalled, ShouldBeTrue) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeTrue) + }) + }) + + authScenario("When a non-existing grafana user authenticate and valid ldap credentials", func(sc *authScenarioContext) { + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) + mockLoginUsingLdap(true, nil, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldBeNil) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeTrue) + So(sc.ldapLoginWasCalled, ShouldBeTrue) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse) + }) + }) + + authScenario("When a non-existing grafana user authenticate and ldap returns unexpected error", func(sc *authScenarioContext) { + 
customErr := errors.New("custom") + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(m.ErrUserNotFound, sc) + mockLoginUsingLdap(true, customErr, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldEqual, customErr) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeTrue) + So(sc.ldapLoginWasCalled, ShouldBeTrue) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeFalse) + }) + }) + + authScenario("When grafana user authenticate with invalid credentials and invalid ldap credentials", func(sc *authScenarioContext) { + mockLoginAttemptValidation(nil, sc) + mockLoginUsingGrafanaDB(ErrInvalidCredentials, sc) + mockLoginUsingLdap(true, ErrInvalidCredentials, sc) + mockSaveInvalidLoginAttempt(sc) + + err := AuthenticateUser(sc.loginUserQuery) + + Convey("it should result in", func() { + So(err, ShouldEqual, ErrInvalidCredentials) + So(sc.loginAttemptValidationWasCalled, ShouldBeTrue) + So(sc.grafanaLoginWasCalled, ShouldBeTrue) + So(sc.ldapLoginWasCalled, ShouldBeTrue) + So(sc.saveInvalidLoginAttemptWasCalled, ShouldBeTrue) + }) + }) + }) +} + +type authScenarioContext struct { + loginUserQuery *LoginUserQuery + grafanaLoginWasCalled bool + ldapLoginWasCalled bool + loginAttemptValidationWasCalled bool + saveInvalidLoginAttemptWasCalled bool +} + +type authScenarioFunc func(sc *authScenarioContext) + +func mockLoginUsingGrafanaDB(err error, sc *authScenarioContext) { + loginUsingGrafanaDB = func(query *LoginUserQuery) error { + sc.grafanaLoginWasCalled = true + return err + } +} + +func mockLoginUsingLdap(enabled bool, err error, sc *authScenarioContext) { + loginUsingLdap = func(query *LoginUserQuery) (bool, error) { + sc.ldapLoginWasCalled = true + return enabled, err + } +} + +func mockLoginAttemptValidation(err error, sc *authScenarioContext) { + validateLoginAttempts = func(username string) error { + 
sc.loginAttemptValidationWasCalled = true + return err + } +} + +func mockSaveInvalidLoginAttempt(sc *authScenarioContext) { + saveInvalidLoginAttempt = func(query *LoginUserQuery) { + sc.saveInvalidLoginAttemptWasCalled = true + } +} + +func authScenario(desc string, fn authScenarioFunc) { + Convey(desc, func() { + origLoginUsingGrafanaDB := loginUsingGrafanaDB + origLoginUsingLdap := loginUsingLdap + origValidateLoginAttempts := validateLoginAttempts + origSaveInvalidLoginAttempt := saveInvalidLoginAttempt + + sc := &authScenarioContext{ + loginUserQuery: &LoginUserQuery{ + Username: "user", + Password: "pwd", + IpAddress: "192.168.1.1:56433", + }, + } + + defer func() { + loginUsingGrafanaDB = origLoginUsingGrafanaDB + loginUsingLdap = origLoginUsingLdap + validateLoginAttempts = origValidateLoginAttempts + saveInvalidLoginAttempt = origSaveInvalidLoginAttempt + }() + + fn(sc) + }) +} diff --git a/pkg/login/brute_force_login_protection.go b/pkg/login/brute_force_login_protection.go new file mode 100644 index 00000000000..2ea93979c7a --- /dev/null +++ b/pkg/login/brute_force_login_protection.go @@ -0,0 +1,48 @@ +package login + +import ( + "time" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/setting" +) + +var ( + maxInvalidLoginAttempts int64 = 5 + loginAttemptsWindow time.Duration = time.Minute * 5 +) + +var validateLoginAttempts = func(username string) error { + if setting.DisableBruteForceLoginProtection { + return nil + } + + loginAttemptCountQuery := m.GetUserLoginAttemptCountQuery{ + Username: username, + Since: time.Now().Add(-loginAttemptsWindow), + } + + if err := bus.Dispatch(&loginAttemptCountQuery); err != nil { + return err + } + + if loginAttemptCountQuery.Result >= maxInvalidLoginAttempts { + return ErrTooManyLoginAttempts + } + + return nil +} + +var saveInvalidLoginAttempt = func(query *LoginUserQuery) { + if setting.DisableBruteForceLoginProtection { + return + } + + 
loginAttemptCommand := m.CreateLoginAttemptCommand{ + Username: query.Username, + IpAddress: query.IpAddress, + } + + bus.Dispatch(&loginAttemptCommand) +} diff --git a/pkg/login/brute_force_login_protection_test.go b/pkg/login/brute_force_login_protection_test.go new file mode 100644 index 00000000000..5375134ba88 --- /dev/null +++ b/pkg/login/brute_force_login_protection_test.go @@ -0,0 +1,125 @@ +package login + +import ( + "testing" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/setting" + . "github.com/smartystreets/goconvey/convey" +) + +func TestLoginAttemptsValidation(t *testing.T) { + Convey("Validate login attempts", t, func() { + Convey("Given brute force login protection enabled", func() { + setting.DisableBruteForceLoginProtection = false + + Convey("When user login attempt count equals max-1 ", func() { + withLoginAttempts(maxInvalidLoginAttempts - 1) + err := validateLoginAttempts("user") + + Convey("it should not result in error", func() { + So(err, ShouldBeNil) + }) + }) + + Convey("When user login attempt count equals max ", func() { + withLoginAttempts(maxInvalidLoginAttempts) + err := validateLoginAttempts("user") + + Convey("it should result in too many login attempts error", func() { + So(err, ShouldEqual, ErrTooManyLoginAttempts) + }) + }) + + Convey("When user login attempt count is greater than max ", func() { + withLoginAttempts(maxInvalidLoginAttempts + 5) + err := validateLoginAttempts("user") + + Convey("it should result in too many login attempts error", func() { + So(err, ShouldEqual, ErrTooManyLoginAttempts) + }) + }) + + Convey("When saving invalid login attempt", func() { + defer bus.ClearBusHandlers() + createLoginAttemptCmd := &m.CreateLoginAttemptCommand{} + + bus.AddHandler("test", func(cmd *m.CreateLoginAttemptCommand) error { + createLoginAttemptCmd = cmd + return nil + }) + + saveInvalidLoginAttempt(&LoginUserQuery{ + Username: "user", + Password: "pwd", 
+ IpAddress: "192.168.1.1:56433", + }) + + Convey("it should dispatch command", func() { + So(createLoginAttemptCmd, ShouldNotBeNil) + So(createLoginAttemptCmd.Username, ShouldEqual, "user") + So(createLoginAttemptCmd.IpAddress, ShouldEqual, "192.168.1.1:56433") + }) + }) + }) + + Convey("Given brute force login protection disabled", func() { + setting.DisableBruteForceLoginProtection = true + + Convey("When user login attempt count equals max-1 ", func() { + withLoginAttempts(maxInvalidLoginAttempts - 1) + err := validateLoginAttempts("user") + + Convey("it should not result in error", func() { + So(err, ShouldBeNil) + }) + }) + + Convey("When user login attempt count equals max ", func() { + withLoginAttempts(maxInvalidLoginAttempts) + err := validateLoginAttempts("user") + + Convey("it should not result in error", func() { + So(err, ShouldBeNil) + }) + }) + + Convey("When user login attempt count is greater than max ", func() { + withLoginAttempts(maxInvalidLoginAttempts + 5) + err := validateLoginAttempts("user") + + Convey("it should not result in error", func() { + So(err, ShouldBeNil) + }) + }) + + Convey("When saving invalid login attempt", func() { + defer bus.ClearBusHandlers() + createLoginAttemptCmd := (*m.CreateLoginAttemptCommand)(nil) + + bus.AddHandler("test", func(cmd *m.CreateLoginAttemptCommand) error { + createLoginAttemptCmd = cmd + return nil + }) + + saveInvalidLoginAttempt(&LoginUserQuery{ + Username: "user", + Password: "pwd", + IpAddress: "192.168.1.1:56433", + }) + + Convey("it should not dispatch command", func() { + So(createLoginAttemptCmd, ShouldBeNil) + }) + }) + }) + }) +} + +func withLoginAttempts(loginAttempts int64) { + bus.AddHandler("test", func(query *m.GetUserLoginAttemptCountQuery) error { + query.Result = loginAttempts + return nil + }) +} diff --git a/pkg/login/grafana_login.go b/pkg/login/grafana_login.go new file mode 100644 index 00000000000..677ba776e4f --- /dev/null +++ b/pkg/login/grafana_login.go @@ -0,0 +1,35 @@ 
+package login + +import ( + "crypto/subtle" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/util" +) + +var validatePassword = func(providedPassword string, userPassword string, userSalt string) error { + passwordHashed := util.EncodePassword(providedPassword, userSalt) + if subtle.ConstantTimeCompare([]byte(passwordHashed), []byte(userPassword)) != 1 { + return ErrInvalidCredentials + } + + return nil +} + +var loginUsingGrafanaDB = func(query *LoginUserQuery) error { + userQuery := m.GetUserByLoginQuery{LoginOrEmail: query.Username} + + if err := bus.Dispatch(&userQuery); err != nil { + return err + } + + user := userQuery.Result + + if err := validatePassword(query.Password, user.Password, user.Salt); err != nil { + return err + } + + query.User = user + return nil +} diff --git a/pkg/login/grafana_login_test.go b/pkg/login/grafana_login_test.go new file mode 100644 index 00000000000..88e52224113 --- /dev/null +++ b/pkg/login/grafana_login_test.go @@ -0,0 +1,139 @@ +package login + +import ( + "testing" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestGrafanaLogin(t *testing.T) { + Convey("Login using Grafana DB", t, func() { + grafanaLoginScenario("When login with non-existing user", func(sc *grafanaLoginScenarioContext) { + sc.withNonExistingUser() + err := loginUsingGrafanaDB(sc.loginUserQuery) + + Convey("it should result in user not found error", func() { + So(err, ShouldEqual, m.ErrUserNotFound) + }) + + Convey("it should not call password validation", func() { + So(sc.validatePasswordCalled, ShouldBeFalse) + }) + + Convey("it should not pupulate user object", func() { + So(sc.loginUserQuery.User, ShouldBeNil) + }) + }) + + grafanaLoginScenario("When login with invalid credentials", func(sc *grafanaLoginScenarioContext) { + sc.withInvalidPassword() + err := loginUsingGrafanaDB(sc.loginUserQuery) + + Convey("it should result in invalid credentials error", func() { + So(err, ShouldEqual, ErrInvalidCredentials) + }) + + Convey("it should call password validation", func() { + So(sc.validatePasswordCalled, ShouldBeTrue) + }) + + Convey("it should not pupulate user object", func() { + So(sc.loginUserQuery.User, ShouldBeNil) + }) + }) + + grafanaLoginScenario("When login with valid credentials", func(sc *grafanaLoginScenarioContext) { + sc.withValidCredentials() + err := loginUsingGrafanaDB(sc.loginUserQuery) + + Convey("it should not result in error", func() { + So(err, ShouldBeNil) + }) + + Convey("it should call password validation", func() { + So(sc.validatePasswordCalled, ShouldBeTrue) + }) + + Convey("it should pupulate user object", func() { + So(sc.loginUserQuery.User, ShouldNotBeNil) + So(sc.loginUserQuery.User.Login, ShouldEqual, sc.loginUserQuery.Username) + So(sc.loginUserQuery.User.Password, ShouldEqual, sc.loginUserQuery.Password) + }) + }) + }) +} + +type grafanaLoginScenarioContext struct { + loginUserQuery *LoginUserQuery + validatePasswordCalled bool +} + +type grafanaLoginScenarioFunc func(c *grafanaLoginScenarioContext) + +func 
grafanaLoginScenario(desc string, fn grafanaLoginScenarioFunc) { + Convey(desc, func() { + origValidatePassword := validatePassword + + sc := &grafanaLoginScenarioContext{ + loginUserQuery: &LoginUserQuery{ + Username: "user", + Password: "pwd", + IpAddress: "192.168.1.1:56433", + }, + validatePasswordCalled: false, + } + + defer func() { + validatePassword = origValidatePassword + }() + + fn(sc) + }) +} + +func mockPasswordValidation(valid bool, sc *grafanaLoginScenarioContext) { + validatePassword = func(providedPassword string, userPassword string, userSalt string) error { + sc.validatePasswordCalled = true + + if !valid { + return ErrInvalidCredentials + } + + return nil + } +} + +func (sc *grafanaLoginScenarioContext) getUserByLoginQueryReturns(user *m.User) { + bus.AddHandler("test", func(query *m.GetUserByLoginQuery) error { + if user == nil { + return m.ErrUserNotFound + } + + query.Result = user + return nil + }) +} + +func (sc *grafanaLoginScenarioContext) withValidCredentials() { + sc.getUserByLoginQueryReturns(&m.User{ + Id: 1, + Login: sc.loginUserQuery.Username, + Password: sc.loginUserQuery.Password, + Salt: "salt", + }) + mockPasswordValidation(true, sc) +} + +func (sc *grafanaLoginScenarioContext) withNonExistingUser() { + sc.getUserByLoginQueryReturns(nil) +} + +func (sc *grafanaLoginScenarioContext) withInvalidPassword() { + sc.getUserByLoginQueryReturns(&m.User{ + Password: sc.loginUserQuery.Password, + Salt: "salt", + }) + mockPasswordValidation(false, sc) +} diff --git a/pkg/login/ldap_login.go b/pkg/login/ldap_login.go new file mode 100644 index 00000000000..b74b69db036 --- /dev/null +++ b/pkg/login/ldap_login.go @@ -0,0 +1,21 @@ +package login + +import ( + "github.com/grafana/grafana/pkg/setting" +) + +var loginUsingLdap = func(query *LoginUserQuery) (bool, error) { + if !setting.LdapEnabled { + return false, nil + } + + for _, server := range LdapCfg.Servers { + author := NewLdapAuthenticator(server) + err := author.Login(query) + if err 
== nil || err != ErrInvalidCredentials { + return true, err + } + } + + return true, ErrInvalidCredentials +} diff --git a/pkg/login/ldap_login_test.go b/pkg/login/ldap_login_test.go new file mode 100644 index 00000000000..6af125566e8 --- /dev/null +++ b/pkg/login/ldap_login_test.go @@ -0,0 +1,172 @@ +package login + +import ( + "testing" + + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/setting" + . "github.com/smartystreets/goconvey/convey" +) + +func TestLdapLogin(t *testing.T) { + Convey("Login using ldap", t, func() { + Convey("Given ldap enabled and a server configured", func() { + setting.LdapEnabled = true + LdapCfg.Servers = append(LdapCfg.Servers, + &LdapServerConf{ + Host: "", + }) + + ldapLoginScenario("When login with invalid credentials", func(sc *ldapLoginScenarioContext) { + sc.withLoginResult(false) + enabled, err := loginUsingLdap(sc.loginUserQuery) + + Convey("it should return true", func() { + So(enabled, ShouldBeTrue) + }) + + Convey("it should return invalid credentials error", func() { + So(err, ShouldEqual, ErrInvalidCredentials) + }) + + Convey("it should call ldap login", func() { + So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeTrue) + }) + }) + + ldapLoginScenario("When login with valid credentials", func(sc *ldapLoginScenarioContext) { + sc.withLoginResult(true) + enabled, err := loginUsingLdap(sc.loginUserQuery) + + Convey("it should return true", func() { + So(enabled, ShouldBeTrue) + }) + + Convey("it should not return error", func() { + So(err, ShouldBeNil) + }) + + Convey("it should call ldap login", func() { + So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeTrue) + }) + }) + }) + + Convey("Given ldap enabled and no server configured", func() { + setting.LdapEnabled = true + LdapCfg.Servers = make([]*LdapServerConf, 0) + + ldapLoginScenario("When login", func(sc *ldapLoginScenarioContext) { + sc.withLoginResult(true) + enabled, err := loginUsingLdap(sc.loginUserQuery) + + Convey("it should return 
true", func() { + So(enabled, ShouldBeTrue) + }) + + Convey("it should return invalid credentials error", func() { + So(err, ShouldEqual, ErrInvalidCredentials) + }) + + Convey("it should not call ldap login", func() { + So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeFalse) + }) + }) + }) + + Convey("Given ldap disabled", func() { + setting.LdapEnabled = false + + ldapLoginScenario("When login", func(sc *ldapLoginScenarioContext) { + sc.withLoginResult(false) + enabled, err := loginUsingLdap(&LoginUserQuery{ + Username: "user", + Password: "pwd", + }) + + Convey("it should return false", func() { + So(enabled, ShouldBeFalse) + }) + + Convey("it should not return error", func() { + So(err, ShouldBeNil) + }) + + Convey("it should not call ldap login", func() { + So(sc.ldapAuthenticatorMock.loginCalled, ShouldBeFalse) + }) + }) + }) + }) +} + +func mockLdapAuthenticator(valid bool) *mockLdapAuther { + mock := &mockLdapAuther{ + validLogin: valid, + } + + NewLdapAuthenticator = func(server *LdapServerConf) ILdapAuther { + return mock + } + + return mock +} + +type mockLdapAuther struct { + validLogin bool + loginCalled bool +} + +func (a *mockLdapAuther) Login(query *LoginUserQuery) error { + a.loginCalled = true + + if !a.validLogin { + return ErrInvalidCredentials + } + + return nil +} + +func (a *mockLdapAuther) SyncSignedInUser(signedInUser *m.SignedInUser) error { + return nil +} + +func (a *mockLdapAuther) GetGrafanaUserFor(ldapUser *LdapUserInfo) (*m.User, error) { + return nil, nil +} + +func (a *mockLdapAuther) SyncOrgRoles(user *m.User, ldapUser *LdapUserInfo) error { + return nil +} + +type ldapLoginScenarioContext struct { + loginUserQuery *LoginUserQuery + ldapAuthenticatorMock *mockLdapAuther +} + +type ldapLoginScenarioFunc func(c *ldapLoginScenarioContext) + +func ldapLoginScenario(desc string, fn ldapLoginScenarioFunc) { + Convey(desc, func() { + origNewLdapAuthenticator := NewLdapAuthenticator + + sc := &ldapLoginScenarioContext{ + loginUserQuery: 
&LoginUserQuery{ + Username: "user", + Password: "pwd", + IpAddress: "192.168.1.1:56433", + }, + ldapAuthenticatorMock: &mockLdapAuther{}, + } + + defer func() { + NewLdapAuthenticator = origNewLdapAuthenticator + }() + + fn(sc) + }) +} + +func (sc *ldapLoginScenarioContext) withLoginResult(valid bool) { + sc.ldapAuthenticatorMock = mockLdapAuthenticator(valid) +} diff --git a/pkg/login/settings.go b/pkg/login/ldap_settings.go similarity index 100% rename from pkg/login/settings.go rename to pkg/login/ldap_settings.go diff --git a/pkg/metrics/graphitebridge/graphite.go b/pkg/metrics/graphitebridge/graphite.go index 8504a908d9a..68fb544fc7c 100644 --- a/pkg/metrics/graphitebridge/graphite.go +++ b/pkg/metrics/graphitebridge/graphite.go @@ -26,9 +26,10 @@ import ( "strings" "time" + "context" + "github.com/prometheus/common/expfmt" "github.com/prometheus/common/model" - "golang.org/x/net/context" dto "github.com/prometheus/client_model/go" diff --git a/pkg/metrics/metrics.go b/pkg/metrics/metrics.go index 4d7de98f2ea..4d4a11d0faa 100644 --- a/pkg/metrics/metrics.go +++ b/pkg/metrics/metrics.go @@ -379,6 +379,7 @@ func sendUsageStats() { metrics["stats.alerts.count"] = statsQuery.Result.Alerts metrics["stats.active_users.count"] = statsQuery.Result.ActiveUsers metrics["stats.datasources.count"] = statsQuery.Result.Datasources + metrics["stats.stars.count"] = statsQuery.Result.Stars dsStats := models.GetDataSourceStatsQuery{} if err := bus.Dispatch(&dsStats); err != nil { diff --git a/pkg/middleware/auth.go b/pkg/middleware/auth.go index be3415d990b..d6c377bc9ac 100644 --- a/pkg/middleware/auth.go +++ b/pkg/middleware/auth.go @@ -7,6 +7,7 @@ import ( "gopkg.in/macaron.v1" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" ) @@ -15,8 +16,8 @@ type AuthOptions struct { ReqSignedIn bool } -func getRequestUserId(c *Context) int64 { - userId := c.Session.Get(SESS_KEY_USERID) +func 
getRequestUserId(c *m.ReqContext) int64 { + userId := c.Session.Get(session.SESS_KEY_USERID) if userId != nil { return userId.(int64) @@ -25,7 +26,7 @@ func getRequestUserId(c *Context) int64 { return 0 } -func getApiKey(c *Context) string { +func getApiKey(c *m.ReqContext) string { header := c.Req.Header.Get("Authorization") parts := strings.SplitN(header, " ", 2) if len(parts) == 2 && parts[0] == "Bearer" { @@ -36,28 +37,28 @@ func getApiKey(c *Context) string { return "" } -func accessForbidden(c *Context) { +func accessForbidden(c *m.ReqContext) { if c.IsApiRequest() { c.JsonApiErr(403, "Permission denied", nil) return } - c.SetCookie("redirect_to", url.QueryEscape(setting.AppSubUrl+c.Req.RequestURI), 0, setting.AppSubUrl+"/") - c.Redirect(setting.AppSubUrl + "/login") + c.Redirect(setting.AppSubUrl + "/") } -func notAuthorized(c *Context) { +func notAuthorized(c *m.ReqContext) { if c.IsApiRequest() { c.JsonApiErr(401, "Unauthorized", nil) return } - c.SetCookie("redirect_to", url.QueryEscape(setting.AppSubUrl+c.Req.RequestURI), 0, setting.AppSubUrl+"/") + c.SetCookie("redirect_to", url.QueryEscape(setting.AppSubUrl+c.Req.RequestURI), 0, setting.AppSubUrl+"/", nil, false, true) + c.Redirect(setting.AppSubUrl + "/login") } func RoleAuth(roles ...m.RoleType) macaron.Handler { - return func(c *Context) { + return func(c *m.ReqContext) { ok := false for _, role := range roles { if role == c.OrgRole { @@ -72,7 +73,7 @@ func RoleAuth(roles ...m.RoleType) macaron.Handler { } func Auth(options *AuthOptions) macaron.Handler { - return func(c *Context) { + return func(c *m.ReqContext) { if !c.IsSignedIn && options.ReqSignedIn && !c.AllowAnonymous { notAuthorized(c) return diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go index 3247805ec09..4d2a7a98908 100644 --- a/pkg/middleware/auth_proxy.go +++ b/pkg/middleware/auth_proxy.go @@ -10,10 +10,11 @@ import ( "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/login" m 
"github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" ) -func initContextWithAuthProxy(ctx *Context, orgId int64) bool { +func initContextWithAuthProxy(ctx *m.ReqContext, orgId int64) bool { if !setting.AuthProxyEnabled { return false } @@ -58,7 +59,7 @@ func initContextWithAuthProxy(ctx *Context, orgId int64) bool { } // initialize session - if err := ctx.Session.Start(ctx); err != nil { + if err := ctx.Session.Start(ctx.Context); err != nil { log.Error(3, "Failed to start session", err) return false } @@ -66,12 +67,12 @@ func initContextWithAuthProxy(ctx *Context, orgId int64) bool { // Make sure that we cannot share a session between different users! if getRequestUserId(ctx) > 0 && getRequestUserId(ctx) != query.Result.UserId { // remove session - if err := ctx.Session.Destory(ctx); err != nil { + if err := ctx.Session.Destory(ctx.Context); err != nil { log.Error(3, "Failed to destroy session, err") } // initialize a new session - if err := ctx.Session.Start(ctx); err != nil { + if err := ctx.Session.Start(ctx.Context); err != nil { log.Error(3, "Failed to start session", err) } } @@ -89,17 +90,17 @@ func initContextWithAuthProxy(ctx *Context, orgId int64) bool { ctx.SignedInUser = query.Result ctx.IsSignedIn = true - ctx.Session.Set(SESS_KEY_USERID, ctx.UserId) + ctx.Session.Set(session.SESS_KEY_USERID, ctx.UserId) return true } -var syncGrafanaUserWithLdapUser = func(ctx *Context, query *m.GetSignedInUserQuery) error { +var syncGrafanaUserWithLdapUser = func(ctx *m.ReqContext, query *m.GetSignedInUserQuery) error { if setting.LdapEnabled { expireEpoch := time.Now().Add(time.Duration(-setting.AuthProxyLdapSyncTtl) * time.Minute).Unix() var lastLdapSync int64 - if lastLdapSyncInSession := ctx.Session.Get(SESS_KEY_LASTLDAPSYNC); lastLdapSyncInSession != nil { + if lastLdapSyncInSession := ctx.Session.Get(session.SESS_KEY_LASTLDAPSYNC); lastLdapSyncInSession != nil { lastLdapSync = 
lastLdapSyncInSession.(int64) } @@ -113,14 +114,14 @@ var syncGrafanaUserWithLdapUser = func(ctx *Context, query *m.GetSignedInUserQue } } - ctx.Session.Set(SESS_KEY_LASTLDAPSYNC, time.Now().Unix()) + ctx.Session.Set(session.SESS_KEY_LASTLDAPSYNC, time.Now().Unix()) } } return nil } -func checkAuthenticationProxy(ctx *Context, proxyHeaderValue string) error { +func checkAuthenticationProxy(ctx *m.ReqContext, proxyHeaderValue string) error { if len(strings.TrimSpace(setting.AuthProxyWhitelist)) > 0 { proxies := strings.Split(setting.AuthProxyWhitelist, ",") remoteAddrSplit := strings.Split(ctx.Req.RemoteAddr, ":") diff --git a/pkg/middleware/auth_proxy_test.go b/pkg/middleware/auth_proxy_test.go index 4da0f52bbcf..b3c011bd870 100644 --- a/pkg/middleware/auth_proxy_test.go +++ b/pkg/middleware/auth_proxy_test.go @@ -6,8 +6,10 @@ import ( "github.com/grafana/grafana/pkg/login" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" . 
"github.com/smartystreets/goconvey/convey" + "gopkg.in/macaron.v1" ) func TestAuthProxyWithLdapEnabled(t *testing.T) { @@ -29,45 +31,45 @@ func TestAuthProxyWithLdapEnabled(t *testing.T) { Convey("When session variable lastLdapSync not set, call syncSignedInUser and set lastLdapSync", func() { // arrange - session := mockSession{} - ctx := Context{Session: &session} - So(session.Get(SESS_KEY_LASTLDAPSYNC), ShouldBeNil) + sess := mockSession{} + ctx := m.ReqContext{Session: &sess} + So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldBeNil) // act syncGrafanaUserWithLdapUser(&ctx, &query) // assert So(mockLdapAuther.syncSignedInUserCalled, ShouldBeTrue) - So(session.Get(SESS_KEY_LASTLDAPSYNC), ShouldBeGreaterThan, 0) + So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldBeGreaterThan, 0) }) Convey("When session variable not expired, don't sync and don't change session var", func() { // arrange - session := mockSession{} - ctx := Context{Session: &session} + sess := mockSession{} + ctx := m.ReqContext{Session: &sess} now := time.Now().Unix() - session.Set(SESS_KEY_LASTLDAPSYNC, now) + sess.Set(session.SESS_KEY_LASTLDAPSYNC, now) // act syncGrafanaUserWithLdapUser(&ctx, &query) // assert - So(session.Get(SESS_KEY_LASTLDAPSYNC), ShouldEqual, now) + So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldEqual, now) So(mockLdapAuther.syncSignedInUserCalled, ShouldBeFalse) }) Convey("When lastldapsync is expired, session variable should be updated", func() { // arrange - session := mockSession{} - ctx := Context{Session: &session} + sess := mockSession{} + ctx := m.ReqContext{Session: &sess} expiredTime := time.Now().Add(time.Duration(-120) * time.Minute).Unix() - session.Set(SESS_KEY_LASTLDAPSYNC, expiredTime) + sess.Set(session.SESS_KEY_LASTLDAPSYNC, expiredTime) // act syncGrafanaUserWithLdapUser(&ctx, &query) // assert - So(session.Get(SESS_KEY_LASTLDAPSYNC), ShouldBeGreaterThan, expiredTime) + So(sess.Get(session.SESS_KEY_LASTLDAPSYNC), ShouldBeGreaterThan, expiredTime) 
So(mockLdapAuther.syncSignedInUserCalled, ShouldBeTrue) }) }) @@ -77,7 +79,7 @@ type mockSession struct { value interface{} } -func (s *mockSession) Start(c *Context) error { +func (s *mockSession) Start(c *macaron.Context) error { return nil } @@ -102,11 +104,11 @@ func (s *mockSession) Release() error { return nil } -func (s *mockSession) Destory(c *Context) error { +func (s *mockSession) Destory(c *macaron.Context) error { return nil } -func (s *mockSession) RegenerateId(c *Context) error { +func (s *mockSession) RegenerateId(c *macaron.Context) error { return nil } diff --git a/pkg/middleware/dashboard_redirect.go b/pkg/middleware/dashboard_redirect.go new file mode 100644 index 00000000000..cb76f042a0d --- /dev/null +++ b/pkg/middleware/dashboard_redirect.go @@ -0,0 +1,49 @@ +package middleware + +import ( + "fmt" + "strings" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "gopkg.in/macaron.v1" +) + +func getDashboardUrlBySlug(orgId int64, slug string) (string, error) { + query := m.GetDashboardQuery{Slug: slug, OrgId: orgId} + + if err := bus.Dispatch(&query); err != nil { + return "", m.ErrDashboardNotFound + } + + return m.GetDashboardUrl(query.Result.Uid, query.Result.Slug), nil +} + +func RedirectFromLegacyDashboardUrl() macaron.Handler { + return func(c *m.ReqContext) { + slug := c.Params("slug") + + if slug != "" { + if url, err := getDashboardUrlBySlug(c.OrgId, slug); err == nil { + url = fmt.Sprintf("%s?%s", url, c.Req.URL.RawQuery) + c.Redirect(url, 301) + return + } + } + } +} + +func RedirectFromLegacyDashboardSoloUrl() macaron.Handler { + return func(c *m.ReqContext) { + slug := c.Params("slug") + + if slug != "" { + if url, err := getDashboardUrlBySlug(c.OrgId, slug); err == nil { + url = strings.Replace(url, "/d/", "/d-solo/", 1) + url = fmt.Sprintf("%s?%s", url, c.Req.URL.RawQuery) + c.Redirect(url, 301) + return + } + } + } +} diff --git a/pkg/middleware/dashboard_redirect_test.go 
b/pkg/middleware/dashboard_redirect_test.go new file mode 100644 index 00000000000..0af06347ed0 --- /dev/null +++ b/pkg/middleware/dashboard_redirect_test.go @@ -0,0 +1,58 @@ +package middleware + +import ( + "strings" + "testing" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/util" + . "github.com/smartystreets/goconvey/convey" +) + +func TestMiddlewareDashboardRedirect(t *testing.T) { + Convey("Given the dashboard redirect middleware", t, func() { + bus.ClearBusHandlers() + redirectFromLegacyDashboardUrl := RedirectFromLegacyDashboardUrl() + redirectFromLegacyDashboardSoloUrl := RedirectFromLegacyDashboardSoloUrl() + + fakeDash := m.NewDashboard("Child dash") + fakeDash.Id = 1 + fakeDash.FolderId = 1 + fakeDash.HasAcl = false + fakeDash.Uid = util.GenerateShortUid() + + bus.AddHandler("test", func(query *m.GetDashboardQuery) error { + query.Result = fakeDash + return nil + }) + + middlewareScenario("GET dashboard by legacy url", func(sc *scenarioContext) { + sc.m.Get("/dashboard/db/:slug", redirectFromLegacyDashboardUrl, sc.defaultHandler) + + sc.fakeReqWithParams("GET", "/dashboard/db/dash?orgId=1&panelId=2", map[string]string{}).exec() + + Convey("Should redirect to new dashboard url with a 301 Moved Permanently", func() { + So(sc.resp.Code, ShouldEqual, 301) + redirectUrl, _ := sc.resp.Result().Location() + So(redirectUrl.Path, ShouldEqual, m.GetDashboardUrl(fakeDash.Uid, fakeDash.Slug)) + So(len(redirectUrl.Query()), ShouldEqual, 2) + }) + }) + + middlewareScenario("GET dashboard solo by legacy url", func(sc *scenarioContext) { + sc.m.Get("/dashboard-solo/db/:slug", redirectFromLegacyDashboardSoloUrl, sc.defaultHandler) + + sc.fakeReqWithParams("GET", "/dashboard-solo/db/dash?orgId=1&panelId=2", map[string]string{}).exec() + + Convey("Should redirect to new dashboard url with a 301 Moved Permanently", func() { + So(sc.resp.Code, ShouldEqual, 301) + redirectUrl, _ := 
sc.resp.Result().Location() + expectedUrl := m.GetDashboardUrl(fakeDash.Uid, fakeDash.Slug) + expectedUrl = strings.Replace(expectedUrl, "/d/", "/d-solo/", 1) + So(redirectUrl.Path, ShouldEqual, expectedUrl) + So(len(redirectUrl.Query()), ShouldEqual, 2) + }) + }) + }) +} diff --git a/pkg/middleware/logger.go b/pkg/middleware/logger.go index 94f707800be..2c63810b9c8 100644 --- a/pkg/middleware/logger.go +++ b/pkg/middleware/logger.go @@ -19,6 +19,7 @@ import ( "net/http" "time" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" "gopkg.in/macaron.v1" @@ -47,7 +48,7 @@ func Logger() macaron.Handler { } if ctx, ok := c.Data["ctx"]; ok { - ctxTyped := ctx.(*Context) + ctxTyped := ctx.(*m.ReqContext) if status == 500 { ctxTyped.Logger.Error("Request Completed", "method", req.Method, "path", req.URL.Path, "status", status, "remote_addr", c.RemoteAddr(), "time_ms", int64(timeTakenMs), "size", rw.Size(), "referer", req.Referer()) } else { diff --git a/pkg/middleware/middleware.go b/pkg/middleware/middleware.go index 259d800f0a9..b5b244d5bff 100644 --- a/pkg/middleware/middleware.go +++ b/pkg/middleware/middleware.go @@ -2,7 +2,6 @@ package middleware import ( "strconv" - "strings" "gopkg.in/macaron.v1" @@ -11,29 +10,17 @@ import ( "github.com/grafana/grafana/pkg/log" l "github.com/grafana/grafana/pkg/login" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" - "github.com/prometheus/client_golang/prometheus" ) -type Context struct { - *macaron.Context - *m.SignedInUser - - Session SessionStore - - IsSignedIn bool - IsRenderCall bool - AllowAnonymous bool - Logger log.Logger -} - func GetContextHandler() macaron.Handler { return func(c *macaron.Context) { - ctx := &Context{ + ctx := &m.ReqContext{ Context: c, SignedInUser: &m.SignedInUser{}, - Session: GetSession(), + 
Session: session.GetSession(), IsSignedIn: false, AllowAnonymous: false, Logger: log.New("context"), @@ -74,7 +61,7 @@ func GetContextHandler() macaron.Handler { } } -func initContextWithAnonymousUser(ctx *Context) bool { +func initContextWithAnonymousUser(ctx *m.ReqContext) bool { if !setting.AnonymousEnabled { return false } @@ -94,9 +81,9 @@ func initContextWithAnonymousUser(ctx *Context) bool { return true } -func initContextWithUserSessionCookie(ctx *Context, orgId int64) bool { +func initContextWithUserSessionCookie(ctx *m.ReqContext, orgId int64) bool { // initialize session - if err := ctx.Session.Start(ctx); err != nil { + if err := ctx.Session.Start(ctx.Context); err != nil { ctx.Logger.Error("Failed to start session", "error", err) return false } @@ -117,7 +104,7 @@ func initContextWithUserSessionCookie(ctx *Context, orgId int64) bool { return true } -func initContextWithApiKey(ctx *Context) bool { +func initContextWithApiKey(ctx *m.ReqContext) bool { var keyString string if keyString = getApiKey(ctx); keyString == "" { return false @@ -153,7 +140,7 @@ func initContextWithApiKey(ctx *Context) bool { return true } -func initContextWithBasicAuth(ctx *Context, orgId int64) bool { +func initContextWithBasicAuth(ctx *m.ReqContext, orgId int64) bool { if !setting.BasicAuthEnabled { return false @@ -195,68 +182,8 @@ func initContextWithBasicAuth(ctx *Context, orgId int64) bool { return true } -// Handle handles and logs error by given status. 
-func (ctx *Context) Handle(status int, title string, err error) { - if err != nil { - ctx.Logger.Error(title, "error", err) - if setting.Env != setting.PROD { - ctx.Data["ErrorMsg"] = err - } - } - - ctx.Data["Title"] = title - ctx.Data["AppSubUrl"] = setting.AppSubUrl - ctx.HTML(status, strconv.Itoa(status)) -} - -func (ctx *Context) JsonOK(message string) { - resp := make(map[string]interface{}) - resp["message"] = message - ctx.JSON(200, resp) -} - -func (ctx *Context) IsApiRequest() bool { - return strings.HasPrefix(ctx.Req.URL.Path, "/api") -} - -func (ctx *Context) JsonApiErr(status int, message string, err error) { - resp := make(map[string]interface{}) - - if err != nil { - ctx.Logger.Error(message, "error", err) - if setting.Env != setting.PROD { - resp["error"] = err.Error() - } - } - - switch status { - case 404: - resp["message"] = "Not Found" - case 500: - resp["message"] = "Internal Server Error" - } - - if message != "" { - resp["message"] = message - } - - ctx.JSON(status, resp) -} - -func (ctx *Context) HasUserRole(role m.RoleType) bool { - return ctx.OrgRole.Includes(role) -} - -func (ctx *Context) HasHelpFlag(flag m.HelpFlags1) bool { - return ctx.HelpFlags1.HasFlag(flag) -} - -func (ctx *Context) TimeRequest(timer prometheus.Summary) { - ctx.Data["perfmon.timer"] = timer -} - func AddDefaultResponseHeaders() macaron.Handler { - return func(ctx *Context) { + return func(ctx *m.ReqContext) { if ctx.IsApiRequest() && ctx.Req.Method == "GET" { ctx.Resp.Header().Add("Cache-Control", "no-cache") ctx.Resp.Header().Add("Pragma", "no-cache") diff --git a/pkg/middleware/middleware_test.go b/pkg/middleware/middleware_test.go index 0d9e0e5b973..83efc65d4d4 100644 --- a/pkg/middleware/middleware_test.go +++ b/pkg/middleware/middleware_test.go @@ -7,10 +7,11 @@ import ( "path/filepath" "testing" - "github.com/go-macaron/session" + ms "github.com/go-macaron/session" "github.com/grafana/grafana/pkg/bus" l "github.com/grafana/grafana/pkg/login" m 
"github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" . "github.com/smartystreets/goconvey/convey" @@ -130,8 +131,8 @@ func TestMiddlewareContext(t *testing.T) { middlewareScenario("UserId in session", func(sc *scenarioContext) { - sc.fakeReq("GET", "/").handler(func(c *Context) { - c.Session.Set(SESS_KEY_USERID, int64(12)) + sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) { + c.Session.Set(session.SESS_KEY_USERID, int64(12)) }).exec() bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { @@ -276,8 +277,8 @@ func TestMiddlewareContext(t *testing.T) { }) // create session - sc.fakeReq("GET", "/").handler(func(c *Context) { - c.Session.Set(SESS_KEY_USERID, int64(33)) + sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) { + c.Session.Set(session.SESS_KEY_USERID, int64(33)) }).exec() oldSessionID := sc.context.Session.ID() @@ -300,7 +301,7 @@ func TestMiddlewareContext(t *testing.T) { setting.LdapEnabled = true called := false - syncGrafanaUserWithLdapUser = func(ctx *Context, query *m.GetSignedInUserQuery) error { + syncGrafanaUserWithLdapUser = func(ctx *m.ReqContext, query *m.GetSignedInUserQuery) error { called = true return nil } @@ -336,12 +337,12 @@ func middlewareScenario(desc string, fn scenarioFunc) { sc.m.Use(GetContextHandler()) // mock out gc goroutine - startSessionGC = func() {} - sc.m.Use(Sessioner(&session.Options{})) + session.StartSessionGC = func() {} + sc.m.Use(Sessioner(&ms.Options{})) sc.m.Use(OrgRedirect()) sc.m.Use(AddDefaultResponseHeaders()) - sc.defaultHandler = func(c *Context) { + sc.defaultHandler = func(c *m.ReqContext) { sc.context = c if sc.handlerFunc != nil { sc.handlerFunc(sc.context) @@ -356,7 +357,7 @@ func middlewareScenario(desc string, fn scenarioFunc) { type scenarioContext struct { m *macaron.Macaron - context *Context + context *m.ReqContext resp *httptest.ResponseRecorder apiKey string 
authHeader string @@ -399,6 +400,20 @@ func (sc *scenarioContext) fakeReq(method, url string) *scenarioContext { return sc } +func (sc *scenarioContext) fakeReqWithParams(method, url string, queryParams map[string]string) *scenarioContext { + sc.resp = httptest.NewRecorder() + req, err := http.NewRequest(method, url, nil) + q := req.URL.Query() + for k, v := range queryParams { + q.Add(k, v) + } + req.URL.RawQuery = q.Encode() + So(err, ShouldBeNil) + sc.req = req + + return sc +} + func (sc *scenarioContext) handler(fn handlerFunc) *scenarioContext { sc.handlerFunc = fn return sc @@ -422,4 +437,4 @@ func (sc *scenarioContext) exec() { } type scenarioFunc func(c *scenarioContext) -type handlerFunc func(c *Context) +type handlerFunc func(c *m.ReqContext) diff --git a/pkg/middleware/org_redirect.go b/pkg/middleware/org_redirect.go index 9dd764be1bb..db263c2a17a 100644 --- a/pkg/middleware/org_redirect.go +++ b/pkg/middleware/org_redirect.go @@ -7,7 +7,7 @@ import ( "strings" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/models" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "gopkg.in/macaron.v1" @@ -22,7 +22,7 @@ func OrgRedirect() macaron.Handler { return } - ctx, ok := c.Data["ctx"].(*Context) + ctx, ok := c.Data["ctx"].(*m.ReqContext) if !ok || !ctx.IsSignedIn { return } @@ -31,7 +31,7 @@ func OrgRedirect() macaron.Handler { return } - cmd := models.SetUsingOrgCommand{UserId: ctx.UserId, OrgId: orgId} + cmd := m.SetUsingOrgCommand{UserId: ctx.UserId, OrgId: orgId} if err := bus.Dispatch(&cmd); err != nil { if ctx.IsApiRequest() { ctx.JsonApiErr(404, "Not found", nil) diff --git a/pkg/middleware/org_redirect_test.go b/pkg/middleware/org_redirect_test.go index 1f3d01f30f2..fa08154b250 100644 --- a/pkg/middleware/org_redirect_test.go +++ b/pkg/middleware/org_redirect_test.go @@ -6,7 +6,8 @@ import ( "fmt" "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/models" + m 
"github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" . "github.com/smartystreets/goconvey/convey" ) @@ -14,16 +15,16 @@ func TestOrgRedirectMiddleware(t *testing.T) { Convey("Can redirect to correct org", t, func() { middlewareScenario("when setting a correct org for the user", func(sc *scenarioContext) { - sc.fakeReq("GET", "/").handler(func(c *Context) { - c.Session.Set(SESS_KEY_USERID, int64(12)) + sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) { + c.Session.Set(session.SESS_KEY_USERID, int64(12)) }).exec() - bus.AddHandler("test", func(query *models.SetUsingOrgCommand) error { + bus.AddHandler("test", func(query *m.SetUsingOrgCommand) error { return nil }) - bus.AddHandler("test", func(query *models.GetSignedInUserQuery) error { - query.Result = &models.SignedInUser{OrgId: 1, UserId: 12} + bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { + query.Result = &m.SignedInUser{OrgId: 1, UserId: 12} return nil }) @@ -36,16 +37,16 @@ func TestOrgRedirectMiddleware(t *testing.T) { }) middlewareScenario("when setting an invalid org for user", func(sc *scenarioContext) { - sc.fakeReq("GET", "/").handler(func(c *Context) { - c.Session.Set(SESS_KEY_USERID, int64(12)) + sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) { + c.Session.Set(session.SESS_KEY_USERID, int64(12)) }).exec() - bus.AddHandler("test", func(query *models.SetUsingOrgCommand) error { + bus.AddHandler("test", func(query *m.SetUsingOrgCommand) error { return fmt.Errorf("") }) - bus.AddHandler("test", func(query *models.GetSignedInUserQuery) error { - query.Result = &models.SignedInUser{OrgId: 1, UserId: 12} + bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { + query.Result = &m.SignedInUser{OrgId: 1, UserId: 12} return nil }) diff --git a/pkg/middleware/perf.go b/pkg/middleware/perf.go index e381121a47f..5b6ab6f2d0a 100644 --- a/pkg/middleware/perf.go +++ b/pkg/middleware/perf.go @@ -4,9 +4,11 @@ import ( "net/http" 
"gopkg.in/macaron.v1" + + m "github.com/grafana/grafana/pkg/models" ) func MeasureRequestTime() macaron.Handler { - return func(res http.ResponseWriter, req *http.Request, c *Context) { + return func(res http.ResponseWriter, req *http.Request, c *m.ReqContext) { } } diff --git a/pkg/middleware/quota.go b/pkg/middleware/quota.go index 23f98e78a7e..43efca43485 100644 --- a/pkg/middleware/quota.go +++ b/pkg/middleware/quota.go @@ -3,15 +3,15 @@ package middleware import ( "fmt" - "github.com/grafana/grafana/pkg/bus" - m "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/setting" "gopkg.in/macaron.v1" + + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/quota" ) func Quota(target string) macaron.Handler { - return func(c *Context) { - limitReached, err := QuotaReached(c, target) + return func(c *m.ReqContext) { + limitReached, err := quota.QuotaReached(c, target) if err != nil { c.JsonApiErr(500, "failed to get quota", err) return @@ -22,82 +22,3 @@ func Quota(target string) macaron.Handler { } } } - -func QuotaReached(c *Context, target string) (bool, error) { - if !setting.Quota.Enabled { - return false, nil - } - - // get the list of scopes that this target is valid for. 
Org, User, Global - scopes, err := m.GetQuotaScopes(target) - if err != nil { - return false, err - } - - for _, scope := range scopes { - c.Logger.Debug("Checking quota", "target", target, "scope", scope) - - switch scope.Name { - case "global": - if scope.DefaultLimit < 0 { - continue - } - if scope.DefaultLimit == 0 { - return true, nil - } - if target == "session" { - usedSessions := getSessionCount() - if int64(usedSessions) > scope.DefaultLimit { - c.Logger.Debug("Sessions limit reached", "active", usedSessions, "limit", scope.DefaultLimit) - return true, nil - } - continue - } - query := m.GetGlobalQuotaByTargetQuery{Target: scope.Target} - if err := bus.Dispatch(&query); err != nil { - return true, err - } - if query.Result.Used >= scope.DefaultLimit { - return true, nil - } - case "org": - if !c.IsSignedIn { - continue - } - query := m.GetOrgQuotaByTargetQuery{OrgId: c.OrgId, Target: scope.Target, Default: scope.DefaultLimit} - if err := bus.Dispatch(&query); err != nil { - return true, err - } - if query.Result.Limit < 0 { - continue - } - if query.Result.Limit == 0 { - return true, nil - } - - if query.Result.Used >= query.Result.Limit { - return true, nil - } - case "user": - if !c.IsSignedIn || c.UserId == 0 { - continue - } - query := m.GetUserQuotaByTargetQuery{UserId: c.UserId, Target: scope.Target, Default: scope.DefaultLimit} - if err := bus.Dispatch(&query); err != nil { - return true, err - } - if query.Result.Limit < 0 { - continue - } - if query.Result.Limit == 0 { - return true, nil - } - - if query.Result.Used >= query.Result.Limit { - return true, nil - } - } - } - - return false, nil -} diff --git a/pkg/middleware/quota_test.go b/pkg/middleware/quota_test.go index b68aa485fa7..92c3d62674d 100644 --- a/pkg/middleware/quota_test.go +++ b/pkg/middleware/quota_test.go @@ -5,6 +5,7 @@ import ( "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" 
"github.com/grafana/grafana/pkg/setting" . "github.com/smartystreets/goconvey/convey" ) @@ -12,7 +13,7 @@ import ( func TestMiddlewareQuota(t *testing.T) { Convey("Given the grafana quota middleware", t, func() { - getSessionCount = func() int { + session.GetSessionCount = func() int { return 4 } @@ -74,8 +75,8 @@ func TestMiddlewareQuota(t *testing.T) { middlewareScenario("with user logged in", func(sc *scenarioContext) { // log us in, so we have a user_id and org_id in the context - sc.fakeReq("GET", "/").handler(func(c *Context) { - c.Session.Set(SESS_KEY_USERID, int64(12)) + sc.fakeReq("GET", "/").handler(func(c *m.ReqContext) { + c.Session.Set(session.SESS_KEY_USERID, int64(12)) }).exec() bus.AddHandler("test", func(query *m.GetSignedInUserQuery) error { diff --git a/pkg/middleware/recovery.go b/pkg/middleware/recovery.go index 0c9dc4670e2..ec289387aa4 100644 --- a/pkg/middleware/recovery.go +++ b/pkg/middleware/recovery.go @@ -24,6 +24,7 @@ import ( "gopkg.in/macaron.v1" "github.com/grafana/grafana/pkg/log" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" ) @@ -106,7 +107,7 @@ func Recovery() macaron.Handler { panicLogger := log.Root // try to get request logger if ctx, ok := c.Data["ctx"]; ok { - ctxTyped := ctx.(*Context) + ctxTyped := ctx.(*m.ReqContext) panicLogger = ctxTyped.Logger } @@ -115,15 +116,15 @@ func Recovery() macaron.Handler { c.Data["Title"] = "Server Error" c.Data["AppSubUrl"] = setting.AppSubUrl - if theErr, ok := err.(error); ok { - c.Data["Title"] = theErr.Error() - } - if setting.Env == setting.DEV { + if theErr, ok := err.(error); ok { + c.Data["Title"] = theErr.Error() + } + c.Data["ErrorMsg"] = string(stack) } - ctx, ok := c.Data["ctx"].(*Context) + ctx, ok := c.Data["ctx"].(*m.ReqContext) if ok && ctx.IsApiRequest() { resp := make(map[string]interface{}) @@ -137,7 +138,7 @@ func Recovery() macaron.Handler { c.JSON(500, resp) } else { - c.HTML(500, "500") + c.HTML(500, "error") } } }() diff --git 
a/pkg/middleware/recovery_test.go b/pkg/middleware/recovery_test.go index 299186945ee..32545b7caca 100644 --- a/pkg/middleware/recovery_test.go +++ b/pkg/middleware/recovery_test.go @@ -4,8 +4,10 @@ import ( "path/filepath" "testing" - "github.com/go-macaron/session" + ms "github.com/go-macaron/session" "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" . "github.com/smartystreets/goconvey/convey" "gopkg.in/macaron.v1" ) @@ -37,7 +39,7 @@ func TestRecoveryMiddleware(t *testing.T) { }) } -func PanicHandler(c *Context) { +func PanicHandler(c *m.ReqContext) { panic("Handler has panicked") } @@ -60,12 +62,12 @@ func recoveryScenario(desc string, url string, fn scenarioFunc) { sc.m.Use(GetContextHandler()) // mock out gc goroutine - startSessionGC = func() {} - sc.m.Use(Sessioner(&session.Options{})) + session.StartSessionGC = func() {} + sc.m.Use(Sessioner(&ms.Options{})) sc.m.Use(OrgRedirect()) sc.m.Use(AddDefaultResponseHeaders()) - sc.defaultHandler = func(c *Context) { + sc.defaultHandler = func(c *m.ReqContext) { sc.context = c if sc.handlerFunc != nil { sc.handlerFunc(sc.context) diff --git a/pkg/middleware/render_auth.go b/pkg/middleware/render_auth.go index d2f9c1b2b1a..225645e659e 100644 --- a/pkg/middleware/render_auth.go +++ b/pkg/middleware/render_auth.go @@ -10,7 +10,7 @@ import ( var renderKeysLock sync.Mutex var renderKeys map[string]*m.SignedInUser = make(map[string]*m.SignedInUser) -func initContextWithRenderAuth(ctx *Context) bool { +func initContextWithRenderAuth(ctx *m.ReqContext) bool { key := ctx.GetCookie("renderKey") if key == "" { return false diff --git a/pkg/middleware/session.go b/pkg/middleware/session.go index 4de111ff3d2..5654a42cb7d 100644 --- a/pkg/middleware/session.go +++ b/pkg/middleware/session.go @@ -1,170 +1,21 @@ package middleware import ( - "math/rand" - "time" - - "github.com/go-macaron/session" - _ "github.com/go-macaron/session/memcache" - _ 
"github.com/go-macaron/session/mysql" - _ "github.com/go-macaron/session/postgres" - _ "github.com/go-macaron/session/redis" - "github.com/grafana/grafana/pkg/log" + ms "github.com/go-macaron/session" "gopkg.in/macaron.v1" + + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" ) -const ( - SESS_KEY_USERID = "uid" - SESS_KEY_OAUTH_STATE = "state" - SESS_KEY_APIKEY = "apikey_id" // used for render requests with api keys - SESS_KEY_LASTLDAPSYNC = "last_ldap_sync" -) +func Sessioner(options *ms.Options) macaron.Handler { + session.Init(options) -var sessionManager *session.Manager -var sessionOptions *session.Options -var startSessionGC func() -var getSessionCount func() int -var sessionLogger = log.New("session") - -func init() { - startSessionGC = func() { - sessionManager.GC() - sessionLogger.Debug("Session GC") - time.AfterFunc(time.Duration(sessionOptions.Gclifetime)*time.Second, startSessionGC) - } - getSessionCount = func() int { - return sessionManager.Count() - } -} - -func prepareOptions(opt *session.Options) *session.Options { - if len(opt.Provider) == 0 { - opt.Provider = "memory" - } - if len(opt.ProviderConfig) == 0 { - opt.ProviderConfig = "data/sessions" - } - if len(opt.CookieName) == 0 { - opt.CookieName = "grafana_sess" - } - if len(opt.CookiePath) == 0 { - opt.CookiePath = "/" - } - if opt.Gclifetime == 0 { - opt.Gclifetime = 3600 - } - if opt.Maxlifetime == 0 { - opt.Maxlifetime = opt.Gclifetime - } - if opt.IDLength == 0 { - opt.IDLength = 16 - } - - return opt -} - -func Sessioner(options *session.Options) macaron.Handler { - var err error - sessionOptions = prepareOptions(options) - sessionManager, err = session.NewManager(options.Provider, *options) - if err != nil { - panic(err) - } - - // start GC threads after some random seconds - rndSeconds := 10 + rand.Int63n(180) - time.AfterFunc(time.Duration(rndSeconds)*time.Second, startSessionGC) - - return func(ctx *Context) { + return func(ctx 
*m.ReqContext) { ctx.Next() - if err = ctx.Session.Release(); err != nil { + if err := ctx.Session.Release(); err != nil { panic("session(release): " + err.Error()) } } } - -func GetSession() SessionStore { - return &SessionWrapper{manager: sessionManager} -} - -type SessionStore interface { - // Set sets value to given key in session. - Set(interface{}, interface{}) error - // Get gets value by given key in session. - Get(interface{}) interface{} - // Delete deletes a key from session. - Delete(interface{}) interface{} - // ID returns current session ID. - ID() string - // Release releases session resource and save data to provider. - Release() error - // Destory deletes a session. - Destory(*Context) error - // init - Start(*Context) error - // RegenerateId regenerates the session id - RegenerateId(*Context) error -} - -type SessionWrapper struct { - session session.RawStore - manager *session.Manager -} - -func (s *SessionWrapper) Start(c *Context) error { - var err error - s.session, err = s.manager.Start(c.Context) - return err -} - -func (s *SessionWrapper) RegenerateId(c *Context) error { - var err error - s.session, err = s.manager.RegenerateId(c.Context) - return err -} - -func (s *SessionWrapper) Set(k interface{}, v interface{}) error { - if s.session != nil { - return s.session.Set(k, v) - } - return nil -} - -func (s *SessionWrapper) Get(k interface{}) interface{} { - if s.session != nil { - return s.session.Get(k) - } - return nil -} - -func (s *SessionWrapper) Delete(k interface{}) interface{} { - if s.session != nil { - return s.session.Delete(k) - } - return nil -} - -func (s *SessionWrapper) ID() string { - if s.session != nil { - return s.session.ID() - } - return "" -} - -func (s *SessionWrapper) Release() error { - if s.session != nil { - return s.session.Release() - } - return nil -} - -func (s *SessionWrapper) Destory(c *Context) error { - if s.session != nil { - if err := s.manager.Destory(c.Context); err != nil { - return err - } - 
s.session = nil - } - return nil -} diff --git a/pkg/middleware/validate_host.go b/pkg/middleware/validate_host.go index fa84e783767..63c4b3000e9 100644 --- a/pkg/middleware/validate_host.go +++ b/pkg/middleware/validate_host.go @@ -3,12 +3,13 @@ package middleware import ( "strings" + m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" "gopkg.in/macaron.v1" ) func ValidateHostHeader(domain string) macaron.Handler { - return func(c *Context) { + return func(c *m.ReqContext) { // ignore local render calls if c.IsRenderCall { return diff --git a/pkg/models/alert.go b/pkg/models/alert.go index fa3f4b466a8..88b49350b97 100644 --- a/pkg/models/alert.go +++ b/pkg/models/alert.go @@ -159,10 +159,6 @@ type SetAlertStateCommand struct { Timestamp time.Time } -type DeleteAlertCommand struct { - AlertId int64 -} - //Queries type GetAlertsQuery struct { OrgId int64 @@ -170,8 +166,9 @@ type GetAlertsQuery struct { DashboardId int64 PanelId int64 Limit int64 + User *SignedInUser - Result []*Alert + Result []*AlertListItemDTO } type GetAllAlertsQuery struct { @@ -191,6 +188,21 @@ type GetAlertStatesForDashboardQuery struct { Result []*AlertStateInfoDTO } +type AlertListItemDTO struct { + Id int64 `json:"id"` + DashboardId int64 `json:"dashboardId"` + DashboardUid string `json:"dashboardUid"` + DashboardSlug string `json:"dashboardSlug"` + PanelId int64 `json:"panelId"` + Name string `json:"name"` + State AlertStateType `json:"state"` + NewStateDate time.Time `json:"newStateDate"` + EvalDate time.Time `json:"evalDate"` + EvalData *simplejson.Json `json:"evalData"` + ExecutionError string `json:"executionError"` + Url string `json:"url"` +} + type AlertStateInfoDTO struct { Id int64 `json:"id"` DashboardId int64 `json:"dashboardId"` @@ -198,3 +210,17 @@ type AlertStateInfoDTO struct { State AlertStateType `json:"state"` NewStateDate time.Time `json:"newStateDate"` } + +// "Internal" commands + +type UpdateDashboardAlertsCommand struct { + UserId int64 
+ OrgId int64 + Dashboard *Dashboard +} + +type ValidateDashboardAlertsCommand struct { + UserId int64 + OrgId int64 + Dashboard *Dashboard +} diff --git a/pkg/models/context.go b/pkg/models/context.go new file mode 100644 index 00000000000..262f6550954 --- /dev/null +++ b/pkg/models/context.go @@ -0,0 +1,86 @@ +package models + +import ( + "strings" + + "github.com/prometheus/client_golang/prometheus" + "gopkg.in/macaron.v1" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/services/session" + "github.com/grafana/grafana/pkg/setting" +) + +type ReqContext struct { + *macaron.Context + *SignedInUser + + Session session.SessionStore + + IsSignedIn bool + IsRenderCall bool + AllowAnonymous bool + Logger log.Logger +} + +// Handle handles and logs error by given status. +func (ctx *ReqContext) Handle(status int, title string, err error) { + if err != nil { + ctx.Logger.Error(title, "error", err) + if setting.Env != setting.PROD { + ctx.Data["ErrorMsg"] = err + } + } + + ctx.Data["Title"] = title + ctx.Data["AppSubUrl"] = setting.AppSubUrl + ctx.Data["Theme"] = "dark" + + ctx.HTML(status, "error") +} + +func (ctx *ReqContext) JsonOK(message string) { + resp := make(map[string]interface{}) + resp["message"] = message + ctx.JSON(200, resp) +} + +func (ctx *ReqContext) IsApiRequest() bool { + return strings.HasPrefix(ctx.Req.URL.Path, "/api") +} + +func (ctx *ReqContext) JsonApiErr(status int, message string, err error) { + resp := make(map[string]interface{}) + + if err != nil { + ctx.Logger.Error(message, "error", err) + if setting.Env != setting.PROD { + resp["error"] = err.Error() + } + } + + switch status { + case 404: + resp["message"] = "Not Found" + case 500: + resp["message"] = "Internal Server Error" + } + + if message != "" { + resp["message"] = message + } + + ctx.JSON(status, resp) +} + +func (ctx *ReqContext) HasUserRole(role RoleType) bool { + return ctx.OrgRole.Includes(role) +} + +func (ctx *ReqContext) HasHelpFlag(flag 
HelpFlags1) bool { + return ctx.HelpFlags1.HasFlag(flag) +} + +func (ctx *ReqContext) TimeRequest(timer prometheus.Summary) { + ctx.Data["perfmon.timer"] = timer +} diff --git a/pkg/models/dashboard_acl.go b/pkg/models/dashboard_acl.go index fa7ad00de7f..5b91b2a70b4 100644 --- a/pkg/models/dashboard_acl.go +++ b/pkg/models/dashboard_acl.go @@ -26,6 +26,8 @@ func (p PermissionType) String() string { var ( ErrDashboardAclInfoMissing = errors.New("User id and team id cannot both be empty for a dashboard permission.") ErrDashboardPermissionDashboardEmpty = errors.New("Dashboard Id must be greater than zero for a dashboard permission.") + ErrFolderAclInfoMissing = errors.New("User id and team id cannot both be empty for a folder permission.") + ErrFolderPermissionFolderEmpty = errors.New("Folder Id must be greater than zero for a folder permission.") ) // Dashboard ACL model @@ -44,9 +46,9 @@ type DashboardAcl struct { } type DashboardAclInfoDTO struct { - Id int64 `json:"id"` OrgId int64 `json:"-"` - DashboardId int64 `json:"dashboardId"` + DashboardId int64 `json:"dashboardId,omitempty"` + FolderId int64 `json:"folderId,omitempty"` Created time.Time `json:"created"` Updated time.Time `json:"updated"` @@ -59,6 +61,32 @@ type DashboardAclInfoDTO struct { Role *RoleType `json:"role,omitempty"` Permission PermissionType `json:"permission"` PermissionName string `json:"permissionName"` + Uid string `json:"uid"` + Title string `json:"title"` + Slug string `json:"slug"` + IsFolder bool `json:"isFolder"` + Url string `json:"url"` +} + +func (dto *DashboardAclInfoDTO) hasSameRoleAs(other *DashboardAclInfoDTO) bool { + if dto.Role == nil || other.Role == nil { + return false + } + + return dto.UserId <= 0 && dto.TeamId <= 0 && dto.UserId == other.UserId && dto.TeamId == other.TeamId && *dto.Role == *other.Role +} + +func (dto *DashboardAclInfoDTO) hasSameUserAs(other *DashboardAclInfoDTO) bool { + return dto.UserId > 0 && dto.UserId == other.UserId +} + +func (dto 
*DashboardAclInfoDTO) hasSameTeamAs(other *DashboardAclInfoDTO) bool { + return dto.TeamId > 0 && dto.TeamId == other.TeamId +} + +// IsDuplicateOf returns true if other item has same role, same user or same team +func (dto *DashboardAclInfoDTO) IsDuplicateOf(other *DashboardAclInfoDTO) bool { + return dto.hasSameRoleAs(other) || dto.hasSameUserAs(other) || dto.hasSameTeamAs(other) } // @@ -70,21 +98,6 @@ type UpdateDashboardAclCommand struct { Items []*DashboardAcl } -type SetDashboardAclCommand struct { - DashboardId int64 - OrgId int64 - UserId int64 - TeamId int64 - Permission PermissionType - - Result DashboardAcl -} - -type RemoveDashboardAclCommand struct { - AclId int64 - OrgId int64 -} - // // QUERIES // diff --git a/pkg/models/dashboard_snapshot.go b/pkg/models/dashboard_snapshot.go index 9273b88f291..ec8b19f3c18 100644 --- a/pkg/models/dashboard_snapshot.go +++ b/pkg/models/dashboard_snapshot.go @@ -64,10 +64,12 @@ type DeleteDashboardSnapshotCommand struct { } type DeleteExpiredSnapshotsCommand struct { + DeletedRows int64 } type GetDashboardSnapshotQuery struct { - Key string + Key string + DeleteKey string Result *DashboardSnapshot } @@ -76,9 +78,10 @@ type DashboardSnapshots []*DashboardSnapshot type DashboardSnapshotsList []*DashboardSnapshotDTO type GetDashboardSnapshotsQuery struct { - Name string - Limit int - OrgId int64 + Name string + Limit int + OrgId int64 + SignedInUser *SignedInUser Result DashboardSnapshotsList } diff --git a/pkg/models/dashboard_version.go b/pkg/models/dashboard_version.go index 4acb4282a58..9f5f18cb263 100644 --- a/pkg/models/dashboard_version.go +++ b/pkg/models/dashboard_version.go @@ -75,4 +75,5 @@ type GetDashboardVersionsQuery struct { // type DeleteExpiredVersionsCommand struct { + DeletedRows int64 } diff --git a/pkg/models/dashboards.go b/pkg/models/dashboards.go index 091f27ec413..4b771038df6 100644 --- a/pkg/models/dashboards.go +++ b/pkg/models/dashboards.go @@ -2,23 +2,37 @@ package models import ( "errors" 
+ "fmt" "strings" "time" "github.com/gosimple/slug" "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/setting" ) // Typed errors var ( - ErrDashboardNotFound = errors.New("Dashboard not found") - ErrDashboardSnapshotNotFound = errors.New("Dashboard snapshot not found") - ErrDashboardWithSameNameExists = errors.New("A dashboard with the same name already exists") - ErrDashboardVersionMismatch = errors.New("The dashboard has been changed by someone else") - ErrDashboardTitleEmpty = errors.New("Dashboard title cannot be empty") - ErrDashboardFolderCannotHaveParent = errors.New("A Dashboard Folder cannot be added to another folder") - ErrDashboardContainsInvalidAlertData = errors.New("Invalid alert data. Cannot save dashboard") - ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data") + ErrDashboardNotFound = errors.New("Dashboard not found") + ErrDashboardFolderNotFound = errors.New("Folder not found") + ErrDashboardSnapshotNotFound = errors.New("Dashboard snapshot not found") + ErrDashboardWithSameUIDExists = errors.New("A dashboard with the same uid already exists") + ErrDashboardWithSameNameInFolderExists = errors.New("A dashboard with the same name in the folder already exists") + ErrDashboardVersionMismatch = errors.New("The dashboard has been changed by someone else") + ErrDashboardTitleEmpty = errors.New("Dashboard title cannot be empty") + ErrDashboardFolderCannotHaveParent = errors.New("A Dashboard Folder cannot be added to another folder") + ErrDashboardContainsInvalidAlertData = errors.New("Invalid alert data. 
Cannot save dashboard") + ErrDashboardFailedToUpdateAlertData = errors.New("Failed to save alert data") + ErrDashboardsWithSameSlugExists = errors.New("Multiple dashboards with the same slug exists") + ErrDashboardFailedGenerateUniqueUid = errors.New("Failed to generate unique dashboard id") + ErrDashboardTypeMismatch = errors.New("Dashboard cannot be changed to a folder") + ErrDashboardFolderWithSameNameAsDashboard = errors.New("Folder name cannot be the same as one of its dashboards") + ErrDashboardWithSameNameAsFolder = errors.New("Dashboard name cannot be the same as folder") + ErrDashboardFolderNameExists = errors.New("A folder with that name already exists") + ErrDashboardUpdateAccessDenied = errors.New("Access denied to save dashboard") + ErrDashboardInvalidUid = errors.New("uid contains illegal characters") + ErrDashboardUidToLong = errors.New("uid to long. max 40 characters") + RootFolderName = "General" ) type UpdatePluginDashboardError struct { @@ -39,6 +53,7 @@ var ( // Dashboard model type Dashboard struct { Id int64 + Uid string Slug string OrgId int64 GnetId int64 @@ -58,6 +73,30 @@ type Dashboard struct { Data *simplejson.Json } +func (d *Dashboard) SetId(id int64) { + d.Id = id + d.Data.Set("id", id) +} + +func (d *Dashboard) SetUid(uid string) { + d.Uid = uid + d.Data.Set("uid", uid) +} + +func (d *Dashboard) SetVersion(version int) { + d.Version = version + d.Data.Set("version", version) +} + +// GetDashboardIdForSavePermissionCheck return the dashboard id to be used for checking permission of dashboard +func (d *Dashboard) GetDashboardIdForSavePermissionCheck() int64 { + if d.Id == 0 { + return d.FolderId + } + + return d.Id +} + // NewDashboard creates a new dashboard func NewDashboard(title string) *Dashboard { dash := &Dashboard{} @@ -73,9 +112,10 @@ func NewDashboard(title string) *Dashboard { // NewDashboardFolder creates a new dashboard folder func NewDashboardFolder(title string) *Dashboard { folder := NewDashboard(title) + 
folder.IsFolder = true folder.Data.Set("schemaVersion", 16) - folder.Data.Set("editable", true) - folder.Data.Set("hideControls", true) + folder.Data.Set("version", 0) + folder.IsFolder = true return folder } @@ -89,14 +129,21 @@ func NewDashboardFromJson(data *simplejson.Json) *Dashboard { dash.Data = data dash.Title = dash.Data.Get("title").MustString() dash.UpdateSlug() + update := false if id, err := dash.Data.Get("id").Float64(); err == nil { dash.Id = int64(id) + update = true + } - if version, err := dash.Data.Get("version").Float64(); err == nil { - dash.Version = int(version) - dash.Updated = time.Now() - } + if uid, err := dash.Data.Get("uid").String(); err == nil { + dash.Uid = uid + update = true + } + + if version, err := dash.Data.Get("version").Float64(); err == nil && update { + dash.Version = int(version) + dash.Updated = time.Now() } else { dash.Data.Set("version", 0) dash.Created = time.Now() @@ -119,10 +166,6 @@ func (cmd *SaveDashboardCommand) GetDashboardModel() *Dashboard { userId = -1 } - if dash.Data.Get("version").MustInt(0) == 0 { - dash.CreatedBy = userId - } - dash.UpdatedBy = userId dash.OrgId = cmd.OrgId dash.PluginId = cmd.PluginId @@ -147,6 +190,40 @@ func SlugifyTitle(title string) string { return slug.Make(strings.ToLower(title)) } +// GetUrl return the html url for a folder if it's folder, otherwise for a dashboard +func (dash *Dashboard) GetUrl() string { + return GetDashboardFolderUrl(dash.IsFolder, dash.Uid, dash.Slug) +} + +// Return the html url for a dashboard +func (dash *Dashboard) GenerateUrl() string { + return GetDashboardUrl(dash.Uid, dash.Slug) +} + +// GetDashboardFolderUrl return the html url for a folder if it's folder, otherwise for a dashboard +func GetDashboardFolderUrl(isFolder bool, uid string, slug string) string { + if isFolder { + return GetFolderUrl(uid, slug) + } + + return GetDashboardUrl(uid, slug) +} + +// Return the html url for a dashboard +func GetDashboardUrl(uid string, slug string) string { + 
return fmt.Sprintf("%s/d/%s/%s", setting.AppSubUrl, uid, slug) +} + +// Return the full url for a dashboard +func GetFullDashboardUrl(uid string, slug string) string { + return fmt.Sprintf("%s%s", setting.AppUrl, GetDashboardUrl(uid, slug)) +} + +// GetFolderUrl return the html url for a folder +func GetFolderUrl(folderUid string, slug string) string { + return fmt.Sprintf("%s/dashboards/f/%s/%s", setting.AppSubUrl, folderUid, slug) +} + // // COMMANDS // @@ -167,18 +244,40 @@ type SaveDashboardCommand struct { Result *Dashboard } +type DashboardProvisioning struct { + Id int64 + DashboardId int64 + Name string + ExternalId string + Updated int64 +} + +type SaveProvisionedDashboardCommand struct { + DashboardCmd *SaveDashboardCommand + DashboardProvisioning *DashboardProvisioning + + Result *Dashboard +} + type DeleteDashboardCommand struct { Id int64 OrgId int64 } +type ValidateDashboardBeforeSaveCommand struct { + OrgId int64 + Dashboard *Dashboard + Overwrite bool +} + // // QUERIES // type GetDashboardQuery struct { - Slug string // required if no Id is specified + Slug string // required if no Id or Uid is specified Id int64 // optional if slug is set + Uid string // optional if slug is set OrgId int64 Result *Dashboard @@ -199,6 +298,14 @@ type GetDashboardsQuery struct { Result []*Dashboard } +type GetDashboardPermissionsForUserQuery struct { + DashboardIds []int64 + OrgId int64 + UserId int64 + OrgRole RoleType + Result []*DashboardPermissionForUser +} + type GetDashboardsByPluginIdQuery struct { OrgId int64 PluginId string @@ -209,3 +316,32 @@ type GetDashboardSlugByIdQuery struct { Id int64 Result string } + +type GetProvisionedDashboardDataQuery struct { + Name string + + Result []*DashboardProvisioning +} + +type GetDashboardsBySlugQuery struct { + OrgId int64 + Slug string + + Result []*Dashboard +} + +type DashboardPermissionForUser struct { + DashboardId int64 `json:"dashboardId"` + Permission PermissionType `json:"permission"` + PermissionName 
string `json:"permissionName"` +} + +type DashboardRef struct { + Uid string + Slug string +} + +type GetDashboardRefByIdQuery struct { + Id int64 + Result *DashboardRef +} diff --git a/pkg/models/datasource.go b/pkg/models/datasource.go index 81102b444ec..f2236ad8477 100644 --- a/pkg/models/datasource.go +++ b/pkg/models/datasource.go @@ -59,22 +59,23 @@ type DataSource struct { } var knownDatasourcePlugins map[string]bool = map[string]bool{ - DS_ES: true, - DS_GRAPHITE: true, - DS_INFLUXDB: true, - DS_INFLUXDB_08: true, - DS_KAIROSDB: true, - DS_CLOUDWATCH: true, - DS_PROMETHEUS: true, - DS_OPENTSDB: true, - DS_POSTGRES: true, - DS_MYSQL: true, - DS_MSSQL: true, - "opennms": true, - "druid": true, - "dalmatinerdb": true, - "gnocci": true, - "zabbix": true, + DS_ES: true, + DS_GRAPHITE: true, + DS_INFLUXDB: true, + DS_INFLUXDB_08: true, + DS_KAIROSDB: true, + DS_CLOUDWATCH: true, + DS_PROMETHEUS: true, + DS_OPENTSDB: true, + DS_POSTGRES: true, + DS_MYSQL: true, + DS_MSSQL: true, + "opennms": true, + "abhisant-druid-datasource": true, + "dalmatinerdb-datasource": true, + "gnocci": true, + "zabbix": true, + "alexanderzobnin-zabbix-datasource": true, "newrelic-app": true, "grafana-datadog-datasource": true, "grafana-simple-json": true, diff --git a/pkg/models/folders.go b/pkg/models/folders.go new file mode 100644 index 00000000000..c61620a11fc --- /dev/null +++ b/pkg/models/folders.go @@ -0,0 +1,91 @@ +package models + +import ( + "errors" + "strings" + "time" +) + +// Typed errors +var ( + ErrFolderNotFound = errors.New("Folder not found") + ErrFolderVersionMismatch = errors.New("The folder has been changed by someone else") + ErrFolderTitleEmpty = errors.New("Folder title cannot be empty") + ErrFolderWithSameUIDExists = errors.New("A folder/dashboard with the same uid already exists") + ErrFolderSameNameExists = errors.New("A folder or dashboard in the general folder with the same name already exists") + ErrFolderFailedGenerateUniqueUid = errors.New("Failed to 
generate unique folder id") + ErrFolderAccessDenied = errors.New("Access denied to folder") +) + +type Folder struct { + Id int64 + Uid string + Title string + Url string + Version int + + Created time.Time + Updated time.Time + + UpdatedBy int64 + CreatedBy int64 + HasAcl bool +} + +// GetDashboardModel turns the command into the savable model +func (cmd *CreateFolderCommand) GetDashboardModel(orgId int64, userId int64) *Dashboard { + dashFolder := NewDashboardFolder(strings.TrimSpace(cmd.Title)) + dashFolder.OrgId = orgId + dashFolder.SetUid(strings.TrimSpace(cmd.Uid)) + + if userId == 0 { + userId = -1 + } + + dashFolder.CreatedBy = userId + dashFolder.UpdatedBy = userId + dashFolder.UpdateSlug() + + return dashFolder +} + +// UpdateDashboardModel updates an existing model from command into model for update +func (cmd *UpdateFolderCommand) UpdateDashboardModel(dashFolder *Dashboard, orgId int64, userId int64) { + dashFolder.OrgId = orgId + dashFolder.Title = strings.TrimSpace(cmd.Title) + dashFolder.Data.Set("title", dashFolder.Title) + + if cmd.Uid != "" { + dashFolder.SetUid(cmd.Uid) + } + + dashFolder.SetVersion(cmd.Version) + dashFolder.IsFolder = true + + if userId == 0 { + userId = -1 + } + + dashFolder.UpdatedBy = userId + dashFolder.UpdateSlug() +} + +// +// COMMANDS +// + +type CreateFolderCommand struct { + Uid string `json:"uid"` + Title string `json:"title"` + + Result *Folder +} + +type UpdateFolderCommand struct { + Uid string `json:"uid"` + Title string `json:"title"` + Version int `json:"version"` + Overwrite bool `json:"overwrite"` + + Result *Folder +} diff --git a/pkg/models/login_attempt.go b/pkg/models/login_attempt.go new file mode 100644 index 00000000000..6e0976bc506 --- /dev/null +++ b/pkg/models/login_attempt.go @@ -0,0 +1,36 @@ +package models + +import ( + "time" +) + +type LoginAttempt struct { + Id int64 + Username string + IpAddress string + Created int64 +} + +// --------------------- +// COMMANDS + +type CreateLoginAttemptCommand 
struct { + Username string + IpAddress string + + Result LoginAttempt +} + +type DeleteOldLoginAttemptsCommand struct { + OlderThan time.Time + DeletedRows int64 +} + +// --------------------- +// QUERIES + +type GetUserLoginAttemptCountQuery struct { + Username string + Since time.Time + Result int64 +} diff --git a/pkg/models/org_user.go b/pkg/models/org_user.go index 9379625d458..ca32cc50060 100644 --- a/pkg/models/org_user.go +++ b/pkg/models/org_user.go @@ -95,7 +95,10 @@ type UpdateOrgUserCommand struct { // QUERIES type GetOrgUsersQuery struct { - OrgId int64 + OrgId int64 + Query string + Limit int + Result []*OrgUserDTO } diff --git a/pkg/models/stats.go b/pkg/models/stats.go index 0d982c3f4bd..e132d88c030 100644 --- a/pkg/models/stats.go +++ b/pkg/models/stats.go @@ -8,6 +8,7 @@ type SystemStats struct { Orgs int64 Playlists int64 Alerts int64 + Stars int64 } type DataSourceStats struct { diff --git a/pkg/models/team.go b/pkg/models/team.go index d2912f431b8..9c679a13394 100644 --- a/pkg/models/team.go +++ b/pkg/models/team.go @@ -7,8 +7,9 @@ import ( // Typed errors var ( - ErrTeamNotFound = errors.New("Team not found") - ErrTeamNameTaken = errors.New("Team name is taken") + ErrTeamNotFound = errors.New("Team not found") + ErrTeamNameTaken = errors.New("Team name is taken") + ErrTeamMemberNotFound = errors.New("Team member not found") ) // Team model @@ -37,18 +38,22 @@ type UpdateTeamCommand struct { Id int64 Name string Email string + OrgId int64 `json:"-"` } type DeleteTeamCommand struct { - Id int64 + OrgId int64 + Id int64 } type GetTeamByIdQuery struct { + OrgId int64 Id int64 Result *Team } type GetTeamsByUserQuery struct { + OrgId int64 UserId int64 `json:"userId"` Result []*Team `json:"teams"` } diff --git a/pkg/models/team_member.go b/pkg/models/team_member.go index 9970678a1ae..19cf657292d 100644 --- a/pkg/models/team_member.go +++ b/pkg/models/team_member.go @@ -31,6 +31,7 @@ type AddTeamMemberCommand struct { } type RemoveTeamMemberCommand 
struct { + OrgId int64 `json:"-"` UserId int64 TeamId int64 } @@ -39,6 +40,7 @@ type RemoveTeamMemberCommand struct { // QUERIES type GetTeamMembersQuery struct { + OrgId int64 TeamId int64 Result []*TeamMemberDTO } diff --git a/pkg/plugins/dashboard_importer.go b/pkg/plugins/dashboard_importer.go index bf516818e3c..53012f3e817 100644 --- a/pkg/plugins/dashboard_importer.go +++ b/pkg/plugins/dashboard_importer.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/dashboards" ) type ImportDashboardCommand struct { @@ -17,7 +18,7 @@ type ImportDashboardCommand struct { Overwrite bool OrgId int64 - UserId int64 + User *m.SignedInUser PluginId string Result *PluginDashboardInfoDTO } @@ -34,7 +35,7 @@ type DashboardInputMissingError struct { } func (e DashboardInputMissingError) Error() string { - return fmt.Sprintf("Dashbord input variable: %v missing from import command", e.VariableName) + return fmt.Sprintf("Dashboard input variable: %v missing from import command", e.VariableName) } func init() { @@ -66,22 +67,32 @@ func ImportDashboard(cmd *ImportDashboardCommand) error { saveCmd := m.SaveDashboardCommand{ Dashboard: generatedDash, OrgId: cmd.OrgId, - UserId: cmd.UserId, + UserId: cmd.User.UserId, Overwrite: cmd.Overwrite, PluginId: cmd.PluginId, FolderId: dashboard.FolderId, } - if err := bus.Dispatch(&saveCmd); err != nil { + dto := &dashboards.SaveDashboardDTO{ + OrgId: cmd.OrgId, + Dashboard: saveCmd.GetDashboardModel(), + Overwrite: saveCmd.Overwrite, + User: cmd.User, + } + + savedDash, err := dashboards.NewService().SaveDashboard(dto) + + if err != nil { return err } cmd.Result = &PluginDashboardInfoDTO{ PluginId: cmd.PluginId, - Title: dashboard.Title, + Title: savedDash.Title, Path: cmd.Path, - Revision: dashboard.Data.Get("revision").MustInt64(1), - ImportedUri: "db/" + saveCmd.Result.Slug, + Revision: 
savedDash.Data.Get("revision").MustInt64(1), + ImportedUri: "db/" + savedDash.Slug, + ImportedUrl: savedDash.GetUrl(), ImportedRevision: dashboard.Data.Get("revision").MustInt64(1), Imported: true, } diff --git a/pkg/plugins/dashboard_importer_test.go b/pkg/plugins/dashboard_importer_test.go index 78df94309f8..549b3bb4cf9 100644 --- a/pkg/plugins/dashboard_importer_test.go +++ b/pkg/plugins/dashboard_importer_test.go @@ -1,12 +1,13 @@ package plugins import ( + "context" "io/ioutil" "testing" - "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/setting" . "github.com/smartystreets/goconvey/convey" "gopkg.in/ini.v1" @@ -14,19 +15,15 @@ import ( func TestDashboardImport(t *testing.T) { pluginScenario("When importing a plugin dashboard", t, func() { - var importedDash *m.Dashboard - - bus.AddHandler("test", func(cmd *m.SaveDashboardCommand) error { - importedDash = cmd.GetDashboardModel() - cmd.Result = importedDash - return nil - }) + origNewDashboardService := dashboards.NewService + mock := &dashboards.FakeDashboardService{} + dashboards.MockDashboardService(mock) cmd := ImportDashboardCommand{ PluginId: "test-app", Path: "dashboards/connections.json", OrgId: 1, - UserId: 1, + User: &m.SignedInUser{UserId: 1, OrgRole: m.ROLE_ADMIN}, Inputs: []ImportDashboardInput{ {Name: "*", Type: "datasource", Value: "graphite"}, }, @@ -36,18 +33,22 @@ func TestDashboardImport(t *testing.T) { So(err, ShouldBeNil) Convey("should install dashboard", func() { - So(importedDash, ShouldNotBeNil) + So(cmd.Result, ShouldNotBeNil) - resultStr, _ := importedDash.Data.EncodePretty() + resultStr, _ := mock.SavedDashboards[0].Dashboard.Data.EncodePretty() expectedBytes, _ := ioutil.ReadFile("../../tests/test-app/dashboards/connections_result.json") expectedJson, _ := simplejson.NewJson(expectedBytes) expectedStr, _ := 
expectedJson.EncodePretty() So(string(resultStr), ShouldEqual, string(expectedStr)) - panel := importedDash.Data.Get("rows").GetIndex(0).Get("panels").GetIndex(0) + panel := mock.SavedDashboards[0].Dashboard.Data.Get("rows").GetIndex(0).Get("panels").GetIndex(0) So(panel.Get("datasource").MustString(), ShouldEqual, "graphite") }) + + Reset(func() { + dashboards.NewService = origNewDashboardService + }) }) Convey("When evaling dashboard template", t, func() { @@ -83,7 +84,6 @@ func TestDashboardImport(t *testing.T) { }) }) - } func pluginScenario(desc string, t *testing.T, fn func()) { @@ -91,7 +91,7 @@ func pluginScenario(desc string, t *testing.T, fn func()) { setting.Cfg = ini.Empty() sec, _ := setting.Cfg.NewSection("plugin.test-app") sec.NewKey("path", "../../tests/test-app") - err := Init() + err := initPlugins(context.Background()) So(err, ShouldBeNil) diff --git a/pkg/plugins/dashboards.go b/pkg/plugins/dashboards.go index 37e3d8c0076..d15bcdd6db5 100644 --- a/pkg/plugins/dashboards.go +++ b/pkg/plugins/dashboards.go @@ -14,6 +14,7 @@ type PluginDashboardInfoDTO struct { Title string `json:"title"` Imported bool `json:"imported"` ImportedUri string `json:"importedUri"` + ImportedUrl string `json:"importedUrl"` Slug string `json:"slug"` DashboardId int64 `json:"dashboardId"` ImportedRevision int64 `json:"importedRevision"` @@ -64,6 +65,7 @@ func GetPluginDashboards(orgId int64, pluginId string) ([]*PluginDashboardInfoDT res.DashboardId = existingDash.Id res.Imported = true res.ImportedUri = "db/" + existingDash.Slug + res.ImportedUrl = existingDash.GetUrl() res.ImportedRevision = existingDash.Data.Get("revision").MustInt64(1) existingMatches[existingDash.Id] = true } diff --git a/pkg/plugins/dashboards_test.go b/pkg/plugins/dashboards_test.go index 980d7bb91bd..8573d452409 100644 --- a/pkg/plugins/dashboards_test.go +++ b/pkg/plugins/dashboards_test.go @@ -1,6 +1,7 @@ package plugins import ( + "context" "testing" "github.com/grafana/grafana/pkg/bus" @@ -17,7 
+18,7 @@ func TestPluginDashboards(t *testing.T) { setting.Cfg = ini.Empty() sec, _ := setting.Cfg.NewSection("plugin.test-app") sec.NewKey("path", "../../tests/test-app") - err := Init() + err := initPlugins(context.Background()) So(err, ShouldBeNil) diff --git a/pkg/plugins/dashboards_updater.go b/pkg/plugins/dashboards_updater.go index 4c40e536d14..835e8873810 100644 --- a/pkg/plugins/dashboards_updater.go +++ b/pkg/plugins/dashboards_updater.go @@ -47,7 +47,7 @@ func autoUpdateAppDashboard(pluginDashInfo *PluginDashboardInfoDTO, orgId int64) PluginId: pluginDashInfo.PluginId, Overwrite: true, Dashboard: dash.Data, - UserId: 0, + User: &m.SignedInUser{UserId: 0, OrgRole: m.ROLE_ADMIN}, Path: pluginDashInfo.Path, } diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go new file mode 100644 index 00000000000..170e187b282 --- /dev/null +++ b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper.go @@ -0,0 +1,161 @@ +package wrapper + +import ( + "context" + "errors" + "fmt" + + "github.com/grafana/grafana/pkg/components/null" + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana_plugin_model/go/datasource" +) + +func NewDatasourcePluginWrapper(log log.Logger, plugin datasource.DatasourcePlugin) *DatasourcePluginWrapper { + return &DatasourcePluginWrapper{DatasourcePlugin: plugin, logger: log} +} + +type DatasourcePluginWrapper struct { + datasource.DatasourcePlugin + logger log.Logger +} + +func (tw *DatasourcePluginWrapper) Query(ctx context.Context, ds *models.DataSource, query *tsdb.TsdbQuery) (*tsdb.Response, error) { + jsonData, err := ds.JsonData.MarshalJSON() + if err != nil { + return nil, err + } + + pbQuery := &datasource.DatasourceRequest{ + Datasource: &datasource.DatasourceInfo{ + Name: ds.Name, + Type: ds.Type, + Url: ds.Url, + Id: ds.Id, + OrgId: ds.OrgId, + JsonData: 
string(jsonData), + DecryptedSecureJsonData: ds.SecureJsonData.Decrypt(), + }, + TimeRange: &datasource.TimeRange{ + FromRaw: query.TimeRange.From, + ToRaw: query.TimeRange.To, + ToEpochMs: query.TimeRange.GetToAsMsEpoch(), + FromEpochMs: query.TimeRange.GetFromAsMsEpoch(), + }, + Queries: []*datasource.Query{}, + } + + for _, q := range query.Queries { + modelJson, _ := q.Model.MarshalJSON() + + pbQuery.Queries = append(pbQuery.Queries, &datasource.Query{ + ModelJson: string(modelJson), + IntervalMs: q.IntervalMs, + RefId: q.RefId, + MaxDataPoints: q.MaxDataPoints, + }) + } + + pbres, err := tw.DatasourcePlugin.Query(ctx, pbQuery) + + if err != nil { + return nil, err + } + + res := &tsdb.Response{ + Results: map[string]*tsdb.QueryResult{}, + } + + for _, r := range pbres.Results { + qr := &tsdb.QueryResult{ + RefId: r.RefId, + Series: []*tsdb.TimeSeries{}, + Tables: []*tsdb.Table{}, + } + + if r.Error != "" { + qr.Error = errors.New(r.Error) + qr.ErrorString = r.Error + } + + for _, s := range r.GetSeries() { + points := tsdb.TimeSeriesPoints{} + + for _, p := range s.Points { + po := tsdb.NewTimePoint(null.FloatFrom(p.Value), float64(p.Timestamp)) + points = append(points, po) + } + + qr.Series = append(qr.Series, &tsdb.TimeSeries{ + Name: s.Name, + Tags: s.Tags, + Points: points, + }) + } + + mappedTables, err := tw.mapTables(r) + if err != nil { + return nil, err + } + qr.Tables = mappedTables + + res.Results[r.RefId] = qr + } + + return res, nil +} +func (tw *DatasourcePluginWrapper) mapTables(r *datasource.QueryResult) ([]*tsdb.Table, error) { + var tables []*tsdb.Table + for _, t := range r.GetTables() { + mappedTable, err := tw.mapTable(t) + if err != nil { + return nil, err + } + tables = append(tables, mappedTable) + } + return tables, nil +} + +func (tw *DatasourcePluginWrapper) mapTable(t *datasource.Table) (*tsdb.Table, error) { + table := &tsdb.Table{} + for _, c := range t.GetColumns() { + table.Columns = append(table.Columns, tsdb.TableColumn{ + 
Text: c.Name, + }) + } + + table.Rows = make([]tsdb.RowValues, 0) + for _, r := range t.GetRows() { + row := tsdb.RowValues{} + for _, rv := range r.Values { + mappedRw, err := tw.mapRowValue(rv) + if err != nil { + return nil, err + } + + row = append(row, mappedRw) + } + table.Rows = append(table.Rows, row) + } + + return table, nil +} +func (tw *DatasourcePluginWrapper) mapRowValue(rv *datasource.RowValue) (interface{}, error) { + switch rv.Kind { + case datasource.RowValue_TYPE_NULL: + return nil, nil + case datasource.RowValue_TYPE_INT64: + return rv.Int64Value, nil + case datasource.RowValue_TYPE_BOOL: + return rv.BoolValue, nil + case datasource.RowValue_TYPE_STRING: + return rv.StringValue, nil + case datasource.RowValue_TYPE_DOUBLE: + return rv.DoubleValue, nil + case datasource.RowValue_TYPE_BYTES: + return rv.BytesValue, nil + default: + return nil, fmt.Errorf("Unsupported row value %v from plugin", rv.Kind) + } +} diff --git a/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go new file mode 100644 index 00000000000..834e8238e3a --- /dev/null +++ b/pkg/plugins/datasource/wrapper/datasource_plugin_wrapper_test.go @@ -0,0 +1,109 @@ +package wrapper + +import ( + "testing" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana_plugin_model/go/datasource" +) + +func TestMapTables(t *testing.T) { + dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) + var qr = &datasource.QueryResult{} + qr.Tables = append(qr.Tables, &datasource.Table{ + Columns: []*datasource.TableColumn{}, + Rows: nil, + }) + want := []*tsdb.Table{{}} + + have, err := dpw.mapTables(qr) + if err != nil { + t.Errorf("failed to map tables. 
error: %v", err) + } + if len(want) != len(have) { + t.Errorf("could not map all tables") + } +} + +func TestMapTable(t *testing.T) { + dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) + + source := &datasource.Table{ + Columns: []*datasource.TableColumn{{Name: "column1"}, {Name: "column2"}}, + Rows: []*datasource.TableRow{{ + Values: []*datasource.RowValue{ + { + Kind: datasource.RowValue_TYPE_BOOL, + BoolValue: true, + }, + { + Kind: datasource.RowValue_TYPE_INT64, + Int64Value: 42, + }, + }, + }}, + } + + want := &tsdb.Table{ + Columns: []tsdb.TableColumn{{Text: "column1"}, {Text: "column2"}}, + } + have, err := dpw.mapTable(source) + if err != nil { + t.Fatalf("failed to map table. error: %v", err) + } + + for i := range have.Columns { + if want.Columns[i] != have.Columns[i] { + t.Fatalf("have column: %s, want %s", have, want) + } + } + + if len(have.Rows) != 1 { + t.Fatalf("Expects one row but got %d", len(have.Rows)) + } + + rowValuesCount := len(have.Rows[0]) + if rowValuesCount != 2 { + t.Fatalf("Expects two row values, got %d", rowValuesCount) + } +} + +func TestMappingRowValue(t *testing.T) { + dpw := NewDatasourcePluginWrapper(log.New("test-logger"), nil) + + boolRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_BOOL, BoolValue: true}) + haveBool, ok := boolRowValue.(bool) + if !ok || haveBool != true { + t.Fatalf("Expected true, was %v", haveBool) + } + + intRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_INT64, Int64Value: 42}) + haveInt, ok := intRowValue.(int64) + if !ok || haveInt != 42 { + t.Fatalf("Expected %d, was %d", 42, haveInt) + } + + stringRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_STRING, StringValue: "grafana"}) + haveString, ok := stringRowValue.(string) + if !ok || haveString != "grafana" { + t.Fatalf("Expected %s, was %s", "grafana", haveString) + } + + doubleRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: 
datasource.RowValue_TYPE_DOUBLE, DoubleValue: 1.5}) + haveDouble, ok := doubleRowValue.(float64) + if !ok || haveDouble != 1.5 { + t.Fatalf("Expected %v, was %v", 1.5, haveDouble) + } + + bytesRowValue, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_BYTES, BytesValue: []byte{66}}) + haveBytes, ok := bytesRowValue.([]byte) + if !ok || len(haveBytes) != 1 || haveBytes[0] != 66 { + t.Fatalf("Expected %v, was %v", []byte{66}, haveBytes) + } + + haveNil, _ := dpw.mapRowValue(&datasource.RowValue{Kind: datasource.RowValue_TYPE_NULL}) + if haveNil != nil { + t.Fatalf("Expected %v, was %v", nil, haveNil) + } +} diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index b36d7081dfe..37ce175efe4 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -1,9 +1,23 @@ package plugins import ( + "context" "encoding/json" + "fmt" "os" + "os/exec" + "path" "path/filepath" + "runtime" + "strings" + "time" + + "github.com/grafana/grafana/pkg/log" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/plugins/datasource/wrapper" + "github.com/grafana/grafana/pkg/tsdb" + "github.com/grafana/grafana_plugin_model/go/datasource" + plugin "github.com/hashicorp/go-plugin" ) type DataSourcePlugin struct { @@ -16,6 +30,12 @@ type DataSourcePlugin struct { Mixed bool `json:"mixed,omitempty"` HasQueryHelp bool `json:"hasQueryHelp,omitempty"` Routes []*AppPluginRoute `json:"routes"` + + Backend bool `json:"backend,omitempty"` + Executable string `json:"executable,omitempty"` + + log log.Logger + client *plugin.Client } func (p *DataSourcePlugin) Load(decoder *json.Decoder, pluginDir string) error { @@ -39,3 +59,87 @@ func (p *DataSourcePlugin) Load(decoder *json.Decoder, pluginDir string) error { DataSources[p.Id] = p return nil } + +var handshakeConfig = plugin.HandshakeConfig{ + ProtocolVersion: 1, + MagicCookieKey: "grafana_plugin_type", + MagicCookieValue: "datasource", +} + +func 
composeBinaryName(executable, os, arch string) string { + var extension string + os = strings.ToLower(os) + if os == "windows" { + extension = ".exe" + } + + return fmt.Sprintf("%s_%s_%s%s", executable, os, strings.ToLower(arch), extension) +} + +func (p *DataSourcePlugin) initBackendPlugin(ctx context.Context, log log.Logger) error { + p.log = log.New("plugin-id", p.Id) + + err := p.spawnSubProcess() + if err == nil { + go p.restartKilledProcess(ctx) + } + + return err +} + +func (p *DataSourcePlugin) spawnSubProcess() error { + cmd := composeBinaryName(p.Executable, runtime.GOOS, runtime.GOARCH) + fullpath := path.Join(p.PluginDir, cmd) + + p.client = plugin.NewClient(&plugin.ClientConfig{ + HandshakeConfig: handshakeConfig, + Plugins: map[string]plugin.Plugin{p.Id: &datasource.DatasourcePluginImpl{}}, + Cmd: exec.Command(fullpath), + AllowedProtocols: []plugin.Protocol{plugin.ProtocolGRPC}, + Logger: LogWrapper{Logger: p.log}, + }) + + rpcClient, err := p.client.Client() + if err != nil { + return err + } + + raw, err := rpcClient.Dispense(p.Id) + if err != nil { + return err + } + + plugin := raw.(datasource.DatasourcePlugin) + + tsdb.RegisterTsdbQueryEndpoint(p.Id, func(dsInfo *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { + return wrapper.NewDatasourcePluginWrapper(p.log, plugin), nil + }) + + return nil +} + +func (p *DataSourcePlugin) restartKilledProcess(ctx context.Context) error { + ticker := time.NewTicker(time.Second * 1) + + for { + select { + case <-ctx.Done(): + return ctx.Err() + case <-ticker.C: + if p.client.Exited() { + err := p.spawnSubProcess() + p.log.Debug("Spawning new sub process", "name", p.Name, "id", p.Id) + if err != nil { + p.log.Error("Failed to spawn subprocess") + } + } + } + } +} + +func (p *DataSourcePlugin) Kill() { + if p.client != nil { + p.log.Debug("Killing subprocess ", "name", p.Name) + p.client.Kill() + } +} diff --git a/pkg/plugins/datasource_plugin_test.go b/pkg/plugins/datasource_plugin_test.go new file mode 
100644 index 00000000000..147f0310f5c --- /dev/null +++ b/pkg/plugins/datasource_plugin_test.go @@ -0,0 +1,35 @@ +package plugins + +import ( + "testing" +) + +func TestComposeBinaryName(t *testing.T) { + tests := []struct { + name string + os string + arch string + + expectedPath string + }{ + { + name: "simple-json", + os: "linux", + arch: "amd64", + expectedPath: `simple-json_linux_amd64`, + }, + { + name: "simple-json", + os: "windows", + arch: "amd64", + expectedPath: `simple-json_windows_amd64.exe`, + }, + } + + for _, v := range tests { + have := composeBinaryName(v.name, v.os, v.arch) + if have != v.expectedPath { + t.Errorf("expected %s got %s", v.expectedPath, have) + } + } +} diff --git a/pkg/plugins/frontend_plugin.go b/pkg/plugins/frontend_plugin.go index 90c18d90044..04af4060169 100644 --- a/pkg/plugins/frontend_plugin.go +++ b/pkg/plugins/frontend_plugin.go @@ -40,7 +40,7 @@ func getPluginLogoUrl(pluginType, path, baseUrl string) string { } func (fp *FrontendPluginBase) setPathsBasedOnApp(app *AppPlugin) { - appSubPath := strings.Replace(strings.Replace(fp.PluginDir, app.PluginDir, "", 1), "\\", "/", 1) + appSubPath := strings.Replace(strings.Replace(fp.PluginDir, app.PluginDir, "", 1), "\\", "/", -1) fp.IncludedInAppId = app.Id fp.BaseUrl = app.BaseUrl diff --git a/pkg/plugins/frontend_plugin_test.go b/pkg/plugins/frontend_plugin_test.go index 304cd38deae..311f9fcc510 100644 --- a/pkg/plugins/frontend_plugin_test.go +++ b/pkg/plugins/frontend_plugin_test.go @@ -14,7 +14,7 @@ func TestFrontendPlugin(t *testing.T) { fp := &FrontendPluginBase{ PluginBase: PluginBase{ - PluginDir: "c:\\grafana\\public\\app\\plugins\\app\\testdata\\datasource", + PluginDir: "c:\\grafana\\public\\app\\plugins\\app\\testdata\\datasources\\datasource", BaseUrl: "fpbase", }, } @@ -29,6 +29,6 @@ func TestFrontendPlugin(t *testing.T) { } fp.setPathsBasedOnApp(app) - So(fp.Module, ShouldEqual, "app/plugins/app/testdata/datasource/module") + So(fp.Module, ShouldEqual, 
"app/plugins/app/testdata/datasources/datasource/module") }) } diff --git a/pkg/plugins/hclog-wrapper.go b/pkg/plugins/hclog-wrapper.go new file mode 100644 index 00000000000..cab7fc1a373 --- /dev/null +++ b/pkg/plugins/hclog-wrapper.go @@ -0,0 +1,49 @@ +package plugins + +import ( + "log" + + glog "github.com/grafana/grafana/pkg/log" + hclog "github.com/hashicorp/go-hclog" +) + +type LogWrapper struct { + Logger glog.Logger +} + +func (lw LogWrapper) Trace(msg string, args ...interface{}) { + lw.Logger.Debug(msg, args...) +} +func (lw LogWrapper) Debug(msg string, args ...interface{}) { + lw.Logger.Debug(msg, args...) +} +func (lw LogWrapper) Info(msg string, args ...interface{}) { + lw.Logger.Info(msg, args...) +} +func (lw LogWrapper) Warn(msg string, args ...interface{}) { + lw.Logger.Warn(msg, args...) +} +func (lw LogWrapper) Error(msg string, args ...interface{}) { + lw.Logger.Error(msg, args...) +} + +func (lw LogWrapper) IsTrace() bool { return true } +func (lw LogWrapper) IsDebug() bool { return true } +func (lw LogWrapper) IsInfo() bool { return true } +func (lw LogWrapper) IsWarn() bool { return true } +func (lw LogWrapper) IsError() bool { return true } + +func (lw LogWrapper) With(args ...interface{}) hclog.Logger { + return LogWrapper{Logger: lw.Logger.New(args...)} +} +func (lw LogWrapper) Named(name string) hclog.Logger { + return LogWrapper{Logger: lw.Logger.New()} +} + +func (lw LogWrapper) ResetNamed(name string) hclog.Logger { + return LogWrapper{Logger: lw.Logger.New()} +} + +func (lw LogWrapper) StandardLogger(ops *hclog.StandardLoggerOptions) *log.Logger { + return nil +} diff --git a/pkg/plugins/plugins.go b/pkg/plugins/plugins.go index 885bd5c9e03..417f565dd0c 100644 --- a/pkg/plugins/plugins.go +++ b/pkg/plugins/plugins.go @@ -1,6 +1,7 @@ package plugins import ( + "context" "encoding/json" "errors" "fmt" @@ -34,14 +35,41 @@ type PluginScanner struct { errors []error } -func Init() error { +type PluginManager struct { + log log.Logger +} 
+ +func NewPluginManager(ctx context.Context) (*PluginManager, error) { + err := initPlugins(ctx) + + if err != nil { + return nil, err + } + + return &PluginManager{ + log: log.New("plugins"), + }, nil +} + +func (p *PluginManager) Run(ctx context.Context) error { + <-ctx.Done() + + for _, p := range DataSources { + p.Kill() + } + + p.log.Info("Stopped Plugins", "error", ctx.Err()) + return ctx.Err() +} + +func initPlugins(ctx context.Context) error { plog = log.New("plugins") - DataSources = make(map[string]*DataSourcePlugin) - StaticRoutes = make([]*PluginStaticRoute, 0) - Panels = make(map[string]*PanelPlugin) - Apps = make(map[string]*AppPlugin) - Plugins = make(map[string]*PluginBase) + DataSources = map[string]*DataSourcePlugin{} + StaticRoutes = []*PluginStaticRoute{} + Panels = map[string]*PanelPlugin{} + Apps = map[string]*AppPlugin{} + Plugins = map[string]*PluginBase{} PluginTypes = map[string]interface{}{ "panel": PanelPlugin{}, "datasource": DataSourcePlugin{}, @@ -53,9 +81,8 @@ func Init() error { // check if plugins dir exists if _, err := os.Stat(setting.PluginsPath); os.IsNotExist(err) { - plog.Warn("Plugin dir does not exist", "dir", setting.PluginsPath) if err = os.MkdirAll(setting.PluginsPath, os.ModePerm); err != nil { - plog.Warn("Failed to create plugin dir", "dir", setting.PluginsPath, "error", err) + plog.Error("Failed to create plugin dir", "dir", setting.PluginsPath, "error", err) } else { plog.Info("Plugin dir created", "dir", setting.PluginsPath) scan(setting.PluginsPath) @@ -70,9 +97,18 @@ func Init() error { for _, panel := range Panels { panel.initFrontendPlugin() } - for _, panel := range DataSources { - panel.initFrontendPlugin() + + for _, ds := range DataSources { + if ds.Backend { + err := ds.initBackendPlugin(ctx, plog) + if err != nil { + plog.Error("Failed to init plugin.", "error", err, "plugin", ds.Id) + } + } + + ds.initFrontendPlugin() } + for _, app := range Apps { app.initApp() } diff --git 
a/pkg/plugins/plugins_test.go b/pkg/plugins/plugins_test.go index f2dbc1e2e82..4d3ccb4502b 100644 --- a/pkg/plugins/plugins_test.go +++ b/pkg/plugins/plugins_test.go @@ -1,6 +1,7 @@ package plugins import ( + "context" "path/filepath" "testing" @@ -14,7 +15,7 @@ func TestPluginScans(t *testing.T) { Convey("When scaning for plugins", t, func() { setting.StaticRootPath, _ = filepath.Abs("../../public/") setting.Cfg = ini.Empty() - err := Init() + err := initPlugins(context.Background()) So(err, ShouldBeNil) So(len(DataSources), ShouldBeGreaterThan, 1) @@ -29,7 +30,7 @@ func TestPluginScans(t *testing.T) { setting.Cfg = ini.Empty() sec, _ := setting.Cfg.NewSection("plugin.nginx-app") sec.NewKey("path", "../../tests/test-app") - err := Init() + err := initPlugins(context.Background()) So(err, ShouldBeNil) So(len(Apps), ShouldBeGreaterThan, 0) diff --git a/pkg/plugins/update_checker.go b/pkg/plugins/update_checker.go index a2594ad915a..68ccdeaf840 100644 --- a/pkg/plugins/update_checker.go +++ b/pkg/plugins/update_checker.go @@ -63,7 +63,7 @@ func checkForUpdates() { resp, err := httpClient.Get("https://grafana.com/api/plugins/versioncheck?slugIn=" + pluginSlugs + "&grafanaVersion=" + setting.BuildVersion) if err != nil { - log.Trace("Failed to get plugins repo from grafana.net, %v", err.Error()) + log.Trace("Failed to get plugins repo from grafana.com, %v", err.Error()) return } @@ -101,7 +101,7 @@ func checkForUpdates() { resp2, err := httpClient.Get("https://raw.githubusercontent.com/grafana/grafana/master/latest.json") if err != nil { - log.Trace("Failed to get latest.json repo from github: %v", err.Error()) + log.Trace("Failed to get latest.json repo from github.com: %v", err.Error()) return } @@ -115,7 +115,7 @@ func checkForUpdates() { var githubLatest GithubLatest err = json.Unmarshal(body, &githubLatest) if err != nil { - log.Trace("Failed to unmarshal github latest, reading response from github: %v", err.Error()) + log.Trace("Failed to unmarshal github.com 
latest, reading response from github.com: %v", err.Error()) return } diff --git a/pkg/services/alerting/commands.go b/pkg/services/alerting/commands.go index 62671a559fa..2c145614751 100644 --- a/pkg/services/alerting/commands.go +++ b/pkg/services/alerting/commands.go @@ -5,24 +5,12 @@ import ( m "github.com/grafana/grafana/pkg/models" ) -type UpdateDashboardAlertsCommand struct { - UserId int64 - OrgId int64 - Dashboard *m.Dashboard -} - -type ValidateDashboardAlertsCommand struct { - UserId int64 - OrgId int64 - Dashboard *m.Dashboard -} - func init() { bus.AddHandler("alerting", updateDashboardAlerts) bus.AddHandler("alerting", validateDashboardAlerts) } -func validateDashboardAlerts(cmd *ValidateDashboardAlertsCommand) error { +func validateDashboardAlerts(cmd *m.ValidateDashboardAlertsCommand) error { extractor := NewDashAlertExtractor(cmd.Dashboard, cmd.OrgId) if _, err := extractor.GetAlerts(); err != nil { @@ -32,7 +20,7 @@ func validateDashboardAlerts(cmd *ValidateDashboardAlertsCommand) error { return nil } -func updateDashboardAlerts(cmd *UpdateDashboardAlertsCommand) error { +func updateDashboardAlerts(cmd *m.UpdateDashboardAlertsCommand) error { saveAlerts := m.SaveAlertsCommand{ OrgId: cmd.OrgId, UserId: cmd.UserId, diff --git a/pkg/services/alerting/eval_context.go b/pkg/services/alerting/eval_context.go index f5663deb8ca..d598203d675 100644 --- a/pkg/services/alerting/eval_context.go +++ b/pkg/services/alerting/eval_context.go @@ -12,17 +12,19 @@ import ( ) type EvalContext struct { - Firing bool - IsTestRun bool - EvalMatches []*EvalMatch - Logs []*ResultLogEntry - Error error - ConditionEvals string - StartTime time.Time - EndTime time.Time - Rule *Rule - log log.Logger - dashboardSlug string + Firing bool + IsTestRun bool + EvalMatches []*EvalMatch + Logs []*ResultLogEntry + Error error + ConditionEvals string + StartTime time.Time + EndTime time.Time + Rule *Rule + log log.Logger + + dashboardRef *m.DashboardRef + ImagePublicUrl string 
ImageOnDiskPath string NoDataFound bool @@ -83,29 +85,30 @@ func (c *EvalContext) GetNotificationTitle() string { return "[" + c.GetStateModel().Text + "] " + c.Rule.Name } -func (c *EvalContext) GetDashboardSlug() (string, error) { - if c.dashboardSlug != "" { - return c.dashboardSlug, nil +func (c *EvalContext) GetDashboardUID() (*m.DashboardRef, error) { + if c.dashboardRef != nil { + return c.dashboardRef, nil } - slugQuery := &m.GetDashboardSlugByIdQuery{Id: c.Rule.DashboardId} - if err := bus.Dispatch(slugQuery); err != nil { - return "", err + uidQuery := &m.GetDashboardRefByIdQuery{Id: c.Rule.DashboardId} + if err := bus.Dispatch(uidQuery); err != nil { + return nil, err } - c.dashboardSlug = slugQuery.Result - return c.dashboardSlug, nil + c.dashboardRef = uidQuery.Result + return c.dashboardRef, nil } +const urlFormat = "%s?fullscreen=true&edit=true&tab=alert&panelId=%d&orgId=%d" + func (c *EvalContext) GetRuleUrl() (string, error) { if c.IsTestRun { return setting.AppUrl, nil } - if slug, err := c.GetDashboardSlug(); err != nil { + if ref, err := c.GetDashboardUID(); err != nil { return "", err } else { - ruleUrl := fmt.Sprintf("%sdashboard/db/%s?fullscreen&edit&tab=alert&panelId=%d&orgId=%d", setting.AppUrl, slug, c.Rule.PanelId, c.Rule.OrgId) - return ruleUrl, nil + return fmt.Sprintf(urlFormat, m.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelId, c.Rule.OrgId), nil } } diff --git a/pkg/services/alerting/extractor.go b/pkg/services/alerting/extractor.go index a609824cbc8..5206c81642e 100644 --- a/pkg/services/alerting/extractor.go +++ b/pkg/services/alerting/extractor.go @@ -143,10 +143,15 @@ func (e *DashAlertExtractor) GetAlertFromPanels(jsonWithPanels *simplejson.Json) // validate _, err = NewRuleFromDBAlert(alert) - if err == nil && alert.ValidToSave() { + if err != nil { + return nil, err + } + + if alert.ValidToSave() { alerts = append(alerts, alert) } else { - return nil, err + e.log.Debug("Invalid Alert Data. 
Dashboard, Org or Panel ID is not correct", "alertName", alert.Name, "panelId", alert.PanelId) + return nil, m.ErrDashboardContainsInvalidAlertData } } diff --git a/pkg/services/alerting/extractor_test.go b/pkg/services/alerting/extractor_test.go index 71f3026025d..f8b678e66bd 100644 --- a/pkg/services/alerting/extractor_test.go +++ b/pkg/services/alerting/extractor_test.go @@ -150,6 +150,22 @@ func TestAlertRuleExtraction(t *testing.T) { }) }) + Convey("Panel with id set to zero should return error", func() { + panelWithIdZero, err := ioutil.ReadFile("./test-data/panel-with-id-0.json") + So(err, ShouldBeNil) + + dashJson, err := simplejson.NewJson([]byte(panelWithIdZero)) + So(err, ShouldBeNil) + dash := m.NewDashboardFromJson(dashJson) + extractor := NewDashAlertExtractor(dash, 1) + + _, err = extractor.GetAlerts() + + Convey("panel with id 0 should return error", func() { + So(err, ShouldNotBeNil) + }) + }) + Convey("Parse alerts from dashboard without rows", func() { json, err := ioutil.ReadFile("./test-data/v5-dashboard.json") So(err, ShouldBeNil) diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index 47c9e0c590e..af9ba52a52a 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -87,10 +87,10 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { IsAlertContext: true, } - if slug, err := context.GetDashboardSlug(); err != nil { + if ref, err := context.GetDashboardUID(); err != nil { return err } else { - renderOpts.Path = fmt.Sprintf("dashboard-solo/db/%s?&panelId=%d", slug, context.Rule.PanelId) + renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?panelId=%d", ref.Uid, ref.Slug, context.Rule.PanelId) } if imagePath, err := renderer.RenderToPng(renderOpts); err != nil { diff --git a/pkg/services/alerting/notifiers/email.go b/pkg/services/alerting/notifiers/email.go index f84dc886d83..095f7c7156a 100644 --- a/pkg/services/alerting/notifiers/email.go +++ 
b/pkg/services/alerting/notifiers/email.go @@ -20,7 +20,7 @@ func init() { OptionsTemplate: `

Email addresses

- +
You can enter multiple email addresses using a ";" separator diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 1a812f49ca3..863b4f1c286 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -23,6 +23,10 @@ func init() { API Key
+
+ Alert API Url + +
%s\nState: %s\nMessage: %s\n", evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message) ruleUrl, err := evalContext.GetRuleUrl() if err == nil { message = message + fmt.Sprintf("URL: %s\n", ruleUrl) } + if evalContext.ImagePublicUrl != "" { message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicUrl) } + metrics := generateMetricsMessage(evalContext) + if metrics != "" { + message = message + fmt.Sprintf("\nMetrics:%s", metrics) + } + + cmd := this.generateTelegramCmd(message, "text", "sendMessage", func(w *multipart.Writer) { + fw, _ := w.CreateFormField("parse_mode") + fw.Write([]byte("html")) + }) + return cmd +} + +func (this *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.EvalContext) (*m.SendWebhookSync, error) { + var imageFile *os.File + var err error + + imageFile, err = os.Open(evalContext.ImageOnDiskPath) + defer imageFile.Close() + if err != nil { + return nil, err + } + + ruleUrl, err := evalContext.GetRuleUrl() + + metrics := generateMetricsMessage(evalContext) + message := generateImageCaption(evalContext, ruleUrl, metrics) + + cmd := this.generateTelegramCmd(message, "caption", "sendPhoto", func(w *multipart.Writer) { + fw, _ := w.CreateFormFile("photo", evalContext.ImageOnDiskPath) + io.Copy(fw, imageFile) + }) + return cmd, nil +} + +func (this *TelegramNotifier) generateTelegramCmd(message string, messageField string, apiAction string, extraConf func(writer *multipart.Writer)) *m.SendWebhookSync { + var body bytes.Buffer + w := multipart.NewWriter(&body) + + fw, _ := w.CreateFormField("chat_id") + fw.Write([]byte(this.ChatID)) + + fw, _ = w.CreateFormField(messageField) + fw.Write([]byte(message)) + + extraConf(w) + + w.Close() + + this.log.Info("Sending telegram notification", "chat_id", this.ChatID, "bot_token", this.BotToken, "apiAction", apiAction) + url := fmt.Sprintf(telegramApiUrl, this.BotToken, apiAction) + + cmd := &m.SendWebhookSync{ + Url: url, + Body: body.String(), + 
HttpMethod: "POST", + HttpHeader: map[string]string{ + "Content-Type": w.FormDataContentType(), + }, + } + return cmd +} + +func generateMetricsMessage(evalContext *alerting.EvalContext) string { metrics := "" fieldLimitCount := 4 for index, evt := range evalContext.EvalMatches { @@ -107,19 +181,51 @@ func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { break } } - if metrics != "" { - message = message + fmt.Sprintf("\nMetrics:%s", metrics) + return metrics +} + +func generateImageCaption(evalContext *alerting.EvalContext, ruleUrl string, metrics string) string { + message := evalContext.GetNotificationTitle() + + if len(evalContext.Rule.Message) > 0 { + message = fmt.Sprintf("%s\nMessage: %s", message, evalContext.Rule.Message) } - bodyJSON.Set("text", message) + if len(message) > captionLengthLimit { + message = message[0:captionLengthLimit] - url := fmt.Sprintf(telegeramApiUrl, this.BotToken, "sendMessage") - body, _ := bodyJSON.MarshalJSON() + } - cmd := &m.SendWebhookSync{ - Url: url, - Body: string(body), - HttpMethod: "POST", + if len(ruleUrl) > 0 { + urlLine := fmt.Sprintf("\nURL: %s", ruleUrl) + message = appendIfPossible(message, urlLine, captionLengthLimit) + } + + if metrics != "" { + metricsLines := fmt.Sprintf("\n\nMetrics:%s", metrics) + message = appendIfPossible(message, metricsLines, captionLengthLimit) + } + + return message +} +func appendIfPossible(message string, extra string, sizeLimit int) string { + if len(extra)+len(message) <= sizeLimit { + return message + extra + } + log.Debug("Line too long for image caption.", "value", extra) + return message +} + +func (this *TelegramNotifier) ShouldNotify(context *alerting.EvalContext) bool { + return defaultShouldNotify(context) +} + +func (this *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { + var cmd *m.SendWebhookSync + if evalContext.ImagePublicUrl == "" && this.UploadImage == true { + cmd = this.buildMessage(evalContext, true) + } else { + cmd = 
this.buildMessage(evalContext, false) } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { diff --git a/pkg/services/alerting/notifiers/telegram_test.go b/pkg/services/alerting/notifiers/telegram_test.go index 3e8066e273b..05be787dced 100644 --- a/pkg/services/alerting/notifiers/telegram_test.go +++ b/pkg/services/alerting/notifiers/telegram_test.go @@ -5,6 +5,7 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/alerting" . "github.com/smartystreets/goconvey/convey" ) @@ -50,6 +51,71 @@ func TestTelegramNotifier(t *testing.T) { So(telegramNotifier.ChatID, ShouldEqual, "-1234567890") }) + Convey("generateCaption should generate a message with all pertinent details", func() { + evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) + + caption := generateImageCaption(evalContext, "http://grafa.url/abcdef", "") + So(len(caption), ShouldBeLessThanOrEqualTo, 200) + So(caption, ShouldContainSubstring, "Some kind of message.") + So(caption, ShouldContainSubstring, "[OK] This is an alarm") + So(caption, ShouldContainSubstring, "http://grafa.url/abcdef") + }) + + Convey("When generating a message", func() { + + Convey("URL should be skipped if it's too long", func() { + evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message.", + State: m.AlertStateOK, + }) + + caption := generateImageCaption(evalContext, + "http://grafa.url/abcdefaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "foo bar") + So(len(caption), ShouldBeLessThanOrEqualTo, 200) + So(caption, ShouldContainSubstring, "Some kind of message.") + So(caption, ShouldContainSubstring, "[OK] This is an alarm") + 
So(caption, ShouldContainSubstring, "foo bar") + So(caption, ShouldNotContainSubstring, "http") + }) + + Convey("Message should be trimmed if it's too long", func() { + evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise I will. Yes siree that's it.", + State: m.AlertStateOK, + }) + + caption := generateImageCaption(evalContext, + "http://grafa.url/foo", + "") + So(len(caption), ShouldBeLessThanOrEqualTo, 200) + So(caption, ShouldContainSubstring, "[OK] This is an alarm") + So(caption, ShouldNotContainSubstring, "http") + So(caption, ShouldContainSubstring, "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I promise ") + }) + + Convey("Metrics should be skipped if they dont fit", func() { + evalContext := alerting.NewEvalContext(nil, &alerting.Rule{ + Name: "This is an alarm", + Message: "Some kind of message that is too long for appending to our pretty little message, this line is actually exactly 197 chars long and I will get there in the end I ", + State: m.AlertStateOK, + }) + + caption := generateImageCaption(evalContext, + "http://grafa.url/foo", + "foo bar long song") + So(len(caption), ShouldBeLessThanOrEqualTo, 200) + So(caption, ShouldContainSubstring, "[OK] This is an alarm") + So(caption, ShouldNotContainSubstring, "http") + So(caption, ShouldNotContainSubstring, "foo bar") + }) + }) }) }) } diff --git a/pkg/services/alerting/test-data/panel-with-id-0.json b/pkg/services/alerting/test-data/panel-with-id-0.json new file mode 100644 index 00000000000..d1f314a4f55 --- /dev/null +++ b/pkg/services/alerting/test-data/panel-with-id-0.json @@ -0,0 +1,63 @@ +{ + "id": 57, + "title": "Graphite 4", + "originalTitle": "Graphite 4", + "tags": 
["graphite"], + "rows": [ + { + "panels": [ + { + "title": "Active desktop users", + "id": 0, + "editable": true, + "type": "graph", + "targets": [ + { + "refId": "A", + "target": "aliasByNode(statsd.fakesite.counters.session_start.desktop.count, 4)" + } + ], + "datasource": null, + "alert": { + "name": "name1", + "message": "desc1", + "handler": 1, + "frequency": "60s", + "conditions": [ + { + "type": "query", + "query": {"params": ["A", "5m", "now"]}, + "reducer": {"type": "avg", "params": []}, + "evaluator": {"type": ">", "params": [100]} + } + ] + } + }, + { + "title": "Active mobile users", + "id": 4, + "targets": [ + {"refId": "A", "target": ""}, + {"refId": "B", "target": "aliasByNode(statsd.fakesite.counters.session_start.mobile.count, 4)"} + ], + "datasource": "graphite2", + "alert": { + "name": "name2", + "message": "desc2", + "handler": 0, + "frequency": "60s", + "severity": "warning", + "conditions": [ + { + "type": "query", + "query": {"params": ["B", "5m", "now"]}, + "reducer": {"type": "avg", "params": []}, + "evaluator": {"type": ">", "params": [100]} + } + ] + } + } + ] + } +] + } diff --git a/pkg/services/alerting/ticker_test.go b/pkg/services/alerting/ticker_test.go index d4a5b958cdb..6670ee960a5 100644 --- a/pkg/services/alerting/ticker_test.go +++ b/pkg/services/alerting/ticker_test.go @@ -1,121 +1,121 @@ package alerting -import ( - "testing" - "time" - - "github.com/benbjohnson/clock" -) - -func inspectTick(tick time.Time, last time.Time, offset time.Duration, t *testing.T) { - if !tick.Equal(last.Add(time.Duration(1) * time.Second)) { - t.Fatalf("expected a tick 1 second more than prev, %s. 
got: %s", last, tick) - } -} - -// returns the new last tick seen -func assertAdvanceUntil(ticker *Ticker, last, desiredLast time.Time, offset, wait time.Duration, t *testing.T) time.Time { - for { - select { - case tick := <-ticker.C: - inspectTick(tick, last, offset, t) - last = tick - case <-time.NewTimer(wait).C: - if last.Before(desiredLast) { - t.Fatalf("waited %s for ticker to advance to %s, but only went up to %s", wait, desiredLast, last) - } - if last.After(desiredLast) { - t.Fatalf("timer advanced too far. should only have gone up to %s, but it went up to %s", desiredLast, last) - } - return last - } - } -} - -func assertNoAdvance(ticker *Ticker, desiredLast time.Time, wait time.Duration, t *testing.T) { - for { - select { - case tick := <-ticker.C: - t.Fatalf("timer should have stayed at %s, instead it advanced to %s", desiredLast, tick) - case <-time.NewTimer(wait).C: - return - } - } -} - -func TestTickerRetro1Hour(t *testing.T) { - offset := time.Duration(10) * time.Second - last := time.Unix(0, 0) - mock := clock.NewMock() - mock.Add(time.Duration(1) * time.Hour) - desiredLast := mock.Now().Add(-offset) - ticker := NewTicker(last, offset, mock) - - last = assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(10)*time.Millisecond, t) - assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) - -} - -func TestAdvanceWithUpdateOffset(t *testing.T) { - offset := time.Duration(10) * time.Second - last := time.Unix(0, 0) - mock := clock.NewMock() - mock.Add(time.Duration(1) * time.Hour) - desiredLast := mock.Now().Add(-offset) - ticker := NewTicker(last, offset, mock) - - last = assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(10)*time.Millisecond, t) - assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) - - // lowering offset should see a few more ticks - offset = time.Duration(5) * time.Second - ticker.updateOffset(offset) - desiredLast = mock.Now().Add(-offset) - last = 
assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(9)*time.Millisecond, t) - assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) - - // advancing clock should see even more ticks - mock.Add(time.Duration(1) * time.Hour) - desiredLast = mock.Now().Add(-offset) - last = assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(8)*time.Millisecond, t) - assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) - -} - -func getCase(lastSeconds, offsetSeconds int) (time.Time, time.Duration) { - last := time.Unix(int64(lastSeconds), 0) - offset := time.Duration(offsetSeconds) * time.Second - return last, offset -} - -func TestTickerNoAdvance(t *testing.T) { - - // it's 00:01:00 now. what are some cases where we don't want the ticker to advance? - mock := clock.NewMock() - mock.Add(time.Duration(60) * time.Second) - - type Case struct { - last int - offset int - } - - // note that some cases add up to now, others go into the future - cases := []Case{ - {50, 10}, - {50, 30}, - {59, 1}, - {59, 10}, - {59, 30}, - {60, 1}, - {60, 10}, - {60, 30}, - {90, 1}, - {90, 10}, - {90, 30}, - } - for _, c := range cases { - last, offset := getCase(c.last, c.offset) - ticker := NewTicker(last, offset, mock) - assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) - } -} +//import ( +// "testing" +// "time" +// +// "github.com/benbjohnson/clock" +//) +// +//func inspectTick(tick time.Time, last time.Time, offset time.Duration, t *testing.T) { +// if !tick.Equal(last.Add(time.Duration(1) * time.Second)) { +// t.Fatalf("expected a tick 1 second more than prev, %s. 
got: %s", last, tick) +// } +//} +// +//// returns the new last tick seen +//func assertAdvanceUntil(ticker *Ticker, last, desiredLast time.Time, offset, wait time.Duration, t *testing.T) time.Time { +// for { +// select { +// case tick := <-ticker.C: +// inspectTick(tick, last, offset, t) +// last = tick +// case <-time.NewTimer(wait).C: +// if last.Before(desiredLast) { +// t.Fatalf("waited %s for ticker to advance to %s, but only went up to %s", wait, desiredLast, last) +// } +// if last.After(desiredLast) { +// t.Fatalf("timer advanced too far. should only have gone up to %s, but it went up to %s", desiredLast, last) +// } +// return last +// } +// } +//} +// +//func assertNoAdvance(ticker *Ticker, desiredLast time.Time, wait time.Duration, t *testing.T) { +// for { +// select { +// case tick := <-ticker.C: +// t.Fatalf("timer should have stayed at %s, instead it advanced to %s", desiredLast, tick) +// case <-time.NewTimer(wait).C: +// return +// } +// } +//} +// +//func TestTickerRetro1Hour(t *testing.T) { +// offset := time.Duration(10) * time.Second +// last := time.Unix(0, 0) +// mock := clock.NewMock() +// mock.Add(time.Duration(1) * time.Hour) +// desiredLast := mock.Now().Add(-offset) +// ticker := NewTicker(last, offset, mock) +// +// last = assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(10)*time.Millisecond, t) +// assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) +// +//} +// +//func TestAdvanceWithUpdateOffset(t *testing.T) { +// offset := time.Duration(10) * time.Second +// last := time.Unix(0, 0) +// mock := clock.NewMock() +// mock.Add(time.Duration(1) * time.Hour) +// desiredLast := mock.Now().Add(-offset) +// ticker := NewTicker(last, offset, mock) +// +// last = assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(10)*time.Millisecond, t) +// assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) +// +// // lowering offset should see a few more ticks +// offset = 
time.Duration(5) * time.Second +// ticker.updateOffset(offset) +// desiredLast = mock.Now().Add(-offset) +// last = assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(9)*time.Millisecond, t) +// assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) +// +// // advancing clock should see even more ticks +// mock.Add(time.Duration(1) * time.Hour) +// desiredLast = mock.Now().Add(-offset) +// last = assertAdvanceUntil(ticker, last, desiredLast, offset, time.Duration(8)*time.Millisecond, t) +// assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) +// +//} +// +//func getCase(lastSeconds, offsetSeconds int) (time.Time, time.Duration) { +// last := time.Unix(int64(lastSeconds), 0) +// offset := time.Duration(offsetSeconds) * time.Second +// return last, offset +//} +// +//func TestTickerNoAdvance(t *testing.T) { +// +// // it's 00:01:00 now. what are some cases where we don't want the ticker to advance? +// mock := clock.NewMock() +// mock.Add(time.Duration(60) * time.Second) +// +// type Case struct { +// last int +// offset int +// } +// +// // note that some cases add up to now, others go into the future +// cases := []Case{ +// {50, 10}, +// {50, 30}, +// {59, 1}, +// {59, 10}, +// {59, 30}, +// {60, 1}, +// {60, 10}, +// {60, 30}, +// {90, 1}, +// {90, 10}, +// {90, 30}, +// } +// for _, c := range cases { +// last, offset := getCase(c.last, c.offset) +// ticker := NewTicker(last, offset, mock) +// assertNoAdvance(ticker, last, time.Duration(500)*time.Millisecond, t) +// } +//} diff --git a/pkg/services/annotations/annotations.go b/pkg/services/annotations/annotations.go index 02f927a76ba..a6cd7a33318 100644 --- a/pkg/services/annotations/annotations.go +++ b/pkg/services/annotations/annotations.go @@ -10,14 +10,16 @@ type Repository interface { } type ItemQuery struct { - OrgId int64 `json:"orgId"` - From int64 `json:"from"` - To int64 `json:"to"` - AlertId int64 `json:"alertId"` - DashboardId int64 
`json:"dashboardId"` - PanelId int64 `json:"panelId"` - Tags []string `json:"tags"` - Type string `json:"type"` + OrgId int64 `json:"orgId"` + From int64 `json:"from"` + To int64 `json:"to"` + AlertId int64 `json:"alertId"` + DashboardId int64 `json:"dashboardId"` + PanelId int64 `json:"panelId"` + AnnotationId int64 `json:"annotationId"` + RegionId int64 `json:"regionId"` + Tags []string `json:"tags"` + Type string `json:"type"` Limit int64 `json:"limit"` } diff --git a/pkg/services/cleanup/cleanup.go b/pkg/services/cleanup/cleanup.go index 6e5e7684100..5e9efeea3b0 100644 --- a/pkg/services/cleanup/cleanup.go +++ b/pkg/services/cleanup/cleanup.go @@ -46,6 +46,7 @@ func (service *CleanUpService) start(ctx context.Context) error { service.cleanUpTmpFiles() service.deleteExpiredSnapshots() service.deleteExpiredDashboardVersions() + service.deleteOldLoginAttempts() case <-ctx.Done(): return ctx.Err() } @@ -82,9 +83,34 @@ func (service *CleanUpService) cleanUpTmpFiles() { } func (service *CleanUpService) deleteExpiredSnapshots() { - bus.Dispatch(&m.DeleteExpiredSnapshotsCommand{}) + cmd := m.DeleteExpiredSnapshotsCommand{} + if err := bus.Dispatch(&cmd); err != nil { + service.log.Error("Failed to delete expired snapshots", "error", err.Error()) + } else { + service.log.Debug("Deleted expired snapshots", "rows affected", cmd.DeletedRows) + } } func (service *CleanUpService) deleteExpiredDashboardVersions() { - bus.Dispatch(&m.DeleteExpiredVersionsCommand{}) + cmd := m.DeleteExpiredVersionsCommand{} + if err := bus.Dispatch(&cmd); err != nil { + service.log.Error("Failed to delete expired dashboard versions", "error", err.Error()) + } else { + service.log.Debug("Deleted old/expired dashboard versions", "rows affected", cmd.DeletedRows) + } +} + +func (service *CleanUpService) deleteOldLoginAttempts() { + if setting.DisableBruteForceLoginProtection { + return + } + + cmd := m.DeleteOldLoginAttemptsCommand{ + OlderThan: time.Now().Add(time.Minute * -10), + } + if err := 
bus.Dispatch(&cmd); err != nil { + service.log.Error("Problem deleting expired login attempts", "error", err.Error()) + } else { + service.log.Debug("Deleted expired login attempts", "rows affected", cmd.DeletedRows) + } } diff --git a/pkg/services/dashboards/dashboard_service.go b/pkg/services/dashboards/dashboard_service.go new file mode 100644 index 00000000000..1f39394d757 --- /dev/null +++ b/pkg/services/dashboards/dashboard_service.go @@ -0,0 +1,237 @@ +package dashboards + +import ( + "strings" + "time" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/guardian" + "github.com/grafana/grafana/pkg/util" +) + +// DashboardService service for operating on dashboards +type DashboardService interface { + SaveDashboard(dto *SaveDashboardDTO) (*models.Dashboard, error) +} + +// DashboardProvisioningService service for operating on provisioned dashboards +type DashboardProvisioningService interface { + SaveProvisionedDashboard(dto *SaveDashboardDTO, provisioning *models.DashboardProvisioning) (*models.Dashboard, error) + SaveFolderForProvisionedDashboards(*SaveDashboardDTO) (*models.Dashboard, error) + GetProvisionedDashboardData(name string) ([]*models.DashboardProvisioning, error) +} + +// NewService factory for creating a new dashboard service +var NewService = func() DashboardService { + return &dashboardServiceImpl{} +} + +// NewProvisioningService factory for creating a new dashboard provisioning service +var NewProvisioningService = func() DashboardProvisioningService { + return &dashboardServiceImpl{} +} + +type SaveDashboardDTO struct { + OrgId int64 + UpdatedAt time.Time + User *models.SignedInUser + Message string + Overwrite bool + Dashboard *models.Dashboard +} + +type dashboardServiceImpl struct { + orgId int64 + user *models.SignedInUser +} + +func (dr *dashboardServiceImpl) GetProvisionedDashboardData(name string) ([]*models.DashboardProvisioning, error) { + cmd := 
&models.GetProvisionedDashboardDataQuery{Name: name} + err := bus.Dispatch(cmd) + if err != nil { + return nil, err + } + + return cmd.Result, nil +} + +func (dr *dashboardServiceImpl) buildSaveDashboardCommand(dto *SaveDashboardDTO, validateAlerts bool) (*models.SaveDashboardCommand, error) { + dash := dto.Dashboard + + dash.Title = strings.TrimSpace(dash.Title) + dash.Data.Set("title", dash.Title) + dash.SetUid(strings.TrimSpace(dash.Uid)) + + if dash.Title == "" { + return nil, models.ErrDashboardTitleEmpty + } + + if dash.IsFolder && dash.FolderId > 0 { + return nil, models.ErrDashboardFolderCannotHaveParent + } + + if dash.IsFolder && strings.ToLower(dash.Title) == strings.ToLower(models.RootFolderName) { + return nil, models.ErrDashboardFolderNameExists + } + + if !util.IsValidShortUid(dash.Uid) { + return nil, models.ErrDashboardInvalidUid + } else if len(dash.Uid) > 40 { + return nil, models.ErrDashboardUidToLong + } + + if validateAlerts { + validateAlertsCmd := models.ValidateDashboardAlertsCommand{ + OrgId: dto.OrgId, + Dashboard: dash, + } + + if err := bus.Dispatch(&validateAlertsCmd); err != nil { + return nil, models.ErrDashboardContainsInvalidAlertData + } + } + + validateBeforeSaveCmd := models.ValidateDashboardBeforeSaveCommand{ + OrgId: dto.OrgId, + Dashboard: dash, + Overwrite: dto.Overwrite, + } + + if err := bus.Dispatch(&validateBeforeSaveCmd); err != nil { + return nil, err + } + + guard := guardian.New(dash.GetDashboardIdForSavePermissionCheck(), dto.OrgId, dto.User) + if canSave, err := guard.CanSave(); err != nil || !canSave { + if err != nil { + return nil, err + } + return nil, models.ErrDashboardUpdateAccessDenied + } + + cmd := &models.SaveDashboardCommand{ + Dashboard: dash.Data, + Message: dto.Message, + OrgId: dto.OrgId, + Overwrite: dto.Overwrite, + UserId: dto.User.UserId, + FolderId: dash.FolderId, + IsFolder: dash.IsFolder, + PluginId: dash.PluginId, + } + + if !dto.UpdatedAt.IsZero() { + cmd.UpdatedAt = dto.UpdatedAt + } + + 
return cmd, nil +} + +func (dr *dashboardServiceImpl) updateAlerting(cmd *models.SaveDashboardCommand, dto *SaveDashboardDTO) error { + alertCmd := models.UpdateDashboardAlertsCommand{ + OrgId: dto.OrgId, + UserId: dto.User.UserId, + Dashboard: cmd.Result, + } + + if err := bus.Dispatch(&alertCmd); err != nil { + return models.ErrDashboardFailedToUpdateAlertData + } + + return nil +} + +func (dr *dashboardServiceImpl) SaveProvisionedDashboard(dto *SaveDashboardDTO, provisioning *models.DashboardProvisioning) (*models.Dashboard, error) { + dto.User = &models.SignedInUser{ + UserId: 0, + OrgRole: models.ROLE_ADMIN, + } + cmd, err := dr.buildSaveDashboardCommand(dto, true) + if err != nil { + return nil, err + } + + saveCmd := &models.SaveProvisionedDashboardCommand{ + DashboardCmd: cmd, + DashboardProvisioning: provisioning, + } + + // dashboard + err = bus.Dispatch(saveCmd) + if err != nil { + return nil, err + } + + //alerts + err = dr.updateAlerting(cmd, dto) + if err != nil { + return nil, err + } + + return cmd.Result, nil +} + +func (dr *dashboardServiceImpl) SaveFolderForProvisionedDashboards(dto *SaveDashboardDTO) (*models.Dashboard, error) { + dto.User = &models.SignedInUser{ + UserId: 0, + OrgRole: models.ROLE_ADMIN, + } + cmd, err := dr.buildSaveDashboardCommand(dto, false) + if err != nil { + return nil, err + } + + err = bus.Dispatch(cmd) + if err != nil { + return nil, err + } + + err = dr.updateAlerting(cmd, dto) + if err != nil { + return nil, err + } + + return cmd.Result, nil +} + +func (dr *dashboardServiceImpl) SaveDashboard(dto *SaveDashboardDTO) (*models.Dashboard, error) { + cmd, err := dr.buildSaveDashboardCommand(dto, true) + if err != nil { + return nil, err + } + + err = bus.Dispatch(cmd) + if err != nil { + return nil, err + } + + err = dr.updateAlerting(cmd, dto) + if err != nil { + return nil, err + } + + return cmd.Result, nil +} + +type FakeDashboardService struct { + SaveDashboardResult *models.Dashboard + SaveDashboardError error + 
SavedDashboards []*SaveDashboardDTO +} + +func (s *FakeDashboardService) SaveDashboard(dto *SaveDashboardDTO) (*models.Dashboard, error) { + s.SavedDashboards = append(s.SavedDashboards, dto) + + if s.SaveDashboardResult == nil && s.SaveDashboardError == nil { + s.SaveDashboardResult = dto.Dashboard + } + + return s.SaveDashboardResult, s.SaveDashboardError +} + +func MockDashboardService(mock *FakeDashboardService) { + NewService = func() DashboardService { + return mock + } +} diff --git a/pkg/services/dashboards/dashboard_service_test.go b/pkg/services/dashboards/dashboard_service_test.go new file mode 100644 index 00000000000..965b10655b3 --- /dev/null +++ b/pkg/services/dashboards/dashboard_service_test.go @@ -0,0 +1,95 @@ +package dashboards + +import ( + "errors" + "testing" + + "github.com/grafana/grafana/pkg/services/guardian" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestDashboardService(t *testing.T) { + Convey("Dashboard service tests", t, func() { + service := dashboardServiceImpl{} + + origNewDashboardGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{CanSaveValue: true}) + + Convey("Save dashboard validation", func() { + dto := &SaveDashboardDTO{} + + Convey("When saving a dashboard with empty title it should return error", func() { + titles := []string{"", " ", " \t "} + + for _, title := range titles { + dto.Dashboard = models.NewDashboard(title) + _, err := service.SaveDashboard(dto) + So(err, ShouldEqual, models.ErrDashboardTitleEmpty) + } + }) + + Convey("Should return validation error if it's a folder and have a folder id", func() { + dto.Dashboard = models.NewDashboardFolder("Folder") + dto.Dashboard.FolderId = 1 + _, err := service.SaveDashboard(dto) + So(err, ShouldEqual, models.ErrDashboardFolderCannotHaveParent) + }) + + Convey("Should return validation error if folder is named General", func() { + 
dto.Dashboard = models.NewDashboardFolder("General") + _, err := service.SaveDashboard(dto) + So(err, ShouldEqual, models.ErrDashboardFolderNameExists) + }) + + Convey("When saving a dashboard should validate uid", func() { + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + return nil + }) + + testCases := []struct { + Uid string + Error error + }{ + {Uid: "", Error: nil}, + {Uid: " ", Error: nil}, + {Uid: " \t ", Error: nil}, + {Uid: "asdf90_-", Error: nil}, + {Uid: "asdf/90", Error: models.ErrDashboardInvalidUid}, + {Uid: " asdfghjklqwertyuiopzxcvbnmasdfghjklqwer ", Error: nil}, + {Uid: "asdfghjklqwertyuiopzxcvbnmasdfghjklqwertyuiopzxcvbnmasdfghjklqwertyuiopzxcvbnm", Error: models.ErrDashboardUidToLong}, + } + + for _, tc := range testCases { + dto.Dashboard = models.NewDashboard("title") + dto.Dashboard.SetUid(tc.Uid) + dto.User = &models.SignedInUser{} + + _, err := service.buildSaveDashboardCommand(dto, true) + So(err, ShouldEqual, tc.Error) + } + }) + + Convey("Should return validation error if alert data is invalid", func() { + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return errors.New("error") + }) + + dto.Dashboard = models.NewDashboard("Dash") + _, err := service.SaveDashboard(dto) + So(err, ShouldEqual, models.ErrDashboardContainsInvalidAlertData) + }) + }) + + Reset(func() { + guardian.New = origNewDashboardGuardian + }) + }) +} diff --git a/pkg/services/dashboards/dashboards.go b/pkg/services/dashboards/dashboards.go deleted file mode 100644 index 4bdba59b18e..00000000000 --- a/pkg/services/dashboards/dashboards.go +++ /dev/null @@ -1,82 +0,0 @@ -package dashboards - -import ( - "time" - - "github.com/grafana/grafana/pkg/bus" - "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/services/alerting" -) - -type Repository interface { - 
SaveDashboard(*SaveDashboardItem) (*models.Dashboard, error) -} - -var repositoryInstance Repository - -func GetRepository() Repository { - return repositoryInstance -} - -func SetRepository(rep Repository) { - repositoryInstance = rep -} - -type SaveDashboardItem struct { - OrgId int64 - UpdatedAt time.Time - UserId int64 - Message string - Overwrite bool - Dashboard *models.Dashboard -} - -type DashboardRepository struct{} - -func (dr *DashboardRepository) SaveDashboard(json *SaveDashboardItem) (*models.Dashboard, error) { - dashboard := json.Dashboard - - if dashboard.Title == "" { - return nil, models.ErrDashboardTitleEmpty - } - - validateAlertsCmd := alerting.ValidateDashboardAlertsCommand{ - OrgId: json.OrgId, - Dashboard: dashboard, - } - - if err := bus.Dispatch(&validateAlertsCmd); err != nil { - return nil, models.ErrDashboardContainsInvalidAlertData - } - - cmd := models.SaveDashboardCommand{ - Dashboard: dashboard.Data, - Message: json.Message, - OrgId: json.OrgId, - Overwrite: json.Overwrite, - UserId: json.UserId, - FolderId: dashboard.FolderId, - IsFolder: dashboard.IsFolder, - } - - if !json.UpdatedAt.IsZero() { - cmd.UpdatedAt = json.UpdatedAt - } - - err := bus.Dispatch(&cmd) - if err != nil { - return nil, err - } - - alertCmd := alerting.UpdateDashboardAlertsCommand{ - OrgId: json.OrgId, - UserId: json.UserId, - Dashboard: cmd.Result, - } - - if err := bus.Dispatch(&alertCmd); err != nil { - return nil, models.ErrDashboardFailedToUpdateAlertData - } - - return cmd.Result, nil -} diff --git a/pkg/services/dashboards/folder_service.go b/pkg/services/dashboards/folder_service.go new file mode 100644 index 00000000000..66afa6306fb --- /dev/null +++ b/pkg/services/dashboards/folder_service.go @@ -0,0 +1,245 @@ +package dashboards + +import ( + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/guardian" + "github.com/grafana/grafana/pkg/services/search" +) + +// FolderService 
service for operating on folders +type FolderService interface { + GetFolders(limit int) ([]*models.Folder, error) + GetFolderById(id int64) (*models.Folder, error) + GetFolderByUid(uid string) (*models.Folder, error) + CreateFolder(cmd *models.CreateFolderCommand) error + UpdateFolder(uid string, cmd *models.UpdateFolderCommand) error + DeleteFolder(uid string) (*models.Folder, error) +} + +// NewFolderService factory for creating a new folder service +var NewFolderService = func(orgId int64, user *models.SignedInUser) FolderService { + return &dashboardServiceImpl{ + orgId: orgId, + user: user, + } +} + +func (dr *dashboardServiceImpl) GetFolders(limit int) ([]*models.Folder, error) { + if limit == 0 { + limit = 1000 + } + + searchQuery := search.Query{ + SignedInUser: dr.user, + DashboardIds: make([]int64, 0), + FolderIds: make([]int64, 0), + Limit: limit, + OrgId: dr.orgId, + Type: "dash-folder", + Permission: models.PERMISSION_VIEW, + } + + if err := bus.Dispatch(&searchQuery); err != nil { + return nil, err + } + + folders := make([]*models.Folder, 0) + + for _, hit := range searchQuery.Result { + folders = append(folders, &models.Folder{ + Id: hit.Id, + Uid: hit.Uid, + Title: hit.Title, + }) + } + + return folders, nil +} + +func (dr *dashboardServiceImpl) GetFolderById(id int64) (*models.Folder, error) { + query := models.GetDashboardQuery{OrgId: dr.orgId, Id: id} + dashFolder, err := getFolder(query) + + if err != nil { + return nil, toFolderError(err) + } + + g := guardian.New(dashFolder.Id, dr.orgId, dr.user) + if canView, err := g.CanView(); err != nil || !canView { + if err != nil { + return nil, toFolderError(err) + } + return nil, models.ErrFolderAccessDenied + } + + return dashToFolder(dashFolder), nil +} + +func (dr *dashboardServiceImpl) GetFolderByUid(uid string) (*models.Folder, error) { + query := models.GetDashboardQuery{OrgId: dr.orgId, Uid: uid} + dashFolder, err := getFolder(query) + + if err != nil { + return nil, toFolderError(err) + } + 
+ g := guardian.New(dashFolder.Id, dr.orgId, dr.user) + if canView, err := g.CanView(); err != nil || !canView { + if err != nil { + return nil, toFolderError(err) + } + return nil, models.ErrFolderAccessDenied + } + + return dashToFolder(dashFolder), nil +} + +func (dr *dashboardServiceImpl) CreateFolder(cmd *models.CreateFolderCommand) error { + dashFolder := cmd.GetDashboardModel(dr.orgId, dr.user.UserId) + + dto := &SaveDashboardDTO{ + Dashboard: dashFolder, + OrgId: dr.orgId, + User: dr.user, + } + + saveDashboardCmd, err := dr.buildSaveDashboardCommand(dto, false) + if err != nil { + return toFolderError(err) + } + + err = bus.Dispatch(saveDashboardCmd) + if err != nil { + return toFolderError(err) + } + + query := models.GetDashboardQuery{OrgId: dr.orgId, Id: saveDashboardCmd.Result.Id} + dashFolder, err = getFolder(query) + if err != nil { + return toFolderError(err) + } + + cmd.Result = dashToFolder(dashFolder) + + return nil +} + +func (dr *dashboardServiceImpl) UpdateFolder(existingUid string, cmd *models.UpdateFolderCommand) error { + query := models.GetDashboardQuery{OrgId: dr.orgId, Uid: existingUid} + dashFolder, err := getFolder(query) + if err != nil { + return toFolderError(err) + } + + cmd.UpdateDashboardModel(dashFolder, dr.orgId, dr.user.UserId) + + dto := &SaveDashboardDTO{ + Dashboard: dashFolder, + OrgId: dr.orgId, + User: dr.user, + Overwrite: cmd.Overwrite, + } + + saveDashboardCmd, err := dr.buildSaveDashboardCommand(dto, false) + if err != nil { + return toFolderError(err) + } + + err = bus.Dispatch(saveDashboardCmd) + if err != nil { + return toFolderError(err) + } + + query = models.GetDashboardQuery{OrgId: dr.orgId, Id: saveDashboardCmd.Result.Id} + dashFolder, err = getFolder(query) + if err != nil { + return toFolderError(err) + } + + cmd.Result = dashToFolder(dashFolder) + + return nil +} + +func (dr *dashboardServiceImpl) DeleteFolder(uid string) (*models.Folder, error) { + query := models.GetDashboardQuery{OrgId: dr.orgId, Uid: 
uid} + dashFolder, err := getFolder(query) + if err != nil { + return nil, toFolderError(err) + } + + guardian := guardian.New(dashFolder.Id, dr.orgId, dr.user) + if canSave, err := guardian.CanSave(); err != nil || !canSave { + if err != nil { + return nil, toFolderError(err) + } + return nil, models.ErrFolderAccessDenied + } + + deleteCmd := models.DeleteDashboardCommand{OrgId: dr.orgId, Id: dashFolder.Id} + if err := bus.Dispatch(&deleteCmd); err != nil { + return nil, toFolderError(err) + } + + return dashToFolder(dashFolder), nil +} + +func getFolder(query models.GetDashboardQuery) (*models.Dashboard, error) { + if err := bus.Dispatch(&query); err != nil { + return nil, toFolderError(err) + } + + if !query.Result.IsFolder { + return nil, models.ErrFolderNotFound + } + + return query.Result, nil +} + +func dashToFolder(dash *models.Dashboard) *models.Folder { + return &models.Folder{ + Id: dash.Id, + Uid: dash.Uid, + Title: dash.Title, + HasAcl: dash.HasAcl, + Url: dash.GetUrl(), + Version: dash.Version, + Created: dash.Created, + CreatedBy: dash.CreatedBy, + Updated: dash.Updated, + UpdatedBy: dash.UpdatedBy, + } +} + +func toFolderError(err error) error { + if err == models.ErrDashboardTitleEmpty { + return models.ErrFolderTitleEmpty + } + + if err == models.ErrDashboardUpdateAccessDenied { + return models.ErrFolderAccessDenied + } + + if err == models.ErrDashboardWithSameNameInFolderExists { + return models.ErrFolderSameNameExists + } + + if err == models.ErrDashboardWithSameUIDExists { + return models.ErrFolderWithSameUIDExists + } + + if err == models.ErrDashboardVersionMismatch { + return models.ErrFolderVersionMismatch + } + + if err == models.ErrDashboardNotFound { + return models.ErrFolderNotFound + } + + if err == models.ErrDashboardFailedGenerateUniqueUid { + err = models.ErrFolderFailedGenerateUniqueUid + } + + return err +} diff --git a/pkg/services/dashboards/folder_service_test.go b/pkg/services/dashboards/folder_service_test.go new file mode 
100644 index 00000000000..6357e84805a --- /dev/null +++ b/pkg/services/dashboards/folder_service_test.go @@ -0,0 +1,191 @@ +package dashboards + +import ( + "testing" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" + + "github.com/grafana/grafana/pkg/services/guardian" + + . "github.com/smartystreets/goconvey/convey" +) + +func TestFolderService(t *testing.T) { + Convey("Folder service tests", t, func() { + service := dashboardServiceImpl{ + orgId: 1, + user: &models.SignedInUser{UserId: 1}, + } + + Convey("Given user has no permissions", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{}) + + bus.AddHandler("test", func(query *models.GetDashboardQuery) error { + query.Result = models.NewDashboardFolder("Folder") + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + return models.ErrDashboardUpdateAccessDenied + }) + + Convey("When get folder by id should return access denied error", func() { + _, err := service.GetFolderById(1) + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrFolderAccessDenied) + }) + + Convey("When get folder by uid should return access denied error", func() { + _, err := service.GetFolderByUid("uid") + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrFolderAccessDenied) + }) + + Convey("When creating folder should return access denied error", func() { + err := service.CreateFolder(&models.CreateFolderCommand{ + Title: "Folder", + }) + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrFolderAccessDenied) + }) + + Convey("When updating folder should return access denied error", func() { + err := service.UpdateFolder("uid", &models.UpdateFolderCommand{ + Uid: "uid", + Title: "Folder", + }) + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrFolderAccessDenied) + }) + + 
Convey("When deleting folder by uid should return access denied error", func() { + _, err := service.DeleteFolder("uid") + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrFolderAccessDenied) + }) + + Reset(func() { + guardian.New = origNewGuardian + }) + }) + + Convey("Given user has permission to save", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{CanSaveValue: true}) + + dash := models.NewDashboardFolder("Folder") + dash.Id = 1 + + bus.AddHandler("test", func(query *models.GetDashboardQuery) error { + query.Result = dash + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.ValidateDashboardBeforeSaveCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.UpdateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.SaveDashboardCommand) error { + cmd.Result = dash + return nil + }) + + bus.AddHandler("test", func(cmd *models.DeleteDashboardCommand) error { + return nil + }) + + Convey("When creating folder should not return access denied error", func() { + err := service.CreateFolder(&models.CreateFolderCommand{ + Title: "Folder", + }) + So(err, ShouldBeNil) + }) + + Convey("When updating folder should not return access denied error", func() { + err := service.UpdateFolder("uid", &models.UpdateFolderCommand{ + Uid: "uid", + Title: "Folder", + }) + So(err, ShouldBeNil) + }) + + Convey("When deleting folder by uid should not return access denied error", func() { + _, err := service.DeleteFolder("uid") + So(err, ShouldBeNil) + }) + + Reset(func() { + guardian.New = origNewGuardian + }) + }) + + Convey("Given user has permission to view", func() { + origNewGuardian := guardian.New + guardian.MockDashboardGuardian(&guardian.FakeDashboardGuardian{CanViewValue: true}) + + dashFolder := models.NewDashboardFolder("Folder") + 
dashFolder.Id = 1 + dashFolder.Uid = "uid-abc" + + bus.AddHandler("test", func(query *models.GetDashboardQuery) error { + query.Result = dashFolder + return nil + }) + + Convey("When get folder by id should return folder", func() { + f, _ := service.GetFolderById(1) + So(f.Id, ShouldEqual, dashFolder.Id) + So(f.Uid, ShouldEqual, dashFolder.Uid) + So(f.Title, ShouldEqual, dashFolder.Title) + }) + + Convey("When get folder by uid should return folder", func() { + f, _ := service.GetFolderByUid("uid") + So(f.Id, ShouldEqual, dashFolder.Id) + So(f.Uid, ShouldEqual, dashFolder.Uid) + So(f.Title, ShouldEqual, dashFolder.Title) + }) + + Reset(func() { + guardian.New = origNewGuardian + }) + }) + + Convey("Should map errors correct", func() { + testCases := []struct { + ActualError error + ExpectedError error + }{ + {ActualError: models.ErrDashboardTitleEmpty, ExpectedError: models.ErrFolderTitleEmpty}, + {ActualError: models.ErrDashboardUpdateAccessDenied, ExpectedError: models.ErrFolderAccessDenied}, + {ActualError: models.ErrDashboardWithSameNameInFolderExists, ExpectedError: models.ErrFolderSameNameExists}, + {ActualError: models.ErrDashboardWithSameUIDExists, ExpectedError: models.ErrFolderWithSameUIDExists}, + {ActualError: models.ErrDashboardVersionMismatch, ExpectedError: models.ErrFolderVersionMismatch}, + {ActualError: models.ErrDashboardNotFound, ExpectedError: models.ErrFolderNotFound}, + {ActualError: models.ErrDashboardFailedGenerateUniqueUid, ExpectedError: models.ErrFolderFailedGenerateUniqueUid}, + {ActualError: models.ErrDashboardInvalidUid, ExpectedError: models.ErrDashboardInvalidUid}, + } + + for _, tc := range testCases { + actualError := toFolderError(tc.ActualError) + if actualError != tc.ExpectedError { + t.Errorf("For error '%s' expected error '%s', actual '%s'", tc.ActualError, tc.ExpectedError, actualError) + } + } + }) + }) +} diff --git a/pkg/services/guardian/guardian.go b/pkg/services/guardian/guardian.go index 1b664c11385..811b38cac86 
100644 --- a/pkg/services/guardian/guardian.go +++ b/pkg/services/guardian/guardian.go @@ -1,13 +1,31 @@ package guardian import ( + "errors" + "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/setting" ) -type DashboardGuardian struct { +var ( + ErrGuardianPermissionExists = errors.New("Permission already exists") + ErrGuardianOverride = errors.New("You can only override a permission to be higher") +) + +// DashboardGuardian to be used for guard against operations without access on dashboard and acl +type DashboardGuardian interface { + CanSave() (bool, error) + CanEdit() (bool, error) + CanView() (bool, error) + CanAdmin() (bool, error) + HasPermission(permission m.PermissionType) (bool, error) + CheckPermissionBeforeUpdate(permission m.PermissionType, updatePermissions []*m.DashboardAcl) (bool, error) + GetAcl() ([]*m.DashboardAclInfoDTO, error) +} + +type dashboardGuardianImpl struct { user *m.SignedInUser dashId int64 orgId int64 @@ -16,8 +34,9 @@ type DashboardGuardian struct { log log.Logger } -func NewDashboardGuardian(dashId int64, orgId int64, user *m.SignedInUser) *DashboardGuardian { - return &DashboardGuardian{ +// New factory for creating a new dashboard guardian instance +var New = func(dashId int64, orgId int64, user *m.SignedInUser) DashboardGuardian { + return &dashboardGuardianImpl{ user: user, dashId: dashId, orgId: orgId, @@ -25,11 +44,11 @@ func NewDashboardGuardian(dashId int64, orgId int64, user *m.SignedInUser) *Dash } } -func (g *DashboardGuardian) CanSave() (bool, error) { +func (g *dashboardGuardianImpl) CanSave() (bool, error) { return g.HasPermission(m.PERMISSION_EDIT) } -func (g *DashboardGuardian) CanEdit() (bool, error) { +func (g *dashboardGuardianImpl) CanEdit() (bool, error) { if setting.ViewersCanEdit { return g.HasPermission(m.PERMISSION_VIEW) } @@ -37,15 +56,15 @@ func (g *DashboardGuardian) CanEdit() (bool, error) { return 
g.HasPermission(m.PERMISSION_EDIT) } -func (g *DashboardGuardian) CanView() (bool, error) { +func (g *dashboardGuardianImpl) CanView() (bool, error) { return g.HasPermission(m.PERMISSION_VIEW) } -func (g *DashboardGuardian) CanAdmin() (bool, error) { +func (g *dashboardGuardianImpl) CanAdmin() (bool, error) { return g.HasPermission(m.PERMISSION_ADMIN) } -func (g *DashboardGuardian) HasPermission(permission m.PermissionType) (bool, error) { +func (g *dashboardGuardianImpl) HasPermission(permission m.PermissionType) (bool, error) { if g.user.OrgRole == m.ROLE_ADMIN { return true, nil } @@ -55,6 +74,10 @@ func (g *DashboardGuardian) HasPermission(permission m.PermissionType) (bool, er return false, err } + return g.checkAcl(permission, acl) +} + +func (g *dashboardGuardianImpl) checkAcl(permission m.PermissionType, acl []*m.DashboardAclInfoDTO) (bool, error) { orgRole := g.user.OrgRole teamAclItems := []*m.DashboardAclInfoDTO{} @@ -79,18 +102,18 @@ func (g *DashboardGuardian) HasPermission(permission m.PermissionType) (bool, er } } - // do we have group rules? + // do we have team rules? 
if len(teamAclItems) == 0 { return false, nil } - // load groups + // load teams teams, err := g.getTeams() if err != nil { return false, err } - // evalute group rules + // evalute team rules for _, p := range acl { for _, ug := range teams { if ug.Id == p.TeamId && p.Permission >= permission { @@ -102,8 +125,59 @@ func (g *DashboardGuardian) HasPermission(permission m.PermissionType) (bool, er return false, nil } -// Returns dashboard acl -func (g *DashboardGuardian) GetAcl() ([]*m.DashboardAclInfoDTO, error) { +func (g *dashboardGuardianImpl) CheckPermissionBeforeUpdate(permission m.PermissionType, updatePermissions []*m.DashboardAcl) (bool, error) { + acl := []*m.DashboardAclInfoDTO{} + adminRole := m.ROLE_ADMIN + everyoneWithAdminRole := &m.DashboardAclInfoDTO{DashboardId: g.dashId, UserId: 0, TeamId: 0, Role: &adminRole, Permission: m.PERMISSION_ADMIN} + + // validate that duplicate permissions don't exists + for _, p := range updatePermissions { + aclItem := &m.DashboardAclInfoDTO{DashboardId: p.DashboardId, UserId: p.UserId, TeamId: p.TeamId, Role: p.Role, Permission: p.Permission} + if aclItem.IsDuplicateOf(everyoneWithAdminRole) { + return false, ErrGuardianPermissionExists + } + + for _, a := range acl { + if a.IsDuplicateOf(aclItem) { + return false, ErrGuardianPermissionExists + } + } + + acl = append(acl, aclItem) + } + + existingPermissions, err := g.GetAcl() + if err != nil { + return false, err + } + + // validate overridden permissions to be higher + for _, a := range acl { + for _, existingPerm := range existingPermissions { + // handle default permissions + if existingPerm.DashboardId == -1 { + existingPerm.DashboardId = g.dashId + } + + if a.DashboardId == existingPerm.DashboardId { + continue + } + + if a.IsDuplicateOf(existingPerm) && a.Permission <= existingPerm.Permission { + return false, ErrGuardianOverride + } + } + } + + if g.user.OrgRole == m.ROLE_ADMIN { + return true, nil + } + + return g.checkAcl(permission, acl) +} + +// GetAcl 
returns dashboard acl +func (g *dashboardGuardianImpl) GetAcl() ([]*m.DashboardAclInfoDTO, error) { if g.acl != nil { return g.acl, nil } @@ -113,18 +187,76 @@ func (g *DashboardGuardian) GetAcl() ([]*m.DashboardAclInfoDTO, error) { return nil, err } + for _, a := range query.Result { + // handle default permissions + if a.DashboardId == -1 { + a.DashboardId = g.dashId + } + } + g.acl = query.Result return g.acl, nil } -func (g *DashboardGuardian) getTeams() ([]*m.Team, error) { +func (g *dashboardGuardianImpl) getTeams() ([]*m.Team, error) { if g.groups != nil { return g.groups, nil } - query := m.GetTeamsByUserQuery{UserId: g.user.UserId} + query := m.GetTeamsByUserQuery{OrgId: g.orgId, UserId: g.user.UserId} err := bus.Dispatch(&query) g.groups = query.Result return query.Result, err } + +type FakeDashboardGuardian struct { + DashId int64 + OrgId int64 + User *m.SignedInUser + CanSaveValue bool + CanEditValue bool + CanViewValue bool + CanAdminValue bool + HasPermissionValue bool + CheckPermissionBeforeUpdateValue bool + CheckPermissionBeforeUpdateError error + GetAclValue []*m.DashboardAclInfoDTO +} + +func (g *FakeDashboardGuardian) CanSave() (bool, error) { + return g.CanSaveValue, nil +} + +func (g *FakeDashboardGuardian) CanEdit() (bool, error) { + return g.CanEditValue, nil +} + +func (g *FakeDashboardGuardian) CanView() (bool, error) { + return g.CanViewValue, nil +} + +func (g *FakeDashboardGuardian) CanAdmin() (bool, error) { + return g.CanAdminValue, nil +} + +func (g *FakeDashboardGuardian) HasPermission(permission m.PermissionType) (bool, error) { + return g.HasPermissionValue, nil +} + +func (g *FakeDashboardGuardian) CheckPermissionBeforeUpdate(permission m.PermissionType, updatePermissions []*m.DashboardAcl) (bool, error) { + return g.CheckPermissionBeforeUpdateValue, g.CheckPermissionBeforeUpdateError +} + +func (g *FakeDashboardGuardian) GetAcl() ([]*m.DashboardAclInfoDTO, error) { + return g.GetAclValue, nil +} + +func 
MockDashboardGuardian(mock *FakeDashboardGuardian) { + New = func(dashId int64, orgId int64, user *m.SignedInUser) DashboardGuardian { + mock.OrgId = orgId + mock.DashId = dashId + mock.User = user + return mock + } +} diff --git a/pkg/services/guardian/guardian_test.go b/pkg/services/guardian/guardian_test.go new file mode 100644 index 00000000000..bb7e6bd1a72 --- /dev/null +++ b/pkg/services/guardian/guardian_test.go @@ -0,0 +1,711 @@ +package guardian + +import ( + "fmt" + "testing" + + "github.com/grafana/grafana/pkg/bus" + + m "github.com/grafana/grafana/pkg/models" + . "github.com/smartystreets/goconvey/convey" +) + +func TestGuardian(t *testing.T) { + Convey("Guardian permission tests", t, func() { + orgRoleScenario("Given user has admin org role", m.ROLE_ADMIN, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeTrue) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + + Convey("When trying to update permissions", func() { + Convey("With duplicate user permissions should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianPermissionExists) + }) + + Convey("With duplicate team permissions should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: m.PERMISSION_ADMIN}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianPermissionExists) + }) + + Convey("With duplicate everyone with editor role permission should return error", func() { + r := m.ROLE_EDITOR + p := []*m.DashboardAcl{ + 
{OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_ADMIN}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianPermissionExists) + }) + + Convey("With duplicate everyone with viewer role permission should return error", func() { + r := m.ROLE_VIEWER + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_ADMIN}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianPermissionExists) + }) + + Convey("With everyone with admin role permission should return error", func() { + r := m.ROLE_ADMIN + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_ADMIN}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianPermissionExists) + }) + }) + + Convey("Given default permissions", func() { + editor := m.ROLE_EDITOR + viewer := m.ROLE_VIEWER + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: -1, Role: &editor, Permission: m.PERMISSION_EDIT}, + {OrgId: 1, DashboardId: -1, Role: &viewer, Permission: m.PERMISSION_VIEW}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions without everyone with role editor can edit should be allowed", func() { + r := m.ROLE_VIEWER + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 1, Role: &r, Permission: m.PERMISSION_VIEW}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions without everyone with role viewer can view should be allowed", func() { + r := m.ROLE_EDITOR + p := []*m.DashboardAcl{ + {OrgId: 1, 
DashboardId: 1, Role: &r, Permission: m.PERMISSION_EDIT}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + }) + + Convey("Given parent folder has user admin permission", func() { + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_ADMIN}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with admin user permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_ADMIN}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + + Convey("When trying to update dashboard permissions with edit user permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_EDIT}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + + Convey("When trying to update dashboard permissions with view user permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_VIEW}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + }) + + Convey("Given parent folder has user edit permission", func() { + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_EDIT}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with admin user permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 
1, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with edit user permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_EDIT}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + + Convey("When trying to update dashboard permissions with view user permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_VIEW}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + }) + + Convey("Given parent folder has user view permission", func() { + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, UserId: 1, Permission: m.PERMISSION_VIEW}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with admin user permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with edit user permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_EDIT}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with view user permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, UserId: 1, Permission: m.PERMISSION_VIEW}, + } + _, err := 
sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + }) + + Convey("Given parent folder has team admin permission", func() { + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, TeamId: 1, Permission: m.PERMISSION_ADMIN}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with admin team permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_ADMIN}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + + Convey("When trying to update dashboard permissions with edit team permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_EDIT}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + + Convey("When trying to update dashboard permissions with view team permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_VIEW}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + }) + + Convey("Given parent folder has team edit permission", func() { + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, TeamId: 1, Permission: m.PERMISSION_EDIT}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with admin team permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := 
sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with edit team permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_EDIT}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + + Convey("When trying to update dashboard permissions with view team permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_VIEW}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + }) + + Convey("Given parent folder has team view permission", func() { + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, TeamId: 1, Permission: m.PERMISSION_VIEW}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with admin team permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with edit team permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_EDIT}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with view team permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, TeamId: 1, Permission: m.PERMISSION_VIEW}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, 
ShouldEqual, ErrGuardianOverride) + }) + }) + + Convey("Given parent folder has editor role with edit permission", func() { + r := m.ROLE_EDITOR + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, Role: &r, Permission: m.PERMISSION_EDIT}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with everyone with editor role can admin permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with everyone with editor role can edit permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_EDIT}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + + Convey("When trying to update dashboard permissions with everyone with editor role can view permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_VIEW}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + }) + + Convey("Given parent folder has editor role with view permission", func() { + r := m.ROLE_EDITOR + existingPermissions := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 2, Role: &r, Permission: m.PERMISSION_VIEW}, + } + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = existingPermissions + return nil + }) + + Convey("When trying to update dashboard permissions with everyone with viewer role can admin permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, 
DashboardId: 3, Role: &r, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with everyone with viewer role can edit permission should be allowed", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_EDIT}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeTrue) + }) + + Convey("When trying to update dashboard permissions with everyone with viewer role can view permission should return error", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 3, Role: &r, Permission: m.PERMISSION_VIEW}, + } + _, err := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(err, ShouldEqual, ErrGuardianOverride) + }) + }) + }) + + orgRoleScenario("Given user has editor org role", m.ROLE_EDITOR, func(sc *scenarioContext) { + everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeTrue) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeTrue) + }) + + 
everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeFalse) + }) + + everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeFalse) + }) + + everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeFalse) + }) + + userWithPermissionScenario(m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeTrue) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + userWithPermissionScenario(m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + userWithPermissionScenario(m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, 
ShouldBeTrue) + }) + + teamWithPermissionScenario(m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeTrue) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + teamWithPermissionScenario(m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + teamWithPermissionScenario(m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeTrue) + }) + + Convey("When trying to update permissions should return false", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeFalse) + }) + }) + + orgRoleScenario("Given user has viewer org role", m.ROLE_VIEWER, func(sc *scenarioContext) { + everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeFalse) + }) + + everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := 
sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeFalse) + }) + + everyoneWithRoleScenario(m.ROLE_EDITOR, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeFalse) + }) + + everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeTrue) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + everyoneWithRoleScenario(m.ROLE_VIEWER, m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeTrue) + }) + + userWithPermissionScenario(m.PERMISSION_ADMIN, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeTrue) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + userWithPermissionScenario(m.PERMISSION_EDIT, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := 
sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeTrue) + So(canSave, ShouldBeTrue) + So(canView, ShouldBeTrue) + }) + + userWithPermissionScenario(m.PERMISSION_VIEW, sc, func(sc *scenarioContext) { + canAdmin, _ := sc.g.CanAdmin() + canEdit, _ := sc.g.CanEdit() + canSave, _ := sc.g.CanSave() + canView, _ := sc.g.CanView() + So(canAdmin, ShouldBeFalse) + So(canEdit, ShouldBeFalse) + So(canSave, ShouldBeFalse) + So(canView, ShouldBeTrue) + }) + + Convey("When trying to update permissions should return false", func() { + p := []*m.DashboardAcl{ + {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_VIEW}, + {OrgId: 1, DashboardId: 1, UserId: 1, Permission: m.PERMISSION_ADMIN}, + } + ok, _ := sc.g.CheckPermissionBeforeUpdate(m.PERMISSION_ADMIN, p) + So(ok, ShouldBeFalse) + }) + }) + }) +} + +type scenarioContext struct { + g DashboardGuardian +} + +type scenarioFunc func(c *scenarioContext) + +func orgRoleScenario(desc string, role m.RoleType, fn scenarioFunc) { + user := &m.SignedInUser{ + UserId: 1, + OrgId: 1, + OrgRole: role, + } + guard := New(1, 1, user) + sc := &scenarioContext{ + g: guard, + } + + Convey(desc, func() { + fn(sc) + }) +} + +func permissionScenario(desc string, sc *scenarioContext, permissions []*m.DashboardAclInfoDTO, fn scenarioFunc) { + bus.ClearBusHandlers() + + bus.AddHandler("test", func(query *m.GetDashboardAclInfoListQuery) error { + query.Result = permissions + return nil + }) + + teams := []*m.Team{} + + for _, p := range permissions { + if p.TeamId > 0 { + teams = append(teams, &m.Team{Id: p.TeamId}) + } + } + + bus.AddHandler("test", func(query *m.GetTeamsByUserQuery) error { + query.Result = teams + return nil + }) + + Convey(desc, func() { + fn(sc) + }) +} + +func userWithPermissionScenario(permission m.PermissionType, sc *scenarioContext, fn scenarioFunc) { + p := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 1, UserId: 1, Permission: permission}, + } + 
permissionScenario(fmt.Sprintf("and user has permission to %s item", permission), sc, p, fn) +} + +func teamWithPermissionScenario(permission m.PermissionType, sc *scenarioContext, fn scenarioFunc) { + p := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 1, TeamId: 1, Permission: permission}, + } + permissionScenario(fmt.Sprintf("and team has permission to %s item", permission), sc, p, fn) +} + +func everyoneWithRoleScenario(role m.RoleType, permission m.PermissionType, sc *scenarioContext, fn scenarioFunc) { + p := []*m.DashboardAclInfoDTO{ + {OrgId: 1, DashboardId: 1, UserId: -1, Role: &role, Permission: permission}, + } + permissionScenario(fmt.Sprintf("and everyone with %s role can %s item", role, permission), sc, p, fn) +} diff --git a/pkg/services/provisioning/dashboards/config_reader.go b/pkg/services/provisioning/dashboards/config_reader.go index a602ca71df3..9030ba609b9 100644 --- a/pkg/services/provisioning/dashboards/config_reader.go +++ b/pkg/services/provisioning/dashboards/config_reader.go @@ -2,41 +2,79 @@ package dashboards import ( "io/ioutil" + "os" "path/filepath" "strings" + "github.com/grafana/grafana/pkg/log" yaml "gopkg.in/yaml.v2" ) type configReader struct { path string + log log.Logger } -func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { - files, err := ioutil.ReadDir(cr.path) +func (cr *configReader) parseConfigs(file os.FileInfo) ([]*DashboardsAsConfig, error) { + filename, _ := filepath.Abs(filepath.Join(cr.path, file.Name())) + yamlFile, err := ioutil.ReadFile(filename) if err != nil { return nil, err } + apiVersion := &ConfigVersion{ApiVersion: 0} + yaml.Unmarshal(yamlFile, &apiVersion) + + if apiVersion.ApiVersion > 0 { + + v1 := &DashboardAsConfigV1{} + err := yaml.Unmarshal(yamlFile, &v1) + if err != nil { + return nil, err + } + + if v1 != nil { + return v1.mapToDashboardAsConfig(), nil + } + + } else { + var v0 []*DashboardsAsConfigV0 + err := yaml.Unmarshal(yamlFile, &v0) + if err != nil { + return nil, 
err + } + + if v0 != nil { + cr.log.Warn("[Deprecated] the dashboard provisioning config is outdated. please upgrade", "filename", filename) + return mapV0ToDashboardAsConfig(v0), nil + } + } + + return []*DashboardsAsConfig{}, nil +} + +func (cr *configReader) readConfig() ([]*DashboardsAsConfig, error) { var dashboards []*DashboardsAsConfig + + files, err := ioutil.ReadDir(cr.path) + if err != nil { + cr.log.Error("cant read dashboard provisioning files from directory", "path", cr.path) + return dashboards, nil + } + for _, file := range files { if !strings.HasSuffix(file.Name(), ".yaml") && !strings.HasSuffix(file.Name(), ".yml") { continue } - filename, _ := filepath.Abs(filepath.Join(cr.path, file.Name())) - yamlFile, err := ioutil.ReadFile(filename) + parsedDashboards, err := cr.parseConfigs(file) if err != nil { - return nil, err + } - var datasource []*DashboardsAsConfig - err = yaml.Unmarshal(yamlFile, &datasource) - if err != nil { - return nil, err + if len(parsedDashboards) > 0 { + dashboards = append(dashboards, parsedDashboards...) } - - dashboards = append(dashboards, datasource...) } for i := range dashboards { diff --git a/pkg/services/provisioning/dashboards/config_reader_test.go b/pkg/services/provisioning/dashboards/config_reader_test.go index 56c5a5fcf3d..ecbf6435c36 100644 --- a/pkg/services/provisioning/dashboards/config_reader_test.go +++ b/pkg/services/provisioning/dashboards/config_reader_test.go @@ -3,60 +3,79 @@ package dashboards import ( "testing" + "github.com/grafana/grafana/pkg/log" . 
"github.com/smartystreets/goconvey/convey" ) var ( simpleDashboardConfig string = "./test-configs/dashboards-from-disk" + oldVersion string = "./test-configs/version-0" brokenConfigs string = "./test-configs/broken-configs" ) func TestDashboardsAsConfig(t *testing.T) { Convey("Dashboards as configuration", t, func() { + logger := log.New("test-logger") - Convey("Can read config file", func() { + Convey("Can read config file version 1 format", func() { + cfgProvider := configReader{path: simpleDashboardConfig, log: logger} + cfg, err := cfgProvider.readConfig() + So(err, ShouldBeNil) - cfgProvifer := configReader{path: simpleDashboardConfig} - cfg, err := cfgProvifer.readConfig() - if err != nil { - t.Fatalf("readConfig return an error %v", err) - } - - So(len(cfg), ShouldEqual, 2) - - ds := cfg[0] - - So(ds.Name, ShouldEqual, "general dashboards") - So(ds.Type, ShouldEqual, "file") - So(ds.OrgId, ShouldEqual, 2) - So(ds.Folder, ShouldEqual, "developers") - So(ds.Editable, ShouldBeTrue) - - So(len(ds.Options), ShouldEqual, 1) - So(ds.Options["folder"], ShouldEqual, "/var/lib/grafana/dashboards") - - ds2 := cfg[1] - - So(ds2.Name, ShouldEqual, "default") - So(ds2.Type, ShouldEqual, "file") - So(ds2.OrgId, ShouldEqual, 1) - So(ds2.Folder, ShouldEqual, "") - So(ds2.Editable, ShouldBeFalse) - - So(len(ds2.Options), ShouldEqual, 1) - So(ds2.Options["folder"], ShouldEqual, "/var/lib/grafana/dashboards") + validateDashboardAsConfig(cfg) }) - Convey("Should skip broken config files", func() { + Convey("Can read config file in version 0 format", func() { + cfgProvider := configReader{path: oldVersion, log: logger} + cfg, err := cfgProvider.readConfig() + So(err, ShouldBeNil) - cfgProvifer := configReader{path: brokenConfigs} - cfg, err := cfgProvifer.readConfig() + validateDashboardAsConfig(cfg) + }) + + Convey("Should skip invalid path", func() { + + cfgProvider := configReader{path: "/invalid-directory", log: logger} + cfg, err := cfgProvider.readConfig() if err != nil { 
t.Fatalf("readConfig return an error %v", err) } So(len(cfg), ShouldEqual, 0) + }) + Convey("Should skip broken config files", func() { + + cfgProvider := configReader{path: brokenConfigs, log: logger} + cfg, err := cfgProvider.readConfig() + if err != nil { + t.Fatalf("readConfig return an error %v", err) + } + + So(len(cfg), ShouldEqual, 0) }) }) } +func validateDashboardAsConfig(cfg []*DashboardsAsConfig) { + So(len(cfg), ShouldEqual, 2) + + ds := cfg[0] + So(ds.Name, ShouldEqual, "general dashboards") + So(ds.Type, ShouldEqual, "file") + So(ds.OrgId, ShouldEqual, 2) + So(ds.Folder, ShouldEqual, "developers") + So(ds.Editable, ShouldBeTrue) + So(len(ds.Options), ShouldEqual, 1) + So(ds.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") + So(ds.DisableDeletion, ShouldBeTrue) + + ds2 := cfg[1] + So(ds2.Name, ShouldEqual, "default") + So(ds2.Type, ShouldEqual, "file") + So(ds2.OrgId, ShouldEqual, 1) + So(ds2.Folder, ShouldEqual, "") + So(ds2.Editable, ShouldBeFalse) + So(len(ds2.Options), ShouldEqual, 1) + So(ds2.Options["path"], ShouldEqual, "/var/lib/grafana/dashboards") + So(ds2.DisableDeletion, ShouldBeFalse) +} diff --git a/pkg/services/provisioning/dashboards/dashboard.go b/pkg/services/provisioning/dashboards/dashboard.go index 1ee0f78497d..a5349517bbe 100644 --- a/pkg/services/provisioning/dashboards/dashboard.go +++ b/pkg/services/provisioning/dashboards/dashboard.go @@ -14,9 +14,10 @@ type DashboardProvisioner struct { } func Provision(ctx context.Context, configDirectory string) (*DashboardProvisioner, error) { + log := log.New("provisioning.dashboard") d := &DashboardProvisioner{ - cfgReader: &configReader{path: configDirectory}, - log: log.New("provisioning.dashboard"), + cfgReader: &configReader{path: configDirectory, log: log}, + log: log, ctx: ctx, } diff --git a/pkg/services/provisioning/dashboards/dashboard_cache.go b/pkg/services/provisioning/dashboards/dashboard_cache.go deleted file mode 100644 index da6b7e8a5e8..00000000000 --- 
a/pkg/services/provisioning/dashboards/dashboard_cache.go +++ /dev/null @@ -1,33 +0,0 @@ -package dashboards - -import ( - "github.com/grafana/grafana/pkg/services/dashboards" - gocache "github.com/patrickmn/go-cache" - "time" -) - -type dashboardCache struct { - internalCache *gocache.Cache -} - -func NewDashboardCache() *dashboardCache { - return &dashboardCache{internalCache: gocache.New(5*time.Minute, 30*time.Minute)} -} - -func (fr *dashboardCache) addDashboardCache(key string, json *dashboards.SaveDashboardItem) { - fr.internalCache.Add(key, json, time.Minute*10) -} - -func (fr *dashboardCache) getCache(key string) (*dashboards.SaveDashboardItem, bool) { - obj, exist := fr.internalCache.Get(key) - if !exist { - return nil, exist - } - - dash, ok := obj.(*dashboards.SaveDashboardItem) - if !ok { - return nil, ok - } - - return dash, ok -} diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go index eb3085296fd..d3e9892c8f5 100644 --- a/pkg/services/provisioning/dashboards/file_reader.go +++ b/pkg/services/provisioning/dashboards/file_reader.go @@ -25,18 +25,22 @@ var ( ) type fileReader struct { - Cfg *DashboardsAsConfig - Path string - log log.Logger - dashboardRepo dashboards.Repository - cache *dashboardCache - createWalk func(fr *fileReader, folderId int64) filepath.WalkFunc + Cfg *DashboardsAsConfig + Path string + log log.Logger + dashboardService dashboards.DashboardProvisioningService } func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReader, error) { - path, ok := cfg.Options["folder"].(string) + var path string + path, ok := cfg.Options["path"].(string) if !ok { - return nil, fmt.Errorf("Failed to load dashboards. folder param is not a string") + path, ok = cfg.Options["folder"].(string) + if !ok { + return nil, fmt.Errorf("Failed to load dashboards. path param is not a string") + } + + log.Warn("[Deprecated] The folder property is deprecated. 
Please use path instead.") } if _, err := os.Stat(path); os.IsNotExist(err) { @@ -44,28 +48,26 @@ func NewDashboardFileReader(cfg *DashboardsAsConfig, log log.Logger) (*fileReade } return &fileReader{ - Cfg: cfg, - Path: path, - log: log, - dashboardRepo: dashboards.GetRepository(), - cache: NewDashboardCache(), - createWalk: createWalkFn, + Cfg: cfg, + Path: path, + log: log, + dashboardService: dashboards.NewProvisioningService(), }, nil } func (fr *fileReader) ReadAndListen(ctx context.Context) error { - ticker := time.NewTicker(checkDiskForChangesInterval) - if err := fr.startWalkingDisk(); err != nil { fr.log.Error("failed to search for dashboards", "error", err) } + ticker := time.NewTicker(checkDiskForChangesInterval) + running := false for { select { case <-ticker.C: - if !running { // avoid walking the filesystem in parallel. incase fs is very slow. + if !running { // avoid walking the filesystem in parallel. in-case fs is very slow. running = true go func() { if err := fr.startWalkingDisk(); err != nil { @@ -87,15 +89,116 @@ func (fr *fileReader) startWalkingDisk() error { } } - folderId, err := getOrCreateFolderId(fr.Cfg, fr.dashboardRepo) + folderId, err := getOrCreateFolderId(fr.Cfg, fr.dashboardService) if err != nil && err != ErrFolderNameMissing { return err } - return filepath.Walk(fr.Path, fr.createWalk(fr, folderId)) + provisionedDashboardRefs, err := getProvisionedDashboardByPath(fr.dashboardService, fr.Cfg.Name) + if err != nil { + return err + } + + filesFoundOnDisk := map[string]os.FileInfo{} + err = filepath.Walk(fr.Path, createWalkFn(filesFoundOnDisk)) + if err != nil { + return err + } + + fr.deleteDashboardIfFileIsMissing(provisionedDashboardRefs, filesFoundOnDisk) + + sanityChecker := newProvisioningSanityChecker(fr.Cfg.Name) + + // save dashboards based on json files + for path, fileInfo := range filesFoundOnDisk { + provisioningMetadata, err := fr.saveDashboard(path, folderId, fileInfo, provisionedDashboardRefs) + 
sanityChecker.track(provisioningMetadata) + if err != nil { + fr.log.Error("failed to save dashboard", "error", err) + } + } + sanityChecker.logWarnings(fr.log) + + return nil +} +func (fr *fileReader) deleteDashboardIfFileIsMissing(provisionedDashboardRefs map[string]*models.DashboardProvisioning, filesFoundOnDisk map[string]os.FileInfo) { + if fr.Cfg.DisableDeletion { + return + } + + // find dashboards to delete since json file is missing + var dashboardToDelete []int64 + for path, provisioningData := range provisionedDashboardRefs { + _, existsOnDisk := filesFoundOnDisk[path] + if !existsOnDisk { + dashboardToDelete = append(dashboardToDelete, provisioningData.DashboardId) + } + } + // delete dashboard that are missing json file + for _, dashboardId := range dashboardToDelete { + fr.log.Debug("deleting provisioned dashboard. missing on disk", "id", dashboardId) + cmd := &models.DeleteDashboardCommand{OrgId: fr.Cfg.OrgId, Id: dashboardId} + err := bus.Dispatch(cmd) + if err != nil { + fr.log.Error("failed to delete dashboard", "id", cmd.Id) + } + } } -func getOrCreateFolderId(cfg *DashboardsAsConfig, repo dashboards.Repository) (int64, error) { +func (fr *fileReader) saveDashboard(path string, folderId int64, fileInfo os.FileInfo, provisionedDashboardRefs map[string]*models.DashboardProvisioning) (provisioningMetadata, error) { + provisioningMetadata := provisioningMetadata{} + resolvedFileInfo, err := resolveSymlink(fileInfo, path) + if err != nil { + return provisioningMetadata, err + } + + provisionedData, alreadyProvisioned := provisionedDashboardRefs[path] + upToDate := alreadyProvisioned && provisionedData.Updated == resolvedFileInfo.ModTime().Unix() + + dash, err := fr.readDashboardFromFile(path, resolvedFileInfo.ModTime(), folderId) + if err != nil { + fr.log.Error("failed to load dashboard from ", "file", path, "error", err) + return provisioningMetadata, nil + } + + // keeps track of what uid's and title's we have already provisioned + 
provisioningMetadata.uid = dash.Dashboard.Uid + provisioningMetadata.title = dash.Dashboard.Title + + if upToDate { + return provisioningMetadata, nil + } + + if dash.Dashboard.Id != 0 { + fr.log.Error("provisioned dashboard json files cannot contain id") + return provisioningMetadata, nil + } + + if alreadyProvisioned { + dash.Dashboard.SetId(provisionedData.DashboardId) + } + + fr.log.Debug("saving new dashboard", "file", path) + dp := &models.DashboardProvisioning{ExternalId: path, Name: fr.Cfg.Name, Updated: resolvedFileInfo.ModTime().Unix()} + _, err = fr.dashboardService.SaveProvisionedDashboard(dash, dp) + return provisioningMetadata, err +} + +func getProvisionedDashboardByPath(service dashboards.DashboardProvisioningService, name string) (map[string]*models.DashboardProvisioning, error) { + arr, err := service.GetProvisionedDashboardData(name) + if err != nil { + return nil, err + } + + byPath := map[string]*models.DashboardProvisioning{} + for _, pd := range arr { + byPath[pd.ExternalId] = pd + } + + return byPath, nil +} + +func getOrCreateFolderId(cfg *DashboardsAsConfig, service dashboards.DashboardProvisioningService) (int64, error) { if cfg.Folder == "" { return 0, ErrFolderNameMissing } @@ -109,12 +212,12 @@ func getOrCreateFolderId(cfg *DashboardsAsConfig, repo dashboards.Repository) (i // dashboard folder not found. create one. if err == models.ErrDashboardNotFound { - dash := &dashboards.SaveDashboardItem{} - dash.Dashboard = models.NewDashboard(cfg.Folder) + dash := &dashboards.SaveDashboardDTO{} + dash.Dashboard = models.NewDashboardFolder(cfg.Folder) dash.Dashboard.IsFolder = true dash.Overwrite = true dash.OrgId = cfg.OrgId - dbDash, err := repo.SaveDashboard(dash) + dbDash, err := service.SaveFolderForProvisionedDashboards(dash) if err != nil { return 0, err } @@ -123,69 +226,59 @@ func getOrCreateFolderId(cfg *DashboardsAsConfig, repo dashboards.Repository) (i } if !cmd.Result.IsFolder { - return 0, fmt.Errorf("Got invalid response. 
Expected folder, found dashboard") + return 0, fmt.Errorf("got invalid response. expected folder, found dashboard") } return cmd.Result.Id, nil } -func createWalkFn(fr *fileReader, folderId int64) filepath.WalkFunc { +func resolveSymlink(fileinfo os.FileInfo, path string) (os.FileInfo, error) { + checkFilepath, err := filepath.EvalSymlinks(path) + if path != checkFilepath { + path = checkFilepath + fi, err := os.Lstat(checkFilepath) + if err != nil { + return nil, err + } + + return fi, nil + } + + return fileinfo, err +} + +func createWalkFn(filesOnDisk map[string]os.FileInfo) filepath.WalkFunc { return func(path string, fileInfo os.FileInfo, err error) error { if err != nil { return err } - if fileInfo.IsDir() { - if strings.HasPrefix(fileInfo.Name(), ".") { - return filepath.SkipDir - } - return nil - } - if !strings.HasSuffix(fileInfo.Name(), ".json") { - return nil - } - - cachedDashboard, exist := fr.cache.getCache(path) - if exist && cachedDashboard.UpdatedAt == fileInfo.ModTime() { - return nil - } - - dash, err := fr.readDashboardFromFile(path, folderId) - if err != nil { - fr.log.Error("failed to load dashboard from ", "file", path, "error", err) - return nil - } - - // id = 0 indicates ID validation should be avoided before writing to the db. - dash.Dashboard.Id = 0 - - cmd := &models.GetDashboardQuery{Slug: dash.Dashboard.Slug} - err = bus.Dispatch(cmd) - - // if we dont have the dashboard in the db, save it! 
- if err == models.ErrDashboardNotFound { - fr.log.Debug("saving new dashboard", "file", path) - _, err = fr.dashboardRepo.SaveDashboard(dash) + isValid, err := validateWalkablePath(fileInfo) + if !isValid { return err } - if err != nil { - fr.log.Error("failed to query for dashboard", "slug", dash.Dashboard.Slug, "error", err) - return nil - } - - // break if db version is newer then fil version - if cmd.Result.Updated.Unix() >= fileInfo.ModTime().Unix() { - return nil - } - - fr.log.Debug("loading dashboard from disk into database.", "file", path) - _, err = fr.dashboardRepo.SaveDashboard(dash) - return err + filesOnDisk[path] = fileInfo + return nil } } -func (fr *fileReader) readDashboardFromFile(path string, folderId int64) (*dashboards.SaveDashboardItem, error) { +func validateWalkablePath(fileInfo os.FileInfo) (bool, error) { + if fileInfo.IsDir() { + if strings.HasPrefix(fileInfo.Name(), ".") { + return false, filepath.SkipDir + } + return false, nil + } + + if !strings.HasSuffix(fileInfo.Name(), ".json") { + return false, nil + } + + return true, nil +} + +func (fr *fileReader) readDashboardFromFile(path string, lastModified time.Time, folderId int64) (*dashboards.SaveDashboardDTO, error) { reader, err := os.Open(path) if err != nil { return nil, err @@ -197,17 +290,53 @@ func (fr *fileReader) readDashboardFromFile(path string, folderId int64) (*dashb return nil, err } - stat, err := os.Stat(path) + dash, err := createDashboardJson(data, lastModified, fr.Cfg, folderId) if err != nil { return nil, err } - dash, err := createDashboardJson(data, stat.ModTime(), fr.Cfg, folderId) - if err != nil { - return nil, err - } - - fr.cache.addDashboardCache(path, dash) - return dash, nil } + +type provisioningMetadata struct { + uid string + title string +} + +func newProvisioningSanityChecker(provisioningProvider string) provisioningSanityChecker { + return provisioningSanityChecker{ + provisioningProvider: provisioningProvider, + uidUsage: map[string]uint8{}, + 
titleUsage: map[string]uint8{}} +} + +type provisioningSanityChecker struct { + provisioningProvider string + uidUsage map[string]uint8 + titleUsage map[string]uint8 +} + +func (checker provisioningSanityChecker) track(pm provisioningMetadata) { + if len(pm.uid) > 0 { + checker.uidUsage[pm.uid] += 1 + } + if len(pm.title) > 0 { + checker.titleUsage[pm.title] += 1 + } + +} + +func (checker provisioningSanityChecker) logWarnings(log log.Logger) { + for uid, times := range checker.uidUsage { + if times > 1 { + log.Error("the same 'uid' is used more than once", "uid", uid, "provider", checker.provisioningProvider) + } + } + + for title, times := range checker.titleUsage { + if times > 1 { + log.Error("the same 'title' is used more than once", "title", title, "provider", checker.provisioningProvider) + } + } + +} diff --git a/pkg/services/provisioning/dashboards/file_reader_test.go b/pkg/services/provisioning/dashboards/file_reader_test.go index 16e3e1184b8..cd5e3456734 100644 --- a/pkg/services/provisioning/dashboards/file_reader_test.go +++ b/pkg/services/provisioning/dashboards/file_reader_test.go @@ -19,16 +19,16 @@ var ( brokenDashboards string = "./test-dashboards/broken-dashboards" oneDashboard string = "./test-dashboards/one-dashboard" - fakeRepo *fakeDashboardRepo + fakeService *fakeDashboardProvisioningService ) func TestDashboardFileReader(t *testing.T) { Convey("Dashboard file reader", t, func() { bus.ClearBusHandlers() - fakeRepo = &fakeDashboardRepo{} + origNewDashboardProvisioningService := dashboards.NewProvisioningService + fakeService = mockDashboardProvisioningService() bus.AddHandler("test", mockGetDashboardQuery) - dashboards.SetRepository(fakeRepo) logger := log.New("test.logger") Convey("Reading dashboards from disk", func() { @@ -42,7 +42,7 @@ func TestDashboardFileReader(t *testing.T) { } Convey("Can read default dashboard", func() { - cfg.Options["folder"] = defaultDashboards + cfg.Options["path"] = defaultDashboards cfg.Folder = "Team A" 
reader, err := NewDashboardFileReader(cfg, logger) @@ -54,7 +54,7 @@ func TestDashboardFileReader(t *testing.T) { folders := 0 dashboards := 0 - for _, i := range fakeRepo.inserted { + for _, i := range fakeService.inserted { if i.Dashboard.IsFolder { folders++ } else { @@ -62,33 +62,16 @@ func TestDashboardFileReader(t *testing.T) { } } - So(dashboards, ShouldEqual, 2) So(folders, ShouldEqual, 1) - }) - - Convey("Should not update dashboards when db is newer", func() { - cfg.Options["folder"] = oneDashboard - - fakeRepo.getDashboard = append(fakeRepo.getDashboard, &models.Dashboard{ - Updated: time.Now().Add(time.Hour), - Slug: "grafana", - }) - - reader, err := NewDashboardFileReader(cfg, logger) - So(err, ShouldBeNil) - - err = reader.startWalkingDisk() - So(err, ShouldBeNil) - - So(len(fakeRepo.inserted), ShouldEqual, 0) + So(dashboards, ShouldEqual, 2) }) Convey("Can read default dashboard and replace old version in database", func() { - cfg.Options["folder"] = oneDashboard + cfg.Options["path"] = oneDashboard stat, _ := os.Stat(oneDashboard + "/dashboard1.json") - fakeRepo.getDashboard = append(fakeRepo.getDashboard, &models.Dashboard{ + fakeService.getDashboard = append(fakeService.getDashboard, &models.Dashboard{ Updated: stat.ModTime().AddDate(0, 0, -1), Slug: "grafana", }) @@ -99,7 +82,7 @@ func TestDashboardFileReader(t *testing.T) { err = reader.startWalkingDisk() So(err, ShouldBeNil) - So(len(fakeRepo.inserted), ShouldEqual, 1) + So(len(fakeService.inserted), ShouldEqual, 1) }) Convey("Invalid configuration should return error", func() { @@ -115,7 +98,7 @@ func TestDashboardFileReader(t *testing.T) { }) Convey("Broken dashboards should not cause error", func() { - cfg.Options["folder"] = brokenDashboards + cfg.Options["path"] = brokenDashboards _, err := NewDashboardFileReader(cfg, logger) So(err, ShouldBeNil) @@ -133,7 +116,7 @@ func TestDashboardFileReader(t *testing.T) { }, } - _, err := getOrCreateFolderId(cfg, fakeRepo) + _, err := 
getOrCreateFolderId(cfg, fakeService) So(err, ShouldEqual, ErrFolderNameMissing) }) @@ -148,42 +131,59 @@ func TestDashboardFileReader(t *testing.T) { }, } - folderId, err := getOrCreateFolderId(cfg, fakeRepo) + folderId, err := getOrCreateFolderId(cfg, fakeService) So(err, ShouldBeNil) inserted := false - for _, d := range fakeRepo.inserted { + for _, d := range fakeService.inserted { if d.Dashboard.IsFolder && d.Dashboard.Id == folderId { inserted = true } } - So(len(fakeRepo.inserted), ShouldEqual, 1) + So(len(fakeService.inserted), ShouldEqual, 1) So(inserted, ShouldBeTrue) }) Convey("Walking the folder with dashboards", func() { - cfg := &DashboardsAsConfig{ - Name: "Default", - Type: "file", - OrgId: 1, - Folder: "", - Options: map[string]interface{}{ - "folder": defaultDashboards, - }, - } - - reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) - So(err, ShouldBeNil) + noFiles := map[string]os.FileInfo{} Convey("should skip dirs that starts with .", func() { - shouldSkip := reader.createWalk(reader, 0)("path", &FakeFileInfo{isDirectory: true, name: ".folder"}, nil) + shouldSkip := createWalkFn(noFiles)("path", &FakeFileInfo{isDirectory: true, name: ".folder"}, nil) So(shouldSkip, ShouldEqual, filepath.SkipDir) }) Convey("should keep walking if file is not .json", func() { - shouldSkip := reader.createWalk(reader, 0)("path", &FakeFileInfo{isDirectory: true, name: "folder"}, nil) + shouldSkip := createWalkFn(noFiles)("path", &FakeFileInfo{isDirectory: true, name: "folder"}, nil) So(shouldSkip, ShouldBeNil) }) }) + + Convey("Can use bpth path and folder as dashboard path", func() { + cfg := &DashboardsAsConfig{ + Name: "Default", + Type: "file", + OrgId: 1, + Folder: "", + Options: map[string]interface{}{}, + } + + Convey("using path parameter", func() { + cfg.Options["path"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + So(reader.Path, ShouldEqual, defaultDashboards) + }) + + 
Convey("using folder as options", func() { + cfg.Options["folder"] = defaultDashboards + reader, err := NewDashboardFileReader(cfg, log.New("test-logger")) + So(err, ShouldBeNil) + So(reader.Path, ShouldEqual, defaultDashboards) + }) + }) + + Reset(func() { + dashboards.NewProvisioningService = origNewDashboardProvisioningService + }) }) } @@ -216,18 +216,37 @@ func (ffi FakeFileInfo) Sys() interface{} { return nil } -type fakeDashboardRepo struct { - inserted []*dashboards.SaveDashboardItem +func mockDashboardProvisioningService() *fakeDashboardProvisioningService { + mock := fakeDashboardProvisioningService{} + dashboards.NewProvisioningService = func() dashboards.DashboardProvisioningService { + return &mock + } + return &mock +} + +type fakeDashboardProvisioningService struct { + inserted []*dashboards.SaveDashboardDTO + provisioned []*models.DashboardProvisioning getDashboard []*models.Dashboard } -func (repo *fakeDashboardRepo) SaveDashboard(json *dashboards.SaveDashboardItem) (*models.Dashboard, error) { - repo.inserted = append(repo.inserted, json) - return json.Dashboard, nil +func (s *fakeDashboardProvisioningService) GetProvisionedDashboardData(name string) ([]*models.DashboardProvisioning, error) { + return s.provisioned, nil +} + +func (s *fakeDashboardProvisioningService) SaveProvisionedDashboard(dto *dashboards.SaveDashboardDTO, provisioning *models.DashboardProvisioning) (*models.Dashboard, error) { + s.inserted = append(s.inserted, dto) + s.provisioned = append(s.provisioned, provisioning) + return dto.Dashboard, nil +} + +func (s *fakeDashboardProvisioningService) SaveFolderForProvisionedDashboards(dto *dashboards.SaveDashboardDTO) (*models.Dashboard, error) { + s.inserted = append(s.inserted, dto) + return dto.Dashboard, nil } func mockGetDashboardQuery(cmd *models.GetDashboardQuery) error { - for _, d := range fakeRepo.getDashboard { + for _, d := range fakeService.getDashboard { if d.Slug == cmd.Slug { cmd.Result = d return nil diff --git 
a/pkg/services/provisioning/dashboards/test-configs/broken-configs/commented.yaml b/pkg/services/provisioning/dashboards/test-configs/broken-configs/commented.yaml index e40612af508..f0dcca9b47a 100644 --- a/pkg/services/provisioning/dashboards/test-configs/broken-configs/commented.yaml +++ b/pkg/services/provisioning/dashboards/test-configs/broken-configs/commented.yaml @@ -3,4 +3,4 @@ # folder: '' # type: file # options: -# folder: /var/lib/grafana/dashboards +# path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/dev-dashboards.yaml b/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/dev-dashboards.yaml index a7c4a812092..e9776d69010 100644 --- a/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/dev-dashboards.yaml +++ b/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/dev-dashboards.yaml @@ -1,12 +1,16 @@ +apiVersion: 1 + +providers: - name: 'general dashboards' - org_id: 2 + orgId: 2 folder: 'developers' editable: true + disableDeletion: true type: file options: - folder: /var/lib/grafana/dashboards + path: /var/lib/grafana/dashboards - name: 'default' type: file options: - folder: /var/lib/grafana/dashboards + path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/sample.yaml b/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/sample.yaml new file mode 100644 index 00000000000..5b73632b1ff --- /dev/null +++ b/pkg/services/provisioning/dashboards/test-configs/dashboards-from-disk/sample.yaml @@ -0,0 +1,10 @@ +apiVersion: 1 + +#providers: +#- name: 'gasdf' +# orgId: 2 +# folder: 'developers' +# editable: true +# type: file +# options: +# path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/test-configs/version-0/version-0.yaml b/pkg/services/provisioning/dashboards/test-configs/version-0/version-0.yaml new file mode 
100644 index 00000000000..979e762d4d4 --- /dev/null +++ b/pkg/services/provisioning/dashboards/test-configs/version-0/version-0.yaml @@ -0,0 +1,13 @@ +- name: 'general dashboards' + org_id: 2 + folder: 'developers' + editable: true + disableDeletion: true + type: file + options: + path: /var/lib/grafana/dashboards + +- name: 'default' + type: file + options: + path: /var/lib/grafana/dashboards diff --git a/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard1.json b/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard1.json index 5b6765a4ed6..febb98be0e8 100644 --- a/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard1.json +++ b/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard1.json @@ -1,5 +1,5 @@ { - "title": "Grafana", + "title": "Grafana1", "tags": [], "style": "dark", "timezone": "browser", @@ -170,4 +170,3 @@ }, "version": 5 } - \ No newline at end of file diff --git a/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard2.json b/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard2.json index 5b6765a4ed6..9291f16d9e7 100644 --- a/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard2.json +++ b/pkg/services/provisioning/dashboards/test-dashboards/folder-one/dashboard2.json @@ -1,5 +1,5 @@ { - "title": "Grafana", + "title": "Grafana2", "tags": [], "style": "dark", "timezone": "browser", @@ -170,4 +170,3 @@ }, "version": 5 } - \ No newline at end of file diff --git a/pkg/services/provisioning/dashboards/types.go b/pkg/services/provisioning/dashboards/types.go index 46ca3c9246e..f742b321552 100644 --- a/pkg/services/provisioning/dashboards/types.go +++ b/pkg/services/provisioning/dashboards/types.go @@ -10,20 +10,50 @@ import ( ) type DashboardsAsConfig struct { - Name string `json:"name" yaml:"name"` - Type string `json:"type" yaml:"type"` - OrgId int64 `json:"org_id" yaml:"org_id"` - Folder string 
`json:"folder" yaml:"folder"` - Editable bool `json:"editable" yaml:"editable"` - Options map[string]interface{} `json:"options" yaml:"options"` + Name string + Type string + OrgId int64 + Folder string + Editable bool + Options map[string]interface{} + DisableDeletion bool } -func createDashboardJson(data *simplejson.Json, lastModified time.Time, cfg *DashboardsAsConfig, folderId int64) (*dashboards.SaveDashboardItem, error) { - dash := &dashboards.SaveDashboardItem{} +type DashboardsAsConfigV0 struct { + Name string `json:"name" yaml:"name"` + Type string `json:"type" yaml:"type"` + OrgId int64 `json:"org_id" yaml:"org_id"` + Folder string `json:"folder" yaml:"folder"` + Editable bool `json:"editable" yaml:"editable"` + Options map[string]interface{} `json:"options" yaml:"options"` + DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` +} + +type ConfigVersion struct { + ApiVersion int64 `json:"apiVersion" yaml:"apiVersion"` +} + +type DashboardAsConfigV1 struct { + Providers []*DashboardProviderConfigs `json:"providers" yaml:"providers"` +} + +type DashboardProviderConfigs struct { + Name string `json:"name" yaml:"name"` + Type string `json:"type" yaml:"type"` + OrgId int64 `json:"orgId" yaml:"orgId"` + Folder string `json:"folder" yaml:"folder"` + Editable bool `json:"editable" yaml:"editable"` + Options map[string]interface{} `json:"options" yaml:"options"` + DisableDeletion bool `json:"disableDeletion" yaml:"disableDeletion"` +} + +func createDashboardJson(data *simplejson.Json, lastModified time.Time, cfg *DashboardsAsConfig, folderId int64) (*dashboards.SaveDashboardDTO, error) { + dash := &dashboards.SaveDashboardDTO{} dash.Dashboard = models.NewDashboardFromJson(data) dash.UpdatedAt = lastModified dash.Overwrite = true dash.OrgId = cfg.OrgId + dash.Dashboard.OrgId = cfg.OrgId dash.Dashboard.FolderId = folderId if !cfg.Editable { dash.Dashboard.Data.Set("editable", cfg.Editable) @@ -35,3 +65,39 @@ func createDashboardJson(data 
*simplejson.Json, lastModified time.Time, cfg *Das return dash, nil } + +func mapV0ToDashboardAsConfig(v0 []*DashboardsAsConfigV0) []*DashboardsAsConfig { + var r []*DashboardsAsConfig + + for _, v := range v0 { + r = append(r, &DashboardsAsConfig{ + Name: v.Name, + Type: v.Type, + OrgId: v.OrgId, + Folder: v.Folder, + Editable: v.Editable, + Options: v.Options, + DisableDeletion: v.DisableDeletion, + }) + } + + return r +} + +func (dc *DashboardAsConfigV1) mapToDashboardAsConfig() []*DashboardsAsConfig { + var r []*DashboardsAsConfig + + for _, v := range dc.Providers { + r = append(r, &DashboardsAsConfig{ + Name: v.Name, + Type: v.Type, + OrgId: v.OrgId, + Folder: v.Folder, + Editable: v.Editable, + Options: v.Options, + DisableDeletion: v.DisableDeletion, + }) + } + + return r +} diff --git a/pkg/services/provisioning/datasources/config_reader.go b/pkg/services/provisioning/datasources/config_reader.go new file mode 100644 index 00000000000..58ed5472a6b --- /dev/null +++ b/pkg/services/provisioning/datasources/config_reader.go @@ -0,0 +1,113 @@ +package datasources + +import ( + "io/ioutil" + "os" + "path/filepath" + "strings" + + "github.com/grafana/grafana/pkg/log" + "gopkg.in/yaml.v2" +) + +type configReader struct { + log log.Logger +} + +func (cr *configReader) readConfig(path string) ([]*DatasourcesAsConfig, error) { + var datasources []*DatasourcesAsConfig + + files, err := ioutil.ReadDir(path) + if err != nil { + cr.log.Error("cant read datasource provisioning files from directory", "path", path) + return datasources, nil + } + + for _, file := range files { + if strings.HasSuffix(file.Name(), ".yaml") || strings.HasSuffix(file.Name(), ".yml") { + datasource, err := cr.parseDatasourceConfig(path, file) + if err != nil { + return nil, err + } + + if datasource != nil { + datasources = append(datasources, datasource) + } + } + } + + err = validateDefaultUniqueness(datasources) + if err != nil { + return nil, err + } + + return datasources, nil +} + +func 
(cr *configReader) parseDatasourceConfig(path string, file os.FileInfo) (*DatasourcesAsConfig, error) { + filename, _ := filepath.Abs(filepath.Join(path, file.Name())) + yamlFile, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + + var apiVersion *ConfigVersion + err = yaml.Unmarshal(yamlFile, &apiVersion) + if err != nil { + return nil, err + } + + if apiVersion == nil { + apiVersion = &ConfigVersion{ApiVersion: 0} + } + + if apiVersion.ApiVersion > 0 { + var v1 *DatasourcesAsConfigV1 + err = yaml.Unmarshal(yamlFile, &v1) + if err != nil { + return nil, err + } + + return v1.mapToDatasourceFromConfig(apiVersion.ApiVersion), nil + } + + var v0 *DatasourcesAsConfigV0 + err = yaml.Unmarshal(yamlFile, &v0) + if err != nil { + return nil, err + } + + cr.log.Warn("[Deprecated] the datasource provisioning config is outdated. please upgrade", "filename", filename) + + return v0.mapToDatasourceFromConfig(apiVersion.ApiVersion), nil +} + +func validateDefaultUniqueness(datasources []*DatasourcesAsConfig) error { + defaultCount := 0 + for i := range datasources { + if datasources[i].Datasources == nil { + continue + } + + for _, ds := range datasources[i].Datasources { + if ds.OrgId == 0 { + ds.OrgId = 1 + } + + if ds.IsDefault { + defaultCount++ + if defaultCount > 1 { + return ErrInvalidConfigToManyDefault + } + } + } + + for _, ds := range datasources[i].DeleteDatasources { + if ds.OrgId == 0 { + ds.OrgId = 1 + } + } + } + + return nil +} diff --git a/pkg/services/provisioning/datasources/datasources_test.go b/pkg/services/provisioning/datasources/config_reader_test.go similarity index 62% rename from pkg/services/provisioning/datasources/datasources_test.go rename to pkg/services/provisioning/datasources/config_reader_test.go index f3252c28d9d..3198329e0ae 100644 --- a/pkg/services/provisioning/datasources/datasources_test.go +++ b/pkg/services/provisioning/datasources/config_reader_test.go @@ -11,12 +11,13 @@ import ( ) var ( - logger log.Logger 
= log.New("fake.logger") + logger log.Logger = log.New("fake.log") oneDatasourcesConfig string = "" twoDatasourcesConfig string = "./test-configs/two-datasources" twoDatasourcesConfigPurgeOthers string = "./test-configs/insert-two-delete-two" doubleDatasourcesConfig string = "./test-configs/double-default" allProperties string = "./test-configs/all-properties" + versionZero string = "./test-configs/version-0" brokenYaml string = "./test-configs/broken-yaml" fakeRepo *fakeRepository @@ -115,52 +116,101 @@ func TestDatasourceAsConfig(t *testing.T) { }) Convey("broken yaml should return error", func() { - _, err := configReader{}.readConfig(brokenYaml) + reader := &configReader{} + _, err := reader.readConfig(brokenYaml) So(err, ShouldNotBeNil) }) - Convey("can read all properties", func() { - cfgProvifer := configReader{} + Convey("skip invalid directory", func() { + cfgProvifer := &configReader{log: log.New("test logger")} + cfg, err := cfgProvifer.readConfig("./invalid-directory") + if err != nil { + t.Fatalf("readConfig return an error %v", err) + } + + So(len(cfg), ShouldEqual, 0) + }) + + Convey("can read all properties from version 1", func() { + cfgProvifer := &configReader{log: log.New("test logger")} cfg, err := cfgProvifer.readConfig(allProperties) if err != nil { t.Fatalf("readConfig return an error %v", err) } - So(len(cfg), ShouldEqual, 2) + So(len(cfg), ShouldEqual, 3) dsCfg := cfg[0] - ds := dsCfg.Datasources[0] - So(ds.Name, ShouldEqual, "name") - So(ds.Type, ShouldEqual, "type") - So(ds.Access, ShouldEqual, models.DS_ACCESS_PROXY) - So(ds.OrgId, ShouldEqual, 2) - So(ds.Url, ShouldEqual, "url") - So(ds.User, ShouldEqual, "user") - So(ds.Password, ShouldEqual, "password") - So(ds.Database, ShouldEqual, "database") - So(ds.BasicAuth, ShouldBeTrue) - So(ds.BasicAuthUser, ShouldEqual, "basic_auth_user") - So(ds.BasicAuthPassword, ShouldEqual, "basic_auth_password") - So(ds.WithCredentials, ShouldBeTrue) - So(ds.IsDefault, ShouldBeTrue) - So(ds.Editable, 
ShouldBeTrue) + So(dsCfg.ApiVersion, ShouldEqual, 1) - So(len(ds.JsonData), ShouldBeGreaterThan, 2) - So(ds.JsonData["graphiteVersion"], ShouldEqual, "1.1") - So(ds.JsonData["tlsAuth"], ShouldEqual, true) - So(ds.JsonData["tlsAuthWithCACert"], ShouldEqual, true) + validateDatasource(dsCfg) + validateDeleteDatasources(dsCfg) - So(len(ds.SecureJsonData), ShouldBeGreaterThan, 2) - So(ds.SecureJsonData["tlsCACert"], ShouldEqual, "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA==") - So(ds.SecureJsonData["tlsClientCert"], ShouldEqual, "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ==") - So(ds.SecureJsonData["tlsClientKey"], ShouldEqual, "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A==") + dsCount := 0 + delDsCount := 0 - dstwo := cfg[1].Datasources[0] - So(dstwo.Name, ShouldEqual, "name2") + for _, c := range cfg { + dsCount += len(c.Datasources) + delDsCount += len(c.DeleteDatasources) + } + + So(dsCount, ShouldEqual, 2) + So(delDsCount, ShouldEqual, 1) + }) + + Convey("can read all properties from version 0", func() { + cfgProvifer := &configReader{log: log.New("test logger")} + cfg, err := cfgProvifer.readConfig(versionZero) + if err != nil { + t.Fatalf("readConfig return an error %v", err) + } + + So(len(cfg), ShouldEqual, 1) + + dsCfg := cfg[0] + + So(dsCfg.ApiVersion, ShouldEqual, 0) + + validateDatasource(dsCfg) + validateDeleteDatasources(dsCfg) }) }) } +func validateDeleteDatasources(dsCfg *DatasourcesAsConfig) { + So(len(dsCfg.DeleteDatasources), ShouldEqual, 1) + deleteDs := dsCfg.DeleteDatasources[0] + So(deleteDs.Name, ShouldEqual, "old-graphite3") + So(deleteDs.OrgId, ShouldEqual, 2) +} +func validateDatasource(dsCfg *DatasourcesAsConfig) { + ds := dsCfg.Datasources[0] + So(ds.Name, ShouldEqual, "name") + So(ds.Type, ShouldEqual, "type") + So(ds.Access, ShouldEqual, models.DS_ACCESS_PROXY) + So(ds.OrgId, ShouldEqual, 2) + So(ds.Url, ShouldEqual, "url") + So(ds.User, ShouldEqual, "user") + So(ds.Password, ShouldEqual, "password") + So(ds.Database, ShouldEqual, "database") 
+ So(ds.BasicAuth, ShouldBeTrue) + So(ds.BasicAuthUser, ShouldEqual, "basic_auth_user") + So(ds.BasicAuthPassword, ShouldEqual, "basic_auth_password") + So(ds.WithCredentials, ShouldBeTrue) + So(ds.IsDefault, ShouldBeTrue) + So(ds.Editable, ShouldBeTrue) + So(ds.Version, ShouldEqual, 10) + + So(len(ds.JsonData), ShouldBeGreaterThan, 2) + So(ds.JsonData["graphiteVersion"], ShouldEqual, "1.1") + So(ds.JsonData["tlsAuth"], ShouldEqual, true) + So(ds.JsonData["tlsAuthWithCACert"], ShouldEqual, true) + + So(len(ds.SecureJsonData), ShouldBeGreaterThan, 2) + So(ds.SecureJsonData["tlsCACert"], ShouldEqual, "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA==") + So(ds.SecureJsonData["tlsClientCert"], ShouldEqual, "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ==") + So(ds.SecureJsonData["tlsClientKey"], ShouldEqual, "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A==") +} type fakeRepository struct { inserted []*models.AddDataSourceCommand diff --git a/pkg/services/provisioning/datasources/datasources.go b/pkg/services/provisioning/datasources/datasources.go index ce631c565d4..1fa0a3b3173 100644 --- a/pkg/services/provisioning/datasources/datasources.go +++ b/pkg/services/provisioning/datasources/datasources.go @@ -2,16 +2,12 @@ package datasources import ( "errors" - "io/ioutil" - "path/filepath" - "strings" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/models" - yaml "gopkg.in/yaml.v2" ) var ( @@ -25,13 +21,13 @@ func Provision(configDirectory string) error { type DatasourceProvisioner struct { log log.Logger - cfgProvider configReader + cfgProvider *configReader } func newDatasourceProvisioner(log log.Logger) DatasourceProvisioner { return DatasourceProvisioner{ log: log, - cfgProvider: configReader{}, + cfgProvider: &configReader{log: log}, } } @@ -94,61 +90,3 @@ func (dc *DatasourceProvisioner) deleteDatasources(dsToDelete []*DeleteDatasourc return nil } - -type configReader struct{} - -func (configReader) readConfig(path string) 
([]*DatasourcesAsConfig, error) { - files, err := ioutil.ReadDir(path) - if err != nil { - return nil, err - } - - var datasources []*DatasourcesAsConfig - for _, file := range files { - if strings.HasSuffix(file.Name(), ".yaml") || strings.HasSuffix(file.Name(), ".yml") { - filename, _ := filepath.Abs(filepath.Join(path, file.Name())) - yamlFile, err := ioutil.ReadFile(filename) - - if err != nil { - return nil, err - } - var datasource *DatasourcesAsConfig - err = yaml.Unmarshal(yamlFile, &datasource) - if err != nil { - return nil, err - } - - if datasource != nil { - datasources = append(datasources, datasource) - } - } - } - - defaultCount := 0 - for i := range datasources { - if datasources[i].Datasources == nil { - continue - } - - for _, ds := range datasources[i].Datasources { - if ds.OrgId == 0 { - ds.OrgId = 1 - } - - if ds.IsDefault { - defaultCount++ - if defaultCount > 1 { - return nil, ErrInvalidConfigToManyDefault - } - } - } - - for _, ds := range datasources[i].DeleteDatasources { - if ds.OrgId == 0 { - ds.OrgId = 1 - } - } - } - - return datasources, nil -} diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml b/pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml index af0d3009a4c..b92b81f7079 100644 --- a/pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml +++ b/pkg/services/provisioning/datasources/test-configs/all-properties/all-properties.yaml @@ -1,23 +1,30 @@ +apiVersion: 1 + datasources: - name: name type: type access: proxy - org_id: 2 + orgId: 2 url: url password: password user: user database: database - basic_auth: true - basic_auth_user: basic_auth_user - basic_auth_password: basic_auth_password - with_credentials: true - is_default: true - json_data: + basicAuth: true + basicAuthUser: basic_auth_user + basicAuthPassword: basic_auth_password + withCredentials: true + isDefault: true + jsonData: graphiteVersion: "1.1" 
tlsAuth: true tlsAuthWithCACert: true - secure_json_data: + secureJsonData: tlsCACert: "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA==" tlsClientCert: "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ==" tlsClientKey: "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A==" editable: true + version: 10 + +deleteDatasources: + - name: old-graphite3 + orgId: 2 diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/sample.yaml b/pkg/services/provisioning/datasources/test-configs/all-properties/sample.yaml new file mode 100644 index 00000000000..2187eabdc46 --- /dev/null +++ b/pkg/services/provisioning/datasources/test-configs/all-properties/sample.yaml @@ -0,0 +1,32 @@ +# Should not be included + + +apiVersion: 1 + +#datasources: +# - name: name +# type: type +# access: proxy +# orgId: 2 +# url: url +# password: password +# user: user +# database: database +# basicAuth: true +# basicAuthUser: basic_auth_user +# basicAuthPassword: basic_auth_password +# withCredentials: true +# jsonData: +# graphiteVersion: "1.1" +# tlsAuth: true +# tlsAuthWithCACert: true +# secureJsonData: +# tlsCACert: "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA==" +# tlsClientCert: "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ==" +# tlsClientKey: "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A==" +# editable: true +# version: 10 +# +#deleteDatasources: +# - name: old-graphite3 +# orgId: 2 diff --git a/pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml b/pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml index 43c41ee9b3b..9f27a8d07ee 100644 --- a/pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml +++ b/pkg/services/provisioning/datasources/test-configs/all-properties/second.yaml @@ -3,5 +3,5 @@ datasources: - name: name2 type: type2 access: proxy - org_id: 2 + orgId: 2 url: url2 diff --git a/pkg/services/provisioning/datasources/test-configs/version-0/version-0.yaml 
b/pkg/services/provisioning/datasources/test-configs/version-0/version-0.yaml new file mode 100644 index 00000000000..fcd4ddd6b01 --- /dev/null +++ b/pkg/services/provisioning/datasources/test-configs/version-0/version-0.yaml @@ -0,0 +1,28 @@ +datasources: + - name: name + type: type + access: proxy + org_id: 2 + url: url + password: password + user: user + database: database + basic_auth: true + basic_auth_user: basic_auth_user + basic_auth_password: basic_auth_password + with_credentials: true + is_default: true + json_data: + graphiteVersion: "1.1" + tlsAuth: true + tlsAuthWithCACert: true + secure_json_data: + tlsCACert: "MjNOcW9RdkbUDHZmpco2HCYzVq9dE+i6Yi+gmUJotq5CDA==" + tlsClientCert: "ckN0dGlyMXN503YNfjTcf9CV+GGQneN+xmAclQ==" + tlsClientKey: "ZkN4aG1aNkja/gKAB1wlnKFIsy2SRDq4slrM0A==" + editable: true + version: 10 + +delete_datasources: + - name: old-graphite3 + org_id: 2 diff --git a/pkg/services/provisioning/datasources/types.go b/pkg/services/provisioning/datasources/types.go index ee2175d6a90..8e2443a0169 100644 --- a/pkg/services/provisioning/datasources/types.go +++ b/pkg/services/provisioning/datasources/types.go @@ -1,22 +1,74 @@ package datasources -import "github.com/grafana/grafana/pkg/models" +import ( + "github.com/grafana/grafana/pkg/models" +) import "github.com/grafana/grafana/pkg/components/simplejson" +type ConfigVersion struct { + ApiVersion int64 `json:"apiVersion" yaml:"apiVersion"` +} + type DatasourcesAsConfig struct { - Datasources []*DataSourceFromConfig `json:"datasources" yaml:"datasources"` - DeleteDatasources []*DeleteDatasourceConfig `json:"delete_datasources" yaml:"delete_datasources"` + ApiVersion int64 + + Datasources []*DataSourceFromConfig + DeleteDatasources []*DeleteDatasourceConfig } type DeleteDatasourceConfig struct { + OrgId int64 + Name string +} + +type DataSourceFromConfig struct { + OrgId int64 + Version int + + Name string + Type string + Access string + Url string + Password string + User string + Database 
string + BasicAuth bool + BasicAuthUser string + BasicAuthPassword string + WithCredentials bool + IsDefault bool + JsonData map[string]interface{} + SecureJsonData map[string]string + Editable bool +} + +type DatasourcesAsConfigV0 struct { + ConfigVersion + + Datasources []*DataSourceFromConfigV0 `json:"datasources" yaml:"datasources"` + DeleteDatasources []*DeleteDatasourceConfigV0 `json:"delete_datasources" yaml:"delete_datasources"` +} + +type DatasourcesAsConfigV1 struct { + ConfigVersion + + Datasources []*DataSourceFromConfigV1 `json:"datasources" yaml:"datasources"` + DeleteDatasources []*DeleteDatasourceConfigV1 `json:"deleteDatasources" yaml:"deleteDatasources"` +} + +type DeleteDatasourceConfigV0 struct { OrgId int64 `json:"org_id" yaml:"org_id"` Name string `json:"name" yaml:"name"` } -type DataSourceFromConfig struct { - OrgId int64 `json:"org_id" yaml:"org_id"` - Version int `json:"version" yaml:"version"` +type DeleteDatasourceConfigV1 struct { + OrgId int64 `json:"orgId" yaml:"orgId"` + Name string `json:"name" yaml:"name"` +} +type DataSourceFromConfigV0 struct { + OrgId int64 `json:"org_id" yaml:"org_id"` + Version int `json:"version" yaml:"version"` Name string `json:"name" yaml:"name"` Type string `json:"type" yaml:"type"` Access string `json:"access" yaml:"access"` @@ -34,6 +86,108 @@ type DataSourceFromConfig struct { Editable bool `json:"editable" yaml:"editable"` } +type DataSourceFromConfigV1 struct { + OrgId int64 `json:"orgId" yaml:"orgId"` + Version int `json:"version" yaml:"version"` + Name string `json:"name" yaml:"name"` + Type string `json:"type" yaml:"type"` + Access string `json:"access" yaml:"access"` + Url string `json:"url" yaml:"url"` + Password string `json:"password" yaml:"password"` + User string `json:"user" yaml:"user"` + Database string `json:"database" yaml:"database"` + BasicAuth bool `json:"basicAuth" yaml:"basicAuth"` + BasicAuthUser string `json:"basicAuthUser" yaml:"basicAuthUser"` + BasicAuthPassword string 
`json:"basicAuthPassword" yaml:"basicAuthPassword"` + WithCredentials bool `json:"withCredentials" yaml:"withCredentials"` + IsDefault bool `json:"isDefault" yaml:"isDefault"` + JsonData map[string]interface{} `json:"jsonData" yaml:"jsonData"` + SecureJsonData map[string]string `json:"secureJsonData" yaml:"secureJsonData"` + Editable bool `json:"editable" yaml:"editable"` +} + +func (cfg *DatasourcesAsConfigV1) mapToDatasourceFromConfig(apiVersion int64) *DatasourcesAsConfig { + r := &DatasourcesAsConfig{} + + r.ApiVersion = apiVersion + + if cfg == nil { + return r + } + + for _, ds := range cfg.Datasources { + r.Datasources = append(r.Datasources, &DataSourceFromConfig{ + OrgId: ds.OrgId, + Name: ds.Name, + Type: ds.Type, + Access: ds.Access, + Url: ds.Url, + Password: ds.Password, + User: ds.User, + Database: ds.Database, + BasicAuth: ds.BasicAuth, + BasicAuthUser: ds.BasicAuthUser, + BasicAuthPassword: ds.BasicAuthPassword, + WithCredentials: ds.WithCredentials, + IsDefault: ds.IsDefault, + JsonData: ds.JsonData, + SecureJsonData: ds.SecureJsonData, + Editable: ds.Editable, + Version: ds.Version, + }) + } + + for _, ds := range cfg.DeleteDatasources { + r.DeleteDatasources = append(r.DeleteDatasources, &DeleteDatasourceConfig{ + OrgId: ds.OrgId, + Name: ds.Name, + }) + } + + return r +} + +func (cfg *DatasourcesAsConfigV0) mapToDatasourceFromConfig(apiVersion int64) *DatasourcesAsConfig { + r := &DatasourcesAsConfig{} + + r.ApiVersion = apiVersion + + if cfg == nil { + return r + } + + for _, ds := range cfg.Datasources { + r.Datasources = append(r.Datasources, &DataSourceFromConfig{ + OrgId: ds.OrgId, + Name: ds.Name, + Type: ds.Type, + Access: ds.Access, + Url: ds.Url, + Password: ds.Password, + User: ds.User, + Database: ds.Database, + BasicAuth: ds.BasicAuth, + BasicAuthUser: ds.BasicAuthUser, + BasicAuthPassword: ds.BasicAuthPassword, + WithCredentials: ds.WithCredentials, + IsDefault: ds.IsDefault, + JsonData: ds.JsonData, + SecureJsonData: 
ds.SecureJsonData, + Editable: ds.Editable, + Version: ds.Version, + }) + } + + for _, ds := range cfg.DeleteDatasources { + r.DeleteDatasources = append(r.DeleteDatasources, &DeleteDatasourceConfig{ + OrgId: ds.OrgId, + Name: ds.Name, + }) + } + + return r +} + func createInsertCommand(ds *DataSourceFromConfig) *models.AddDataSourceCommand { jsonData := simplejson.New() if len(ds.JsonData) > 0 { diff --git a/pkg/services/quota/quota.go b/pkg/services/quota/quota.go new file mode 100644 index 00000000000..2ec399437e6 --- /dev/null +++ b/pkg/services/quota/quota.go @@ -0,0 +1,87 @@ +package quota + +import ( + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/session" + "github.com/grafana/grafana/pkg/setting" +) + +func QuotaReached(c *m.ReqContext, target string) (bool, error) { + if !setting.Quota.Enabled { + return false, nil + } + + // get the list of scopes that this target is valid for. Org, User, Global + scopes, err := m.GetQuotaScopes(target) + if err != nil { + return false, err + } + + for _, scope := range scopes { + c.Logger.Debug("Checking quota", "target", target, "scope", scope) + + switch scope.Name { + case "global": + if scope.DefaultLimit < 0 { + continue + } + if scope.DefaultLimit == 0 { + return true, nil + } + if target == "session" { + usedSessions := session.GetSessionCount() + if int64(usedSessions) > scope.DefaultLimit { + c.Logger.Debug("Sessions limit reached", "active", usedSessions, "limit", scope.DefaultLimit) + return true, nil + } + continue + } + query := m.GetGlobalQuotaByTargetQuery{Target: scope.Target} + if err := bus.Dispatch(&query); err != nil { + return true, err + } + if query.Result.Used >= scope.DefaultLimit { + return true, nil + } + case "org": + if !c.IsSignedIn { + continue + } + query := m.GetOrgQuotaByTargetQuery{OrgId: c.OrgId, Target: scope.Target, Default: scope.DefaultLimit} + if err := bus.Dispatch(&query); err != nil { + return true, 
err + } + if query.Result.Limit < 0 { + continue + } + if query.Result.Limit == 0 { + return true, nil + } + + if query.Result.Used >= query.Result.Limit { + return true, nil + } + case "user": + if !c.IsSignedIn || c.UserId == 0 { + continue + } + query := m.GetUserQuotaByTargetQuery{UserId: c.UserId, Target: scope.Target, Default: scope.DefaultLimit} + if err := bus.Dispatch(&query); err != nil { + return true, err + } + if query.Result.Limit < 0 { + continue + } + if query.Result.Limit == 0 { + return true, nil + } + + if query.Result.Used >= query.Result.Limit { + return true, nil + } + } + } + + return false, nil +} diff --git a/pkg/services/search/handlers.go b/pkg/services/search/handlers.go index 247585402ef..cf194c320bb 100644 --- a/pkg/services/search/handlers.go +++ b/pkg/services/search/handlers.go @@ -21,6 +21,7 @@ func searchHandler(query *Query) error { FolderIds: query.FolderIds, Tags: query.Tags, Limit: query.Limit, + Permission: query.Permission, } if err := bus.Dispatch(&dashQuery); err != nil { diff --git a/pkg/services/search/models.go b/pkg/services/search/models.go index cf510ed8462..2da09672f13 100644 --- a/pkg/services/search/models.go +++ b/pkg/services/search/models.go @@ -13,15 +13,17 @@ const ( type Hit struct { Id int64 `json:"id"` + Uid string `json:"uid"` Title string `json:"title"` Uri string `json:"uri"` - Slug string `json:"slug"` + Url string `json:"url"` Type HitType `json:"type"` Tags []string `json:"tags"` IsStarred bool `json:"isStarred"` FolderId int64 `json:"folderId,omitempty"` + FolderUid string `json:"folderUid,omitempty"` FolderTitle string `json:"folderTitle,omitempty"` - FolderSlug string `json:"folderSlug,omitempty"` + FolderUrl string `json:"folderUrl,omitempty"` } type HitList []*Hit @@ -50,6 +52,7 @@ type Query struct { Type string DashboardIds []int64 FolderIds []int64 + Permission models.PermissionType Result HitList } @@ -64,7 +67,7 @@ type FindPersistedDashboardsQuery struct { FolderIds []int64 Tags []string 
Limit int - IsBrowse bool + Permission models.PermissionType Result HitList } diff --git a/pkg/services/session/session.go b/pkg/services/session/session.go new file mode 100644 index 00000000000..2ca9296b97f --- /dev/null +++ b/pkg/services/session/session.go @@ -0,0 +1,162 @@ +package session + +import ( + "math/rand" + "time" + + ms "github.com/go-macaron/session" + _ "github.com/go-macaron/session/memcache" + _ "github.com/go-macaron/session/mysql" + _ "github.com/go-macaron/session/postgres" + _ "github.com/go-macaron/session/redis" + "github.com/grafana/grafana/pkg/log" + "gopkg.in/macaron.v1" +) + +const ( + SESS_KEY_USERID = "uid" + SESS_KEY_OAUTH_STATE = "state" + SESS_KEY_APIKEY = "apikey_id" // used for render requests with api keys + SESS_KEY_LASTLDAPSYNC = "last_ldap_sync" +) + +var sessionManager *ms.Manager +var sessionOptions *ms.Options +var StartSessionGC func() +var GetSessionCount func() int +var sessionLogger = log.New("session") + +func init() { + StartSessionGC = func() { + sessionManager.GC() + sessionLogger.Debug("Session GC") + time.AfterFunc(time.Duration(sessionOptions.Gclifetime)*time.Second, StartSessionGC) + } + GetSessionCount = func() int { + return sessionManager.Count() + } +} + +func Init(options *ms.Options) { + var err error + sessionOptions = prepareOptions(options) + sessionManager, err = ms.NewManager(options.Provider, *options) + if err != nil { + panic(err) + } + + // start GC threads after some random seconds + rndSeconds := 10 + rand.Int63n(180) + time.AfterFunc(time.Duration(rndSeconds)*time.Second, StartSessionGC) +} + +func prepareOptions(opt *ms.Options) *ms.Options { + if len(opt.Provider) == 0 { + opt.Provider = "memory" + } + if len(opt.ProviderConfig) == 0 { + opt.ProviderConfig = "data/sessions" + } + if len(opt.CookieName) == 0 { + opt.CookieName = "grafana_sess" + } + if len(opt.CookiePath) == 0 { + opt.CookiePath = "/" + } + if opt.Gclifetime == 0 { + opt.Gclifetime = 3600 + } + if opt.Maxlifetime == 0 { + 
opt.Maxlifetime = opt.Gclifetime + } + if opt.IDLength == 0 { + opt.IDLength = 16 + } + + return opt +} + +func GetSession() SessionStore { + return &SessionWrapper{manager: sessionManager} +} + +type SessionStore interface { + // Set sets value to given key in session. + Set(interface{}, interface{}) error + // Get gets value by given key in session. + Get(interface{}) interface{} + // Delete deletes a key from session. + Delete(interface{}) interface{} + // ID returns current session ID. + ID() string + // Release releases session resource and save data to provider. + Release() error + // Destory deletes a session. + Destory(*macaron.Context) error + // init + Start(*macaron.Context) error + // RegenerateId regenerates the session id + RegenerateId(*macaron.Context) error +} + +type SessionWrapper struct { + session ms.RawStore + manager *ms.Manager +} + +func (s *SessionWrapper) Start(c *macaron.Context) error { + var err error + s.session, err = s.manager.Start(c) + return err +} + +func (s *SessionWrapper) RegenerateId(c *macaron.Context) error { + var err error + s.session, err = s.manager.RegenerateId(c) + return err +} + +func (s *SessionWrapper) Set(k interface{}, v interface{}) error { + if s.session != nil { + return s.session.Set(k, v) + } + return nil +} + +func (s *SessionWrapper) Get(k interface{}) interface{} { + if s.session != nil { + return s.session.Get(k) + } + return nil +} + +func (s *SessionWrapper) Delete(k interface{}) interface{} { + if s.session != nil { + return s.session.Delete(k) + } + return nil +} + +func (s *SessionWrapper) ID() string { + if s.session != nil { + return s.session.ID() + } + return "" +} + +func (s *SessionWrapper) Release() error { + if s.session != nil { + return s.session.Release() + } + return nil +} + +func (s *SessionWrapper) Destory(c *macaron.Context) error { + if s.session != nil { + if err := s.manager.Destory(c); err != nil { + return err + } + s.session = nil + } + return nil +} diff --git 
a/pkg/services/sqlstore/alert.go b/pkg/services/sqlstore/alert.go index 73be7d774fd..f449bec5849 100644 --- a/pkg/services/sqlstore/alert.go +++ b/pkg/services/sqlstore/alert.go @@ -10,11 +10,13 @@ import ( m "github.com/grafana/grafana/pkg/models" ) +// timeNow makes it possible to test usage of time +var timeNow = time.Now + func init() { bus.AddHandler("sql", SaveAlerts) bus.AddHandler("sql", HandleAlertsQuery) bus.AddHandler("sql", GetAlertById) - bus.AddHandler("sql", DeleteAlertById) bus.AddHandler("sql", GetAllAlertQueryHandler) bus.AddHandler("sql", SetAlertState) bus.AddHandler("sql", GetAlertStatesForDashboard) @@ -24,7 +26,7 @@ func init() { func GetAlertById(query *m.GetAlertByIdQuery) error { alert := m.Alert{} - has, err := x.Id(query.Id).Get(&alert) + has, err := x.ID(query.Id).Get(&alert) if !has { return fmt.Errorf("could not find alert") } @@ -61,59 +63,62 @@ func deleteAlertByIdInternal(alertId int64, reason string, sess *DBSession) erro return nil } -func DeleteAlertById(cmd *m.DeleteAlertCommand) error { - return inTransaction(func(sess *DBSession) error { - return deleteAlertByIdInternal(cmd.AlertId, "DeleteAlertCommand", sess) - }) -} - func HandleAlertsQuery(query *m.GetAlertsQuery) error { - var sql bytes.Buffer - params := make([]interface{}, 0) + builder := SqlBuilder{} - sql.WriteString(`SELECT * - from alert - `) + builder.Write(`SELECT + alert.id, + alert.dashboard_id, + alert.panel_id, + alert.name, + alert.state, + alert.new_state_date, + alert.eval_date, + alert.execution_error, + dashboard.uid as dashboard_uid, + dashboard.slug as dashboard_slug + FROM alert + INNER JOIN dashboard on dashboard.id = alert.dashboard_id `) - sql.WriteString(`WHERE org_id = ?`) - params = append(params, query.OrgId) + builder.Write(`WHERE alert.org_id = ?`, query.OrgId) if query.DashboardId != 0 { - sql.WriteString(` AND dashboard_id = ?`) - params = append(params, query.DashboardId) + builder.Write(` AND alert.dashboard_id = ?`, query.DashboardId) } 
if query.PanelId != 0 { - sql.WriteString(` AND panel_id = ?`) - params = append(params, query.PanelId) + builder.Write(` AND alert.panel_id = ?`, query.PanelId) } - if len(query.State) > 0 && query.State[0] != "ALL" { - sql.WriteString(` AND (`) + if len(query.State) > 0 && query.State[0] != "all" { + builder.Write(` AND (`) for i, v := range query.State { if i > 0 { - sql.WriteString(" OR ") + builder.Write(" OR ") } if strings.HasPrefix(v, "not_") { - sql.WriteString("state <> ? ") + builder.Write("state <> ? ") v = strings.TrimPrefix(v, "not_") } else { - sql.WriteString("state = ? ") + builder.Write("state = ? ") } - params = append(params, v) + builder.AddParams(v) } - sql.WriteString(")") + builder.Write(")") } - sql.WriteString(" ORDER BY name ASC") + if query.User.OrgRole != m.ROLE_ADMIN { + builder.writeDashboardPermissionFilter(query.User, m.PERMISSION_EDIT) + } + + builder.Write(" ORDER BY name ASC") if query.Limit != 0 { - sql.WriteString(" LIMIT ?") - params = append(params, query.Limit) + builder.Write(" LIMIT ?", query.Limit) } - alerts := make([]*m.Alert, 0) - if err := x.Sql(sql.String(), params...).Find(&alerts); err != nil { + alerts := make([]*m.AlertListItemDTO, 0) + if err := x.SQL(builder.GetSqlString(), builder.params...).Find(&alerts); err != nil { return err } @@ -127,7 +132,7 @@ func HandleAlertsQuery(query *m.GetAlertsQuery) error { return nil } -func DeleteAlertDefinition(dashboardId int64, sess *DBSession) error { +func deleteAlertDefinition(dashboardId int64, sess *DBSession) error { alerts := make([]*m.Alert, 0) sess.Where("dashboard_id = ?", dashboardId).Find(&alerts) @@ -145,7 +150,7 @@ func SaveAlerts(cmd *m.SaveAlertsCommand) error { return err } - if err := upsertAlerts(existingAlerts, cmd, sess); err != nil { + if err := updateAlerts(existingAlerts, cmd, sess); err != nil { return err } @@ -157,7 +162,7 @@ func SaveAlerts(cmd *m.SaveAlertsCommand) error { }) } -func upsertAlerts(existingAlerts []*m.Alert, cmd 
*m.SaveAlertsCommand, sess *DBSession) error { +func updateAlerts(existingAlerts []*m.Alert, cmd *m.SaveAlertsCommand, sess *DBSession) error { for _, alert := range cmd.Alerts { update := false var alertToUpdate *m.Alert @@ -173,7 +178,7 @@ func upsertAlerts(existingAlerts []*m.Alert, cmd *m.SaveAlertsCommand, sess *DBS if update { if alertToUpdate.ContainsUpdates(alert) { - alert.Updated = time.Now() + alert.Updated = timeNow() alert.State = alertToUpdate.State sess.MustCols("message") _, err := sess.Id(alert.Id).Update(alert) @@ -184,10 +189,10 @@ func upsertAlerts(existingAlerts []*m.Alert, cmd *m.SaveAlertsCommand, sess *DBS sqlog.Debug("Alert updated", "name", alert.Name, "id", alert.Id) } } else { - alert.Updated = time.Now() - alert.Created = time.Now() + alert.Updated = timeNow() + alert.Created = timeNow() alert.State = m.AlertStatePending - alert.NewStateDate = time.Now() + alert.NewStateDate = timeNow() _, err := sess.Insert(alert) if err != nil { @@ -251,7 +256,7 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error { alert.State = cmd.State alert.StateChanges += 1 - alert.NewStateDate = time.Now() + alert.NewStateDate = timeNow() alert.EvalData = cmd.EvalData if cmd.Error == "" { @@ -260,7 +265,7 @@ func SetAlertState(cmd *m.SetAlertStateCommand) error { alert.ExecutionError = cmd.Error } - sess.Id(alert.Id).Update(&alert) + sess.ID(alert.Id).Update(&alert) return nil }) } @@ -274,11 +279,13 @@ func PauseAlert(cmd *m.PauseAlertCommand) error { var buffer bytes.Buffer params := make([]interface{}, 0) - buffer.WriteString(`UPDATE alert SET state = ?`) + buffer.WriteString(`UPDATE alert SET state = ?, new_state_date = ?`) if cmd.Paused { params = append(params, string(m.AlertStatePaused)) + params = append(params, timeNow()) } else { params = append(params, string(m.AlertStatePending)) + params = append(params, timeNow()) } buffer.WriteString(` WHERE id IN (?` + strings.Repeat(",?", len(cmd.AlertIds)-1) + `)`) @@ -304,7 +311,7 @@ func 
PauseAllAlerts(cmd *m.PauseAllAlertCommand) error { newState = string(m.AlertStatePending) } - res, err := sess.Exec(`UPDATE alert SET state = ?`, newState) + res, err := sess.Exec(`UPDATE alert SET state = ?, new_state_date = ?`, newState, timeNow()) if err != nil { return err } @@ -324,7 +331,7 @@ func GetAlertStatesForDashboard(query *m.GetAlertStatesForDashboardQuery) error WHERE org_id = ? AND dashboard_id = ?` query.Result = make([]*m.AlertStateInfoDTO, 0) - err := x.Sql(rawSql, query.OrgId, query.DashboardId).Find(&query.Result) + err := x.SQL(rawSql, query.OrgId, query.DashboardId).Find(&query.Result) return err } diff --git a/pkg/services/sqlstore/alert_notification.go b/pkg/services/sqlstore/alert_notification.go index 5e66627f194..ae691c7166c 100644 --- a/pkg/services/sqlstore/alert_notification.go +++ b/pkg/services/sqlstore/alert_notification.go @@ -76,7 +76,7 @@ func GetAlertNotificationsToSend(query *m.GetAlertNotificationsToSendQuery) erro sql.WriteString(`)`) results := make([]*m.AlertNotification, 0) - if err := x.Sql(sql.String(), params...).Find(&results); err != nil { + if err := x.SQL(sql.String(), params...).Find(&results); err != nil { return err } @@ -165,7 +165,7 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error { return inTransaction(func(sess *DBSession) (err error) { current := m.AlertNotification{} - if _, err = sess.Id(cmd.Id).Get(¤t); err != nil { + if _, err = sess.ID(cmd.Id).Get(¤t); err != nil { return err } @@ -187,7 +187,7 @@ func UpdateAlertNotification(cmd *m.UpdateAlertNotificationCommand) error { sess.UseBool("is_default") - if affected, err := sess.Id(cmd.Id).Update(current); err != nil { + if affected, err := sess.ID(cmd.Id).Update(current); err != nil { return err } else if affected == 0 { return fmt.Errorf("Could not find alert notification") diff --git a/pkg/services/sqlstore/alert_test.go b/pkg/services/sqlstore/alert_test.go index 7b27f5b9ca4..296d16c2f45 100644 --- 
a/pkg/services/sqlstore/alert_test.go +++ b/pkg/services/sqlstore/alert_test.go @@ -6,9 +6,26 @@ import ( "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" . "github.com/smartystreets/goconvey/convey" + "time" ) +func mockTimeNow() { + var timeSeed int64 + timeNow = func() time.Time { + fakeNow := time.Unix(timeSeed, 0) + timeSeed += 1 + return fakeNow + } +} + +func resetTimeNow() { + timeNow = time.Now +} + func TestAlertingDataAccess(t *testing.T) { + mockTimeNow() + defer resetTimeNow() + Convey("Testing Alerting data access", t, func() { InitTestDB(t) @@ -50,13 +67,11 @@ func TestAlertingDataAccess(t *testing.T) { So(err, ShouldBeNil) }) - Convey("can pause alert", func() { - cmd := &m.PauseAllAlertCommand{ - Paused: true, - } + alert, _ := getAlertById(1) + stateDateBeforePause := alert.NewStateDate - err = PauseAllAlerts(cmd) - So(err, ShouldBeNil) + Convey("can pause all alerts", func() { + pauseAllAlerts(true) Convey("cannot updated paused alert", func() { cmd := &m.SetAlertStateCommand{ @@ -67,19 +82,38 @@ func TestAlertingDataAccess(t *testing.T) { err = SetAlertState(cmd) So(err, ShouldNotBeNil) }) + + Convey("pausing alerts should update their NewStateDate", func() { + alert, _ = getAlertById(1) + stateDateAfterPause := alert.NewStateDate + So(stateDateBeforePause, ShouldHappenBefore, stateDateAfterPause) + }) + + Convey("unpausing alerts should update their NewStateDate again", func() { + pauseAllAlerts(false) + alert, _ = getAlertById(1) + stateDateAfterUnpause := alert.NewStateDate + So(stateDateBeforePause, ShouldHappenBefore, stateDateAfterUnpause) + }) }) }) Convey("Can read properties", func() { - alertQuery := m.GetAlertsQuery{DashboardId: testDash.Id, PanelId: 1, OrgId: 1} + alertQuery := m.GetAlertsQuery{DashboardId: testDash.Id, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&alertQuery) alert := alertQuery.Result[0] So(err2, ShouldBeNil) 
So(alert.Name, ShouldEqual, "Alerting title") - So(alert.Message, ShouldEqual, "Alerting message") So(alert.State, ShouldEqual, "pending") - So(alert.Frequency, ShouldEqual, 1) + }) + + Convey("Viewer cannot read alerts", func() { + alertQuery := m.GetAlertsQuery{DashboardId: testDash.Id, PanelId: 1, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_VIEWER}} + err2 := HandleAlertsQuery(&alertQuery) + + So(err2, ShouldBeNil) + So(alertQuery.Result, ShouldHaveLength, 0) }) Convey("Alerts with same dashboard id and panel id should update", func() { @@ -100,7 +134,7 @@ func TestAlertingDataAccess(t *testing.T) { }) Convey("Alerts should be updated", func() { - query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1} + query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&query) So(err2, ShouldBeNil) @@ -149,7 +183,7 @@ func TestAlertingDataAccess(t *testing.T) { Convey("Should save 3 dashboards", func() { So(err, ShouldBeNil) - queryForDashboard := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1} + queryForDashboard := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&queryForDashboard) So(err2, ShouldBeNil) @@ -163,7 +197,7 @@ func TestAlertingDataAccess(t *testing.T) { err = SaveAlerts(&cmd) Convey("should delete the missing alert", func() { - query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1} + query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 := HandleAlertsQuery(&query) So(err2, ShouldBeNil) So(len(query.Result), ShouldEqual, 2) @@ -198,7 +232,7 @@ func TestAlertingDataAccess(t *testing.T) { So(err, ShouldBeNil) Convey("Alerts should be removed", func() { - query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1} + query := m.GetAlertsQuery{DashboardId: testDash.Id, OrgId: 1, User: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}} err2 
:= HandleAlertsQuery(&query) So(testDash.Id, ShouldEqual, 1) @@ -208,3 +242,90 @@ func TestAlertingDataAccess(t *testing.T) { }) }) } + +func TestPausingAlerts(t *testing.T) { + mockTimeNow() + defer resetTimeNow() + + Convey("Given an alert", t, func() { + InitTestDB(t) + + testDash := insertTestDashboard("dashboard with alerts", 1, 0, false, "alert") + alert, _ := insertTestAlert("Alerting title", "Alerting message", testDash.OrgId, testDash.Id, simplejson.New()) + + stateDateBeforePause := alert.NewStateDate + stateDateAfterPause := stateDateBeforePause + Convey("when paused", func() { + pauseAlert(testDash.OrgId, 1, true) + + Convey("the NewStateDate should be updated", func() { + alert, _ := getAlertById(1) + + stateDateAfterPause = alert.NewStateDate + So(stateDateBeforePause, ShouldHappenBefore, stateDateAfterPause) + }) + }) + + Convey("when unpaused", func() { + pauseAlert(testDash.OrgId, 1, false) + + Convey("the NewStateDate should be updated again", func() { + alert, _ := getAlertById(1) + + stateDateAfterUnpause := alert.NewStateDate + So(stateDateAfterPause, ShouldHappenBefore, stateDateAfterUnpause) + }) + }) + }) +} +func pauseAlert(orgId int64, alertId int64, pauseState bool) (int64, error) { + cmd := &m.PauseAlertCommand{ + OrgId: orgId, + AlertIds: []int64{alertId}, + Paused: pauseState, + } + err := PauseAlert(cmd) + So(err, ShouldBeNil) + return cmd.ResultCount, err +} +func insertTestAlert(title string, message string, orgId int64, dashId int64, settings *simplejson.Json) (*m.Alert, error) { + items := []*m.Alert{ + { + PanelId: 1, + DashboardId: dashId, + OrgId: orgId, + Name: title, + Message: message, + Settings: settings, + Frequency: 1, + }, + } + + cmd := m.SaveAlertsCommand{ + Alerts: items, + DashboardId: dashId, + OrgId: orgId, + UserId: 1, + } + + err := SaveAlerts(&cmd) + return cmd.Alerts[0], err +} + +func getAlertById(id int64) (*m.Alert, error) { + q := &m.GetAlertByIdQuery{ + Id: id, + } + err := GetAlertById(q) + So(err, 
ShouldBeNil) + return q.Result, err +} + +func pauseAllAlerts(pauseState bool) error { + cmd := &m.PauseAllAlertCommand{ + Paused: pauseState, + } + err := PauseAllAlerts(cmd) + So(err, ShouldBeNil) + return err +} diff --git a/pkg/services/sqlstore/annotation.go b/pkg/services/sqlstore/annotation.go index effffb8bab4..76f1819a18c 100644 --- a/pkg/services/sqlstore/annotation.go +++ b/pkg/services/sqlstore/annotation.go @@ -138,6 +138,17 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I sql.WriteString(`WHERE annotation.org_id = ?`) params = append(params, query.OrgId) + if query.AnnotationId != 0 { + fmt.Print("annotation query") + sql.WriteString(` AND annotation.id = ?`) + params = append(params, query.AnnotationId) + } + + if query.RegionId != 0 { + sql.WriteString(` AND annotation.region_id = ?`) + params = append(params, query.RegionId) + } + if query.AlertId != 0 { sql.WriteString(` AND annotation.alert_id = ?`) params = append(params, query.AlertId) @@ -197,6 +208,7 @@ func (r *SqlAnnotationRepo) Find(query *annotations.ItemQuery) ([]*annotations.I sql.WriteString(fmt.Sprintf(" ORDER BY epoch DESC LIMIT %v", query.Limit)) items := make([]*annotations.ItemDTO, 0) + if err := x.Sql(sql.String(), params...).Find(&items); err != nil { return nil, err } diff --git a/pkg/services/sqlstore/annotation_test.go b/pkg/services/sqlstore/annotation_test.go index 2afd4479b66..d5cee110b9a 100644 --- a/pkg/services/sqlstore/annotation_test.go +++ b/pkg/services/sqlstore/annotation_test.go @@ -51,6 +51,20 @@ func TestAnnotations(t *testing.T) { So(err, ShouldBeNil) So(annotation.Id, ShouldBeGreaterThan, 0) + annotation2 := &annotations.Item{ + OrgId: 1, + UserId: 1, + DashboardId: 2, + Text: "hello", + Type: "alert", + Epoch: 20, + Tags: []string{"outage", "error", "type:outage", "server:server-1"}, + RegionId: 1, + } + err = repo.Save(annotation2) + So(err, ShouldBeNil) + So(annotation2.Id, ShouldBeGreaterThan, 0) + Convey("Can query for 
annotation", func() { items, err := repo.Find(&annotations.ItemQuery{ OrgId: 1, @@ -67,6 +81,28 @@ func TestAnnotations(t *testing.T) { }) }) + Convey("Can query for annotation by id", func() { + items, err := repo.Find(&annotations.ItemQuery{ + OrgId: 1, + AnnotationId: annotation2.Id, + }) + + So(err, ShouldBeNil) + So(items, ShouldHaveLength, 1) + So(items[0].Id, ShouldEqual, annotation2.Id) + }) + + Convey("Can query for annotation by region id", func() { + items, err := repo.Find(&annotations.ItemQuery{ + OrgId: 1, + RegionId: annotation2.RegionId, + }) + + So(err, ShouldBeNil) + So(items, ShouldHaveLength, 1) + So(items[0].Id, ShouldEqual, annotation2.Id) + }) + Convey("Should not find any when item is outside time range", func() { items, err := repo.Find(&annotations.ItemQuery{ OrgId: 1, diff --git a/pkg/services/sqlstore/dashboard.go b/pkg/services/sqlstore/dashboard.go index 0b6b60a5e11..8a89c3d942c 100644 --- a/pkg/services/sqlstore/dashboard.go +++ b/pkg/services/sqlstore/dashboard.go @@ -1,12 +1,14 @@ package sqlstore import ( + "strings" "time" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/metrics" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/search" + "github.com/grafana/grafana/pkg/util" ) func init() { @@ -17,155 +19,155 @@ func init() { bus.AddHandler("sql", SearchDashboards) bus.AddHandler("sql", GetDashboardTags) bus.AddHandler("sql", GetDashboardSlugById) + bus.AddHandler("sql", GetDashboardUIDById) bus.AddHandler("sql", GetDashboardsByPluginId) + bus.AddHandler("sql", GetDashboardPermissionsForUser) + bus.AddHandler("sql", GetDashboardsBySlug) + bus.AddHandler("sql", ValidateDashboardBeforeSave) } +var generateNewUid func() string = util.GenerateShortUid + func SaveDashboard(cmd *m.SaveDashboardCommand) error { return inTransaction(func(sess *DBSession) error { - dash := cmd.GetDashboardModel() - - // try get existing dashboard - var existing, sameTitle m.Dashboard - - if dash.Id > 0 
{ - dashWithIdExists, err := sess.Where("id=? AND org_id=?", dash.Id, dash.OrgId).Get(&existing) - if err != nil { - return err - } - if !dashWithIdExists { - return m.ErrDashboardNotFound - } - - // check for is someone else has written in between - if dash.Version != existing.Version { - if cmd.Overwrite { - dash.Version = existing.Version - } else { - return m.ErrDashboardVersionMismatch - } - } - - // do not allow plugin dashboard updates without overwrite flag - if existing.PluginId != "" && cmd.Overwrite == false { - return m.UpdatePluginDashboardError{PluginId: existing.PluginId} - } - } - - sameTitleExists, err := sess.Where("org_id=? AND slug=?", dash.OrgId, dash.Slug).Get(&sameTitle) - if err != nil { - return err - } - - if sameTitleExists { - // another dashboard with same name - if dash.Id != sameTitle.Id { - if cmd.Overwrite { - dash.Id = sameTitle.Id - dash.Version = sameTitle.Version - } else { - return m.ErrDashboardWithSameNameExists - } - } - } - - err = setHasAcl(sess, dash) - if err != nil { - return err - } - - parentVersion := dash.Version - affectedRows := int64(0) - - if dash.Id == 0 { - dash.Version = 1 - metrics.M_Api_Dashboard_Insert.Inc() - dash.Data.Set("version", dash.Version) - affectedRows, err = sess.Insert(dash) - } else { - dash.Version++ - dash.Data.Set("version", dash.Version) - - if !cmd.UpdatedAt.IsZero() { - dash.Updated = cmd.UpdatedAt - } - - affectedRows, err = sess.MustCols("folder_id", "has_acl").Id(dash.Id).Update(dash) - } - - if err != nil { - return err - } - - if affectedRows == 0 { - return m.ErrDashboardNotFound - } - - dashVersion := &m.DashboardVersion{ - DashboardId: dash.Id, - ParentVersion: parentVersion, - RestoredFrom: cmd.RestoredFrom, - Version: dash.Version, - Created: time.Now(), - CreatedBy: dash.UpdatedBy, - Message: cmd.Message, - Data: dash.Data, - } - - // insert version entry - if affectedRows, err = sess.Insert(dashVersion); err != nil { - return err - } else if affectedRows == 0 { - return 
m.ErrDashboardNotFound - } - - // delete existing tags - _, err = sess.Exec("DELETE FROM dashboard_tag WHERE dashboard_id=?", dash.Id) - if err != nil { - return err - } - - // insert new tags - tags := dash.GetTags() - if len(tags) > 0 { - for _, tag := range tags { - if _, err := sess.Insert(&DashboardTag{DashboardId: dash.Id, Term: tag}); err != nil { - return err - } - } - } - cmd.Result = dash - - return err + return saveDashboard(sess, cmd) }) } -func setHasAcl(sess *DBSession, dash *m.Dashboard) error { - // check if parent has acl - if dash.FolderId > 0 { - var parent m.Dashboard - if hasParent, err := sess.Where("folder_id=?", dash.FolderId).Get(&parent); err != nil { +func saveDashboard(sess *DBSession, cmd *m.SaveDashboardCommand) error { + dash := cmd.GetDashboardModel() + + userId := cmd.UserId + + if userId == 0 { + userId = -1 + } + + if dash.Id > 0 { + var existing m.Dashboard + dashWithIdExists, err := sess.Where("id=? AND org_id=?", dash.Id, dash.OrgId).Get(&existing) + if err != nil { return err - } else if hasParent && parent.HasAcl { - dash.HasAcl = true + } + if !dashWithIdExists { + return m.ErrDashboardNotFound + } + + // check for is someone else has written in between + if dash.Version != existing.Version { + if cmd.Overwrite { + dash.SetVersion(existing.Version) + } else { + return m.ErrDashboardVersionMismatch + } + } + + // do not allow plugin dashboard updates without overwrite flag + if existing.PluginId != "" && cmd.Overwrite == false { + return m.UpdatePluginDashboardError{PluginId: existing.PluginId} } } - // check if dash has its own acl - if dash.Id > 0 { - if res, err := sess.Query("SELECT 1 from dashboard_acl WHERE dashboard_id =?", dash.Id); err != nil { + if dash.Uid == "" { + uid, err := generateNewDashboardUid(sess, dash.OrgId) + if err != nil { return err + } + dash.SetUid(uid) + } + + parentVersion := dash.Version + affectedRows := int64(0) + var err error + + if dash.Id == 0 { + dash.SetVersion(1) + dash.Created = 
time.Now() + dash.CreatedBy = userId + dash.Updated = time.Now() + dash.UpdatedBy = userId + metrics.M_Api_Dashboard_Insert.Inc() + affectedRows, err = sess.Insert(dash) + } else { + dash.SetVersion(dash.Version + 1) + + if !cmd.UpdatedAt.IsZero() { + dash.Updated = cmd.UpdatedAt } else { - if len(res) > 0 { - dash.HasAcl = true + dash.Updated = time.Now() + } + + dash.UpdatedBy = userId + + affectedRows, err = sess.MustCols("folder_id").ID(dash.Id).Update(dash) + } + + if err != nil { + return err + } + + if affectedRows == 0 { + return m.ErrDashboardNotFound + } + + dashVersion := &m.DashboardVersion{ + DashboardId: dash.Id, + ParentVersion: parentVersion, + RestoredFrom: cmd.RestoredFrom, + Version: dash.Version, + Created: time.Now(), + CreatedBy: dash.UpdatedBy, + Message: cmd.Message, + Data: dash.Data, + } + + // insert version entry + if affectedRows, err = sess.Insert(dashVersion); err != nil { + return err + } else if affectedRows == 0 { + return m.ErrDashboardNotFound + } + + // delete existing tags + _, err = sess.Exec("DELETE FROM dashboard_tag WHERE dashboard_id=?", dash.Id) + if err != nil { + return err + } + + // insert new tags + tags := dash.GetTags() + if len(tags) > 0 { + for _, tag := range tags { + if _, err := sess.Insert(&DashboardTag{DashboardId: dash.Id, Term: tag}); err != nil { + return err } } } - return nil + cmd.Result = dash + + return err +} + +func generateNewDashboardUid(sess *DBSession, orgId int64) (string, error) { + for i := 0; i < 3; i++ { + uid := generateNewUid() + + exists, err := sess.Where("org_id=? 
AND uid=?", orgId, uid).Get(&m.Dashboard{}) + if err != nil { + return "", err + } + + if !exists { + return uid, nil + } + } + + return "", m.ErrDashboardFailedGenerateUniqueUid } func GetDashboard(query *m.GetDashboardQuery) error { - dashboard := m.Dashboard{Slug: query.Slug, OrgId: query.OrgId, Id: query.Id} + dashboard := m.Dashboard{Slug: query.Slug, OrgId: query.OrgId, Id: query.Id, Uid: query.Uid} has, err := x.Get(&dashboard) if err != nil { @@ -174,18 +176,21 @@ func GetDashboard(query *m.GetDashboardQuery) error { return m.ErrDashboardNotFound } - dashboard.Data.Set("id", dashboard.Id) + dashboard.SetId(dashboard.Id) + dashboard.SetUid(dashboard.Uid) query.Result = &dashboard return nil } type DashboardSearchProjection struct { Id int64 + Uid string Title string Slug string Term string IsFolder bool FolderId int64 + FolderUid string FolderSlug string FolderTitle string } @@ -196,7 +201,7 @@ func findDashboards(query *search.FindPersistedDashboardsQuery) ([]DashboardSear limit = 1000 } - sb := NewSearchBuilder(query.SignedInUser, limit). + sb := NewSearchBuilder(query.SignedInUser, limit, query.Permission). WithTags(query.Tags). 
WithDashboardIdsIn(query.DashboardIds) @@ -258,15 +263,21 @@ func makeQueryResult(query *search.FindPersistedDashboardsQuery, res []Dashboard if !exists { hit = &search.Hit{ Id: item.Id, + Uid: item.Uid, Title: item.Title, Uri: "db/" + item.Slug, - Slug: item.Slug, + Url: m.GetDashboardFolderUrl(item.IsFolder, item.Uid, item.Slug), Type: getHitType(item), FolderId: item.FolderId, + FolderUid: item.FolderUid, FolderTitle: item.FolderTitle, - FolderSlug: item.FolderSlug, Tags: []string{}, } + + if item.FolderId > 0 { + hit.FolderUrl = m.GetFolderUrl(item.FolderUid, item.FolderSlug) + } + query.Result = append(query.Result, hit) hits[item.Id] = hit } @@ -309,6 +320,7 @@ func DeleteDashboard(cmd *m.DeleteDashboardCommand) error { "DELETE FROM dashboard_version WHERE dashboard_id = ?", "DELETE FROM dashboard WHERE folder_id = ?", "DELETE FROM annotation WHERE dashboard_id = ?", + "DELETE FROM dashboard_provisioning WHERE dashboard_id = ?", } for _, sql := range deletes { @@ -318,7 +330,7 @@ func DeleteDashboard(cmd *m.DeleteDashboardCommand) error { } } - if err := DeleteAlertDefinition(dashboard.Id, sess); err != nil { + if err := deleteAlertDefinition(dashboard.Id, sess); err != nil { return nil } @@ -343,6 +355,76 @@ func GetDashboards(query *m.GetDashboardsQuery) error { return nil } +// GetDashboardPermissionsForUser returns the maximum permission the specified user has for a dashboard(s) +// The function takes in a list of dashboard ids and the user id and role +func GetDashboardPermissionsForUser(query *m.GetDashboardPermissionsForUserQuery) error { + if len(query.DashboardIds) == 0 { + return m.ErrCommandValidationFailed + } + + if query.OrgRole == m.ROLE_ADMIN { + var permissions = make([]*m.DashboardPermissionForUser, 0) + for _, d := range query.DashboardIds { + permissions = append(permissions, &m.DashboardPermissionForUser{ + DashboardId: d, + Permission: m.PERMISSION_ADMIN, + PermissionName: m.PERMISSION_ADMIN.String(), + }) + } + query.Result = 
permissions + + return nil + } + + params := make([]interface{}, 0) + + // check dashboards that have ACLs via user id, team id or role + sql := `SELECT d.id AS dashboard_id, MAX(COALESCE(da.permission, pt.permission)) AS permission + FROM dashboard AS d + LEFT JOIN dashboard_acl as da on d.folder_id = da.dashboard_id or d.id = da.dashboard_id + LEFT JOIN team_member as ugm on ugm.team_id = da.team_id + LEFT JOIN org_user ou ON ou.role = da.role AND ou.user_id = ? + ` + params = append(params, query.UserId) + + //check the user's role for dashboards that do not have hasAcl set + sql += `LEFT JOIN org_user ouRole ON ouRole.user_id = ? AND ouRole.org_id = ?` + params = append(params, query.UserId) + params = append(params, query.OrgId) + + sql += ` + LEFT JOIN (SELECT 1 AS permission, 'Viewer' AS role + UNION SELECT 2 AS permission, 'Editor' AS role + UNION SELECT 4 AS permission, 'Admin' AS role) pt ON ouRole.role = pt.role + WHERE + d.Id IN (?` + strings.Repeat(",?", len(query.DashboardIds)-1) + `) ` + for _, id := range query.DashboardIds { + params = append(params, id) + } + + sql += ` AND + d.org_id = ? AND + ( + (d.has_acl = ? AND (da.user_id = ? OR ugm.user_id = ? OR ou.id IS NOT NULL)) + OR (d.has_acl = ? AND ouRole.id IS NOT NULL) + ) + group by d.id + order by d.id asc` + params = append(params, query.OrgId) + params = append(params, dialect.BooleanStr(true)) + params = append(params, query.UserId) + params = append(params, query.UserId) + params = append(params, dialect.BooleanStr(false)) + + err := x.Sql(sql, params...).Find(&query.Result) + + for _, p := range query.Result { + p.PermissionName = p.Permission.String() + } + + return err +} + func GetDashboardsByPluginId(query *m.GetDashboardsByPluginIdQuery) error { var dashboards = make([]*m.Dashboard, 0) whereExpr := "org_id=? AND plugin_id=? 
AND is_folder=" + dialect.BooleanStr(false) @@ -365,7 +447,7 @@ func GetDashboardSlugById(query *m.GetDashboardSlugByIdQuery) error { var rawSql = `SELECT slug from dashboard WHERE Id=?` var slug = DashboardSlugDTO{} - exists, err := x.Sql(rawSql, query.Id).Get(&slug) + exists, err := x.SQL(rawSql, query.Id).Get(&slug) if err != nil { return err @@ -376,3 +458,156 @@ func GetDashboardSlugById(query *m.GetDashboardSlugByIdQuery) error { query.Result = slug.Slug return nil } + +func GetDashboardsBySlug(query *m.GetDashboardsBySlugQuery) error { + var dashboards []*m.Dashboard + + if err := x.Where("org_id=? AND slug=?", query.OrgId, query.Slug).Find(&dashboards); err != nil { + return err + } + + query.Result = dashboards + return nil +} + +func GetDashboardUIDById(query *m.GetDashboardRefByIdQuery) error { + var rawSql = `SELECT uid, slug from dashboard WHERE Id=?` + + us := &m.DashboardRef{} + + exists, err := x.SQL(rawSql, query.Id).Get(us) + + if err != nil { + return err + } else if exists == false { + return m.ErrDashboardNotFound + } + + query.Result = us + return nil +} + +func getExistingDashboardByIdOrUidForUpdate(sess *DBSession, cmd *m.ValidateDashboardBeforeSaveCommand) (err error) { + dash := cmd.Dashboard + + dashWithIdExists := false + var existingById m.Dashboard + + if dash.Id > 0 { + dashWithIdExists, err = sess.Where("id=? AND org_id=?", dash.Id, dash.OrgId).Get(&existingById) + if err != nil { + return err + } + + if !dashWithIdExists { + return m.ErrDashboardNotFound + } + + if dash.Uid == "" { + dash.SetUid(existingById.Uid) + } + } + + dashWithUidExists := false + var existingByUid m.Dashboard + + if dash.Uid != "" { + dashWithUidExists, err = sess.Where("org_id=? AND uid=?", dash.OrgId, dash.Uid).Get(&existingByUid) + if err != nil { + return err + } + } + + if dash.FolderId > 0 { + var existingFolder m.Dashboard + folderExists, folderErr := sess.Where("org_id=? AND id=? 
AND is_folder=?", dash.OrgId, dash.FolderId, dialect.BooleanStr(true)).Get(&existingFolder) + if folderErr != nil { + return folderErr + } + + if !folderExists { + return m.ErrDashboardFolderNotFound + } + } + + if !dashWithIdExists && !dashWithUidExists { + return nil + } + + if dashWithIdExists && dashWithUidExists && existingById.Id != existingByUid.Id { + return m.ErrDashboardWithSameUIDExists + } + + existing := existingById + + if !dashWithIdExists && dashWithUidExists { + dash.SetId(existingByUid.Id) + dash.SetUid(existingByUid.Uid) + existing = existingByUid + } + + if (existing.IsFolder && !dash.IsFolder) || + (!existing.IsFolder && dash.IsFolder) { + return m.ErrDashboardTypeMismatch + } + + // check for is someone else has written in between + if dash.Version != existing.Version { + if cmd.Overwrite { + dash.SetVersion(existing.Version) + } else { + return m.ErrDashboardVersionMismatch + } + } + + // do not allow plugin dashboard updates without overwrite flag + if existing.PluginId != "" && cmd.Overwrite == false { + return m.UpdatePluginDashboardError{PluginId: existing.PluginId} + } + + return nil +} + +func getExistingDashboardByTitleAndFolder(sess *DBSession, cmd *m.ValidateDashboardBeforeSaveCommand) error { + dash := cmd.Dashboard + var existing m.Dashboard + + exists, err := sess.Where("org_id=? AND slug=? AND (is_folder=? 
OR folder_id=?)", dash.OrgId, dash.Slug, dialect.BooleanStr(true), dash.FolderId).Get(&existing) + if err != nil { + return err + } + + if exists && dash.Id != existing.Id { + if existing.IsFolder && !dash.IsFolder { + return m.ErrDashboardWithSameNameAsFolder + } + + if !existing.IsFolder && dash.IsFolder { + return m.ErrDashboardFolderWithSameNameAsDashboard + } + + if cmd.Overwrite { + dash.SetId(existing.Id) + dash.SetUid(existing.Uid) + dash.SetVersion(existing.Version) + } else { + return m.ErrDashboardWithSameNameInFolderExists + } + } + + return nil +} + +func ValidateDashboardBeforeSave(cmd *m.ValidateDashboardBeforeSaveCommand) (err error) { + return inTransaction(func(sess *DBSession) error { + if err = getExistingDashboardByIdOrUidForUpdate(sess, cmd); err != nil { + return err + } + + if err = getExistingDashboardByTitleAndFolder(sess, cmd); err != nil { + return err + } + + return nil + }) +} diff --git a/pkg/services/sqlstore/dashboard_acl.go b/pkg/services/sqlstore/dashboard_acl.go index 3b0c89e02ef..ae91d1d41f3 100644 --- a/pkg/services/sqlstore/dashboard_acl.go +++ b/pkg/services/sqlstore/dashboard_acl.go @@ -1,17 +1,12 @@ package sqlstore import ( - "fmt" - "time" - "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" ) func init() { - bus.AddHandler("sql", SetDashboardAcl) bus.AddHandler("sql", UpdateDashboardAcl) - bus.AddHandler("sql", RemoveDashboardAcl) bus.AddHandler("sql", GetDashboardAclInfoList) } @@ -24,7 +19,7 @@ func UpdateDashboardAcl(cmd *m.UpdateDashboardAclCommand) error { } for _, item := range cmd.Items { - if item.UserId == 0 && item.TeamId == 0 && !item.Role.IsValid() { + if item.UserId == 0 && item.TeamId == 0 && (item.Role == nil || !item.Role.IsValid()) { return m.ErrDashboardAclInfoMissing } @@ -40,100 +35,25 @@ func UpdateDashboardAcl(cmd *m.UpdateDashboardAclCommand) error { // Update dashboard HasAcl flag dashboard := m.Dashboard{HasAcl: true} - if _, err := sess.Cols("has_acl").Where("id=? 
OR folder_id=?", cmd.DashboardId, cmd.DashboardId).Update(&dashboard); err != nil { + if _, err := sess.Cols("has_acl").Where("id=?", cmd.DashboardId).Update(&dashboard); err != nil { return err } return nil }) } -func SetDashboardAcl(cmd *m.SetDashboardAclCommand) error { - return inTransaction(func(sess *DBSession) error { - if cmd.UserId == 0 && cmd.TeamId == 0 { - return m.ErrDashboardAclInfoMissing - } - - if cmd.DashboardId == 0 { - return m.ErrDashboardPermissionDashboardEmpty - } - - if res, err := sess.Query("SELECT 1 from "+dialect.Quote("dashboard_acl")+" WHERE dashboard_id =? and (team_id=? or user_id=?)", cmd.DashboardId, cmd.TeamId, cmd.UserId); err != nil { - return err - } else if len(res) == 1 { - - entity := m.DashboardAcl{ - Permission: cmd.Permission, - Updated: time.Now(), - } - - if _, err := sess.Cols("updated", "permission").Where("dashboard_id =? and (team_id=? or user_id=?)", cmd.DashboardId, cmd.TeamId, cmd.UserId).Update(&entity); err != nil { - return err - } - - return nil - } - - entity := m.DashboardAcl{ - OrgId: cmd.OrgId, - TeamId: cmd.TeamId, - UserId: cmd.UserId, - Created: time.Now(), - Updated: time.Now(), - DashboardId: cmd.DashboardId, - Permission: cmd.Permission, - } - - cols := []string{"org_id", "created", "updated", "dashboard_id", "permission"} - - if cmd.UserId != 0 { - cols = append(cols, "user_id") - } - - if cmd.TeamId != 0 { - cols = append(cols, "team_id") - } - - _, err := sess.Cols(cols...).Insert(&entity) - if err != nil { - return err - } - - cmd.Result = entity - - // Update dashboard HasAcl flag - dashboard := m.Dashboard{ - HasAcl: true, - } - - if _, err := sess.Cols("has_acl").Where("id=? 
OR folder_id=?", cmd.DashboardId, cmd.DashboardId).Update(&dashboard); err != nil { - return err - } - - return nil - }) -} - -func RemoveDashboardAcl(cmd *m.RemoveDashboardAclCommand) error { - return inTransaction(func(sess *DBSession) error { - var rawSQL = "DELETE FROM " + dialect.Quote("dashboard_acl") + " WHERE org_id =? and id=?" - _, err := sess.Exec(rawSQL, cmd.OrgId, cmd.AclId) - if err != nil { - return err - } - - return err - }) -} - +// GetDashboardAclInfoList returns a list of permissions for a dashboard. They can be fetched from three +// different places. +// 1) Permissions for the dashboard +// 2) permissions for its parent folder +// 3) if no specific permissions have been set for the dashboard or its parent folder then get the default permissions func GetDashboardAclInfoList(query *m.GetDashboardAclInfoListQuery) error { - dashboardFilter := fmt.Sprintf(`IN ( - SELECT %d - UNION - SELECT folder_id from dashboard where id = %d - )`, query.DashboardId, query.DashboardId) + var err error - rawSQL := ` - SELECT + falseStr := dialect.BooleanStr(false) + + if query.DashboardId == 0 { + sql := `SELECT da.id, da.org_id, da.dashboard_id, @@ -143,43 +63,60 @@ func GetDashboardAclInfoList(query *m.GetDashboardAclInfoListQuery) error { da.role, da.created, da.updated, - u.login AS user_login, - u.email AS user_email, - ug.name AS team - FROM` + dialect.Quote("dashboard_acl") + ` as da - LEFT OUTER JOIN ` + dialect.Quote("user") + ` AS u ON u.id = da.user_id - LEFT OUTER JOIN team ug on ug.id = da.team_id - WHERE dashboard_id ` + dashboardFilter + ` AND da.org_id = ? 
+ '' as user_login, + '' as user_email, + '' as team, + '' as title, + '' as slug, + '' as uid,` + + falseStr + ` AS is_folder + FROM dashboard_acl as da + WHERE da.dashboard_id = -1` + query.Result = make([]*m.DashboardAclInfoDTO, 0) + err = x.SQL(sql).Find(&query.Result) - -- Also include default permission if has_acl = 0 + } else { - UNION - SELECT - da.id, - da.org_id, - da.dashboard_id, - da.user_id, - da.team_id, - da.permission, - da.role, - da.created, - da.updated, - '' as user_login, - '' as user_email, - '' as team - FROM dashboard_acl as da, - dashboard as dash - LEFT JOIN dashboard folder on dash.folder_id = folder.id - WHERE - dash.id = ? AND ( - dash.has_acl = ` + dialect.BooleanStr(false) + ` or - folder.has_acl = ` + dialect.BooleanStr(false) + ` - ) AND - da.dashboard_id = -1 - ` + rawSQL := ` + -- get permissions for the dashboard and its parent folder + SELECT + da.id, + da.org_id, + da.dashboard_id, + da.user_id, + da.team_id, + da.permission, + da.role, + da.created, + da.updated, + u.login AS user_login, + u.email AS user_email, + ug.name AS team, + d.title, + d.slug, + d.uid, + d.is_folder + FROM dashboard as d + LEFT JOIN dashboard folder on folder.id = d.folder_id + LEFT JOIN dashboard_acl AS da ON + da.dashboard_id = d.id OR + da.dashboard_id = d.folder_id OR + ( + -- include default permissions --> + da.org_id = -1 AND ( + (folder.id IS NOT NULL AND folder.has_acl = ` + falseStr + `) OR + (folder.id IS NULL AND d.has_acl = ` + falseStr + `) + ) + ) + LEFT JOIN ` + dialect.Quote("user") + ` AS u ON u.id = da.user_id + LEFT JOIN team ug on ug.id = da.team_id + WHERE d.org_id = ? AND d.id = ? 
AND da.id IS NOT NULL + ORDER BY 1 ASC + ` - query.Result = make([]*m.DashboardAclInfoDTO, 0) - err := x.SQL(rawSQL, query.OrgId, query.DashboardId).Find(&query.Result) + query.Result = make([]*m.DashboardAclInfoDTO, 0) + err = x.SQL(rawSQL, query.OrgId, query.DashboardId).Find(&query.Result) + } for _, p := range query.Result { p.PermissionName = p.Permission.String() diff --git a/pkg/services/sqlstore/dashboard_acl_test.go b/pkg/services/sqlstore/dashboard_acl_test.go index bb6363883d6..8fbb9c0d813 100644 --- a/pkg/services/sqlstore/dashboard_acl_test.go +++ b/pkg/services/sqlstore/dashboard_acl_test.go @@ -17,7 +17,7 @@ func TestDashboardAclDataAccess(t *testing.T) { childDash := insertTestDashboard("2 test dash", 1, savedFolder.Id, false, "prod", "webapp") Convey("When adding dashboard permission with userId and teamId set to 0", func() { - err := SetDashboardAcl(&m.SetDashboardAclCommand{ + err := testHelperUpdateDashboardAcl(savedFolder.Id, m.DashboardAcl{ OrgId: 1, DashboardId: savedFolder.Id, Permission: m.PERMISSION_EDIT, @@ -41,8 +41,25 @@ func TestDashboardAclDataAccess(t *testing.T) { }) }) + Convey("Given dashboard folder with removed default permissions", func() { + err := UpdateDashboardAcl(&m.UpdateDashboardAclCommand{ + DashboardId: savedFolder.Id, + Items: []*m.DashboardAcl{}, + }) + So(err, ShouldBeNil) + + Convey("When reading dashboard acl should return no acl items", func() { + query := m.GetDashboardAclInfoListQuery{DashboardId: childDash.Id, OrgId: 1} + + err := GetDashboardAclInfoList(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 0) + }) + }) + Convey("Given dashboard folder permission", func() { - err := SetDashboardAcl(&m.SetDashboardAclCommand{ + err := testHelperUpdateDashboardAcl(savedFolder.Id, m.DashboardAcl{ OrgId: 1, UserId: currentUser.Id, DashboardId: savedFolder.Id, @@ -61,7 +78,7 @@ func TestDashboardAclDataAccess(t *testing.T) { }) Convey("Given child dashboard permission", func() { - err := 
SetDashboardAcl(&m.SetDashboardAclCommand{ + err := testHelperUpdateDashboardAcl(childDash.Id, m.DashboardAcl{ OrgId: 1, UserId: currentUser.Id, DashboardId: childDash.Id, @@ -83,7 +100,7 @@ func TestDashboardAclDataAccess(t *testing.T) { }) Convey("Given child dashboard permission in folder with no permissions", func() { - err := SetDashboardAcl(&m.SetDashboardAclCommand{ + err := testHelperUpdateDashboardAcl(childDash.Id, m.DashboardAcl{ OrgId: 1, UserId: currentUser.Id, DashboardId: childDash.Id, @@ -108,17 +125,12 @@ func TestDashboardAclDataAccess(t *testing.T) { }) Convey("Should be able to add dashboard permission", func() { - setDashAclCmd := m.SetDashboardAclCommand{ + err := testHelperUpdateDashboardAcl(savedFolder.Id, m.DashboardAcl{ OrgId: 1, UserId: currentUser.Id, DashboardId: savedFolder.Id, Permission: m.PERMISSION_EDIT, - } - - err := SetDashboardAcl(&setDashAclCmd) - So(err, ShouldBeNil) - - So(setDashAclCmd.Result.Id, ShouldEqual, 3) + }) q1 := &m.GetDashboardAclInfoListQuery{DashboardId: savedFolder.Id, OrgId: 1} err = GetDashboardAclInfoList(q1) @@ -130,42 +142,9 @@ func TestDashboardAclDataAccess(t *testing.T) { So(q1.Result[0].UserId, ShouldEqual, currentUser.Id) So(q1.Result[0].UserLogin, ShouldEqual, currentUser.Login) So(q1.Result[0].UserEmail, ShouldEqual, currentUser.Email) - So(q1.Result[0].Id, ShouldEqual, setDashAclCmd.Result.Id) - - Convey("Should update hasAcl field to true for dashboard folder and its children", func() { - q2 := &m.GetDashboardsQuery{DashboardIds: []int64{savedFolder.Id, childDash.Id}} - err := GetDashboards(q2) - So(err, ShouldBeNil) - So(q2.Result[0].HasAcl, ShouldBeTrue) - So(q2.Result[1].HasAcl, ShouldBeTrue) - }) - - Convey("Should be able to update an existing permission", func() { - err := SetDashboardAcl(&m.SetDashboardAclCommand{ - OrgId: 1, - UserId: 1, - DashboardId: savedFolder.Id, - Permission: m.PERMISSION_ADMIN, - }) - - So(err, ShouldBeNil) - - q3 := &m.GetDashboardAclInfoListQuery{DashboardId: 
savedFolder.Id, OrgId: 1} - err = GetDashboardAclInfoList(q3) - So(err, ShouldBeNil) - So(len(q3.Result), ShouldEqual, 1) - So(q3.Result[0].DashboardId, ShouldEqual, savedFolder.Id) - So(q3.Result[0].Permission, ShouldEqual, m.PERMISSION_ADMIN) - So(q3.Result[0].UserId, ShouldEqual, 1) - - }) Convey("Should be able to delete an existing permission", func() { - err := RemoveDashboardAcl(&m.RemoveDashboardAclCommand{ - OrgId: 1, - AclId: setDashAclCmd.Result.Id, - }) - + err := testHelperUpdateDashboardAcl(savedFolder.Id) So(err, ShouldBeNil) q3 := &m.GetDashboardAclInfoListQuery{DashboardId: savedFolder.Id, OrgId: 1} @@ -181,14 +160,12 @@ func TestDashboardAclDataAccess(t *testing.T) { So(err, ShouldBeNil) Convey("Should be able to add a user permission for a team", func() { - setDashAclCmd := m.SetDashboardAclCommand{ + err := testHelperUpdateDashboardAcl(savedFolder.Id, m.DashboardAcl{ OrgId: 1, TeamId: group1.Result.Id, DashboardId: savedFolder.Id, Permission: m.PERMISSION_EDIT, - } - - err := SetDashboardAcl(&setDashAclCmd) + }) So(err, ShouldBeNil) q1 := &m.GetDashboardAclInfoListQuery{DashboardId: savedFolder.Id, OrgId: 1} @@ -197,23 +174,10 @@ func TestDashboardAclDataAccess(t *testing.T) { So(q1.Result[0].DashboardId, ShouldEqual, savedFolder.Id) So(q1.Result[0].Permission, ShouldEqual, m.PERMISSION_EDIT) So(q1.Result[0].TeamId, ShouldEqual, group1.Result.Id) - - Convey("Should be able to delete an existing permission for a team", func() { - err := RemoveDashboardAcl(&m.RemoveDashboardAclCommand{ - OrgId: 1, - AclId: setDashAclCmd.Result.Id, - }) - - So(err, ShouldBeNil) - q3 := &m.GetDashboardAclInfoListQuery{DashboardId: savedFolder.Id, OrgId: 1} - err = GetDashboardAclInfoList(q3) - So(err, ShouldBeNil) - So(len(q3.Result), ShouldEqual, 0) - }) }) Convey("Should be able to update an existing permission for a team", func() { - err := SetDashboardAcl(&m.SetDashboardAclCommand{ + err := testHelperUpdateDashboardAcl(savedFolder.Id, m.DashboardAcl{ OrgId: 1, 
TeamId: group1.Result.Id, DashboardId: savedFolder.Id, @@ -229,7 +193,24 @@ func TestDashboardAclDataAccess(t *testing.T) { So(q3.Result[0].Permission, ShouldEqual, m.PERMISSION_ADMIN) So(q3.Result[0].TeamId, ShouldEqual, group1.Result.Id) }) + }) + }) + Convey("Given a root folder", func() { + var rootFolderId int64 = 0 + + Convey("When reading dashboard acl should return default permissions", func() { + query := m.GetDashboardAclInfoListQuery{DashboardId: rootFolderId, OrgId: 1} + + err := GetDashboardAclInfoList(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + defaultPermissionsId := -1 + So(query.Result[0].DashboardId, ShouldEqual, defaultPermissionsId) + So(*query.Result[0].Role, ShouldEqual, m.ROLE_VIEWER) + So(query.Result[1].DashboardId, ShouldEqual, defaultPermissionsId) + So(*query.Result[1].Role, ShouldEqual, m.ROLE_EDITOR) }) }) }) diff --git a/pkg/services/sqlstore/dashboard_folder_test.go b/pkg/services/sqlstore/dashboard_folder_test.go new file mode 100644 index 00000000000..ea8f1216706 --- /dev/null +++ b/pkg/services/sqlstore/dashboard_folder_test.go @@ -0,0 +1,358 @@ +package sqlstore + +import ( + "testing" + + . 
"github.com/smartystreets/goconvey/convey" + + m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/search" +) + +func TestDashboardFolderDataAccess(t *testing.T) { + Convey("Testing DB", t, func() { + InitTestDB(t) + + Convey("Given one dashboard folder with two dashboards and one dashboard in the root folder", func() { + folder := insertTestDashboard("1 test dash folder", 1, 0, true, "prod", "webapp") + dashInRoot := insertTestDashboard("test dash 67", 1, 0, false, "prod", "webapp") + childDash := insertTestDashboard("test dash 23", 1, folder.Id, false, "prod", "webapp") + insertTestDashboard("test dash 45", 1, folder.Id, false, "prod") + + currentUser := createUser("viewer", "Viewer", false) + + Convey("and no acls are set", func() { + Convey("should return all dashboards", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + OrgId: 1, + DashboardIds: []int64{folder.Id, dashInRoot.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].Id, ShouldEqual, folder.Id) + So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) + }) + }) + + Convey("and acl is set for dashboard folder", func() { + var otherUser int64 = 999 + testHelperUpdateDashboardAcl(folder.Id, m.DashboardAcl{DashboardId: folder.Id, OrgId: 1, UserId: otherUser, Permission: m.PERMISSION_EDIT}) + + Convey("should not return folder", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + OrgId: 1, DashboardIds: []int64{folder.Id, dashInRoot.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Id, ShouldEqual, dashInRoot.Id) + }) + + Convey("when the user is given permission", func() { + testHelperUpdateDashboardAcl(folder.Id, 
m.DashboardAcl{DashboardId: folder.Id, OrgId: 1, UserId: currentUser.Id, Permission: m.PERMISSION_EDIT}) + + Convey("should be able to access folder", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + OrgId: 1, + DashboardIds: []int64{folder.Id, dashInRoot.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].Id, ShouldEqual, folder.Id) + So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) + }) + }) + + Convey("when the user is an admin", func() { + Convey("should be able to access folder", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{ + UserId: currentUser.Id, + OrgId: 1, + OrgRole: m.ROLE_ADMIN, + }, + OrgId: 1, + DashboardIds: []int64{folder.Id, dashInRoot.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].Id, ShouldEqual, folder.Id) + So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) + }) + }) + }) + + Convey("and acl is set for dashboard child and folder has all permissions removed", func() { + var otherUser int64 = 999 + testHelperUpdateDashboardAcl(folder.Id) + testHelperUpdateDashboardAcl(childDash.Id, m.DashboardAcl{DashboardId: folder.Id, OrgId: 1, UserId: otherUser, Permission: m.PERMISSION_EDIT}) + + Convey("should not return folder or child", func() { + query := &search.FindPersistedDashboardsQuery{SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, OrgId: 1, DashboardIds: []int64{folder.Id, childDash.Id, dashInRoot.Id}} + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Id, ShouldEqual, dashInRoot.Id) + }) + + Convey("when the user is given permission to child", func() { + testHelperUpdateDashboardAcl(childDash.Id, m.DashboardAcl{DashboardId: childDash.Id, OrgId: 
1, UserId: currentUser.Id, Permission: m.PERMISSION_EDIT}) + + Convey("should be able to search for child dashboard but not folder", func() { + query := &search.FindPersistedDashboardsQuery{SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, OrgId: 1, DashboardIds: []int64{folder.Id, childDash.Id, dashInRoot.Id}} + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].Id, ShouldEqual, childDash.Id) + So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) + }) + }) + + Convey("when the user is an admin", func() { + Convey("should be able to search for child dash and folder", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{ + UserId: currentUser.Id, + OrgId: 1, + OrgRole: m.ROLE_ADMIN, + }, + OrgId: 1, + DashboardIds: []int64{folder.Id, dashInRoot.Id, childDash.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 3) + So(query.Result[0].Id, ShouldEqual, folder.Id) + So(query.Result[1].Id, ShouldEqual, childDash.Id) + So(query.Result[2].Id, ShouldEqual, dashInRoot.Id) + }) + }) + }) + }) + + Convey("Given two dashboard folders with one dashboard each and one dashboard in the root folder", func() { + folder1 := insertTestDashboard("1 test dash folder", 1, 0, true, "prod") + folder2 := insertTestDashboard("2 test dash folder", 1, 0, true, "prod") + dashInRoot := insertTestDashboard("test dash 67", 1, 0, false, "prod") + childDash1 := insertTestDashboard("child dash 1", 1, folder1.Id, false, "prod") + childDash2 := insertTestDashboard("child dash 2", 1, folder2.Id, false, "prod") + + currentUser := createUser("viewer", "Viewer", false) + var rootFolderId int64 = 0 + + Convey("and one folder is expanded, the other collapsed", func() { + Convey("should return dashboards in root and expanded folder", func() { + query := &search.FindPersistedDashboardsQuery{FolderIds: []int64{rootFolderId, 
folder1.Id}, SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, OrgId: 1} + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 4) + So(query.Result[0].Id, ShouldEqual, folder1.Id) + So(query.Result[1].Id, ShouldEqual, folder2.Id) + So(query.Result[2].Id, ShouldEqual, childDash1.Id) + So(query.Result[3].Id, ShouldEqual, dashInRoot.Id) + }) + }) + + Convey("and acl is set for one dashboard folder", func() { + var otherUser int64 = 999 + testHelperUpdateDashboardAcl(folder1.Id, m.DashboardAcl{DashboardId: folder1.Id, OrgId: 1, UserId: otherUser, Permission: m.PERMISSION_EDIT}) + + Convey("and a dashboard is moved from folder without acl to the folder with an acl", func() { + moveDashboard(1, childDash2.Data, folder1.Id) + + Convey("should not return folder with acl or its children", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + OrgId: 1, + DashboardIds: []int64{folder1.Id, childDash1.Id, childDash2.Id, dashInRoot.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Id, ShouldEqual, dashInRoot.Id) + }) + }) + Convey("and a dashboard is moved from folder with acl to the folder without an acl", func() { + moveDashboard(1, childDash1.Data, folder2.Id) + + Convey("should return folder without acl and its children", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + OrgId: 1, + DashboardIds: []int64{folder2.Id, childDash1.Id, childDash2.Id, dashInRoot.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 4) + So(query.Result[0].Id, ShouldEqual, folder2.Id) + So(query.Result[1].Id, ShouldEqual, childDash1.Id) + So(query.Result[2].Id, ShouldEqual, childDash2.Id) + 
So(query.Result[3].Id, ShouldEqual, dashInRoot.Id) + }) + }) + + Convey("and a dashboard with an acl is moved to the folder without an acl", func() { + testHelperUpdateDashboardAcl(childDash1.Id, m.DashboardAcl{DashboardId: childDash1.Id, OrgId: 1, UserId: otherUser, Permission: m.PERMISSION_EDIT}) + moveDashboard(1, childDash1.Data, folder2.Id) + + Convey("should return folder without acl but not the dashboard with acl", func() { + query := &search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1, OrgRole: m.ROLE_VIEWER}, + OrgId: 1, + DashboardIds: []int64{folder2.Id, childDash1.Id, childDash2.Id, dashInRoot.Id}, + } + err := SearchDashboards(query) + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 4) + So(query.Result[0].Id, ShouldEqual, folder2.Id) + So(query.Result[1].Id, ShouldEqual, childDash1.Id) + So(query.Result[2].Id, ShouldEqual, childDash2.Id) + So(query.Result[3].Id, ShouldEqual, dashInRoot.Id) + }) + }) + }) + }) + + Convey("Given two dashboard folders", func() { + + folder1 := insertTestDashboard("1 test dash folder", 1, 0, true, "prod") + folder2 := insertTestDashboard("2 test dash folder", 1, 0, true, "prod") + insertTestDashboard("folder in another org", 2, 0, true, "prod") + + adminUser := createUser("admin", "Admin", true) + editorUser := createUser("editor", "Editor", false) + viewerUser := createUser("viewer", "Viewer", false) + + Convey("Admin users", func() { + Convey("Should have write access to all dashboard folders in their org", func() { + query := search.FindPersistedDashboardsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{UserId: adminUser.Id, OrgRole: m.ROLE_ADMIN, OrgId: 1}, + Permission: m.PERMISSION_VIEW, + Type: "dash-folder", + } + + err := SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].Id, ShouldEqual, folder1.Id) + So(query.Result[1].Id, ShouldEqual, folder2.Id) + }) + + Convey("should have write access 
to all folders and dashboards", func() { + query := m.GetDashboardPermissionsForUserQuery{ + DashboardIds: []int64{folder1.Id, folder2.Id}, + OrgId: 1, + UserId: adminUser.Id, + OrgRole: m.ROLE_ADMIN, + } + + err := GetDashboardPermissionsForUser(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].DashboardId, ShouldEqual, folder1.Id) + So(query.Result[0].Permission, ShouldEqual, m.PERMISSION_ADMIN) + So(query.Result[1].DashboardId, ShouldEqual, folder2.Id) + So(query.Result[1].Permission, ShouldEqual, m.PERMISSION_ADMIN) + }) + }) + + Convey("Editor users", func() { + query := search.FindPersistedDashboardsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{UserId: editorUser.Id, OrgRole: m.ROLE_EDITOR, OrgId: 1}, + Permission: m.PERMISSION_EDIT, + } + + Convey("Should have write access to all dashboard folders with default ACL", func() { + err := SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].Id, ShouldEqual, folder1.Id) + So(query.Result[1].Id, ShouldEqual, folder2.Id) + }) + + Convey("should have edit access to folders with default ACL", func() { + query := m.GetDashboardPermissionsForUserQuery{ + DashboardIds: []int64{folder1.Id, folder2.Id}, + OrgId: 1, + UserId: editorUser.Id, + OrgRole: m.ROLE_EDITOR, + } + + err := GetDashboardPermissionsForUser(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].DashboardId, ShouldEqual, folder1.Id) + So(query.Result[0].Permission, ShouldEqual, m.PERMISSION_EDIT) + So(query.Result[1].DashboardId, ShouldEqual, folder2.Id) + So(query.Result[1].Permission, ShouldEqual, m.PERMISSION_EDIT) + }) + + Convey("Should have write access to one dashboard folder if default role changed to view for one folder", func() { + testHelperUpdateDashboardAcl(folder1.Id, m.DashboardAcl{DashboardId: folder1.Id, OrgId: 1, UserId: editorUser.Id, Permission: m.PERMISSION_VIEW}) + + err := 
SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Id, ShouldEqual, folder2.Id) + }) + + }) + + Convey("Viewer users", func() { + query := search.FindPersistedDashboardsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{UserId: viewerUser.Id, OrgRole: m.ROLE_VIEWER, OrgId: 1}, + Permission: m.PERMISSION_EDIT, + } + + Convey("Should have no write access to any dashboard folders with default ACL", func() { + err := SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 0) + }) + + Convey("should have view access to folders with default ACL", func() { + query := m.GetDashboardPermissionsForUserQuery{ + DashboardIds: []int64{folder1.Id, folder2.Id}, + OrgId: 1, + UserId: viewerUser.Id, + OrgRole: m.ROLE_VIEWER, + } + + err := GetDashboardPermissionsForUser(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + So(query.Result[0].DashboardId, ShouldEqual, folder1.Id) + So(query.Result[0].Permission, ShouldEqual, m.PERMISSION_VIEW) + So(query.Result[1].DashboardId, ShouldEqual, folder2.Id) + So(query.Result[1].Permission, ShouldEqual, m.PERMISSION_VIEW) + }) + + Convey("Should be able to get one dashboard folder if default role changed to edit for one folder", func() { + testHelperUpdateDashboardAcl(folder1.Id, m.DashboardAcl{DashboardId: folder1.Id, OrgId: 1, UserId: viewerUser.Id, Permission: m.PERMISSION_EDIT}) + + err := SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Id, ShouldEqual, folder1.Id) + }) + }) + }) + }) +} diff --git a/pkg/services/sqlstore/dashboard_provisioning.go b/pkg/services/sqlstore/dashboard_provisioning.go new file mode 100644 index 00000000000..69409c3b873 --- /dev/null +++ b/pkg/services/sqlstore/dashboard_provisioning.go @@ -0,0 +1,66 @@ +package sqlstore + +import ( + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" +) + +func init() { + 
bus.AddHandler("sql", GetProvisionedDashboardDataQuery) + bus.AddHandler("sql", SaveProvisionedDashboard) +} + +type DashboardExtras struct { + Id int64 + DashboardId int64 + Key string + Value string +} + +func SaveProvisionedDashboard(cmd *models.SaveProvisionedDashboardCommand) error { + return inTransaction(func(sess *DBSession) error { + err := saveDashboard(sess, cmd.DashboardCmd) + + if err != nil { + return err + } + + cmd.Result = cmd.DashboardCmd.Result + if cmd.DashboardProvisioning.Updated == 0 { + cmd.DashboardProvisioning.Updated = cmd.Result.Updated.Unix() + } + + return saveProvionedData(sess, cmd.DashboardProvisioning, cmd.Result) + }) +} + +func saveProvionedData(sess *DBSession, cmd *models.DashboardProvisioning, dashboard *models.Dashboard) error { + result := &models.DashboardProvisioning{} + + exist, err := sess.Where("dashboard_id=?", dashboard.Id).Get(result) + if err != nil { + return err + } + + cmd.Id = result.Id + cmd.DashboardId = dashboard.Id + + if exist { + _, err = sess.ID(result.Id).Update(cmd) + } else { + _, err = sess.Insert(cmd) + } + + return err +} + +func GetProvisionedDashboardDataQuery(cmd *models.GetProvisionedDashboardDataQuery) error { + var result []*models.DashboardProvisioning + + if err := x.Where("name = ?", cmd.Name).Find(&result); err != nil { + return err + } + + cmd.Result = result + return nil +} diff --git a/pkg/services/sqlstore/dashboard_provisioning_test.go b/pkg/services/sqlstore/dashboard_provisioning_test.go new file mode 100644 index 00000000000..b752173b67d --- /dev/null +++ b/pkg/services/sqlstore/dashboard_provisioning_test.go @@ -0,0 +1,55 @@ +package sqlstore + +import ( + "testing" + "time" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/models" + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestDashboardProvisioningTest(t *testing.T) { + Convey("Testing Dashboard provisioning", t, func() { + InitTestDB(t) + + saveDashboardCmd := &models.SaveDashboardCommand{ + OrgId: 1, + FolderId: 0, + IsFolder: false, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": "test dashboard", + }), + } + + Convey("Saving dashboards with extras", func() { + now := time.Now() + + cmd := &models.SaveProvisionedDashboardCommand{ + DashboardCmd: saveDashboardCmd, + DashboardProvisioning: &models.DashboardProvisioning{ + Name: "default", + ExternalId: "/var/grafana.json", + Updated: now.Unix(), + }, + } + + err := SaveProvisionedDashboard(cmd) + So(err, ShouldBeNil) + So(cmd.Result, ShouldNotBeNil) + So(cmd.Result.Id, ShouldNotEqual, 0) + dashId := cmd.Result.Id + + Convey("Can query for provisioned dashboards", func() { + query := &models.GetProvisionedDashboardDataQuery{Name: "default"} + err := GetProvisionedDashboardDataQuery(query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].DashboardId, ShouldEqual, dashId) + So(query.Result[0].Updated, ShouldEqual, now.Unix()) + }) + }) + }) +} diff --git a/pkg/services/sqlstore/dashboard_service_integration_test.go b/pkg/services/sqlstore/dashboard_service_integration_test.go new file mode 100644 index 00000000000..d005270c33c --- /dev/null +++ b/pkg/services/sqlstore/dashboard_service_integration_test.go @@ -0,0 +1,932 @@ +package sqlstore + +import ( + "testing" + + "github.com/grafana/grafana/pkg/components/simplejson" + "github.com/grafana/grafana/pkg/services/dashboards" + "github.com/grafana/grafana/pkg/services/guardian" + + "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/models" + + . 
"github.com/smartystreets/goconvey/convey" +) + +func TestIntegratedDashboardService(t *testing.T) { + Convey("Dashboard service integration tests", t, func() { + InitTestDB(t) + var testOrgId int64 = 1 + + Convey("Given saved folders and dashboards in organization A", func() { + + bus.AddHandler("test", func(cmd *models.ValidateDashboardAlertsCommand) error { + return nil + }) + + bus.AddHandler("test", func(cmd *models.UpdateDashboardAlertsCommand) error { + return nil + }) + + savedFolder := saveTestFolder("Saved folder", testOrgId) + savedDashInFolder := saveTestDashboard("Saved dash in folder", testOrgId, savedFolder.Id) + saveTestDashboard("Other saved dash in folder", testOrgId, savedFolder.Id) + savedDashInGeneralFolder := saveTestDashboard("Saved dashboard in general folder", testOrgId, 0) + otherSavedFolder := saveTestFolder("Other saved folder", testOrgId) + + Convey("Should return dashboard model", func() { + So(savedFolder.Title, ShouldEqual, "Saved folder") + So(savedFolder.Slug, ShouldEqual, "saved-folder") + So(savedFolder.Id, ShouldNotEqual, 0) + So(savedFolder.IsFolder, ShouldBeTrue) + So(savedFolder.FolderId, ShouldEqual, 0) + So(len(savedFolder.Uid), ShouldBeGreaterThan, 0) + + So(savedDashInFolder.Title, ShouldEqual, "Saved dash in folder") + So(savedDashInFolder.Slug, ShouldEqual, "saved-dash-in-folder") + So(savedDashInFolder.Id, ShouldNotEqual, 0) + So(savedDashInFolder.IsFolder, ShouldBeFalse) + So(savedDashInFolder.FolderId, ShouldEqual, savedFolder.Id) + So(len(savedDashInFolder.Uid), ShouldBeGreaterThan, 0) + }) + + // Basic validation tests + + Convey("When saving a dashboard with non-existing id", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": float64(123412321), + "title": "Expect error", + }), + } + + err := callSaveWithError(cmd) + + Convey("It should result in not found error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, 
models.ErrDashboardNotFound) + }) + }) + + // Given other organization + + Convey("Given organization B", func() { + var otherOrgId int64 = 2 + + Convey("When saving a dashboard with id that are saved in organization A", func() { + cmd := models.SaveDashboardCommand{ + OrgId: otherOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInFolder.Id, + "title": "Expect error", + }), + Overwrite: false, + } + + err := callSaveWithError(cmd) + + Convey("It should result in not found error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardNotFound) + }) + }) + + permissionScenario("Given user has permission to save", true, func(sc *dashboardPermissionScenarioContext) { + Convey("When saving a dashboard with uid that are saved in organization A", func() { + var otherOrgId int64 = 2 + cmd := models.SaveDashboardCommand{ + OrgId: otherOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedDashInFolder.Uid, + "title": "Dash with existing uid in other org", + }), + Overwrite: false, + } + + res := callSaveWithResult(cmd) + + Convey("It should create dashboard in other organization", func() { + So(res, ShouldNotBeNil) + + query := models.GetDashboardQuery{OrgId: otherOrgId, Uid: savedDashInFolder.Uid} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Id, ShouldNotEqual, savedDashInFolder.Id) + So(query.Result.Id, ShouldEqual, res.Id) + So(query.Result.OrgId, ShouldEqual, otherOrgId) + So(query.Result.Uid, ShouldEqual, savedDashInFolder.Uid) + }) + }) + }) + }) + + // Given user has no permission to save + + permissionScenario("Given user has no permission to save", false, func(sc *dashboardPermissionScenarioContext) { + + Convey("When trying to create a new dashboard in the General folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": "Dash", + }), + UserId: 10000, + Overwrite: true, 
+ } + + err := callSaveWithError(cmd) + + Convey("It should call dashboard guardian with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, 0) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + Convey("When trying to create a new dashboard in other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": "Dash", + }), + FolderId: otherSavedFolder.Id, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should call dashboard guardian with correct arguments and rsult in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, otherSavedFolder.Id) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + Convey("When trying to update a dashboard by existing id in the General folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInGeneralFolder.Id, + "title": "Dash", + }), + FolderId: savedDashInGeneralFolder.FolderId, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should call dashboard guardian with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, savedDashInGeneralFolder.Id) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + + 
Convey("When trying to update a dashboard by existing id in other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInFolder.Id, + "title": "Dash", + }), + FolderId: savedDashInFolder.FolderId, + UserId: 10000, + Overwrite: true, + } + + err := callSaveWithError(cmd) + + Convey("It should call dashboard guardian with correct arguments and result in access denied error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardUpdateAccessDenied) + + So(sc.dashboardGuardianMock.DashId, ShouldEqual, savedDashInFolder.Id) + So(sc.dashboardGuardianMock.OrgId, ShouldEqual, cmd.OrgId) + So(sc.dashboardGuardianMock.User.UserId, ShouldEqual, cmd.UserId) + }) + }) + }) + + // Given user has permission to save + + permissionScenario("Given user has permission to save", true, func(sc *dashboardPermissionScenarioContext) { + + Convey("and overwrite flag is set to false", func() { + shouldOverwrite := false + + Convey("When creating a dashboard in General folder with same name as dashboard in other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedDashInFolder.Title, + }), + FolderId: 0, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should create a new dashboard", func() { + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: res.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Id, ShouldEqual, res.Id) + So(query.Result.FolderId, ShouldEqual, 0) + }) + }) + + Convey("When creating a dashboard in other folder with same name as dashboard in General folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedDashInGeneralFolder.Title, + }), + FolderId: 
savedFolder.Id, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should create a new dashboard", func() { + So(res.Id, ShouldNotEqual, savedDashInGeneralFolder.Id) + + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: res.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.FolderId, ShouldEqual, savedFolder.Id) + }) + }) + + Convey("When creating a folder with same name as dashboard in other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedDashInFolder.Title, + }), + IsFolder: true, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should create a new folder", func() { + So(res.Id, ShouldNotEqual, savedDashInGeneralFolder.Id) + So(res.IsFolder, ShouldBeTrue) + + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: res.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.FolderId, ShouldEqual, 0) + So(query.Result.IsFolder, ShouldBeTrue) + }) + }) + + Convey("When saving a dashboard without id and uid and unique title in folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": "Dash without id and uid", + }), + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should create a new dashboard", func() { + So(res.Id, ShouldBeGreaterThan, 0) + So(len(res.Uid), ShouldBeGreaterThan, 0) + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: res.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Id, ShouldEqual, res.Id) + So(query.Result.Uid, ShouldEqual, res.Uid) + }) + }) + + Convey("When saving a dashboard when dashboard id is zero ", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + 
Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": 0, + "title": "Dash with zero id", + }), + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should create a new dashboard", func() { + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: res.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Id, ShouldEqual, res.Id) + }) + }) + + Convey("When saving a dashboard in non-existing folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": "Expect error", + }), + FolderId: 123412321, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in folder not found error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardFolderNotFound) + }) + }) + + Convey("When updating an existing dashboard by id without current version", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInGeneralFolder.Id, + "title": "test dash 23", + }), + FolderId: savedFolder.Id, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in version mismatch error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardVersionMismatch) + }) + }) + + Convey("When updating an existing dashboard by id with current version", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInGeneralFolder.Id, + "title": "Updated title", + "version": savedDashInGeneralFolder.Version, + }), + FolderId: savedFolder.Id, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should update dashboard", func() { + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: savedDashInGeneralFolder.Id} 
+ + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Title, ShouldEqual, "Updated title") + So(query.Result.FolderId, ShouldEqual, savedFolder.Id) + So(query.Result.Version, ShouldBeGreaterThan, savedDashInGeneralFolder.Version) + }) + }) + + Convey("When updating an existing dashboard by uid without current version", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedDashInFolder.Uid, + "title": "test dash 23", + }), + FolderId: 0, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in version mismatch error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardVersionMismatch) + }) + }) + + Convey("When updating an existing dashboard by uid with current version", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedDashInFolder.Uid, + "title": "Updated title", + "version": savedDashInFolder.Version, + }), + FolderId: 0, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should update dashboard", func() { + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: savedDashInFolder.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Title, ShouldEqual, "Updated title") + So(query.Result.FolderId, ShouldEqual, 0) + So(query.Result.Version, ShouldBeGreaterThan, savedDashInFolder.Version) + }) + }) + + Convey("When creating a dashboard with same name as dashboard in other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedDashInFolder.Title, + }), + FolderId: savedDashInFolder.FolderId, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in dashboard with same name in folder error", 
func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardWithSameNameInFolderExists) + }) + }) + + Convey("When creating a dashboard with same name as dashboard in General folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedDashInGeneralFolder.Title, + }), + FolderId: savedDashInGeneralFolder.FolderId, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in dashboard with same name in folder error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardWithSameNameInFolderExists) + }) + }) + + Convey("When creating a folder with same name as existing folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedFolder.Title, + }), + IsFolder: true, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in dashboard with same name in folder error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardWithSameNameInFolderExists) + }) + }) + }) + + Convey("and overwrite flag is set to true", func() { + shouldOverwrite := true + + Convey("When updating an existing dashboard by id without current version", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInGeneralFolder.Id, + "title": "Updated title", + }), + FolderId: savedFolder.Id, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should update dashboard", func() { + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: savedDashInGeneralFolder.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Title, ShouldEqual, "Updated title") + So(query.Result.FolderId, ShouldEqual, savedFolder.Id) 
+ So(query.Result.Version, ShouldBeGreaterThan, savedDashInGeneralFolder.Version) + }) + }) + + Convey("When updating an existing dashboard by uid without current version", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedDashInFolder.Uid, + "title": "Updated title", + }), + FolderId: 0, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + So(res, ShouldNotBeNil) + + Convey("It should update dashboard", func() { + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: savedDashInFolder.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Title, ShouldEqual, "Updated title") + So(query.Result.FolderId, ShouldEqual, 0) + So(query.Result.Version, ShouldBeGreaterThan, savedDashInFolder.Version) + }) + }) + + Convey("When updating uid for existing dashboard using id", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInFolder.Id, + "uid": "new-uid", + "title": savedDashInFolder.Title, + }), + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + + Convey("It should update dashboard", func() { + So(res, ShouldNotBeNil) + So(res.Id, ShouldEqual, savedDashInFolder.Id) + So(res.Uid, ShouldEqual, "new-uid") + + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: savedDashInFolder.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Uid, ShouldEqual, "new-uid") + So(query.Result.Version, ShouldBeGreaterThan, savedDashInFolder.Version) + }) + }) + + Convey("When updating uid to an existing uid for existing dashboard using id", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInFolder.Id, + "uid": savedDashInGeneralFolder.Uid, + "title": savedDashInFolder.Title, + }), + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + 
Convey("It should result in same uid exists error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardWithSameUIDExists) + }) + }) + + Convey("When creating a dashboard with same name as dashboard in other folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedDashInFolder.Title, + }), + FolderId: savedDashInFolder.FolderId, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + + Convey("It should overwrite existing dashboard", func() { + So(res, ShouldNotBeNil) + So(res.Id, ShouldEqual, savedDashInFolder.Id) + So(res.Uid, ShouldEqual, savedDashInFolder.Uid) + + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: res.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Id, ShouldEqual, res.Id) + So(query.Result.Uid, ShouldEqual, res.Uid) + }) + }) + + Convey("When creating a dashboard with same name as dashboard in General folder", func() { + cmd := models.SaveDashboardCommand{ + OrgId: testOrgId, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": savedDashInGeneralFolder.Title, + }), + FolderId: savedDashInGeneralFolder.FolderId, + Overwrite: shouldOverwrite, + } + + res := callSaveWithResult(cmd) + + Convey("It should overwrite existing dashboard", func() { + So(res, ShouldNotBeNil) + So(res.Id, ShouldEqual, savedDashInGeneralFolder.Id) + So(res.Uid, ShouldEqual, savedDashInGeneralFolder.Uid) + + query := models.GetDashboardQuery{OrgId: cmd.OrgId, Id: res.Id} + + err := bus.Dispatch(&query) + So(err, ShouldBeNil) + So(query.Result.Id, ShouldEqual, res.Id) + So(query.Result.Uid, ShouldEqual, res.Uid) + }) + }) + + Convey("When trying to update existing folder to a dashboard using id", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedFolder.Id, + "title": "new title", + }), + 
IsFolder: false, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in type mismatch error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardTypeMismatch) + }) + }) + + Convey("When trying to update existing dashboard to a folder using id", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDashInFolder.Id, + "title": "new folder title", + }), + IsFolder: true, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in type mismatch error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardTypeMismatch) + }) + }) + + Convey("When trying to update existing folder to a dashboard using uid", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedFolder.Uid, + "title": "new title", + }), + IsFolder: false, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in type mismatch error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardTypeMismatch) + }) + }) + + Convey("When trying to update existing dashboard to a folder using uid", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "uid": savedDashInFolder.Uid, + "title": "new folder title", + }), + IsFolder: true, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in type mismatch error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardTypeMismatch) + }) + }) + + Convey("When trying to update existing folder to a dashboard using title", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": savedFolder.Title, + }), + IsFolder: false, + 
Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in dashboard with same name as folder error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardWithSameNameAsFolder) + }) + }) + + Convey("When trying to update existing dashboard to a folder using title", func() { + cmd := models.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "title": savedDashInGeneralFolder.Title, + }), + IsFolder: true, + Overwrite: shouldOverwrite, + } + + err := callSaveWithError(cmd) + + Convey("It should result in folder with same name as dashboard error", func() { + So(err, ShouldNotBeNil) + So(err, ShouldEqual, models.ErrDashboardFolderWithSameNameAsDashboard) + }) + }) + }) + }) + }) + }) +} + +type scenarioContext struct { + dashboardGuardianMock *guardian.FakeDashboardGuardian +} + +type scenarioFunc func(c *scenarioContext) + +func dashboardGuardianScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) { + Convey(desc, func() { + origNewDashboardGuardian := guardian.New + guardian.MockDashboardGuardian(mock) + + sc := &scenarioContext{ + dashboardGuardianMock: mock, + } + + defer func() { + guardian.New = origNewDashboardGuardian + }() + + fn(sc) + }) +} + +type dashboardPermissionScenarioContext struct { + dashboardGuardianMock *guardian.FakeDashboardGuardian +} + +type dashboardPermissionScenarioFunc func(sc *dashboardPermissionScenarioContext) + +func dashboardPermissionScenario(desc string, mock *guardian.FakeDashboardGuardian, fn dashboardPermissionScenarioFunc) { + Convey(desc, func() { + origNewDashboardGuardian := guardian.New + guardian.MockDashboardGuardian(mock) + + sc := &dashboardPermissionScenarioContext{ + dashboardGuardianMock: mock, + } + + defer func() { + guardian.New = origNewDashboardGuardian + }() + + fn(sc) + }) +} + +func permissionScenario(desc string, canSave bool, fn dashboardPermissionScenarioFunc) { + mock := 
&guardian.FakeDashboardGuardian{ + CanSaveValue: canSave, + } + dashboardPermissionScenario(desc, mock, fn) +} + +func callSaveWithResult(cmd models.SaveDashboardCommand) *models.Dashboard { + dto := toSaveDashboardDto(cmd) + res, _ := dashboards.NewService().SaveDashboard(&dto) + return res +} + +func callSaveWithError(cmd models.SaveDashboardCommand) error { + dto := toSaveDashboardDto(cmd) + _, err := dashboards.NewService().SaveDashboard(&dto) + return err +} + +func dashboardServiceScenario(desc string, mock *guardian.FakeDashboardGuardian, fn scenarioFunc) { + Convey(desc, func() { + origNewDashboardGuardian := guardian.New + guardian.MockDashboardGuardian(mock) + + sc := &scenarioContext{ + dashboardGuardianMock: mock, + } + + defer func() { + guardian.New = origNewDashboardGuardian + }() + + fn(sc) + }) +} + +func saveTestDashboard(title string, orgId int64, folderId int64) *models.Dashboard { + cmd := models.SaveDashboardCommand{ + OrgId: orgId, + FolderId: folderId, + IsFolder: false, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": title, + }), + } + + dto := dashboards.SaveDashboardDTO{ + OrgId: orgId, + Dashboard: cmd.GetDashboardModel(), + User: &models.SignedInUser{ + UserId: 1, + OrgRole: models.ROLE_ADMIN, + }, + } + + res, err := dashboards.NewService().SaveDashboard(&dto) + So(err, ShouldBeNil) + + return res +} + +func saveTestFolder(title string, orgId int64) *models.Dashboard { + cmd := models.SaveDashboardCommand{ + OrgId: orgId, + FolderId: 0, + IsFolder: true, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": nil, + "title": title, + }), + } + + dto := dashboards.SaveDashboardDTO{ + OrgId: orgId, + Dashboard: cmd.GetDashboardModel(), + User: &models.SignedInUser{ + UserId: 1, + OrgRole: models.ROLE_ADMIN, + }, + } + + res, err := dashboards.NewService().SaveDashboard(&dto) + So(err, ShouldBeNil) + + return res +} + +func toSaveDashboardDto(cmd models.SaveDashboardCommand) 
dashboards.SaveDashboardDTO { + dash := (&cmd).GetDashboardModel() + + return dashboards.SaveDashboardDTO{ + Dashboard: dash, + Message: cmd.Message, + OrgId: cmd.OrgId, + User: &models.SignedInUser{UserId: cmd.UserId}, + Overwrite: cmd.Overwrite, + } +} diff --git a/pkg/services/sqlstore/dashboard_snapshot.go b/pkg/services/sqlstore/dashboard_snapshot.go index 0ef7f99da67..9e82bbb2c83 100644 --- a/pkg/services/sqlstore/dashboard_snapshot.go +++ b/pkg/services/sqlstore/dashboard_snapshot.go @@ -16,20 +16,23 @@ func init() { bus.AddHandler("sql", DeleteExpiredSnapshots) } +// DeleteExpiredSnapshots removes snapshots with old expiry dates. +// SnapShotRemoveExpired is deprecated and should be removed in the future. +// Snapshot expiry is decided by the user when they share the snapshot. func DeleteExpiredSnapshots(cmd *m.DeleteExpiredSnapshotsCommand) error { return inTransaction(func(sess *DBSession) error { - var expiredCount int64 = 0 - - if setting.SnapShotRemoveExpired { - deleteExpiredSql := "DELETE FROM dashboard_snapshot WHERE expires < ?" - expiredResponse, err := x.Exec(deleteExpiredSql, time.Now) - if err != nil { - return err - } - expiredCount, _ = expiredResponse.RowsAffected() + if !setting.SnapShotRemoveExpired { + sqlog.Warn("[Deprecated] The snapshot_remove_expired setting is outdated. Please remove from your config.") + return nil } - sqlog.Debug("Deleted old/expired snaphots", "expired", expiredCount) + deleteExpiredSql := "DELETE FROM dashboard_snapshot WHERE expires < ?" 
+ expiredResponse, err := sess.Exec(deleteExpiredSql, time.Now()) + if err != nil { + return err + } + cmd.DeletedRows, _ = expiredResponse.RowsAffected() + return nil }) } @@ -72,7 +75,7 @@ func DeleteDashboardSnapshot(cmd *m.DeleteDashboardSnapshotCommand) error { } func GetDashboardSnapshot(query *m.GetDashboardSnapshotQuery) error { - snapshot := m.DashboardSnapshot{Key: query.Key} + snapshot := m.DashboardSnapshot{Key: query.Key, DeleteKey: query.DeleteKey} has, err := x.Get(&snapshot) if err != nil { @@ -85,6 +88,8 @@ func GetDashboardSnapshot(query *m.GetDashboardSnapshotQuery) error { return nil } +// SearchDashboardSnapshots returns a list of all snapshots for admins +// for other roles, it returns snapshots created by the user func SearchDashboardSnapshots(query *m.GetDashboardSnapshotsQuery) error { var snapshots = make(m.DashboardSnapshotsList, 0) @@ -95,7 +100,16 @@ func SearchDashboardSnapshots(query *m.GetDashboardSnapshotsQuery) error { sess.Where("name LIKE ?", query.Name) } - sess.Where("org_id = ?", query.OrgId) + // admins can see all snapshots, everyone else can only see their own snapshots + if query.SignedInUser.OrgRole == m.ROLE_ADMIN { + sess.Where("org_id = ?", query.OrgId) + } else if !query.SignedInUser.IsAnonymous { + sess.Where("org_id = ? AND user_id = ?", query.OrgId, query.SignedInUser.UserId) + } else { + query.Result = snapshots + return nil + } + err := sess.Find(&snapshots) query.Result = snapshots return err diff --git a/pkg/services/sqlstore/dashboard_snapshot_test.go b/pkg/services/sqlstore/dashboard_snapshot_test.go index 50375088b4b..2081cbf6194 100644 --- a/pkg/services/sqlstore/dashboard_snapshot_test.go +++ b/pkg/services/sqlstore/dashboard_snapshot_test.go @@ -2,11 +2,14 @@ package sqlstore import ( "testing" + "time" + "github.com/go-xorm/xorm" . 
"github.com/smartystreets/goconvey/convey" "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/setting" ) func TestDashboardSnapshotDBAccess(t *testing.T) { @@ -14,17 +17,19 @@ func TestDashboardSnapshotDBAccess(t *testing.T) { Convey("Testing DashboardSnapshot data access", t, func() { InitTestDB(t) - Convey("Given saved snaphot", func() { + Convey("Given saved snapshot", func() { cmd := m.CreateDashboardSnapshotCommand{ Key: "hej", Dashboard: simplejson.NewFromAny(map[string]interface{}{ "hello": "mupp", }), + UserId: 1000, + OrgId: 1, } err := CreateDashboardSnapshot(&cmd) So(err, ShouldBeNil) - Convey("Should be able to get snaphot by key", func() { + Convey("Should be able to get snapshot by key", func() { query := m.GetDashboardSnapshotQuery{Key: "hej"} err = GetDashboardSnapshot(&query) So(err, ShouldBeNil) @@ -33,6 +38,135 @@ func TestDashboardSnapshotDBAccess(t *testing.T) { So(query.Result.Dashboard.Get("hello").MustString(), ShouldEqual, "mupp") }) + Convey("And the user has the admin role", func() { + Convey("Should return all the snapshots", func() { + query := m.GetDashboardSnapshotsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}, + } + err := SearchDashboardSnapshots(&query) + So(err, ShouldBeNil) + + So(query.Result, ShouldNotBeNil) + So(len(query.Result), ShouldEqual, 1) + }) + }) + + Convey("And the user has the editor role and has created a snapshot", func() { + Convey("Should return all the snapshots", func() { + query := m.GetDashboardSnapshotsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_EDITOR, UserId: 1000}, + } + err := SearchDashboardSnapshots(&query) + So(err, ShouldBeNil) + + So(query.Result, ShouldNotBeNil) + So(len(query.Result), ShouldEqual, 1) + }) + }) + + Convey("And the user has the editor role and has not created any snapshot", func() { + Convey("Should not return any snapshots", func() { + query := 
m.GetDashboardSnapshotsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_EDITOR, UserId: 2}, + } + err := SearchDashboardSnapshots(&query) + So(err, ShouldBeNil) + + So(query.Result, ShouldNotBeNil) + So(len(query.Result), ShouldEqual, 0) + }) + }) + + Convey("And the user is anonymous", func() { + cmd := m.CreateDashboardSnapshotCommand{ + Key: "strangesnapshotwithuserid0", + DeleteKey: "adeletekey", + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "hello": "mupp", + }), + UserId: 0, + OrgId: 1, + } + err := CreateDashboardSnapshot(&cmd) + So(err, ShouldBeNil) + + Convey("Should not return any snapshots", func() { + query := m.GetDashboardSnapshotsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_EDITOR, IsAnonymous: true, UserId: 0}, + } + err := SearchDashboardSnapshots(&query) + So(err, ShouldBeNil) + + So(query.Result, ShouldNotBeNil) + So(len(query.Result), ShouldEqual, 0) + }) + }) }) }) } + +func TestDeleteExpiredSnapshots(t *testing.T) { + Convey("Testing dashboard snapshots clean up", t, func() { + x := InitTestDB(t) + + setting.SnapShotRemoveExpired = true + + notExpiredSnapshot := createTestSnapshot(x, "key1", 1000) + createTestSnapshot(x, "key2", -1000) + createTestSnapshot(x, "key3", -1000) + + Convey("Clean up old dashboard snapshots", func() { + err := DeleteExpiredSnapshots(&m.DeleteExpiredSnapshotsCommand{}) + So(err, ShouldBeNil) + + query := m.GetDashboardSnapshotsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}, + } + err = SearchDashboardSnapshots(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Key, ShouldEqual, notExpiredSnapshot.Key) + }) + + Convey("Don't delete anything if there are no expired snapshots", func() { + err := DeleteExpiredSnapshots(&m.DeleteExpiredSnapshotsCommand{}) + So(err, ShouldBeNil) + + query := m.GetDashboardSnapshotsQuery{ + OrgId: 1, + SignedInUser: &m.SignedInUser{OrgRole: m.ROLE_ADMIN}, + } + 
SearchDashboardSnapshots(&query) + + So(len(query.Result), ShouldEqual, 1) + }) + }) +} + +func createTestSnapshot(x *xorm.Engine, key string, expires int64) *m.DashboardSnapshot { + cmd := m.CreateDashboardSnapshotCommand{ + Key: key, + DeleteKey: "delete" + key, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "hello": "mupp", + }), + UserId: 1000, + OrgId: 1, + Expires: expires, + } + err := CreateDashboardSnapshot(&cmd) + So(err, ShouldBeNil) + + // Set expiry date manually - to be able to create expired snapshots + expireDate := time.Now().Add(time.Second * time.Duration(expires)) + _, err = x.Exec("update dashboard_snapshot set expires = ? where "+dialect.Quote("key")+" = ?", expireDate, key) + So(err, ShouldBeNil) + + return cmd.Result +} diff --git a/pkg/services/sqlstore/dashboard_test.go b/pkg/services/sqlstore/dashboard_test.go index a552bd0546a..9124a686236 100644 --- a/pkg/services/sqlstore/dashboard_test.go +++ b/pkg/services/sqlstore/dashboard_test.go @@ -1,22 +1,21 @@ package sqlstore import ( + "fmt" "testing" - - "github.com/go-xorm/xorm" - . "github.com/smartystreets/goconvey/convey" + "time" "github.com/grafana/grafana/pkg/components/simplejson" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/search" "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" + . 
"github.com/smartystreets/goconvey/convey" ) func TestDashboardDataAccess(t *testing.T) { - var x *xorm.Engine - Convey("Testing DB", t, func() { - x = InitTestDB(t) + InitTestDB(t) Convey("Given saved dashboard", func() { savedFolder := insertTestDashboard("1 test dash folder", 1, 0, true, "prod", "webapp") @@ -30,15 +29,33 @@ func TestDashboardDataAccess(t *testing.T) { So(savedDash.Id, ShouldNotEqual, 0) So(savedDash.IsFolder, ShouldBeFalse) So(savedDash.FolderId, ShouldBeGreaterThan, 0) + So(len(savedDash.Uid), ShouldBeGreaterThan, 0) So(savedFolder.Title, ShouldEqual, "1 test dash folder") So(savedFolder.Slug, ShouldEqual, "1-test-dash-folder") So(savedFolder.Id, ShouldNotEqual, 0) So(savedFolder.IsFolder, ShouldBeTrue) So(savedFolder.FolderId, ShouldEqual, 0) + So(len(savedFolder.Uid), ShouldBeGreaterThan, 0) }) - Convey("Should be able to get dashboard", func() { + Convey("Should be able to get dashboard by id", func() { + query := m.GetDashboardQuery{ + Id: savedDash.Id, + OrgId: 1, + } + + err := GetDashboard(&query) + So(err, ShouldBeNil) + + So(query.Result.Title, ShouldEqual, "test dash 23") + So(query.Result.Slug, ShouldEqual, "test-dash-23") + So(query.Result.Id, ShouldEqual, savedDash.Id) + So(query.Result.Uid, ShouldEqual, savedDash.Uid) + So(query.Result.IsFolder, ShouldBeFalse) + }) + + Convey("Should be able to get dashboard by slug", func() { query := m.GetDashboardQuery{ Slug: "test-dash-23", OrgId: 1, @@ -49,6 +66,24 @@ func TestDashboardDataAccess(t *testing.T) { So(query.Result.Title, ShouldEqual, "test dash 23") So(query.Result.Slug, ShouldEqual, "test-dash-23") + So(query.Result.Id, ShouldEqual, savedDash.Id) + So(query.Result.Uid, ShouldEqual, savedDash.Uid) + So(query.Result.IsFolder, ShouldBeFalse) + }) + + Convey("Should be able to get dashboard by uid", func() { + query := m.GetDashboardQuery{ + Uid: savedDash.Uid, + OrgId: 1, + } + + err := GetDashboard(&query) + So(err, ShouldBeNil) + + So(query.Result.Title, ShouldEqual, "test dash 
23") + So(query.Result.Slug, ShouldEqual, "test-dash-23") + So(query.Result.Id, ShouldEqual, savedDash.Id) + So(query.Result.Uid, ShouldEqual, savedDash.Uid) So(query.Result.IsFolder, ShouldBeFalse) }) @@ -63,124 +98,59 @@ func TestDashboardDataAccess(t *testing.T) { So(err, ShouldBeNil) }) - Convey("Should return error if no dashboard is updated", func() { - cmd := m.SaveDashboardCommand{ - OrgId: 1, - Overwrite: true, - Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "id": float64(123412321), - "title": "Expect error", - "tags": []interface{}{}, - }), - } - - err := SaveDashboard(&cmd) - So(err, ShouldNotBeNil) - }) - - Convey("Should not be able to overwrite dashboard in another org", func() { - query := m.GetDashboardQuery{Slug: "test-dash-23", OrgId: 1} - GetDashboard(&query) - - cmd := m.SaveDashboardCommand{ - OrgId: 2, - Overwrite: true, - Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "id": float64(query.Result.Id), - "title": "Expect error", - "tags": []interface{}{}, - }), - } - - err := SaveDashboard(&cmd) - So(err, ShouldNotBeNil) - }) - - Convey("Should be able to search for dashboard folder", func() { - query := search.FindPersistedDashboardsQuery{ - Title: "1 test dash folder", - OrgId: 1, - SignedInUser: &m.SignedInUser{OrgId: 1}, - } - - err := SearchDashboards(&query) - So(err, ShouldBeNil) - - So(len(query.Result), ShouldEqual, 1) - hit := query.Result[0] - So(hit.Type, ShouldEqual, search.DashHitFolder) - }) - - Convey("Should be able to search for a dashboard folder's children", func() { - query := search.FindPersistedDashboardsQuery{ - OrgId: 1, - FolderIds: []int64{savedFolder.Id}, - SignedInUser: &m.SignedInUser{OrgId: 1}, - } - - err := SearchDashboards(&query) - So(err, ShouldBeNil) - - So(len(query.Result), ShouldEqual, 2) - hit := query.Result[0] - So(hit.Id, ShouldEqual, savedDash.Id) - }) - - Convey("Should be able to search for dashboard by dashboard ids", func() { - Convey("should be able to find two 
dashboards by id", func() { - query := search.FindPersistedDashboardsQuery{ - DashboardIds: []int64{2, 3}, - SignedInUser: &m.SignedInUser{OrgId: 1}, + Convey("Should retry generation of uid once if it fails.", func() { + timesCalled := 0 + generateNewUid = func() string { + timesCalled += 1 + if timesCalled <= 2 { + return savedDash.Uid + } else { + return util.GenerateShortUid() } - - err := SearchDashboards(&query) - So(err, ShouldBeNil) - - So(len(query.Result), ShouldEqual, 2) - - hit := query.Result[0] - So(len(hit.Tags), ShouldEqual, 2) - - hit2 := query.Result[1] - So(len(hit2.Tags), ShouldEqual, 1) - }) - - Convey("DashboardIds that does not exists should not cause errors", func() { - query := search.FindPersistedDashboardsQuery{ - DashboardIds: []int64{1000}, - SignedInUser: &m.SignedInUser{OrgId: 1}, - } - - err := SearchDashboards(&query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 0) - }) - }) - - Convey("Should not be able to save dashboard with same name", func() { + } cmd := m.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "id": nil, - "title": "test dash 23", + "title": "new dash 12334", "tags": []interface{}{}, }), } err := SaveDashboard(&cmd) - So(err, ShouldNotBeNil) + So(err, ShouldBeNil) + + generateNewUid = util.GenerateShortUid }) - Convey("Should be able to update dashboard and remove folderId", func() { + Convey("Should be able to create dashboard", func() { cmd := m.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "id": 1, + "title": "folderId", + "tags": []interface{}{}, + }), + UserId: 100, + } + + err := SaveDashboard(&cmd) + So(err, ShouldBeNil) + So(cmd.Result.CreatedBy, ShouldEqual, 100) + So(cmd.Result.Created.IsZero(), ShouldBeFalse) + So(cmd.Result.UpdatedBy, ShouldEqual, 100) + So(cmd.Result.Updated.IsZero(), ShouldBeFalse) + }) + + Convey("Should be able to update dashboard by id and remove folderId", func() { + cmd := 
m.SaveDashboardCommand{ + OrgId: 1, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": savedDash.Id, "title": "folderId", "tags": []interface{}{}, }), Overwrite: true, FolderId: 2, + UserId: 100, } err := SaveDashboard(&cmd) @@ -190,25 +160,30 @@ func TestDashboardDataAccess(t *testing.T) { cmd = m.SaveDashboardCommand{ OrgId: 1, Dashboard: simplejson.NewFromAny(map[string]interface{}{ - "id": 1, + "id": savedDash.Id, "title": "folderId", "tags": []interface{}{}, }), FolderId: 0, Overwrite: true, + UserId: 100, } err = SaveDashboard(&cmd) So(err, ShouldBeNil) query := m.GetDashboardQuery{ - Slug: cmd.Result.Slug, + Id: savedDash.Id, OrgId: 1, } err = GetDashboard(&query) So(err, ShouldBeNil) So(query.Result.FolderId, ShouldEqual, 0) + So(query.Result.CreatedBy, ShouldEqual, savedDash.CreatedBy) + So(query.Result.Created, ShouldEqual, savedDash.Created.Truncate(time.Second)) + So(query.Result.UpdatedBy, ShouldEqual, 100) + So(query.Result.Updated.IsZero(), ShouldBeFalse) }) Convey("Should be able to delete a dashboard folder and its children", func() { @@ -228,6 +203,36 @@ func TestDashboardDataAccess(t *testing.T) { So(len(query.Result), ShouldEqual, 0) }) + Convey("Should return error if no dashboard is found for update when dashboard id is greater than zero", func() { + cmd := m.SaveDashboardCommand{ + OrgId: 1, + Overwrite: true, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": float64(123412321), + "title": "Expect error", + "tags": []interface{}{}, + }), + } + + err := SaveDashboard(&cmd) + So(err, ShouldEqual, m.ErrDashboardNotFound) + }) + + Convey("Should not return error if no dashboard is found for update when dashboard id is zero", func() { + cmd := m.SaveDashboardCommand{ + OrgId: 1, + Overwrite: true, + Dashboard: simplejson.NewFromAny(map[string]interface{}{ + "id": 0, + "title": "New dash", + "tags": []interface{}{}, + }), + } + + err := SaveDashboard(&cmd) + So(err, ShouldBeNil) + }) + Convey("Should be able to get 
dashboard tags", func() { query := m.GetDashboardTagsQuery{OrgId: 1} @@ -237,6 +242,63 @@ func TestDashboardDataAccess(t *testing.T) { So(len(query.Result), ShouldEqual, 2) }) + Convey("Should be able to search for dashboard folder", func() { + query := search.FindPersistedDashboardsQuery{ + Title: "1 test dash folder", + OrgId: 1, + SignedInUser: &m.SignedInUser{OrgId: 1, OrgRole: m.ROLE_EDITOR}, + } + + err := SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 1) + hit := query.Result[0] + So(hit.Type, ShouldEqual, search.DashHitFolder) + So(hit.Url, ShouldEqual, fmt.Sprintf("/dashboards/f/%s/%s", savedFolder.Uid, savedFolder.Slug)) + So(hit.FolderTitle, ShouldEqual, "") + }) + + Convey("Should be able to search for a dashboard folder's children", func() { + query := search.FindPersistedDashboardsQuery{ + OrgId: 1, + FolderIds: []int64{savedFolder.Id}, + SignedInUser: &m.SignedInUser{OrgId: 1, OrgRole: m.ROLE_EDITOR}, + } + + err := SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + hit := query.Result[0] + So(hit.Id, ShouldEqual, savedDash.Id) + So(hit.Url, ShouldEqual, fmt.Sprintf("/d/%s/%s", savedDash.Uid, savedDash.Slug)) + So(hit.FolderId, ShouldEqual, savedFolder.Id) + So(hit.FolderUid, ShouldEqual, savedFolder.Uid) + So(hit.FolderTitle, ShouldEqual, savedFolder.Title) + So(hit.FolderUrl, ShouldEqual, fmt.Sprintf("/dashboards/f/%s/%s", savedFolder.Uid, savedFolder.Slug)) + }) + + Convey("Should be able to search for dashboard by dashboard ids", func() { + Convey("should be able to find two dashboards by id", func() { + query := search.FindPersistedDashboardsQuery{ + DashboardIds: []int64{2, 3}, + SignedInUser: &m.SignedInUser{OrgId: 1, OrgRole: m.ROLE_EDITOR}, + } + + err := SearchDashboards(&query) + So(err, ShouldBeNil) + + So(len(query.Result), ShouldEqual, 2) + + hit := query.Result[0] + So(len(hit.Tags), ShouldEqual, 2) + + hit2 := query.Result[1] + So(len(hit2.Tags), 
ShouldEqual, 1) + }) + }) + Convey("Given two dashboards, one is starred dashboard by user 10, other starred by user 1", func() { starredDash := insertTestDashboard("starred dash", 1, 0, false) StarDashboard(&m.StarDashboardCommand{ @@ -250,7 +312,10 @@ func TestDashboardDataAccess(t *testing.T) { }) Convey("Should be able to search for starred dashboards", func() { - query := search.FindPersistedDashboardsQuery{SignedInUser: &m.SignedInUser{UserId: 10, OrgId: 1}, IsStarred: true} + query := search.FindPersistedDashboardsQuery{ + SignedInUser: &m.SignedInUser{UserId: 10, OrgId: 1, OrgRole: m.ROLE_EDITOR}, + IsStarred: true, + } err := SearchDashboards(&query) So(err, ShouldBeNil) @@ -260,205 +325,6 @@ func TestDashboardDataAccess(t *testing.T) { }) }) - Convey("Given one dashboard folder with two dashboards and one dashboard in the root folder", func() { - folder := insertTestDashboard("1 test dash folder", 1, 0, true, "prod", "webapp") - dashInRoot := insertTestDashboard("test dash 67", 1, 0, false, "prod", "webapp") - childDash := insertTestDashboard("test dash 23", 1, folder.Id, false, "prod", "webapp") - insertTestDashboard("test dash 45", 1, folder.Id, false, "prod") - - currentUser := createUser("viewer", "Viewer", false) - - Convey("and no acls are set", func() { - Convey("should return all dashboards", func() { - query := &search.FindPersistedDashboardsQuery{SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, OrgId: 1, DashboardIds: []int64{folder.Id, dashInRoot.Id}} - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 2) - So(query.Result[0].Id, ShouldEqual, folder.Id) - So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) - }) - }) - - Convey("and acl is set for dashboard folder", func() { - var otherUser int64 = 999 - updateTestDashboardWithAcl(folder.Id, otherUser, m.PERMISSION_EDIT) - - Convey("should not return folder", func() { - query := &search.FindPersistedDashboardsQuery{SignedInUser: 
&m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, OrgId: 1, DashboardIds: []int64{folder.Id, dashInRoot.Id}} - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 1) - So(query.Result[0].Id, ShouldEqual, dashInRoot.Id) - }) - - Convey("when the user is given permission", func() { - updateTestDashboardWithAcl(folder.Id, currentUser.Id, m.PERMISSION_EDIT) - - Convey("should be able to access folder", func() { - query := &search.FindPersistedDashboardsQuery{SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, OrgId: 1, DashboardIds: []int64{folder.Id, dashInRoot.Id}} - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 2) - So(query.Result[0].Id, ShouldEqual, folder.Id) - So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) - }) - }) - - Convey("when the user is an admin", func() { - Convey("should be able to access folder", func() { - query := &search.FindPersistedDashboardsQuery{ - SignedInUser: &m.SignedInUser{ - UserId: currentUser.Id, - OrgId: 1, - OrgRole: m.ROLE_ADMIN, - }, - OrgId: 1, - DashboardIds: []int64{folder.Id, dashInRoot.Id}, - } - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 2) - So(query.Result[0].Id, ShouldEqual, folder.Id) - So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) - }) - }) - }) - - Convey("and acl is set for dashboard child and folder has all permissions removed", func() { - var otherUser int64 = 999 - aclId := updateTestDashboardWithAcl(folder.Id, otherUser, m.PERMISSION_EDIT) - removeAcl(aclId) - updateTestDashboardWithAcl(childDash.Id, otherUser, m.PERMISSION_EDIT) - - Convey("should not return folder or child", func() { - query := &search.FindPersistedDashboardsQuery{SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, OrgId: 1, DashboardIds: []int64{folder.Id, childDash.Id, dashInRoot.Id}} - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 1) - 
So(query.Result[0].Id, ShouldEqual, dashInRoot.Id) - }) - - Convey("when the user is given permission to child", func() { - updateTestDashboardWithAcl(childDash.Id, currentUser.Id, m.PERMISSION_EDIT) - - Convey("should be able to search for child dashboard but not folder", func() { - query := &search.FindPersistedDashboardsQuery{SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, OrgId: 1, DashboardIds: []int64{folder.Id, childDash.Id, dashInRoot.Id}} - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 2) - So(query.Result[0].Id, ShouldEqual, childDash.Id) - So(query.Result[1].Id, ShouldEqual, dashInRoot.Id) - }) - }) - - Convey("when the user is an admin", func() { - Convey("should be able to search for child dash and folder", func() { - query := &search.FindPersistedDashboardsQuery{ - SignedInUser: &m.SignedInUser{ - UserId: currentUser.Id, - OrgId: 1, - OrgRole: m.ROLE_ADMIN, - }, - OrgId: 1, - DashboardIds: []int64{folder.Id, dashInRoot.Id, childDash.Id}, - } - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 3) - So(query.Result[0].Id, ShouldEqual, folder.Id) - So(query.Result[1].Id, ShouldEqual, childDash.Id) - So(query.Result[2].Id, ShouldEqual, dashInRoot.Id) - }) - }) - }) - }) - - Convey("Given two dashboard folders with one dashboard each and one dashboard in the root folder", func() { - folder1 := insertTestDashboard("1 test dash folder", 1, 0, true, "prod") - folder2 := insertTestDashboard("2 test dash folder", 1, 0, true, "prod") - dashInRoot := insertTestDashboard("test dash 67", 1, 0, false, "prod") - childDash1 := insertTestDashboard("child dash 1", 1, folder1.Id, false, "prod") - childDash2 := insertTestDashboard("child dash 2", 1, folder2.Id, false, "prod") - - currentUser := createUser("viewer", "Viewer", false) - var rootFolderId int64 = 0 - - Convey("and one folder is expanded, the other collapsed", func() { - Convey("should return dashboards in 
root and expanded folder", func() { - query := &search.FindPersistedDashboardsQuery{FolderIds: []int64{rootFolderId, folder1.Id}, SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, OrgId: 1} - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 4) - So(query.Result[0].Id, ShouldEqual, folder1.Id) - So(query.Result[1].Id, ShouldEqual, folder2.Id) - So(query.Result[2].Id, ShouldEqual, childDash1.Id) - So(query.Result[3].Id, ShouldEqual, dashInRoot.Id) - }) - }) - - Convey("and acl is set for one dashboard folder", func() { - var otherUser int64 = 999 - updateTestDashboardWithAcl(folder1.Id, otherUser, m.PERMISSION_EDIT) - - Convey("and a dashboard is moved from folder without acl to the folder with an acl", func() { - movedDash := moveDashboard(1, childDash2.Data, folder1.Id) - So(movedDash.HasAcl, ShouldBeTrue) - - Convey("should not return folder with acl or its children", func() { - query := &search.FindPersistedDashboardsQuery{ - SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, - OrgId: 1, - DashboardIds: []int64{folder1.Id, childDash1.Id, childDash2.Id, dashInRoot.Id}, - } - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 1) - So(query.Result[0].Id, ShouldEqual, dashInRoot.Id) - }) - }) - - Convey("and a dashboard is moved from folder with acl to the folder without an acl", func() { - movedDash := moveDashboard(1, childDash1.Data, folder2.Id) - So(movedDash.HasAcl, ShouldBeFalse) - - Convey("should return folder without acl and its children", func() { - query := &search.FindPersistedDashboardsQuery{ - SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, - OrgId: 1, - DashboardIds: []int64{folder2.Id, childDash1.Id, childDash2.Id, dashInRoot.Id}, - } - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 4) - So(query.Result[0].Id, ShouldEqual, folder2.Id) - So(query.Result[1].Id, ShouldEqual, 
childDash1.Id) - So(query.Result[2].Id, ShouldEqual, childDash2.Id) - So(query.Result[3].Id, ShouldEqual, dashInRoot.Id) - }) - }) - - Convey("and a dashboard with an acl is moved to the folder without an acl", func() { - updateTestDashboardWithAcl(childDash1.Id, otherUser, m.PERMISSION_EDIT) - movedDash := moveDashboard(1, childDash1.Data, folder2.Id) - So(movedDash.HasAcl, ShouldBeTrue) - - Convey("should return folder without acl but not the dashboard with acl", func() { - query := &search.FindPersistedDashboardsQuery{ - SignedInUser: &m.SignedInUser{UserId: currentUser.Id, OrgId: 1}, - OrgId: 1, - DashboardIds: []int64{folder2.Id, childDash1.Id, childDash2.Id, dashInRoot.Id}, - } - err := SearchDashboards(query) - So(err, ShouldBeNil) - So(len(query.Result), ShouldEqual, 3) - So(query.Result[0].Id, ShouldEqual, folder2.Id) - So(query.Result[1].Id, ShouldEqual, childDash2.Id) - So(query.Result[2].Id, ShouldEqual, dashInRoot.Id) - }) - }) - }) - }) - Convey("Given a plugin with imported dashboards", func() { pluginId := "test-app" @@ -495,6 +361,9 @@ func insertTestDashboard(title string, orgId int64, folderId int64, isFolder boo err := SaveDashboard(&cmd) So(err, ShouldBeNil) + cmd.Result.Data.Set("id", cmd.Result.Id) + cmd.Result.Data.Set("uid", cmd.Result.Uid) + return cmd.Result } @@ -531,25 +400,6 @@ func createUser(name string, role string, isAdmin bool) m.User { return currentUserCmd.Result } -func updateTestDashboardWithAcl(dashId int64, userId int64, permissions m.PermissionType) int64 { - cmd := &m.SetDashboardAclCommand{ - OrgId: 1, - UserId: userId, - DashboardId: dashId, - Permission: permissions, - } - - err := SetDashboardAcl(cmd) - So(err, ShouldBeNil) - - return cmd.Result.Id -} - -func removeAcl(aclId int64) { - err := RemoveDashboardAcl(&m.RemoveDashboardAclCommand{AclId: aclId, OrgId: 1}) - So(err, ShouldBeNil) -} - func moveDashboard(orgId int64, dashboard *simplejson.Json, newFolderId int64) *m.Dashboard { cmd := m.SaveDashboardCommand{ 
OrgId: orgId, diff --git a/pkg/services/sqlstore/dashboard_version.go b/pkg/services/sqlstore/dashboard_version.go index 49c35397094..547f62628f3 100644 --- a/pkg/services/sqlstore/dashboard_version.go +++ b/pkg/services/sqlstore/dashboard_version.go @@ -69,7 +69,6 @@ func GetDashboardVersions(query *m.GetDashboardVersionsQuery) error { func DeleteExpiredVersions(cmd *m.DeleteExpiredVersionsCommand) error { return inTransaction(func(sess *DBSession) error { - expiredCount := int64(0) versions := []DashboardVersionExp{} versionsToKeep := setting.DashboardVersionsToKeep @@ -98,8 +97,7 @@ func DeleteExpiredVersions(cmd *m.DeleteExpiredVersionsCommand) error { if err != nil { return err } - expiredCount, _ = expiredResponse.RowsAffected() - sqlog.Debug("Deleted old/expired dashboard versions", "expired", expiredCount) + cmd.DeletedRows, _ = expiredResponse.RowsAffected() } return nil diff --git a/pkg/services/sqlstore/dashboard_version_test.go b/pkg/services/sqlstore/dashboard_version_test.go index 6ed37cd6904..1b74e7847c4 100644 --- a/pkg/services/sqlstore/dashboard_version_test.go +++ b/pkg/services/sqlstore/dashboard_version_test.go @@ -12,7 +12,7 @@ import ( ) func updateTestDashboard(dashboard *m.Dashboard, data map[string]interface{}) { - data["title"] = dashboard.Title + data["id"] = dashboard.Id saveCmd := m.SaveDashboardCommand{ OrgId: dashboard.OrgId, @@ -44,7 +44,7 @@ func TestGetDashboardVersion(t *testing.T) { dashCmd := m.GetDashboardQuery{ OrgId: savedDash.OrgId, - Slug: savedDash.Slug, + Uid: savedDash.Uid, } err = GetDashboard(&dashCmd) diff --git a/pkg/services/sqlstore/datasource.go b/pkg/services/sqlstore/datasource.go index e9b400a1772..00d520bcfc6 100644 --- a/pkg/services/sqlstore/datasource.go +++ b/pkg/services/sqlstore/datasource.go @@ -27,6 +27,9 @@ func GetDataSourceById(query *m.GetDataSourceByIdQuery) error { datasource := m.DataSource{OrgId: query.OrgId, Id: query.Id} has, err := x.Get(&datasource) + if err != nil { + return err + } if 
!has { return m.ErrDataSourceNotFound diff --git a/pkg/services/sqlstore/datasource_test.go b/pkg/services/sqlstore/datasource_test.go index e6f0114ab4d..90300e20029 100644 --- a/pkg/services/sqlstore/datasource_test.go +++ b/pkg/services/sqlstore/datasource_test.go @@ -3,34 +3,11 @@ package sqlstore import ( "testing" - "github.com/go-xorm/xorm" - . "github.com/smartystreets/goconvey/convey" m "github.com/grafana/grafana/pkg/models" - "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" ) -func InitTestDB(t *testing.T) *xorm.Engine { - x, err := xorm.NewEngine(sqlutil.TestDB_Sqlite3.DriverName, sqlutil.TestDB_Sqlite3.ConnStr) - //x, err := xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, sqlutil.TestDB_Mysql.ConnStr) - //x, err := xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, sqlutil.TestDB_Postgres.ConnStr) - - // x.ShowSQL() - - if err != nil { - t.Fatalf("Failed to init in memory sqllite3 db %v", err) - } - - sqlutil.CleanDB(x) - - if err := SetEngine(x); err != nil { - t.Fatal(err) - } - - return x -} - type Test struct { Id int64 Name string diff --git a/pkg/services/sqlstore/login_attempt.go b/pkg/services/sqlstore/login_attempt.go new file mode 100644 index 00000000000..78da198e8e7 --- /dev/null +++ b/pkg/services/sqlstore/login_attempt.go @@ -0,0 +1,91 @@ +package sqlstore + +import ( + "strconv" + "time" + + "github.com/grafana/grafana/pkg/bus" + m "github.com/grafana/grafana/pkg/models" +) + +var getTimeNow = time.Now + +func init() { + bus.AddHandler("sql", CreateLoginAttempt) + bus.AddHandler("sql", DeleteOldLoginAttempts) + bus.AddHandler("sql", GetUserLoginAttemptCount) +} + +func CreateLoginAttempt(cmd *m.CreateLoginAttemptCommand) error { + return inTransaction(func(sess *DBSession) error { + loginAttempt := m.LoginAttempt{ + Username: cmd.Username, + IpAddress: cmd.IpAddress, + Created: getTimeNow().Unix(), + } + + if _, err := sess.Insert(&loginAttempt); err != nil { + return err + } + + cmd.Result = loginAttempt + + return nil + }) +} 
+ +func DeleteOldLoginAttempts(cmd *m.DeleteOldLoginAttemptsCommand) error { + return inTransaction(func(sess *DBSession) error { + var maxId int64 + sql := "SELECT max(id) as id FROM login_attempt WHERE created < ?" + result, err := sess.Query(sql, cmd.OlderThan.Unix()) + + if err != nil { + return err + } + + maxId = toInt64(result[0]["id"]) + + if maxId == 0 { + return nil + } + + sql = "DELETE FROM login_attempt WHERE id <= ?" + + if result, err := sess.Exec(sql, maxId); err != nil { + return err + } else if cmd.DeletedRows, err = result.RowsAffected(); err != nil { + return err + } + + return nil + }) +} + +func GetUserLoginAttemptCount(query *m.GetUserLoginAttemptCountQuery) error { + loginAttempt := new(m.LoginAttempt) + total, err := x. + Where("username = ?", query.Username). + And("created >= ?", query.Since.Unix()). + Count(loginAttempt) + + if err != nil { + return err + } + + query.Result = total + return nil +} + +func toInt64(i interface{}) int64 { + switch i.(type) { + case []byte: + n, _ := strconv.ParseInt(string(i.([]byte)), 10, 64) + return n + case int: + return int64(i.(int)) + case int64: + return i.(int64) + } + return 0 +} diff --git a/pkg/services/sqlstore/login_attempt_test.go b/pkg/services/sqlstore/login_attempt_test.go new file mode 100644 index 00000000000..8008e2d8a62 --- /dev/null +++ b/pkg/services/sqlstore/login_attempt_test.go @@ -0,0 +1,125 @@ +package sqlstore + +import ( + "testing" + "time" + + m "github.com/grafana/grafana/pkg/models" + . 
"github.com/smartystreets/goconvey/convey" +) + +func mockTime(mock time.Time) time.Time { + getTimeNow = func() time.Time { return mock } + return mock +} + +func TestLoginAttempts(t *testing.T) { + Convey("Testing Login Attempts DB Access", t, func() { + InitTestDB(t) + + user := "user" + beginningOfTime := mockTime(time.Date(2017, 10, 22, 8, 0, 0, 0, time.Local)) + + err := CreateLoginAttempt(&m.CreateLoginAttemptCommand{ + Username: user, + IpAddress: "192.168.0.1", + }) + So(err, ShouldBeNil) + + timePlusOneMinute := mockTime(beginningOfTime.Add(time.Minute * 1)) + + err = CreateLoginAttempt(&m.CreateLoginAttemptCommand{ + Username: user, + IpAddress: "192.168.0.1", + }) + So(err, ShouldBeNil) + + timePlusTwoMinutes := mockTime(beginningOfTime.Add(time.Minute * 2)) + + err = CreateLoginAttempt(&m.CreateLoginAttemptCommand{ + Username: user, + IpAddress: "192.168.0.1", + }) + So(err, ShouldBeNil) + + Convey("Should return a total count of zero login attempts when comparing since beginning of time + 2min and 1s", func() { + query := m.GetUserLoginAttemptCountQuery{ + Username: user, + Since: timePlusTwoMinutes.Add(time.Second * 1), + } + err := GetUserLoginAttemptCount(&query) + So(err, ShouldBeNil) + So(query.Result, ShouldEqual, 0) + }) + + Convey("Should return the total count of login attempts since beginning of time", func() { + query := m.GetUserLoginAttemptCountQuery{ + Username: user, + Since: beginningOfTime, + } + err := GetUserLoginAttemptCount(&query) + So(err, ShouldBeNil) + So(query.Result, ShouldEqual, 3) + }) + + Convey("Should return the total count of login attempts since beginning of time + 1min", func() { + query := m.GetUserLoginAttemptCountQuery{ + Username: user, + Since: timePlusOneMinute, + } + err := GetUserLoginAttemptCount(&query) + So(err, ShouldBeNil) + So(query.Result, ShouldEqual, 2) + }) + + Convey("Should return the total count of login attempts since beginning of time + 2min", func() { + query := 
m.GetUserLoginAttemptCountQuery{ + Username: user, + Since: timePlusTwoMinutes, + } + err := GetUserLoginAttemptCount(&query) + So(err, ShouldBeNil) + So(query.Result, ShouldEqual, 1) + }) + + Convey("Should return deleted rows older than beginning of time", func() { + cmd := m.DeleteOldLoginAttemptsCommand{ + OlderThan: beginningOfTime, + } + err := DeleteOldLoginAttempts(&cmd) + + So(err, ShouldBeNil) + So(cmd.DeletedRows, ShouldEqual, 0) + }) + + Convey("Should return deleted rows older than beginning of time + 1min", func() { + cmd := m.DeleteOldLoginAttemptsCommand{ + OlderThan: timePlusOneMinute, + } + err := DeleteOldLoginAttempts(&cmd) + + So(err, ShouldBeNil) + So(cmd.DeletedRows, ShouldEqual, 1) + }) + + Convey("Should return deleted rows older than beginning of time + 2min", func() { + cmd := m.DeleteOldLoginAttemptsCommand{ + OlderThan: timePlusTwoMinutes, + } + err := DeleteOldLoginAttempts(&cmd) + + So(err, ShouldBeNil) + So(cmd.DeletedRows, ShouldEqual, 2) + }) + + Convey("Should return deleted rows older than beginning of time + 2min and 1s", func() { + cmd := m.DeleteOldLoginAttemptsCommand{ + OlderThan: timePlusTwoMinutes.Add(time.Second * 1), + } + err := DeleteOldLoginAttempts(&cmd) + + So(err, ShouldBeNil) + So(cmd.DeletedRows, ShouldEqual, 3) + }) + }) +} diff --git a/pkg/services/sqlstore/migrations/common.go b/pkg/services/sqlstore/migrations/common.go index cf0b39f1a35..cc31b1d4580 100644 --- a/pkg/services/sqlstore/migrations/common.go +++ b/pkg/services/sqlstore/migrations/common.go @@ -24,3 +24,24 @@ func addTableRenameMigration(mg *Migrator, oldName string, newName string, versi migrationId := fmt.Sprintf("Rename table %s to %s - %s", oldName, newName, versionSuffix) mg.AddMigration(migrationId, NewRenameTableMigration(oldName, newName)) } + +func addTableReplaceMigrations(mg *Migrator, from Table, to Table, migrationVersion int64, tableDataMigration map[string]string) { + fromV := version(migrationVersion - 1) + toV := 
version(migrationVersion) + tmpTableName := to.Name + "_tmp_qwerty" + + createTable := fmt.Sprintf("create %v %v", to.Name, toV) + copyTableData := fmt.Sprintf("copy %v %v to %v", to.Name, fromV, toV) + dropTable := fmt.Sprintf("drop %v", tmpTableName) + + addDropAllIndicesMigrations(mg, fromV, from) + addTableRenameMigration(mg, from.Name, tmpTableName, fromV) + mg.AddMigration(createTable, NewAddTableMigration(to)) + addTableIndicesMigrations(mg, toV, to) + mg.AddMigration(copyTableData, NewCopyTableDataMigration(to.Name, tmpTableName, tableDataMigration)) + mg.AddMigration(dropTable, NewDropTableMigration(tmpTableName)) +} + +func version(v int64) string { + return fmt.Sprintf("v%v", v) +} diff --git a/pkg/services/sqlstore/migrations/dashboard_mig.go b/pkg/services/sqlstore/migrations/dashboard_mig.go index 4f1602be931..296950ee497 100644 --- a/pkg/services/sqlstore/migrations/dashboard_mig.go +++ b/pkg/services/sqlstore/migrations/dashboard_mig.go @@ -1,6 +1,8 @@ package migrations -import . "github.com/grafana/grafana/pkg/services/sqlstore/migrator" +import ( + . "github.com/grafana/grafana/pkg/services/sqlstore/migrator" +) func addDashboardMigration(mg *Migrator) { var dashboardV1 = Table{ @@ -150,4 +152,65 @@ func addDashboardMigration(mg *Migrator) { mg.AddMigration("Add column has_acl in dashboard", NewAddColumnMigration(dashboardV2, &Column{ Name: "has_acl", Type: DB_Bool, Nullable: false, Default: "0", })) + + mg.AddMigration("Add column uid in dashboard", NewAddColumnMigration(dashboardV2, &Column{ + Name: "uid", Type: DB_NVarchar, Length: 40, Nullable: true, + })) + + mg.AddMigration("Update uid column values in dashboard", new(RawSqlMigration). + Sqlite("UPDATE dashboard SET uid=printf('%09d',id) WHERE uid IS NULL;"). + Postgres("UPDATE dashboard SET uid=lpad('' || id,9,'0') WHERE uid IS NULL;"). 
+ Mysql("UPDATE dashboard SET uid=lpad(id,9,'0') WHERE uid IS NULL;")) + + mg.AddMigration("Add unique index dashboard_org_id_uid", NewAddIndexMigration(dashboardV2, &Index{ + Cols: []string{"org_id", "uid"}, Type: UniqueIndex, + })) + + mg.AddMigration("Remove unique index org_id_slug", NewDropIndexMigration(dashboardV2, &Index{ + Cols: []string{"org_id", "slug"}, Type: UniqueIndex, + })) + + mg.AddMigration("Update dashboard title length", NewTableCharsetMigration("dashboard", []*Column{ + {Name: "title", Type: DB_NVarchar, Length: 189, Nullable: false}, + })) + + mg.AddMigration("Add unique index for dashboard_org_id_title_folder_id", NewAddIndexMigration(dashboardV2, &Index{ + Cols: []string{"org_id", "folder_id", "title"}, Type: UniqueIndex, + })) + + dashboardExtrasTable := Table{ + Name: "dashboard_provisioning", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "dashboard_id", Type: DB_BigInt, Nullable: true}, + {Name: "name", Type: DB_NVarchar, Length: 150, Nullable: false}, + {Name: "external_id", Type: DB_Text, Nullable: false}, + {Name: "updated", Type: DB_DateTime, Nullable: false}, + }, + Indices: []*Index{}, + } + + mg.AddMigration("create dashboard_provisioning", NewAddTableMigration(dashboardExtrasTable)) + + dashboardExtrasTableV2 := Table{ + Name: "dashboard_provisioning", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "dashboard_id", Type: DB_BigInt, Nullable: true}, + {Name: "name", Type: DB_NVarchar, Length: 150, Nullable: false}, + {Name: "external_id", Type: DB_Text, Nullable: false}, + {Name: "updated", Type: DB_Int, Default: "0", Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"dashboard_id"}}, + {Cols: []string{"dashboard_id", "name"}, Type: IndexType}, + }, + } + + addTableReplaceMigrations(mg, dashboardExtrasTable, dashboardExtrasTableV2, 2, map[string]string{ + "id": "id", + "dashboard_id": "dashboard_id", 
+ "name": "name", + "external_id": "external_id", + }) } diff --git a/pkg/services/sqlstore/migrations/login_attempt_mig.go b/pkg/services/sqlstore/migrations/login_attempt_mig.go new file mode 100644 index 00000000000..df14eb4effa --- /dev/null +++ b/pkg/services/sqlstore/migrations/login_attempt_mig.go @@ -0,0 +1,42 @@ +package migrations + +import . "github.com/grafana/grafana/pkg/services/sqlstore/migrator" + +func addLoginAttemptMigrations(mg *Migrator) { + loginAttemptV1 := Table{ + Name: "login_attempt", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "username", Type: DB_NVarchar, Length: 190, Nullable: false}, + {Name: "ip_address", Type: DB_NVarchar, Length: 30, Nullable: false}, + {Name: "created", Type: DB_DateTime, Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"username"}}, + }, + } + + // create table + mg.AddMigration("create login attempt table", NewAddTableMigration(loginAttemptV1)) + // add indices + mg.AddMigration("add index login_attempt.username", NewAddIndexMigration(loginAttemptV1, loginAttemptV1.Indices[0])) + + loginAttemptV2 := Table{ + Name: "login_attempt", + Columns: []*Column{ + {Name: "id", Type: DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true}, + {Name: "username", Type: DB_NVarchar, Length: 190, Nullable: false}, + {Name: "ip_address", Type: DB_NVarchar, Length: 30, Nullable: false}, + {Name: "created", Type: DB_Int, Default: "0", Nullable: false}, + }, + Indices: []*Index{ + {Cols: []string{"username"}}, + }, + } + + addTableReplaceMigrations(mg, loginAttemptV1, loginAttemptV2, 2, map[string]string{ + "id": "id", + "username": "username", + "ip_address": "ip_address", + }) +} diff --git a/pkg/services/sqlstore/migrations/migrations.go b/pkg/services/sqlstore/migrations/migrations.go index 8e9268779ef..282f98e7318 100644 --- a/pkg/services/sqlstore/migrations/migrations.go +++ b/pkg/services/sqlstore/migrations/migrations.go @@ -29,6 +29,7 @@ func 
AddMigrations(mg *Migrator) { addTeamMigrations(mg) addDashboardAclMigrations(mg) addTagMigration(mg) + addLoginAttemptMigrations(mg) } func addMigrationLogMigrations(mg *Migrator) { diff --git a/pkg/services/sqlstore/migrations/migrations_test.go b/pkg/services/sqlstore/migrations/migrations_test.go index 5bddf6ff605..51aea0bbdef 100644 --- a/pkg/services/sqlstore/migrations/migrations_test.go +++ b/pkg/services/sqlstore/migrations/migrations_test.go @@ -14,13 +14,15 @@ import ( var indexTypes = []string{"Unknown", "INDEX", "UNIQUE INDEX"} func TestMigrations(t *testing.T) { - //log.NewLogger(0, "console", `{"level": 0}`) - testDBs := []sqlutil.TestDB{ sqlutil.TestDB_Sqlite3, } for _, testDB := range testDBs { + sql := `select count(*) as count from migration_log` + r := struct { + Count int64 + }{} Convey("Initial "+testDB.DriverName+" migration", t, func() { x, err := xorm.NewEngine(testDB.DriverName, testDB.ConnStr) @@ -28,30 +30,31 @@ func TestMigrations(t *testing.T) { sqlutil.CleanDB(x) + has, err := x.SQL(sql).Get(&r) + So(err, ShouldNotBeNil) + mg := NewMigrator(x) AddMigrations(mg) err = mg.Start() So(err, ShouldBeNil) - // tables, err := x.DBMetas() - // So(err, ShouldBeNil) - // - // fmt.Printf("\nDB Schema after migration: table count: %v\n", len(tables)) - // - // for _, table := range tables { - // fmt.Printf("\nTable: %v \n", table.Name) - // for _, column := range table.Columns() { - // fmt.Printf("\t %v \n", column.String(x.Dialect())) - // } - // - // if len(table.Indexes) > 0 { - // fmt.Printf("\n\tIndexes:\n") - // for _, index := range table.Indexes { - // fmt.Printf("\t %v (%v) %v \n", index.Name, strings.Join(index.Cols, ","), indexTypes[index.Type]) - // } - // } - // } + has, err = x.SQL(sql).Get(&r) + So(err, ShouldBeNil) + So(has, ShouldBeTrue) + expectedMigrations := mg.MigrationsCount() - 2 //we currently skip to migrations. We should rewrite skipped migrations to write in the log as well. 
until then we have to keep this + So(r.Count, ShouldEqual, expectedMigrations) + + mg = NewMigrator(x) + AddMigrations(mg) + + err = mg.Start() + So(err, ShouldBeNil) + + has, err = x.SQL(sql).Get(&r) + So(err, ShouldBeNil) + So(has, ShouldBeTrue) + So(r.Count, ShouldEqual, expectedMigrations) }) } } diff --git a/pkg/services/sqlstore/migrator/dialect.go b/pkg/services/sqlstore/migrator/dialect.go index 651405921d9..064b5981063 100644 --- a/pkg/services/sqlstore/migrator/dialect.go +++ b/pkg/services/sqlstore/migrator/dialect.go @@ -19,6 +19,7 @@ type Dialect interface { LikeStr() string Default(col *Column) string BooleanStr(bool) string + DateTimeFunc(string) string CreateIndexSql(tableName string, index *Index) string CreateTableSql(table *Table) string @@ -78,6 +79,10 @@ func (b *BaseDialect) Default(col *Column) string { return col.Default } +func (db *BaseDialect) DateTimeFunc(value string) string { + return value +} + func (b *BaseDialect) CreateTableSql(table *Table) string { var sql string sql = "CREATE TABLE IF NOT EXISTS " diff --git a/pkg/services/sqlstore/migrator/migrator.go b/pkg/services/sqlstore/migrator/migrator.go index 64831ee46b4..a8bd36ac8a3 100644 --- a/pkg/services/sqlstore/migrator/migrator.go +++ b/pkg/services/sqlstore/migrator/migrator.go @@ -35,6 +35,10 @@ func NewMigrator(engine *xorm.Engine) *Migrator { return mg } +func (mg *Migrator) MigrationsCount() int { + return len(mg.migrations) +} + func (mg *Migrator) AddMigration(id string, m Migration) { m.SetId(id) mg.migrations = append(mg.migrations, m) diff --git a/pkg/services/sqlstore/migrator/sqlite_dialect.go b/pkg/services/sqlstore/migrator/sqlite_dialect.go index fe1e781c8df..1a31cee4f5e 100644 --- a/pkg/services/sqlstore/migrator/sqlite_dialect.go +++ b/pkg/services/sqlstore/migrator/sqlite_dialect.go @@ -36,6 +36,10 @@ func (db *Sqlite3) BooleanStr(value bool) string { return "0" } +func (db *Sqlite3) DateTimeFunc(value string) string { + return "datetime(" + value + ")" +} + 
func (db *Sqlite3) SqlType(c *Column) string { switch c.Type { case DB_Date, DB_DateTime, DB_TimeStamp, DB_Time: diff --git a/pkg/services/sqlstore/org_test.go b/pkg/services/sqlstore/org_test.go index 59d96c4f8ca..c57d15a48d5 100644 --- a/pkg/services/sqlstore/org_test.go +++ b/pkg/services/sqlstore/org_test.go @@ -123,6 +123,31 @@ func TestAccountDataAccess(t *testing.T) { So(query.Result[0].Role, ShouldEqual, "Admin") }) + Convey("Can get organization users with query", func() { + query := m.GetOrgUsersQuery{ + OrgId: ac1.OrgId, + Query: "ac1", + } + err := GetOrgUsers(&query) + + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Email, ShouldEqual, ac1.Email) + }) + + Convey("Can get organization users with query and limit", func() { + query := m.GetOrgUsersQuery{ + OrgId: ac1.OrgId, + Query: "ac", + Limit: 1, + } + err := GetOrgUsers(&query) + + So(err, ShouldBeNil) + So(len(query.Result), ShouldEqual, 1) + So(query.Result[0].Email, ShouldEqual, ac1.Email) + }) + Convey("Can set using org", func() { cmd := m.SetUsingOrgCommand{UserId: ac2.Id, OrgId: ac1.Id} err := SetUsingOrg(&cmd) @@ -174,10 +199,13 @@ func TestAccountDataAccess(t *testing.T) { So(err, ShouldBeNil) So(len(query.Result), ShouldEqual, 3) - err = SetDashboardAcl(&m.SetDashboardAclCommand{DashboardId: 1, OrgId: ac1.OrgId, UserId: ac3.Id, Permission: m.PERMISSION_EDIT}) + dash1 := insertTestDashboard("1 test dash", ac1.OrgId, 0, false, "prod", "webapp") + dash2 := insertTestDashboard("2 test dash", ac3.OrgId, 0, false, "prod", "webapp") + + err = testHelperUpdateDashboardAcl(dash1.Id, m.DashboardAcl{DashboardId: dash1.Id, OrgId: ac1.OrgId, UserId: ac3.Id, Permission: m.PERMISSION_EDIT}) So(err, ShouldBeNil) - err = SetDashboardAcl(&m.SetDashboardAclCommand{DashboardId: 2, OrgId: ac3.OrgId, UserId: ac3.Id, Permission: m.PERMISSION_EDIT}) + err = testHelperUpdateDashboardAcl(dash2.Id, m.DashboardAcl{DashboardId: dash2.Id, OrgId: ac3.OrgId, UserId: ac3.Id, Permission: 
m.PERMISSION_EDIT}) So(err, ShouldBeNil) Convey("When org user is deleted", func() { @@ -209,3 +237,11 @@ func TestAccountDataAccess(t *testing.T) { }) }) } + +func testHelperUpdateDashboardAcl(dashboardId int64, items ...m.DashboardAcl) error { + cmd := m.UpdateDashboardAclCommand{DashboardId: dashboardId} + for _, item := range items { + cmd.Items = append(cmd.Items, &item) + } + return UpdateDashboardAcl(&cmd) +} diff --git a/pkg/services/sqlstore/org_users.go b/pkg/services/sqlstore/org_users.go index 2c2a51fd362..0b991c73c55 100644 --- a/pkg/services/sqlstore/org_users.go +++ b/pkg/services/sqlstore/org_users.go @@ -2,6 +2,7 @@ package sqlstore import ( "fmt" + "strings" "time" "github.com/grafana/grafana/pkg/bus" @@ -69,9 +70,30 @@ func UpdateOrgUser(cmd *m.UpdateOrgUserCommand) error { func GetOrgUsers(query *m.GetOrgUsersQuery) error { query.Result = make([]*m.OrgUserDTO, 0) + sess := x.Table("org_user") sess.Join("INNER", "user", fmt.Sprintf("org_user.user_id=%s.id", x.Dialect().Quote("user"))) - sess.Where("org_user.org_id=?", query.OrgId) + + whereConditions := make([]string, 0) + whereParams := make([]interface{}, 0) + + whereConditions = append(whereConditions, "org_user.org_id = ?") + whereParams = append(whereParams, query.OrgId) + + if query.Query != "" { + queryWithWildcards := "%" + query.Query + "%" + whereConditions = append(whereConditions, "(email "+dialect.LikeStr()+" ? OR name "+dialect.LikeStr()+" ? OR login "+dialect.LikeStr()+" ?)") + whereParams = append(whereParams, queryWithWildcards, queryWithWildcards, queryWithWildcards) + } + + if len(whereConditions) > 0 { + sess.Where(strings.Join(whereConditions, " AND "), whereParams...) 
+ } + + if query.Limit > 0 { + sess.Limit(query.Limit, 0) + } + sess.Cols("org_user.org_id", "org_user.user_id", "user.email", "user.login", "org_user.role", "user.last_seen_at") sess.Asc("user.email", "user.login") diff --git a/pkg/services/sqlstore/playlist.go b/pkg/services/sqlstore/playlist.go index 72f3079db8d..67720cbadb8 100644 --- a/pkg/services/sqlstore/playlist.go +++ b/pkg/services/sqlstore/playlist.go @@ -1,8 +1,6 @@ package sqlstore import ( - "fmt" - "github.com/grafana/grafana/pkg/bus" m "github.com/grafana/grafana/pkg/models" ) @@ -17,17 +15,13 @@ func init() { } func CreatePlaylist(cmd *m.CreatePlaylistCommand) error { - var err error - playlist := m.Playlist{ Name: cmd.Name, Interval: cmd.Interval, OrgId: cmd.OrgId, } - _, err = x.Insert(&playlist) - - fmt.Printf("%v", playlist.Id) + _, err := x.Insert(&playlist) playlistItems := make([]m.PlaylistItem, 0) for _, item := range cmd.Items { @@ -47,7 +41,6 @@ func CreatePlaylist(cmd *m.CreatePlaylistCommand) error { } func UpdatePlaylist(cmd *m.UpdatePlaylistCommand) error { - var err error playlist := m.Playlist{ Id: cmd.Id, OrgId: cmd.OrgId, @@ -68,7 +61,7 @@ func UpdatePlaylist(cmd *m.UpdatePlaylistCommand) error { Interval: playlist.Interval, } - _, err = x.Id(cmd.Id).Cols("id", "name", "interval").Update(&playlist) + _, err := x.ID(cmd.Id).Cols("id", "name", "interval").Update(&playlist) if err != nil { return err @@ -104,7 +97,7 @@ func GetPlaylist(query *m.GetPlaylistByIdQuery) error { } playlist := m.Playlist{} - _, err := x.Id(query.Id).Get(&playlist) + _, err := x.ID(query.Id).Get(&playlist) query.Result = &playlist diff --git a/pkg/services/sqlstore/search_builder.go b/pkg/services/sqlstore/search_builder.go index ddf192da5ff..ddfbfbfc551 100644 --- a/pkg/services/sqlstore/search_builder.go +++ b/pkg/services/sqlstore/search_builder.go @@ -1,7 +1,6 @@ package sqlstore import ( - "bytes" "strings" m "github.com/grafana/grafana/pkg/models" @@ -9,6 +8,7 @@ import ( // SearchBuilder is a 
builder/object mother that builds a dashboard search query type SearchBuilder struct { + SqlBuilder tags []string isStarred bool limit int @@ -18,14 +18,14 @@ type SearchBuilder struct { whereTypeFolder bool whereTypeDash bool whereFolderIds []int64 - sql bytes.Buffer - params []interface{} + permission m.PermissionType } -func NewSearchBuilder(signedInUser *m.SignedInUser, limit int) *SearchBuilder { +func NewSearchBuilder(signedInUser *m.SignedInUser, limit int, permission m.PermissionType) *SearchBuilder { searchBuilder := &SearchBuilder{ signedInUser: signedInUser, limit: limit, + permission: permission, } return searchBuilder @@ -101,11 +101,13 @@ func (sb *SearchBuilder) buildSelect() { sb.sql.WriteString( `SELECT dashboard.id, + dashboard.uid, dashboard.title, dashboard.slug, dashboard_tag.term, dashboard.is_folder, dashboard.folder_id, + folder.uid as folder_uid, folder.slug as folder_slug, folder.title as folder_title FROM `) @@ -151,10 +153,7 @@ func (sb *SearchBuilder) buildMainQuery() { sb.sql.WriteString(` WHERE `) sb.buildSearchWhereClause() - sb.sql.WriteString(` - LIMIT ?) as ids - INNER JOIN dashboard on ids.id = dashboard.id - `) + sb.sql.WriteString(` LIMIT ?) as ids INNER JOIN dashboard on ids.id = dashboard.id `) sb.params = append(sb.params, sb.limit) } @@ -174,23 +173,7 @@ func (sb *SearchBuilder) buildSearchWhereClause() { } } - if sb.signedInUser.OrgRole != m.ROLE_ADMIN { - allowedDashboardsSubQuery := ` AND (dashboard.has_acl = ` + dialect.BooleanStr(false) + ` OR dashboard.id in ( - SELECT distinct d.id AS DashboardId - FROM dashboard AS d - LEFT JOIN dashboard_acl as da on d.folder_id = da.dashboard_id or d.id = da.dashboard_id - LEFT JOIN team_member as ugm on ugm.team_id = da.team_id - LEFT JOIN org_user ou on ou.role = da.role - WHERE - d.has_acl = ` + dialect.BooleanStr(true) + ` and - (da.user_id = ? or ugm.user_id = ? or ou.id is not null) - and d.org_id = ? 
- ) - )` - - sb.sql.WriteString(allowedDashboardsSubQuery) - sb.params = append(sb.params, sb.signedInUser.UserId, sb.signedInUser.UserId, sb.signedInUser.OrgId) - } + sb.writeDashboardPermissionFilter(sb.signedInUser, sb.permission) if len(sb.whereTitle) > 0 { sb.sql.WriteString(" AND dashboard.title " + dialect.LikeStr() + " ?") diff --git a/pkg/services/sqlstore/search_builder_test.go b/pkg/services/sqlstore/search_builder_test.go index 32ccbc583f5..e8b02c445ec 100644 --- a/pkg/services/sqlstore/search_builder_test.go +++ b/pkg/services/sqlstore/search_builder_test.go @@ -16,7 +16,8 @@ func TestSearchBuilder(t *testing.T) { OrgId: 1, UserId: 1, } - sb := NewSearchBuilder(signedInUser, 1000) + + sb := NewSearchBuilder(signedInUser, 1000, m.PERMISSION_VIEW) Convey("When building a normal search", func() { sql, params := sb.IsStarred().WithTitle("test").ToSql() diff --git a/pkg/services/sqlstore/sqlbuilder.go b/pkg/services/sqlstore/sqlbuilder.go new file mode 100644 index 00000000000..b42c7926203 --- /dev/null +++ b/pkg/services/sqlstore/sqlbuilder.go @@ -0,0 +1,75 @@ +package sqlstore + +import ( + "bytes" + "strings" + + m "github.com/grafana/grafana/pkg/models" +) + +type SqlBuilder struct { + sql bytes.Buffer + params []interface{} +} + +func (sb *SqlBuilder) Write(sql string, params ...interface{}) { + sb.sql.WriteString(sql) + + if len(params) > 0 { + sb.params = append(sb.params, params...) + } +} + +func (sb *SqlBuilder) GetSqlString() string { + return sb.sql.String() +} + +func (sb *SqlBuilder) AddParams(params ...interface{}) { + sb.params = append(sb.params, params...) 
+} + +func (sb *SqlBuilder) writeDashboardPermissionFilter(user *m.SignedInUser, permission m.PermissionType) { + + if user.OrgRole == m.ROLE_ADMIN { + return + } + + okRoles := []interface{}{user.OrgRole} + + if user.OrgRole == m.ROLE_EDITOR { + okRoles = append(okRoles, m.ROLE_VIEWER) + } + + falseStr := dialect.BooleanStr(false) + + sb.sql.WriteString(` AND + ( + dashboard.id IN ( + SELECT distinct d.id AS DashboardId + FROM dashboard AS d + LEFT JOIN dashboard folder on folder.id = d.folder_id + LEFT JOIN dashboard_acl AS da ON + da.dashboard_id = d.id OR + da.dashboard_id = d.folder_id OR + ( + -- include default permissions --> + da.org_id = -1 AND ( + (folder.id IS NOT NULL AND folder.has_acl = ` + falseStr + `) OR + (folder.id IS NULL AND d.has_acl = ` + falseStr + `) + ) + ) + LEFT JOIN team_member as ugm on ugm.team_id = da.team_id + WHERE + d.org_id = ? AND + da.permission >= ? AND + ( + da.user_id = ? OR + ugm.user_id = ? OR + da.role IN (?` + strings.Repeat(",?", len(okRoles)-1) + `) + ) + ) + )`) + + sb.params = append(sb.params, user.OrgId, permission, user.UserId, user.UserId) + sb.params = append(sb.params, okRoles...) 
+} diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index 8558fb4506d..c00a55667d1 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -7,14 +7,15 @@ import ( "path" "path/filepath" "strings" + "testing" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/log" m "github.com/grafana/grafana/pkg/models" "github.com/grafana/grafana/pkg/services/annotations" - "github.com/grafana/grafana/pkg/services/dashboards" "github.com/grafana/grafana/pkg/services/sqlstore/migrations" "github.com/grafana/grafana/pkg/services/sqlstore/migrator" + "github.com/grafana/grafana/pkg/services/sqlstore/sqlutil" "github.com/grafana/grafana/pkg/setting" "github.com/go-sql-driver/mysql" @@ -103,7 +104,6 @@ func SetEngine(engine *xorm.Engine) (err error) { // Init repo instances annotations.SetRepository(&SqlAnnotationRepo{}) - dashboards.SetRepository(&dashboards.DashboardRepository{}) return nil } @@ -218,3 +218,46 @@ func LoadConfig() { DbCfg.ServerCertName = sec.Key("server_cert_name").String() DbCfg.Path = sec.Key("path").MustString("data/grafana.db") } + +var ( + dbSqlite = "sqlite" + dbMySql = "mysql" + dbPostgres = "postgres" +) + +func InitTestDB(t *testing.T) *xorm.Engine { + selectedDb := dbSqlite + //selectedDb := dbMySql + //selectedDb := dbPostgres + + var x *xorm.Engine + var err error + + // environment variable present for test db? 
+ if db, present := os.LookupEnv("GRAFANA_TEST_DB"); present { + selectedDb = db + } + + switch strings.ToLower(selectedDb) { + case dbMySql: + x, err = xorm.NewEngine(sqlutil.TestDB_Mysql.DriverName, sqlutil.TestDB_Mysql.ConnStr) + case dbPostgres: + x, err = xorm.NewEngine(sqlutil.TestDB_Postgres.DriverName, sqlutil.TestDB_Postgres.ConnStr) + default: + x, err = xorm.NewEngine(sqlutil.TestDB_Sqlite3.DriverName, sqlutil.TestDB_Sqlite3.ConnStr) + } + + // x.ShowSQL() + + if err != nil { + t.Fatalf("Failed to init in memory sqllite3 db %v", err) + } + + sqlutil.CleanDB(x) + + if err := SetEngine(x); err != nil { + t.Fatal(err) + } + + return x +} diff --git a/pkg/services/sqlstore/sqlutil/sqlutil.go b/pkg/services/sqlstore/sqlutil/sqlutil.go index d9ec617f135..7daf077a509 100644 --- a/pkg/services/sqlstore/sqlutil/sqlutil.go +++ b/pkg/services/sqlstore/sqlutil/sqlutil.go @@ -11,7 +11,7 @@ type TestDB struct { ConnStr string } -var TestDB_Sqlite3 = TestDB{DriverName: "sqlite3", ConnStr: ":memory:?_loc=Local"} +var TestDB_Sqlite3 = TestDB{DriverName: "sqlite3", ConnStr: ":memory:"} var TestDB_Mysql = TestDB{DriverName: "mysql", ConnStr: "grafana:password@tcp(localhost:3306)/grafana_tests?collation=utf8mb4_unicode_ci"} var TestDB_Postgres = TestDB{DriverName: "postgres", ConnStr: "user=grafanatest password=grafanatest host=localhost port=5432 dbname=grafanatest sslmode=disable"} var TestDB_Mssql = TestDB{DriverName: "mssql", ConnStr: "server=localhost;port=1433;database=grafana_tests;user id=grafana;password=password"} diff --git a/pkg/services/sqlstore/stats.go b/pkg/services/sqlstore/stats.go index aa01c8a3761..cfe2d88c82c 100644 --- a/pkg/services/sqlstore/stats.go +++ b/pkg/services/sqlstore/stats.go @@ -13,12 +13,12 @@ func init() { bus.AddHandler("sql", GetAdminStats) } -var activeUserTimeLimit time.Duration = time.Hour * 24 * 14 +var activeUserTimeLimit time.Duration = time.Hour * 24 * 30 func GetDataSourceStats(query *m.GetDataSourceStatsQuery) error { var 
rawSql = `SELECT COUNT(*) as count, type FROM data_source GROUP BY type` query.Result = make([]*m.DataSourceStats, 0) - err := x.Sql(rawSql).Find(&query.Result) + err := x.SQL(rawSql).Find(&query.Result) if err != nil { return err } @@ -30,36 +30,39 @@ func GetSystemStats(query *m.GetSystemStatsQuery) error { var rawSql = `SELECT ( SELECT COUNT(*) - FROM ` + dialect.Quote("user") + ` - ) AS users, + FROM ` + dialect.Quote("user") + ` + ) AS users, ( SELECT COUNT(*) - FROM ` + dialect.Quote("org") + ` - ) AS orgs, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("dashboard") + ` - ) AS dashboards, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("data_source") + ` - ) AS datasources, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("playlist") + ` - ) AS playlists, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("alert") + ` - ) AS alerts, + FROM ` + dialect.Quote("org") + ` + ) AS orgs, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("dashboard") + ` + ) AS dashboards, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("data_source") + ` + ) AS datasources, + ( + SELECT COUNT(*) FROM ` + dialect.Quote("star") + ` + ) AS stars, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("playlist") + ` + ) AS playlists, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("alert") + ` + ) AS alerts, ( SELECT COUNT(*) FROM ` + dialect.Quote("user") + ` where last_seen_at > ? 
- ) as active_users + ) as active_users ` activeUserDeadlineDate := time.Now().Add(-activeUserTimeLimit) var stats m.SystemStats - _, err := x.Sql(rawSql, activeUserDeadlineDate).Get(&stats) + _, err := x.SQL(rawSql, activeUserDeadlineDate).Get(&stats) if err != nil { return err } @@ -70,51 +73,51 @@ func GetSystemStats(query *m.GetSystemStatsQuery) error { func GetAdminStats(query *m.GetAdminStatsQuery) error { var rawSql = `SELECT - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("user") + ` - ) AS users, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("org") + ` - ) AS orgs, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("dashboard") + ` - ) AS dashboards, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("dashboard_snapshot") + ` - ) AS snapshots, - ( - SELECT COUNT( DISTINCT ( ` + dialect.Quote("term") + ` )) - FROM ` + dialect.Quote("dashboard_tag") + ` - ) AS tags, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("data_source") + ` - ) AS datasources, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("playlist") + ` - ) AS playlists, - ( - SELECT COUNT(*) FROM ` + dialect.Quote("star") + ` - ) AS stars, - ( - SELECT COUNT(*) - FROM ` + dialect.Quote("alert") + ` - ) AS alerts, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("user") + ` + ) AS users, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("org") + ` + ) AS orgs, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("dashboard") + ` + ) AS dashboards, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("dashboard_snapshot") + ` + ) AS snapshots, + ( + SELECT COUNT( DISTINCT ( ` + dialect.Quote("term") + ` )) + FROM ` + dialect.Quote("dashboard_tag") + ` + ) AS tags, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("data_source") + ` + ) AS datasources, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("playlist") + ` + ) AS playlists, + ( + SELECT COUNT(*) FROM ` + dialect.Quote("star") + ` + ) AS stars, + ( + SELECT COUNT(*) + FROM ` + dialect.Quote("alert") + ` + ) AS alerts, ( SELECT COUNT(*) - from ` + dialect.Quote("user") + ` 
where last_seen_at > ? + from ` + dialect.Quote("user") + ` where last_seen_at > ? ) as active_users - ` + ` activeUserDeadlineDate := time.Now().Add(-activeUserTimeLimit) var stats m.AdminStats - _, err := x.Sql(rawSql, activeUserDeadlineDate).Get(&stats) + _, err := x.SQL(rawSql, activeUserDeadlineDate).Get(&stats) if err != nil { return err } diff --git a/pkg/services/sqlstore/team.go b/pkg/services/sqlstore/team.go index 98bb1a36eb9..d238301c7ce 100644 --- a/pkg/services/sqlstore/team.go +++ b/pkg/services/sqlstore/team.go @@ -25,7 +25,7 @@ func init() { func CreateTeam(cmd *m.CreateTeamCommand) error { return inTransaction(func(sess *DBSession) error { - if isNameTaken, err := isTeamNameTaken(cmd.Name, 0, sess); err != nil { + if isNameTaken, err := isTeamNameTaken(cmd.OrgId, cmd.Name, 0, sess); err != nil { return err } else if isNameTaken { return m.ErrTeamNameTaken @@ -50,7 +50,7 @@ func CreateTeam(cmd *m.CreateTeamCommand) error { func UpdateTeam(cmd *m.UpdateTeamCommand) error { return inTransaction(func(sess *DBSession) error { - if isNameTaken, err := isTeamNameTaken(cmd.Name, cmd.Id, sess); err != nil { + if isNameTaken, err := isTeamNameTaken(cmd.OrgId, cmd.Name, cmd.Id, sess); err != nil { return err } else if isNameTaken { return m.ErrTeamNameTaken @@ -78,22 +78,23 @@ func UpdateTeam(cmd *m.UpdateTeamCommand) error { }) } +// DeleteTeam will delete a team, its member and any permissions connected to the team func DeleteTeam(cmd *m.DeleteTeamCommand) error { return inTransaction(func(sess *DBSession) error { - if res, err := sess.Query("SELECT 1 from team WHERE id=?", cmd.Id); err != nil { + if teamExists, err := teamExists(cmd.OrgId, cmd.Id, sess); err != nil { return err - } else if len(res) != 1 { + } else if !teamExists { return m.ErrTeamNotFound } deletes := []string{ - "DELETE FROM team_member WHERE team_id = ?", - "DELETE FROM team WHERE id = ?", - "DELETE FROM dashboard_acl WHERE team_id = ?", + "DELETE FROM team_member WHERE org_id=? 
and team_id = ?", + "DELETE FROM team WHERE org_id=? and id = ?", + "DELETE FROM dashboard_acl WHERE org_id=? and team_id = ?", } for _, sql := range deletes { - _, err := sess.Exec(sql, cmd.Id) + _, err := sess.Exec(sql, cmd.OrgId, cmd.Id) if err != nil { return err } @@ -102,9 +103,19 @@ func DeleteTeam(cmd *m.DeleteTeamCommand) error { }) } -func isTeamNameTaken(name string, existingId int64, sess *DBSession) (bool, error) { +func teamExists(orgId int64, teamId int64, sess *DBSession) (bool, error) { + if res, err := sess.Query("SELECT 1 from team WHERE org_id=? and id=?", orgId, teamId); err != nil { + return false, err + } else if len(res) != 1 { + return false, nil + } + + return true, nil +} + +func isTeamNameTaken(orgId int64, name string, existingId int64, sess *DBSession) (bool, error) { var team m.Team - exists, err := sess.Where("name=?", name).Get(&team) + exists, err := sess.Where("org_id=? and name=?", orgId, name).Get(&team) if err != nil { return false, nil @@ -128,6 +139,7 @@ func SearchTeams(query *m.SearchTeamsQuery) error { sql.WriteString(`select team.id as id, + team.org_id, team.name as name, team.email as email, (select count(*) from team_member where team_member.team_id = team.id) as member_count @@ -176,7 +188,7 @@ func SearchTeams(query *m.SearchTeamsQuery) error { func GetTeamById(query *m.GetTeamByIdQuery) error { var team m.Team - exists, err := x.Id(query.Id).Get(&team) + exists, err := x.Where("org_id=? and id=?", query.OrgId, query.Id).Get(&team) if err != nil { return err } @@ -189,12 +201,13 @@ func GetTeamById(query *m.GetTeamByIdQuery) error { return nil } +// GetTeamsByUser is used by the Guardian when checking a users' permissions func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { query.Result = make([]*m.Team, 0) sess := x.Table("team") sess.Join("INNER", "team_member", "team.id=team_member.team_id") - sess.Where("team_member.user_id=?", query.UserId) + sess.Where("team.org_id=? 
and team_member.user_id=?", query.OrgId, query.UserId) err := sess.Find(&query.Result) if err != nil { @@ -204,17 +217,18 @@ func GetTeamsByUser(query *m.GetTeamsByUserQuery) error { return nil } +// AddTeamMember adds a user to a team func AddTeamMember(cmd *m.AddTeamMemberCommand) error { return inTransaction(func(sess *DBSession) error { - if res, err := sess.Query("SELECT 1 from team_member WHERE team_id=? and user_id=?", cmd.TeamId, cmd.UserId); err != nil { + if res, err := sess.Query("SELECT 1 from team_member WHERE org_id=? and team_id=? and user_id=?", cmd.OrgId, cmd.TeamId, cmd.UserId); err != nil { return err } else if len(res) == 1 { return m.ErrTeamMemberAlreadyAdded } - if res, err := sess.Query("SELECT 1 from team WHERE id=?", cmd.TeamId); err != nil { + if teamExists, err := teamExists(cmd.OrgId, cmd.TeamId, sess); err != nil { return err - } else if len(res) != 1 { + } else if !teamExists { return m.ErrTeamNotFound } @@ -231,23 +245,35 @@ func AddTeamMember(cmd *m.AddTeamMemberCommand) error { }) } +// RemoveTeamMember removes a member from a team func RemoveTeamMember(cmd *m.RemoveTeamMemberCommand) error { return inTransaction(func(sess *DBSession) error { - var rawSql = "DELETE FROM team_member WHERE team_id=? and user_id=?" - _, err := sess.Exec(rawSql, cmd.TeamId, cmd.UserId) + if teamExists, err := teamExists(cmd.OrgId, cmd.TeamId, sess); err != nil { + return err + } else if !teamExists { + return m.ErrTeamNotFound + } + + var rawSql = "DELETE FROM team_member WHERE org_id=? and team_id=? and user_id=?" 
+ res, err := sess.Exec(rawSql, cmd.OrgId, cmd.TeamId, cmd.UserId) if err != nil { return err } + rows, err := res.RowsAffected() + if rows == 0 { + return m.ErrTeamMemberNotFound + } return err }) } +// GetTeamMembers return a list of members for the specified team func GetTeamMembers(query *m.GetTeamMembersQuery) error { query.Result = make([]*m.TeamMemberDTO, 0) sess := x.Table("team_member") sess.Join("INNER", "user", fmt.Sprintf("team_member.user_id=%s.id", x.Dialect().Quote("user"))) - sess.Where("team_member.team_id=?", query.TeamId) + sess.Where("team_member.org_id=? and team_member.team_id=?", query.OrgId, query.TeamId) sess.Cols("user.org_id", "team_member.team_id", "team_member.user_id", "user.email", "user.login") sess.Asc("user.login", "user.email") diff --git a/pkg/services/sqlstore/team_test.go b/pkg/services/sqlstore/team_test.go index dbae4545266..f136411eeba 100644 --- a/pkg/services/sqlstore/team_test.go +++ b/pkg/services/sqlstore/team_test.go @@ -27,8 +27,9 @@ func TestTeamCommandsAndQueries(t *testing.T) { userIds = append(userIds, userCmd.Result.Id) } - group1 := m.CreateTeamCommand{Name: "group1 name", Email: "test1@test.com"} - group2 := m.CreateTeamCommand{Name: "group2 name", Email: "test2@test.com"} + var testOrgId int64 = 1 + group1 := m.CreateTeamCommand{OrgId: testOrgId, Name: "group1 name", Email: "test1@test.com"} + group2 := m.CreateTeamCommand{OrgId: testOrgId, Name: "group2 name", Email: "test2@test.com"} err := CreateTeam(&group1) So(err, ShouldBeNil) @@ -36,7 +37,7 @@ func TestTeamCommandsAndQueries(t *testing.T) { So(err, ShouldBeNil) Convey("Should be able to create teams and add users", func() { - query := &m.SearchTeamsQuery{Name: "group1 name", Page: 1, Limit: 10} + query := &m.SearchTeamsQuery{OrgId: testOrgId, Name: "group1 name", Page: 1, Limit: 10} err = SearchTeams(query) So(err, ShouldBeNil) So(query.Page, ShouldEqual, 1) @@ -44,25 +45,27 @@ func TestTeamCommandsAndQueries(t *testing.T) { team1 := 
query.Result.Teams[0] So(team1.Name, ShouldEqual, "group1 name") So(team1.Email, ShouldEqual, "test1@test.com") + So(team1.OrgId, ShouldEqual, testOrgId) - err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: 1, TeamId: team1.Id, UserId: userIds[0]}) + err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: team1.Id, UserId: userIds[0]}) So(err, ShouldBeNil) - q1 := &m.GetTeamMembersQuery{TeamId: team1.Id} + q1 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: team1.Id} err = GetTeamMembers(q1) So(err, ShouldBeNil) So(q1.Result[0].TeamId, ShouldEqual, team1.Id) So(q1.Result[0].Login, ShouldEqual, "loginuser0") + So(q1.Result[0].OrgId, ShouldEqual, testOrgId) }) Convey("Should be able to search for teams", func() { - query := &m.SearchTeamsQuery{Query: "group", Page: 1} + query := &m.SearchTeamsQuery{OrgId: testOrgId, Query: "group", Page: 1} err = SearchTeams(query) So(err, ShouldBeNil) So(len(query.Result.Teams), ShouldEqual, 2) So(query.Result.TotalCount, ShouldEqual, 2) - query2 := &m.SearchTeamsQuery{Query: ""} + query2 := &m.SearchTeamsQuery{OrgId: testOrgId, Query: ""} err = SearchTeams(query2) So(err, ShouldBeNil) So(len(query2.Result.Teams), ShouldEqual, 2) @@ -70,9 +73,9 @@ func TestTeamCommandsAndQueries(t *testing.T) { Convey("Should be able to return all teams a user is member of", func() { groupId := group2.Result.Id - err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: 1, TeamId: groupId, UserId: userIds[0]}) + err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[0]}) - query := &m.GetTeamsByUserQuery{UserId: userIds[0]} + query := &m.GetTeamsByUserQuery{OrgId: testOrgId, UserId: userIds[0]} err = GetTeamsByUser(query) So(err, ShouldBeNil) So(len(query.Result), ShouldEqual, 1) @@ -81,31 +84,34 @@ func TestTeamCommandsAndQueries(t *testing.T) { }) Convey("Should be able to remove users from a group", func() { - err = RemoveTeamMember(&m.RemoveTeamMemberCommand{TeamId: group1.Result.Id, UserId: 
userIds[0]}) + err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0]}) So(err, ShouldBeNil) - q1 := &m.GetTeamMembersQuery{TeamId: group1.Result.Id} - err = GetTeamMembers(q1) + err = RemoveTeamMember(&m.RemoveTeamMemberCommand{OrgId: testOrgId, TeamId: group1.Result.Id, UserId: userIds[0]}) So(err, ShouldBeNil) - So(len(q1.Result), ShouldEqual, 0) + + q2 := &m.GetTeamMembersQuery{OrgId: testOrgId, TeamId: group1.Result.Id} + err = GetTeamMembers(q2) + So(err, ShouldBeNil) + So(len(q2.Result), ShouldEqual, 0) }) Convey("Should be able to remove a group with users and permissions", func() { groupId := group2.Result.Id - err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: 1, TeamId: groupId, UserId: userIds[1]}) + err := AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[1]}) So(err, ShouldBeNil) - err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: 1, TeamId: groupId, UserId: userIds[2]}) + err = AddTeamMember(&m.AddTeamMemberCommand{OrgId: testOrgId, TeamId: groupId, UserId: userIds[2]}) So(err, ShouldBeNil) - err = SetDashboardAcl(&m.SetDashboardAclCommand{DashboardId: 1, OrgId: 1, Permission: m.PERMISSION_EDIT, TeamId: groupId}) + err = testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: testOrgId, Permission: m.PERMISSION_EDIT, TeamId: groupId}) - err = DeleteTeam(&m.DeleteTeamCommand{Id: groupId}) + err = DeleteTeam(&m.DeleteTeamCommand{OrgId: testOrgId, Id: groupId}) So(err, ShouldBeNil) - query := &m.GetTeamByIdQuery{Id: groupId} + query := &m.GetTeamByIdQuery{OrgId: testOrgId, Id: groupId} err = GetTeamById(query) So(err, ShouldEqual, m.ErrTeamNotFound) - permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: 1} + permQuery := &m.GetDashboardAclInfoListQuery{DashboardId: 1, OrgId: testOrgId} err = GetDashboardAclInfoList(permQuery) So(err, ShouldBeNil) diff --git a/pkg/services/sqlstore/user_test.go b/pkg/services/sqlstore/user_test.go 
index a65b7226eb6..2830733c96a 100644 --- a/pkg/services/sqlstore/user_test.go +++ b/pkg/services/sqlstore/user_test.go @@ -99,7 +99,7 @@ func TestUserDataAccess(t *testing.T) { err = AddOrgUser(&m.AddOrgUserCommand{LoginOrEmail: users[0].Login, Role: m.ROLE_VIEWER, OrgId: users[0].OrgId}) So(err, ShouldBeNil) - err = SetDashboardAcl(&m.SetDashboardAclCommand{DashboardId: 1, OrgId: users[0].OrgId, UserId: users[0].Id, Permission: m.PERMISSION_EDIT}) + testHelperUpdateDashboardAcl(1, m.DashboardAcl{DashboardId: 1, OrgId: users[0].OrgId, UserId: users[0].Id, Permission: m.PERMISSION_EDIT}) So(err, ShouldBeNil) err = SavePreferences(&m.SavePreferencesCommand{UserId: users[0].Id, OrgId: users[0].OrgId, HomeDashboardId: 1, Theme: "dark"}) diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 8cdb94bd413..6099388f668 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -75,19 +75,19 @@ var ( EnforceDomain bool // Security settings. - SecretKey string - LogInRememberDays int - CookieUserName string - CookieRememberName string - DisableGravatar bool - EmailCodeValidMinutes int - DataProxyWhiteList map[string]bool + SecretKey string + LogInRememberDays int + CookieUserName string + CookieRememberName string + DisableGravatar bool + EmailCodeValidMinutes int + DataProxyWhiteList map[string]bool + DisableBruteForceLoginProtection bool // Snapshots ExternalSnapshotUrl string ExternalSnapshotName string ExternalEnabled bool - SnapShotTTLDays int SnapShotRemoveExpired bool // Dashboard history @@ -514,6 +514,7 @@ func NewConfigContext(args *CommandLineArgs) error { CookieUserName = security.Key("cookie_username").String() CookieRememberName = security.Key("cookie_remember_name").String() DisableGravatar = security.Key("disable_gravatar").MustBool(true) + DisableBruteForceLoginProtection = security.Key("disable_brute_force_login_protection").MustBool(false) // read snapshots settings snapshots := Cfg.Section("snapshots") @@ -521,7 +522,6 @@ func 
NewConfigContext(args *CommandLineArgs) error { ExternalSnapshotName = snapshots.Key("external_snapshot_name").String() ExternalEnabled = snapshots.Key("external_enabled").MustBool(true) SnapShotRemoveExpired = snapshots.Key("snapshot_remove_expired").MustBool(true) - SnapShotTTLDays = snapshots.Key("snapshot_TTL_days").MustInt(90) // read dashboard settings dashboards := Cfg.Section("dashboards") @@ -578,7 +578,7 @@ func NewConfigContext(args *CommandLineArgs) error { // PhantomJS rendering ImagesDir = filepath.Join(DataPath, "png") - PhantomDir = filepath.Join(HomePath, "vendor/phantomjs") + PhantomDir = filepath.Join(HomePath, "tools/phantomjs") analytics := Cfg.Section("analytics") ReportingEnabled = analytics.Key("reporting_enabled").MustBool(true) diff --git a/pkg/social/generic_oauth.go b/pkg/social/generic_oauth.go index ec3c445ebb2..b92d64ad9fc 100644 --- a/pkg/social/generic_oauth.go +++ b/pkg/social/generic_oauth.go @@ -1,18 +1,21 @@ package social import ( + "encoding/base64" "encoding/json" "errors" "fmt" "net/http" + "net/mail" + "regexp" "github.com/grafana/grafana/pkg/models" "golang.org/x/oauth2" ) -type GenericOAuth struct { - *oauth2.Config +type SocialGenericOAuth struct { + *SocialBase allowedDomains []string allowedOrganizations []string apiUrl string @@ -20,19 +23,19 @@ type GenericOAuth struct { teamIds []int } -func (s *GenericOAuth) Type() int { +func (s *SocialGenericOAuth) Type() int { return int(models.GENERIC) } -func (s *GenericOAuth) IsEmailAllowed(email string) bool { +func (s *SocialGenericOAuth) IsEmailAllowed(email string) bool { return isEmailAllowed(email, s.allowedDomains) } -func (s *GenericOAuth) IsSignupAllowed() bool { +func (s *SocialGenericOAuth) IsSignupAllowed() bool { return s.allowSignup } -func (s *GenericOAuth) IsTeamMember(client *http.Client) bool { +func (s *SocialGenericOAuth) IsTeamMember(client *http.Client) bool { if len(s.teamIds) == 0 { return true } @@ -53,7 +56,7 @@ func (s *GenericOAuth) 
IsTeamMember(client *http.Client) bool { return false } -func (s *GenericOAuth) IsOrganizationMember(client *http.Client) bool { +func (s *SocialGenericOAuth) IsOrganizationMember(client *http.Client) bool { if len(s.allowedOrganizations) == 0 { return true } @@ -74,7 +77,7 @@ func (s *GenericOAuth) IsOrganizationMember(client *http.Client) bool { return false } -func (s *GenericOAuth) FetchPrivateEmail(client *http.Client) (string, error) { +func (s *SocialGenericOAuth) FetchPrivateEmail(client *http.Client) (string, error) { type Record struct { Email string `json:"email"` Primary bool `json:"primary"` @@ -115,7 +118,7 @@ func (s *GenericOAuth) FetchPrivateEmail(client *http.Client) (string, error) { return email, nil } -func (s *GenericOAuth) FetchTeamMemberships(client *http.Client) ([]int, error) { +func (s *SocialGenericOAuth) FetchTeamMemberships(client *http.Client) ([]int, error) { type Record struct { Id int `json:"id"` } @@ -140,7 +143,7 @@ func (s *GenericOAuth) FetchTeamMemberships(client *http.Client) ([]int, error) return ids, nil } -func (s *GenericOAuth) FetchOrganizations(client *http.Client) ([]string, error) { +func (s *SocialGenericOAuth) FetchOrganizations(client *http.Client) ([]string, error) { type Record struct { Login string `json:"login"` } @@ -165,53 +168,48 @@ func (s *GenericOAuth) FetchOrganizations(client *http.Client) ([]string, error) return logins, nil } -func (s *GenericOAuth) UserInfo(client *http.Client) (*BasicUserInfo, error) { - var data struct { - Name string `json:"name"` - DisplayName string `json:"display_name"` - Login string `json:"login"` - Username string `json:"username"` - Email string `json:"email"` - Attributes map[string][]string `json:"attributes"` +type UserInfoJson struct { + Name string `json:"name"` + DisplayName string `json:"display_name"` + Login string `json:"login"` + Username string `json:"username"` + Email string `json:"email"` + Upn string `json:"upn"` + Attributes map[string][]string 
`json:"attributes"` +} + +func (s *SocialGenericOAuth) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) { + var data UserInfoJson + var err error + + if s.extractToken(&data, token) != true { + response, err := HttpGet(client, s.apiUrl) + if err != nil { + return nil, fmt.Errorf("Error getting user info: %s", err) + } + + err = json.Unmarshal(response.Body, &data) + if err != nil { + return nil, fmt.Errorf("Error decoding user info JSON: %s", err) + } } - response, err := HttpGet(client, s.apiUrl) - if err != nil { - return nil, fmt.Errorf("Error getting user info: %s", err) - } + name := s.extractName(&data) - err = json.Unmarshal(response.Body, &data) - if err != nil { - return nil, fmt.Errorf("Error getting user info: %s", err) - } - - userInfo := &BasicUserInfo{ - Name: data.Name, - Login: data.Login, - Email: data.Email, - } - - if userInfo.Email == "" && data.Attributes["email:primary"] != nil { - userInfo.Email = data.Attributes["email:primary"][0] - } - - if userInfo.Email == "" { - userInfo.Email, err = s.FetchPrivateEmail(client) + email := s.extractEmail(&data) + if email == "" { + email, err = s.FetchPrivateEmail(client) if err != nil { return nil, err } } - if userInfo.Name == "" && data.DisplayName != "" { - userInfo.Name = data.DisplayName - } + login := s.extractLogin(&data, email) - if userInfo.Login == "" && data.Username != "" { - userInfo.Login = data.Username - } - - if userInfo.Login == "" { - userInfo.Login = data.Email + userInfo := &BasicUserInfo{ + Name: name, + Login: login, + Email: email, } if !s.IsTeamMember(client) { @@ -224,3 +222,82 @@ func (s *GenericOAuth) UserInfo(client *http.Client) (*BasicUserInfo, error) { return userInfo, nil } + +func (s *SocialGenericOAuth) extractToken(data *UserInfoJson, token *oauth2.Token) bool { + idToken := token.Extra("id_token") + if idToken == nil { + s.log.Debug("No id_token found", "token", token) + return false + } + + jwtRegexp := 
regexp.MustCompile("^([-_a-zA-Z0-9]+)[.]([-_a-zA-Z0-9]+)[.]([-_a-zA-Z0-9]+)$") + matched := jwtRegexp.FindStringSubmatch(idToken.(string)) + if matched == nil { + s.log.Debug("id_token is not in JWT format", "id_token", idToken.(string)) + return false + } + + payload, err := base64.RawURLEncoding.DecodeString(matched[2]) + if err != nil { + s.log.Error("Error base64 decoding id_token", "raw_payload", matched[2], "err", err) + return false + } + + err = json.Unmarshal(payload, data) + if err != nil { + s.log.Error("Error decoding id_token JSON", "payload", string(payload), "err", err) + return false + } + + email := s.extractEmail(data) + if email == "" { + s.log.Debug("No email found in id_token", "json", string(payload), "data", data) + return false + } + + s.log.Debug("Received id_token", "json", string(payload), "data", data) + return true +} + +func (s *SocialGenericOAuth) extractEmail(data *UserInfoJson) string { + if data.Email != "" { + return data.Email + } + + if data.Attributes["email:primary"] != nil { + return data.Attributes["email:primary"][0] + } + + if data.Upn != "" { + emailAddr, emailErr := mail.ParseAddress(data.Upn) + if emailErr == nil { + return emailAddr.Address + } + } + + return "" +} + +func (s *SocialGenericOAuth) extractLogin(data *UserInfoJson, email string) string { + if data.Login != "" { + return data.Login + } + + if data.Username != "" { + return data.Username + } + + return email +} + +func (s *SocialGenericOAuth) extractName(data *UserInfoJson) string { + if data.Name != "" { + return data.Name + } + + if data.DisplayName != "" { + return data.DisplayName + } + + return "" +} diff --git a/pkg/social/github_oauth.go b/pkg/social/github_oauth.go index 7e348e2363a..815c684cf03 100644 --- a/pkg/social/github_oauth.go +++ b/pkg/social/github_oauth.go @@ -12,7 +12,7 @@ import ( ) type SocialGithub struct { - *oauth2.Config + *SocialBase allowedDomains []string allowedOrganizations []string apiUrl string @@ -192,13 +192,12 @@ func (s 
*SocialGithub) FetchOrganizations(client *http.Client, organizationsUrl return logins, nil } -func (s *SocialGithub) UserInfo(client *http.Client) (*BasicUserInfo, error) { +func (s *SocialGithub) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) { var data struct { - Id int `json:"id"` - Login string `json:"login"` - Email string `json:"email"` - OrganizationsUrl string `json:"organizations_url"` + Id int `json:"id"` + Login string `json:"login"` + Email string `json:"email"` } response, err := HttpGet(client, s.apiUrl) @@ -217,11 +216,13 @@ func (s *SocialGithub) UserInfo(client *http.Client) (*BasicUserInfo, error) { Email: data.Email, } + organizationsUrl := fmt.Sprintf(s.apiUrl + "/orgs") + if !s.IsTeamMember(client) { return nil, ErrMissingTeamMembership } - if !s.IsOrganizationMember(client, data.OrganizationsUrl) { + if !s.IsOrganizationMember(client, organizationsUrl) { return nil, ErrMissingOrganizationMembership } diff --git a/pkg/social/google_oauth.go b/pkg/social/google_oauth.go index c44720fbe21..e9ab08305f6 100644 --- a/pkg/social/google_oauth.go +++ b/pkg/social/google_oauth.go @@ -11,7 +11,7 @@ import ( ) type SocialGoogle struct { - *oauth2.Config + *SocialBase allowedDomains []string hostedDomain string apiUrl string @@ -30,7 +30,7 @@ func (s *SocialGoogle) IsSignupAllowed() bool { return s.allowSignup } -func (s *SocialGoogle) UserInfo(client *http.Client) (*BasicUserInfo, error) { +func (s *SocialGoogle) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) { var data struct { Name string `json:"name"` Email string `json:"email"` diff --git a/pkg/social/grafana_com_oauth.go b/pkg/social/grafana_com_oauth.go index 9cc87d4b41c..d3614520d61 100644 --- a/pkg/social/grafana_com_oauth.go +++ b/pkg/social/grafana_com_oauth.go @@ -11,7 +11,7 @@ import ( ) type SocialGrafanaCom struct { - *oauth2.Config + *SocialBase url string allowedOrganizations []string allowSignup bool @@ -49,7 +49,7 @@ func (s 
*SocialGrafanaCom) IsOrganizationMember(organizations []OrgRecord) bool return false } -func (s *SocialGrafanaCom) UserInfo(client *http.Client) (*BasicUserInfo, error) { +func (s *SocialGrafanaCom) UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) { var data struct { Name string `json:"name"` Login string `json:"username"` diff --git a/pkg/social/social.go b/pkg/social/social.go index d40c0a0c965..b763e2d71b2 100644 --- a/pkg/social/social.go +++ b/pkg/social/social.go @@ -4,9 +4,11 @@ import ( "net/http" "strings" - "golang.org/x/net/context" + "context" + "golang.org/x/oauth2" + "github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/setting" "github.com/grafana/grafana/pkg/util" ) @@ -21,7 +23,7 @@ type BasicUserInfo struct { type SocialConnector interface { Type() int - UserInfo(client *http.Client) (*BasicUserInfo, error) + UserInfo(client *http.Client, token *oauth2.Token) (*BasicUserInfo, error) IsEmailAllowed(email string) bool IsSignupAllowed() bool @@ -30,6 +32,11 @@ type SocialConnector interface { Client(ctx context.Context, t *oauth2.Token) *http.Client } +type SocialBase struct { + *oauth2.Config + log log.Logger +} + type Error struct { s string } @@ -90,10 +97,15 @@ func NewOAuthService() { Scopes: info.Scopes, } + logger := log.New("oauth." + name) + // GitHub. if name == "github" { SocialMap["github"] = &SocialGithub{ - Config: &config, + SocialBase: &SocialBase{ + Config: &config, + log: logger, + }, allowedDomains: info.AllowedDomains, apiUrl: info.ApiUrl, allowSignup: info.AllowSignup, @@ -105,7 +117,10 @@ func NewOAuthService() { // Google. if name == "google" { SocialMap["google"] = &SocialGoogle{ - Config: &config, + SocialBase: &SocialBase{ + Config: &config, + log: logger, + }, allowedDomains: info.AllowedDomains, hostedDomain: info.HostedDomain, apiUrl: info.ApiUrl, @@ -115,8 +130,11 @@ func NewOAuthService() { // Generic - Uses the same scheme as Github. 
if name == "generic_oauth" { - SocialMap["generic_oauth"] = &GenericOAuth{ - Config: &config, + SocialMap["generic_oauth"] = &SocialGenericOAuth{ + SocialBase: &SocialBase{ + Config: &config, + log: logger, + }, allowedDomains: info.AllowedDomains, apiUrl: info.ApiUrl, allowSignup: info.AllowSignup, @@ -138,7 +156,10 @@ func NewOAuthService() { } SocialMap["grafana_com"] = &SocialGrafanaCom{ - Config: &config, + SocialBase: &SocialBase{ + Config: &config, + log: logger, + }, url: setting.GrafanaComUrl, allowSignup: info.AllowSignup, allowedOrganizations: util.SplitString(sec.Key("allowed_organizations").String()), diff --git a/pkg/tsdb/cloudwatch/cloudwatch.go b/pkg/tsdb/cloudwatch/cloudwatch.go index d5bdd010269..3879dce4ea6 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch.go +++ b/pkg/tsdb/cloudwatch/cloudwatch.go @@ -152,8 +152,6 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl MetricName: aws.String(query.MetricName), Dimensions: query.Dimensions, Period: aws.Int64(int64(query.Period)), - StartTime: aws.Time(startTime), - EndTime: aws.Time(endTime), } if len(query.Statistics) > 0 { params.Statistics = query.Statistics @@ -162,15 +160,36 @@ func (e *CloudWatchExecutor) executeQuery(ctx context.Context, parameters *simpl params.ExtendedStatistics = query.ExtendedStatistics } - if setting.Env == setting.DEV { - plog.Debug("CloudWatch query", "raw query", params) + // 1 minutes resolutin metrics is stored for 15 days, 15 * 24 * 60 = 21600 + if query.HighResolution && (((endTime.Unix() - startTime.Unix()) / int64(query.Period)) > 21600) { + return nil, errors.New("too long query period") } + var resp *cloudwatch.GetMetricStatisticsOutput + for startTime.Before(endTime) { + params.StartTime = aws.Time(startTime) + if query.HighResolution { + startTime = startTime.Add(time.Duration(1440*query.Period) * time.Second) + } else { + startTime = endTime + } + params.EndTime = aws.Time(startTime) - resp, err := 
client.GetMetricStatisticsWithContext(ctx, params, request.WithResponseReadTimeout(10*time.Second)) - if err != nil { - return nil, err + if setting.Env == setting.DEV { + plog.Debug("CloudWatch query", "raw query", params) + } + + partResp, err := client.GetMetricStatisticsWithContext(ctx, params, request.WithResponseReadTimeout(10*time.Second)) + if err != nil { + return nil, err + } + if resp != nil { + resp.Datapoints = append(resp.Datapoints, partResp.Datapoints...) + } else { + resp = partResp + + } + metrics.M_Aws_CloudWatch_GetMetricStatistics.Inc() } - metrics.M_Aws_CloudWatch_GetMetricStatistics.Inc() queryRes, err := parseResponse(resp, query) if err != nil { @@ -274,6 +293,8 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { alias = "{{metric}}_{{stat}}" } + highResolution := model.Get("highResolution").MustBool(false) + return &CloudWatchQuery{ Region: region, Namespace: namespace, @@ -283,6 +304,7 @@ func parseQuery(model *simplejson.Json) (*CloudWatchQuery, error) { ExtendedStatistics: aws.StringSlice(extendedStatistics), Period: period, Alias: alias, + HighResolution: highResolution, }, nil } diff --git a/pkg/tsdb/cloudwatch/cloudwatch_test.go b/pkg/tsdb/cloudwatch/cloudwatch_test.go index 5c322a44d56..719edba08ba 100644 --- a/pkg/tsdb/cloudwatch/cloudwatch_test.go +++ b/pkg/tsdb/cloudwatch/cloudwatch_test.go @@ -31,6 +31,7 @@ func TestCloudWatch(t *testing.T) { "p90.00" ], "period": "60", + "highResolution": false, "alias": "{{metric}}_{{stat}}" } ` diff --git a/pkg/tsdb/cloudwatch/metric_find_query.go b/pkg/tsdb/cloudwatch/metric_find_query.go index 1e1e855b123..c82cff390c3 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query.go +++ b/pkg/tsdb/cloudwatch/metric_find_query.go @@ -3,6 +3,7 @@ package cloudwatch import ( "context" "errors" + "fmt" "reflect" "sort" "strings" @@ -37,6 +38,7 @@ var customMetricsDimensionsMap map[string]map[string]map[string]*CustomMetricsCa func init() { metricsMap = map[string][]string{ + 
"AWS/AmazonMQ": {"CpuUtilization", "HeapUsage", "NetworkIn", "NetworkOut", "TotalMessageCount", "ConsumerCount", "EnqueueCount", "EnqueueTime", "ExpiredCount", "InflightCount", "DispatchCount", "DequeueCount", "MemoryUsage", "ProducerCount", "QueueSize"}, "AWS/ApiGateway": {"4XXError", "5XXError", "CacheHitCount", "CacheMissCount", "Count", "IntegrationLatency", "Latency"}, "AWS/ApplicationELB": {"ActiveConnectionCount", "ClientTLSNegotiationErrorCount", "HealthyHostCount", "HTTPCode_ELB_4XX_Count", "HTTPCode_ELB_5XX_Count", "HTTPCode_Target_2XX_Count", "HTTPCode_Target_3XX_Count", "HTTPCode_Target_4XX_Count", "HTTPCode_Target_5XX_Count", "IPv6ProcessedBytes", "IPv6RequestCount", "NewConnectionCount", "ProcessedBytes", "RejectedConnectionCount", "RequestCount", "RequestCountPerTarget", "TargetConnectionErrorCount", "TargetResponseTime", "TargetTLSNegotiationErrorCount", "UnHealthyHostCount"}, "AWS/AutoScaling": {"GroupMinSize", "GroupMaxSize", "GroupDesiredCapacity", "GroupInServiceInstances", "GroupPendingInstances", "GroupStandbyInstances", "GroupTerminatingInstances", "GroupTotalInstances"}, @@ -96,16 +98,19 @@ func init() { "AWS/SES": {"Bounce", "Complaint", "Delivery", "Reject", "Send"}, "AWS/SNS": {"NumberOfMessagesPublished", "PublishSize", "NumberOfNotificationsDelivered", "NumberOfNotificationsFailed"}, "AWS/SQS": {"NumberOfMessagesSent", "SentMessageSize", "NumberOfMessagesReceived", "NumberOfEmptyReceives", "NumberOfMessagesDeleted", "ApproximateAgeOfOldestMessage", "ApproximateNumberOfMessagesDelayed", "ApproximateNumberOfMessagesVisible", "ApproximateNumberOfMessagesNotVisible"}, + "AWS/States": {"ExecutionTime", "ExecutionThrottled", "ExecutionsAborted", "ExecutionsFailed", "ExecutionsStarted", "ExecutionsSucceeded", "ExecutionsTimedOut", "ActivityRunTime", "ActivityScheduleTime", "ActivityTime", "ActivitiesFailed", "ActivitiesHeartbeatTimedOut", "ActivitiesScheduled", "ActivitiesScheduled", "ActivitiesSucceeded", "ActivitiesTimedOut", 
"LambdaFunctionRunTime", "LambdaFunctionScheduleTime", "LambdaFunctionTime", "LambdaFunctionsFailed", "LambdaFunctionsHeartbeatTimedOut", "LambdaFunctionsScheduled", "LambdaFunctionsStarted", "LambdaFunctionsSucceeded", "LambdaFunctionsTimedOut"}, "AWS/StorageGateway": {"CacheHitPercent", "CachePercentUsed", "CachePercentDirty", "CloudBytesDownloaded", "CloudDownloadLatency", "CloudBytesUploaded", "UploadBufferFree", "UploadBufferPercentUsed", "UploadBufferUsed", "QueuedWrites", "ReadBytes", "ReadTime", "TotalCacheSize", "WriteBytes", "WriteTime", "TimeSinceLastRecoveryPoint", "WorkingStorageFree", "WorkingStoragePercentUsed", "WorkingStorageUsed", "CacheHitPercent", "CachePercentUsed", "CachePercentDirty", "ReadBytes", "ReadTime", "WriteBytes", "WriteTime", "QueuedWrites"}, "AWS/SWF": {"DecisionTaskScheduleToStartTime", "DecisionTaskStartToCloseTime", "DecisionTasksCompleted", "StartedDecisionTasksTimedOutOnClose", "WorkflowStartToCloseTime", "WorkflowsCanceled", "WorkflowsCompleted", "WorkflowsContinuedAsNew", "WorkflowsFailed", "WorkflowsTerminated", "WorkflowsTimedOut", "ActivityTaskScheduleToCloseTime", "ActivityTaskScheduleToStartTime", "ActivityTaskStartToCloseTime", "ActivityTasksCanceled", "ActivityTasksCompleted", "ActivityTasksFailed", "ScheduledActivityTasksTimedOutOnClose", "ScheduledActivityTasksTimedOutOnStart", "StartedActivityTasksTimedOutOnClose", "StartedActivityTasksTimedOutOnHeartbeat"}, "AWS/VPN": {"TunnelState", "TunnelDataIn", "TunnelDataOut"}, + "Rekognition": {"SuccessfulRequestCount", "ThrottledCount", "ResponseTime", "DetectedFaceCount", "DetectedLabelCount", "ServerErrorCount", "UserErrorCount"}, "WAF": {"AllowedRequests", "BlockedRequests", "CountedRequests"}, "AWS/WorkSpaces": {"Available", "Unhealthy", "ConnectionAttempt", "ConnectionSuccess", "ConnectionFailure", "SessionLaunchTime", "InSessionLatency", "SessionDisconnect"}, "KMS": {"SecondsUntilKeyMaterialExpiration"}, } dimensionsMap = map[string][]string{ + "AWS/AmazonMQ": 
{"Broker", "Topic", "Queue"}, "AWS/ApiGateway": {"ApiName", "Method", "Resource", "Stage"}, "AWS/ApplicationELB": {"LoadBalancer", "TargetGroup", "AvailabilityZone"}, "AWS/AutoScaling": {"AutoScalingGroupName"}, @@ -142,9 +147,11 @@ func init() { "AWS/SES": {}, "AWS/SNS": {"Application", "Platform", "TopicName"}, "AWS/SQS": {"QueueName"}, + "AWS/States": {"StateMachineArn", "ActivityArn", "LambdaFunctionArn"}, "AWS/StorageGateway": {"GatewayId", "GatewayName", "VolumeId"}, "AWS/SWF": {"Domain", "WorkflowTypeName", "WorkflowTypeVersion", "ActivityTypeName", "ActivityTypeVersion"}, "AWS/VPN": {"VpnId", "TunnelIpAddress"}, + "Rekognition": {}, "WAF": {"Rule", "WebACL"}, "AWS/WorkSpaces": {"DirectoryId", "WorkspaceId"}, "KMS": {"KeyId"}, @@ -185,18 +192,6 @@ func (e *CloudWatchExecutor) executeMetricFindQuery(ctx context.Context, queryCo data, err = e.handleGetEbsVolumeIds(ctx, parameters, queryContext) break case "ec2_instance_attribute": - region := parameters.Get("region").MustString() - dsInfo := e.getDsInfo(region) - cfg, err := e.getAwsConfig(dsInfo) - if err != nil { - return nil, errors.New("Failed to call ec2:DescribeInstances") - } - sess, err := session.NewSession(cfg) - if err != nil { - return nil, errors.New("Failed to call ec2:DescribeInstances") - } - e.ec2Svc = ec2.New(sess, cfg) - data, err = e.handleGetEc2InstanceAttribute(ctx, parameters, queryContext) break } @@ -224,6 +219,21 @@ func transformToTable(data []suggestData, result *tsdb.QueryResult) { result.Meta.Set("rowCount", len(data)) } +func parseMultiSelectValue(input string) []string { + trimmedInput := strings.TrimSpace(input) + + if strings.HasPrefix(trimmedInput, "{") { + values := strings.Split(strings.TrimRight(strings.TrimLeft(trimmedInput, "{"), "}"), ",") + trimValues := make([]string, len(values)) + for i, v := range values { + trimValues[i] = strings.TrimSpace(v) + } + return trimValues + } else { + return []string{trimmedInput} + } +} + // Whenever this list is updated, frontend 
list should also be updated. // Please update the region list in public/app/plugins/datasource/cloudwatch/partials/config.html func (e *CloudWatchExecutor) handleGetRegions(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { @@ -362,19 +372,44 @@ func (e *CloudWatchExecutor) handleGetDimensionValues(ctx context.Context, param return result, nil } +func (e *CloudWatchExecutor) ensureClientSession(region string) error { + if e.ec2Svc == nil { + dsInfo := e.getDsInfo(region) + cfg, err := e.getAwsConfig(dsInfo) + if err != nil { + return fmt.Errorf("Failed to call ec2:getAwsConfig, %v", err) + } + sess, err := session.NewSession(cfg) + if err != nil { + return fmt.Errorf("Failed to call ec2:NewSession, %v", err) + } + e.ec2Svc = ec2.New(sess, cfg) + } + return nil +} + func (e *CloudWatchExecutor) handleGetEbsVolumeIds(ctx context.Context, parameters *simplejson.Json, queryContext *tsdb.TsdbQuery) ([]suggestData, error) { region := parameters.Get("region").MustString() instanceId := parameters.Get("instanceId").MustString() - instanceIds := []*string{aws.String(instanceId)} + err := e.ensureClientSession(region) + if err != nil { + return nil, err + } + + instanceIds := aws.StringSlice(parseMultiSelectValue(instanceId)) instances, err := e.ec2DescribeInstances(region, nil, instanceIds) if err != nil { return nil, err } result := make([]suggestData, 0) - for _, mapping := range instances.Reservations[0].Instances[0].BlockDeviceMappings { - result = append(result, suggestData{Text: *mapping.Ebs.VolumeId, Value: *mapping.Ebs.VolumeId}) + for _, reservation := range instances.Reservations { + for _, instance := range reservation.Instances { + for _, mapping := range instance.BlockDeviceMappings { + result = append(result, suggestData{Text: *mapping.Ebs.VolumeId, Value: *mapping.Ebs.VolumeId}) + } + } } return result, nil @@ -401,6 +436,11 @@ func (e *CloudWatchExecutor) handleGetEc2InstanceAttribute(ctx context.Context, 
} } + err := e.ensureClientSession(region) + if err != nil { + return nil, err + } + instances, err := e.ec2DescribeInstances(region, filters, nil) if err != nil { return nil, err @@ -476,7 +516,7 @@ func (e *CloudWatchExecutor) cloudwatchListMetrics(region string, namespace stri return !lastPage }) if err != nil { - return nil, errors.New("Failed to call cloudwatch:ListMetrics") + return nil, fmt.Errorf("Failed to call cloudwatch:ListMetrics, %v", err) } return &resp, nil diff --git a/pkg/tsdb/cloudwatch/metric_find_query_test.go b/pkg/tsdb/cloudwatch/metric_find_query_test.go index 255b343a33a..bf87e7b7d41 100644 --- a/pkg/tsdb/cloudwatch/metric_find_query_test.go +++ b/pkg/tsdb/cloudwatch/metric_find_query_test.go @@ -8,6 +8,7 @@ import ( "github.com/aws/aws-sdk-go/service/cloudwatch" "github.com/aws/aws-sdk-go/service/ec2" "github.com/aws/aws-sdk-go/service/ec2/ec2iface" + "github.com/bmizerany/assert" "github.com/grafana/grafana/pkg/components/simplejson" "github.com/grafana/grafana/pkg/tsdb" . 
"github.com/smartystreets/goconvey/convey" @@ -114,4 +115,85 @@ func TestCloudWatchMetrics(t *testing.T) { So(result[0].Text, ShouldEqual, "i-12345678") }) }) + + Convey("When calling handleGetEbsVolumeIds", t, func() { + + executor := &CloudWatchExecutor{ + ec2Svc: mockedEc2{Resp: ec2.DescribeInstancesOutput{ + Reservations: []*ec2.Reservation{ + { + Instances: []*ec2.Instance{ + { + InstanceId: aws.String("i-1"), + BlockDeviceMappings: []*ec2.InstanceBlockDeviceMapping{ + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-1-1")}}, + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-1-2")}}, + }, + }, + { + InstanceId: aws.String("i-2"), + BlockDeviceMappings: []*ec2.InstanceBlockDeviceMapping{ + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-2-1")}}, + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-2-2")}}, + }, + }, + }, + }, + { + Instances: []*ec2.Instance{ + { + InstanceId: aws.String("i-3"), + BlockDeviceMappings: []*ec2.InstanceBlockDeviceMapping{ + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-3-1")}}, + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-3-2")}}, + }, + }, + { + InstanceId: aws.String("i-4"), + BlockDeviceMappings: []*ec2.InstanceBlockDeviceMapping{ + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-4-1")}}, + {Ebs: &ec2.EbsInstanceBlockDevice{VolumeId: aws.String("vol-4-2")}}, + }, + }, + }, + }, + }, + }}, + } + + json := simplejson.New() + json.Set("region", "us-east-1") + json.Set("instanceId", "{i-1, i-2, i-3, i-4}") + result, _ := executor.handleGetEbsVolumeIds(context.Background(), json, &tsdb.TsdbQuery{}) + + Convey("Should return all 8 VolumeIds", func() { + So(len(result), ShouldEqual, 8) + So(result[0].Text, ShouldEqual, "vol-1-1") + So(result[1].Text, ShouldEqual, "vol-1-2") + So(result[2].Text, ShouldEqual, "vol-2-1") + So(result[3].Text, ShouldEqual, "vol-2-2") + So(result[4].Text, ShouldEqual, "vol-3-1") + So(result[5].Text, ShouldEqual, 
"vol-3-2") + So(result[6].Text, ShouldEqual, "vol-4-1") + So(result[7].Text, ShouldEqual, "vol-4-2") + }) + }) +} + +func TestParseMultiSelectValue(t *testing.T) { + + var values []string + + values = parseMultiSelectValue(" i-someInstance ") + assert.Equal(t, []string{"i-someInstance"}, values) + + values = parseMultiSelectValue("{i-05}") + assert.Equal(t, []string{"i-05"}, values) + + values = parseMultiSelectValue(" {i-01, i-03, i-04} ") + assert.Equal(t, []string{"i-01", "i-03", "i-04"}, values) + + values = parseMultiSelectValue("i-{01}") + assert.Equal(t, []string{"i-{01}"}, values) + } diff --git a/pkg/tsdb/cloudwatch/types.go b/pkg/tsdb/cloudwatch/types.go index c2a5ab8c3d7..0737b64686d 100644 --- a/pkg/tsdb/cloudwatch/types.go +++ b/pkg/tsdb/cloudwatch/types.go @@ -13,4 +13,5 @@ type CloudWatchQuery struct { ExtendedStatistics []*string Period int Alias string + HighResolution bool } diff --git a/pkg/tsdb/influxdb/query.go b/pkg/tsdb/influxdb/query.go index 499f446e9f0..0a16a507877 100644 --- a/pkg/tsdb/influxdb/query.go +++ b/pkg/tsdb/influxdb/query.go @@ -70,7 +70,7 @@ func (query *Query) renderTags() []string { } else if tag.Operator == "<" || tag.Operator == ">" { textValue = tag.Value } else { - textValue = fmt.Sprintf("'%s'", tag.Value) + textValue = fmt.Sprintf("'%s'", strings.Replace(tag.Value, `\`, `\\`, -1)) } res = append(res, fmt.Sprintf(`%s"%s" %s %s`, str, tag.Key, tag.Operator, textValue)) diff --git a/pkg/tsdb/influxdb/query_test.go b/pkg/tsdb/influxdb/query_test.go index 4a620539a26..f1270560269 100644 --- a/pkg/tsdb/influxdb/query_test.go +++ b/pkg/tsdb/influxdb/query_test.go @@ -170,6 +170,12 @@ func TestInfluxdbQueryBuilder(t *testing.T) { So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 'value'`) }) + Convey("can escape backslashes when rendering string tags", func() { + query := &Query{Tags: []*Tag{{Operator: "=", Value: `C:\test\`, Key: "key"}}} + + So(strings.Join(query.renderTags(), ""), ShouldEqual, `"key" = 
'C:\\test\\'`) + }) + Convey("can render regular measurement", func() { query := &Query{Measurement: `apa`, Policy: "policy"} diff --git a/pkg/tsdb/models.go b/pkg/tsdb/models.go index cbda7c97515..dee7289af7f 100644 --- a/pkg/tsdb/models.go +++ b/pkg/tsdb/models.go @@ -14,9 +14,7 @@ type TsdbQuery struct { type Query struct { RefId string Model *simplejson.Json - Depends []string DataSource *models.DataSource - Results []*TimeSeries MaxDataPoints int64 IntervalMs int64 } diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index 6e83623413d..e5d057824d0 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -14,19 +14,25 @@ const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` type MsSqlMacroEngine struct { TimeRange *tsdb.TimeRange + Query *tsdb.Query } func NewMssqlMacroEngine() tsdb.SqlMacroEngine { return &MsSqlMacroEngine{} } -func (m *MsSqlMacroEngine) Interpolate(timeRange *tsdb.TimeRange, sql string) (string, error) { +func (m *MsSqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { m.TimeRange = timeRange + m.Query = query rExp, _ := regexp.Compile(sExpr) var macroError error sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { - res, err := m.evaluateMacro(groups[1], strings.Split(groups[2], ",")) + args := strings.Split(groups[2], ",") + for i, arg := range args { + args[i] = strings.Trim(arg, " ") + } + res, err := m.evaluateMacro(groups[1], args) if err != nil && macroError == nil { macroError = err return "macro_error()" diff --git a/pkg/tsdb/mssql/macros_test.go b/pkg/tsdb/mssql/macros_test.go index 07f251752cb..dedc4fbb28b 100644 --- a/pkg/tsdb/mssql/macros_test.go +++ b/pkg/tsdb/mssql/macros_test.go @@ -11,72 +11,73 @@ func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { engine := &MsSqlMacroEngine{} timeRange := &tsdb.TimeRange{From: "5m", To: "now"} + query := &tsdb.Query{} Convey("interpolate __time function", func() { - sql, 
err := engine.Interpolate(nil, "select $__time(time_column)") + sql, err := engine.Interpolate(query, nil, "select $__time(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select time_column AS time") }) Convey("interpolate __utcTime function", func() { - sql, err := engine.Interpolate(nil, "select $__utcTime(time_column)") + sql, err := engine.Interpolate(query, nil, "select $__utcTime(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column) AS time") }) Convey("interpolate __timeEpoch function", func() { - sql, err := engine.Interpolate(nil, "select $__timeEpoch(time_column)") + sql, err := engine.Interpolate(query, nil, "select $__timeEpoch(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column) ) AS time") }) Convey("interpolate __timeEpoch function wrapped in aggregation", func() { - sql, err := engine.Interpolate(nil, "select min($__timeEpoch(time_column))") + sql, err := engine.Interpolate(query, nil, "select min($__timeEpoch(time_column))") So(err, ShouldBeNil) So(sql, ShouldEqual, "select min(DATEDIFF(second, {d '1970-01-01'}, DATEADD(second, DATEDIFF(second,GETDATE(),GETUTCDATE()), time_column) ) AS time)") }) Convey("interpolate __timeFilter function", func() { - sql, err := engine.Interpolate(timeRange, "WHERE $__timeFilter(time_column)") + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "WHERE time_column >= DATEADD(s, 18446744066914186738+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01') AND time_column <= DATEADD(s, 18446744066914187038+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')") }) Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(timeRange, "select $__timeFrom(time_column)") + sql, err := engine.Interpolate(query, 
timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select DATEADD(second, 18446744066914186738+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')") }) Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(timeRange, "select $__timeTo(time_column)") + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select DATEADD(second, 18446744066914187038+DATEDIFF(second,GETUTCDATE(),GETDATE()), '1970-01-01')") }) Convey("interpolate __unixEpochFilter function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochFilter(18446744066914186738)") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(18446744066914186738)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038") }) Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochFrom()") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914186738") }) Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochTo()") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914187038") diff --git a/pkg/tsdb/mssql/mssql.go b/pkg/tsdb/mssql/mssql.go index f8f462ded3a..51dc5335707 100644 --- a/pkg/tsdb/mssql/mssql.go +++ b/pkg/tsdb/mssql/mssql.go @@ -60,7 +60,7 @@ func (e *MssqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSourc return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) } -func (e MssqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult) error { +func (e 
MssqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { columnNames, err := rows.Columns() columnCount := len(columnNames) @@ -135,7 +135,7 @@ func (e MssqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core. return values, nil } -func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult) error { +func (e MssqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { pointsBySeries := make(map[string]*tsdb.TimeSeries) seriesByQueryOrder := list.New() diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index 108b81fc5f3..b0170070dcf 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -3,6 +3,7 @@ package mysql import ( "fmt" "regexp" + "strconv" "strings" "time" @@ -15,19 +16,25 @@ const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` type MySqlMacroEngine struct { TimeRange *tsdb.TimeRange + Query *tsdb.Query } func NewMysqlMacroEngine() tsdb.SqlMacroEngine { return &MySqlMacroEngine{} } -func (m *MySqlMacroEngine) Interpolate(timeRange *tsdb.TimeRange, sql string) (string, error) { +func (m *MySqlMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { m.TimeRange = timeRange + m.Query = query rExp, _ := regexp.Compile(sExpr) var macroError error sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { - res, err := m.evaluateMacro(groups[1], strings.Split(groups[2], ",")) + args := strings.Split(groups[2], ",") + for i, arg := range args { + args[i] = strings.Trim(arg, " ") + } + res, err := m.evaluateMacro(groups[1], args) if err != nil && macroError == nil { macroError = err return "macro_error()" @@ -76,13 +83,26 @@ func (m *MySqlMacroEngine) evaluateMacro(name string, args []string) (string, er case "__timeTo": return 
fmt.Sprintf("FROM_UNIXTIME(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil case "__timeGroup": - if len(args) != 2 { + if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'" `)) + interval, err := time.ParseDuration(strings.Trim(args[1], `'"`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } + if len(args) == 3 { + m.Query.Model.Set("fill", true) + m.Query.Model.Set("fillInterval", interval.Seconds()) + if args[2] == "NULL" { + m.Query.Model.Set("fillNull", true) + } else { + floatVal, err := strconv.ParseFloat(args[2], 64) + if err != nil { + return "", fmt.Errorf("error parsing fill value %v", args[2]) + } + m.Query.Model.Set("fillValue", floatVal) + } + } return fmt.Sprintf("cast(cast(UNIX_TIMESTAMP(%s)/(%.0f) as signed)*%.0f as signed)", args[0], interval.Seconds(), interval.Seconds()), nil case "__unixEpochFilter": if len(args) == 0 { diff --git a/pkg/tsdb/mysql/macros_test.go b/pkg/tsdb/mysql/macros_test.go index 988612fb287..a89ba16ab78 100644 --- a/pkg/tsdb/mysql/macros_test.go +++ b/pkg/tsdb/mysql/macros_test.go @@ -10,31 +10,32 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { engine := &MySqlMacroEngine{} + query := &tsdb.Query{} timeRange := &tsdb.TimeRange{From: "5m", To: "now"} Convey("interpolate __time function", func() { - sql, err := engine.Interpolate(nil, "select $__time(time_column)") + sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select UNIX_TIMESTAMP(time_column) as time_sec") }) Convey("interpolate __time function wrapped in aggregation", func() { - sql, err := engine.Interpolate(nil, "select min($__time(time_column))") + sql, err := engine.Interpolate(query, timeRange, "select min($__time(time_column))") So(err, ShouldBeNil) So(sql, ShouldEqual, "select min(UNIX_TIMESTAMP(time_column) as 
time_sec)") }) Convey("interpolate __timeFilter function", func() { - sql, err := engine.Interpolate(timeRange, "WHERE $__timeFilter(time_column)") + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "WHERE time_column >= FROM_UNIXTIME(18446744066914186738) AND time_column <= FROM_UNIXTIME(18446744066914187038)") }) Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(timeRange, "select $__timeFrom(time_column)") + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select FROM_UNIXTIME(18446744066914186738)") @@ -42,35 +43,43 @@ func TestMacroEngine(t *testing.T) { Convey("interpolate __timeGroup function", func() { - sql, err := engine.Interpolate(timeRange, "GROUP BY $__timeGroup(time_column,'5m')") + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") + }) + + Convey("interpolate __timeGroup function with spaces around arguments", func() { + + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") So(err, ShouldBeNil) So(sql, ShouldEqual, "GROUP BY cast(cast(UNIX_TIMESTAMP(time_column)/(300) as signed)*300 as signed)") }) Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(timeRange, "select $__timeTo(time_column)") + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select FROM_UNIXTIME(18446744066914187038)") }) Convey("interpolate __unixEpochFilter function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochFilter(18446744066914186738)") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(18446744066914186738)") So(err, 
ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038") }) Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochFrom()") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914186738") }) Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochTo()") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914187038") diff --git a/pkg/tsdb/mysql/mysql.go b/pkg/tsdb/mysql/mysql.go index e5c6b92f245..f3060e235e5 100644 --- a/pkg/tsdb/mysql/mysql.go +++ b/pkg/tsdb/mysql/mysql.go @@ -5,8 +5,9 @@ import ( "context" "database/sql" "fmt" + "math" + "reflect" "strconv" - "time" "github.com/go-sql-driver/mysql" @@ -56,7 +57,7 @@ func (e *MysqlQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSourc return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) } -func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult) error { +func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { columnNames, err := rows.Columns() columnCount := len(columnNames) @@ -73,24 +74,36 @@ func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, table.Columns[i].Text = name } - columnTypes, err := rows.ColumnTypes() - if err != nil { - return err - } - rowLimit := 1000000 rowCount := 0 + timeIndex := -1 + + // check if there is a column named time + for i, col := range columnNames { + switch col { + case "time_sec": + timeIndex = i + } + } for ; rows.Next(); rowCount++ { if rowCount > rowLimit { return fmt.Errorf("MySQL 
query row limit exceeded, limit %d", rowLimit) } - values, err := e.getTypedRowData(columnTypes, rows) + values, err := e.getTypedRowData(rows) if err != nil { return err } + // for annotations, convert to epoch + if timeIndex != -1 { + switch value := values[timeIndex].(type) { + case time.Time: + values[timeIndex] = float64(value.UnixNano() / 1e9) + } + } + table.Rows = append(table.Rows, values) } @@ -99,60 +112,20 @@ func (e MysqlQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, return nil } -func (e MysqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core.Rows) (tsdb.RowValues, error) { +func (e MysqlQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, error) { + types, err := rows.ColumnTypes() + if err != nil { + return nil, err + } + values := make([]interface{}, len(types)) - for i, stype := range types { - e.log.Debug("type", "type", stype) - switch stype.DatabaseTypeName() { - case mysql.FieldTypeNameTiny: - values[i] = new(int8) - case mysql.FieldTypeNameInt24: - values[i] = new(int32) - case mysql.FieldTypeNameShort: - values[i] = new(int16) - case mysql.FieldTypeNameVarString: - values[i] = new(string) - case mysql.FieldTypeNameVarChar: - values[i] = new(string) - case mysql.FieldTypeNameLong: - values[i] = new(int) - case mysql.FieldTypeNameLongLong: - values[i] = new(int64) - case mysql.FieldTypeNameDouble: - values[i] = new(float64) - case mysql.FieldTypeNameDecimal: - values[i] = new(float32) - case mysql.FieldTypeNameNewDecimal: - values[i] = new(float64) - case mysql.FieldTypeNameFloat: - values[i] = new(float64) - case mysql.FieldTypeNameTimestamp: - values[i] = new(time.Time) - case mysql.FieldTypeNameDateTime: - values[i] = new(time.Time) - case mysql.FieldTypeNameTime: - values[i] = new(string) - case mysql.FieldTypeNameYear: - values[i] = new(int16) - case mysql.FieldTypeNameNULL: - values[i] = nil - case mysql.FieldTypeNameBit: + for i := range values { + scanType := types[i].ScanType() + 
values[i] = reflect.New(scanType).Interface() + + if types[i].DatabaseTypeName() == "BIT" { values[i] = new([]byte) - case mysql.FieldTypeNameBLOB: - values[i] = new(string) - case mysql.FieldTypeNameTinyBLOB: - values[i] = new(string) - case mysql.FieldTypeNameMediumBLOB: - values[i] = new(string) - case mysql.FieldTypeNameLongBLOB: - values[i] = new(string) - case mysql.FieldTypeNameString: - values[i] = new(string) - case mysql.FieldTypeNameDate: - values[i] = new(string) - default: - return nil, fmt.Errorf("Database type %s not supported", stype.DatabaseTypeName()) } } @@ -160,14 +133,54 @@ func (e MysqlQueryEndpoint) getTypedRowData(types []*sql.ColumnType, rows *core. return nil, err } + for i := 0; i < len(types); i++ { + typeName := reflect.ValueOf(values[i]).Type().String() + + switch typeName { + case "*sql.RawBytes": + values[i] = string(*values[i].(*sql.RawBytes)) + case "*mysql.NullTime": + sqlTime := (*values[i].(*mysql.NullTime)) + if sqlTime.Valid { + values[i] = sqlTime.Time + } else { + values[i] = nil + } + case "*sql.NullInt64": + nullInt64 := (*values[i].(*sql.NullInt64)) + if nullInt64.Valid { + values[i] = nullInt64.Int64 + } else { + values[i] = nil + } + case "*sql.NullFloat64": + nullFloat64 := (*values[i].(*sql.NullFloat64)) + if nullFloat64.Valid { + values[i] = nullFloat64.Float64 + } else { + values[i] = nil + } + } + + if types[i].DatabaseTypeName() == "DECIMAL" { + f, err := strconv.ParseFloat(values[i].(string), 64) + + if err == nil { + values[i] = f + } else { + values[i] = nil + } + } + } + return values, nil } -func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult) error { +func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { pointsBySeries := make(map[string]*tsdb.TimeSeries) seriesByQueryOrder := list.New() - columnNames, err := rows.Columns() + columnNames, err := 
rows.Columns() if err != nil { return err } @@ -176,6 +189,18 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. rowLimit := 1000000 rowCount := 0 + fillMissing := query.Model.Get("fill").MustBool(false) + var fillInterval float64 + fillValue := null.Float{} + if fillMissing { + fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 + if query.Model.Get("fillNull").MustBool(false) == false { + fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() + fillValue.Valid = true + } + + } + for ; rows.Next(); rowCount++ { if rowCount > rowLimit { return fmt.Errorf("MySQL query row limit exceeded, limit %d", rowLimit) @@ -195,19 +220,50 @@ func (e MysqlQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core. return fmt.Errorf("Found row with no time value") } - if series, exist := pointsBySeries[rowData.metric]; exist { - series.Points = append(series.Points, tsdb.TimePoint{rowData.value, rowData.time}) - } else { - series := &tsdb.TimeSeries{Name: rowData.metric} - series.Points = append(series.Points, tsdb.TimePoint{rowData.value, rowData.time}) + series, exist := pointsBySeries[rowData.metric] + if exist == false { + series = &tsdb.TimeSeries{Name: rowData.metric} pointsBySeries[rowData.metric] = series seriesByQueryOrder.PushBack(rowData.metric) } + + if fillMissing { + var intervalStart float64 + if exist == false { + intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) + } else { + intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval + } + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + + for i := intervalStart; i < rowData.time.Float64; i += fillInterval { + series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) + rowCount++ + } + } + + series.Points = append(series.Points, tsdb.TimePoint{rowData.value, rowData.time}) } for elem := seriesByQueryOrder.Front(); elem != nil; 
elem = elem.Next() { key := elem.Value.(string) result.Series = append(result.Series, pointsBySeries[key]) + + if fillMissing { + series := pointsBySeries[key] + // fill in values from last fetched value till interval end + intervalStart := series.Points[len(series.Points)-1][1].Float64 + intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { + series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) + rowCount++ + } + } } result.Meta.Set("rowCount", rowCount) diff --git a/pkg/tsdb/mysql/mysql_test.go b/pkg/tsdb/mysql/mysql_test.go index 55def0b4129..fe2c82223d2 100644 --- a/pkg/tsdb/mysql/mysql_test.go +++ b/pkg/tsdb/mysql/mysql_test.go @@ -30,19 +30,19 @@ func TestMySQL(t *testing.T) { defer sess.Close() sql := "CREATE TABLE `mysql_types` (" - sql += "`atinyint` tinyint(1)," - sql += "`avarchar` varchar(3)," + sql += "`atinyint` tinyint(1) NOT NULL," + sql += "`avarchar` varchar(3) NOT NULL," sql += "`achar` char(3)," - sql += "`amediumint` mediumint," - sql += "`asmallint` smallint," - sql += "`abigint` bigint," - sql += "`aint` int(11)," + sql += "`amediumint` mediumint NOT NULL," + sql += "`asmallint` smallint NOT NULL," + sql += "`abigint` bigint NOT NULL," + sql += "`aint` int(11) NOT NULL," sql += "`adouble` double(10,2)," sql += "`anewdecimal` decimal(10,2)," - sql += "`afloat` float(10,2)," + sql += "`afloat` float(10,2) NOT NULL," sql += "`atimestamp` timestamp NOT NULL," - sql += "`adatetime` datetime," - sql += "`atime` time," + sql += "`adatetime` datetime NOT NULL," + sql += "`atime` time NOT NULL," // sql += "`ayear` year," // Crashes xorm when running cleandb sql += "`abit` bit(1)," sql += "`atinytext` tinytext," @@ -55,7 +55,12 @@ func TestMySQL(t *testing.T) { sql += "`alongblob` longblob," sql += "`aenum` enum('val1', 
'val2')," sql += "`aset` set('a', 'b', 'c', 'd')," - sql += "`adate` date" + sql += "`adate` date," + sql += "`time_sec` datetime(6)," + sql += "`aintnull` int(11)," + sql += "`afloatnull` float(10,2)," + sql += "`avarcharnull` varchar(3)," + sql += "`adecimalnull` decimal(10,2)" sql += ") ENGINE=InnoDB DEFAULT CHARSET=latin1;" _, err := sess.Exec(sql) So(err, ShouldBeNil) @@ -64,11 +69,11 @@ func TestMySQL(t *testing.T) { sql += "(`atinyint`, `avarchar`, `achar`, `amediumint`, `asmallint`, `abigint`, `aint`, `adouble`, " sql += "`anewdecimal`, `afloat`, `adatetime`, `atimestamp`, `atime`, `abit`, `atinytext`, " sql += "`atinyblob`, `atext`, `ablob`, `amediumtext`, `amediumblob`, `alongtext`, `alongblob`, " - sql += "`aenum`, `aset`, `adate`) " + sql += "`aenum`, `aset`, `adate`, `time_sec`) " sql += "VALUES(1, 'abc', 'def', 1, 10, 100, 1420070400, 1.11, " sql += "2.22, 3.33, now(), current_timestamp(), '11:11:11', 1, 'tinytext', " sql += "'tinyblob', 'text', 'blob', 'mediumtext', 'mediumblob', 'longtext', 'longblob', " - sql += "'val2', 'a,b', curdate());" + sql += "'val2', 'a,b', curdate(), '2018-01-01 00:01:01.123456');" _, err = sess.Exec(sql) So(err, ShouldBeNil) @@ -90,32 +95,38 @@ func TestMySQL(t *testing.T) { So(err, ShouldBeNil) column := queryResult.Tables[0].Rows[0] + So(*column[0].(*int8), ShouldEqual, 1) - So(*column[1].(*string), ShouldEqual, "abc") - So(*column[2].(*string), ShouldEqual, "def") + So(column[1].(string), ShouldEqual, "abc") + So(column[2].(string), ShouldEqual, "def") So(*column[3].(*int32), ShouldEqual, 1) So(*column[4].(*int16), ShouldEqual, 10) So(*column[5].(*int64), ShouldEqual, 100) - So(*column[6].(*int), ShouldEqual, 1420070400) - So(*column[7].(*float64), ShouldEqual, 1.11) - So(*column[8].(*float64), ShouldEqual, 2.22) - So(*column[9].(*float64), ShouldEqual, 3.33) + So(*column[6].(*int32), ShouldEqual, 1420070400) + So(column[7].(float64), ShouldEqual, 1.11) + So(column[8].(float64), ShouldEqual, 2.22) + 
So(*column[9].(*float32), ShouldEqual, 3.33) _, offset := time.Now().Zone() - So((*column[10].(*time.Time)), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second)) - So(*column[11].(*time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second)) - So(*column[12].(*string), ShouldEqual, "11:11:11") + So(column[10].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second)) + So(column[11].(time.Time), ShouldHappenWithin, time.Duration(10*time.Second), time.Now().Add(time.Duration(offset)*time.Second)) + So(column[12].(string), ShouldEqual, "11:11:11") So(*column[13].(*[]byte), ShouldHaveSameTypeAs, []byte{1}) - So(*column[14].(*string), ShouldEqual, "tinytext") - So(*column[15].(*string), ShouldEqual, "tinyblob") - So(*column[16].(*string), ShouldEqual, "text") - So(*column[17].(*string), ShouldEqual, "blob") - So(*column[18].(*string), ShouldEqual, "mediumtext") - So(*column[19].(*string), ShouldEqual, "mediumblob") - So(*column[20].(*string), ShouldEqual, "longtext") - So(*column[21].(*string), ShouldEqual, "longblob") - So(*column[22].(*string), ShouldEqual, "val2") - So(*column[23].(*string), ShouldEqual, "a,b") - So(*column[24].(*string), ShouldEqual, time.Now().Format("2006-01-02T00:00:00Z")) + So(column[14].(string), ShouldEqual, "tinytext") + So(column[15].(string), ShouldEqual, "tinyblob") + So(column[16].(string), ShouldEqual, "text") + So(column[17].(string), ShouldEqual, "blob") + So(column[18].(string), ShouldEqual, "mediumtext") + So(column[19].(string), ShouldEqual, "mediumblob") + So(column[20].(string), ShouldEqual, "longtext") + So(column[21].(string), ShouldEqual, "longblob") + So(column[22].(string), ShouldEqual, "val2") + So(column[23].(string), ShouldEqual, "a,b") + So(column[24].(time.Time).Format("2006-01-02T00:00:00Z"), ShouldEqual, time.Now().Format("2006-01-02T00:00:00Z")) + 
So(column[25].(float64), ShouldEqual, 1514764861) + So(column[26], ShouldEqual, nil) + So(column[27], ShouldEqual, nil) + So(column[28], ShouldEqual, "") + So(column[29], ShouldEqual, nil) }) }) } diff --git a/pkg/tsdb/opentsdb/opentsdb.go b/pkg/tsdb/opentsdb/opentsdb.go index 29daa0c3bb4..692b891eddd 100644 --- a/pkg/tsdb/opentsdb/opentsdb.go +++ b/pkg/tsdb/opentsdb/opentsdb.go @@ -22,23 +22,10 @@ import ( ) type OpenTsdbExecutor struct { - //*models.DataSource - //httpClient *http.Client } func NewOpenTsdbExecutor(datasource *models.DataSource) (tsdb.TsdbQueryEndpoint, error) { - /* - httpClient, err := datasource.GetHttpClient() - - if err != nil { - return nil, err - } - */ - - return &OpenTsdbExecutor{ - //DataSource: datasource, - //httpClient: httpClient, - }, nil + return &OpenTsdbExecutor{}, nil } var ( diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 086eb96655f..23daeebec5a 100644 --- a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -3,6 +3,7 @@ package postgres import ( "fmt" "regexp" + "strconv" "strings" "time" @@ -15,19 +16,25 @@ const sExpr = `\$` + rsIdentifier + `\(([^\)]*)\)` type PostgresMacroEngine struct { TimeRange *tsdb.TimeRange + Query *tsdb.Query } func NewPostgresMacroEngine() tsdb.SqlMacroEngine { return &PostgresMacroEngine{} } -func (m *PostgresMacroEngine) Interpolate(timeRange *tsdb.TimeRange, sql string) (string, error) { +func (m *PostgresMacroEngine) Interpolate(query *tsdb.Query, timeRange *tsdb.TimeRange, sql string) (string, error) { m.TimeRange = timeRange + m.Query = query rExp, _ := regexp.Compile(sExpr) var macroError error sql = replaceAllStringSubmatchFunc(rExp, sql, func(groups []string) string { - res, err := m.evaluateMacro(groups[1], strings.Split(groups[2], ",")) + args := strings.Split(groups[2], ",") + for i, arg := range args { + args[i] = strings.Trim(arg, " ") + } + res, err := m.evaluateMacro(groups[1], args) if err != nil && macroError == nil { macroError = err 
return "macro_error()" @@ -82,13 +89,26 @@ func (m *PostgresMacroEngine) evaluateMacro(name string, args []string) (string, case "__timeTo": return fmt.Sprintf("to_timestamp(%d)", uint64(m.TimeRange.GetToAsMsEpoch()/1000)), nil case "__timeGroup": - if len(args) != 2 { - return "", fmt.Errorf("macro %v needs time column and interval", name) + if len(args) < 2 { + return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `' `)) + interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } + if len(args) == 3 { + m.Query.Model.Set("fill", true) + m.Query.Model.Set("fillInterval", interval.Seconds()) + if args[2] == "NULL" { + m.Query.Model.Set("fillNull", true) + } else { + floatVal, err := strconv.ParseFloat(args[2], 64) + if err != nil { + return "", fmt.Errorf("error parsing fill value %v", args[2]) + } + m.Query.Model.Set("fillValue", floatVal) + } + } return fmt.Sprintf("(extract(epoch from %s)/%v)::bigint*%v AS time", args[0], interval.Seconds(), interval.Seconds()), nil case "__unixEpochFilter": if len(args) == 0 { diff --git a/pkg/tsdb/postgres/macros_test.go b/pkg/tsdb/postgres/macros_test.go index ebc5191d46e..b18acced963 100644 --- a/pkg/tsdb/postgres/macros_test.go +++ b/pkg/tsdb/postgres/macros_test.go @@ -10,31 +10,32 @@ import ( func TestMacroEngine(t *testing.T) { Convey("MacroEngine", t, func() { engine := &PostgresMacroEngine{} + query := &tsdb.Query{} timeRange := &tsdb.TimeRange{From: "5m", To: "now"} Convey("interpolate __time function", func() { - sql, err := engine.Interpolate(nil, "select $__time(time_column)") + sql, err := engine.Interpolate(query, timeRange, "select $__time(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select time_column AS \"time\"") }) Convey("interpolate __time function wrapped in aggregation", func() { - sql, err := 
engine.Interpolate(nil, "select min($__time(time_column))") + sql, err := engine.Interpolate(query, timeRange, "select min($__time(time_column))") So(err, ShouldBeNil) So(sql, ShouldEqual, "select min(time_column AS \"time\")") }) Convey("interpolate __timeFilter function", func() { - sql, err := engine.Interpolate(timeRange, "WHERE $__timeFilter(time_column)") + sql, err := engine.Interpolate(query, timeRange, "WHERE $__timeFilter(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "WHERE extract(epoch from time_column) BETWEEN 18446744066914186738 AND 18446744066914187038") }) Convey("interpolate __timeFrom function", func() { - sql, err := engine.Interpolate(timeRange, "select $__timeFrom(time_column)") + sql, err := engine.Interpolate(query, timeRange, "select $__timeFrom(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select to_timestamp(18446744066914186738)") @@ -42,35 +43,43 @@ func TestMacroEngine(t *testing.T) { Convey("interpolate __timeGroup function", func() { - sql, err := engine.Interpolate(timeRange, "GROUP BY $__timeGroup(time_column,'5m')") + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column,'5m')") + So(err, ShouldBeNil) + + So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time") + }) + + Convey("interpolate __timeGroup function with spaces between args", func() { + + sql, err := engine.Interpolate(query, timeRange, "GROUP BY $__timeGroup(time_column , '5m')") So(err, ShouldBeNil) So(sql, ShouldEqual, "GROUP BY (extract(epoch from time_column)/300)::bigint*300 AS time") }) Convey("interpolate __timeTo function", func() { - sql, err := engine.Interpolate(timeRange, "select $__timeTo(time_column)") + sql, err := engine.Interpolate(query, timeRange, "select $__timeTo(time_column)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select to_timestamp(18446744066914187038)") }) Convey("interpolate __unixEpochFilter function", func() { - sql, err := 
engine.Interpolate(timeRange, "select $__unixEpochFilter(18446744066914186738)") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFilter(18446744066914186738)") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914186738 >= 18446744066914186738 AND 18446744066914186738 <= 18446744066914187038") }) Convey("interpolate __unixEpochFrom function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochFrom()") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochFrom()") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914186738") }) Convey("interpolate __unixEpochTo function", func() { - sql, err := engine.Interpolate(timeRange, "select $__unixEpochTo()") + sql, err := engine.Interpolate(query, timeRange, "select $__unixEpochTo()") So(err, ShouldBeNil) So(sql, ShouldEqual, "select 18446744066914187038") diff --git a/pkg/tsdb/postgres/postgres.go b/pkg/tsdb/postgres/postgres.go index a8c96d8119c..6a084ad1237 100644 --- a/pkg/tsdb/postgres/postgres.go +++ b/pkg/tsdb/postgres/postgres.go @@ -4,6 +4,7 @@ import ( "container/list" "context" "fmt" + "math" "net/url" "strconv" "time" @@ -53,14 +54,15 @@ func generateConnectionString(datasource *models.DataSource) string { } sslmode := datasource.JsonData.Get("sslmode").MustString("verify-full") - return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s", url.PathEscape(datasource.User), url.PathEscape(password), url.PathEscape(datasource.Url), url.PathEscape(datasource.Database), url.QueryEscape(sslmode)) + u := &url.URL{Scheme: "postgres", User: url.UserPassword(datasource.User, password), Host: datasource.Url, Path: datasource.Database, RawQuery: "sslmode=" + sslmode} + return u.String() } func (e *PostgresQueryEndpoint) Query(ctx context.Context, dsInfo *models.DataSource, tsdbQuery *tsdb.TsdbQuery) (*tsdb.Response, error) { return e.sqlEngine.Query(ctx, dsInfo, tsdbQuery, e.transformToTimeSeries, e.transformToTable) } -func (e 
PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult) error { +func (e PostgresQueryEndpoint) transformToTable(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { columnNames, err := rows.Columns() if err != nil { @@ -157,7 +159,7 @@ func (e PostgresQueryEndpoint) getTypedRowData(rows *core.Rows) (tsdb.RowValues, return values, nil } -func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult) error { +func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *core.Rows, result *tsdb.QueryResult, tsdbQuery *tsdb.TsdbQuery) error { pointsBySeries := make(map[string]*tsdb.TimeSeries) seriesByQueryOrder := list.New() @@ -198,6 +200,18 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co return fmt.Errorf("Found no column named time") } + fillMissing := query.Model.Get("fill").MustBool(false) + var fillInterval float64 + fillValue := null.Float{} + if fillMissing { + fillInterval = query.Model.Get("fillInterval").MustFloat64() * 1000 + if query.Model.Get("fillNull").MustBool(false) == false { + fillValue.Float64 = query.Model.Get("fillValue").MustFloat64() + fillValue.Valid = true + } + + } + for rows.Next() { var timestamp float64 var value null.Float @@ -220,14 +234,14 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co case time.Time: timestamp = float64(columnValue.UnixNano() / 1e6) default: - return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp") + return fmt.Errorf("Invalid type for column time, must be of type timestamp or unix timestamp, got: %T %v", columnValue, columnValue) } if metricIndex >= 0 { if columnValue, ok := values[metricIndex].(string); ok == true { metric = columnValue } else { - return fmt.Errorf("Column metric must be of type char,varchar or text") + return fmt.Errorf("Column 
metric must be of type char,varchar or text, got: %T %v", values[metricIndex], values[metricIndex]) } } @@ -249,7 +263,34 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co if metricIndex == -1 { metric = col } - e.appendTimePoint(pointsBySeries, seriesByQueryOrder, metric, timestamp, value) + + series, exist := pointsBySeries[metric] + if exist == false { + series = &tsdb.TimeSeries{Name: metric} + pointsBySeries[metric] = series + seriesByQueryOrder.PushBack(metric) + } + + if fillMissing { + var intervalStart float64 + if exist == false { + intervalStart = float64(tsdbQuery.TimeRange.MustGetFrom().UnixNano() / 1e6) + } else { + intervalStart = series.Points[len(series.Points)-1][1].Float64 + fillInterval + } + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + + for i := intervalStart; i < timestamp; i += fillInterval { + series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) + rowCount++ + } + } + + series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) + + e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) rowCount++ } @@ -258,20 +299,22 @@ func (e PostgresQueryEndpoint) transformToTimeSeries(query *tsdb.Query, rows *co for elem := seriesByQueryOrder.Front(); elem != nil; elem = elem.Next() { key := elem.Value.(string) result.Series = append(result.Series, pointsBySeries[key]) + + if fillMissing { + series := pointsBySeries[key] + // fill in values from last fetched value till interval end + intervalStart := series.Points[len(series.Points)-1][1].Float64 + intervalEnd := float64(tsdbQuery.TimeRange.MustGetTo().UnixNano() / 1e6) + + // align interval start + intervalStart = math.Floor(intervalStart/fillInterval) * fillInterval + for i := intervalStart + fillInterval; i < intervalEnd; i += fillInterval { + series.Points = append(series.Points, tsdb.TimePoint{fillValue, null.FloatFrom(i)}) + 
rowCount++ + } + } } result.Meta.Set("rowCount", rowCount) return nil } - -func (e PostgresQueryEndpoint) appendTimePoint(pointsBySeries map[string]*tsdb.TimeSeries, seriesByQueryOrder *list.List, metric string, timestamp float64, value null.Float) { - if series, exist := pointsBySeries[metric]; exist { - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - } else { - series := &tsdb.TimeSeries{Name: metric} - series.Points = append(series.Points, tsdb.TimePoint{value, null.FloatFrom(timestamp)}) - pointsBySeries[metric] = series - seriesByQueryOrder.PushBack(metric) - } - e.log.Debug("Rows", "metric", metric, "time", timestamp, "value", value) -} diff --git a/pkg/tsdb/sql_engine.go b/pkg/tsdb/sql_engine.go index 12778b4e1ad..7ea0682235f 100644 --- a/pkg/tsdb/sql_engine.go +++ b/pkg/tsdb/sql_engine.go @@ -17,15 +17,15 @@ type SqlEngine interface { ctx context.Context, ds *models.DataSource, query *TsdbQuery, - transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult) error, - transformToTable func(query *Query, rows *core.Rows, result *QueryResult) error, + transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, + transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, ) (*Response, error) } -// SqlMacroEngine interpolates macros into sql. It takes in the timeRange to be able to -// generate queries that use from and to. +// SqlMacroEngine interpolates macros into sql. It takes in the Query to have access to query context and +// timeRange to be able to generate queries that use from and to. 
type SqlMacroEngine interface { - Interpolate(timeRange *TimeRange, sql string) (string, error) + Interpolate(query *Query, timeRange *TimeRange, sql string) (string, error) } type DefaultSqlEngine struct { @@ -77,8 +77,8 @@ func (e *DefaultSqlEngine) Query( ctx context.Context, dsInfo *models.DataSource, tsdbQuery *TsdbQuery, - transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult) error, - transformToTable func(query *Query, rows *core.Rows, result *QueryResult) error, + transformToTimeSeries func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, + transformToTable func(query *Query, rows *core.Rows, result *QueryResult, tsdbQuery *TsdbQuery) error, ) (*Response, error) { result := &Response{ Results: make(map[string]*QueryResult), @@ -97,7 +97,7 @@ func (e *DefaultSqlEngine) Query( queryResult := &QueryResult{Meta: simplejson.New(), RefId: query.RefId} result.Results[query.RefId] = queryResult - rawSql, err := e.MacroEngine.Interpolate(tsdbQuery.TimeRange, rawSql) + rawSql, err := e.MacroEngine.Interpolate(query, tsdbQuery.TimeRange, rawSql) if err != nil { queryResult.Error = err continue @@ -117,13 +117,13 @@ func (e *DefaultSqlEngine) Query( switch format { case "time_series": - err := transformToTimeSeries(query, rows, queryResult) + err := transformToTimeSeries(query, rows, queryResult, tsdbQuery) if err != nil { queryResult.Error = err continue } case "table": - err := transformToTable(query, rows, queryResult) + err := transformToTable(query, rows, queryResult, tsdbQuery) if err != nil { queryResult.Error = err continue diff --git a/pkg/tsdb/time_range.go b/pkg/tsdb/time_range.go index cf6bc6a5048..fd797bf731a 100644 --- a/pkg/tsdb/time_range.go +++ b/pkg/tsdb/time_range.go @@ -11,14 +11,14 @@ func NewTimeRange(from, to string) *TimeRange { return &TimeRange{ From: from, To: to, - Now: time.Now(), + now: time.Now(), } } type TimeRange struct { From string To string - Now time.Time + now time.Time } 
func (tr *TimeRange) GetFromAsMsEpoch() int64 { @@ -65,12 +65,12 @@ func (tr *TimeRange) ParseFrom() (time.Time, error) { return time.Time{}, err } - return tr.Now.Add(diff), nil + return tr.now.Add(diff), nil } func (tr *TimeRange) ParseTo() (time.Time, error) { if tr.To == "now" { - return tr.Now, nil + return tr.now, nil } else if strings.HasPrefix(tr.To, "now-") { withoutNow := strings.Replace(tr.To, "now-", "", 1) @@ -79,7 +79,7 @@ func (tr *TimeRange) ParseTo() (time.Time, error) { return time.Time{}, nil } - return tr.Now.Add(diff), nil + return tr.now.Add(diff), nil } if res, ok := tryParseUnixMsEpoch(tr.To); ok { diff --git a/pkg/tsdb/time_range_test.go b/pkg/tsdb/time_range_test.go index 5d89e3977b8..37ced412e4d 100644 --- a/pkg/tsdb/time_range_test.go +++ b/pkg/tsdb/time_range_test.go @@ -16,7 +16,7 @@ func TestTimeRange(t *testing.T) { tr := TimeRange{ From: "5m", To: "now", - Now: now, + now: now, } Convey("5m ago ", func() { @@ -39,7 +39,7 @@ func TestTimeRange(t *testing.T) { tr := TimeRange{ From: "5h", To: "now-10m", - Now: now, + now: now, } Convey("5h ago ", func() { @@ -65,7 +65,7 @@ func TestTimeRange(t *testing.T) { tr := TimeRange{ From: "1474973725473", To: "1474975757930", - Now: now, + now: now, } res, err := tr.ParseFrom() @@ -82,7 +82,7 @@ func TestTimeRange(t *testing.T) { tr := TimeRange{ From: "asdf", To: "asdf", - Now: now, + now: now, } _, err = tr.ParseFrom() diff --git a/pkg/util/shortid_generator.go b/pkg/util/shortid_generator.go new file mode 100644 index 00000000000..d87b6f70fe6 --- /dev/null +++ b/pkg/util/shortid_generator.go @@ -0,0 +1,30 @@ +package util + +import ( + "regexp" + + "github.com/teris-io/shortid" +) + +var allowedChars = shortid.DefaultABC + +var validUidPattern = regexp.MustCompile(`^[a-zA-Z0-9\-\_]*$`).MatchString + +func init() { + gen, _ := shortid.New(1, allowedChars, 1) + shortid.SetDefault(gen) +} + +// IsValidShortUid checks if short unique identifier contains valid characters +func 
IsValidShortUid(uid string) bool { + if !validUidPattern(uid) { + return false + } + + return true +} + +// GenerateShortUid generates a short unique identifier. +func GenerateShortUid() string { + return shortid.MustGenerate() +} diff --git a/pkg/util/shortid_generator_test.go b/pkg/util/shortid_generator_test.go new file mode 100644 index 00000000000..359e054a0ca --- /dev/null +++ b/pkg/util/shortid_generator_test.go @@ -0,0 +1,11 @@ +package util + +import "testing" + +func TestAllowedCharMatchesUidPattern(t *testing.T) { + for _, c := range allowedChars { + if !IsValidShortUid(string(c)) { + t.Fatalf("charset for creating new shortids contains chars not present in uid pattern") + } + } +} diff --git a/public/app/app.ts b/public/app/app.ts index 45050240602..9ac76b8ec91 100644 --- a/public/app/app.ts +++ b/public/app/app.ts @@ -27,6 +27,9 @@ _.move = function(array, fromIndex, toIndex) { }; import { coreModule, registerAngularDirectives } from './core/core'; +import { setupAngularRoutes } from './routes/routes'; + +declare var System: any; export class GrafanaApp { registerFunctions: any; @@ -113,6 +116,7 @@ export class GrafanaApp { this.useModule(coreModule); // register react angular wrappers + coreModule.config(setupAngularRoutes); registerAngularDirectives(); var preBootRequires = [System.import('app/features/all')]; @@ -121,6 +125,7 @@ export class GrafanaApp { .then(() => { // disable tool tip animation $.fn.tooltip.defaults.animation = false; + // bootstrap the app angular.bootstrap(document, this.ngModuleDependencies).invoke(() => { _.each(this.preBootModules, module => { diff --git a/public/app/containers/AlertRuleList/AlertRuleList.jest.tsx b/public/app/containers/AlertRuleList/AlertRuleList.jest.tsx new file mode 100644 index 00000000000..eac18a6c69d --- /dev/null +++ b/public/app/containers/AlertRuleList/AlertRuleList.jest.tsx @@ -0,0 +1,69 @@ +import React from 'react'; +import moment from 'moment'; +import { AlertRuleList } from './AlertRuleList'; 
+import { RootStore } from 'app/stores/RootStore/RootStore'; +import { backendSrv, createNavTree } from 'test/mocks/common'; +import { mount } from 'enzyme'; +import toJson from 'enzyme-to-json'; + +describe('AlertRuleList', () => { + let page, store; + + beforeAll(() => { + backendSrv.get.mockReturnValue( + Promise.resolve([ + { + id: 11, + dashboardId: 58, + panelId: 3, + name: 'Panel Title alert', + state: 'ok', + newStateDate: moment() + .subtract(5, 'minutes') + .format(), + evalData: {}, + executionError: '', + url: 'd/ufkcofof/my-goal', + canEdit: true, + }, + ]) + ); + + store = RootStore.create( + {}, + { + backendSrv: backendSrv, + navTree: createNavTree('alerting', 'alert-list'), + } + ); + + page = mount(); + }); + + it('should call api to get rules', () => { + expect(backendSrv.get.mock.calls[0][0]).toEqual('/api/alerts'); + }); + + it('should render 1 rule', () => { + page.update(); + let ruleNode = page.find('.alert-rule-item'); + expect(toJson(ruleNode)).toMatchSnapshot(); + }); + + it('toggle state should change pause rule if not paused', async () => { + backendSrv.post.mockReturnValue( + Promise.resolve({ + state: 'paused', + }) + ); + + page.find('.fa-pause').simulate('click'); + + // wait for api call to resolve + await Promise.resolve(); + page.update(); + + expect(store.alertList.rules[0].state).toBe('paused'); + expect(page.find('.fa-play')).toHaveLength(1); + }); +}); diff --git a/public/app/containers/AlertRuleList/AlertRuleList.tsx b/public/app/containers/AlertRuleList/AlertRuleList.tsx new file mode 100644 index 00000000000..9ecb9a177d7 --- /dev/null +++ b/public/app/containers/AlertRuleList/AlertRuleList.tsx @@ -0,0 +1,175 @@ +import React from 'react'; +import classNames from 'classnames'; +import { inject, observer } from 'mobx-react'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import { IAlertRule } from 'app/stores/AlertListStore/AlertListStore'; +import appEvents from 'app/core/app_events'; +import 
IContainerProps from 'app/containers/IContainerProps'; +import Highlighter from 'react-highlight-words'; + +@inject('view', 'nav', 'alertList') +@observer +export class AlertRuleList extends React.Component { + stateFilters = [ + { text: 'All', value: 'all' }, + { text: 'OK', value: 'ok' }, + { text: 'Not OK', value: 'not_ok' }, + { text: 'Alerting', value: 'alerting' }, + { text: 'No Data', value: 'no_data' }, + { text: 'Paused', value: 'paused' }, + ]; + + constructor(props) { + super(props); + + this.props.nav.load('alerting', 'alert-list'); + this.fetchRules(); + } + + onStateFilterChanged = evt => { + this.props.view.updateQuery({ state: evt.target.value }); + this.fetchRules(); + }; + + fetchRules() { + this.props.alertList.loadRules({ + state: this.props.view.query.get('state') || 'all', + }); + } + + onOpenHowTo = () => { + appEvents.emit('show-modal', { + src: 'public/app/features/alerting/partials/alert_howto.html', + modalClass: 'confirm-modal', + model: {}, + }); + }; + + onSearchQueryChange = evt => { + this.props.alertList.setSearchQuery(evt.target.value); + }; + + render() { + const { nav, alertList } = this.props; + + return ( +
+ +
+
+
+ +
+
+ + +
+ +
+
+ + + +
+
    + {alertList.filteredRules.map(rule => ( + + ))} +
+
+
+
+ ); + } +} + +function AlertStateFilterOption({ text, value }) { + return ( + + ); +} + +export interface AlertRuleItemProps { + rule: IAlertRule; + search: string; +} + +@observer +export class AlertRuleItem extends React.Component { + toggleState = () => { + this.props.rule.togglePaused(); + }; + + renderText(text: string) { + return ( + + ); + } + + render() { + const { rule } = this.props; + + let stateClass = classNames({ + fa: true, + 'fa-play': rule.isPaused, + 'fa-pause': !rule.isPaused, + }); + + let ruleUrl = `${rule.url}?panelId=${rule.panelId}&fullscreen=true&edit=true&tab=alert`; + + return ( +
  • + + + +
    +
    + +
    + {this.renderText(rule.stateText)} + for {rule.stateAge} +
    +
    + {rule.info &&
    {this.renderText(rule.info)}
    } +
    + +
    + + + + +
    +
  • + ); + } +} diff --git a/public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.jest.tsx.snap b/public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.jest.tsx.snap new file mode 100644 index 00000000000..f408f6409be --- /dev/null +++ b/public/app/containers/AlertRuleList/__snapshots__/AlertRuleList.jest.tsx.snap @@ -0,0 +1,103 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`AlertRuleList should render 1 rule 1`] = ` +
  • + + + +
    +
    + +
    + + + + + OK + + + + + + for + 5 minutes + +
    +
    +
    +
    + + + + +
    +
  • +`; diff --git a/public/app/containers/IContainerProps.ts b/public/app/containers/IContainerProps.ts new file mode 100644 index 00000000000..6e790cee06d --- /dev/null +++ b/public/app/containers/IContainerProps.ts @@ -0,0 +1,20 @@ +import { SearchStore } from './../stores/SearchStore/SearchStore'; +import { ServerStatsStore } from './../stores/ServerStatsStore/ServerStatsStore'; +import { NavStore } from './../stores/NavStore/NavStore'; +import { PermissionsStore } from './../stores/PermissionsStore/PermissionsStore'; +import { AlertListStore } from './../stores/AlertListStore/AlertListStore'; +import { ViewStore } from './../stores/ViewStore/ViewStore'; +import { FolderStore } from './../stores/FolderStore/FolderStore'; + +interface IContainerProps { + search: typeof SearchStore.Type; + serverStats: typeof ServerStatsStore.Type; + nav: typeof NavStore.Type; + alertList: typeof AlertListStore.Type; + permissions: typeof PermissionsStore.Type; + view: typeof ViewStore.Type; + folder: typeof FolderStore.Type; + backendSrv: any; +} + +export default IContainerProps; diff --git a/public/app/containers/ManageDashboards/FolderPermissions.tsx b/public/app/containers/ManageDashboards/FolderPermissions.tsx new file mode 100644 index 00000000000..9c82db1c18c --- /dev/null +++ b/public/app/containers/ManageDashboards/FolderPermissions.tsx @@ -0,0 +1,74 @@ +import React, { Component } from 'react'; +import { inject, observer } from 'mobx-react'; +import { toJS } from 'mobx'; +import IContainerProps from 'app/containers/IContainerProps'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import Permissions from 'app/core/components/Permissions/Permissions'; +import Tooltip from 'app/core/components/Tooltip/Tooltip'; +import PermissionsInfo from 'app/core/components/Permissions/PermissionsInfo'; +import AddPermissions from 'app/core/components/Permissions/AddPermissions'; +import SlideDown from 'app/core/components/Animations/SlideDown'; + +@inject('nav', 
'folder', 'view', 'permissions') +@observer +export class FolderPermissions extends Component { + constructor(props) { + super(props); + this.handleAddPermission = this.handleAddPermission.bind(this); + this.loadStore(); + } + + componentWillUnmount() { + const { permissions } = this.props; + permissions.hideAddPermissions(); + } + + loadStore() { + const { nav, folder, view } = this.props; + return folder.load(view.routeParams.get('uid') as string).then(res => { + view.updatePathAndQuery(`${res.url}/permissions`, {}, {}); + return nav.initFolderNav(toJS(folder.folder), 'manage-folder-permissions'); + }); + } + + handleAddPermission() { + const { permissions } = this.props; + permissions.toggleAddPermissions(); + } + + render() { + const { nav, folder, permissions, backendSrv } = this.props; + + if (!folder.folder || !nav.main) { + return

    Loading

    ; + } + + const dashboardId = folder.folder.id; + + return ( +
    + +
    +
    +

    Folder Permissions

    + + + +
    + +
    + + + + +
    +
    + ); + } +} diff --git a/public/app/containers/ManageDashboards/FolderSettings.jest.tsx b/public/app/containers/ManageDashboards/FolderSettings.jest.tsx new file mode 100644 index 00000000000..bed3d569bcc --- /dev/null +++ b/public/app/containers/ManageDashboards/FolderSettings.jest.tsx @@ -0,0 +1,84 @@ +import React from 'react'; +import { FolderSettings } from './FolderSettings'; +import { RootStore } from 'app/stores/RootStore/RootStore'; +import { backendSrv } from 'test/mocks/common'; +import { shallow } from 'enzyme'; + +describe('FolderSettings', () => { + let wrapper; + let page; + + beforeAll(() => { + backendSrv.getFolderByUid.mockReturnValue( + Promise.resolve({ + id: 1, + uid: 'uid', + title: 'Folder Name', + url: '/dashboards/f/uid/folder-name', + canSave: true, + version: 1, + }) + ); + + const store = RootStore.create( + { + view: { + path: 'asd', + query: {}, + routeParams: { + uid: 'uid-str', + }, + }, + }, + { + backendSrv: backendSrv, + } + ); + + wrapper = shallow(); + page = wrapper.dive(); + return page + .instance() + .loadStore() + .then(() => { + page.update(); + }); + }); + + it('should set the title input field', () => { + const titleInput = page.find('.gf-form-input'); + expect(titleInput).toHaveLength(1); + expect(titleInput.prop('value')).toBe('Folder Name'); + }); + + it('should update title and enable save button when changed', () => { + const titleInput = page.find('.gf-form-input'); + const disabledSubmitButton = page.find('button[type="submit"]'); + expect(disabledSubmitButton.prop('disabled')).toBe(true); + + titleInput.simulate('change', { target: { value: 'New Title' } }); + + const updatedTitleInput = page.find('.gf-form-input'); + expect(updatedTitleInput.prop('value')).toBe('New Title'); + const enabledSubmitButton = page.find('button[type="submit"]'); + expect(enabledSubmitButton.prop('disabled')).toBe(false); + }); + + it('should disable save button if title is changed back to old title', () => { + const titleInput = 
page.find('.gf-form-input'); + + titleInput.simulate('change', { target: { value: 'Folder Name' } }); + + const enabledSubmitButton = page.find('button[type="submit"]'); + expect(enabledSubmitButton.prop('disabled')).toBe(true); + }); + + it('should disable save button if title is changed to empty string', () => { + const titleInput = page.find('.gf-form-input'); + + titleInput.simulate('change', { target: { value: '' } }); + + const enabledSubmitButton = page.find('button[type="submit"]'); + expect(enabledSubmitButton.prop('disabled')).toBe(true); + }); +}); diff --git a/public/app/containers/ManageDashboards/FolderSettings.tsx b/public/app/containers/ManageDashboards/FolderSettings.tsx new file mode 100644 index 00000000000..586a8f05b4c --- /dev/null +++ b/public/app/containers/ManageDashboards/FolderSettings.tsx @@ -0,0 +1,158 @@ +import React from 'react'; +import { inject, observer } from 'mobx-react'; +import { toJS } from 'mobx'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import IContainerProps from 'app/containers/IContainerProps'; +import { getSnapshot } from 'mobx-state-tree'; +import appEvents from 'app/core/app_events'; + +@inject('nav', 'folder', 'view') +@observer +export class FolderSettings extends React.Component { + formSnapshot: any; + + constructor(props) { + super(props); + this.loadStore(); + } + + loadStore() { + const { nav, folder, view } = this.props; + + return folder.load(view.routeParams.get('uid') as string).then(res => { + this.formSnapshot = getSnapshot(folder); + view.updatePathAndQuery(`${res.url}/settings`, {}, {}); + + return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings'); + }); + } + + onTitleChange(evt) { + this.props.folder.setTitle(this.getFormSnapshot().folder.title, evt.target.value); + } + + getFormSnapshot() { + if (!this.formSnapshot) { + this.formSnapshot = getSnapshot(this.props.folder); + } + + return this.formSnapshot; + } + + save(evt) { + if (evt) { + 
evt.stopPropagation(); + evt.preventDefault(); + } + + const { nav, folder, view } = this.props; + + folder + .saveFolder({ overwrite: false }) + .then(newUrl => { + view.updatePathAndQuery(newUrl, {}, {}); + + appEvents.emit('dashboard-saved'); + appEvents.emit('alert-success', ['Folder saved']); + }) + .then(() => { + return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings'); + }) + .catch(this.handleSaveFolderError.bind(this)); + } + + delete(evt) { + if (evt) { + evt.stopPropagation(); + evt.preventDefault(); + } + + const { folder, view } = this.props; + const title = folder.folder.title; + + appEvents.emit('confirm-modal', { + title: 'Delete', + text: `Do you want to delete this folder and all its dashboards?`, + icon: 'fa-trash', + yesText: 'Delete', + onConfirm: () => { + return folder.deleteFolder().then(() => { + appEvents.emit('alert-success', ['Folder Deleted', `${title} has been deleted`]); + view.updatePathAndQuery('dashboards', '', ''); + }); + }, + }); + } + + handleSaveFolderError(err) { + if (err.data && err.data.status === 'version-mismatch') { + err.isHandled = true; + + const { nav, folder, view } = this.props; + + appEvents.emit('confirm-modal', { + title: 'Conflict', + text: 'Someone else has updated this folder.', + text2: 'Would you still like to save this folder?', + yesText: 'Save & Overwrite', + icon: 'fa-warning', + onConfirm: () => { + folder + .saveFolder({ overwrite: true }) + .then(newUrl => { + view.updatePathAndQuery(newUrl, {}, {}); + + appEvents.emit('dashboard-saved'); + appEvents.emit('alert-success', ['Folder saved']); + }) + .then(() => { + return nav.initFolderNav(toJS(folder.folder), 'manage-folder-settings'); + }); + }, + }); + } + } + + render() { + const { nav, folder } = this.props; + + if (!folder.folder || !nav.main) { + return

    Loading

    ; + } + + return ( +
    + +
    +

    Folder Settings

    + +
    +
    +
    + + +
    +
    + + +
    + +
    +
    +
    + ); + } +} diff --git a/public/app/containers/ServerStats/ServerStats.jest.tsx b/public/app/containers/ServerStats/ServerStats.jest.tsx new file mode 100644 index 00000000000..a329a47527d --- /dev/null +++ b/public/app/containers/ServerStats/ServerStats.jest.tsx @@ -0,0 +1,30 @@ +import React from 'react'; +import renderer from 'react-test-renderer'; +import { ServerStats } from './ServerStats'; +import { RootStore } from 'app/stores/RootStore/RootStore'; +import { backendSrv, createNavTree } from 'test/mocks/common'; + +describe('ServerStats', () => { + it('Should render table with stats', done => { + backendSrv.get.mockReturnValue( + Promise.resolve({ + dashboards: 10, + }) + ); + + const store = RootStore.create( + {}, + { + backendSrv: backendSrv, + navTree: createNavTree('cfg', 'admin', 'server-stats'), + } + ); + + const page = renderer.create(); + + setTimeout(() => { + expect(page.toJSON()).toMatchSnapshot(); + done(); + }); + }); +}); diff --git a/public/app/containers/ServerStats/ServerStats.tsx b/public/app/containers/ServerStats/ServerStats.tsx new file mode 100644 index 00000000000..e40b441d967 --- /dev/null +++ b/public/app/containers/ServerStats/ServerStats.tsx @@ -0,0 +1,45 @@ +import React from 'react'; +import { inject, observer } from 'mobx-react'; +import PageHeader from 'app/core/components/PageHeader/PageHeader'; +import IContainerProps from 'app/containers/IContainerProps'; + +@inject('nav', 'serverStats') +@observer +export class ServerStats extends React.Component { + constructor(props) { + super(props); + const { nav, serverStats } = this.props; + + nav.load('cfg', 'admin', 'server-stats'); + serverStats.load(); + } + + render() { + const { nav, serverStats } = this.props; + return ( +
    + +
    + + + + + + + + {serverStats.stats.map(StatItem)} +
    NameValue
    +
    +
    + ); + } +} + +function StatItem(stat) { + return ( + + {stat.name} + {stat.value} + + ); +} diff --git a/public/app/containers/ServerStats/__snapshots__/ServerStats.jest.tsx.snap b/public/app/containers/ServerStats/__snapshots__/ServerStats.jest.tsx.snap new file mode 100644 index 00000000000..eac793ca2ca --- /dev/null +++ b/public/app/containers/ServerStats/__snapshots__/ServerStats.jest.tsx.snap @@ -0,0 +1,170 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`ServerStats Should render table with stats 1`] = ` +
    +
    +
    +
    +
    + + + + +
    +

    + admin-Text +

    + +
    +
    + +
    +
    +
    +
    + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
    + Name + + Value +
    + Total dashboards + + 10 +
    + Total users + + 0 +
    + Active users (seen last 30 days) + + 0 +
    + Total orgs + + 0 +
    + Total playlists + + 0 +
    + Total snapshots + + 0 +
    + Total dashboard tags + + 0 +
    + Total starred dashboards + + 0 +
    + Total alerts + + 0 +
    +
    +
    +`; diff --git a/public/app/core/angular_wrappers.ts b/public/app/core/angular_wrappers.ts index 83a70fa4c8a..ace0eb00b07 100644 --- a/public/app/core/angular_wrappers.ts +++ b/public/app/core/angular_wrappers.ts @@ -3,10 +3,22 @@ import { PasswordStrength } from './components/PasswordStrength'; import PageHeader from './components/PageHeader/PageHeader'; import EmptyListCTA from './components/EmptyListCTA/EmptyListCTA'; import LoginBackground from './components/Login/LoginBackground'; +import { SearchResult } from './components/search/SearchResult'; +import { TagFilter } from './components/TagFilter/TagFilter'; +import UserPicker from './components/Picker/UserPicker'; +import DashboardPermissions from './components/Permissions/DashboardPermissions'; export function registerAngularDirectives() { react2AngularDirective('passwordStrength', PasswordStrength, ['password']); react2AngularDirective('pageHeader', PageHeader, ['model', 'noTabs']); react2AngularDirective('emptyListCta', EmptyListCTA, ['model']); react2AngularDirective('loginBackground', LoginBackground, []); + react2AngularDirective('searchResult', SearchResult, []); + react2AngularDirective('tagFilter', TagFilter, [ + 'tags', + ['onSelect', { watchDepth: 'reference' }], + ['tagOptions', { watchDepth: 'reference' }], + ]); + react2AngularDirective('selectUserPicker', UserPicker, ['backendSrv', 'handlePicked']); + react2AngularDirective('dashboardPermissions', DashboardPermissions, ['backendSrv', 'dashboardId', 'folder']); } diff --git a/public/app/core/components/Animations/SlideDown.tsx b/public/app/core/components/Animations/SlideDown.tsx new file mode 100644 index 00000000000..4d515f98f16 --- /dev/null +++ b/public/app/core/components/Animations/SlideDown.tsx @@ -0,0 +1,37 @@ +import React from 'react'; +import Transition from 'react-transition-group/Transition'; + +const defaultMaxHeight = '200px'; // When animating using max-height we need to use a static value. 
+// If this is not enough, pass in diff --git a/public/app/core/components/PageHeader/PageHeader.jest.tsx b/public/app/core/components/PageHeader/PageHeader.jest.tsx new file mode 100644 index 00000000000..a9ba8d008a3 --- /dev/null +++ b/public/app/core/components/PageHeader/PageHeader.jest.tsx @@ -0,0 +1,53 @@ +import React from 'react'; +import PageHeader from './PageHeader'; +import { shallow } from 'enzyme'; + +describe('PageHeader', () => { + let wrapper; + + describe('when the nav tree has a node with a title', () => { + beforeAll(() => { + const nav = { + main: { + icon: 'fa fa-folder-open', + id: 'node', + subTitle: 'node subtitle', + url: '', + text: 'node', + }, + node: {}, + }; + wrapper = shallow(); + }); + + it('should render the title', () => { + const title = wrapper.find('.page-header__title'); + expect(title.text()).toBe('node'); + }); + }); + + describe('when the nav tree has a node with breadcrumbs and a title', () => { + beforeAll(() => { + const nav = { + main: { + icon: 'fa fa-folder-open', + id: 'child', + subTitle: 'child subtitle', + url: '', + text: 'child', + breadcrumbs: [{ title: 'Parent', url: 'parentUrl' }], + }, + node: {}, + }; + wrapper = shallow(); + }); + + it('should render the title with breadcrumbs first and then title last', () => { + const title = wrapper.find('.page-header__title'); + expect(title.text()).toBe('Parent / child'); + + const parentLink = wrapper.find('.page-header__title > a.text-link'); + expect(parentLink.prop('href')).toBe('parentUrl'); + }); + }); +}); diff --git a/public/app/core/components/PageHeader/PageHeader.tsx b/public/app/core/components/PageHeader/PageHeader.tsx index 9b45267a8e5..f998cb9981f 100644 --- a/public/app/core/components/PageHeader/PageHeader.tsx +++ b/public/app/core/components/PageHeader/PageHeader.tsx @@ -1,61 +1,15 @@ -import React from "react"; -import { NavModel, NavModelItem } from "../../nav_model_srv"; -import classNames from "classnames"; -import appEvents from 
"app/core/app_events"; +import React from 'react'; +import { observer } from 'mobx-react'; +import { NavModel, NavModelItem } from '../../nav_model_srv'; +import classNames from 'classnames'; +import appEvents from 'app/core/app_events'; +import { toJS } from 'mobx'; export interface IProps { model: NavModel; } -function TabItem(tab: NavModelItem) { - if (tab.hideFromTabs) { - return null; - } - - let tabClasses = classNames({ - "gf-tabs-link": true, - active: tab.active - }); - - return ( -
  • - - - {tab.text} - -
  • - ); -} - -function SelectOption(navItem: NavModelItem) { - if (navItem.hideFromTabs) { - // TODO: Rename hideFromTabs => hideFromNav - return null; - } - - return ( - - ); -} - -function Navigation({ main }: { main: NavModelItem }) { - return ( - - ); -} - -function SelectNav({ - main, - customCss -}: { - main: NavModelItem; - customCss: string; -}) { +const SelectNav = ({ main, customCss }: { main: NavModelItem; customCss: string }) => { const defaultSelectedItem = main.children.find(navItem => { return navItem.active === true; }); @@ -63,40 +17,90 @@ function SelectNav({ const gotoUrl = evt => { var element = evt.target; var url = element.options[element.selectedIndex].value; - appEvents.emit("location-change", { href: url }); + appEvents.emit('location-change', { href: url }); }; return (
    -
    ); -} +}; -function Tabs({ main, customCss }: { main: NavModelItem; customCss: string }) { +const Tabs = ({ main, customCss }: { main: NavModelItem; customCss: string }) => { return ( -
      {main.children.map(TabItem)}
    - ); -} +
      + {main.children.map((tab, idx) => { + if (tab.hideFromTabs) { + return null; + } + const tabClasses = classNames({ + 'gf-tabs-link': true, + active: tab.active, + }); + + return ( +
    • + + + {tab.text} + +
    • + ); + })} +
    + ); +}; + +const Navigation = ({ main }: { main: NavModelItem }) => { + return ( + + ); +}; + +@observer export default class PageHeader extends React.Component { constructor(props) { super(props); } - renderBreadcrumb(breadcrumbs) { + shouldComponentUpdate() { + //Hack to re-render on changed props from angular with the @observer decorator + return true; + } + + renderTitle(title: string, breadcrumbs: any[]) { + if (!title && (!breadcrumbs || breadcrumbs.length === 0)) { + return null; + } + + if (!breadcrumbs || breadcrumbs.length === 0) { + return

    {title}

    ; + } + const breadcrumbsResult = []; for (let i = 0; i < breadcrumbs.length; i++) { const bc = breadcrumbs[i]; @@ -110,7 +114,9 @@ export default class PageHeader extends React.Component { breadcrumbsResult.push( / {bc.title}); } } - return breadcrumbsResult; + breadcrumbsResult.push( / {title}); + + return

    {breadcrumbsResult}

    ; } renderHeaderTitle(main) { @@ -122,16 +128,8 @@ export default class PageHeader extends React.Component {
    - {main.text &&

    {main.text}

    } - {main.breadcrumbs && - main.breadcrumbs.length > 0 && ( -

    - {this.renderBreadcrumb(main.breadcrumbs)} -

    - )} - {main.subTitle && ( -
    {main.subTitle}
    - )} + {this.renderTitle(main.text, main.breadcrumbs)} + {main.subTitle &&
    {main.subTitle}
    } {main.subType && (
    @@ -150,12 +148,14 @@ export default class PageHeader extends React.Component { return null; } + const main = toJS(model.main); // Convert to JS if its a mobx observable + return (
    - {this.renderHeaderTitle(model.main)} - {model.main.children && } + {this.renderHeaderTitle(main)} + {main.children && }
    diff --git a/public/app/core/components/Permissions/AddPermissions.jest.tsx b/public/app/core/components/Permissions/AddPermissions.jest.tsx new file mode 100644 index 00000000000..fe97c4c7e62 --- /dev/null +++ b/public/app/core/components/Permissions/AddPermissions.jest.tsx @@ -0,0 +1,90 @@ +import React from 'react'; +import AddPermissions from './AddPermissions'; +import { RootStore } from 'app/stores/RootStore/RootStore'; +import { backendSrv } from 'test/mocks/common'; +import { shallow } from 'enzyme'; + +describe('AddPermissions', () => { + let wrapper; + let store; + let instance; + + beforeAll(() => { + backendSrv.get.mockReturnValue( + Promise.resolve([ + { id: 2, dashboardId: 1, role: 'Viewer', permission: 1, permissionName: 'View' }, + { id: 3, dashboardId: 1, role: 'Editor', permission: 1, permissionName: 'Edit' }, + ]) + ); + + backendSrv.post = jest.fn(() => Promise.resolve({})); + + store = RootStore.create( + {}, + { + backendSrv: backendSrv, + } + ); + + wrapper = shallow(); + instance = wrapper.instance(); + return store.permissions.load(1, true, false); + }); + + describe('when permission for a user is added', () => { + it('should save permission to db', () => { + const evt = { + target: { + value: 'User', + }, + }; + const userItem = { + id: 2, + login: 'user2', + }; + + instance.typeChanged(evt); + instance.userPicked(userItem); + + wrapper.update(); + + expect(wrapper.find('[data-save-permission]').prop('disabled')).toBe(false); + + wrapper.find('form').simulate('submit', { preventDefault() {} }); + + expect(backendSrv.post.mock.calls.length).toBe(1); + expect(backendSrv.post.mock.calls[0][0]).toBe('/api/dashboards/id/1/permissions'); + }); + }); + + describe('when permission for team is added', () => { + it('should save permission to db', () => { + const evt = { + target: { + value: 'Group', + }, + }; + + const teamItem = { + id: 2, + name: 'ug1', + }; + + instance.typeChanged(evt); + instance.teamPicked(teamItem); + + wrapper.update(); 
+ + expect(wrapper.find('[data-save-permission]').prop('disabled')).toBe(false); + + wrapper.find('form').simulate('submit', { preventDefault() {} }); + + expect(backendSrv.post.mock.calls.length).toBe(1); + expect(backendSrv.post.mock.calls[0][0]).toBe('/api/dashboards/id/1/permissions'); + }); + }); + + afterEach(() => { + backendSrv.post.mockClear(); + }); +}); diff --git a/public/app/core/components/Permissions/AddPermissions.tsx b/public/app/core/components/Permissions/AddPermissions.tsx new file mode 100644 index 00000000000..07ccfdbbef5 --- /dev/null +++ b/public/app/core/components/Permissions/AddPermissions.tsx @@ -0,0 +1,143 @@ +import React, { Component } from 'react'; +import { observer } from 'mobx-react'; +import { aclTypes } from 'app/stores/PermissionsStore/PermissionsStore'; +import UserPicker, { User } from 'app/core/components/Picker/UserPicker'; +import TeamPicker, { Team } from 'app/core/components/Picker/TeamPicker'; +import DescriptionPicker, { OptionWithDescription } from 'app/core/components/Picker/DescriptionPicker'; +import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; + +export interface IProps { + permissions: any; + backendSrv: any; +} +@observer +class AddPermissions extends Component { + constructor(props) { + super(props); + this.userPicked = this.userPicked.bind(this); + this.teamPicked = this.teamPicked.bind(this); + this.permissionPicked = this.permissionPicked.bind(this); + this.typeChanged = this.typeChanged.bind(this); + this.handleSubmit = this.handleSubmit.bind(this); + } + + componentWillMount() { + const { permissions } = this.props; + permissions.resetNewType(); + } + + typeChanged(evt) { + const { value } = evt.target; + const { permissions } = this.props; + + permissions.setNewType(value); + } + + userPicked(user: User) { + const { permissions } = this.props; + if (!user) { + permissions.newItem.setUser(null, null); + return; + } + return permissions.newItem.setUser(user.id, user.login); + } 
+ + teamPicked(team: Team) { + const { permissions } = this.props; + if (!team) { + permissions.newItem.setTeam(null, null); + return; + } + return permissions.newItem.setTeam(team.id, team.name); + } + + permissionPicked(permission: OptionWithDescription) { + const { permissions } = this.props; + return permissions.newItem.setPermission(permission.value); + } + + resetNewType() { + const { permissions } = this.props; + return permissions.resetNewType(); + } + + handleSubmit(evt) { + evt.preventDefault(); + const { permissions } = this.props; + permissions.addStoreItem(); + } + + render() { + const { permissions, backendSrv } = this.props; + const newItem = permissions.newItem; + const pickerClassName = 'width-20'; + + const isValid = newItem.isValid(); + + return ( +
    + +
    +
    Add Permission For
    +
    +
    +
    + +
    +
    + + {newItem.type === 'User' ? ( +
    + +
    + ) : null} + + {newItem.type === 'Group' ? ( +
    + +
    + ) : null} + +
    + +
    + +
    + +
    +
    +
    +
    + ); + } +} + +export default AddPermissions; diff --git a/public/app/core/components/Permissions/DashboardPermissions.tsx b/public/app/core/components/Permissions/DashboardPermissions.tsx new file mode 100644 index 00000000000..12339cc7c34 --- /dev/null +++ b/public/app/core/components/Permissions/DashboardPermissions.tsx @@ -0,0 +1,70 @@ +import React, { Component } from 'react'; +import { observer } from 'mobx-react'; +import { store } from 'app/stores/store'; +import Permissions from 'app/core/components/Permissions/Permissions'; +import Tooltip from 'app/core/components/Tooltip/Tooltip'; +import PermissionsInfo from 'app/core/components/Permissions/PermissionsInfo'; +import AddPermissions from 'app/core/components/Permissions/AddPermissions'; +import SlideDown from 'app/core/components/Animations/SlideDown'; +import { FolderInfo } from './FolderInfo'; + +export interface IProps { + dashboardId: number; + folder?: FolderInfo; + backendSrv: any; +} +@observer +class DashboardPermissions extends Component { + permissions: any; + + constructor(props) { + super(props); + this.handleAddPermission = this.handleAddPermission.bind(this); + this.permissions = store.permissions; + } + + handleAddPermission() { + this.permissions.toggleAddPermissions(); + } + + componentWillUnmount() { + this.permissions.hideAddPermissions(); + } + + render() { + const { dashboardId, folder, backendSrv } = this.props; + + return ( +
    +
    +
    +

    Permissions

    + + + +
    + +
    +
    + + + + +
    + ); + } +} + +export default DashboardPermissions; diff --git a/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx b/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx new file mode 100644 index 00000000000..db45714136e --- /dev/null +++ b/public/app/core/components/Permissions/DisabledPermissionsListItem.tsx @@ -0,0 +1,40 @@ +import React, { Component } from 'react'; +import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker'; +import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; + +export interface IProps { + item: any; +} + +export default class DisabledPermissionListItem extends Component { + render() { + const { item } = this.props; + + return ( + + + + + + + Can + +
    + {}} + value={item.permission} + disabled={true} + className={'gf-form-input--form-dropdown-right'} + /> +
    + + + + + + ); + } +} diff --git a/public/app/core/components/Permissions/FolderInfo.ts b/public/app/core/components/Permissions/FolderInfo.ts new file mode 100644 index 00000000000..d4a6020bb71 --- /dev/null +++ b/public/app/core/components/Permissions/FolderInfo.ts @@ -0,0 +1,5 @@ +export interface FolderInfo { + id: number; + title: string; + url: string; +} diff --git a/public/app/core/components/Permissions/Permissions.tsx b/public/app/core/components/Permissions/Permissions.tsx new file mode 100644 index 00000000000..0a0572ed86e --- /dev/null +++ b/public/app/core/components/Permissions/Permissions.tsx @@ -0,0 +1,92 @@ +import React, { Component } from 'react'; +import PermissionsList from './PermissionsList'; +import { observer } from 'mobx-react'; +import { FolderInfo } from './FolderInfo'; + +export interface DashboardAcl { + id?: number; + dashboardId?: number; + userId?: number; + userLogin?: string; + userEmail?: string; + teamId?: number; + team?: string; + permission?: number; + permissionName?: string; + role?: string; + icon?: string; + nameHtml?: string; + inherited?: boolean; + sortName?: string; + sortRank?: number; +} + +export interface IProps { + dashboardId: number; + folderInfo?: FolderInfo; + permissions?: any; + isFolder: boolean; + backendSrv: any; +} + +@observer +class Permissions extends Component { + constructor(props) { + super(props); + const { dashboardId, isFolder, folderInfo } = this.props; + this.permissionChanged = this.permissionChanged.bind(this); + this.typeChanged = this.typeChanged.bind(this); + this.removeItem = this.removeItem.bind(this); + this.loadStore(dashboardId, isFolder, folderInfo && folderInfo.id === 0); + } + + loadStore(dashboardId, isFolder, isInRoot = false) { + return this.props.permissions.load(dashboardId, isFolder, isInRoot); + } + + permissionChanged(index: number, permission: number, permissionName: string) { + const { permissions } = this.props; + permissions.updatePermissionOnIndex(index, 
permission, permissionName); + } + + removeItem(index: number) { + const { permissions } = this.props; + permissions.removeStoreItem(index); + } + + resetNewType() { + const { permissions } = this.props; + permissions.resetNewType(); + } + + typeChanged(evt) { + const { value } = evt.target; + const { permissions, dashboardId } = this.props; + + if (value === 'Viewer' || value === 'Editor') { + permissions.addStoreItem({ permission: 1, role: value, dashboardId: dashboardId }, dashboardId); + this.resetNewType(); + return; + } + + permissions.setNewType(value); + } + + render() { + const { permissions, folderInfo } = this.props; + + return ( +
    + +
    + ); + } +} + +export default Permissions; diff --git a/public/app/core/components/Permissions/PermissionsInfo.tsx b/public/app/core/components/Permissions/PermissionsInfo.tsx new file mode 100644 index 00000000000..9791e344085 --- /dev/null +++ b/public/app/core/components/Permissions/PermissionsInfo.tsx @@ -0,0 +1,13 @@ +import React from 'react'; + +export default () => { + return ( +
    +
    What are Permissions?
    +

    + An Access Control List (ACL) model is used to limit access to Dashboard Folders. A user or a Team can be + assigned permissions for a folder or for a single dashboard. +

    +
    + ); +}; diff --git a/public/app/core/components/Permissions/PermissionsList.tsx b/public/app/core/components/Permissions/PermissionsList.tsx new file mode 100644 index 00000000000..b215dad2391 --- /dev/null +++ b/public/app/core/components/Permissions/PermissionsList.tsx @@ -0,0 +1,64 @@ +import React, { Component } from 'react'; +import PermissionsListItem from './PermissionsListItem'; +import DisabledPermissionsListItem from './DisabledPermissionsListItem'; +import { observer } from 'mobx-react'; +import { FolderInfo } from './FolderInfo'; + +export interface IProps { + permissions: any[]; + removeItem: any; + permissionChanged: any; + fetching: boolean; + folderInfo?: FolderInfo; +} + +@observer +class PermissionsList extends Component { + render() { + const { permissions, removeItem, permissionChanged, fetching, folderInfo } = this.props; + + return ( + + + Admin Role', + permission: 4, + icon: 'fa fa-fw fa-street-view', + }} + /> + {permissions.map((item, idx) => { + return ( + + ); + })} + {fetching === true && permissions.length < 1 ? ( + + + + ) : null} + + {fetching === false && permissions.length < 1 ? ( + + + + ) : null} + +
    + Loading permissions... +
    + No permissions are set. Will only be accessible by admins. +
    + ); + } +} + +export default PermissionsList; diff --git a/public/app/core/components/Permissions/PermissionsListItem.tsx b/public/app/core/components/Permissions/PermissionsListItem.tsx new file mode 100644 index 00000000000..3140b8fcc0c --- /dev/null +++ b/public/app/core/components/Permissions/PermissionsListItem.tsx @@ -0,0 +1,65 @@ +import React from 'react'; +import { observer } from 'mobx-react'; +import DescriptionPicker from 'app/core/components/Picker/DescriptionPicker'; +import { permissionOptions } from 'app/stores/PermissionsStore/PermissionsStore'; + +const setClassNameHelper = inherited => { + return inherited ? 'gf-form-disabled' : ''; +}; + +export default observer(({ item, removeItem, permissionChanged, itemIndex, folderInfo }) => { + const handleRemoveItem = evt => { + evt.preventDefault(); + removeItem(itemIndex); + }; + + const handleChangePermission = permissionOption => { + permissionChanged(itemIndex, permissionOption.value, permissionOption.label); + }; + + const inheritedFromRoot = item.dashboardId === -1 && folderInfo && folderInfo.id === 0; + + return ( + + + + + + + {item.inherited && + folderInfo && ( + + Inherited from folder{' '} + + {folderInfo.title} + {' '} + + )} + {inheritedFromRoot && Default Permission} + + Can + +
    + +
    + + + {!item.inherited ? ( + + + + ) : ( + + )} + + + ); +}); diff --git a/public/app/core/components/Picker/DescriptionOption.tsx b/public/app/core/components/Picker/DescriptionOption.tsx new file mode 100644 index 00000000000..12a1fdd9163 --- /dev/null +++ b/public/app/core/components/Picker/DescriptionOption.tsx @@ -0,0 +1,56 @@ +import React, { Component } from 'react'; + +export interface IProps { + onSelect: any; + onFocus: any; + option: any; + isFocused: any; + className: any; +} + +class DescriptionOption extends Component { + constructor(props) { + super(props); + this.handleMouseDown = this.handleMouseDown.bind(this); + this.handleMouseEnter = this.handleMouseEnter.bind(this); + this.handleMouseMove = this.handleMouseMove.bind(this); + } + + handleMouseDown(event) { + event.preventDefault(); + event.stopPropagation(); + this.props.onSelect(this.props.option, event); + } + + handleMouseEnter(event) { + this.props.onFocus(this.props.option, event); + } + + handleMouseMove(event) { + if (this.props.isFocused) { + return; + } + this.props.onFocus(this.props.option, event); + } + + render() { + const { option, children, className } = this.props; + return ( +