Compare commits

..

21 Commits

Author SHA1 Message Date
Dominik Prokop
987ef9aab5 Packages: publish packages@6.3.0-beta.1 2019-07-10 14:54:16 +02:00
Dominik Prokop
d799e8ba36 Packages: publish packages@6.3.0-beta.0 2019-07-10 14:53:22 +02:00
Dominik Prokop
da8dfdb13f Merge branch 'grafana/gtk/verify' of github.com:grafana/grafana into grafana/gtk/verify 2019-07-10 14:43:46 +02:00
Dominik Prokop
c052d89af4 Fix ts error 2019-07-10 14:40:07 +02:00
Dominik Prokop
f4e5d38c99 Merge branch 'master' into grafana/gtk/verify 2019-07-10 14:39:48 +02:00
Dominik Prokop
895aba437b Merge branch 'master' into grafana/gtk/verify 2019-07-10 14:34:44 +02:00
ryan
461b97ee80 fix folder paths 2019-07-10 00:33:50 -07:00
ryan
724731fddc merge all dist folders into one 2019-07-10 00:14:18 -07:00
ryan
905f2c3e16 Packages: publish packages@6.3.0-alpha.40 2019-07-09 23:26:20 -07:00
ryan
807594fd65 add download task 2019-07-09 23:16:17 -07:00
ryan
38c288bb9a bump version 2019-07-09 10:56:08 -07:00
ryan
82996d6f0a Packages: publish packages@6.3.0-alpha.39 2019-07-09 10:45:55 -07:00
ryan
f9d0c6525f merge master 2019-07-09 10:04:56 -07:00
ryan
26d5db2b63 use axios for basic testing 2019-07-08 20:53:09 -07:00
ryan
a13b96521d use ci-work folder rather than build 2019-07-08 16:56:29 -07:00
ryan
e7d1f1df14 add stubs for each ci task 2019-07-08 16:00:06 -07:00
ryan
42e7cd7d65 Merge branch 'gtk/verify' of github.com:grafana/grafana into grafana/gtk/verify
* 'gtk/verify' of github.com:grafana/grafana:
  update comments
  copy all svg and png, useful if people don't use the img folder
  validate type and id
2019-07-08 14:16:35 -07:00
ryan
282dd029aa update comments 2019-07-08 09:22:49 -07:00
ryan
15fd54b21b Merge branch 'master' into gtk/verify
* master:
  Docs: Documents new features available with Loki data source in Explore (#17984)
  Prometheus: added time range filter to series labels query (#16851)
  Explore: Adds support for new loki 'start' and 'end' params for labels endpoint (#17512)
  Chore: Removes custom debounce utility in favor of lodash/debounce (#17977)
  Api: Fix auth tokens returning wrong seenAt value (#17980)
2019-07-08 09:15:46 -07:00
ryan
1d9c4cbdfe copy all svg and png, useful if people don't use the img folder 2019-07-07 23:04:46 -07:00
ryan
315476e20a validate type and id 2019-07-07 21:59:59 -07:00
66 changed files with 563 additions and 2381 deletions

View File

@@ -7,17 +7,12 @@ aliases:
only: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
- &filter-not-release-or-master
tags:
ignore: /^v[0--9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
branches:
ignore: master
- &filter-only-master
branches:
only: master
- &filter-only-master-but-not-release
tags:
ignore: /^v[0-9]+(\.[0-9]+){2}(-.+|[^-.]*)$/
branches:
only: master
version: 2
@@ -628,21 +623,6 @@ jobs:
echo "-- no changes to docs files --"
fi
release-next-packages:
docker:
- image: circleci/node:10
steps:
- checkout
- run:
name: Boostrap lerna
command: 'npx lerna bootstrap'
- run:
name: npm - Prepare auth token
command: 'echo //registry.npmjs.org/:_authToken=$NPM_TOKEN >> ~/.npmrc'
- run:
name: Release next packages
command: './scripts/circle-release-next-packages.sh'
workflows:
version: 2
build-master:
@@ -714,11 +694,6 @@ workflows:
requires:
- end-to-end-test
filters: *filter-only-master
- release-next-packages:
requires:
- test-frontend
- build-fast-frontend
filters: *filter-only-master-but-not-release
release:
jobs:
- build-all:
@@ -828,4 +803,3 @@ workflows:
- postgres-integration-test
- cache-server-test
filters: *filter-not-release-or-master

View File

@@ -1,73 +1,4 @@
# 6.4.0 (unreleased)
# 6.3.0-beta1
### Features / Enhancements
* **Alerting**: Add tags to alert rules. [#10989](https://github.com/grafana/grafana/pull/10989), [@Thib17](https://github.com/Thib17)
* **Alerting**: Attempt to send email notifications to all given email addresses. [#16881](https://github.com/grafana/grafana/pull/16881), [@zhulongcheng](https://github.com/zhulongcheng)
* **Alerting**: Improve alert rule testing. [#16286](https://github.com/grafana/grafana/pull/16286), [@marefr](https://github.com/marefr)
* **Alerting**: Support for configuring content field for Discord alert notifier. [#17017](https://github.com/grafana/grafana/pull/17017), [@jan25](https://github.com/jan25)
* **Alertmanager**: Replace illegal chars with underscore in label names. [#17002](https://github.com/grafana/grafana/pull/17002), [@bergquist](https://github.com/bergquist)
* **Auth**: Allow expiration of API keys. [#17678](https://github.com/grafana/grafana/pull/17678), [@papagian](https://github.com/papagian)
* **Auth**: Return device, os and browser when listing user auth tokens in HTTP API. [#17504](https://github.com/grafana/grafana/pull/17504), [@shavonn](https://github.com/shavonn)
* **Auth**: Support list and revoke of user auth tokens in UI. [#17434](https://github.com/grafana/grafana/pull/17434), [@shavonn](https://github.com/shavonn)
* **AzureMonitor**: change clashing built-in Grafana variables/macro names for Azure Logs. [#17140](https://github.com/grafana/grafana/pull/17140), [@shavonn](https://github.com/shavonn)
* **CloudWatch**: Made region visible for AWS Cloudwatch Expressions. [#17243](https://github.com/grafana/grafana/pull/17243), [@utkarshcmu](https://github.com/utkarshcmu)
* **Cloudwatch**: Add AWS DocDB metrics. [#17241](https://github.com/grafana/grafana/pull/17241), [@utkarshcmu](https://github.com/utkarshcmu)
* **Dashboard**: Use timezone dashboard setting when exporting to CSV. [#18002](https://github.com/grafana/grafana/pull/18002), [@dehrax](https://github.com/dehrax)
* **Data links**. [#17267](https://github.com/grafana/grafana/pull/17267), [@torkelo](https://github.com/torkelo)
* **Docker**: Switch base image to ubuntu:latest from debian:stretch to avoid security issues.. [#17066](https://github.com/grafana/grafana/pull/17066), [@bergquist](https://github.com/bergquist)
* **Elasticsearch**: Support for visualizing logs in Explore . [#17605](https://github.com/grafana/grafana/pull/17605), [@marefr](https://github.com/marefr)
* **Explore**: Adds Live option for supported datasources. [#17062](https://github.com/grafana/grafana/pull/17062), [@hugohaggmark](https://github.com/hugohaggmark)
* **Explore**: Adds orgId to URL for sharing purposes. [#17895](https://github.com/grafana/grafana/pull/17895), [@kaydelaney](https://github.com/kaydelaney)
* **Explore**: Adds support for new loki 'start' and 'end' params for labels endpoint. [#17512](https://github.com/grafana/grafana/pull/17512), [@kaydelaney](https://github.com/kaydelaney)
* **Explore**: Adds support for toggling raw query mode in explore. [#17870](https://github.com/grafana/grafana/pull/17870), [@kaydelaney](https://github.com/kaydelaney)
* **Explore**: Allow switching between metrics and logs . [#16959](https://github.com/grafana/grafana/pull/16959), [@marefr](https://github.com/marefr)
* **Explore**: Combines the timestamp and local time columns into one. [#17775](https://github.com/grafana/grafana/pull/17775), [@hugohaggmark](https://github.com/hugohaggmark)
* **Explore**: Display log lines context . [#17097](https://github.com/grafana/grafana/pull/17097), [@dprokop](https://github.com/dprokop)
* **Explore**: Don't parse log levels if provided by field or label. [#17180](https://github.com/grafana/grafana/pull/17180), [@marefr](https://github.com/marefr)
* **Explore**: Improves performance of Logs element by limiting re-rendering. [#17685](https://github.com/grafana/grafana/pull/17685), [@kaydelaney](https://github.com/kaydelaney)
* **Explore**: Support for new LogQL filtering syntax. [#16674](https://github.com/grafana/grafana/pull/16674), [@davkal](https://github.com/davkal)
* **Explore**: Use new TimePicker from Grafana/UI. [#17793](https://github.com/grafana/grafana/pull/17793), [@hugohaggmark](https://github.com/hugohaggmark)
* **Explore**: handle newlines in LogRow Highlighter. [#17425](https://github.com/grafana/grafana/pull/17425), [@rrfeng](https://github.com/rrfeng)
* **Graph**: Added new fill gradient option. [#17528](https://github.com/grafana/grafana/pull/17528), [@torkelo](https://github.com/torkelo)
* **GraphPanel**: Don't sort series when legend table & sort column is not visible . [#17095](https://github.com/grafana/grafana/pull/17095), [@shavonn](https://github.com/shavonn)
* **InfluxDB**: Support for visualizing logs in Explore. [#17450](https://github.com/grafana/grafana/pull/17450), [@hugohaggmark](https://github.com/hugohaggmark)
* **Logging**: Login and Logout actions (#17760). [#17883](https://github.com/grafana/grafana/pull/17883), [@ATTron](https://github.com/ATTron)
* **Logging**: Move log package to pkg/infra. [#17023](https://github.com/grafana/grafana/pull/17023), [@zhulongcheng](https://github.com/zhulongcheng)
* **Metrics**: Expose stats about roles as metrics. [#17469](https://github.com/grafana/grafana/pull/17469), [@bergquist](https://github.com/bergquist)
* **MySQL/Postgres/MSSQL**: Add parsing for day, weeks and year intervals in macros. [#13086](https://github.com/grafana/grafana/pull/13086), [@bernardd](https://github.com/bernardd)
* **MySQL**: Add support for periodically reloading client certs. [#14892](https://github.com/grafana/grafana/pull/14892), [@tpetr](https://github.com/tpetr)
* **Plugins**: replace dataFormats list with skipDataQuery flag in plugin.json. [#16984](https://github.com/grafana/grafana/pull/16984), [@ryantxu](https://github.com/ryantxu)
* **Prometheus**: Take timezone into account for step alignment. [#17477](https://github.com/grafana/grafana/pull/17477), [@fxmiii](https://github.com/fxmiii)
* **Prometheus**: Use overridden panel range for $__range instead of dashboard range. [#17352](https://github.com/grafana/grafana/pull/17352), [@patrick246](https://github.com/patrick246)
* **Prometheus**: added time range filter to series labels query. [#16851](https://github.com/grafana/grafana/pull/16851), [@FUSAKLA](https://github.com/FUSAKLA)
* **Provisioning**: Support folder that doesn't exist yet in dashboard provisioning. [#17407](https://github.com/grafana/grafana/pull/17407), [@Nexucis](https://github.com/Nexucis)
* **Refresh picker**: Handle empty intervals. [#17585](https://github.com/grafana/grafana/pull/17585), [@dehrax](https://github.com/dehrax)
* **Singlestat**: Add y min/max config to singlestat sparklines. [#17527](https://github.com/grafana/grafana/pull/17527), [@pitr](https://github.com/pitr)
* **Snapshot**: use given key and deleteKey. [#16876](https://github.com/grafana/grafana/pull/16876), [@zhulongcheng](https://github.com/zhulongcheng)
* **Templating**: Correctly display __text in multi-value variable after page reload. [#17840](https://github.com/grafana/grafana/pull/17840), [@EduardSergeev](https://github.com/EduardSergeev)
* **Templating**: Support selecting all filtered values of a multi-value variable. [#16873](https://github.com/grafana/grafana/pull/16873), [@r66ad](https://github.com/r66ad)
* **Tracing**: allow propagation with Zipkin headers. [#17009](https://github.com/grafana/grafana/pull/17009), [@jrockway](https://github.com/jrockway)
* **Users**: Disable users removed from LDAP. [#16820](https://github.com/grafana/grafana/pull/16820), [@alexanderzobnin](https://github.com/alexanderzobnin)
### Bug Fixes
* **AddPanel**: Fix issue when removing moved add panel widget . [#17659](https://github.com/grafana/grafana/pull/17659), [@dehrax](https://github.com/dehrax)
* **CLI**: Fix encrypt-datasource-passwords fails with sql error. [#18014](https://github.com/grafana/grafana/pull/18014), [@marefr](https://github.com/marefr)
* **Elasticsearch**: Fix default max concurrent shard requests. [#17770](https://github.com/grafana/grafana/pull/17770), [@marefr](https://github.com/marefr)
* **Explore**: Fix browsing back to dashboard panel. [#17061](https://github.com/grafana/grafana/pull/17061), [@jschill](https://github.com/jschill)
* **Explore**: Fix filter by series level in logs graph. [#17798](https://github.com/grafana/grafana/pull/17798), [@marefr](https://github.com/marefr)
* **Explore**: Fix issues when loading and both graph/table are collapsed. [#17113](https://github.com/grafana/grafana/pull/17113), [@marefr](https://github.com/marefr)
* **Explore**: Fix selection/copy of log lines. [#17121](https://github.com/grafana/grafana/pull/17121), [@marefr](https://github.com/marefr)
* **Fix**: Wrap value of multi variable in array when coming from URL. [#16992](https://github.com/grafana/grafana/pull/16992), [@aocenas](https://github.com/aocenas)
* **Frontend**: Fix for Json tree component not working. [#17608](https://github.com/grafana/grafana/pull/17608), [@srid12](https://github.com/srid12)
* **Graphite**: Fix for issue with alias function being moved last. [#17791](https://github.com/grafana/grafana/pull/17791), [@torkelo](https://github.com/torkelo)
* **Graphite**: Fixes issue with seriesByTag & function with variable param. [#17795](https://github.com/grafana/grafana/pull/17795), [@torkelo](https://github.com/torkelo)
* **Graphite**: use POST for /metrics/find requests. [#17814](https://github.com/grafana/grafana/pull/17814), [@papagian](https://github.com/papagian)
* **HTTP Server**: Serve Grafana with a custom URL path prefix. [#17048](https://github.com/grafana/grafana/pull/17048), [@jan25](https://github.com/jan25)
* **InfluxDB**: Fixes single quotes are not escaped in label value filters. [#17398](https://github.com/grafana/grafana/pull/17398), [@Panzki](https://github.com/Panzki)
* **Prometheus**: Correctly escape '|' literals in interpolated PromQL variables. [#16932](https://github.com/grafana/grafana/pull/16932), [@Limess](https://github.com/Limess)
* **Prometheus**: Fix when adding label for metrics which contains colons in Explore. [#16760](https://github.com/grafana/grafana/pull/16760), [@tolwi](https://github.com/tolwi)
* **SinglestatPanel**: Remove background color when value turns null. [#17552](https://github.com/grafana/grafana/pull/17552), [@druggieri](https://github.com/druggieri)
# 6.3.0 (unreleased)
# 6.2.5 (2019-06-25)

View File

@@ -8,7 +8,7 @@ GO_FILES := ./pkg/...
all: deps build
deps-go:
$(GO) run build.go setup
go run build.go setup
deps-js: node_modules
@@ -16,15 +16,15 @@ deps: deps-js
build-go:
@echo "build go files"
$(GO) run build.go build
GO111MODULE=on go run build.go build
build-server:
@echo "build server"
$(GO) run build.go build-server
GO111MODULE=on go run build.go build-server
build-cli:
@echo "build in CI environment"
$(GO) run build.go build-cli
GO111MODULE=on go run build.go build-cli
build-js:
@echo "build frontend"
@@ -35,7 +35,7 @@ build: build-go build-js
build-docker-dev:
@echo "build development container"
@echo "\033[92mInfo:\033[0m the frontend code is expected to be built already."
$(GO) run build.go -goos linux -pkg-arch amd64 ${OPT} build pkg-archive latest
GO111MODULE=on go run build.go -goos linux -pkg-arch amd64 ${OPT} build pkg-archive latest
cp dist/grafana-latest.linux-x64.tar.gz packaging/docker
cd packaging/docker && docker build --tag grafana/grafana:dev .
@@ -45,7 +45,7 @@ build-docker-full:
test-go:
@echo "test backend"
$(GO) test -v ./pkg/...
GO111MODULE=on go test -v ./pkg/...
test-js:
@echo "test frontend"
@@ -107,7 +107,7 @@ golangci-lint: scripts/go/bin/golangci-lint
go-vet:
@echo "lint via go vet"
@$(GO) vet $(GO_FILES)
@go vet $(GO_FILES)
lint-go: go-vet golangci-lint revive revive-alerting gosec

View File

@@ -147,34 +147,12 @@ Writing & watching frontend tests
```bash
# Run Golang tests using sqlite3 as database (default)
go test ./pkg/...
```
##### Running the MySQL or Postgres backend tests:
# Run Golang tests using mysql as database - convenient to use /docker/blocks/mysql_tests
GRAFANA_TEST_DB=mysql go test ./pkg/...
Run these by setting `GRAFANA_TEST_DB` in your environment.
- `GRAFANA_TEST_DB=mysql` to test MySQL
- `GRAFANA_TEST_DB=postgres` to test Postgres
Follow the instructions in `./devenv` to spin up test containers running the appropriate databases with `docker-compose`
- Use `docker/blocks/mysql_tests` or `docker/blocks/postgres_tests` as appropriate
```bash
# MySQL
# Tests can only be ran in one Go package at a time due to clashing db queries. To run MySQL tests for the "pkg/services/sqlstore" package, run:
GRAFANA_TEST_DB=mysql go test ./pkg/services/sqlstore/...
# Or run all the packages using the circle CI scripts. This method will be slower as the scripts will run all the tests, including the integration tests.
./scripts/circle-test-mysql.sh
```
```bash
# Postgres
# Tests can only be ran in one Go package at a time due to clashing db queries. To run Postgres tests for the "pkg/services/sqlstore" package, run:
GRAFANA_TEST_DB=postgres go test ./pkg/services/sqlstore/...
# Or run all the packages using the circle CI scripts. This method will be slower as the scripts will run all the tests, including the integration tests.
./scripts/circle-test-postgres.sh
# Run Golang tests using postgres as database - convenient to use /docker/blocks/postgres_tests
GRAFANA_TEST_DB=postgres go test ./pkg/...
```
#### End-to-end

View File

@@ -5,7 +5,7 @@
# root_url = %(protocol)s://%(domain)s:10080/grafana/
nginxproxy:
build: docker/blocks/nginx_proxy_mac
build: docker/blocks/nginx_proxy
ports:
- "10080:10080"

View File

@@ -45,8 +45,6 @@ datasources:
password: $PASSWORD
```
If you have a literal `$` in your value and want to avoid interpolation, `$$` can be used.
<hr />
## Configuration Management Tools

View File

@@ -27,7 +27,7 @@ header_name = X-WEBAUTH-USER
header_property = username
# Set to `true` to enable auto sign up of users who do not exist in Grafana DB. Defaults to `true`.
auto_sign_up = true
# If combined with Grafana LDAP integration define sync interval in minutes
# If combined with Grafana LDAP integration define sync interval
ldap_sync_ttl = 60
# Limit where auth proxy requests come from by configuring a list of IP addresses.
# This can be used to prevent users spoofing the X-WEBAUTH-USER header.

View File

@@ -1,4 +1,4 @@
{
"stable": "6.2.5",
"testing": "6.3.0-beta1"
"testing": "6.2.5"
}

View File

@@ -2,5 +2,5 @@
"npmClient": "yarn",
"useWorkspaces": true,
"packages": ["packages/*"],
"version": "6.4.0-alpha.12"
"version": "6.3.0-beta.1"
}

View File

@@ -5,7 +5,7 @@
"company": "Grafana Labs"
},
"name": "grafana",
"version": "6.4.0-pre",
"version": "6.3.0-pre",
"repository": {
"type": "git",
"url": "http://github.com/grafana/grafana.git"
@@ -89,7 +89,7 @@
"ng-annotate-loader": "0.6.1",
"ng-annotate-webpack-plugin": "0.3.0",
"ngtemplate-loader": "2.0.1",
"node-sass": "4.12.0",
"node-sass": "4.11.0",
"npm": "6.9.0",
"optimize-css-assets-webpack-plugin": "5.0.1",
"phantomjs-prebuilt": "2.1.16",
@@ -148,8 +148,7 @@
"themes:generate": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/generateSassVariableFiles.ts",
"packages:prepare": "lerna run clean && npm run test && lerna version --tag-version-prefix=\"packages@\" -m \"Packages: publish %s\" --no-push",
"packages:build": "lerna run clean && lerna run build",
"packages:publish": "lerna publish from-package --contents dist",
"packages:publishNext": "lerna publish from-package --contents dist --dist-tag next --yes"
"packages:publish": "lerna publish from-package --contents dist --dist-tag next --tag-version-prefix=\"packages@\""
},
"husky": {
"hooks": {
@@ -202,7 +201,7 @@
"file-saver": "1.3.8",
"immutable": "3.8.2",
"jquery": "3.4.1",
"lodash": "4.17.14",
"lodash": "4.17.11",
"marked": "0.6.2",
"moment": "2.24.0",
"mousetrap": "1.6.3",

View File

@@ -1,6 +1,6 @@
{
"name": "@grafana/data",
"version": "6.4.0-alpha.12",
"version": "6.3.0-beta.1",
"description": "Grafana Data Library",
"keywords": [
"typescript"
@@ -11,7 +11,8 @@
"typecheck": "tsc --noEmit",
"clean": "rimraf ./dist ./compiled",
"bundle": "rollup -c rollup.config.ts",
"build": "grafana-toolkit package:build --scope=data"
"build": "grafana-toolkit package:build --scope=data",
"postpublish": "npm run clean"
},
"author": "Grafana Labs",
"license": "Apache-2.0",
@@ -36,5 +37,8 @@
"rollup-plugin-visualizer": "0.9.2",
"sinon": "1.17.6",
"typescript": "3.4.1"
},
"resolutions": {
"@types/lodash": "4.14.119"
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@grafana/runtime",
"version": "6.4.0-alpha.12",
"version": "6.3.0-beta.1",
"description": "Grafana Runtime Library",
"keywords": [
"grafana"
@@ -11,7 +11,8 @@
"typecheck": "tsc --noEmit",
"clean": "rimraf ./dist ./compiled",
"bundle": "rollup -c rollup.config.ts",
"build": "grafana-toolkit package:build --scope=runtime"
"build": "grafana-toolkit package:build --scope=runtime",
"postpublish": "npm run clean"
},
"author": "Grafana Labs",
"license": "Apache-2.0",
@@ -32,5 +33,8 @@
"rollup-plugin-typescript2": "0.19.3",
"rollup-plugin-visualizer": "0.9.2",
"typescript": "3.4.1"
},
"resolutions": {
"@types/lodash": "4.14.119"
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@grafana/toolkit",
"version": "6.4.0-alpha.12",
"version": "6.3.0-beta.1",
"description": "Grafana Toolkit",
"keywords": [
"grafana",
@@ -15,7 +15,8 @@
"typecheck": "tsc --noEmit",
"precommit": "npm run tslint & npm run typecheck",
"clean": "rimraf ./dist ./compiled",
"build": "grafana-toolkit toolkit:build"
"build": "grafana-toolkit toolkit:build",
"postpublish": "npm run clean"
},
"author": "Grafana Labs",
"license": "Apache-2.0",
@@ -47,7 +48,7 @@
"jest": "24.8.0",
"jest-cli": "^24.8.0",
"jest-coverage-badges": "^1.1.2",
"lodash": "4.17.14",
"lodash": "4.17.11",
"mini-css-extract-plugin": "^0.7.0",
"node-sass": "^4.12.0",
"optimize-css-assets-webpack-plugin": "^5.0.3",
@@ -74,6 +75,9 @@
"url-loader": "^2.0.1",
"webpack": "4.35.0"
},
"resolutions": {
"@types/lodash": "4.14.119"
},
"devDependencies": {
"@types/glob": "^7.1.1",
"@types/prettier": "^1.16.4"

View File

@@ -15,11 +15,10 @@ import { closeMilestoneTask } from './tasks/closeMilestone';
import { pluginDevTask } from './tasks/plugin.dev';
import {
ciBuildPluginTask,
ciBuildPluginDocsTask,
ciBundlePluginTask,
ciTestPluginTask,
ciPluginReportTask,
ciDeployPluginTask,
ciSetupPluginTask,
} from './tasks/plugin.ci';
import { buildPackageTask } from './tasks/package.build';
@@ -149,21 +148,14 @@ export const run = (includeInternalScripts = false) => {
program
.command('plugin:ci-build')
.option('--backend <backend>', 'For backend task, which backend to run')
.option('--platform <platform>', 'For backend task, which backend to run')
.description('Build the plugin, leaving artifacts in /dist')
.action(async cmd => {
await execTask(ciBuildPluginTask)({
backend: cmd.backend,
platform: cmd.platform,
});
});
program
.command('plugin:ci-docs')
.description('Build the HTML docs')
.action(async cmd => {
await execTask(ciBuildPluginDocsTask)({});
});
program
.command('plugin:ci-bundle')
.description('Create a zip artifact for the plugin')
@@ -171,23 +163,24 @@ export const run = (includeInternalScripts = false) => {
await execTask(ciBundlePluginTask)({});
});
program
.command('plugin:ci-setup')
.option('--installer <installer>', 'Name of installer to download and run')
.description('Install and configure grafana')
.action(async cmd => {
await execTask(ciSetupPluginTask)({
installer: cmd.installer,
});
});
program
.command('plugin:ci-test')
.option('--full', 'run all the tests (even stuff that will break)')
.description('end-to-end test using bundle in /artifacts')
.action(async cmd => {
await execTask(ciTestPluginTask)({
full: cmd.full,
platform: cmd.platform,
});
});
program
.command('plugin:ci-report')
.description('Build a report for this whole process')
.action(async cmd => {
await execTask(ciPluginReportTask)({});
});
program
.command('plugin:ci-deploy')
.description('Publish plugin CI results')

View File

@@ -99,4 +99,4 @@ const buildTaskRunner: TaskRunner<PackageBuildOptions> = async ({ scope }) => {
await Promise.all(scopes.map(s => s()));
};
export const buildPackageTask = new Task<PackageBuildOptions>('Package build', buildTaskRunner);
export const buildPackageTask = new Task<PackageBuildOptions>('@grafana/ui build', buildTaskRunner);

View File

@@ -9,8 +9,8 @@ import path = require('path');
import fs = require('fs');
export interface PluginCIOptions {
backend?: string;
full?: boolean;
platform?: string;
installer?: string;
}
const calcJavascriptSize = (base: string, files?: string[]): number => {
@@ -33,43 +33,23 @@ const calcJavascriptSize = (base: string, files?: string[]): number => {
return size;
};
const getJobFromProcessArgv = () => {
const arg = process.argv[2];
if (arg && arg.startsWith('plugin:ci-')) {
const task = arg.substring('plugin:ci-'.length);
if ('build' === task) {
if ('--platform' === process.argv[3] && process.argv[4]) {
return task + '_' + process.argv[4];
}
return 'build_nodejs';
}
return task;
const getWorkFolder = () => {
let dir = `${process.cwd()}/work`;
if (process.env.CIRCLE_JOB) {
dir = path.resolve(dir, process.env.CIRCLE_JOB);
}
return 'unknown_job';
};
const job = process.env.CIRCLE_JOB || getJobFromProcessArgv();
const getJobFolder = () => {
const dir = path.resolve(process.cwd(), 'ci', 'jobs', job);
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
return dir;
};
const getCiFolder = () => {
const dir = path.resolve(process.cwd(), 'ci');
if (!fs.existsSync(dir)) {
fs.mkdirSync(dir, { recursive: true });
}
return dir;
};
const writeJobStats = (startTime: number, workDir: string) => {
const writeWorkStats = (startTime: number, workDir: string) => {
const elapsed = Date.now() - startTime;
const stats = {
job,
job: `${process.env.CIRCLE_JOB}`,
startTime,
buildTime: elapsed,
endTime: Date.now(),
};
const f = path.resolve(workDir, 'stats.json');
@@ -91,17 +71,16 @@ const writeJobStats = (startTime: number, workDir: string) => {
* Anything that should be put into the final zip file should be put in:
* ~/work/build_xxx/dist
*/
const buildPluginRunner: TaskRunner<PluginCIOptions> = async ({ backend }) => {
const buildPluginRunner: TaskRunner<PluginCIOptions> = async ({ platform }) => {
const start = Date.now();
const workDir = getJobFolder();
const workDir = getWorkFolder();
await execa('rimraf', [workDir]);
fs.mkdirSync(workDir);
if (backend) {
if (platform) {
console.log('TODO, backend support?');
fs.mkdirSync(path.resolve(process.cwd(), 'dist'));
const file = path.resolve(process.cwd(), 'dist', `README_${backend}.txt`);
fs.writeFile(file, `TODO... build bakend plugin: ${backend}!`, err => {
const file = path.resolve(workDir, 'README.txt');
fs.writeFile(workDir + '/README.txt', 'TODO... build it!', err => {
if (err) {
throw new Error('Unable to write: ' + file);
}
@@ -111,98 +90,52 @@ const buildPluginRunner: TaskRunner<PluginCIOptions> = async ({ backend }) => {
await pluginBuildRunner({ coverage: true });
}
// Move local folders to the scoped job folder
for (const name of ['dist', 'coverage']) {
const dir = path.resolve(process.cwd(), name);
if (fs.existsSync(dir)) {
fs.renameSync(dir, path.resolve(workDir, name));
}
// Move dist to the scoped work folder
const distDir = path.resolve(process.cwd(), 'dist');
if (fs.existsSync(distDir)) {
fs.renameSync(distDir, path.resolve(workDir, 'dist'));
}
writeJobStats(start, workDir);
writeWorkStats(start, workDir);
};
export const ciBuildPluginTask = new Task<PluginCIOptions>('Build Plugin', buildPluginRunner);
/**
* 2. Build Docs
*
* Take /docs/* and format it into /ci/docs/HTML site
*
*/
const buildPluginDocsRunner: TaskRunner<PluginCIOptions> = async () => {
const docsSrc = path.resolve(process.cwd(), 'docs');
if (!fs.existsSync(docsSrc)) {
throw new Error('Docs folder does not exist!');
}
const start = Date.now();
const workDir = getJobFolder();
await execa('rimraf', [workDir]);
fs.mkdirSync(workDir);
const docsDest = path.resolve(process.cwd(), 'ci', 'docs');
fs.mkdirSync(docsDest);
const exe = await execa('cp', ['-rv', docsSrc + '/.', docsDest]);
console.log(exe.stdout);
fs.writeFile(path.resolve(docsDest, 'index.html'), `TODO... actually build docs`, err => {
if (err) {
throw new Error('Unable to docs');
}
});
writeJobStats(start, workDir);
};
export const ciBuildPluginDocsTask = new Task<PluginCIOptions>('Build Plugin Docs', buildPluginDocsRunner);
/**
* 2. BUNDLE
*
* Take everything from `~/ci/job/{any}/dist` and
* 1. merge it into: `~/ci/dist`
* 2. zip it into artifacts in `~/ci/artifacts`
* 3. prepare grafana environment in: `~/ci/grafana-test-env`
* Take everything from `~/work/build_XXX/dist` and zip it into
* artifacts
*
*/
const bundlePluginRunner: TaskRunner<PluginCIOptions> = async () => {
const start = Date.now();
const ciDir = getCiFolder();
const artifactsDir = path.resolve(ciDir, 'artifacts');
const distDir = path.resolve(ciDir, 'dist');
const docsDir = path.resolve(ciDir, 'docs');
const grafanaEnvDir = path.resolve(ciDir, 'grafana-test-env');
await execa('rimraf', [artifactsDir, distDir, grafanaEnvDir]);
fs.mkdirSync(artifactsDir);
fs.mkdirSync(distDir);
fs.mkdirSync(grafanaEnvDir);
const workDir = getWorkFolder();
console.log('Build Dist Folder');
// 1. Check for a local 'dist' folder
const d = path.resolve(process.cwd(), 'dist');
if (fs.existsSync(d)) {
await execa('cp', ['-rn', d + '/.', distDir]);
// Copy all `dist` folders to the root dist folder
const distDir = path.resolve(process.cwd(), 'dist');
if (!fs.existsSync(distDir)) {
fs.mkdirSync(distDir);
}
// 2. Look for any 'dist' folders under ci/job/XXX/dist
const dirs = fs.readdirSync(path.resolve(ciDir, 'jobs'));
for (const j of dirs) {
const contents = path.resolve(ciDir, 'jobs', j, 'dist');
if (fs.existsSync(contents)) {
try {
await execa('cp', ['-rn', contents + '/.', distDir]);
} catch (er) {
throw new Error('Duplicate files found in dist folders');
fs.mkdirSync(distDir, { recursive: true });
const dirs = fs.readdirSync(workDir);
for (const dir of dirs) {
if (dir.startsWith('build_')) {
const contents = path.resolve(dir, 'dist');
if (fs.existsSync(contents)) {
await execa('cp', ['-rp', contents, distDir]);
}
}
}
console.log('Building ZIP');
// Create an artifact
const artifactsDir = path.resolve(process.cwd(), 'artifacts');
if (!fs.existsSync(artifactsDir)) {
fs.mkdirSync(artifactsDir, { recursive: true });
}
const pluginInfo = getPluginJson(`${distDir}/plugin.json`);
let zipName = pluginInfo.id + '-' + pluginInfo.info.version + '.zip';
let zipFile = path.resolve(artifactsDir, zipName);
const zipName = pluginInfo.id + '-' + pluginInfo.info.version + '.zip';
const zipFile = path.resolve(artifactsDir, zipName);
process.chdir(distDir);
await execa('zip', ['-r', zipFile, '.']);
restoreCwd();
@@ -211,89 +144,93 @@ const bundlePluginRunner: TaskRunner<PluginCIOptions> = async () => {
if (zipStats.size < 100) {
throw new Error('Invalid zip file: ' + zipFile);
}
const zipInfo: any = {
await execa('sha1sum', [zipFile, '>', zipFile + '.sha1']);
const info = {
name: zipName,
size: zipStats.size,
};
const info: any = {
plugin: zipInfo,
};
try {
const exe = await execa('shasum', [zipFile]);
const idx = exe.stdout.indexOf(' ');
const sha1 = exe.stdout.substring(0, idx);
fs.writeFile(zipFile + '.sha1', sha1, err => {});
zipInfo.sha1 = sha1;
} catch {
console.warn('Unable to read SHA1 Checksum');
}
// If docs exist, zip them into artifacts
if (fs.existsSync(docsDir)) {
zipName = pluginInfo.id + '-' + pluginInfo.info.version + '-docs.zip';
zipFile = path.resolve(artifactsDir, zipName);
process.chdir(docsDir);
await execa('zip', ['-r', zipFile, '.']);
restoreCwd();
const zipStats = fs.statSync(zipFile);
const zipInfo: any = {
name: zipName,
size: zipStats.size,
};
try {
const exe = await execa('shasum', [zipFile]);
const idx = exe.stdout.indexOf(' ');
const sha1 = exe.stdout.substring(0, idx);
fs.writeFile(zipFile + '.sha1', sha1, err => {});
zipInfo.sha1 = sha1;
} catch {
console.warn('Unable to read SHA1 Checksum');
}
info.docs = zipInfo;
}
let p = path.resolve(artifactsDir, 'info.json');
fs.writeFile(p, JSON.stringify(info, null, 2), err => {
const f = path.resolve(artifactsDir, 'info.json');
fs.writeFile(f, JSON.stringify(info, null, 2), err => {
if (err) {
throw new Error('Error writing artifact info: ' + p);
throw new Error('Error writing artifact info: ' + f);
}
});
console.log('Setup Grafan Environment');
p = path.resolve(grafanaEnvDir, 'plugins', pluginInfo.id);
fs.mkdirSync(p, { recursive: true });
await execa('unzip', [zipFile, '-d', p]);
// Write the custom settings
p = path.resolve(grafanaEnvDir, 'custom.ini');
const customIniBody =
`# Autogenerated by @grafana/toolkit \n` +
`[paths] \n` +
`plugins = ${path.resolve(grafanaEnvDir, 'plugins')}\n` +
`\n`; // empty line
fs.writeFile(p, customIniBody, err => {
if (err) {
throw new Error('Unable to write: ' + p);
}
});
writeJobStats(start, getJobFolder());
writeWorkStats(start, workDir);
};
export const ciBundlePluginTask = new Task<PluginCIOptions>('Bundle Plugin', bundlePluginRunner);
/**
* 3. Test (end-to-end)
* 3. Setup (install grafana and setup provisioning)
*
* deploy the zip to a running grafana instance
*
*/
const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ full }) => {
const setupPluginRunner: TaskRunner<PluginCIOptions> = async ({ installer }) => {
const start = Date.now();
const workDir = getJobFolder();
const pluginInfo = getPluginJson(`${process.cwd()}/src/plugin.json`);
if (!installer) {
throw new Error('Missing installer path');
}
// Download the grafana installer
const workDir = getWorkFolder();
const installFile = path.resolve(workDir, installer);
if (!fs.existsSync(installFile)) {
console.log('download', installer);
const exe = await execa('wget', ['-O', installFile, 'https://dl.grafana.com/oss/release/' + installer]);
console.log(exe.stdout);
}
// Find the plugin zip file
const artifactsDir = path.resolve(process.cwd(), 'artifacts');
const artifactsInfo = require(path.resolve(artifactsDir, 'info.json'));
const pluginZip = path.resolve(workDir, 'artifacts', artifactsInfo.name);
if (!fs.existsSync(pluginZip)) {
throw new Error('Missing zip file:' + pluginZip);
}
// Create a grafana runtime folder
const grafanaPluginsDir = path.resolve(require('os').homedir(), 'grafana', 'plugins');
await execa('rimraf', [grafanaPluginsDir]);
fs.mkdirSync(grafanaPluginsDir, { recursive: true });
// unzip package.zip -d /opt
let exe = await execa('unzip', [pluginZip, '-d', grafanaPluginsDir]);
console.log(exe.stdout);
// Write the custom settings
const customIniPath = '/usr/share/grafana/conf/custom.ini';
const customIniBody = `[paths] \n` + `plugins = ${grafanaPluginsDir}\n` + '';
fs.writeFile(customIniPath, customIniBody, err => {
if (err) {
throw new Error('Unable to write: ' + customIniPath);
}
});
console.log('Install Grafana');
exe = await execa('sudo', ['dpkg', 'i', installFile]);
console.log(exe.stdout);
exe = await execa('sudo', ['grafana-server', 'start']);
console.log(exe.stdout);
exe = await execa('grafana-cli', ['--version']);
writeWorkStats(start, workDir + '_setup');
};
export const ciSetupPluginTask = new Task<PluginCIOptions>('Setup Grafana', setupPluginRunner);
/**
* 4. Test (end-to-end)
*
* deploy the zip to a running grafana instance
*
*/
const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ platform }) => {
const start = Date.now();
const workDir = getWorkFolder();
const args = {
withCredentials: true,
@@ -310,28 +247,16 @@ const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ full }) => {
console.log('Grafana Version: ' + JSON.stringify(frontendSettings.data.buildInfo, null, 2));
const allPlugins: any[] = await axios.get('api/plugins', args).data;
// for (const plugin of allPlugins) {
// if (plugin.id === pluginInfo.id) {
// console.log('------------');
// console.log(plugin);
// console.log('------------');
// } else {
// console.log('Plugin:', plugin.id, plugin.latestVersion);
// }
// }
console.log('PLUGINS:', allPlugins);
const pluginInfo = getPluginJson(`${process.cwd()}/src/plugin.json`);
const pluginSettings = await axios.get(`api/plugins/${pluginInfo.id}/settings`, args);
if (full) {
const pluginSettings = await axios.get(`api/plugins/${pluginInfo.id}/settings`, args);
console.log('Plugin Info: ' + JSON.stringify(pluginSettings.data, null, 2));
}
console.log('Plugin Info: ' + JSON.stringify(pluginSettings.data, null, 2));
console.log('TODO puppeteer');
const elapsed = Date.now() - start;
const stats = {
job,
job: `${process.env.CIRCLE_JOB}`,
sha1: `${process.env.CIRCLE_SHA1}`,
startTime: start,
buildTime: elapsed,
@@ -339,45 +264,39 @@ const testPluginRunner: TaskRunner<PluginCIOptions> = async ({ full }) => {
};
console.log('TODO Puppeteer Tests', stats);
writeJobStats(start, workDir);
writeWorkStats(start, workDir);
};
export const ciTestPluginTask = new Task<PluginCIOptions>('Test Plugin (e2e)', testPluginRunner);
/**
* 4. Report
*
* Create a report from all the previous steps
*
*/
const pluginReportRunner: TaskRunner<PluginCIOptions> = async () => {
const start = Date.now();
const workDir = getJobFolder();
const reportDir = path.resolve(process.cwd(), 'ci', 'report');
await execa('rimraf', [reportDir]);
fs.mkdirSync(reportDir);
const file = path.resolve(reportDir, `report.txt`);
fs.writeFile(file, `TODO... actually make a report (csv etc)`, err => {
if (err) {
throw new Error('Unable to write: ' + file);
}
});
console.log('TODO... real report');
writeJobStats(start, workDir);
};
export const ciPluginReportTask = new Task<PluginCIOptions>('Deploy plugin', pluginReportRunner);
/**
* 5. Deploy
* 4. Deploy
*
* deploy the zip to a running grafana instance
*
*/
const deployPluginRunner: TaskRunner<PluginCIOptions> = async () => {
console.log('TODO DEPLOY??');
const start = Date.now();
// TASK Time
if (process.env.CIRCLE_INTERNAL_TASK_DATA) {
const timingInfo = fs.readdirSync(`${process.env.CIRCLE_INTERNAL_TASK_DATA}`);
if (timingInfo) {
timingInfo.forEach(file => {
console.log('TIMING INFO: ', file);
});
}
}
const elapsed = Date.now() - start;
const stats = {
job: `${process.env.CIRCLE_JOB}`,
sha1: `${process.env.CIRCLE_SHA1}`,
startTime: start,
buildTime: elapsed,
endTime: Date.now(),
};
console.log('TODO DEPLOY??', stats);
console.log(' if PR => write a comment to github with difference ');
console.log(' if master | vXYZ ==> upload artifacts to some repo ');
};

View File

@@ -1,6 +1,6 @@
{
"name": "@grafana/ui",
"version": "6.4.0-alpha.12",
"version": "6.3.0-beta.1",
"description": "Grafana Components Library",
"keywords": [
"grafana",
@@ -15,18 +15,18 @@
"storybook:build": "build-storybook -o ./dist/storybook -c .storybook",
"clean": "rimraf ./dist ./compiled",
"bundle": "rollup -c rollup.config.ts",
"build": "grafana-toolkit package:build --scope=ui"
"build": "grafana-toolkit package:build --scope=ui",
"postpublish": "npm run clean"
},
"author": "Grafana Labs",
"license": "Apache-2.0",
"dependencies": {
"@grafana/data": "^6.4.0-alpha.8",
"@torkelo/react-select": "2.1.1",
"@types/react-color": "2.17.0",
"classnames": "2.2.6",
"d3": "5.9.1",
"jquery": "3.4.1",
"lodash": "4.17.14",
"lodash": "4.17.11",
"moment": "2.24.0",
"papaparse": "4.6.3",
"react": "16.8.6",
@@ -77,5 +77,8 @@
"rollup-plugin-typescript2": "0.19.3",
"rollup-plugin-visualizer": "0.9.2",
"typescript": "3.4.1"
},
"resolutions": {
"@types/lodash": "4.14.119"
}
}

View File

@@ -29,7 +29,7 @@ export class TableInputCSV extends React.PureComponent<Props, State> {
};
}
readCSV: any = debounce(() => {
readCSV = debounce(() => {
const { config } = this.props;
const { text } = this.state;

View File

@@ -68,10 +68,7 @@ func (hs *HTTPServer) AddAPIKey(c *models.ReqContext, cmd models.AddApiKeyComman
if err == models.ErrInvalidApiKeyExpiration {
return Error(400, err.Error(), nil)
}
if err == models.ErrDuplicateApiKey {
return Error(409, err.Error(), nil)
}
return Error(500, "Failed to add API Key", err)
return Error(500, "Failed to add API key", err)
}
result := &dtos.NewApiKeyResult{

View File

@@ -278,7 +278,8 @@ func (hs *HTTPServer) PostDashboard(c *m.ReqContext, cmd m.SaveDashboardCommand)
inFolder := cmd.FolderId > 0
err := dashboards.MakeUserAdmin(hs.Bus, cmd.OrgId, cmd.UserId, dashboard.Id, !inFolder)
if err != nil {
hs.log.Error("Could not make user admin", "dashboard", dashboard.Title, "user", c.SignedInUser.UserId, "error", err)
hs.log.Error("Could not make user admin", "dashboard", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
return Error(500, "Failed to make user admin of dashboard", err)
}
}

View File

@@ -64,6 +64,7 @@ func (hs *HTTPServer) CreateFolder(c *m.ReqContext, cmd m.CreateFolderCommand) R
if hs.Cfg.EditorsCanAdmin {
if err := dashboards.MakeUserAdmin(hs.Bus, c.OrgId, c.SignedInUser.UserId, cmd.Result.Id, true); err != nil {
hs.log.Error("Could not make user admin", "folder", cmd.Result.Title, "user", c.SignedInUser.UserId, "error", err)
return Error(500, "Failed to make user admin of folder", err)
}
}

View File

@@ -7,7 +7,6 @@ import (
var ErrInvalidApiKey = errors.New("Invalid API Key")
var ErrInvalidApiKeyExpiration = errors.New("Negative value for SecondsToLive")
var ErrDuplicateApiKey = errors.New("API Key Organization ID And Name Must Be Unique")
type ApiKey struct {
Id int64

View File

@@ -14,7 +14,6 @@ import (
"os"
"reflect"
"strconv"
"strings"
"github.com/grafana/grafana/pkg/util/errutil"
)
@@ -186,14 +185,8 @@ func transformMap(i map[interface{}]interface{}) interface{} {
// interpolateValue returns final value after interpolation. At the moment only env var interpolation is done
// here but in the future something like interpolation from file could be also done here.
// For a literal '$', '$$' can be used to avoid interpolation.
func interpolateValue(val string) string {
parts := strings.Split(val, "$$")
interpolated := make([]string, len(parts))
for i, v := range parts {
interpolated[i] = os.ExpandEnv(v)
}
return strings.Join(interpolated, "$")
return os.ExpandEnv(val)
}
type interpolated struct {

View File

@@ -1,18 +1,16 @@
package values
import (
"os"
"testing"
. "github.com/smartystreets/goconvey/convey"
"gopkg.in/yaml.v2"
"os"
"testing"
)
func TestValues(t *testing.T) {
Convey("Values", t, func() {
os.Setenv("INT", "1")
os.Setenv("STRING", "test")
os.Setenv("EMPTYSTRING", "")
os.Setenv("BOOL", "true")
Convey("IntValue", func() {
@@ -63,24 +61,6 @@ func TestValues(t *testing.T) {
So(d.Val.Value(), ShouldEqual, "")
So(d.Val.Raw, ShouldEqual, "")
})
Convey("empty var should have empty value", func() {
unmarshalingTest(`val: $EMPTYSTRING`, d)
So(d.Val.Value(), ShouldEqual, "")
So(d.Val.Raw, ShouldEqual, "$EMPTYSTRING")
})
Convey("$$ should be a literal $", func() {
unmarshalingTest(`val: $$`, d)
So(d.Val.Value(), ShouldEqual, "$")
So(d.Val.Raw, ShouldEqual, "$$")
})
Convey("$$ should be a literal $ and not expanded within a string", func() {
unmarshalingTest(`val: mY,Passwo$$rd`, d)
So(d.Val.Value(), ShouldEqual, "mY,Passwo$rd")
So(d.Val.Raw, ShouldEqual, "mY,Passwo$$rd")
})
})
Convey("BoolValue", func() {
@@ -219,7 +199,6 @@ func TestValues(t *testing.T) {
Reset(func() {
os.Unsetenv("INT")
os.Unsetenv("STRING")
os.Unsetenv("EMPTYSTRING")
os.Unsetenv("BOOL")
})
})

View File

@@ -37,12 +37,6 @@ func DeleteApiKeyCtx(ctx context.Context, cmd *models.DeleteApiKeyCommand) error
func AddApiKey(cmd *models.AddApiKeyCommand) error {
return inTransaction(func(sess *DBSession) error {
key := models.ApiKey{OrgId: cmd.OrgId, Name: cmd.Name}
exists, _ := sess.Get(&key)
if exists {
return models.ErrDuplicateApiKey
}
updated := timeNow()
var expires *int64 = nil
if cmd.SecondsToLive > 0 {

View File

@@ -115,23 +115,3 @@ func TestApiKeyDataAccess(t *testing.T) {
})
})
}
func TestApiKeyErrors(t *testing.T) {
mockTimeNow()
defer resetTimeNow()
t.Run("Testing API Duplicate Key Errors", func(t *testing.T) {
InitTestDB(t)
t.Run("Given saved api key", func(t *testing.T) {
cmd := models.AddApiKeyCommand{OrgId: 0, Name: "duplicate", Key: "asd"}
err := AddApiKey(&cmd)
assert.Nil(t, err)
t.Run("Add API Key with existing Org ID and Name", func(t *testing.T) {
cmd := models.AddApiKeyCommand{OrgId: 0, Name: "duplicate", Key: "asd"}
err = AddApiKey(&cmd)
assert.EqualError(t, err, models.ErrDuplicateApiKey.Error())
})
})
})
}

View File

@@ -324,6 +324,10 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
}
}
const sortedRows = rows.sort((a, b) => {
return a.timestamp > b.timestamp ? -1 : 1;
});
// Meta data to display in status
const meta: LogsMetaItem[] = [];
if (_.size(commonLabels) > 0) {
@@ -339,7 +343,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
if (limits.length > 0) {
meta.push({
label: 'Limit',
value: `${limits[0].meta.limit} (${rows.length} returned)`,
value: `${limits[0].meta.limit} (${sortedRows.length} returned)`,
kind: LogsMetaKind.String,
});
}
@@ -347,7 +351,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel {
return {
hasUniqueLabels,
meta,
rows,
rows: sortedRows,
};
}

View File

@@ -418,13 +418,6 @@ describe('dataFrameToLogsModel', () => {
expect(logsModel.hasUniqueLabels).toBeFalsy();
expect(logsModel.rows).toHaveLength(2);
expect(logsModel.rows).toMatchObject([
{
timestamp: '2019-04-26T09:28:11.352440161Z',
entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'info',
uniqueLabels: {},
},
{
timestamp: '2019-04-26T14:42:50.991981292Z',
entry: 't=2019-04-26T16:42:50+0200 lvl=eror msg="new token…t unhashed token=56d9fdc5c8b7400bd51b060eea8ca9d7',
@@ -432,6 +425,13 @@ describe('dataFrameToLogsModel', () => {
logLevel: 'error',
uniqueLabels: {},
},
{
timestamp: '2019-04-26T09:28:11.352440161Z',
entry: 't=2019-04-26T11:05:28+0200 lvl=info msg="Initializing DatasourceCacheService" logger=server',
labels: { filename: '/var/log/grafana/grafana.log', job: 'grafana' },
logLevel: 'info',
uniqueLabels: {},
},
]);
expect(logsModel.series).toHaveLength(2);
@@ -524,6 +524,12 @@ describe('dataFrameToLogsModel', () => {
expect(logsModel.hasUniqueLabels).toBeTruthy();
expect(logsModel.rows).toHaveLength(3);
expect(logsModel.rows).toMatchObject([
{
entry: 'INFO 2',
labels: { foo: 'bar', baz: '2' },
logLevel: LogLevel.error,
uniqueLabels: { baz: '2' },
},
{
entry: 'WARN boooo',
labels: { foo: 'bar', baz: '1' },
@@ -536,12 +542,6 @@ describe('dataFrameToLogsModel', () => {
logLevel: LogLevel.error,
uniqueLabels: { baz: '2' },
},
{
entry: 'INFO 2',
labels: { foo: 'bar', baz: '2' },
logLevel: LogLevel.error,
uniqueLabels: { baz: '2' },
},
]);
expect(logsModel.series).toHaveLength(2);

View File

@@ -487,11 +487,11 @@ export const getRefIds = (value: any): string[] => {
};
const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp < b.timestamp) {
if (a.timeEpochMs < b.timeEpochMs) {
return -1;
}
if (a.timestamp > b.timestamp) {
if (a.timeEpochMs > b.timeEpochMs) {
return 1;
}
@@ -499,11 +499,11 @@ const sortInAscendingOrder = (a: LogRowModel, b: LogRowModel) => {
};
const sortInDescendingOrder = (a: LogRowModel, b: LogRowModel) => {
if (a.timestamp > b.timestamp) {
if (a.timeEpochMs > b.timeEpochMs) {
return -1;
}
if (a.timestamp < b.timestamp) {
if (a.timeEpochMs < b.timeEpochMs) {
return 1;
}

View File

@@ -72,10 +72,9 @@ export class ResultProcessor {
const graphInterval = this.state.queryIntervals.intervalMs;
const dataFrame = this.rawData.map(result => guessFieldTypes(toDataFrame(result)));
const newResults = this.rawData ? dataFrameToLogsModel(dataFrame, graphInterval) : null;
const sortedNewResults = sortLogsResult(newResults, this.state.refreshInterval);
if (this.replacePreviousResults) {
return sortedNewResults;
return newResults;
}
const prevLogsResult: LogsModel = this.state.logsResult || { hasUniqueLabels: false, rows: [] };
@@ -87,17 +86,17 @@ export class ResultProcessor {
for (const row of rowsInState) {
processedRows.push({ ...row, fresh: false });
}
for (const row of sortedNewResults.rows) {
for (const row of newResults.rows) {
processedRows.push({ ...row, fresh: true });
}
const processedSeries = this.mergeGraphResults(sortedNewResults.series, seriesInState);
const processedSeries = this.mergeGraphResults(newResults.series, seriesInState);
const slice = -1000;
const rows = processedRows.slice(slice);
const series = processedSeries.slice(slice);
return { ...sortedNewResults, rows, series };
return { ...newResults, rows, series };
};
private makeTimeSeriesList = (rawData: any[]) => {

View File

@@ -4,7 +4,7 @@ import { variableRegex } from 'app/features/templating/variable';
import { ScopedVars } from '@grafana/ui';
import { TimeRange } from '@grafana/data';
function luceneEscape(value: string) {
function luceneEscape(value) {
return value.replace(/([\!\*\+\-\=<>\s\&\|\(\)\[\]\{\}\^\~\?\:\\/"])/g, '\\$1');
}
@@ -12,8 +12,8 @@ export class TemplateSrv {
variables: any[];
private regex = variableRegex;
private index: any = {};
private grafanaVariables: any = {};
private index = {};
private grafanaVariables = {};
private builtIns: any = {};
private timeRange: TimeRange = null;
@@ -23,7 +23,7 @@ export class TemplateSrv {
this.variables = [];
}
init(variables: any, timeRange?: TimeRange) {
init(variables, timeRange?: TimeRange) {
this.variables = variables;
this.timeRange = timeRange;
this.updateIndex();
@@ -34,7 +34,7 @@ export class TemplateSrv {
}
updateIndex() {
const existsOrEmpty = (value: any) => value || value === '';
const existsOrEmpty = value => value || value === '';
this.index = this.variables.reduce((acc, currentValue) => {
if (currentValue.current && (currentValue.current.isNone || existsOrEmpty(currentValue.current.value))) {
@@ -64,12 +64,12 @@ export class TemplateSrv {
this.updateIndex();
}
variableInitialized(variable: any) {
variableInitialized(variable) {
this.index[variable.name] = variable;
}
getAdhocFilters(datasourceName: string) {
let filters: any = [];
getAdhocFilters(datasourceName) {
let filters = [];
if (this.variables) {
for (let i = 0; i < this.variables.length; i++) {
@@ -92,7 +92,7 @@ export class TemplateSrv {
return filters;
}
luceneFormat(value: any) {
luceneFormat(value) {
if (typeof value === 'string') {
return luceneEscape(value);
}
@@ -108,7 +108,7 @@ export class TemplateSrv {
// encode string according to RFC 3986; in contrast to encodeURIComponent()
// also the sub-delims "!", "'", "(", ")" and "*" are encoded;
// unicode handling uses UTF-8 as in ECMA-262.
encodeURIComponentStrict(str: string) {
encodeURIComponentStrict(str) {
return encodeURIComponent(str).replace(/[!'()*]/g, c => {
return (
'%' +
@@ -120,7 +120,7 @@ export class TemplateSrv {
});
}
formatValue(value: any, format: any, variable: any) {
formatValue(value, format, variable) {
// for some scopedVars there is no variable
variable = variable || {};
@@ -180,11 +180,11 @@ export class TemplateSrv {
}
}
setGrafanaVariable(name: string, value: any) {
setGrafanaVariable(name, value) {
this.grafanaVariables[name] = value;
}
getVariableName(expression: string) {
getVariableName(expression) {
this.regex.lastIndex = 0;
const match = this.regex.exec(expression);
if (!match) {
@@ -194,12 +194,12 @@ export class TemplateSrv {
return variableName;
}
variableExists(expression: string) {
variableExists(expression) {
const name = this.getVariableName(expression);
return name && this.index[name] !== void 0;
}
highlightVariablesAsHtml(str: string) {
highlightVariablesAsHtml(str) {
if (!str || !_.isString(str)) {
return str;
}
@@ -214,7 +214,7 @@ export class TemplateSrv {
});
}
getAllValue(variable: any) {
getAllValue(variable) {
if (variable.allValue) {
return variable.allValue;
}
@@ -225,7 +225,7 @@ export class TemplateSrv {
return values;
}
replace(target: string, scopedVars?: ScopedVars, format?: string | Function): any {
replace(target: string, scopedVars?: ScopedVars, format?: string | Function) {
if (!target) {
return target;
}
@@ -266,11 +266,11 @@ export class TemplateSrv {
});
}
isAllValue(value: any) {
isAllValue(value) {
return value === '$__all' || (Array.isArray(value) && value[0] === '$__all');
}
replaceWithText(target: string, scopedVars: ScopedVars) {
replaceWithText(target, scopedVars) {
if (!target) {
return target;
}
@@ -278,7 +278,7 @@ export class TemplateSrv {
let variable;
this.regex.lastIndex = 0;
return target.replace(this.regex, (match: any, var1: any, var2: any, fmt2: any, var3: any) => {
return target.replace(this.regex, (match, var1, var2, fmt2, var3) => {
if (scopedVars) {
const option = scopedVars[var1 || var2 || var3];
if (option) {
@@ -297,7 +297,7 @@ export class TemplateSrv {
});
}
fillVariableValuesForUrl(params: any, scopedVars?: ScopedVars) {
fillVariableValuesForUrl(params, scopedVars?) {
_.each(this.variables, variable => {
if (scopedVars && scopedVars[variable.name] !== void 0) {
if (scopedVars[variable.name].skipUrlSync) {
@@ -313,7 +313,7 @@ export class TemplateSrv {
});
}
distributeVariable(value: any, variable: any) {
distributeVariable(value, variable) {
value = _.map(value, (val: any, index: number) => {
if (index !== 0) {
return variable + '=' + val;

View File

@@ -16,12 +16,12 @@ export const variableRegexExec = (variableString: string) => {
};
export interface Variable {
setValue(option: any): any;
updateOptions(): any;
dependsOn(variable: any): any;
setValueFromUrl(urlValue: any): any;
getValueForUrl(): any;
getSaveModel(): any;
setValue(option);
updateOptions();
dependsOn(variable);
setValueFromUrl(urlValue);
getValueForUrl();
getSaveModel();
}
export let variableTypes = {};

View File

@@ -1,5 +1,5 @@
// Libaries
import angular, { IQService, ILocationService, auto, IPromise } from 'angular';
import angular from 'angular';
import _ from 'lodash';
// Utils & Services
@@ -19,9 +19,9 @@ export class VariableSrv {
/** @ngInject */
constructor(
private $q: IQService,
private $location: ILocationService,
private $injector: auto.IInjectorService,
private $q,
private $location,
private $injector,
private templateSrv: TemplateSrv,
private timeSrv: TimeSrv
) {}
@@ -71,7 +71,7 @@ export class VariableSrv {
});
}
processVariable(variable: any, queryParams: any) {
processVariable(variable, queryParams) {
const dependencies = [];
for (const otherVariable of this.variables) {
@@ -100,8 +100,7 @@ export class VariableSrv {
});
}
createVariableFromModel(model: any) {
// @ts-ignore
createVariableFromModel(model) {
const ctor = variableTypes[model.type].ctor;
if (!ctor) {
throw {
@@ -113,24 +112,24 @@ export class VariableSrv {
return variable;
}
addVariable(variable: any) {
addVariable(variable) {
this.variables.push(variable);
this.templateSrv.updateIndex();
this.dashboard.updateSubmenuVisibility();
}
removeVariable(variable: any) {
removeVariable(variable) {
const index = _.indexOf(this.variables, variable);
this.variables.splice(index, 1);
this.templateSrv.updateIndex();
this.dashboard.updateSubmenuVisibility();
}
updateOptions(variable: any) {
updateOptions(variable) {
return variable.updateOptions();
}
variableUpdated(variable: any, emitChangeEvents?: any) {
variableUpdated(variable, emitChangeEvents?) {
// if there is a variable lock ignore cascading update because we are in a boot up scenario
if (variable.initLock) {
return this.$q.when();
@@ -153,7 +152,7 @@ export class VariableSrv {
});
}
selectOptionsForCurrentValue(variable: any) {
selectOptionsForCurrentValue(variable) {
let i, y, value, option;
const selected: any = [];
@@ -177,7 +176,7 @@ export class VariableSrv {
return selected;
}
validateVariableSelectionState(variable: any) {
validateVariableSelectionState(variable) {
if (!variable.current) {
variable.current = {};
}
@@ -222,7 +221,7 @@ export class VariableSrv {
* @param variable Instance of Variable
* @param urlValue Value of the query parameter
*/
setOptionFromUrl(variable: any, urlValue: string | string[]): IPromise<any> {
setOptionFromUrl(variable: any, urlValue: string | string[]): Promise<any> {
let promise = this.$q.when();
if (variable.refresh) {
@@ -269,7 +268,7 @@ export class VariableSrv {
});
}
setOptionAsCurrent(variable: any, option: any) {
setOptionAsCurrent(variable, option) {
variable.current = _.cloneDeep(option);
if (_.isArray(variable.current.text) && variable.current.text.length > 0) {
@@ -299,7 +298,7 @@ export class VariableSrv {
this.$location.search(params);
}
setAdhocFilter(options: any) {
setAdhocFilter(options) {
let variable: any = _.find(this.variables, {
type: 'adhoc',
datasource: options.datasource,

View File

@@ -78,7 +78,7 @@ export class UsersActionBar extends PureComponent<Props> {
}
}
function mapStateToProps(state: any) {
function mapStateToProps(state) {
return {
searchQuery: getUsersSearchQuery(state.users),
pendingInvitesCount: getInviteesCount(state.users),

View File

@@ -34,7 +34,7 @@ export interface State {
export class UsersListPage extends PureComponent<Props, State> {
externalUserMngInfoHtml: string;
constructor(props: Props) {
constructor(props) {
super(props);
if (this.props.externalUserMngInfo) {
@@ -59,13 +59,13 @@ export class UsersListPage extends PureComponent<Props, State> {
return await this.props.loadInvitees();
}
onRoleChange = (role: string, user: OrgUser) => {
onRoleChange = (role, user) => {
const updatedUser = { ...user, role: role };
this.props.updateUser(updatedUser);
};
onRemoveUser = (user: OrgUser) => {
onRemoveUser = user => {
appEvents.emit('confirm-modal', {
title: 'Delete',
text: 'Are you sure you want to delete user ' + user.login + '?',
@@ -119,7 +119,7 @@ export class UsersListPage extends PureComponent<Props, State> {
}
}
function mapStateToProps(state: any) {
function mapStateToProps(state) {
return {
navModel: getNavModel(state.navIndex, 'users'),
users: getUsers(state.users),

View File

@@ -1,6 +1,4 @@
import { UsersState } from 'app/types';
export const getUsers = (state: UsersState) => {
export const getUsers = state => {
const regex = new RegExp(state.searchQuery, 'i');
return state.users.filter(user => {
@@ -8,7 +6,7 @@ export const getUsers = (state: UsersState) => {
});
};
export const getInvitees = (state: UsersState) => {
export const getInvitees = state => {
const regex = new RegExp(state.searchQuery, 'i');
return state.invitees.filter(invitee => {
@@ -16,5 +14,5 @@ export const getInvitees = (state: UsersState) => {
});
};
export const getInviteesCount = (state: UsersState) => state.invitees.length;
export const getUsersSearchQuery = (state: UsersState) => state.searchQuery;
export const getInviteesCount = state => state.invitees.length;
export const getUsersSearchQuery = state => state.searchQuery;

View File

@@ -1,6 +1,4 @@
import _ from 'lodash';
import DatasourceSrv from 'app/features/plugins/datasource_srv';
import CloudWatchDatasource from './datasource';
export class CloudWatchConfigCtrl {
static templateUrl = 'partials/config.html';
current: any;
@@ -10,7 +8,7 @@ export class CloudWatchConfigCtrl {
secretKeyExist = false;
/** @ngInject */
constructor($scope: any, datasourceSrv: DatasourceSrv) {
constructor($scope, datasourceSrv) {
this.current.jsonData.timeField = this.current.jsonData.timeField || '@timestamp';
this.current.jsonData.authType = this.current.jsonData.authType || 'credentials';
@@ -34,7 +32,7 @@ export class CloudWatchConfigCtrl {
{ name: 'ARN', value: 'arn' },
];
indexPatternTypes: any = [
indexPatternTypes = [
{ name: 'No pattern', value: undefined },
{ name: 'Hourly', value: 'Hourly', example: '[logstash-]YYYY.MM.DD.HH' },
{ name: 'Daily', value: 'Daily', example: '[logstash-]YYYY.MM.DD' },
@@ -73,14 +71,14 @@ export class CloudWatchConfigCtrl {
getRegions() {
this.datasourceSrv
.loadDatasource(this.current.name)
.then((ds: CloudWatchDatasource) => {
.then(ds => {
return ds.getRegions();
})
.then(
(regions: any) => {
regions => {
this.regions = _.map(regions, 'value');
},
(err: any) => {
err => {
console.error('failed to get latest regions');
}
);

View File

@@ -1,9 +1,9 @@
import angular, { IQService } from 'angular';
import angular from 'angular';
import _ from 'lodash';
import { dateMath } from '@grafana/data';
import kbn from 'app/core/utils/kbn';
import { CloudWatchQuery } from './types';
import { DataSourceApi, DataQueryRequest, DataSourceInstanceSettings, ScopedVars } from '@grafana/ui';
import { DataSourceApi, DataQueryRequest, DataSourceInstanceSettings } from '@grafana/ui';
import { BackendSrv } from 'app/core/services/backend_srv';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
@@ -18,7 +18,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
/** @ngInject */
constructor(
private instanceSettings: DataSourceInstanceSettings,
private $q: IQService,
private $q,
private backendSrv: BackendSrv,
private templateSrv: TemplateSrv,
private timeSrv: TimeSrv
@@ -96,7 +96,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.performTimeSeriesQuery(request);
}
getPeriod(target: any, options: any, now?: number) {
getPeriod(target, options, now?) {
const start = this.convertToCloudWatchTime(options.range.from, false);
const end = this.convertToCloudWatchTime(options.range.to, true);
now = Math.round((now || Date.now()) / 1000);
@@ -142,8 +142,8 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return period;
}
performTimeSeriesQuery(request: any) {
return this.awsRequest('/api/tsdb/query', request).then((res: any) => {
performTimeSeriesQuery(request) {
return this.awsRequest('/api/tsdb/query', request).then(res => {
const data = [];
if (res.results) {
@@ -165,7 +165,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
transformSuggestDataFromTable(suggestData: any) {
transformSuggestDataFromTable(suggestData) {
return _.map(suggestData.results['metricFindQuery'].tables[0].rows, v => {
return {
text: v[0],
@@ -174,7 +174,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
doMetricQueryRequest(subtype: any, parameters: any) {
doMetricQueryRequest(subtype, parameters) {
const range = this.timeSrv.timeRange();
return this.awsRequest('/api/tsdb/query', {
from: range.from.valueOf().toString(),
@@ -192,7 +192,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
parameters
),
],
}).then((r: any) => {
}).then(r => {
return this.transformSuggestDataFromTable(r);
});
}
@@ -205,27 +205,21 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.doMetricQueryRequest('namespaces', null);
}
getMetrics(namespace: string, region: string) {
getMetrics(namespace, region) {
return this.doMetricQueryRequest('metrics', {
region: this.templateSrv.replace(this.getActualRegion(region)),
namespace: this.templateSrv.replace(namespace),
});
}
getDimensionKeys(namespace: string, region: string) {
getDimensionKeys(namespace, region) {
return this.doMetricQueryRequest('dimension_keys', {
region: this.templateSrv.replace(this.getActualRegion(region)),
namespace: this.templateSrv.replace(namespace),
});
}
getDimensionValues(
region: string,
namespace: string,
metricName: string,
dimensionKey: string,
filterDimensions: {}
) {
getDimensionValues(region, namespace, metricName, dimensionKey, filterDimensions) {
return this.doMetricQueryRequest('dimension_values', {
region: this.templateSrv.replace(this.getActualRegion(region)),
namespace: this.templateSrv.replace(namespace),
@@ -235,14 +229,14 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
getEbsVolumeIds(region: string, instanceId: string) {
getEbsVolumeIds(region, instanceId) {
return this.doMetricQueryRequest('ebs_volume_ids', {
region: this.templateSrv.replace(this.getActualRegion(region)),
instanceId: this.templateSrv.replace(instanceId),
});
}
getEc2InstanceAttribute(region: string, attributeName: string, filters: any) {
getEc2InstanceAttribute(region, attributeName, filters) {
return this.doMetricQueryRequest('ec2_instance_attribute', {
region: this.templateSrv.replace(this.getActualRegion(region)),
attributeName: this.templateSrv.replace(attributeName),
@@ -250,7 +244,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
getResourceARNs(region: string, resourceType: string, tags: any) {
getResourceARNs(region, resourceType, tags) {
return this.doMetricQueryRequest('resource_arns', {
region: this.templateSrv.replace(this.getActualRegion(region)),
resourceType: this.templateSrv.replace(resourceType),
@@ -258,7 +252,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
metricFindQuery(query: string) {
metricFindQuery(query) {
let region;
let namespace;
let metricName;
@@ -330,7 +324,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.$q.when([]);
}
annotationQuery(options: any) {
annotationQuery(options) {
const annotation = options.annotation;
const statistics = _.map(annotation.statistics, s => {
return this.templateSrv.replace(s);
@@ -365,7 +359,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
parameters
),
],
}).then((r: any) => {
}).then(r => {
return _.map(r.results['annotationQuery'].tables[0].rows, v => {
return {
annotation: annotation,
@@ -378,7 +372,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
targetContainsTemplate(target: any) {
targetContainsTemplate(target) {
return (
this.templateSrv.variableExists(target.region) ||
this.templateSrv.variableExists(target.namespace) ||
@@ -401,14 +395,14 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
awsRequest(url: string, data: any) {
awsRequest(url, data) {
const options = {
method: 'POST',
url,
data,
url: url,
data: data,
};
return this.backendSrv.datasourceRequest(options).then((result: any) => {
return this.backendSrv.datasourceRequest(options).then(result => {
return result.data;
});
}
@@ -417,14 +411,14 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
return this.defaultRegion;
}
getActualRegion(region: string) {
getActualRegion(region) {
if (region === 'default' || _.isEmpty(region)) {
return this.getDefaultRegion();
}
return region;
}
getExpandedVariables(target: any, dimensionKey: any, variable: any, templateSrv: TemplateSrv) {
getExpandedVariables(target, dimensionKey, variable, templateSrv) {
/* if the all checkbox is marked we should add all values to the targets */
const allSelected: any = _.find(variable.options, { selected: true, text: 'All' });
const selectedVariables = _.filter(variable.options, v => {
@@ -436,7 +430,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
const currentVariables = !_.isArray(variable.current.value)
? [variable.current]
: variable.current.value.map((v: any) => {
: variable.current.value.map(v => {
return {
text: v,
value: v,
@@ -446,9 +440,9 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
selectedVariables.some((s: any) => {
return s.value === currentVariables[0].value;
}) || currentVariables[0].value === '$__all';
return (useSelectedVariables ? selectedVariables : currentVariables).map((v: any) => {
return (useSelectedVariables ? selectedVariables : currentVariables).map(v => {
const t = angular.copy(target);
const scopedVar: any = {};
const scopedVar = {};
scopedVar[variable.name] = v;
t.refId = target.refId + '_' + v.value;
t.dimensions[dimensionKey] = templateSrv.replace(t.dimensions[dimensionKey], scopedVar);
@@ -461,7 +455,7 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
});
}
expandTemplateVariable(targets: any, scopedVars: ScopedVars, templateSrv: TemplateSrv) {
expandTemplateVariable(targets, scopedVars, templateSrv) {
// Datasource and template srv logic uber-complected. This should be cleaned up.
return _.chain(targets)
.map(target => {
@@ -486,15 +480,15 @@ export default class CloudWatchDatasource extends DataSourceApi<CloudWatchQuery>
.value();
}
convertToCloudWatchTime(date: any, roundUp: any) {
convertToCloudWatchTime(date, roundUp) {
if (_.isString(date)) {
date = dateMath.parse(date, roundUp);
}
return Math.round(date.valueOf() / 1000);
}
convertDimensionFormat(dimensions: any, scopedVars: ScopedVars) {
const convertedDimensions: any = {};
convertDimensionFormat(dimensions, scopedVars) {
const convertedDimensions = {};
_.each(dimensions, (value, key) => {
convertedDimensions[this.templateSrv.replace(key, scopedVars)] = this.templateSrv.replace(value, scopedVars);
});

View File

@@ -1,6 +1,5 @@
import './query_parameter_ctrl';
import { QueryCtrl } from 'app/plugins/sdk';
import { auto } from 'angular';
export class CloudWatchQueryCtrl extends QueryCtrl {
static templateUrl = 'partials/query.editor.html';
@@ -8,7 +7,7 @@ export class CloudWatchQueryCtrl extends QueryCtrl {
aliasSyntax: string;
/** @ngInject */
constructor($scope: any, $injector: auto.IInjectorService) {
constructor($scope, $injector) {
super($scope, $injector);
this.aliasSyntax = '{{metric}} {{stat}} {{namespace}} {{region}} {{<dimension name>}}';
}

View File

@@ -1,12 +1,10 @@
import angular, { IQService } from 'angular';
import angular from 'angular';
import coreModule from 'app/core/core_module';
import _ from 'lodash';
import { TemplateSrv } from 'app/features/templating/template_srv';
import DatasourceSrv from 'app/features/plugins/datasource_srv';
export class CloudWatchQueryParameterCtrl {
/** @ngInject */
constructor($scope: any, templateSrv: TemplateSrv, uiSegmentSrv: any, datasourceSrv: DatasourceSrv, $q: IQService) {
constructor($scope, templateSrv, uiSegmentSrv, datasourceSrv, $q) {
$scope.init = () => {
const target = $scope.target;
target.namespace = target.namespace || '';
@@ -71,7 +69,7 @@ export class CloudWatchQueryParameterCtrl {
);
};
$scope.statSegmentChanged = (segment: any, index: number) => {
$scope.statSegmentChanged = (segment, index) => {
if (segment.value === $scope.removeStatSegment.value) {
$scope.statSegments.splice(index, 1);
} else {
@@ -93,7 +91,7 @@ export class CloudWatchQueryParameterCtrl {
$scope.onChange();
};
$scope.ensurePlusButton = (segments: any) => {
$scope.ensurePlusButton = segments => {
const count = segments.length;
const lastSegment = segments[Math.max(count - 1, 0)];
@@ -102,7 +100,7 @@ export class CloudWatchQueryParameterCtrl {
}
};
$scope.getDimSegments = (segment: any, $index: number) => {
$scope.getDimSegments = (segment, $index) => {
if (segment.type === 'operator') {
return $q.when([]);
}
@@ -132,7 +130,7 @@ export class CloudWatchQueryParameterCtrl {
});
};
$scope.dimSegmentChanged = (segment: any, index: number) => {
$scope.dimSegmentChanged = (segment, index) => {
$scope.dimSegments[index] = segment;
if (segment.value === $scope.removeDimSegment.value) {
@@ -150,7 +148,7 @@ export class CloudWatchQueryParameterCtrl {
};
$scope.syncDimSegmentsWithModel = () => {
const dims: any = {};
const dims = {};
const length = $scope.dimSegments.length;
for (let i = 0; i < length - 2; i += 3) {
@@ -167,7 +165,7 @@ export class CloudWatchQueryParameterCtrl {
$scope.getRegions = () => {
return $scope.datasource
.metricFindQuery('regions()')
.then((results: any) => {
.then(results => {
results.unshift({ text: 'default' });
return results;
})
@@ -199,8 +197,8 @@ export class CloudWatchQueryParameterCtrl {
$scope.onChange();
};
$scope.transformToSegments = (addTemplateVars: any) => {
return (results: any) => {
$scope.transformToSegments = addTemplateVars => {
return results => {
const segments = _.map(results, segment => {
return uiSegmentSrv.newSegment({
value: segment.text,

View File

@@ -32,7 +32,7 @@ describe('CloudWatchDatasource', () => {
} as any;
beforeEach(() => {
ctx.ds = new CloudWatchDatasource(instanceSettings, {} as any, backendSrv, templateSrv, timeSrv);
ctx.ds = new CloudWatchDatasource(instanceSettings, {}, backendSrv, templateSrv, timeSrv);
});
describe('When performing CloudWatch query', () => {
@@ -56,7 +56,7 @@ describe('CloudWatchDatasource', () => {
],
};
const response: any = {
const response = {
timings: [null],
results: {
A: {
@@ -156,7 +156,7 @@ describe('CloudWatchDatasource', () => {
});
it('should return series list', done => {
ctx.ds.query(query).then((result: any) => {
ctx.ds.query(query).then(result => {
expect(result.data[0].target).toBe(response.results.A.series[0].name);
expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]);
done();
@@ -204,7 +204,7 @@ describe('CloudWatchDatasource', () => {
],
};
ctx.ds.query(query).then((result: any) => {
ctx.ds.query(query).then(result => {
expect(requestParams.queries[0].region).toBe(instanceSettings.jsonData.defaultRegion);
done();
});
@@ -231,7 +231,7 @@ describe('CloudWatchDatasource', () => {
],
};
const response: any = {
const response = {
timings: [null],
results: {
A: {
@@ -259,7 +259,7 @@ describe('CloudWatchDatasource', () => {
});
it('should return series list', done => {
ctx.ds.query(query).then((result: any) => {
ctx.ds.query(query).then(result => {
expect(result.data[0].target).toBe(response.results.A.series[0].name);
expect(result.data[0].datapoints[0][0]).toBe(response.results.A.series[0].points[0][0]);
done();
@@ -411,7 +411,7 @@ describe('CloudWatchDatasource', () => {
});
it('should generate the correct query for multilple template variables with expression', done => {
const query: any = {
const query = {
range: { from: 'now-1h', to: 'now' },
rangeRaw: { from: 1483228800, to: 1483232400 },
targets: [
@@ -466,17 +466,17 @@ describe('CloudWatchDatasource', () => {
});
});
function describeMetricFindQuery(query: any, func: any) {
function describeMetricFindQuery(query, func) {
describe('metricFindQuery ' + query, () => {
const scenario: any = {};
scenario.setup = (setupCallback: any) => {
scenario.setup = setupCallback => {
beforeEach(() => {
setupCallback();
ctx.backendSrv.datasourceRequest = jest.fn(args => {
scenario.request = args.data;
return Promise.resolve({ data: scenario.requestResponse });
});
ctx.ds.metricFindQuery(query).then((args: any) => {
ctx.ds.metricFindQuery(query).then(args => {
scenario.result = args;
});
});
@@ -486,7 +486,7 @@ describe('CloudWatchDatasource', () => {
});
}
describeMetricFindQuery('regions()', (scenario: any) => {
describeMetricFindQuery('regions()', scenario => {
scenario.setup(() => {
scenario.requestResponse = {
results: {
@@ -504,7 +504,7 @@ describe('CloudWatchDatasource', () => {
});
});
describeMetricFindQuery('namespaces()', (scenario: any) => {
describeMetricFindQuery('namespaces()', scenario => {
scenario.setup(() => {
scenario.requestResponse = {
results: {
@@ -522,7 +522,7 @@ describe('CloudWatchDatasource', () => {
});
});
describeMetricFindQuery('metrics(AWS/EC2)', (scenario: any) => {
describeMetricFindQuery('metrics(AWS/EC2)', scenario => {
scenario.setup(() => {
scenario.requestResponse = {
results: {
@@ -540,7 +540,7 @@ describe('CloudWatchDatasource', () => {
});
});
describeMetricFindQuery('dimension_keys(AWS/EC2)', (scenario: any) => {
describeMetricFindQuery('dimension_keys(AWS/EC2)', scenario => {
scenario.setup(() => {
scenario.requestResponse = {
results: {
@@ -558,7 +558,7 @@ describe('CloudWatchDatasource', () => {
});
});
describeMetricFindQuery('dimension_values(us-east-1,AWS/EC2,CPUUtilization,InstanceId)', (scenario: any) => {
describeMetricFindQuery('dimension_values(us-east-1,AWS/EC2,CPUUtilization,InstanceId)', scenario => {
scenario.setup(() => {
scenario.requestResponse = {
results: {
@@ -576,7 +576,7 @@ describe('CloudWatchDatasource', () => {
});
});
describeMetricFindQuery('dimension_values(default,AWS/EC2,CPUUtilization,InstanceId)', (scenario: any) => {
describeMetricFindQuery('dimension_values(default,AWS/EC2,CPUUtilization,InstanceId)', scenario => {
scenario.setup(() => {
scenario.requestResponse = {
results: {
@@ -594,7 +594,7 @@ describe('CloudWatchDatasource', () => {
});
});
describeMetricFindQuery('resource_arns(default,ec2:instance,{"environment":["production"]})', (scenario: any) => {
describeMetricFindQuery('resource_arns(default,ec2:instance,{"environment":["production"]})', scenario => {
scenario.setup(() => {
scenario.requestResponse = {
results: {

View File

@@ -1,11 +1,10 @@
import coreModule from 'app/core/core_module';
import _ from 'lodash';
import * as queryDef from './query_def';
import { IQService } from 'angular';
export class ElasticBucketAggCtrl {
/** @ngInject */
constructor($scope: any, uiSegmentSrv: any, $q: IQService, $rootScope: any) {
constructor($scope, uiSegmentSrv, $q, $rootScope) {
const bucketAggs = $scope.target.bucketAggs;
$scope.orderByOptions = [];
@@ -159,7 +158,7 @@ export class ElasticBucketAggCtrl {
$scope.agg.settings.filters.push({ query: '*' });
};
$scope.removeFiltersQuery = (filter: any) => {
$scope.removeFiltersQuery = filter => {
$scope.agg.settings.filters = _.without($scope.agg.settings.filters, filter);
};

View File

@@ -8,7 +8,7 @@ export class ElasticConfigCtrl {
current: DataSourceInstanceSettings<ElasticsearchOptions>;
/** @ngInject */
constructor($scope: any) {
constructor($scope) {
this.current.jsonData.timeField = this.current.jsonData.timeField || '@timestamp';
this.current.jsonData.esVersion = this.current.jsonData.esVersion || 5;
const defaultMaxConcurrentShardRequests = this.current.jsonData.esVersion >= 70 ? 5 : 256;
@@ -18,7 +18,7 @@ export class ElasticConfigCtrl {
this.current.jsonData.logLevelField = this.current.jsonData.logLevelField || '';
}
indexPatternTypes: any = [
indexPatternTypes = [
{ name: 'No pattern', value: undefined },
{ name: 'Hourly', value: 'Hourly', example: '[logstash-]YYYY.MM.DD.HH' },
{ name: 'Daily', value: 'Daily', example: '[logstash-]YYYY.MM.DD' },

View File

@@ -63,7 +63,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
}
}
private request(method: string, url: string, data?: undefined) {
private request(method, url, data?) {
const options: any = {
url: this.url + '/' + url,
method: method,
@@ -82,29 +82,29 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
return this.backendSrv.datasourceRequest(options);
}
private get(url: string) {
private get(url) {
const range = this.timeSrv.timeRange();
const indexList = this.indexPattern.getIndexList(range.from.valueOf(), range.to.valueOf());
if (_.isArray(indexList) && indexList.length) {
return this.request('GET', indexList[0] + url).then((results: any) => {
return this.request('GET', indexList[0] + url).then(results => {
results.data.$$config = results.config;
return results.data;
});
} else {
return this.request('GET', this.indexPattern.getIndexForToday() + url).then((results: any) => {
return this.request('GET', this.indexPattern.getIndexForToday() + url).then(results => {
results.data.$$config = results.config;
return results.data;
});
}
}
private post(url: string, data: any) {
private post(url, data) {
return this.request('POST', url, data)
.then((results: any) => {
.then(results => {
results.data.$$config = results.config;
return results.data;
})
.catch((err: any) => {
.catch(err => {
if (err.data && err.data.error) {
throw {
message: 'Elasticsearch error: ' + err.data.error.reason,
@@ -116,14 +116,14 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
});
}
annotationQuery(options: any) {
annotationQuery(options) {
const annotation = options.annotation;
const timeField = annotation.timeField || '@timestamp';
const queryString = annotation.query || '*';
const tagsField = annotation.tagsField || 'tags';
const textField = annotation.textField || null;
const range: any = {};
const range = {};
range[timeField] = {
from: options.range.from.valueOf(),
to: options.range.to.valueOf(),
@@ -144,8 +144,8 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
},
};
const data: any = {
query,
const data = {
query: query,
size: 10000,
};
@@ -168,11 +168,11 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
const payload = angular.toJson(header) + '\n' + angular.toJson(data) + '\n';
return this.post('_msearch', payload).then((res: any) => {
return this.post('_msearch', payload).then(res => {
const list = [];
const hits = res.responses[0].hits.hits;
const getFieldFromSource = (source: any, fieldName: any) => {
const getFieldFromSource = (source, fieldName) => {
if (!fieldName) {
return;
}
@@ -229,7 +229,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
testDatasource() {
// validate that the index exist and has date field
return this.getFields({ type: 'date' }).then(
(dateFields: any) => {
dateFields => {
const timeField: any = _.find(dateFields, { text: this.timeField });
if (!timeField) {
return {
@@ -239,7 +239,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
}
return { status: 'success', message: 'Index OK. Time field name OK.' };
},
(err: any) => {
err => {
console.log(err);
if (err.data && err.data.error) {
let message = angular.toJson(err.data.error);
@@ -254,7 +254,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
);
}
getQueryHeader(searchType: any, timeFrom: any, timeTo: any) {
getQueryHeader(searchType, timeFrom, timeTo) {
const queryHeader: any = {
search_type: searchType,
ignore_unavailable: true,
@@ -319,7 +319,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
const url = this.getMultiSearchUrl();
return this.post(url, payload).then((res: any) => {
return this.post(url, payload).then(res => {
const er = new ElasticResponse(sentTargets, res);
if (sentTargets.some(target => target.isLogsQuery)) {
return er.getLogs(this.logMessageField, this.logLevelField);
@@ -329,10 +329,10 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
});
}
getFields(query: any) {
getFields(query) {
const configuredEsVersion = this.esVersion;
return this.get('/_mapping').then((result: any) => {
const typeMap: any = {
return this.get('/_mapping').then(result => {
const typeMap = {
float: 'number',
double: 'number',
integer: 'number',
@@ -344,7 +344,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
nested: 'nested',
};
function shouldAddField(obj: any, key: any, query: any) {
function shouldAddField(obj, key, query) {
if (key[0] === '_') {
return false;
}
@@ -358,10 +358,10 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
}
// Store subfield names: [system, process, cpu, total] -> system.process.cpu.total
const fieldNameParts: any = [];
const fields: any = {};
const fieldNameParts = [];
const fields = {};
function getFieldsRecursively(obj: any) {
function getFieldsRecursively(obj) {
for (const key in obj) {
const subObj = obj[key];
@@ -415,7 +415,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
});
}
getTerms(queryDef: any) {
getTerms(queryDef) {
const range = this.timeSrv.timeRange();
const searchType = this.esVersion >= 5 ? 'query_then_fetch' : 'count';
const header = this.getQueryHeader(searchType, range.from, range.to);
@@ -427,7 +427,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
const url = this.getMultiSearchUrl();
return this.post(url, esQuery).then((res: any) => {
return this.post(url, esQuery).then(res => {
if (!res.responses[0].aggregations) {
return [];
}
@@ -450,7 +450,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
return '_msearch';
}
metricFindQuery(query: any) {
metricFindQuery(query) {
query = angular.fromJson(query);
if (!query) {
return this.$q.when([]);
@@ -472,11 +472,11 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
return this.getFields({});
}
getTagValues(options: any) {
getTagValues(options) {
return this.getTerms({ field: options.key, query: '*' });
}
targetContainsTemplate(target: any) {
targetContainsTemplate(target) {
if (this.templateSrv.variableExists(target.query) || this.templateSrv.variableExists(target.alias)) {
return true;
}
@@ -500,7 +500,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
return false;
}
private isPrimitive(obj: any) {
private isPrimitive(obj) {
if (obj === null || obj === undefined) {
return true;
}
@@ -511,7 +511,7 @@ export class ElasticDatasource extends DataSourceApi<ElasticsearchQuery, Elastic
return false;
}
private objectContainsTemplate(obj: any) {
private objectContainsTemplate(obj) {
if (!obj) {
return false;
}

View File

@@ -4,15 +4,14 @@ import * as queryDef from './query_def';
import TableModel from 'app/core/table_model';
import { DataFrame, toDataFrame, FieldType } from '@grafana/data';
import { DataQueryResponse } from '@grafana/ui';
import { ElasticsearchAggregation } from './types';
export class ElasticResponse {
constructor(private targets: any, private response: any) {
constructor(private targets, private response) {
this.targets = targets;
this.response = response;
}
processMetrics(esAgg: any, target: any, seriesList: any, props: any) {
processMetrics(esAgg, target, seriesList, props) {
let metric, y, i, newSeries, bucket, value;
for (y = 0; y < target.metrics.length; y++) {
@@ -114,7 +113,7 @@ export class ElasticResponse {
}
}
processAggregationDocs(esAgg: any, aggDef: ElasticsearchAggregation, target: any, table: any, props: any) {
processAggregationDocs(esAgg, aggDef, target, table, props) {
// add columns
if (table.columns.length === 0) {
for (const propKey of _.keys(props)) {
@@ -124,7 +123,7 @@ export class ElasticResponse {
}
// helper func to add values to value array
const addMetricValue = (values: any[], metricName: string, value: any) => {
const addMetricValue = (values, metricName, value) => {
table.addColumn({ text: metricName });
values.push(value);
};
@@ -189,8 +188,8 @@ export class ElasticResponse {
// This is quite complex
// need to recurse down the nested buckets to build series
processBuckets(aggs: any, target: any, seriesList: any, table: any, props: any, depth: any) {
let bucket, aggDef: any, esAgg, aggId;
processBuckets(aggs, target, seriesList, table, props, depth) {
let bucket, aggDef, esAgg, aggId;
const maxDepth = target.bucketAggs.length - 1;
for (aggId in aggs) {
@@ -225,7 +224,7 @@ export class ElasticResponse {
}
}
private getMetricName(metric: any) {
private getMetricName(metric) {
let metricDef: any = _.find(queryDef.metricAggTypes, { value: metric });
if (!metricDef) {
metricDef = _.find(queryDef.extendedStats, { value: metric });
@@ -234,13 +233,13 @@ export class ElasticResponse {
return metricDef ? metricDef.text : metric;
}
private getSeriesName(series: any, target: any, metricTypeCount: any) {
private getSeriesName(series, target, metricTypeCount) {
let metricName = this.getMetricName(series.metric);
if (target.alias) {
const regex = /\{\{([\s\S]+?)\}\}/g;
return target.alias.replace(regex, (match: any, g1: any, g2: any) => {
return target.alias.replace(regex, (match, g1, g2) => {
const group = g1 || g2;
if (group.indexOf('term ') === 0) {
@@ -304,7 +303,7 @@ export class ElasticResponse {
return name.trim() + ' ' + metricName;
}
nameSeries(seriesList: any, target: any) {
nameSeries(seriesList, target) {
const metricTypeCount = _.uniq(_.map(seriesList, 'metric')).length;
for (let i = 0; i < seriesList.length; i++) {
@@ -313,17 +312,17 @@ export class ElasticResponse {
}
}
processHits(hits: { total: { value: any }; hits: any[] }, seriesList: any[]) {
processHits(hits, seriesList) {
const hitsTotal = typeof hits.total === 'number' ? hits.total : hits.total.value; // <- Works with Elasticsearch 7.0+
const series: any = {
const series = {
target: 'docs',
type: 'docs',
datapoints: [],
total: hitsTotal,
filterable: true,
};
let propName, hit, doc: any, i;
let propName, hit, doc, i;
for (i = 0; i < hits.hits.length; i++) {
hit = hits.hits[i];
@@ -348,7 +347,7 @@ export class ElasticResponse {
seriesList.push(series);
}
trimDatapoints(aggregations: any, target: any) {
trimDatapoints(aggregations, target) {
const histogram: any = _.find(target.bucketAggs, { type: 'date_histogram' });
const shouldDropFirstAndLast = histogram && histogram.settings && histogram.settings.trimEdges;
@@ -363,7 +362,7 @@ export class ElasticResponse {
}
}
getErrorFromElasticResponse(response: any, err: any) {
getErrorFromElasticResponse(response, err) {
const result: any = {};
result.data = JSON.stringify(err, null, 4);
if (err.root_cause && err.root_cause.length > 0 && err.root_cause[0].reason) {
@@ -395,7 +394,7 @@ export class ElasticResponse {
if (response.aggregations) {
const aggregations = response.aggregations;
const target = this.targets[i];
const tmpSeriesList: any[] = [];
const tmpSeriesList = [];
const table = new TableModel();
this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);
@@ -427,7 +426,7 @@ export class ElasticResponse {
const hits = response.hits;
let propNames: string[] = [];
let propName, hit, doc: any, i;
let propName, hit, doc, i;
for (i = 0; i < hits.hits.length; i++) {
hit = hits.hits[i];
@@ -535,7 +534,7 @@ export class ElasticResponse {
if (response.aggregations) {
const aggregations = response.aggregations;
const target = this.targets[n];
const tmpSeriesList: any[] = [];
const tmpSeriesList = [];
const table = new TableModel();
this.processBuckets(aggregations, target, tmpSeriesList, table, {}, 0);

View File

@@ -1,6 +1,6 @@
import { toUtc, dateTime } from '@grafana/data';
const intervalMap: any = {
const intervalMap = {
Hourly: { startOf: 'hour', amount: 'hours' },
Daily: { startOf: 'day', amount: 'days' },
Weekly: { startOf: 'isoWeek', amount: 'weeks' },
@@ -9,7 +9,7 @@ const intervalMap: any = {
};
export class IndexPattern {
constructor(private pattern: any, private interval: string | null) {}
constructor(private pattern, private interval: string | null) {}
getIndexForToday() {
if (this.interval) {
@@ -19,7 +19,7 @@ export class IndexPattern {
}
}
getIndexList(from: any, to: any) {
getIndexList(from, to) {
if (!this.interval) {
return this.pattern;
}

View File

@@ -2,11 +2,10 @@ import coreModule from 'app/core/core_module';
import _ from 'lodash';
import * as queryDef from './query_def';
import { ElasticsearchAggregation } from './types';
import { IQService } from 'angular';
export class ElasticMetricAggCtrl {
/** @ngInject */
constructor($scope: any, uiSegmentSrv: any, $q: IQService, $rootScope: any) {
constructor($scope, uiSegmentSrv, $q, $rootScope) {
const metricAggs: ElasticsearchAggregation[] = $scope.target.metrics;
$scope.metricAggTypes = queryDef.getMetricAggTypes($scope.esVersion);
$scope.extendedStats = queryDef.extendedStats;

View File

@@ -14,7 +14,7 @@ export function elasticPipelineVariables() {
};
}
const newVariable = (index: any) => {
const newVariable = index => {
return {
name: 'var' + index,
pipelineAgg: 'select metric',
@@ -23,7 +23,7 @@ const newVariable = (index: any) => {
export class ElasticPipelineVariablesCtrl {
/** @ngInject */
constructor($scope: any) {
constructor($scope) {
$scope.variables = $scope.variables || [newVariable(1)];
$scope.onChangeInternal = () => {
@@ -35,7 +35,7 @@ export class ElasticPipelineVariablesCtrl {
$scope.onChange();
};
$scope.remove = (index: number) => {
$scope.remove = index => {
$scope.variables.splice(index, 1);
$scope.onChange();
};

View File

@@ -1,17 +1,16 @@
import * as queryDef from './query_def';
import { ElasticsearchAggregation } from './types';
export class ElasticQueryBuilder {
timeField: string;
esVersion: number;
constructor(options: any) {
constructor(options) {
this.timeField = options.timeField;
this.esVersion = options.esVersion;
}
getRangeFilter() {
const filter: any = {};
const filter = {};
filter[this.timeField] = {
gte: '$timeFrom',
lte: '$timeTo',
@@ -21,7 +20,7 @@ export class ElasticQueryBuilder {
return filter;
}
buildTermsAgg(aggDef: ElasticsearchAggregation, queryNode: { terms?: any; aggs?: any }, target: { metrics: any[] }) {
buildTermsAgg(aggDef, queryNode, target) {
let metricRef, metric, y;
queryNode.terms = { field: aggDef.field };
@@ -64,7 +63,7 @@ export class ElasticQueryBuilder {
return queryNode;
}
getDateHistogramAgg(aggDef: ElasticsearchAggregation) {
getDateHistogramAgg(aggDef) {
const esAgg: any = {};
const settings = aggDef.settings || {};
esAgg.interval = settings.interval;
@@ -88,7 +87,7 @@ export class ElasticQueryBuilder {
return esAgg;
}
getHistogramAgg(aggDef: ElasticsearchAggregation) {
getHistogramAgg(aggDef) {
const esAgg: any = {};
const settings = aggDef.settings || {};
esAgg.interval = settings.interval;
@@ -101,8 +100,8 @@ export class ElasticQueryBuilder {
return esAgg;
}
getFiltersAgg(aggDef: ElasticsearchAggregation) {
const filterObj: any = {};
getFiltersAgg(aggDef) {
const filterObj = {};
for (let i = 0; i < aggDef.settings.filters.length; i++) {
const query = aggDef.settings.filters[i].query;
let label = aggDef.settings.filters[i].label;
@@ -118,7 +117,7 @@ export class ElasticQueryBuilder {
return filterObj;
}
documentQuery(query: any, size: number) {
documentQuery(query, size) {
query.size = size;
query.sort = {};
query.sort[this.timeField] = { order: 'desc', unmapped_type: 'boolean' };
@@ -137,12 +136,12 @@ export class ElasticQueryBuilder {
return query;
}
addAdhocFilters(query: any, adhocFilters: any) {
addAdhocFilters(query, adhocFilters) {
if (!adhocFilters) {
return;
}
let i, filter, condition: any, queryCondition: any;
let i, filter, condition, queryCondition;
for (i = 0; i < adhocFilters.length; i++) {
filter = adhocFilters[i];
@@ -184,7 +183,7 @@ export class ElasticQueryBuilder {
}
}
build(target: any, adhocFilters?: any, queryString?: string) {
build(target, adhocFilters?, queryString?) {
// make sure query has defaults;
target.metrics = target.metrics || [queryDef.defaultMetricAgg()];
target.bucketAggs = target.bucketAggs || [queryDef.defaultBucketAgg()];
@@ -225,7 +224,7 @@ export class ElasticQueryBuilder {
for (i = 0; i < target.bucketAggs.length; i++) {
const aggDef = target.bucketAggs[i];
const esAgg: any = {};
const esAgg = {};
switch (aggDef.type) {
case 'date_histogram': {
@@ -266,8 +265,8 @@ export class ElasticQueryBuilder {
continue;
}
const aggField: any = {};
let metricAgg: any = null;
const aggField = {};
let metricAgg = null;
if (queryDef.isPipelineAgg(metric.type)) {
if (queryDef.isPipelineAggWithMultipleBucketPaths(metric.type)) {
@@ -324,7 +323,7 @@ export class ElasticQueryBuilder {
return query;
}
getTermsQuery(queryDef: any) {
getTermsQuery(queryDef) {
const query: any = {
size: 0,
query: {
@@ -369,7 +368,7 @@ export class ElasticQueryBuilder {
return query;
}
getLogsQuery(target: any, querystring: string) {
getLogsQuery(target, querystring) {
let query: any = {
size: 0,
query: {

View File

@@ -2,7 +2,7 @@ import './bucket_agg';
import './metric_agg';
import './pipeline_variables';
import angular, { auto } from 'angular';
import angular from 'angular';
import _ from 'lodash';
import * as queryDef from './query_def';
import { QueryCtrl } from 'app/plugins/sdk';
@@ -15,7 +15,7 @@ export class ElasticQueryCtrl extends QueryCtrl {
rawQueryOld: string;
/** @ngInject */
constructor($scope: any, $injector: auto.IInjectorService, private $rootScope: any, private uiSegmentSrv: any) {
constructor($scope, $injector, private $rootScope, private uiSegmentSrv) {
super($scope, $injector);
this.esVersion = this.datasource.esVersion;
@@ -35,7 +35,7 @@ export class ElasticQueryCtrl extends QueryCtrl {
this.queryUpdated();
}
getFields(type: any) {
getFields(type) {
const jsonStr = angular.toJson({ find: 'fields', type: type });
return this.datasource
.metricFindQuery(jsonStr)
@@ -98,7 +98,7 @@ export class ElasticQueryCtrl extends QueryCtrl {
return text;
}
handleQueryError(err: any): any[] {
handleQueryError(err) {
this.error = err.message || 'Failed to issue metric query';
return [];
}

View File

@@ -128,7 +128,7 @@ export const movingAvgModelOptions = [
{ text: 'Holt Winters', value: 'holt_winters' },
];
export const pipelineOptions: any = {
export const pipelineOptions = {
moving_avg: [
{ text: 'window', default: 5 },
{ text: 'model', default: 'simple' },
@@ -139,7 +139,7 @@ export const pipelineOptions: any = {
bucket_script: [],
};
export const movingAvgModelSettings: any = {
export const movingAvgModelSettings = {
simple: [],
linear: [],
ewma: [{ text: 'Alpha', value: 'alpha', default: undefined }],
@@ -153,7 +153,7 @@ export const movingAvgModelSettings: any = {
],
};
export function getMetricAggTypes(esVersion: any) {
export function getMetricAggTypes(esVersion) {
return _.filter(metricAggTypes, f => {
if (f.minVersion) {
return f.minVersion <= esVersion;
@@ -163,7 +163,7 @@ export function getMetricAggTypes(esVersion: any) {
});
}
export function getPipelineOptions(metric: any) {
export function getPipelineOptions(metric) {
if (!isPipelineAgg(metric.type)) {
return [];
}
@@ -171,7 +171,7 @@ export function getPipelineOptions(metric: any) {
return pipelineOptions[metric.type];
}
export function isPipelineAgg(metricType: any) {
export function isPipelineAgg(metricType) {
if (metricType) {
const po = pipelineOptions[metricType];
return po !== null && po !== undefined;
@@ -180,7 +180,7 @@ export function isPipelineAgg(metricType: any) {
return false;
}
export function isPipelineAggWithMultipleBucketPaths(metricType: any) {
export function isPipelineAggWithMultipleBucketPaths(metricType) {
if (metricType) {
return metricAggTypes.find(t => t.value === metricType && t.supportsMultipleBucketPaths) !== undefined;
}
@@ -188,8 +188,8 @@ export function isPipelineAggWithMultipleBucketPaths(metricType: any) {
return false;
}
export function getPipelineAggOptions(targets: any) {
const result: any[] = [];
export function getPipelineAggOptions(targets) {
const result = [];
_.each(targets.metrics, metric => {
if (!isPipelineAgg(metric.type)) {
result.push({ text: describeMetric(metric), value: metric.id });
@@ -199,8 +199,8 @@ export function getPipelineAggOptions(targets: any) {
return result;
}
export function getMovingAvgSettings(model: any, filtered: boolean) {
const filteredResult: any[] = [];
export function getMovingAvgSettings(model, filtered) {
const filteredResult = [];
if (filtered) {
_.each(movingAvgModelSettings[model], setting => {
if (!setting.isCheckbox) {
@@ -212,8 +212,8 @@ export function getMovingAvgSettings(model: any, filtered: boolean) {
return movingAvgModelSettings[model];
}
export function getOrderByOptions(target: any) {
const metricRefs: any[] = [];
export function getOrderByOptions(target) {
const metricRefs = [];
_.each(target.metrics, metric => {
if (metric.type !== 'count') {
metricRefs.push({ text: describeMetric(metric), value: metric.id });
@@ -223,12 +223,12 @@ export function getOrderByOptions(target: any) {
return orderByOptions.concat(metricRefs);
}
export function describeOrder(order: string) {
export function describeOrder(order) {
const def: any = _.find(orderOptions, { value: order });
return def.text;
}
export function describeMetric(metric: { type: string; field: string }) {
export function describeMetric(metric) {
const def: any = _.find(metricAggTypes, { value: metric.type });
if (!def.requiresField && !isPipelineAgg(metric.type)) {
return def.text;
@@ -236,7 +236,7 @@ export function describeMetric(metric: { type: string; field: string }) {
return def.text + ' ' + metric.field;
}
export function describeOrderBy(orderBy: any, target: any) {
export function describeOrderBy(orderBy, target) {
const def: any = _.find(orderByOptions, { value: orderBy });
if (def) {
return def.text;

View File

@@ -69,7 +69,7 @@ describe('ElasticDatasource', function(this: any) {
});
it('should translate index pattern to current day', () => {
let requestOptions: any;
let requestOptions;
ctx.backendSrv.datasourceRequest = jest.fn(options => {
requestOptions = options;
return Promise.resolve({ data: {} });
@@ -83,7 +83,7 @@ describe('ElasticDatasource', function(this: any) {
});
describe('When issuing metric query with interval pattern', () => {
let requestOptions: any, parts: any, header: any, query: any, result: any;
let requestOptions, parts, header, query, result;
beforeEach(async () => {
createDatasource({
@@ -154,7 +154,7 @@ describe('ElasticDatasource', function(this: any) {
});
describe('When issuing logs query with interval pattern', () => {
let query, queryBuilderSpy: any;
let query, queryBuilderSpy;
beforeEach(async () => {
createDatasource({
@@ -249,7 +249,7 @@ describe('ElasticDatasource', function(this: any) {
});
describe('When issuing document query', () => {
let requestOptions: any, parts: any, header: any;
let requestOptions, parts, header;
beforeEach(() => {
createDatasource({
@@ -539,7 +539,7 @@ describe('ElasticDatasource', function(this: any) {
});
describe('When issuing aggregation query on es5.x', () => {
let requestOptions: any, parts: any, header: any;
let requestOptions, parts, header;
beforeEach(() => {
createDatasource({
@@ -582,7 +582,7 @@ describe('ElasticDatasource', function(this: any) {
});
describe('When issuing metricFind query on es5.x', () => {
let requestOptions: any, parts, header: any, body: any, results: any;
let requestOptions, parts, header, body, results;
beforeEach(() => {
createDatasource({
@@ -615,7 +615,7 @@ describe('ElasticDatasource', function(this: any) {
});
});
ctx.ds.metricFindQuery('{"find": "terms", "field": "test"}').then((res: any) => {
ctx.ds.metricFindQuery('{"find": "terms", "field": "test"}').then(res => {
results = res;
});

View File

@@ -2,8 +2,8 @@ import { ElasticResponse } from '../elastic_response';
describe('ElasticResponse', () => {
let targets;
let response: any;
let result: any;
let response;
let result;
describe('simple query and count', () => {
beforeEach(() => {
@@ -48,7 +48,7 @@ describe('ElasticResponse', () => {
});
describe('simple query count & avg aggregation', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -97,7 +97,7 @@ describe('ElasticResponse', () => {
});
describe('single group by query one metric', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -149,7 +149,7 @@ describe('ElasticResponse', () => {
});
describe('single group by query two metrics', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -209,7 +209,7 @@ describe('ElasticResponse', () => {
});
describe('with percentiles ', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -257,7 +257,7 @@ describe('ElasticResponse', () => {
});
describe('with extended_stats', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -333,7 +333,7 @@ describe('ElasticResponse', () => {
});
describe('single group by with alias pattern', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -394,7 +394,7 @@ describe('ElasticResponse', () => {
});
describe('histogram response', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -426,7 +426,7 @@ describe('ElasticResponse', () => {
});
describe('with two filters agg', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -583,7 +583,7 @@ describe('ElasticResponse', () => {
});
describe('No group by time with percentiles ', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -720,7 +720,7 @@ describe('ElasticResponse', () => {
});
describe('with bucket_script ', () => {
let result: any;
let result;
beforeEach(() => {
targets = [
@@ -861,7 +861,7 @@ describe('ElasticResponse', () => {
expect(result.data[0].fields).toContainEqual({ name: '@timestamp', type: 'time' });
expect(result.data[0].fields).toContainEqual({ name: 'host', type: 'string' });
expect(result.data[0].fields).toContainEqual({ name: 'message', type: 'string' });
result.data[0].rows.forEach((row: any, i: number) => {
result.data[0].rows.forEach((row, i) => {
expect(row).toContain(response.responses[0].hits.hits[i]._id);
expect(row).toContain(response.responses[0].hits.hits[i]._type);
expect(row).toContain(response.responses[0].hits.hits[i]._index);
@@ -869,7 +869,7 @@ describe('ElasticResponse', () => {
});
expect(result.data[1]).toHaveProperty('name', 'Count');
response.responses[0].aggregations['2'].buckets.forEach((bucket: any) => {
response.responses[0].aggregations['2'].buckets.forEach(bucket => {
expect(result.data[1].rows).toContainEqual([bucket.doc_count, bucket.key]);
});
});

View File

@@ -1,7 +1,7 @@
import { ElasticQueryBuilder } from '../query_builder';
describe('ElasticQueryBuilder', () => {
let builder: any;
let builder;
beforeEach(() => {
builder = new ElasticQueryBuilder({ timeField: '@timestamp' });
@@ -103,7 +103,6 @@ describe('ElasticQueryBuilder', () => {
],
},
100,
// @ts-ignore
1000
);

View File

@@ -1,5 +1,3 @@
import { auto } from 'angular';
export class QueryCtrl {
target: any;
datasource: any;
@@ -8,7 +6,7 @@ export class QueryCtrl {
hasRawMode: boolean;
error: string;
constructor(public $scope: any, _$injector: auto.IInjectorService) {
constructor(public $scope, _$injector) {
this.panelCtrl = this.panelCtrl || { panel: {} };
this.target = this.target || { target: '' };
this.panel = this.panelCtrl.panel;

View File

@@ -1,5 +1,3 @@
import { TemplateSrv } from 'app/features/templating/template_srv';
export class AzureMonitorAnnotationsQueryCtrl {
static templateUrl = 'partials/annotations.editor.html';
datasource: any;
@@ -11,7 +9,7 @@ export class AzureMonitorAnnotationsQueryCtrl {
'<your table>\n| where $__timeFilter() \n| project TimeGenerated, Text=YourTitleColumn, Tags="tag1,tag2"';
/** @ngInject */
constructor(private templateSrv: TemplateSrv) {
constructor(private templateSrv) {
this.annotation.queryType = this.annotation.queryType || 'Azure Log Analytics';
this.annotation.rawQuery = this.annotation.rawQuery || this.defaultQuery;
this.initDropdowns();
@@ -27,7 +25,7 @@ export class AzureMonitorAnnotationsQueryCtrl {
return;
}
return this.datasource.azureMonitorDatasource.getSubscriptions().then((subs: any[]) => {
return this.datasource.azureMonitorDatasource.getSubscriptions().then(subs => {
this.subscriptions = subs;
if (!this.annotation.subscription && this.annotation.queryType === 'Azure Log Analytics') {
@@ -47,7 +45,7 @@ export class AzureMonitorAnnotationsQueryCtrl {
return this.datasource
.getAzureLogAnalyticsWorkspaces(this.annotation.subscription)
.then((list: any[]) => {
.then(list => {
this.workspaces = list;
if (list.length > 0 && !this.annotation.workspace) {
this.annotation.workspace = list[0].value;
@@ -74,6 +72,6 @@ export class AzureMonitorAnnotationsQueryCtrl {
};
get templateVariables() {
return this.templateSrv.variables.map((t: any) => '$' + t.name);
return this.templateSrv.variables.map(t => '$' + t.name);
}
}

View File

@@ -1,5 +1,4 @@
import AzureMonitorDatasource from '../datasource';
// @ts-ignore
import Q from 'q';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { toUtc } from '@grafana/data';
@@ -47,7 +46,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return success status', () => {
return ctx.ds.testDatasource().then((results: any) => {
return ctx.ds.testDatasource().then(results => {
expect(results.status).toEqual('success');
});
});
@@ -72,7 +71,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return error status and a detailed error message', () => {
return ctx.ds.testDatasource().then((results: any) => {
return ctx.ds.testDatasource().then(results => {
expect(results.status).toEqual('error');
expect(results.message).toEqual(
'1. Application Insights: Not Found: Invalid Application Id for Application Insights service. '
@@ -100,7 +99,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return error status and a detailed error message', () => {
return ctx.ds.testDatasource().then((results: any) => {
return ctx.ds.testDatasource().then(results => {
expect(results.status).toEqual('error');
expect(results.message).toEqual('1. Application Insights: Error: SomeOtherError. An error message. ');
});
@@ -150,7 +149,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a single datapoint', () => {
return ctx.ds.query(options).then((results: any) => {
return ctx.ds.query(options).then(results => {
expect(results.data.length).toBe(1);
expect(results.data[0].datapoints.length).toBe(1);
expect(results.data[0].target).toEqual('exceptions/server');
@@ -197,7 +196,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a list of datapoints', () => {
return ctx.ds.query(options).then((results: any) => {
return ctx.ds.query(options).then(results => {
expect(results.data.length).toBe(1);
expect(results.data[0].datapoints.length).toBe(2);
expect(results.data[0].target).toEqual('exceptions/server');
@@ -268,7 +267,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a list of datapoints', () => {
return ctx.ds.query(options).then((results: any) => {
return ctx.ds.query(options).then(results => {
expect(results.data.length).toBe(3);
expect(results.data[0].datapoints.length).toBe(2);
expect(results.data[0].target).toEqual('exceptions/server{client/city="Miami"}');
@@ -293,7 +292,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a list of datapoints', () => {
return ctx.ds.query(options).then((results: any) => {
return ctx.ds.query(options).then(results => {
expect(results.data.length).toBe(3);
expect(results.data[0].datapoints.length).toBe(2);
expect(results.data[0].target).toEqual('exceptions/server + client/city + Miami');
@@ -324,7 +323,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a list of metric names', () => {
return ctx.ds.metricFindQuery('appInsightsMetricNames()').then((results: any) => {
return ctx.ds.metricFindQuery('appInsightsMetricNames()').then(results => {
expect(results.length).toBe(2);
expect(results[0].text).toBe('exceptions/server');
expect(results[0].value).toBe('exceptions/server');
@@ -362,7 +361,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a list of group bys', () => {
return ctx.ds.metricFindQuery('appInsightsGroupBys(requests/count)').then((results: any) => {
return ctx.ds.metricFindQuery('appInsightsGroupBys(requests/count)').then(results => {
expect(results[0].text).toContain('client/os');
expect(results[0].value).toContain('client/os');
expect(results[1].text).toContain('client/city');
@@ -390,7 +389,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a list of metric names', () => {
return ctx.ds.getAppInsightsMetricNames().then((results: any) => {
return ctx.ds.getAppInsightsMetricNames().then(results => {
expect(results.length).toBe(2);
expect(results[0].text).toBe('exceptions/server');
expect(results[0].value).toBe('exceptions/server');
@@ -428,7 +427,7 @@ describe('AppInsightsDatasource', () => {
});
it('should return a list of group bys', () => {
return ctx.ds.getAppInsightsMetricMetadata('requests/count').then((results: any) => {
return ctx.ds.getAppInsightsMetricMetadata('requests/count').then(results => {
expect(results.primaryAggType).toEqual('avg');
expect(results.supportedAggTypes).toContain('avg');
expect(results.supportedAggTypes).toContain('sum');

View File

@@ -6,7 +6,6 @@ import { DataSourceInstanceSettings } from '@grafana/ui';
import { AzureDataSourceJsonData } from '../types';
import { BackendSrv } from 'app/core/services/backend_srv';
import { TemplateSrv } from 'app/features/templating/template_srv';
import { IQService } from 'angular';
export interface LogAnalyticsColumn {
text: string;
@@ -25,7 +24,7 @@ export default class AppInsightsDatasource {
instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>,
private backendSrv: BackendSrv,
private templateSrv: TemplateSrv,
private $q: IQService
private $q
) {
this.id = instanceSettings.id;
this.applicationId = instanceSettings.jsonData.appInsightsAppId;
@@ -37,7 +36,7 @@ export default class AppInsightsDatasource {
return !!this.applicationId && this.applicationId.length > 0;
}
query(options: any) {
query(options) {
const queries = _.filter(options.targets, item => {
return item.hide !== true;
}).map(target => {
@@ -107,7 +106,6 @@ export default class AppInsightsDatasource {
});
if (!queries || queries.length === 0) {
// @ts-ignore
return;
}
@@ -132,16 +130,16 @@ export default class AppInsightsDatasource {
});
}
doQueries(queries: any) {
doQueries(queries) {
return _.map(queries, query => {
return this.doRequest(query.url)
.then((result: any) => {
.then(result => {
return {
result: result,
query: query,
};
})
.catch((err: any) => {
.catch(err => {
throw {
error: err,
query: query,
@@ -150,7 +148,7 @@ export default class AppInsightsDatasource {
});
}
annotationQuery(options: any) {}
annotationQuery(options) {}
metricFindQuery(query: string) {
const appInsightsMetricNameQuery = query.match(/^AppInsightsMetricNames\(\)/i);
@@ -170,7 +168,7 @@ export default class AppInsightsDatasource {
testDatasource() {
const url = `${this.baseUrl}/metrics/metadata`;
return this.doRequest(url)
.then((response: any) => {
.then(response => {
if (response.status === 200) {
return {
status: 'success',
@@ -184,7 +182,7 @@ export default class AppInsightsDatasource {
message: 'Returned http status code ' + response.status,
};
})
.catch((error: any) => {
.catch(error => {
let message = 'Application Insights: ';
message += error.statusText ? error.statusText + ': ' : '';
@@ -203,13 +201,13 @@ export default class AppInsightsDatasource {
});
}
doRequest(url: any, maxRetries = 1) {
doRequest(url, maxRetries = 1) {
return this.backendSrv
.datasourceRequest({
url: this.url + url,
method: 'GET',
})
.catch((error: any) => {
.catch(error => {
if (maxRetries > 0) {
return this.doRequest(url, maxRetries - 1);
}
@@ -225,20 +223,20 @@ export default class AppInsightsDatasource {
getMetricMetadata(metricName: string) {
const url = `${this.baseUrl}/metrics/metadata`;
return this.doRequest(url).then((result: any) => {
return this.doRequest(url).then(result => {
return new ResponseParser(result).parseMetadata(metricName);
});
}
getGroupBys(metricName: string) {
return this.getMetricMetadata(metricName).then((result: any) => {
return this.getMetricMetadata(metricName).then(result => {
return new ResponseParser(result).parseGroupBys();
});
}
getQuerySchema() {
const url = `${this.baseUrl}/query/schema`;
return this.doRequest(url).then((result: any) => {
return this.doRequest(url).then(result => {
const schema = new ResponseParser(result).parseQuerySchema();
// console.log(schema);
return schema;

View File

@@ -162,7 +162,7 @@ export default class FakeSchemaData {
};
}
static getlogAnalyticsFakeMetadata(): any {
static getlogAnalyticsFakeMetadata() {
return {
tables: [
{

View File

@@ -1,5 +1,5 @@
export default class SupportedNamespaces {
supportedMetricNamespaces: any = {
supportedMetricNamespaces = {
azuremonitor: [
'Microsoft.AnalysisServices/servers',
'Microsoft.ApiManagement/service',

View File

@@ -43,7 +43,7 @@ const defaultSchema: any = () => ({
},
});
const cleanText = (s: string) => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim();
const cleanText = s => s.replace(/[{}[\]="(),!~+\-*/^%]/g, '').trim();
const wrapText = (text: string) => ({ text });
export default class KustoQueryField extends QueryField {
@@ -353,13 +353,11 @@ export default class KustoQueryField extends QueryField {
}
getTableSuggestions(db = 'Default'): SuggestionGroup[] {
// @ts-ignore
if (this.schema.Databases[db]) {
return [
{
prefixMatch: true,
label: 'Tables',
// @ts-ignore
items: _.map(this.schema.Databases[db].Tables, (t: any) => ({ text: t.Name })),
},
];

View File

@@ -10,7 +10,6 @@ describe('Graphite query model', () => {
waitForFuncDefsLoaded: jest.fn().mockReturnValue(Promise.resolve(null)),
createFuncInstance: gfunc.createFuncInstance,
},
// @ts-ignore
templateSrv: new TemplateSrvStub(),
targets: [],
};

View File

@@ -111,7 +111,7 @@ describe('Request URL', () => {
};
const datasourceWithLabels = {
metadataRequest: (url: string) => {
metadataRequest: url => {
if (url.slice(0, 15) === '/api/prom/label') {
return { data: { data: ['other'] } };
} else {
@@ -154,7 +154,7 @@ describe('Query imports', () => {
it('returns empty query from selector query if label is not available', async () => {
const datasourceWithLabels = {
metadataRequest: (url: string) =>
metadataRequest: url =>
url.slice(0, 15) === '/api/prom/label'
? { data: { data: ['other'] } }
: { data: { data: [] as DataQueryResponseData[] } },
@@ -166,7 +166,7 @@ describe('Query imports', () => {
it('returns selector query from selector query with common labels', async () => {
const datasourceWithLabels = {
metadataRequest: (url: string) =>
metadataRequest: url =>
url.slice(0, 15) === '/api/prom/label'
? { data: { data: ['foo'] } }
: { data: { data: [] as DataQueryResponseData[] } },
@@ -178,7 +178,7 @@ describe('Query imports', () => {
it('returns selector query from selector query with all labels if logging label list is empty', async () => {
const datasourceWithLabels = {
metadataRequest: (url: string) =>
metadataRequest: url =>
url.slice(0, 15) === '/api/prom/label'
? { data: { data: [] as DataQueryResponseData[] } }
: { data: { data: [] as DataQueryResponseData[] } },

View File

@@ -3,7 +3,7 @@
echo -e "Collecting code stats (typescript errors & more)"
ERROR_COUNT_LIMIT=1670
ERROR_COUNT_LIMIT=2350
DIRECTIVES_LIMIT=172
CONTROLLERS_LIMIT=139

View File

@@ -1,42 +0,0 @@
#!/bin/bash

# Publishes all changed lerna packages under a pre-release version tagged with
# the current short git SHA (e.g. 6.3.0-abc1234). Intended to run in CI only:
# the version-bump commit it creates is local and never pushed.

# Prints the short hash of the current HEAD commit (empty if not in a git repo).
function parse_git_hash() {
  git rev-parse --short HEAD 2> /dev/null | sed "s/\(.*\)/\1/"
}

# Commits the lerna version changes locally so publish tooling sees a clean
# working tree. This commit is intentionally never checked in / pushed.
function prepare_version_commit () {
  echo $'\nCommitting version changes. This commit will not be checked-in!'
  git config --global user.email "circleci@grafana.com"
  git config --global user.name "CircleCI"
  git commit -am "Version commit"
}

# Get current version from lerna.json
PACKAGE_VERSION=$(grep '"version"' lerna.json | cut -d '"' -f 4)

# Get short hash of the current commit
GIT_SHA=$(parse_git_hash)

echo "Commit: ${GIT_SHA}"
echo "Current lerna.json version: ${PACKAGE_VERSION}"

# Count packages that changed since the last release; awk prints nothing
# (not 0) when the list is empty, hence the emptiness check below.
count=$(npx lerna changed --loglevel silent | awk '{c++} END {print c}')

if [ -z "$count" ]; then
  echo "No changes in packages, skipping packages publishing"
else
  echo "Changes detected in ${count} packages"
  echo "Releasing packages under ${PACKAGE_VERSION}-${GIT_SHA}"
  npx lerna version ${PACKAGE_VERSION}-${GIT_SHA} --no-git-tag-version --no-push --force-publish -y
  echo $'\nGit status:'
  git status -s
  echo $'\nBuilding packages'
  yarn packages:build
  prepare_version_commit
  echo $'\nPublishing packages'
  yarn packages:publishNext
fi

1615
yarn.lock

File diff suppressed because it is too large Load Diff