Compare commits

..

26 Commits

Author SHA1 Message Date
Dominik Prokop
4cafe613e1 Merge pull request #289 from grafana/bump-version-8.4.5
Release: Bump version to 8.4.5
2022-03-31 05:23:19 -07:00
grafanabot
eb30befb0f "Release: Updated versions in package to 8.4.5" 2022-03-31 12:07:48 +00:00
Dominik Prokop
8cea14f8d1 bump-version.yml - remove check about version alignment with main 2022-03-31 14:03:53 +02:00
Dominik Prokop
83f8c366a1 Merge branch 'v8.4.x' of github.com:grafana/grafana into v8.4.x 2022-03-31 13:39:07 +02:00
Dimitris Sotirakis
fcfb01faeb Update grabpl (#46639)
(cherry picked from commit 85184ee9dc)
(cherry picked from commit 3d8c6b1640)
2022-03-16 13:36:26 +00:00
Daniel Lee
b9df06ed3a Merge pull request #279 from grafana/bump-version-8.4.4
Release: Bump version to 8.4.4
2022-03-16 13:21:40 +01:00
grafanabot
a07ad8af45 "Release: Updated versions in package to 8.4.4" 2022-03-16 12:12:00 +00:00
Daniel Lee
708cda2052 Merge branch 'v8.4.x' of github.com:grafana/grafana into v8.4.x 2022-03-16 13:07:45 +01:00
Giordano Ricci
b7d2911cac Merge pull request #269 from grafana/bump-version-8.4.3
Release: Bump version to 8.4.3
2022-03-02 13:01:27 +00:00
grafanabot
f944a0fe16 "Release: Updated versions in package to 8.4.3" 2022-03-02 12:50:11 +00:00
Elfo404
e68b468420 Merge branch 'v8.4.x' of github.com:grafana/grafana into v8.4.x 2022-03-02 12:46:54 +00:00
sam boyer
dea7d4996a grafana-cli: Diff generated ts directly instead of relying on git (#45815)
* Add diffing support to grafana-cli cue gen-ts

* Rely on diff comparison in cuetsify pipeline step

* Ignore *.gen.ts files with eslint

* Chore: Fix lint `sdboyer/cuetsify-compare` (#45818)

* Sync drone

(cherry picked from commit 40645ab19e39ff9b0a12b7ebb13a4dc4c5e1d472)

* Fix lint

(cherry picked from commit c95ece983984432fea029335b2b729b09d76c7eb)

* Sign drone

Co-authored-by: Dimitris Sotirakis <sotirakis.dim@gmail.com>
(cherry picked from commit 60db643983)
(cherry picked from commit 0a3fcc07d8)
2022-02-24 13:19:31 +02:00
Grot (@grafanabot)
9fc02d717f Release: Bump version to 8.4.2 (#261)
* "Release: Updated versions in package to 8.4.2"

* updated yarn.lock - 8.4.2 release process

Co-authored-by: Stephanie Closson <srclosson@gmail.com>
2022-02-23 14:22:54 -04:00
Stephanie Closson
d98982bf02 Merge branch 'v8.4.x' of github.com:grafana/grafana into v8.4.x 2022-02-23 13:53:00 -04:00
Andrej Ocenas
53f5c6a44c Merge pull request #258 from grafana/bump-version-8.4.1
Release: Bump version to 8.4.1
2022-02-18 12:14:07 +01:00
Dimitris Sotirakis
5407140306 Update grabpl (#45591) (#45596)
(cherry picked from commit 0a8c3f92f6)
(cherry picked from commit 787553ed31)
2022-02-18 12:57:17 +02:00
Andrej Ocenas
0a66eb3a8a update yarn.lock 2022-02-18 11:33:51 +01:00
grafanabot
468735b7a2 "Release: Updated versions in package to 8.4.1" 2022-02-18 10:18:25 +00:00
Andrej Ocenas
eb1e411991 Merge branch 'v8.4.x' of github.com:grafana/grafana into v8.4.x 2022-02-18 11:11:57 +01:00
Andrej Ocenas
e6728d1434 Merge pull request #255 from grafana/bump-version-8.4.0
Release: Bump version to 8.4.0
2022-02-16 17:08:58 +01:00
Andrej Ocenas
6cd56ee459 Update yarn.lock 2022-02-16 16:38:48 +01:00
grafanabot
2d7625ac1b "Release: Updated versions in package to 8.4.0" 2022-02-16 15:24:39 +00:00
Andrej Ocenas
37c34c6d5b Merge branch 'v8.4.x' of github.com:grafana/grafana into v8.4.x 2022-02-16 16:11:07 +01:00
Andres Martinez Gotor
d0871959b6 Update yarn.lock 2022-02-02 17:58:37 +01:00
Andres Martinez Gotor
0a90d1eeeb Merge pull request #243 from grafana/bump-version-8.4.0-beta.1
Release: Bump version to 8.4.0-beta.1
2022-02-02 17:39:53 +01:00
grafanabot
c52dae0296 "Release: Updated versions in package to 8.4.0-beta.1" 2022-02-02 16:09:46 +00:00
96 changed files with 1416 additions and 2528 deletions

View File

@@ -1,8 +1,8 @@
[run]
init_cmds = [
["make", "gen-go"],
["GO_BUILD_DEV=1", "make", "build-cli"],
["GO_BUILD_DEV=1", "make", "build-server"],
["go", "run", "build.go", "-dev", "build-cli"],
["go", "run", "build.go", "-dev", "build-server"],
["./bin/grafana-server", "-packaging=dev", "cfg:app_mode=development"]
]
watch_all = true
@@ -13,10 +13,10 @@ watch_dirs = [
"$WORKDIR/conf",
]
watch_exts = [".go", ".ini", ".toml", ".template.html"]
ignore_files = ["wire_gen.go"]
ignore_files = ["wire_gen.go"]
build_delay = 1500
cmds = [
["make", "gen-go"],
["GO_BUILD_DEV=1", "make", "build-server"],
["go", "run", "build.go", "-dev", "build-server"],
["./bin/grafana-server", "-packaging=dev", "cfg:app_mode=development"]
]

1174
.drone.yml

File diff suppressed because it is too large Load Diff

49
.github/workflows/publish.yml vendored Normal file
View File

@@ -0,0 +1,49 @@
name: publish_docs
on:
push:
branches:
- v8.4.x
paths:
- 'docs/sources/**'
- 'packages/grafana-*/**'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- run: git clone --single-branch --no-tags --depth 1 -b master https://grafanabot:${{ secrets.GH_BOT_ACCESS_TOKEN }}@github.com/grafana/website-sync ./.github/actions/website-sync
- name: generate-packages-docs
uses: actions/setup-node@v2.5.1
id: generate-docs
with:
node-version: '14'
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn config get cacheFolder)"
- uses: actions/cache@v2.1.7
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
yarn-
- run: yarn install --immutable
- run: ./scripts/ci-reference-docs-build.sh
- name: publish-to-git
uses: ./.github/actions/website-sync
id: publish
with:
repository: grafana/website
branch: master
host: github.com
github_pat: '${{ secrets.GH_BOT_ACCESS_TOKEN }}'
source_folder: docs/sources
target_folder: content/docs/grafana/latest
allow_no_changes: 'true'
- shell: bash
run: |
test -n "${{ steps.publish.outputs.commit_hash }}"
test -n "${{ steps.publish.outputs.working_directory }}"

View File

@@ -1,43 +1,3 @@
<!-- 8.4.7 START -->
# 8.4.7 (2022-04-19)
### Features and enhancements
- **CloudWatch:** Added missing MemoryDB Namespace metrics. [#47290](https://github.com/grafana/grafana/pull/47290), [@james-deee](https://github.com/james-deee)
- **Histogram Panel:** Take decimal into consideration. [#47330](https://github.com/grafana/grafana/pull/47330), [@mdvictor](https://github.com/mdvictor)
- **TimeSeries:** Sort tooltip values based on raw values. [#46738](https://github.com/grafana/grafana/pull/46738), [@dprokop](https://github.com/dprokop)
### Bug fixes
- **API:** Include userId, orgId, uname in request logging middleware. [#47183](https://github.com/grafana/grafana/pull/47183), [@marefr](https://github.com/marefr)
- **Elasticsearch:** Respect maxConcurrentShardRequests datasource setting. [#47120](https://github.com/grafana/grafana/pull/47120), [@alexandrst88](https://github.com/alexandrst88)
<!-- 8.4.7 END -->
<!-- 8.4.6 START -->
# 8.4.6 (2022-04-12)
- **Security:** Fixes CVE-2022-24812. For more information, see our [blog](https://grafana.com/blog/2022/04/12/grafana-enterprise-8.4.6-released-with-high-severity-security-fix/)
<!-- 8.4.6 END -->
<!-- 8.4.5 START -->
# 8.4.5 (2022-03-31)
### Features and enhancements
- **Instrumentation:** Make backend plugin metrics endpoints available with optional authentication. [#46467](https://github.com/grafana/grafana/pull/46467), [@marefr](https://github.com/marefr)
- **Table panel:** Show datalinks for cell display modes JSON View and Gauge derivates. [#46020](https://github.com/grafana/grafana/pull/46020), [@mdvictor](https://github.com/mdvictor)
### Bug fixes
- **Azure Monitor:** Small bug fixes for Resource Picker. [#46665](https://github.com/grafana/grafana/pull/46665), [@sarahzinger](https://github.com/sarahzinger)
- **Logger:** Use specified format for file logger. [#46970](https://github.com/grafana/grafana/pull/46970), [@sakjur](https://github.com/sakjur)
- **Logs:** Handle missing fields in dataframes better. [#46963](https://github.com/grafana/grafana/pull/46963), [@gabor](https://github.com/gabor)
- **ManageDashboards:** Fix error when deleting all dashboards from folder view. [#46877](https://github.com/grafana/grafana/pull/46877), [@joshhunt](https://github.com/joshhunt)
<!-- 8.4.5 END -->
<!-- 8.4.4 START -->
# 8.4.4 (2022-03-16)

View File

@@ -20,7 +20,7 @@ COPY emails emails
ENV NODE_ENV production
RUN yarn build
FROM golang:1.17.9-alpine3.15 as go-builder
FROM golang:1.17.8-alpine3.15 as go-builder
RUN apk add --no-cache gcc g++ make

View File

@@ -21,7 +21,7 @@ COPY emails emails
ENV NODE_ENV production
RUN yarn build
FROM golang:1.17.9 AS go-builder
FROM golang:1.17.8 AS go-builder
WORKDIR /src/grafana

View File

@@ -14,8 +14,6 @@ GO_FILES ?= ./pkg/...
SH_FILES ?= $(shell find ./scripts -name *.sh)
API_DEFINITION_FILES = $(shell find ./pkg/api/docs/definitions -name '*.go' -print)
SWAGGER_TAG ?= latest
GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev)
GO_BUILD_FLAGS += $(if $(GO_BUILD_TAGS),-build-tags=$(GO_BUILD_TAGS))
all: deps build
@@ -89,15 +87,15 @@ gen-go: $(WIRE)
build-go: $(MERGED_SPEC_TARGET) gen-go ## Build all Go binaries.
@echo "build go files"
$(GO) run build.go $(GO_BUILD_FLAGS) build
$(GO) run build.go build
build-server: ## Build Grafana server.
@echo "build server"
$(GO) run build.go $(GO_BUILD_FLAGS) build-server
$(GO) run build.go build-server
build-cli: ## Build Grafana CLI application.
@echo "build grafana-cli"
$(GO) run build.go $(GO_BUILD_FLAGS) build-cli
$(GO) run build.go build-cli
build-js: ## Build frontend assets.
@echo "build frontend"

View File

@@ -29,10 +29,10 @@ For Cortex and Loki data sources to work with Grafana 8.0 alerting, enable the r
- From the **Select data source** drop-down, select an external Prometheus, an external Loki, or a Grafana Cloud data source.
- From the **Namespace** drop-down, select an existing rule namespace. Otherwise, click **Add new** and enter a name to create a new one. Namespaces can contain one or more rule groups and only have an organizational purpose.
- From the **Group** drop-down, select an existing group within the selected namespace. Otherwise, click **Add new** and enter a name to create a new one. Newly created rules are appended to the end of the group. Rules within a group are run sequentially at a regular interval, with the same evaluation time.
{{< figure src="/static/img/docs/alerting/unified/rule-edit-mimir-alert-type-8-0.png" max-width="550px" caption="Alert details" >}}
{{< figure src="/static/img/docs/alerting/unified/rule-edit-cortex-alert-type-8-0.png" max-width="550px" caption="Alert details" >}}
1. In Step 2, add the query to evaluate.
- Enter a PromQL or LogQL expression. The rule fires if the evaluation result has at least one series with a value that is greater than 0. An alert is created for each series.
{{< figure src="/static/img/docs/alerting/unified/rule-edit-mimir-query-8-0.png" max-width="550px" caption="Alert details" >}}
{{< figure src="/static/img/docs/alerting/unified/rule-edit-cortex-query-8-0.png" max-width="550px" caption="Alert details" >}}
1. In Step 3, add additional metadata associated with the rule.
- Add a description and summary to customize alert messages. Use the guidelines in [Annotations and labels for alerting]({{< relref "./alert-annotation-label.md" >}}).
- Add Runbook URL, panel, dashboard, and alert IDs.

View File

@@ -31,10 +31,10 @@ Grafana allows you to create alerting rules for an external Cortex or Loki insta
- From the **Select data source** drop-down, select an external Prometheus, an external Loki, or a Grafana Cloud data source.
- From the **Namespace** drop-down, select an existing rule namespace. Otherwise, click **Add new** and enter a name to create a new one. Namespaces can contain one or more rule groups and only have an organizational purpose. For more information, see [Cortex or Loki rule groups and namespaces]({{< relref "./edit-cortex-loki-namespace-group.md" >}}).
- From the **Group** drop-down, select an existing group within the selected namespace. Otherwise, click **Add new** and enter a name to create a new one. Newly created rules are appended to the end of the group. Rules within a group are run sequentially at a regular interval, with the same evaluation time.
{{< figure src="/static/img/docs/alerting/unified/rule-edit-mimir-alert-type-8-0.png" max-width="550px" caption="Alert details" >}}
{{< figure src="/static/img/docs/alerting/unified/rule-edit-cortex-alert-type-8-0.png" max-width="550px" caption="Alert details" >}}
1. In Step 2, add the query to evaluate.
- Enter a PromQL or LogQL expression. The rule fires if the evaluation result has at least one series with a value that is greater than 0. An alert is created for each series.
{{< figure src="/static/img/docs/alerting/unified/rule-edit-mimir-query-8-0.png" max-width="550px" caption="Alert details" >}}
{{< figure src="/static/img/docs/alerting/unified/rule-edit-cortex-query-8-0.png" max-width="550px" caption="Alert details" >}}
1. In Step 3, add conditions.
- In the **For** text box, specify the duration for which the condition must be true before an alert fires. If you specify `5m`, the condition must be true for 5 minutes before the alert fires.
> **Note:** Once a condition is met, the alert goes into the `Pending` state. If the condition remains active for the duration specified, the alert transitions to the `Firing` state, else it reverts to the `Normal` state.

View File

@@ -9,9 +9,9 @@ weight = 405
A namespace contains one or more groups. The rules within a group are run sequentially at a regular interval. The default interval is one (1) minute. You can rename Cortex or Loki rule namespaces and groups, and edit group evaluation intervals.
![Group list](/static/img/docs/alerting/unified/rule-list-edit-mimir-loki-icon-8-2.png 'Rule group list screenshot')
![Group list](/static/img/docs/alerting/unified/rule-list-edit-cortex-loki-icon-8-2.png 'Rule group list screenshot')
{{< figure src="/static/img/docs/alerting/unified/rule-list-edit-mimir-loki-icon-8-2.png" max-width="550px" caption="Alert details" >}}
{{< figure src="/static/img/docs/alerting/unified/rule-list-edit-cortex-loki-icon-8-2.png" max-width="550px" caption="Alert details" >}}
## Rename a namespace
@@ -36,4 +36,4 @@ The rules within a group are run sequentially at a regular interval, the default
When you rename the group, a new group with all the rules from the old group is created. The old group is deleted.
![Group edit modal](/static/img/docs/alerting/unified/rule-list-mimir-loki-edit-ns-group-8-2.png 'Rule group edit modal screenshot')
![Group edit modal](/static/img/docs/alerting/unified/rule-list-cortex-loki-edit-ns-group-8-2.png 'Rule group edit modal screenshot')

View File

@@ -4,7 +4,7 @@ aliases = ["/docs/grafana/latest/features/dashboard/dashboards/"]
weight = 80
+++
# Dashboard rows
# Dshboard rows
A dashboard row is a logical divider within a dashboard. It is used to group panels together.

View File

@@ -143,13 +143,13 @@ types of template variables.
### Query variable
The Elasticsearch data source supports two types of queries you can use in the _Query_ field of _Query_ variables. The query is written using a custom JSON string. The field should be mapped as a [keyword](https://www.elastic.co/guide/en/elasticsearch/reference/current/keyword.html#keyword) in the Elasticsearch index mapping. If it is [multi-field](https://www.elastic.co/guide/en/elasticsearch/reference/current/multi-fields.html) with both a `text` and `keyword` type, then use `"field":"fieldname.keyword"`(sometimes`fieldname.raw`) to specify the keyword field in your query.
The Elasticsearch data source supports two types of queries you can use in the _Query_ field of _Query_ variables. The query is written using a custom JSON string.
| Query | Description |
| ------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `{"find": "fields", "type": "keyword"}` | Returns a list of field names with the index type `keyword`. |
| `{"find": "terms", "field": "hostname.keyword", "size": 1000}` | Returns a list of values for a keyword using term aggregation. Query will use current dashboard time range as time range query. |
| `{"find": "terms", "field": "hostname", "query": '<lucene query>'}` | Returns a list of values for a keyword field using term aggregation and a specified lucene query filter. Query will use current dashboard time range as time range for query. |
| Query | Description |
| -------------------------------------------------------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `{"find": "fields", "type": "keyword"}` | Returns a list of field names with the index type `keyword`. |
| `{"find": "terms", "field": "@hostname", "size": 1000}` | Returns a list of values for a field using term aggregation. Query will use current dashboard time range as time range for query. |
| `{"find": "terms", "field": "@hostname", "query": '<lucene query>'}` | Returns a list of values for a field using term aggregation and a specified lucene query filter. Query will use current dashboard time range as time range for query. |
There is a default size limit of 500 on terms queries. Set the size property in your query to set a custom limit.
You can use other variables inside the query. Example query definition for a variable named `$host`.

View File

@@ -30,7 +30,7 @@ To access data source settings, hover your mouse over the **Configuration** (gea
### Min time interval
A lower limit for the [$__interval]({{< relref "../variables/variable-types/global-variables/#__interval" >}}) and [$__interval_ms]({{< relref "../variables/variable-types/global-variables/#__interval_ms" >}}) variables.
A lower limit for the [$__interval]({{< relref "../variables/variable-types/_index.md#the-interval-variable" >}}) and [$__interval_ms]({{< relref "../variables/variable-types/_index.md#the-interval-ms-variable" >}}) variables.
Recommended to be set to write frequency, for example `1m` if your data is written every minute.
This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a
number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported:

View File

@@ -36,7 +36,7 @@ Grafana ships with a built-in MySQL data source plugin that allows you to query
### Min time interval
A lower limit for the [$__interval]({{< relref "../variables/variable-types/global-variables/#__interval" >}}) and [$__interval_ms]({{< relref "../variables/variable-types/global-variables/#__interval_ms" >}}) variables.
A lower limit for the [$__interval]({{< relref "../variables/variable-types/_index.md#the-interval-variable" >}}) and [$__interval_ms]({{< relref "../variables/variable-types/_index.md#the-interval-ms-variable" >}}) variables.
Recommended to be set to write frequency, for example `1m` if your data is written every minute.
This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a
number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported:

View File

@@ -33,7 +33,7 @@ To access PostgreSQL settings, hover your mouse over the **Configuration** (gear
### Min time interval
A lower limit for the [$__interval]({{< relref "../variables/variable-types/global-variables/#__interval" >}}) and [$__interval_ms]({{< relref "../variables/variable-types/global-variables/#__interval_ms" >}}) variables.
A lower limit for the [$__interval]({{< relref "../variables/variable-types/_index.md#the-interval-variable" >}}) and [$__interval_ms]({{< relref "../variables/variable-types/_index.md#the-interval-ms-variable" >}}) variables.
Recommended to be set to write frequency, for example `1m` if your data is written every minute.
This option can also be overridden/configured in a dashboard panel under data source options. It's important to note that this value **needs** to be formatted as a
number followed by a valid time identifier, e.g. `1m` (1 minute) or `30s` (30 seconds). The following time identifiers are supported:

View File

@@ -16,25 +16,23 @@ Grafana includes built-in support for Prometheus. This topic explains options, v
To access Prometheus settings, hover your mouse over the **Configuration** (gear) icon, then click **Data Sources**, and then click the Prometheus data source.
| Name | Description |
| --------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `Name` | The data source name. This is how you refer to the data source in panels and queries. |
| `Default` | Default data source that is pre-selected for new panels. |
| `Url` | The URL of your Prometheus server, for example, `http://prometheus.example.org:9090`. |
| `Access` | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser. **Note**: Browser (direct) access is deprecated and will be removed in a future release. |
| `Basic Auth` | Enable basic authentication to the Prometheus data source. |
| `User` | User name for basic authentication. |
| `Password` | Password for basic authentication. |
| `Scrape interval` | Set this to the typical scrape and evaluation interval configured in Prometheus. Defaults to 15s. |
| `HTTP method` | Use either POST or GET HTTP method to query your data source. POST is the recommended and pre-selected method as it allows bigger queries. Change this to GET if you have a Prometheus version older than 2.1 or if POST requests are restricted in your network. |
| `Disable metrics lookup` | Checking this option will disable the metrics chooser and metric/label support in the query field's autocomplete. This helps if you have performance issues with bigger Prometheus instances. |
| `Custom Query Parameters` | Add custom parameters to the Prometheus query URL. For example `timeout`, `partial_response`, `dedup`, or `max_source_resolution`. Multiple parameters should be concatenated together with an '&amp;'. |
| **Exemplars configuration** | |
| `Internal link` | Enable this option if you have an internal link. When you enable this option, you will see a data source selector. Select the backend tracing data store for your exemplar data. |
| `Data source` | You will see this option only if you enable `Internal link` option. Select the backend tracing data store for your exemplar data. |
| `URL` | You will see this option only if the `Internal link` option is disabled. Enter the full URL of the external link. You can interpolate the value from the field with `${__value.raw }` macro. |
| `URL Label` | (Optional) add a custom display label to override the value of the `Label name` field. |
| `Label name` | Add a name for the exemplar traceID property. |
| Name | Description |
| ------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `Name` | The data source name. This is how you refer to the data source in panels and queries. |
| `Default` | Default data source that is pre-selected for new panels. |
| `Url` | The URL of your Prometheus server, for example, `http://prometheus.example.org:9090`. |
| `Access` | Server (default) = URL needs to be accessible from the Grafana backend/server, Browser = URL needs to be accessible from the browser. **Note**: Browser (direct) access is deprecated and will be removed in a future release. |
| `Basic Auth` | Enable basic authentication to the Prometheus data source. |
| `User` | User name for basic authentication. |
| `Password` | Password for basic authentication. |
| `Scrape interval` | Set this to the typical scrape and evaluation interval configured in Prometheus. Defaults to 15s. |
| `HTTP method` | Use either POST or GET HTTP method to query your data source. POST is the recommended and pre-selected method as it allows bigger queries. Change this to GET if you have a Prometheus version older than 2.1 or if POST requests are restricted in your network. |
| `Disable metrics lookup` | Checking this option will disable the metrics chooser and metric/label support in the query field's autocomplete. This helps if you have performance issues with bigger Prometheus instances. |
| `Custom Query Parameters` | Add custom parameters to the Prometheus query URL. For example `timeout`, `partial_response`, `dedup`, or `max_source_resolution`. Multiple parameters should be concatenated together with an '&amp;'. |
| `Label name` | Add the name of the field in the label object. |
| `URL` | If the link is external, then enter the full link URL. You can interpolate the value from the field with `${__value.raw }` macro. |
| `URL Label` | (Optional) Set a custom display label for the link URL. The link label defaults to the full external URL or the name of datasource and is overridden by this setting. |
| `Internal link` | Select if the link is internal or external. In the case of an internal link, a data source selector allows you to select the target data source. Supports tracing data sources only. |
## Prometheus query editor

View File

@@ -53,7 +53,6 @@ Improve an existing plugin with one of our guides:
- [Build a streaming data source plugin]({{< relref "build-a-streaming-data-source-plugin.md" >}})
- [Error handling]({{< relref "error-handling.md" >}})
- [Working with data frames]({{< relref "working-with-data-frames.md" >}})
- [Development with local Grafana]({{< relref "development-with-local-grafana.md" >}})
### Concepts

View File

@@ -1,83 +0,0 @@
+++
title = "Development with local Grafana"
+++
# Development with local Grafana
This guide allows you to setup a development environment where you run Grafana and your plugin locally. With this, you will be able to see your changes as you add them.
## Run Grafana in your host
If you have git, Go and the required version of NodeJS in your system, you can clone and run Grafana locally:
1. Download and set up Grafana. You can find instructions on how to do it in the [developer-guide](https://github.com/grafana/grafana/blob/HEAD/contribute/developer-guide.md).
2. Grafana will look for plugins, by default, on its `data/plugins` directory. You can create a symbolic link to your plugin repository to detect new changes:
```bash
ln -s <plugin-path>/dist data/plugins/<plugin-name>
```
3. (Optional) If the step above doesn't work for you (e.g. you are running on Windows), you can also modify the default path in the Grafana configuration (that can be found at `conf/custom.ini`) and point to the directory with your plugin:
```ini
[paths]
plugins = <path-to-your-plugin-parent-directory>
```
## Run Grafana with docker-compose
Another possibility is to run Grafana with docker-compose so it runs in a container. For doing so, create the docker-compose file in your plugin directory:
**NOTE**: Some plugins already include a docker-compose file so you can skip this step.
```yaml
version: '3.7'
services:
grafana:
# Change latest with your target version, if needed
image: grafana/grafana:latest
ports:
- 3000:3000/tcp
volumes:
# Use your plugin folder (e.g. redshift-datasource)
- ./dist:/var/lib/grafana/plugins/<plugin-folder>
- ./provisioning:/etc/grafana/provisioning
environment:
- TERM=linux
- GF_LOG_LEVEL=debug
- GF_DATAPROXY_LOGGING=true
- GF_DEFAULT_APP_MODE=development
```
## Run your plugin
Finally start your plugin in development mode. Go to your plugin root directory and follow these steps:
1. Build your plugin backend and start the frontend in watch mode:
```bash
mage -v
yarn watch
```
2. Start Grafana backend and frontend:
2.1 For a local copy of Grafana, go to the directory with Grafana source code and run:
```bash
make run
```
```bash
yarn start
```
2.2 Or with docker-compose, in your plugin directory, run:
```bash
docker-compose up
```
After this, you should be able to see your plugin listed in Grafana and test your changes. Note that any change in the frontend will require you to refresh your browser while changes in the backend may require to rebuild your plugin binaries and reload the plugin (`mage && mage reloadPlugin` for local development or `docker-compose up` again if you are using docker-compose).

View File

@@ -16,8 +16,6 @@ You can install and run Grafana using the official Docker images. Our docker ima
Each edition is available in two variants: Alpine and Ubuntu. See below.
For documentation regarding the configuration of a docker image, refer to [configure a Grafana Docker image](https://grafana.com/docs/grafana/latest/administration/configure-docker/).
This topic also contains important information about [migrating from earlier Docker image versions](#migrate-from-previous-docker-containers-versions).
> **Note:** You can use [Grafana Cloud](https://grafana.com/products/cloud/features/#cloud-logs) to avoid the overhead of installing, maintaining, and scaling your observability stack. The free forever plan includes Grafana, 10K Prometheus series, 50 GB logs, and more. [Create a free account to get started](https://grafana.com/auth/sign-up/create-user?pg=docs-grafana-install&plcmt=in-text).

View File

@@ -8,9 +8,6 @@ weight = 10000
Here you can find detailed release notes that list everything that is included in every release as well as notices
about deprecations, breaking changes as well as changes that relate to plugin development.
- [Release notes for 8.4.7]({{< relref "release-notes-8-4-7" >}})
- [Release notes for 8.4.6]({{< relref "release-notes-8-4-6" >}})
- [Release notes for 8.4.5]({{< relref "release-notes-8-4-5" >}})
- [Release notes for 8.4.4]({{< relref "release-notes-8-4-4" >}})
- [Release notes for 8.4.3]({{< relref "release-notes-8-4-3" >}})
- [Release notes for 8.4.2]({{< relref "release-notes-8-4-2" >}})

View File

@@ -1,20 +0,0 @@
+++
title = "Release notes for Grafana 8.4.5"
hide_menu = true
+++
<!-- Auto generated by update changelog github action -->
# Release notes for Grafana 8.4.5
### Features and enhancements
- **Instrumentation:** Make backend plugin metrics endpoints available with optional authentication. [#46467](https://github.com/grafana/grafana/pull/46467), [@marefr](https://github.com/marefr)
- **Table panel:** Show datalinks for cell display modes JSON View and Gauge derivates. [#46020](https://github.com/grafana/grafana/pull/46020), [@mdvictor](https://github.com/mdvictor)
### Bug fixes
- **Azure Monitor:** Small bug fixes for Resource Picker. [#46665](https://github.com/grafana/grafana/pull/46665), [@sarahzinger](https://github.com/sarahzinger)
- **Logger:** Use specified format for file logger. [#46970](https://github.com/grafana/grafana/pull/46970), [@sakjur](https://github.com/sakjur)
- **Logs:** Handle missing fields in dataframes better. [#46963](https://github.com/grafana/grafana/pull/46963), [@gabor](https://github.com/gabor)
- **ManageDashboards:** Fix error when deleting all dashboards from folder view. [#46877](https://github.com/grafana/grafana/pull/46877), [@joshhunt](https://github.com/joshhunt)

View File

@@ -1,10 +0,0 @@
+++
title = "Release notes for Grafana 8.4.6"
hide_menu = true
+++
<!-- Auto generated by update changelog github action -->
# Release notes for Grafana 8.4.6
- **Security:** Fixes CVE-2022-24812. For more information, see our [blog](https://grafana.com/blog/2022/04/12/grafana-enterprise-8.4.6-released-with-high-severity-security-fix/)

View File

@@ -1,19 +0,0 @@
+++
title = "Release notes for Grafana 8.4.7"
hide_menu = true
+++
<!-- Auto generated by update changelog github action -->
# Release notes for Grafana 8.4.7
### Features and enhancements
- **CloudWatch:** Added missing MemoryDB Namespace metrics. [#47290](https://github.com/grafana/grafana/pull/47290), [@james-deee](https://github.com/james-deee)
- **Histogram Panel:** Take decimal into consideration. [#47330](https://github.com/grafana/grafana/pull/47330), [@mdvictor](https://github.com/mdvictor)
- **TimeSeries:** Sort tooltip values based on raw values. [#46738](https://github.com/grafana/grafana/pull/46738), [@dprokop](https://github.com/dprokop)
### Bug fixes
- **API:** Include userId, orgId, uname in request logging middleware. [#47183](https://github.com/grafana/grafana/pull/47183), [@marefr](https://github.com/marefr)
- **Elasticsearch:** Respect maxConcurrentShardRequests datasource setting. [#47120](https://github.com/grafana/grafana/pull/47120), [@alexandrst88](https://github.com/alexandrst88)

10
go.mod
View File

@@ -105,12 +105,12 @@ require (
go.opentelemetry.io/otel/trace v1.2.0
golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e
golang.org/x/exp v0.0.0-20210220032938-85be41e4509f // indirect
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f
golang.org/x/net v0.0.0-20211013171255-e13a2654a71e
golang.org/x/oauth2 v0.0.0-20210819190943-2bc19b11175f
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c
golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac
golang.org/x/tools v0.1.9
gonum.org/v1/gonum v0.11.0
golang.org/x/tools v0.1.5
gonum.org/v1/gonum v0.9.3
google.golang.org/api v0.58.0
google.golang.org/grpc v1.41.0
google.golang.org/protobuf v1.27.1
@@ -168,9 +168,9 @@ require (
github.com/go-openapi/errors v0.20.0 // indirect
github.com/go-openapi/jsonpointer v0.19.5 // indirect
github.com/go-openapi/jsonreference v0.19.6 // indirect
github.com/go-openapi/loads v0.20.2
github.com/go-openapi/loads v0.20.2 // indirect
github.com/go-openapi/runtime v0.19.29 // indirect
github.com/go-openapi/spec v0.20.4
github.com/go-openapi/spec v0.20.4 // indirect
github.com/go-openapi/swag v0.19.15 // indirect
github.com/go-openapi/validate v0.20.2 // indirect
github.com/gogo/googleapis v1.4.1 // indirect

37
go.sum
View File

@@ -68,7 +68,6 @@ cuelang.org/go v0.4.0/go.mod h1:tz/edkPi+T37AZcb5GlPY+WJkL6KiDlDVupKwL3vvjs=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20201218220906-28db891af037/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
gioui.org v0.0.0-20210308172011-57750fc8a0a6/go.mod h1:RSH6KIUZ0p2xy5zHDxgAM4zumjgTw83q2ge/PI+yyw8=
git.sr.ht/~sbinet/gg v0.3.1/go.mod h1:KGYtlADtqsqANL9ueOFkWymvzUvLMQllU5Ixo+8v3pc=
github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8=
github.com/Azure/azure-amqp-common-go/v3 v3.0.0/go.mod h1:SY08giD/XbhTz07tJdpw1SoxQXHPN30+DI3Z04SYqyg=
github.com/Azure/azure-event-hubs-go/v3 v3.2.0/go.mod h1:BPIIJNH/l/fVHYq3Rm6eg4clbrULrQ3q7+icmqHyyLc=
@@ -255,10 +254,7 @@ github.com/aerospike/aerospike-client-go v1.27.0/go.mod h1:zj8LBEnWBDOVEIJt8LvaR
github.com/afex/hystrix-go v0.0.0-20180502004556-fa1af6a1f4f5/go.mod h1:SkGFH1ia65gfNATL8TAiHDNxPzPdmEL5uirI2Uyuz6c=
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY=
github.com/ajstarks/deck v0.0.0-20200831202436-30c9fc6549a9/go.mod h1:JynElWSGnm/4RlzPXRlREEwqTHAN3T56Bv2ITsFT3gY=
github.com/ajstarks/deck/generate v0.0.0-20210309230005-c3f852c02e19/go.mod h1:T13YZdzov6OU0A1+RfKZiZN9ca6VeKdBdyDV+BY97Tk=
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
github.com/ajstarks/svgo v0.0.0-20211024235047-1546f124cd8b/go.mod h1:1KcenG0jGWcpt8ov532z81sp/kMMUG485J2InIOyADM=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -372,7 +368,6 @@ github.com/bmizerany/pat v0.0.0-20170815010413-6226ea591a40/go.mod h1:8rLXio+Wji
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
github.com/bonitoo-io/go-sql-bigquery v0.3.4-1.4.0/go.mod h1:J4Y6YJm0qTWB9aFziB7cPeSyc6dOZFyJdteSeybVpXQ=
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/boombuler/barcode v1.0.1/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0=
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
github.com/bshuster-repo/logrus-logstash-hook v0.4.1/go.mod h1:zsTqEiSzDgAa/8GZR7E1qaXrhYNDKBYy5/dWPTIflbk=
@@ -802,7 +797,6 @@ github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm
github.com/go-fonts/dejavu v0.1.0/go.mod h1:4Wt4I4OU2Nq9asgDCteaAaWZOV24E+0/Pwo0gppep4g=
github.com/go-fonts/latin-modern v0.2.0/go.mod h1:rQVLdDMK+mK1xscDwsqM5J8U2jrRa3T0ecnM9pNujks=
github.com/go-fonts/liberation v0.1.1/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY=
github.com/go-fonts/liberation v0.2.0/go.mod h1:K6qoJYypsmfVjWg8KOVDQhLc8UDgIK2HYqyqAO9z7GY=
github.com/go-fonts/stix v0.1.0/go.mod h1:w/c1f0ldAUlJmLBvlbkvVXLAD+tAMqobIIQpmnUIzUY=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
@@ -816,7 +810,6 @@ github.com/go-kit/kit v0.11.0/go.mod h1:73/6Ixaufkvb5Osvkls8C79vuQ49Ba1rUEUYNSf+
github.com/go-kit/log v0.1.0 h1:DGJh0Sm43HbOeYDNnVZFl8BvcYVvjD5bqYJvp0REbwQ=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-latex/latex v0.0.0-20210118124228-b3d85cf34e07/go.mod h1:CO1AlKB2CSIqUrmQPqA0gdRIlnLEY0gK5JGjh37zN5U=
github.com/go-latex/latex v0.0.0-20210823091927-c0d11ff05a81/go.mod h1:SX0U8uGpxhq9o2S/CELCSUxEWWAuoCUcVCQWv7G2OCk=
github.com/go-ldap/ldap v3.0.2+incompatible/go.mod h1:qfd9rJvER9Q0/D/Sqn1DfHRoBp40uXYvFoEVrNEPqRc=
github.com/go-ldap/ldap/v3 v3.1.3/go.mod h1:3rbOH3jRS2u6jg2rJnKAMLE/xQyCKIveG2Sa/Cohzb8=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
@@ -872,6 +865,7 @@ github.com/go-openapi/jsonreference v0.18.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3Hfo
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
github.com/go-openapi/jsonreference v0.19.3/go.mod h1:rjx6GuL8TTa9VaixXglHmQmIL98+wF9xc8zWvFonSJ8=
github.com/go-openapi/jsonreference v0.19.4/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
github.com/go-openapi/jsonreference v0.19.5 h1:1WJP/wi4OjB4iV8KVbH73rQaoialJrqv8gitZLxGLtM=
github.com/go-openapi/jsonreference v0.19.5/go.mod h1:RdybgQwPxbL4UEjuAruzK1x3nE69AqPYEJeo/TWfEeg=
github.com/go-openapi/jsonreference v0.19.6 h1:UBIxjkht+AWIgYzCDSv2GN+E/togfwXUJFRTWhl2Jjs=
github.com/go-openapi/jsonreference v0.19.6/go.mod h1:diGHMEHg2IqXZGKxqyvWdfWU/aim5Dprw5bqpKkTvns=
@@ -913,6 +907,7 @@ github.com/go-openapi/spec v0.19.14/go.mod h1:gwrgJS15eCUgjLpMjBJmbZezCsw88LmgeE
github.com/go-openapi/spec v0.19.15/go.mod h1:+81FIL1JwC5P3/Iuuozq3pPE9dXdIEGxFutcFKaVbmU=
github.com/go-openapi/spec v0.20.0/go.mod h1:+81FIL1JwC5P3/Iuuozq3pPE9dXdIEGxFutcFKaVbmU=
github.com/go-openapi/spec v0.20.1/go.mod h1:93x7oh+d+FQsmsieroS4cmR3u0p/ywH649a3qwC9OsQ=
github.com/go-openapi/spec v0.20.3 h1:uH9RQ6vdyPSs2pSy9fL8QPspDF2AMIMPtmK5coSSjtQ=
github.com/go-openapi/spec v0.20.3/go.mod h1:gG4F8wdEDN+YPBMVnzE85Rbhf+Th2DTvA9nFPQ5AYEg=
github.com/go-openapi/spec v0.20.4 h1:O8hJrt0UMnhHcluhIdUgCLRWyM2x7QkBXRvOs7m+O1M=
github.com/go-openapi/spec v0.20.4/go.mod h1:faYFR1CvsJZ0mNsmsphTMSoRrNV3TEDoAM7FOEWeq8I=
@@ -956,8 +951,6 @@ github.com/go-openapi/validate v0.19.15/go.mod h1:tbn/fdOwYHgrhPBzidZfJC2MIVvs9G
github.com/go-openapi/validate v0.20.1/go.mod h1:b60iJT+xNNLfaQJUqLI7946tYiFEOuE9E4k54HpKcJ0=
github.com/go-openapi/validate v0.20.2 h1:AhqDegYV3J3iQkMPJSXkvzymHKMTw0BST3RK3hTT4ts=
github.com/go-openapi/validate v0.20.2/go.mod h1:e7OJoKNgd0twXZwIn0A43tHbvIcr/rZIVCbJBpTUoY0=
github.com/go-pdf/fpdf v0.5.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
github.com/go-pdf/fpdf v0.6.0/go.mod h1:HzcnA+A23uwogo0tp9yU+l3V+KXhiESpt1PMayhOh5M=
github.com/go-playground/locales v0.12.1/go.mod h1:IUMDtCfWo/w/mtMfIE/IG2K+Ey3ygWanZIBtBW0W2TM=
github.com/go-playground/universal-translator v0.16.0/go.mod h1:1AnU7NaIRDWWzGEKwgtJRd2xk99HeFyHw3yid4rvQIY=
github.com/go-redis/redis v6.15.9+incompatible h1:K0pv1D7EQUjfyoMql+r/jZqCLizCGKFlFgcHWWmHQjg=
@@ -1248,6 +1241,8 @@ github.com/grafana/dskit v0.0.0-20211011144203-3a88ec0b675f h1:FvvSVEbnGeM2bUivG
github.com/grafana/dskit v0.0.0-20211011144203-3a88ec0b675f/go.mod h1:uPG2nyK4CtgNDmWv7qyzYcdI+S90kHHRWvHnBtEMBXM=
github.com/grafana/go-mssqldb v0.0.0-20210326084033-d0ce3c521036 h1:GplhUk6Xes5JIhUUrggPcPBhOn+eT8+WsHiebvq7GgA=
github.com/grafana/go-mssqldb v0.0.0-20210326084033-d0ce3c521036/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
github.com/grafana/grafana-aws-sdk v0.9.1 h1:jMZlsLsWnqOwLt2UNcLUsJ2z6289hLYlscK35QgS158=
github.com/grafana/grafana-aws-sdk v0.9.1/go.mod h1:6KaQ8uUD4KpXr/b7bAC7zbfSXTVOiTk4XhIrwkGWn4w=
github.com/grafana/grafana-aws-sdk v0.10.0 h1:q7+mJtT/vsU5InDN57yM+BJ2z1kJDf1W4WwWPEZ0Cxw=
github.com/grafana/grafana-aws-sdk v0.10.0/go.mod h1:vFIOHEnY1u5nY0/tge1IHQjPuG6DRKr2ISf/HikUdjE=
github.com/grafana/grafana-google-sdk-go v0.0.0-20211104130251-b190293eaf58 h1:2ud7NNM7LrGPO4x0NFR8qLq68CqI4SmB7I2yRN2w9oE=
@@ -1954,7 +1949,6 @@ github.com/philhofer/fwd v1.1.1/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG
github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY=
github.com/phpdave11/gofpdi v1.0.7/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/pierrec/lz4 v1.0.2-0.20190131084431-473cd7ce01a1/go.mod h1:3/3N9NVKO0jef7pBehbT1qWhCMrIgbYNnFAZCqQ5LRc=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pierrec/lz4 v2.5.2+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
@@ -2149,7 +2143,6 @@ github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ruudk/golang-pdf417 v0.0.0-20181029194003-1af4ab5afa58/go.mod h1:6lfFZQK844Gfx8o5WFuvpxWRwnSoipWe/p622j1v06w=
github.com/ruudk/golang-pdf417 v0.0.0-20201230142125-a7e3863a1245/go.mod h1:pQAZKsJ8yyVxGRWYNEm9oFB8ieLgKFnamEyDmSA0BRk=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
@@ -2367,6 +2360,8 @@ github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBn
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio=
github.com/vectordotdev/go-datemath v0.1.1-0.20220110192739-f9ce83ec349f h1:2upw/ZfjkCKpc4k6DXg7lMfCSLkfw/8epV5/y2ZUQ8U=
github.com/vectordotdev/go-datemath v0.1.1-0.20220110192739-f9ce83ec349f/go.mod h1:PnwzbSst7KD3vpBzzlntZU5gjVa455Uqa5QPiKSYJzQ=
github.com/vectordotdev/go-datemath v0.1.1-0.20220323213446-f3954d0b18ae h1:oyiy3uBj1F4O3AaFh7hUGBrJjAssJhKyAbwxtkslxqo=
github.com/vectordotdev/go-datemath v0.1.1-0.20220323213446-f3954d0b18ae/go.mod h1:PnwzbSst7KD3vpBzzlntZU5gjVa455Uqa5QPiKSYJzQ=
github.com/vektah/gqlparser v1.1.2/go.mod h1:1ycwN7Ij5njmMkPPAOaRFY4rET2Enx7IkVv3vaXspKw=
@@ -2432,7 +2427,6 @@ github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9de
github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/goldmark v1.4.1/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k=
github.com/yuin/gopher-lua v0.0.0-20180630135845-46796da1b0b4/go.mod h1:aEV29XrmTYFr3CiRxZeGHpkvbwq+prZduBqMaascyCU=
github.com/yuin/gopher-lua v0.0.0-20200816102855-ee81675732da/go.mod h1:E1AXubJBdNmFERAOucpDIxNzeGfLzg0mYh+UfMWdChA=
github.com/yvasiyarov/go-metrics v0.0.0-20140926110328-57bccd1ccd43/go.mod h1:aX5oPXxHm3bOH+xeAttToC8pqch2ScQN/JoXYupl6xs=
@@ -2634,10 +2628,6 @@ golang.org/x/image v0.0.0-20200430140353-33d19683fad8/go.mod h1:FeLwcggjj3mMvU+o
golang.org/x/image v0.0.0-20200618115811-c13761719519/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20201208152932-35266b937fa6/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20210216034530-4410531fe030/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/image v0.0.0-20210607152325-775e3b0c77b9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
golang.org/x/image v0.0.0-20210628002857-a66eb6448b8d/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
golang.org/x/image v0.0.0-20211028202545-6944b10bf410/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
golang.org/x/image v0.0.0-20220302094943-723b81ca9867/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -2665,7 +2655,6 @@ golang.org/x/mod v0.3.1-0.20200828183125-ce943fd02449/go.mod h1:s0Qsj1ACt9ePp/hM
golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -2747,8 +2736,8 @@ golang.org/x/net v0.0.0-20210610132358-84b48f89b13b/go.mod h1:9nx3DQGgdP8bBQD5qx
golang.org/x/net v0.0.0-20210614182718-04defd469f4e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20210903162142-ad29c8ab022f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f h1:OfiFi4JbukWwe3lzw+xunroH1mnC1e2Gy5cxNJApiSY=
golang.org/x/net v0.0.0-20211015210444-4f30a5c0130f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211013171255-e13a2654a71e h1:Xj+JO91noE97IN6F/7WZxzC5QE6yENAQPrwIYhW3bsA=
golang.org/x/net v0.0.0-20211013171255-e13a2654a71e/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20181106182150-f42d05182288/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -2938,7 +2927,6 @@ golang.org/x/sys v0.0.0-20210823070655-63515b42dcdf/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20210906170528-6f6e22806c34/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210908233432-aa78b53d3365/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210917161153-d61c044b1678/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211019181941-9d821ace8654/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211110154304-99a53858aa08/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211124211545-fe61309f8881/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9 h1:XfKQ4OlFl8okEOr5UvAqFRVj8pY/4yfcXrddB8qAbU0=
@@ -3078,9 +3066,8 @@ golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.4/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8=
golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
@@ -3093,14 +3080,13 @@ gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca/go.mod h1:Y+Yx5eoAFn32cQvJ
gonum.org/v1/gonum v0.6.0/go.mod h1:9mxDZsDKxgMAuccQkewq682L+0eCu4dCN2yonUJTCLU=
gonum.org/v1/gonum v0.6.2/go.mod h1:9mxDZsDKxgMAuccQkewq682L+0eCu4dCN2yonUJTCLU=
gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
gonum.org/v1/gonum v0.9.3 h1:DnoIG+QAMaF5NvxnGe/oKsgKcAc6PcUyl8q0VetfQ8s=
gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0=
gonum.org/v1/gonum v0.11.0 h1:f1IJhK4Km5tBJmaiJXtk/PkL4cdVX6J+tGiM187uT5E=
gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA=
gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0 h1:OE9mWmgKkjJyEmDAAtGMPjXu+YNeGvK9VTSHY6+Qihc=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
gonum.org/v1/plot v0.9.0/go.mod h1:3Pcqqmp6RHvJI72kgb8fThyUnav364FOsdDo2aGW5lY=
gonum.org/v1/plot v0.10.1/go.mod h1:VZW5OlhkL1mysU9vaqNHnsy86inf6Ot+jB3r+BczCEo=
google.golang.org/api v0.0.0-20160322025152-9bf6e6e569ff/go.mod h1:4mhQ8q/RsB7i+udVvVy5NUi08OU8ZlA0gRVgrF7VFY0=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/api v0.3.2/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
@@ -3386,7 +3372,6 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.1.3/go.mod h1:NgwopIslSNH47DimFoV78dnkksY2EFtX0ajyb3K/las=
honnef.co/go/tools v0.2.0/go.mod h1:lPVVZ2BS5TfnjLyizF7o7hv7j9/L+8cZY2hLyjP9cGY=
howett.net/plist v0.0.0-20181124034731-591f970eefbb/go.mod h1:vMygbs4qMhSZSc4lCUl2OEE+rDiIIJAIdR4m7MiMcm0=
inet.af/netaddr v0.0.0-20210707202901-70468d781e6c/go.mod h1:z0nx+Dh+7N7CC8V5ayHtHGpZpxLQZZxkIaaz6HN65Ls=

View File

@@ -1,4 +1,4 @@
{
"stable": "8.4.6",
"testing": "8.4.6"
"stable": "8.4.4",
"testing": "8.4.4"
}

View File

@@ -4,5 +4,5 @@
"packages": [
"packages/*"
],
"version": "8.4.10"
"version": "8.4.5"
}

View File

@@ -3,7 +3,7 @@
"license": "AGPL-3.0-only",
"private": true,
"name": "grafana",
"version": "8.4.10",
"version": "8.4.5",
"repository": "github:grafana/grafana",
"scripts": {
"api-tests": "jest --notify --watch --config=devenv/e2e-api-tests/jest.js",
@@ -32,6 +32,7 @@
"packages:publishLatest": "lerna publish from-package --contents dist --yes --no-verify-access",
"packages:publishNext": "lerna publish from-package --contents dist --dist-tag next --yes --no-verify-access",
"packages:publishTest": "lerna publish from-package --contents dist --dist-tag test --yes --no-verify-access",
"packages:publishPrevious": "lerna publish from-package --contents dist --dist-tag previous --yes --no-verify-access",
"packages:publishDev": "lerna publish from-package --contents dist --dist-tag dev --yes --registry http://grafana-npm.local:4873 --force-publish=*",
"packages:typecheck": "lerna run typecheck",
"packages:clean": "lerna run clean",
@@ -131,7 +132,7 @@
"@types/papaparse": "5.3.1",
"@types/pluralize": "^0.0.29",
"@types/prismjs": "1.26.0",
"@types/rc-time-picker": "3.4.1",
"@types/rc-time-picker": "^3",
"@types/react": "17.0.38",
"@types/react-beautiful-dnd": "13.1.2",
"@types/react-dom": "17.0.11",
@@ -309,7 +310,7 @@
"logfmt": "^1.3.2",
"lru-cache": "6.0.0",
"memoize-one": "6.0.0",
"moment": "2.29.2",
"moment": "2.29.1",
"moment-timezone": "0.5.34",
"monaco-editor": "^0.31.1",
"monaco-promql": "^1.7.2",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/data",
"version": "8.4.10",
"version": "8.4.5",
"description": "Grafana Data Library",
"keywords": [
"typescript"
@@ -21,15 +21,15 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@braintree/sanitize-url": "6.0.0",
"@grafana/schema": "8.4.10",
"@braintree/sanitize-url": "5.0.2",
"@grafana/schema": "8.4.5",
"@types/d3-interpolate": "^1.4.0",
"d3-interpolate": "1.4.0",
"date-fns": "2.28.0",
"eventemitter3": "4.0.7",
"lodash": "4.17.21",
"marked": "4.0.10",
"moment": "2.29.2",
"moment": "2.29.1",
"moment-timezone": "0.5.34",
"ol": "6.12.0",
"papaparse": "5.3.1",
@@ -52,6 +52,7 @@
"@testing-library/react": "12.1.2",
"@testing-library/react-hooks": "7.0.2",
"@testing-library/user-event": "13.5.0",
"@types/braintree__sanitize-url": "4.1.0",
"@types/jest": "27.4.0",
"@types/jquery": "3.5.11",
"@types/lodash": "4.14.149",

View File

@@ -414,46 +414,6 @@ describe('Date display options', () => {
expect(processor('2020-08-01T08:48:43.783337Z').text).toEqual('2020-08-01 08:48:43');
});
it('should handle ISO string dates when in other timezones than UTC', () => {
const processor = getDisplayProcessor({
timeZone: 'CET',
field: {
type: FieldType.time,
config: {},
},
theme: createTheme(),
});
expect(processor('2020-08-01T08:48:43.783337Z').text).toEqual('2020-08-01 10:48:43'); //DST
expect(processor('2020-12-01T08:48:43.783337Z').text).toEqual('2020-12-01 09:48:43'); //STD
});
it('should handle ISO string dates with timezone offset', () => {
const processor = getDisplayProcessor({
timeZone: 'utc',
field: {
type: FieldType.time,
config: {},
},
theme: createTheme(),
});
expect(processor('2020-12-01T08:48:43.783337+02:00').text).toEqual('2020-12-01 06:48:43');
});
it('should handle ISO string dates without timezone qualifier by assuming UTC', () => {
const processor = getDisplayProcessor({
timeZone: 'CET',
field: {
type: FieldType.time,
config: {},
},
theme: createTheme(),
});
expect(processor('2020-12-01T08:48:43.783337').text).toEqual('2020-12-01 09:48:43');
});
describe('number formatting for string values', () => {
it('should preserve string unchanged if unit is strings', () => {
const processor = getDisplayProcessor({

View File

@@ -6,7 +6,7 @@ import { Field, FieldType } from '../types/dataFrame';
import { DisplayProcessor, DisplayValue } from '../types/displayValue';
import { getValueFormat, isBooleanUnit } from '../valueFormats/valueFormats';
import { getValueMappingResult } from '../utils/valueMappings';
import { toUtc, dateTimeParse } from '../datetime';
import { dateTime, dateTimeParse } from '../datetime';
import { KeyValue, TimeZone } from '../types';
import { getScaleCalculator } from './scale';
import { GrafanaTheme2 } from '../themes/types';
@@ -76,7 +76,7 @@ export function getDisplayProcessor(options?: DisplayProcessorOptions): DisplayP
const isStringUnit = unit === 'string';
if (hasDateUnit && typeof value === 'string') {
value = toUtc(value).valueOf();
value = dateTime(value).valueOf();
}
let numeric = isStringUnit ? NaN : anyToNumber(value);

View File

@@ -57,32 +57,12 @@ describe('stringToMs', () => {
});
});
describe('[un]escapeStringForRegex', () => {
it.each([
'[]',
'\\',
'[(abc])',
'onetwothree',
'<namedgroup}(this is not a regex>',
'string\\with\\backslash',
'everyspecialchar([{])}.,/?&*-^&<>#',
])('should be symmetric', (input) => {
const output = unEscapeStringFromRegex(escapeStringForRegex(input));
expect(output).toEqual(input);
});
});
describe('escapeStringForRegex', () => {
it.each([
'[[[',
'[]\\',
'[(abc])',
'onetwothree',
'<namedgroup}(this is not a regex>',
'string\\with\\backslash',
'everyspecialchar([{])}.,/?&*-^&<>#',
])('should always produce output that compiles', (value) => {
expect(() => new RegExp(escapeStringForRegex(value))).not.toThrowError();
describe('when using a string with special chars', () => {
it('then all special chars should be escaped', () => {
const result = escapeStringForRegex('([{}])|*+-.?<>#&^$');
expect(result).toBe('\\(\\[\\{\\}\\]\\)\\|\\*\\+\\-\\.\\?\\<\\>\\#\\&\\^\\$');
});
});
describe('when using a string without special chars', () => {

View File

@@ -1,16 +1,12 @@
import { camelCase } from 'lodash';
const specialChars = ['(', '[', '{', '}', ']', ')', '\\', '|', '*', '+', '-', '.', '?', '<', '>', '#', '&', '^', '$'];
const specialMatcher = '([\\' + specialChars.join('\\') + '])';
const specialCharEscape = new RegExp(specialMatcher, 'g');
const specialCharUnescape = new RegExp('(\\\\)' + specialMatcher, 'g');
const specialChars = ['(', '[', '{', '}', ']', ')', '|', '*', '+', '-', '.', '?', '<', '>', '#', '&', '^', '$'];
export const escapeStringForRegex = (value: string) => {
if (!value) {
return value;
}
return value.replace(specialCharEscape, '\\$1');
return specialChars.reduce((escaped, currentChar) => escaped.replace(currentChar, '\\' + currentChar), value);
};
export const unEscapeStringFromRegex = (value: string) => {
@@ -18,7 +14,7 @@ export const unEscapeStringFromRegex = (value: string) => {
return value;
}
return value.replace(specialCharUnescape, '$2');
return specialChars.reduce((escaped, currentChar) => escaped.replace('\\' + currentChar, currentChar), value);
};
export function stringStartsAsRegEx(str: string): boolean {

View File

@@ -159,9 +159,7 @@ export function buildHistogram(frames: DataFrame[], options?: HistogramTransform
for (const frame of frames) {
for (const field of frame.fields) {
if (field.type === FieldType.number) {
allValues = allValues.concat(
field.values.toArray().map((val: number) => Number(val.toFixed(field.config.decimals ?? 0)))
);
allValues = allValues.concat(field.values.toArray());
}
}
}
@@ -219,7 +217,7 @@ export function buildHistogram(frames: DataFrame[], options?: HistogramTransform
unit: undefined,
},
});
if (!config && Object.keys(field.config).length) {
if (!config && field.config.unit) {
config = field.config;
}
}

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/e2e-selectors",
"version": "8.4.10",
"version": "8.4.5",
"description": "Grafana End-to-End Test Selectors Library",
"keywords": [
"cli",

View File

@@ -332,8 +332,4 @@ export const Components = {
orgsTable: 'data-testid-user-orgs-table',
sessionsTable: 'data-testid-user-sessions-table',
},
FileUpload: {
inputField: 'data-testid-file-upload-input-field',
fileNameSpan: 'data-testid-file-upload-file-name',
},
};

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/e2e",
"version": "8.4.10",
"version": "8.4.5",
"description": "Grafana End-to-End Test Library",
"keywords": [
"cli",
@@ -48,7 +48,7 @@
"@babel/core": "7.16.7",
"@babel/preset-env": "7.16.7",
"@cypress/webpack-preprocessor": "5.11.0",
"@grafana/e2e-selectors": "8.4.10",
"@grafana/e2e-selectors": "8.4.5",
"@grafana/tsconfig": "^1.0.0-rc1",
"@mochajs/json-file-reporter": "^1.2.0",
"babel-loader": "8.2.3",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/runtime",
"version": "8.4.10",
"version": "8.4.5",
"description": "Grafana Runtime Library",
"keywords": [
"grafana",
@@ -22,9 +22,9 @@
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@grafana/data": "8.4.10",
"@grafana/e2e-selectors": "8.4.10",
"@grafana/ui": "8.4.10",
"@grafana/data": "8.4.5",
"@grafana/e2e-selectors": "8.4.5",
"@grafana/ui": "8.4.5",
"@sentry/browser": "6.17.2",
"history": "4.10.1",
"lodash": "4.17.21",

View File

@@ -98,9 +98,6 @@ export class GrafanaBootConfig implements GrafanaConfig {
featureHighlights = {
enabled: false,
};
reporting = {
enabled: true,
};
constructor(options: GrafanaBootConfig) {
const mode = options.bootData.user.lightTheme ? 'light' : 'dark';

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/schema",
"version": "8.4.10",
"version": "8.4.5",
"description": "Grafana Schema Library",
"keywords": [
"typescript"

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/toolkit",
"version": "8.4.10",
"version": "8.4.5",
"description": "Grafana Toolkit",
"keywords": [
"grafana",
@@ -28,10 +28,10 @@
"dependencies": {
"@babel/core": "7.13.14",
"@babel/preset-env": "7.13.12",
"@grafana/data": "8.4.10",
"@grafana/data": "8.4.5",
"@grafana/eslint-config": "2.5.2",
"@grafana/tsconfig": "^1.0.0-rc1",
"@grafana/ui": "8.4.10",
"@grafana/ui": "8.4.5",
"@jest/core": "26.6.3",
"@rushstack/eslint-patch": "1.0.6",
"@types/command-exists": "^1.2.0",

View File

@@ -2,7 +2,7 @@
"author": "Grafana Labs",
"license": "Apache-2.0",
"name": "@grafana/ui",
"version": "8.4.10",
"version": "8.4.5",
"description": "Grafana Components Library",
"keywords": [
"grafana",
@@ -33,9 +33,9 @@
"@emotion/css": "11.7.1",
"@emotion/react": "11.7.1",
"@grafana/aws-sdk": "0.0.31",
"@grafana/data": "8.4.10",
"@grafana/e2e-selectors": "8.4.10",
"@grafana/schema": "8.4.10",
"@grafana/data": "8.4.5",
"@grafana/e2e-selectors": "8.4.5",
"@grafana/schema": "8.4.5",
"@grafana/slate-react": "0.22.10-grafana",
"@monaco-editor/react": "4.3.1",
"@popperjs/core": "2.11.2",
@@ -58,7 +58,7 @@
"jquery": "3.6.0",
"lodash": "4.17.21",
"memoize-one": "6.0.0",
"moment": "2.29.2",
"moment": "2.29.1",
"monaco-editor": "^0.31.1",
"ol": "6.12.0",
"prismjs": "1.26.0",

View File

@@ -1,58 +1,32 @@
import { render, waitFor, fireEvent, screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import React from 'react';
import { selectors } from '@grafana/e2e-selectors';
import { shallow } from 'enzyme';
import { FileUpload } from './FileUpload';
describe('FileUpload', () => {
it('should render upload button with default text and no file name', () => {
render(<FileUpload onFileUpload={() => {}} />);
expect(screen.getByRole('button', { name: 'Upload file' })).toBeInTheDocument();
expect(screen.queryByLabelText('File name')).toBeNull();
const wrapper = shallow(<FileUpload onFileUpload={() => {}} />);
expect(wrapper.findWhere((comp) => comp.text() === 'Upload file').exists()).toBeTruthy();
expect(wrapper.find({ 'aria-label': 'File name' }).exists()).toBeFalsy();
});
it('clicking the button should trigger the input', async () => {
const mockInputOnClick = jest.fn();
const { getByTestId } = render(<FileUpload onFileUpload={() => {}} />);
const button = screen.getByRole('button', { name: 'Upload file' });
const input = getByTestId(selectors.components.FileUpload.inputField);
it("should trim uploaded file's name", () => {
const wrapper = shallow(<FileUpload onFileUpload={() => {}} />);
// attach a click listener to the input
input.onclick = mockInputOnClick;
wrapper.find('input').simulate('change', {
currentTarget: {
files: [{ name: 'longFileName.something.png' }],
},
});
expect(wrapper.find({ 'aria-label': 'File name' }).exists()).toBeTruthy();
// Trim file name longer than 16 chars
expect(wrapper.find({ 'aria-label': 'File name' }).text()).toEqual('longFileName.som....png');
await userEvent.click(button);
expect(mockInputOnClick).toHaveBeenCalled();
});
it('should display uploaded file name', async () => {
const testFileName = 'grafana.png';
const file = new File(['(⌐□_□)'], testFileName, { type: 'image/png' });
const onFileUpload = jest.fn();
const { getByTestId } = render(<FileUpload onFileUpload={onFileUpload} />);
let uploader = getByTestId(selectors.components.FileUpload.inputField);
await waitFor(() =>
fireEvent.change(uploader, {
target: { files: [file] },
})
);
let uploaderLabel = getByTestId(selectors.components.FileUpload.fileNameSpan);
expect(uploaderLabel).toHaveTextContent(testFileName);
});
it("should trim uploaded file's name", async () => {
const testFileName = 'longFileName.something.png';
const file = new File(['(⌐□_□)'], testFileName, { type: 'image/png' });
const onFileUpload = jest.fn();
const { getByTestId } = render(<FileUpload onFileUpload={onFileUpload} />);
let uploader = getByTestId(selectors.components.FileUpload.inputField);
await waitFor(() =>
fireEvent.change(uploader, {
target: { files: [file] },
})
);
let uploaderLabel = getByTestId(selectors.components.FileUpload.fileNameSpan);
expect(uploaderLabel).toHaveTextContent('longFileName.som....png');
// Keep the name below the length limit intact
wrapper.find('input').simulate('change', {
currentTarget: {
files: [{ name: 'longFileName.png' }],
},
});
expect(wrapper.find({ 'aria-label': 'File name' }).text()).toEqual('longFileName.png');
});
});

View File

@@ -1,16 +1,11 @@
import { css, cx } from '@emotion/css';
import React, { FC, FormEvent, useCallback, useState } from 'react';
import { v4 as uuidv4 } from 'uuid';
import { GrafanaTheme2 } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { useStyles2 } from '../../themes';
import { getFocusStyles } from '../../themes/mixins';
import { css, cx } from '@emotion/css';
import { Icon } from '../index';
import { stylesFactory, useTheme2 } from '../../themes';
import { ComponentSize } from '../../types/size';
import { getButtonStyles } from '../Button';
import { trimFileName } from '../../utils/file';
import { Icon } from '../index';
export interface Props {
/** Callback function to handle uploaded file */
@@ -30,9 +25,9 @@ export const FileUpload: FC<Props> = ({
accept = '*',
size = 'md',
}) => {
const style = useStyles2(getStyles(size));
const theme = useTheme2();
const style = getStyles(theme, size);
const [fileName, setFileName] = useState('');
const id = uuidv4();
const onChange = useCallback(
(event: FormEvent<HTMLInputElement>) => {
@@ -47,26 +42,20 @@ export const FileUpload: FC<Props> = ({
return (
<>
<input
type="file"
id={id}
className={style.fileUpload}
onChange={onChange}
multiple={false}
accept={accept}
data-testid={selectors.components.FileUpload.inputField}
/>
<label role="button" htmlFor={id} className={cx(style.labelWrapper, className)}>
<label className={cx(style.button, className)}>
<Icon name="upload" className={style.icon} />
{children}
<input
type="file"
id="fileUpload"
className={style.fileUpload}
onChange={onChange}
multiple={false}
accept={accept}
/>
</label>
{fileName && (
<span
aria-label="File name"
className={style.fileName}
data-testid={selectors.components.FileUpload.fileNameSpan}
>
<span aria-label="File name" className={style.fileName}>
{trimFileName(fileName)}
</span>
)}
@@ -74,25 +63,16 @@ export const FileUpload: FC<Props> = ({
);
};
const getStyles = (size: ComponentSize) => (theme: GrafanaTheme2) => {
const getStyles = stylesFactory((theme: GrafanaTheme2, size: ComponentSize) => {
const buttonStyles = getButtonStyles({ theme, variant: 'primary', size, iconOnly: false });
const focusStyle = getFocusStyles(theme);
return {
fileUpload: css({
height: '0.1px',
opacity: '0',
overflow: 'hidden',
position: 'absolute',
width: '0.1px',
zIndex: -1,
'&:focus + label': focusStyle,
'&:focus-visible + label': focusStyle,
}),
labelWrapper: buttonStyles.button,
fileUpload: css`
display: none;
`,
button: buttonStyles.button,
icon: buttonStyles.icon,
fileName: css({
marginLeft: theme.spacing(0.5),
}),
fileName: css`
margin-left: ${theme.spacing(0.5)};
`,
};
};
});

View File

@@ -2,13 +2,7 @@ import { XYFieldMatchers } from './types';
import { ArrayVector, DataFrame, FieldConfig, FieldType, outerJoinDataFrames, TimeRange } from '@grafana/data';
import { nullToUndefThreshold } from './nullToUndefThreshold';
import { applyNullInsertThreshold } from './nullInsertThreshold';
import {
AxisPlacement,
GraphDrawStyle,
GraphFieldConfig,
ScaleDistribution,
ScaleDistributionConfig,
} from '@grafana/schema';
import { AxisPlacement, GraphFieldConfig, ScaleDistribution, ScaleDistributionConfig } from '@grafana/schema';
import { FIXED_UNIT } from './GraphNG';
// will mutate the DataFrame's fields' values
@@ -37,23 +31,7 @@ function applySpanNullsThresholds(frame: DataFrame) {
export function preparePlotFrame(frames: DataFrame[], dimFields: XYFieldMatchers, timeRange?: TimeRange | null) {
let alignedFrame = outerJoinDataFrames({
frames: frames.map((frame) => {
let fr = applyNullInsertThreshold(frame, null, timeRange?.to.valueOf());
// prevent minesweeper-expansion of nulls (gaps) when joining bars
// since bar width is determined from the minimum distance between non-undefined values
// (this strategy will still retain any original pre-join nulls, though)
fr.fields.forEach((f) => {
if (f.type === FieldType.number && f.config.custom?.drawStyle === GraphDrawStyle.Bars) {
f.config.custom = {
...f.config.custom,
spanNulls: -1,
};
}
});
return fr;
}),
frames: frames.map((frame) => applyNullInsertThreshold(frame, null, timeRange?.to.valueOf())),
joinBy: dimFields.x,
keep: dimFields.y,
keepOriginIndices: true,

View File

@@ -45,13 +45,13 @@ export const VizTooltipContainer: React.FC<VizTooltipContainerProps> = ({
const tH = Math.floor(entry.contentRect.height + 2 * 8);
if (tooltipMeasurement.width !== tW || tooltipMeasurement.height !== tH) {
setTooltipMeasurement({
width: Math.min(tW, width),
height: Math.min(tH, height),
width: tW,
height: tH,
});
}
}
}),
[tooltipMeasurement, width, height]
[tooltipMeasurement]
);
useLayoutEffect(() => {

View File

@@ -214,7 +214,7 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({
let series: SeriesTableRowProps[] = [];
const frame = otherProps.data;
const fields = frame.fields;
const sortIdx: any[] = [];
const sortIdx: Array<[number, number]> = [];
for (let i = 0; i < fields.length; i++) {
const field = frame.fields[i];
@@ -232,7 +232,7 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({
const v = otherProps.data.fields[i].values.get(focusedPointIdxs[i]!);
const display = field.display!(v);
sortIdx.push(v);
sortIdx.push([series.length, v]);
series.push({
color: display.color || FALLBACK_COLOR,
label: getFieldDisplayName(field, frame),
@@ -242,16 +242,7 @@ export const TooltipPlugin: React.FC<TooltipPluginProps> = ({
}
if (sortOrder !== SortOrder.None) {
// create sort reference series array, as Array.sort() mutates the original array
const sortRef = [...series];
const sortFn = arrayUtils.sortValues(sortOrder);
series.sort((a, b) => {
// get compared values indices to retrieve raw values from sortIdx
const aIdx = sortRef.indexOf(a);
const bIdx = sortRef.indexOf(b);
return sortFn(sortIdx[aIdx], sortIdx[bIdx]);
});
series.sort((a, b) => arrayUtils.sortValues(sortOrder)(a.value, b.value));
}
tooltip = <SeriesTable series={series} timestamp={xVal} />;

View File

@@ -1,6 +1,6 @@
{
"name": "@jaegertracing/jaeger-ui-components",
"version": "8.4.10",
"version": "8.4.5",
"main": "src/index.ts",
"types": "src/index.ts",
"license": "Apache-2.0",
@@ -26,8 +26,8 @@
},
"dependencies": {
"@emotion/css": "11.7.1",
"@grafana/data": "8.4.10",
"@grafana/ui": "8.4.10",
"@grafana/data": "8.4.5",
"@grafana/ui": "8.4.5",
"chance": "^1.0.10",
"classnames": "^2.2.5",
"combokeys": "^3.0.0",
@@ -39,7 +39,7 @@
"lodash": "4.17.21",
"lru-memoize": "^1.1.0",
"memoize-one": "6.0.0",
"moment": "2.29.2",
"moment": "2.29.1",
"moment-timezone": "0.5.34",
"prop-types": "15.8.1",
"react": "17.0.2",

View File

@@ -272,9 +272,6 @@ func (hs *HTTPServer) getFrontendSettingsMap(c *models.ReqContext) (map[string]i
"recordedQueries": map[string]bool{
"enabled": hs.Cfg.SectionWithEnvOverrides("recorded_queries").Key("enabled").MustBool(true),
},
"reporting": map[string]bool{
"enabled": hs.Cfg.SectionWithEnvOverrides("reporting").Key("enabled").MustBool(true),
},
"unifiedAlertingEnabled": hs.Cfg.UnifiedAlerting.Enabled,
}

View File

@@ -221,11 +221,6 @@ func (hs *HTTPServer) PostSyncUserWithLDAP(c *models.ReqContext) response.Respon
ReqContext: c,
ExternalUser: user,
SignupAllowed: hs.Cfg.LDAPAllowSignup,
UserLookupParams: models.UserLookupParams{
UserID: &query.Result.Id, // Upsert by ID only
Email: nil,
Login: nil,
},
}
err = bus.Dispatch(c.Req.Context(), upsertCmd)

View File

@@ -313,11 +313,6 @@ func syncUser(
ReqContext: ctx,
ExternalUser: extUser,
SignupAllowed: connect.IsSignupAllowed(),
UserLookupParams: models.UserLookupParams{
Email: &extUser.Email,
UserID: nil,
Login: nil,
},
}
if err := bus.Dispatch(ctx.Req.Context(), cmd); err != nil {
return nil, err

View File

@@ -67,8 +67,7 @@ func TestUserAPIEndpoint_userLoggedIn(t *testing.T) {
}
idToken := "testidtoken"
token = token.WithExtra(map[string]interface{}{"id_token": idToken})
login := "loginuser"
query := &models.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test", UserLookupParams: models.UserLookupParams{Login: &login}}
query := &models.GetUserByAuthInfoQuery{Login: "loginuser", AuthModule: "test", AuthId: "test"}
cmd := &models.UpdateAuthInfoCommand{
UserId: user.Id,
AuthId: query.AuthId,

View File

@@ -11,7 +11,7 @@ const (
)
type Logger interface {
// New returns a new contextual Logger that has this logger's context plus the given context.
// New returns a new Logger that has this logger's context plus the given context
New(ctx ...interface{}) *ConcreteLogger
Log(keyvals ...interface{}) error

View File

@@ -1,9 +1,6 @@
package level
import (
"github.com/go-kit/log"
gokitlevel "github.com/go-kit/log/level"
)
import "github.com/go-kit/log"
// Error returns a logger that includes a Key/ErrorValue pair.
func Error(logger log.Logger) log.Logger {
@@ -56,12 +53,6 @@ func (l *logger) Log(keyvals ...interface{}) error {
levelAllowed = l.allowed&v.level != 0
break
}
if v, ok := keyvals[i].(gokitlevel.Value); ok {
hasLevel = true
levelAllowed = l.allowed&levelFromGokitLevel(v) != 0
break
}
}
if !hasLevel && l.squelchNoLevel {
return l.errNoLevel
@@ -205,35 +196,6 @@ const (
levelError
)
func IsKey(v interface{}) bool {
return v != nil && (v == Key() || v == gokitlevel.Key())
}
func GetValue(v interface{}) Value {
if v == nil {
return nil
}
if val, ok := v.(Value); ok {
return val
}
if val, ok := v.(gokitlevel.Value); ok {
switch val {
case gokitlevel.InfoValue():
return InfoValue()
case gokitlevel.WarnValue():
return WarnValue()
case gokitlevel.ErrorValue():
return ErrorValue()
case gokitlevel.DebugValue():
return DebugValue()
}
}
return nil
}
type levelValue struct {
name string
level
@@ -241,16 +203,3 @@ type levelValue struct {
func (v *levelValue) String() string { return v.name }
func (v *levelValue) levelVal() {}
func levelFromGokitLevel(l gokitlevel.Value) level {
switch l.String() {
case gokitlevel.ErrorValue().String():
return levelError
case gokitlevel.WarnValue().String():
return levelWarn
case gokitlevel.DebugValue().String():
return levelDebug
}
return levelInfo
}

View File

@@ -1,154 +0,0 @@
package level_test
import (
"testing"
gokitlog "github.com/go-kit/log"
gokitlevel "github.com/go-kit/log/level"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/log/level"
"github.com/stretchr/testify/require"
)
func TestNewFilter(t *testing.T) {
newFilteredLoggerScenario(t, "Given all levels is allowed should log all messages", level.AllowAll(), func(t *testing.T, ctx *scenarioContext) {
logTestMessages(t, ctx)
require.Len(t, ctx.loggedArgs, 8)
require.Equal(t, "lvl", ctx.loggedArgs[0][2].(string))
require.Equal(t, "info", ctx.loggedArgs[0][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[1][2].(string))
require.Equal(t, "warn", ctx.loggedArgs[1][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[2][2].(string))
require.Equal(t, "eror", ctx.loggedArgs[2][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[3][2].(string))
require.Equal(t, "dbug", ctx.loggedArgs[3][3].(level.Value).String())
require.Equal(t, "level", ctx.loggedArgs[4][0].(string))
require.Equal(t, "info", ctx.loggedArgs[4][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[5][0].(string))
require.Equal(t, "warn", ctx.loggedArgs[5][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[6][0].(string))
require.Equal(t, "error", ctx.loggedArgs[6][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[7][0].(string))
require.Equal(t, "debug", ctx.loggedArgs[7][1].(gokitlevel.Value).String())
})
newFilteredLoggerScenario(t, "Given error, warnings, info, debug is allowed should log all messages", level.AllowDebug(), func(t *testing.T, ctx *scenarioContext) {
logTestMessages(t, ctx)
require.Len(t, ctx.loggedArgs, 8)
require.Equal(t, "lvl", ctx.loggedArgs[0][2].(string))
require.Equal(t, "info", ctx.loggedArgs[0][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[1][2].(string))
require.Equal(t, "warn", ctx.loggedArgs[1][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[2][2].(string))
require.Equal(t, "eror", ctx.loggedArgs[2][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[3][2].(string))
require.Equal(t, "dbug", ctx.loggedArgs[3][3].(level.Value).String())
require.Equal(t, "level", ctx.loggedArgs[4][0].(string))
require.Equal(t, "info", ctx.loggedArgs[4][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[5][0].(string))
require.Equal(t, "warn", ctx.loggedArgs[5][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[6][0].(string))
require.Equal(t, "error", ctx.loggedArgs[6][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[7][0].(string))
require.Equal(t, "debug", ctx.loggedArgs[7][1].(gokitlevel.Value).String())
})
newFilteredLoggerScenario(t, "Given error, warnings is allowed should log error and warning messages", level.AllowWarn(), func(t *testing.T, ctx *scenarioContext) {
logTestMessages(t, ctx)
require.Len(t, ctx.loggedArgs, 4)
require.Equal(t, "lvl", ctx.loggedArgs[0][2].(string))
require.Equal(t, "warn", ctx.loggedArgs[0][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[1][2].(string))
require.Equal(t, "eror", ctx.loggedArgs[1][3].(level.Value).String())
require.Equal(t, "level", ctx.loggedArgs[2][0].(string))
require.Equal(t, "warn", ctx.loggedArgs[2][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[3][0].(string))
require.Equal(t, "error", ctx.loggedArgs[3][1].(gokitlevel.Value).String())
})
newFilteredLoggerScenario(t, "Given error allowed should log error messages", level.AllowError(), func(t *testing.T, ctx *scenarioContext) {
logTestMessages(t, ctx)
require.Len(t, ctx.loggedArgs, 2)
require.Equal(t, "lvl", ctx.loggedArgs[0][2].(string))
require.Equal(t, "eror", ctx.loggedArgs[0][3].(level.Value).String())
require.Equal(t, "level", ctx.loggedArgs[1][0].(string))
require.Equal(t, "error", ctx.loggedArgs[1][1].(gokitlevel.Value).String())
})
newFilteredLoggerScenario(t, "Given error, warnings, info is allowed should log error, warning and info messages", level.AllowInfo(), func(t *testing.T, ctx *scenarioContext) {
logTestMessages(t, ctx)
require.Len(t, ctx.loggedArgs, 6)
require.Equal(t, "lvl", ctx.loggedArgs[0][2].(string))
require.Equal(t, "info", ctx.loggedArgs[0][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[1][2].(string))
require.Equal(t, "warn", ctx.loggedArgs[1][3].(level.Value).String())
require.Equal(t, "lvl", ctx.loggedArgs[2][2].(string))
require.Equal(t, "eror", ctx.loggedArgs[2][3].(level.Value).String())
require.Equal(t, "level", ctx.loggedArgs[3][0].(string))
require.Equal(t, "info", ctx.loggedArgs[3][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[4][0].(string))
require.Equal(t, "warn", ctx.loggedArgs[4][1].(gokitlevel.Value).String())
require.Equal(t, "level", ctx.loggedArgs[5][0].(string))
require.Equal(t, "error", ctx.loggedArgs[5][1].(gokitlevel.Value).String())
})
newFilteredLoggerScenario(t, "Given no levels is allowed should not log any messages", level.AllowNone(), func(t *testing.T, ctx *scenarioContext) {
logTestMessages(t, ctx)
require.Len(t, ctx.loggedArgs, 0)
})
}
func logTestMessages(t *testing.T, ctx *scenarioContext) {
t.Helper()
ctx.logger.Info("info msg")
ctx.logger.Warn("warn msg")
ctx.logger.Error("error msg")
ctx.logger.Debug("debug msg")
err := gokitlevel.Info(ctx.logger).Log("msg", "gokit info msg")
require.NoError(t, err)
err = gokitlevel.Warn(ctx.logger).Log("msg", "gokit warn msg")
require.NoError(t, err)
err = gokitlevel.Error(ctx.logger).Log("msg", "gokit error msg")
require.NoError(t, err)
err = gokitlevel.Debug(ctx.logger).Log("msg", "gokit debug msg")
require.NoError(t, err)
}
type scenarioContext struct {
loggedArgs [][]interface{}
logger log.Logger
}
func newFilteredLoggerScenario(t *testing.T, desc string, option level.Option, fn func(t *testing.T, ctx *scenarioContext)) {
t.Helper()
ctx := &scenarioContext{
loggedArgs: [][]interface{}{},
}
l := gokitlog.LoggerFunc(func(i ...interface{}) error {
ctx.loggedArgs = append(ctx.loggedArgs, i)
return nil
})
filteredLogger := level.NewFilter(l, option)
testLogger := log.New("test")
testLogger.Swap(filteredLogger)
ctx.logger = testLogger
t.Run(desc, func(t *testing.T) {
fn(t, ctx)
})
}

View File

@@ -192,15 +192,6 @@ func (cl *ConcreteLogger) New(ctx ...interface{}) *ConcreteLogger {
return newConcreteLogger(gokitlog.With(&cl.SwapLogger), ctx...)
}
// New creates a new logger.
// First ctx argument is expected to be the name of the logger.
// Note: For a contextual logger, i.e. a logger with a shared
// name plus additional contextual information, you must use the
// Logger interface New method for it to work as expected.
// Example creating a shared logger:
// requestLogger := log.New("request-logger")
// Example creating a contextual logger:
// contextualLogger := requestLogger.New("username", "user123")
func New(ctx ...interface{}) *ConcreteLogger {
if len(ctx) == 0 {
return root.New()

View File

@@ -30,9 +30,8 @@ func TestLogger(t *testing.T) {
log4 := log3.New("key", "value")
err = log4.Log("msg", "hello 4")
require.NoError(t, err)
log3.Error("hello 3 again")
require.Len(t, ctx.loggedArgs, 5)
require.Len(t, ctx.loggedArgs, 4)
require.Len(t, ctx.loggedArgs[0], 4)
require.Equal(t, "logger", ctx.loggedArgs[0][0].(string))
require.Equal(t, "one", ctx.loggedArgs[0][1].(string))
@@ -63,16 +62,6 @@ func TestLogger(t *testing.T) {
require.Equal(t, "msg", ctx.loggedArgs[3][4].(string))
require.Equal(t, "hello 4", ctx.loggedArgs[3][5].(string))
require.Len(t, ctx.loggedArgs[4], 8)
require.Equal(t, "logger", ctx.loggedArgs[4][0].(string))
require.Equal(t, "three", ctx.loggedArgs[4][1].(string))
require.Equal(t, "t", ctx.loggedArgs[4][2].(string))
require.Equal(t, ctx.mockedTime.Format("2006-01-02T15:04:05.99-0700"), ctx.loggedArgs[4][3].(fmt.Stringer).String())
require.Equal(t, "lvl", ctx.loggedArgs[4][4].(string))
require.Equal(t, level.ErrorValue(), ctx.loggedArgs[4][5].(level.Value))
require.Equal(t, "msg", ctx.loggedArgs[4][6].(string))
require.Equal(t, "hello 3 again", ctx.loggedArgs[4][7].(string))
t.Run("When initializing root logger should swap loggers as expected", func(t *testing.T) {
swappedLoggedArgs := [][]interface{}{}
swapLogger := gokitlog.LoggerFunc(func(i ...interface{}) error {
@@ -98,7 +87,7 @@ func TestLogger(t *testing.T) {
log3.Error("hello 3")
log3.Debug("debug")
require.Len(t, ctx.loggedArgs, 5)
require.Len(t, ctx.loggedArgs, 4)
require.Len(t, swappedLoggedArgs, 7, "expected 4 messages for AllowAll logger and 3 messages for AllowInfo logger")
})
})

View File

@@ -8,8 +8,8 @@ import (
"os"
"github.com/go-kit/log"
"github.com/go-kit/log/level"
gokitsyslog "github.com/go-kit/log/syslog"
"github.com/grafana/grafana/pkg/infra/log/level"
"gopkg.in/ini.v1"
)
@@ -25,26 +25,31 @@ type SysLogHandler struct {
var selector = func(keyvals ...interface{}) syslog.Priority {
for i := 0; i < len(keyvals); i += 2 {
if level.IsKey(keyvals[i]) {
val := level.GetValue(keyvals[i+1])
if val != nil {
switch val {
case level.ErrorValue():
if keyvals[i] == level.Key() {
if v, ok := keyvals[i+1].(string); ok {
switch v {
case "emergency":
return syslog.LOG_EMERG
case "alert":
return syslog.LOG_ALERT
case "critical":
return syslog.LOG_CRIT
case "error":
return syslog.LOG_ERR
case level.WarnValue():
case "warning":
return syslog.LOG_WARNING
case level.InfoValue():
case "notice":
return syslog.LOG_NOTICE
case "info":
return syslog.LOG_INFO
case level.DebugValue():
case "debug":
return syslog.LOG_DEBUG
}
return syslog.LOG_LOCAL0
}
break
}
}
return syslog.LOG_INFO
return syslog.LOG_LOCAL0
}
func NewSyslog(sec *ini.Section, format Formatedlogger) *SysLogHandler {
@@ -57,7 +62,7 @@ func NewSyslog(sec *ini.Section, format Formatedlogger) *SysLogHandler {
handler.Tag = sec.Key("tag").MustString("")
if err := handler.Init(); err != nil {
root.Error("Failed to init syslog log handler", "error", err)
_ = level.Error(root).Log("Failed to init syslog log handler", "error", err)
os.Exit(1)
}
handler.logger = gokitsyslog.NewSyslogLogger(handler.syslog, format, gokitsyslog.PrioritySelectorOption(selector))

View File

@@ -57,13 +57,9 @@ var loginUsingLDAP = func(ctx context.Context, query *models.LoginUserQuery) (bo
ReqContext: query.ReqContext,
ExternalUser: externalUser,
SignupAllowed: setting.LDAPAllowSignup,
UserLookupParams: models.UserLookupParams{
Login: &externalUser.Login,
Email: &externalUser.Email,
UserID: nil,
},
}
if err = bus.Dispatch(ctx, upsert); err != nil {
err = bus.Dispatch(ctx, upsert)
if err != nil {
return true, err
}
query.User = upsert.Result

View File

@@ -33,7 +33,7 @@ func Logger(cfg *setting.Cfg) web.Handler {
c.Next()
timeTaken := time.Since(start) / time.Millisecond
duration := time.Since(start).String()
ctx := contexthandler.FromContext(c.Req.Context())
if ctx != nil && ctx.PerfmonTimer != nil {
ctx.PerfmonTimer.Observe(float64(timeTaken))
@@ -53,7 +53,6 @@ func Logger(cfg *setting.Cfg) web.Handler {
"status", status,
"remote_addr", c.RemoteAddr(),
"time_ms", int64(timeTaken),
"duration", duration,
"size", rw.Size(),
"referer", req.Referer(),
}

View File

@@ -55,11 +55,11 @@ type RequestURIKey struct{}
// COMMANDS
type UpsertUserCommand struct {
ReqContext *ReqContext
ExternalUser *ExternalUserInfo
UserLookupParams
Result *User
ReqContext *ReqContext
ExternalUser *ExternalUserInfo
SignupAllowed bool
Result *User
}
type SetAuthInfoCommand struct {
@@ -96,14 +96,9 @@ type LoginUserQuery struct {
type GetUserByAuthInfoQuery struct {
AuthModule string
AuthId string
UserLookupParams
}
type UserLookupParams struct {
// Describes lookup order as well
UserID *int64 // if set, will try to find the user by id
Email *string // if set, will try to find the user by email
Login *string // if set, will try to find the user by login
UserId int64
Email string
Login string
}
type GetExternalUserInfoByLoginQuery struct {

View File

@@ -66,11 +66,6 @@ func (h *ContextHandler) initContextWithJWT(ctx *models.ReqContext, orgId int64)
ReqContext: ctx,
SignupAllowed: h.Cfg.JWTAuthAutoSignUp,
ExternalUser: extUser,
UserLookupParams: models.UserLookupParams{
UserID: nil,
Login: &query.Login,
Email: &query.Email,
},
}
if err := bus.Dispatch(ctx.Req.Context(), upsert); err != nil {
ctx.Logger.Error("Failed to upsert JWT user", "error", err)

View File

@@ -247,11 +247,6 @@ func (auth *AuthProxy) LoginViaLDAP() (int64, error) {
ReqContext: auth.ctx,
SignupAllowed: auth.cfg.LDAPAllowSignup,
ExternalUser: extUser,
UserLookupParams: models.UserLookupParams{
Login: &extUser.Login,
Email: &extUser.Email,
UserID: nil,
},
}
if err := bus.Dispatch(auth.ctx.Req.Context(), upsert); err != nil {
return 0, err
@@ -308,11 +303,6 @@ func (auth *AuthProxy) LoginViaHeader() (int64, error) {
ReqContext: auth.ctx,
SignupAllowed: auth.cfg.AuthProxyAutoSignUp,
ExternalUser: extUser,
UserLookupParams: models.UserLookupParams{
UserID: nil,
Login: &extUser.Login,
Email: &extUser.Email,
},
}
err := bus.Dispatch(auth.ctx.Req.Context(), upsert)

View File

@@ -135,7 +135,7 @@ func (h *ContextHandler) Middleware(mContext *web.Context) {
case h.initContextWithAnonymousUser(reqContext):
}
reqContext.Logger = reqContext.Logger.New("userId", reqContext.UserId, "orgId", reqContext.OrgId, "uname", reqContext.Login)
reqContext.Logger = log.New("context", "userId", reqContext.UserId, "orgId", reqContext.OrgId, "uname", reqContext.Login)
span.AddEvents(
[]string{"uname", "orgId", "userId"},
[]tracing.EventValue{

View File

@@ -9,7 +9,6 @@ import (
"net/http"
"net/url"
"os"
"strconv"
"strings"
"sync"
"time"
@@ -1360,11 +1359,6 @@ func handleLog(msg centrifuge.LogEntry) {
func (g *GrafanaLive) sampleLiveStats() {
numClients := g.node.Hub().NumClients()
numUsers := g.node.Hub().NumUsers()
numChannels := g.node.Hub().NumChannels()
var numNodes int
if info, err := g.node.Info(); err == nil {
numNodes = len(info.Nodes)
}
g.usageStats.sampleCount++
g.usageStats.numClientsSum += numClients
@@ -1374,16 +1368,16 @@ func (g *GrafanaLive) sampleLiveStats() {
g.usageStats.numClientsMax = numClients
}
if numClients < g.usageStats.numClientsMin {
g.usageStats.numClientsMin = numClients
}
if numUsers > g.usageStats.numUsersMax {
g.usageStats.numUsersMax = numUsers
}
if numNodes > g.usageStats.numNodesMax {
g.usageStats.numNodesMax = numNodes
}
if numChannels > g.usageStats.numChannelsMax {
g.usageStats.numChannelsMax = numChannels
if numUsers < g.usageStats.numUsersMin {
g.usageStats.numUsersMin = numUsers
}
}
@@ -1391,65 +1385,38 @@ func (g *GrafanaLive) resetLiveStats() {
g.usageStats = usageStats{}
}
func getHistogramMetric(val int, bounds []int, metricPrefix string) string {
for _, bound := range bounds {
if val <= bound {
return metricPrefix + "le_" + strconv.Itoa(bound)
}
}
return metricPrefix + "le_inf"
}
func (g *GrafanaLive) collectLiveStats(_ context.Context) (map[string]interface{}, error) {
liveUsersAvg := 0
liveClientsAvg := 0
if g.usageStats.sampleCount > 0 {
liveUsersAvg = g.usageStats.numUsersSum / g.usageStats.sampleCount
liveClientsAvg = g.usageStats.numClientsSum / g.usageStats.sampleCount
}
var liveEnabled int
if g.Cfg.LiveMaxConnections != 0 {
liveEnabled = 1
}
var liveHAEnabled int
if g.Cfg.LiveHAEngine != "" {
liveHAEnabled = 1
}
metrics := map[string]interface{}{
"stats.live_enabled.count": liveEnabled,
"stats.live_ha_enabled.count": liveHAEnabled,
"stats.live_samples.count": g.usageStats.sampleCount,
"stats.live_users_max.count": g.usageStats.numUsersMax,
"stats.live_users_avg.count": liveUsersAvg,
"stats.live_clients_max.count": g.usageStats.numClientsMax,
"stats.live_clients_avg.count": liveClientsAvg,
"stats.live_channels_max.count": g.usageStats.numChannelsMax,
"stats.live_nodes_max.count": g.usageStats.numNodesMax,
}
metrics[getHistogramMetric(g.usageStats.numClientsMax, []int{0, 10, 100, 1000, 10000, 100000}, "stats.live_clients_")] = 1
metrics[getHistogramMetric(g.usageStats.numUsersMax, []int{0, 10, 100, 1000, 10000, 100000}, "stats.live_users_")] = 1
metrics[getHistogramMetric(g.usageStats.numChannelsMax, []int{0, 10, 100, 1000, 10000, 100000}, "stats.live_channels_")] = 1
metrics[getHistogramMetric(g.usageStats.numNodesMax, []int{1, 3, 9}, "stats.live_nodes_")] = 1
return metrics, nil
}
func (g *GrafanaLive) registerUsageMetrics() {
g.usageStatsService.RegisterSendReportCallback(g.resetLiveStats)
g.usageStatsService.RegisterMetricsFunc(g.collectLiveStats)
g.usageStatsService.RegisterMetricsFunc(func(context.Context) (map[string]interface{}, error) {
liveUsersAvg := 0
liveClientsAvg := 0
if g.usageStats.sampleCount > 0 {
liveUsersAvg = g.usageStats.numUsersSum / g.usageStats.sampleCount
liveClientsAvg = g.usageStats.numClientsSum / g.usageStats.sampleCount
}
metrics := map[string]interface{}{
"stats.live_samples.count": g.usageStats.sampleCount,
"stats.live_users_max.count": g.usageStats.numUsersMax,
"stats.live_users_min.count": g.usageStats.numUsersMin,
"stats.live_users_avg.count": liveUsersAvg,
"stats.live_clients_max.count": g.usageStats.numClientsMax,
"stats.live_clients_min.count": g.usageStats.numClientsMin,
"stats.live_clients_avg.count": liveClientsAvg,
}
return metrics, nil
})
}
type usageStats struct {
numClientsMax int
numClientsSum int
numUsersMax int
numUsersSum int
sampleCount int
numNodesMax int
numChannelsMax int
numClientsMax int
numClientsMin int
numClientsSum int
numUsersMax int
numUsersMin int
numUsersSum int
sampleCount int
}

View File

@@ -157,45 +157,3 @@ func TestCheckOrigin(t *testing.T) {
})
}
}
func Test_getHistogramMetric(t *testing.T) {
type args struct {
val int
bounds []int
metricPrefix string
}
tests := []struct {
name string
args args
want string
}{
{
"zero",
args{0, []int{0, 10, 100, 1000, 10000, 100000}, "live_users_"},
"live_users_le_0",
},
{
"equal_to_bound",
args{10, []int{0, 10, 100, 1000, 10000, 100000}, "live_users_"},
"live_users_le_10",
},
{
"in_the_middle",
args{30000, []int{0, 10, 100, 1000, 10000, 100000}, "live_users_"},
"live_users_le_100000",
},
{
"more_than_upper_bound",
args{300000, []int{0, 10, 100, 1000, 10000, 100000}, "live_users_"},
"live_users_le_inf",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := getHistogramMetric(tt.args.val, tt.args.bounds, tt.args.metricPrefix); got != tt.want {
t.Errorf("getHistogramMetric() = %v, want %v", got, tt.want)
}
})
}
}

View File

@@ -89,12 +89,11 @@ func (s *Implementation) LookupAndFix(ctx context.Context, query *models.GetUser
}
// if user id was specified and doesn't match the user_auth entry, remove it
if query.UserLookupParams.UserID != nil &&
*query.UserLookupParams.UserID != 0 &&
*query.UserLookupParams.UserID != authQuery.Result.UserId {
if err := s.DeleteAuthInfo(ctx, &models.DeleteAuthInfoCommand{
if query.UserId != 0 && query.UserId != authQuery.Result.UserId {
err := s.DeleteAuthInfo(ctx, &models.DeleteAuthInfoCommand{
UserAuth: authQuery.Result,
}); err != nil {
})
if err != nil {
s.logger.Error("Error removing user_auth entry", "error", err)
}
@@ -125,42 +124,42 @@ func (s *Implementation) LookupAndFix(ctx context.Context, query *models.GetUser
return false, nil, nil, models.ErrUserNotFound
}
func (s *Implementation) LookupByOneOf(ctx context.Context, params *models.UserLookupParams) (*models.User, error) {
func (s *Implementation) LookupByOneOf(userId int64, email string, login string) (bool, *models.User, error) {
foundUser := false
var user *models.User
var err error
foundUser := false
// If not found, try to find the user by id
if params.UserID != nil && *params.UserID != 0 {
foundUser, user, err = s.getUserById(*params.UserID)
if userId != 0 {
foundUser, user, err = s.getUserById(userId)
if err != nil {
return nil, err
return false, nil, err
}
}
// If not found, try to find the user by email address
if !foundUser && params.Email != nil && *params.Email != "" {
user = &models.User{Email: *params.Email}
if !foundUser && email != "" {
user = &models.User{Email: email}
foundUser, err = s.getUser(user)
if err != nil {
return nil, err
return false, nil, err
}
}
// If not found, try to find the user by login
if !foundUser && params.Login != nil && *params.Login != "" {
user = &models.User{Login: *params.Login}
if !foundUser && login != "" {
user = &models.User{Login: login}
foundUser, err = s.getUser(user)
if err != nil {
return nil, err
return false, nil, err
}
}
if !foundUser {
return nil, models.ErrUserNotFound
return false, nil, models.ErrUserNotFound
}
return user, nil
return foundUser, user, nil
}
func (s *Implementation) GenericOAuthLookup(ctx context.Context, authModule string, authId string, userID int64) (*models.UserAuth, error) {
@@ -189,7 +188,7 @@ func (s *Implementation) LookupAndUpdate(ctx context.Context, query *models.GetU
// 2. FindByUserDetails
if !foundUser {
user, err = s.LookupByOneOf(ctx, &query.UserLookupParams)
_, user, err = s.LookupByOneOf(query.UserId, query.Email, query.Login)
if err != nil {
return nil, err
}

View File

@@ -39,7 +39,7 @@ func TestUserAuth(t *testing.T) {
// By Login
login := "loginuser0"
query := &models.GetUserByAuthInfoQuery{UserLookupParams: models.UserLookupParams{Login: &login}}
query := &models.GetUserByAuthInfoQuery{Login: login}
user, err := srv.LookupAndUpdate(context.Background(), query)
require.Nil(t, err)
@@ -48,9 +48,7 @@ func TestUserAuth(t *testing.T) {
// By ID
id := user.Id
user, err = srv.LookupByOneOf(context.Background(), &models.UserLookupParams{
UserID: &id,
})
_, user, err = srv.LookupByOneOf(id, "", "")
require.Nil(t, err)
require.Equal(t, user.Id, id)
@@ -58,9 +56,7 @@ func TestUserAuth(t *testing.T) {
// By Email
email := "user1@test.com"
user, err = srv.LookupByOneOf(context.Background(), &models.UserLookupParams{
Email: &email,
})
_, user, err = srv.LookupByOneOf(0, email, "")
require.Nil(t, err)
require.Equal(t, user.Email, email)
@@ -68,9 +64,7 @@ func TestUserAuth(t *testing.T) {
// Don't find nonexistent user
email = "nonexistent@test.com"
user, err = srv.LookupByOneOf(context.Background(), &models.UserLookupParams{
Email: &email,
})
_, user, err = srv.LookupByOneOf(0, email, "")
require.Equal(t, models.ErrUserNotFound, err)
require.Nil(t, user)
@@ -87,7 +81,7 @@ func TestUserAuth(t *testing.T) {
// create user_auth entry
login := "loginuser0"
query.UserLookupParams.Login = &login
query.Login = login
user, err = srv.LookupAndUpdate(context.Background(), query)
require.Nil(t, err)
@@ -101,9 +95,9 @@ func TestUserAuth(t *testing.T) {
require.Equal(t, user.Login, login)
// get with non-matching id
idPlusOne := user.Id + 1
id := user.Id
query.UserLookupParams.UserID = &idPlusOne
query.UserId = id + 1
user, err = srv.LookupAndUpdate(context.Background(), query)
require.Nil(t, err)
@@ -146,9 +140,7 @@ func TestUserAuth(t *testing.T) {
login := "loginuser0"
// Calling GetUserByAuthInfoQuery on an existing user will populate an entry in the user_auth table
query := &models.GetUserByAuthInfoQuery{AuthModule: "test", AuthId: "test", UserLookupParams: models.UserLookupParams{
Login: &login,
}}
query := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test", AuthId: "test"}
user, err := srv.LookupAndUpdate(context.Background(), query)
require.Nil(t, err)
@@ -197,9 +189,7 @@ func TestUserAuth(t *testing.T) {
// Calling srv.LookupAndUpdateQuery on an existing user will populate an entry in the user_auth table
// Make the first log-in during the past
getTime = func() time.Time { return time.Now().AddDate(0, 0, -2) }
query := &models.GetUserByAuthInfoQuery{AuthModule: "test1", AuthId: "test1", UserLookupParams: models.UserLookupParams{
Login: &login,
}}
query := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test1", AuthId: "test1"}
user, err := srv.LookupAndUpdate(context.Background(), query)
getTime = time.Now
@@ -209,9 +199,7 @@ func TestUserAuth(t *testing.T) {
// Add a second auth module for this user
// Have this module's last log-in be more recent
getTime = func() time.Time { return time.Now().AddDate(0, 0, -1) }
query = &models.GetUserByAuthInfoQuery{AuthModule: "test2", AuthId: "test2", UserLookupParams: models.UserLookupParams{
Login: &login,
}}
query = &models.GetUserByAuthInfoQuery{Login: login, AuthModule: "test2", AuthId: "test2"}
user, err = srv.LookupAndUpdate(context.Background(), query)
getTime = time.Now
@@ -251,21 +239,16 @@ func TestUserAuth(t *testing.T) {
// Expect to pass since there's a matching login user
getTime = func() time.Time { return time.Now().AddDate(0, 0, -2) }
query := &models.GetUserByAuthInfoQuery{AuthModule: genericOAuthModule, AuthId: "", UserLookupParams: models.UserLookupParams{
Login: &login,
}}
query := &models.GetUserByAuthInfoQuery{Login: login, AuthModule: genericOAuthModule, AuthId: ""}
user, err := srv.LookupAndUpdate(context.Background(), query)
getTime = time.Now
require.Nil(t, err)
require.Equal(t, user.Login, login)
otherLoginUser := "aloginuser"
// Should throw a "user not found" error since there's no matching login user
getTime = func() time.Time { return time.Now().AddDate(0, 0, -2) }
query = &models.GetUserByAuthInfoQuery{AuthModule: genericOAuthModule, AuthId: "", UserLookupParams: models.UserLookupParams{
Login: &otherLoginUser,
}}
query = &models.GetUserByAuthInfoQuery{Login: "aloginuser", AuthModule: genericOAuthModule, AuthId: ""}
user, err = srv.LookupAndUpdate(context.Background(), query)
getTime = time.Now

View File

@@ -45,9 +45,11 @@ func (ls *Implementation) UpsertUser(ctx context.Context, cmd *models.UpsertUser
extUser := cmd.ExternalUser
user, err := ls.AuthInfoService.LookupAndUpdate(ctx, &models.GetUserByAuthInfoQuery{
AuthModule: extUser.AuthModule,
AuthId: extUser.AuthId,
UserLookupParams: cmd.UserLookupParams,
AuthModule: extUser.AuthModule,
AuthId: extUser.AuthId,
UserId: extUser.UserId,
Email: extUser.Email,
Login: extUser.Login,
})
if err != nil {
if !errors.Is(err, models.ErrUserNotFound) {

View File

@@ -93,12 +93,10 @@ func Test_teamSync(t *testing.T) {
AuthInfoService: authInfoMock,
}
email := "test_user@example.org"
upserCmd := &models.UpsertUserCommand{ExternalUser: &models.ExternalUserInfo{Email: email},
UserLookupParams: models.UserLookupParams{Email: &email}}
upserCmd := &models.UpsertUserCommand{ExternalUser: &models.ExternalUserInfo{Email: "test_user@example.org"}}
expectedUser := &models.User{
Id: 1,
Email: email,
Email: "test_user@example.org",
Name: "test_user",
Login: "test_user",
}

View File

@@ -102,15 +102,9 @@ func (dd *DingDingNotifier) Notify(ctx context.Context, as ...*types.Alert) (boo
}
}
if tmplErr != nil {
dd.log.Warn("failed to template DingDing message", "err", tmplErr.Error())
tmplErr = nil
}
u := tmpl(dd.URL)
if tmplErr != nil {
dd.log.Warn("failed to template DingDing URL", "err", tmplErr.Error(), "fallback", dd.URL)
u = dd.URL
dd.log.Warn("failed to template DingDing message", "err", tmplErr.Error())
}
body, err := json.Marshal(bodyMsg)

View File

@@ -79,64 +79,6 @@ func TestDingdingNotifier(t *testing.T) {
"msgtype": "actionCard",
},
expMsgError: nil,
}, {
name: "Missing field in template",
settings: `{
"url": "http://localhost",
"message": "I'm a custom template {{ .NotAField }} bad template",
"msgType": "actionCard"
}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1"},
},
}, {
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
Annotations: model.LabelSet{"ann1": "annv2"},
},
},
},
expMsg: map[string]interface{}{
"link": map[string]interface{}{
"messageUrl": "dingtalk://dingtalkclient/page/link?pc_slide=false&url=http%3A%2F%2Flocalhost%2Falerting%2Flist",
"text": "I'm a custom template ",
"title": "",
},
"msgtype": "link",
},
expMsgError: nil,
}, {
name: "Invalid template",
settings: `{
"url": "http://localhost",
"message": "I'm a custom template {{ {.NotAField }} bad template",
"msgType": "actionCard"
}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1"},
},
}, {
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
Annotations: model.LabelSet{"ann1": "annv2"},
},
},
},
expMsg: map[string]interface{}{
"link": map[string]interface{}{
"messageUrl": "dingtalk://dingtalkclient/page/link?pc_slide=false&url=http%3A%2F%2Flocalhost%2Falerting%2Flist",
"text": "",
"title": "",
},
"msgtype": "link",
},
expMsgError: nil,
}, {
name: "Error in initing",
settings: `{}`,
@@ -175,8 +117,6 @@ func TestDingdingNotifier(t *testing.T) {
require.NoError(t, err)
require.True(t, ok)
require.NotEmpty(t, webhookSender.Webhook.Url)
expBody, err := json.Marshal(c.expMsg)
require.NoError(t, err)

View File

@@ -120,15 +120,9 @@ func (gcn *GoogleChatNotifier) Notify(ctx context.Context, as ...*types.Alert) (
},
}
if tmplErr != nil {
gcn.log.Warn("failed to template GoogleChat message", "err", tmplErr.Error())
tmplErr = nil
}
u := tmpl(gcn.URL)
if tmplErr != nil {
gcn.log.Warn("failed to template GoogleChat URL", "err", tmplErr.Error(), "fallback", gcn.URL)
u = gcn.URL
gcn.log.Warn("failed to template GoogleChat message", "err", tmplErr.Error())
}
body, err := json.Marshal(res)

View File

@@ -205,7 +205,7 @@ func TestGoogleChatNotifier(t *testing.T) {
},
expMsgError: nil,
}, {
name: "Missing field in template",
name: "Invalid template",
settings: `{"url": "http://localhost", "message": "I'm a custom template {{ .NotAField }} bad template"}`,
alerts: []*types.Alert{
{
@@ -258,55 +258,6 @@ func TestGoogleChatNotifier(t *testing.T) {
},
},
expMsgError: nil,
}, {
name: "Invalid template",
settings: `{"url": "http://localhost", "message": "I'm a custom template {{ {.NotAField }} bad template"}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1", "__dashboardUid__": "abcd", "__panelId__": "efgh"},
},
},
},
expMsg: &outerStruct{
PreviewText: "[FIRING:1] (val1)",
FallbackText: "[FIRING:1] (val1)",
Cards: []card{
{
Header: header{
Title: "[FIRING:1] (val1)",
},
Sections: []section{
{
Widgets: []widget{
buttonWidget{
Buttons: []button{
{
TextButton: textButton{
Text: "OPEN IN GRAFANA",
OnClick: onClick{
OpenLink: openLink{
URL: "http://localhost/alerting/list",
},
},
},
},
},
},
textParagraphWidget{
Text: text{
// RFC822 only has the minute, hence it works in most cases.
Text: "Grafana v" + setting.BuildVersion + " | " + constNow.Format(time.RFC822),
},
},
},
},
},
},
},
},
expMsgError: nil,
},
}
@@ -342,8 +293,6 @@ func TestGoogleChatNotifier(t *testing.T) {
require.NoError(t, err)
require.True(t, ok)
require.NotEmpty(t, webhookSender.Webhook.Url)
expBody, err := json.Marshal(c.expMsg)
require.NoError(t, err)

View File

@@ -88,15 +88,9 @@ func (tn *TeamsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bool,
},
}
if tmplErr != nil {
tn.log.Warn("failed to template Teams message", "err", tmplErr.Error())
tmplErr = nil
}
u := tmpl(tn.URL)
if tmplErr != nil {
tn.log.Warn("failed to template Teams URL", "err", tmplErr.Error(), "fallback", tn.URL)
u = tn.URL
tn.log.Warn("failed to template Teams message", "err", tmplErr.Error())
}
b, err := json.Marshal(&body)

View File

@@ -103,88 +103,6 @@ func TestTeamsNotifier(t *testing.T) {
},
},
expMsgError: nil,
}, {
name: "Missing field in template",
settings: `{
"url": "http://localhost",
"message": "I'm a custom template {{ .NotAField }} bad template"
}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1"},
},
}, {
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
Annotations: model.LabelSet{"ann1": "annv2"},
},
},
},
expMsg: map[string]interface{}{
"@type": "MessageCard",
"@context": "http://schema.org/extensions",
"summary": "[FIRING:2] ",
"title": "[FIRING:2] ",
"themeColor": "#D63232",
"sections": []map[string]interface{}{
{
"title": "Details",
"text": "I'm a custom template ",
},
},
"potentialAction": []map[string]interface{}{
{
"@context": "http://schema.org",
"@type": "OpenUri",
"name": "View Rule",
"targets": []map[string]interface{}{{"os": "default", "uri": "http://localhost/alerting/list"}},
},
},
},
expMsgError: nil,
}, {
name: "Invalid template",
settings: `{
"url": "http://localhost",
"message": "I'm a custom template {{ {.NotAField }} bad template"
}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1"},
},
}, {
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
Annotations: model.LabelSet{"ann1": "annv2"},
},
},
},
expMsg: map[string]interface{}{
"@type": "MessageCard",
"@context": "http://schema.org/extensions",
"summary": "[FIRING:2] ",
"title": "[FIRING:2] ",
"themeColor": "#D63232",
"sections": []map[string]interface{}{
{
"title": "Details",
"text": "",
},
},
"potentialAction": []map[string]interface{}{
{
"@context": "http://schema.org",
"@type": "OpenUri",
"name": "View Rule",
"targets": []map[string]interface{}{{"os": "default", "uri": "http://localhost/alerting/list"}},
},
},
},
expMsgError: nil,
}, {
name: "Error in initing",
settings: `{}`,
@@ -224,8 +142,6 @@ func TestTeamsNotifier(t *testing.T) {
require.True(t, ok)
require.NoError(t, err)
require.NotEmpty(t, webhookSender.Webhook.Url)
expBody, err := json.Marshal(c.expMsg)
require.NoError(t, err)

View File

@@ -97,15 +97,9 @@ func (vn *VictoropsNotifier) Notify(ctx context.Context, as ...*types.Alert) (bo
ruleURL := joinUrlPath(vn.tmpl.ExternalURL.String(), "/alerting/list", vn.log)
bodyJSON.Set("alert_url", ruleURL)
if tmplErr != nil {
vn.log.Warn("failed to template VictorOps message", "err", tmplErr.Error())
tmplErr = nil
}
u := tmpl(vn.URL)
if tmplErr != nil {
vn.log.Info("failed to template VictorOps URL", "err", tmplErr.Error(), "fallback", vn.URL)
u = vn.URL
vn.log.Warn("failed to template VictorOps message", "err", tmplErr.Error())
}
b, err := bodyJSON.MarshalJSON()

View File

@@ -75,81 +75,6 @@ func TestVictoropsNotifier(t *testing.T) {
"state_message": "**Firing**\n\nValue: [no value]\nLabels:\n - alertname = alert1\n - lbl1 = val1\nAnnotations:\n - ann1 = annv1\nSilence: http://localhost/alerting/silence/new?alertmanager=grafana&matcher=alertname%3Dalert1&matcher=lbl1%3Dval1\n\nValue: [no value]\nLabels:\n - alertname = alert1\n - lbl1 = val2\nAnnotations:\n - ann1 = annv2\nSilence: http://localhost/alerting/silence/new?alertmanager=grafana&matcher=alertname%3Dalert1&matcher=lbl1%3Dval2\n",
},
expMsgError: nil,
}, {
name: "Custom message",
settings: `{"url": "http://localhost", "messageType": "Alerts firing: {{ len .Alerts.Firing }}"}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1"},
},
}, {
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
Annotations: model.LabelSet{"ann1": "annv2"},
},
},
},
expMsg: map[string]interface{}{
"alert_url": "http://localhost/alerting/list",
"entity_display_name": "[FIRING:2] ",
"entity_id": "6e3538104c14b583da237e9693b76debbc17f0f8058ef20492e5853096cf8733",
"message_type": "ALERTS FIRING: 2",
"monitoring_tool": "Grafana v" + setting.BuildVersion,
"state_message": "**Firing**\n\nValue: [no value]\nLabels:\n - alertname = alert1\n - lbl1 = val1\nAnnotations:\n - ann1 = annv1\nSilence: http://localhost/alerting/silence/new?alertmanager=grafana&matcher=alertname%3Dalert1&matcher=lbl1%3Dval1\n\nValue: [no value]\nLabels:\n - alertname = alert1\n - lbl1 = val2\nAnnotations:\n - ann1 = annv2\nSilence: http://localhost/alerting/silence/new?alertmanager=grafana&matcher=alertname%3Dalert1&matcher=lbl1%3Dval2\n",
},
expMsgError: nil,
}, {
name: "Missing field in template",
settings: `{"url": "http://localhost", "messageType": "custom template {{ .NotAField }} bad template"}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1"},
},
}, {
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
Annotations: model.LabelSet{"ann1": "annv2"},
},
},
},
expMsg: map[string]interface{}{
"alert_url": "http://localhost/alerting/list",
"entity_display_name": "",
"entity_id": "6e3538104c14b583da237e9693b76debbc17f0f8058ef20492e5853096cf8733",
"message_type": "CUSTOM TEMPLATE ",
"monitoring_tool": "Grafana v" + setting.BuildVersion,
"state_message": "",
},
expMsgError: nil,
}, {
name: "Invalid template",
settings: `{"url": "http://localhost", "messageType": "custom template {{ {.NotAField }} bad template"}`,
alerts: []*types.Alert{
{
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val1"},
Annotations: model.LabelSet{"ann1": "annv1"},
},
}, {
Alert: model.Alert{
Labels: model.LabelSet{"alertname": "alert1", "lbl1": "val2"},
Annotations: model.LabelSet{"ann1": "annv2"},
},
},
},
expMsg: map[string]interface{}{
"alert_url": "http://localhost/alerting/list",
"entity_display_name": "",
"entity_id": "6e3538104c14b583da237e9693b76debbc17f0f8058ef20492e5853096cf8733",
"message_type": "CRITICAL",
"monitoring_tool": "Grafana v" + setting.BuildVersion,
"state_message": "",
},
expMsgError: nil,
}, {
name: "Error in initing, no URL",
settings: `{}`,
@@ -189,8 +114,6 @@ func TestVictoropsNotifier(t *testing.T) {
require.NoError(t, err)
require.True(t, ok)
require.NotEmpty(t, webhookSender.Webhook.Url)
// Remove the non-constant timestamp
j, err := simplejson.NewJson([]byte(webhookSender.Webhook.Body))
require.NoError(t, err)

View File

@@ -42,12 +42,12 @@ var netClient = &http.Client{
}
func (ns *NotificationService) sendWebRequestSync(ctx context.Context, webhook *Webhook) error {
ns.log.Debug("Sending webhook", "url", webhook.Url, "http method", webhook.HttpMethod)
if webhook.HttpMethod == "" {
webhook.HttpMethod = http.MethodPost
}
ns.log.Debug("Sending webhook", "url", webhook.Url, "http method", webhook.HttpMethod)
if webhook.HttpMethod != http.MethodPost && webhook.HttpMethod != http.MethodPut {
return fmt.Errorf("webhook only supports HTTP methods PUT or POST")
}

View File

@@ -355,7 +355,6 @@ var metricsMap = map[string][]string{
"AWS/MediaPackage": {"ActiveInput", "EgressBytes", "EgressRequestCount", "EgressResponseTime", "IngressBytes", "IngressResponseTime"},
"AWS/MediaStore": {"RequestCount", "4xxErrorCount", "5xxErrorCount", "BytesUploaded", "BytesDownloaded", "TotalTime", "TurnaroundTime"},
"AWS/MediaTailor": {"AdDecisionServer.Ads", "AdDecisionServer.Duration", "AdDecisionServer.Errors", "AdDecisionServer.FillRate", "AdDecisionServer.Timeouts", "AdNotReady", "Avails.Duration", "Avails.FillRate", "Avails.FilledDuration", "GetManifest.Errors", "Origin.Errors", "Origin.Timeouts"},
"AWS/MemoryDB": {"ActiveDefragHits", "AuthenticationFailures", "BytesUsedForMemoryDB", "CommandAuthorizationFailures", "CPUUtilization", "CurrConnections", "CurrItems", "DatabaseMemoryUsagePercentage", "DB0AverageTTL", "EngineCPUUtilization", "EvalBasedCmds", "Evictions", "FreeableMemory", "GeoSpatialBasedCmds", "GetTypeCmds", "HashBasedCmds", "HyperLogLogBasedCmds", "IsPrimary", "KeyAuthorizationFailures", "KeyBasedCmds", "KeyspaceHits", "KeyspaceMisses", "KeysTracked", "ListBasedCmds", "MaxReplicationThroughput", "MemoryFragmentationRatio", "NetworkBandwidthInAllowanceExceeded", "NetworkBandwidthOutAllowanceExceeded", "NetworkBytesIn", "NetworkBytesOut", "NetworkConntrackAllowanceExceeded", "NetworkPacketsIn", "NetworkPacketsOut", "NetworkPacketsPerSecondAllowanceExceeded", "NewConnections", "PrimaryLinkHealthStatus", "PubSubBasedCmds", "Reclaimed", "ReplicationBytes", "ReplicationDelayedWriteCommands", "ReplicationLag", "SetBasedCmds", "SetTypeCmds", "SortedSetBasedCmds", "StringBasedCmds", "StreamBasedCmds", "SwapUsage"},
"AWS/NATGateway": {"ActiveConnectionCount", "BytesInFromDestination", "BytesInFromSource", "BytesOutToDestination", "BytesOutToSource", "ConnectionAttemptCount", "ConnectionEstablishedCount", "ErrorPortAllocation", "IdleTimeoutCount", "PacketsDropCount", "PacketsInFromDestination", "PacketsInFromSource", "PacketsOutToDestination", "PacketsOutToSource"},
"AWS/Neptune": {"CPUUtilization", "ClusterReplicaLag", "ClusterReplicaLagMaximum", "ClusterReplicaLagMinimum", "EngineUptime", "FreeLocalStorage", "FreeableMemory", "GremlinErrors", "GremlinHttp1xx", "GremlinHttp2xx", "GremlinHttp4xx", "GremlinHttp5xx", "GremlinRequests", "GremlinRequestsPerSec", "GremlinWebSocketAvailableConnections", "GremlinWebSocketClientErrors", "GremlinWebSocketServerErrors", "GremlinWebSocketSuccess", "Http100", "Http101", "Http1xx", "Http200", "Http2xx", "Http400", "Http403", "Http405", "Http413", "Http429", "Http4xx", "Http500", "Http501", "Http5xx", "LoaderErrors", "LoaderRequests", "NetworkReceiveThroughput", "NetworkThroughput", "NetworkTransmitThroughput", "SparqlErrors", "SparqlHttp1xx", "SparqlHttp2xx", "SparqlHttp4xx", "SparqlHttp5xx", "SparqlRequests", "SparqlRequestsPerSec", "StatusErrors", "StatusRequests", "VolumeBytesUsed", "VolumeReadIOPs", "VolumeWriteIOPs"},
"AWS/NetworkELB": {"ActiveFlowCount", "ActiveFlowCount_TLS", "ClientTLSNegotiationErrorCount", "ConsumedLCUs", "HealthyHostCount", "NewFlowCount", "NewFlowCount_TLS", "ProcessedBytes", "ProcessedBytes_TLS", "TCP_Client_Reset_Count", "TCP_ELB_Reset_Count", "TCP_Target_Reset_Count", "TargetTLSNegotiationErrorCount", "UnHealthyHostCount"},
@@ -469,7 +468,6 @@ var dimensionsMap = map[string][]string{
"AWS/MediaPackage": {"Channel", "No Dimension", "OriginEndpoint", "StatusCodeRange"},
"AWS/MediaStore": {"ContainerName", "ObjectGroupName", "RequestType"},
"AWS/MediaTailor": {"ConfigurationName"},
"AWS/MemoryDB": {"ClusterName", "NodeName"},
"AWS/NATGateway": {"NatGatewayId"},
"AWS/Neptune": {"DBClusterIdentifier", "DatabaseClass", "EngineName", "Role"},
"AWS/NetworkELB": {"AvailabilityZone", "LoadBalancer", "TargetGroup"},

View File

@@ -5,7 +5,6 @@ import (
"encoding/json"
"errors"
"fmt"
"strconv"
"github.com/Masterminds/semver"
"github.com/grafana/grafana-plugin-sdk-go/backend"
@@ -96,17 +95,8 @@ func newInstanceSettings() datasource.InstanceFactoryFunc {
timeInterval = ""
}
var maxConcurrentShardRequests float64
switch v := jsonData["maxConcurrentShardRequests"].(type) {
case float64:
maxConcurrentShardRequests = v
case string:
maxConcurrentShardRequests, err = strconv.ParseFloat(v, 64)
if err != nil {
maxConcurrentShardRequests = 256
}
default:
maxConcurrentShardRequests, ok := jsonData["maxConcurrentShardRequests"].(float64)
if !ok {
maxConcurrentShardRequests = 256
}

View File

@@ -1,6 +1,6 @@
{
"name": "@grafana-plugins/input-datasource",
"version": "8.4.10",
"version": "8.4.5",
"description": "Input Datasource",
"private": true,
"repository": {
@@ -24,9 +24,9 @@
"webpack": "5.58.1"
},
"dependencies": {
"@grafana/data": "8.4.10",
"@grafana/toolkit": "8.4.10",
"@grafana/ui": "8.4.10",
"@grafana/data": "8.4.5",
"@grafana/toolkit": "8.4.5",
"@grafana/ui": "8.4.5",
"jquery": "3.5.1",
"react": "17.0.1",
"react-dom": "17.0.1",

View File

@@ -14,66 +14,6 @@ describe('Extract fields from text', () => {
`);
});
it('Test key-values with single/double quotes', async () => {
const extractor = fieldExtractors.get(FieldExtractorID.KeyValues);
const out = extractor.parse('a="1", "b"=\'2\',c=3 x:y ;\r\nz="d and 4"');
expect(out).toMatchInlineSnapshot(`
Object {
"a": "1",
"b": "2",
"c": "3",
"x": "y",
"z": "d and 4",
}
`);
});
it('Test key-values with nested single/double quotes', async () => {
const extractor = fieldExtractors.get(FieldExtractorID.KeyValues);
const out = extractor.parse(
`a="1", "b"=\'2\',c=3 x:y ;\r\nz="dbl_quotes=\\"Double Quotes\\" sgl_quotes='Single Quotes'"`
);
expect(out).toMatchInlineSnapshot(`
Object {
"a": "1",
"b": "2",
"c": "3",
"x": "y",
"z": "dbl_quotes=\\"Double Quotes\\" sgl_quotes='Single Quotes'",
}
`);
});
it('Test key-values with nested separator characters', async () => {
const extractor = fieldExtractors.get(FieldExtractorID.KeyValues);
const out = extractor.parse(`a="1", "b"=\'2\',c=3 x:y ;\r\nz="This is; testing& validating, 1=:2"`);
expect(out).toMatchInlineSnapshot(`
Object {
"a": "1",
"b": "2",
"c": "3",
"x": "y",
"z": "This is; testing& validating, 1=:2",
}
`);
});
it('Test key-values where some values are null', async () => {
const extractor = fieldExtractors.get(FieldExtractorID.KeyValues);
const out = extractor.parse(`a=, "b"=\'2\',c=3 x: `);
expect(out).toMatchInlineSnapshot(`
Object {
"a": "",
"b": "2",
"c": "3",
"x": "",
}
`);
});
it('Split key+values', async () => {
const extractor = fieldExtractors.get(FieldExtractorID.KeyValues);
const out = extractor.parse('a="1", "b"=\'2\',c=3 x:y ;\r\nz="7"');

View File

@@ -19,99 +19,33 @@ const extJSON: FieldExtractor = {
},
};
function parseKeyValuePairs(raw: string): Record<string, string> {
const buff: string[] = []; // array of characters
let esc = '';
let key = '';
const obj: Record<string, string> = {};
for (let i = 0; i < raw.length; i++) {
let c = raw[i];
if (c === esc) {
esc = '';
c = raw[++i];
}
const isEscaped = c === '\\';
if (isEscaped) {
c = raw[++i];
}
// When escaped just append
if (isEscaped || esc.length) {
buff.push(c);
continue;
}
if (c === `"` || c === `'`) {
esc = c;
}
switch (c) {
case ':':
case '=':
if (buff.length) {
if (key) {
obj[key] = '';
}
key = buff.join('');
buff.length = 0; // clear values
}
break;
// escape chars
case `"`:
case `'`:
// whitespace
case ` `:
case `\n`:
case `\t`:
case `\r`:
case `\n`:
if (buff.length && key === '') {
obj[buff.join('')] = '';
buff.length = 0;
}
// seperators
case ',':
case ';':
case '&':
case '{':
case '}':
if (buff.length) {
const val = buff.join('');
if (key.length) {
obj[key] = val;
key = '';
} else {
key = val;
}
buff.length = 0; // clear values
}
break;
// append our buffer
default:
buff.push(c);
if (i === raw.length - 1) {
if (key === '' && buff.length) {
obj[buff.join('')] = '';
buff.length = 0;
}
}
}
}
if (key.length) {
obj[key] = buff.join('');
}
return obj;
}
// strips quotes and leading/trailing braces in prom labels
const stripDecor = /['"]|^\{|\}$/g;
// splits on whitespace and other label pair delimiters
const splitLines = /[\s,;&]+/g;
// splits kv pairs
const splitPair = /[=:]/g;
const extLabels: FieldExtractor = {
id: FieldExtractorID.KeyValues,
name: 'Key+value pairs',
description: 'Look for a=b, c: d values in the line',
parse: parseKeyValuePairs,
parse: (v: string) => {
const obj: Record<string, any> = {};
v.trim()
.replace(stripDecor, '')
.split(splitLines)
.forEach((pair) => {
let [k, v] = pair.split(splitPair);
if (k != null) {
obj[k] = v;
}
});
return obj;
},
};
const fmts = [extJSON, extLabels];

View File

@@ -2,7 +2,7 @@ import React, { FC, Fragment, useState } from 'react';
import { useDispatch } from 'react-redux';
import { useLocation } from 'react-router-dom';
import { css } from '@emotion/css';
import { AppEvents, GrafanaTheme2, textUtil, urlUtil } from '@grafana/data';
import { AppEvents, GrafanaTheme2, urlUtil } from '@grafana/data';
import { config } from '@grafana/runtime';
import { Button, ConfirmModal, ClipboardButton, HorizontalGroup, LinkButton, useStyles2 } from '@grafana/ui';
import { contextSrv } from 'app/core/services/context_srv';
@@ -96,7 +96,7 @@ export const RuleDetailsActionButtons: FC<Props> = ({ rule, rulesSource }) => {
variant="primary"
icon="book"
target="__blank"
href={textUtil.sanitizeUrl(rule.annotations[Annotation.runbookURL])}
href={rule.annotations[Annotation.runbookURL]}
>
View runbook
</LinkButton>

View File

@@ -93,15 +93,12 @@ describe('AlertingQueryRunner', () => {
await expect(data.pipe(take(1))).toEmitValuesWith((values) => {
const [data] = values;
// these test are flakey since the absolute computed "timeRange" can differ from the relative "defaultRelativeTimeRange"
// so instead we will check if the size of the timeranges match
const relativeA = rangeUtil.timeRangeToRelative(data.A.timeRange);
const relativeB = rangeUtil.timeRangeToRelative(data.B.timeRange);
const defaultRange = getDefaultRelativeTimeRange();
const expected = getDefaultRelativeTimeRange();
expect(relativeA.from - defaultRange.from).toEqual(relativeA.to - defaultRange.to);
expect(relativeB.from - defaultRange.from).toEqual(relativeB.to - defaultRange.to);
expect(relativeA).toEqual(expected);
expect(relativeB).toEqual(expected);
});
});

View File

@@ -110,7 +110,7 @@ RUN rm dockerize-linux-amd64-v${DOCKERIZE_VERSION}.tar.gz
# Use old Debian (this has support into 2022) in order to ensure binary compatibility with older glibc's.
FROM debian:stretch-20210208
ENV GOVERSION=1.17.9 \
ENV GOVERSION=1.17.8 \
PATH=/usr/local/go/bin:$PATH \
GOPATH=/go \
NODEVERSION=16.13.0-1nodesource1 \
@@ -147,10 +147,8 @@ RUN apt-get update && \
unzip && \
gem install -N fpm && \
ln -s /usr/bin/llvm-dsymutil-6.0 /usr/bin/dsymutil && \
curl -fsS https://deb.nodesource.com/gpgkey/nodesource.gpg.key | apt-key add - && \
curl -O https://deb.nodesource.com/node_16.x/pool/main/n/nodejs/nodejs_${NODEVERSION}_amd64.deb &&\
dpkg -i nodejs_${NODEVERSION}_amd64.deb &&\
rm nodejs_${NODEVERSION}_amd64.deb &&\
curl -fsL https://deb.nodesource.com/setup_16.x | bash - && \
apt-get update && apt-get install -yq nodejs=${NODEVERSION} && \
curl -fsS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - && \
echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list && \
apt-get update && apt-get install -yq yarn=${YARNVERSION} && \

View File

@@ -0,0 +1,54 @@
VERSION = dev
TAG = grafana/build-container
USER_ID = $(shell id -u)
GROUP_ID = $(shell id -g)
all: build deploy
build:
docker build -t "${TAG}:${VERSION}" .
deploy:
docker push "${TAG}:${VERSION}"
run:
docker run -ti \
-e "CIRCLE_BRANCH=local" \
-e "CIRCLE_BUILD_NUM=472" \
${TAG}:${VERSION} \
bash
run-with-local-source-live:
docker run -d \
-e "CIRCLE_BRANCH=local" \
-e "CIRCLE_BUILD_NUM=472" \
-w "/go/src/github.com/grafana/grafana" \
--name grafana-build \
-v "${GOPATH}/src/github.com/grafana/grafana:/go/src/github.com/grafana/grafana" \
${TAG}:${VERSION} \
bash -c "/tmp/bootstrap.sh; mkdir /.cache; chown "${USER_ID}:${GROUP_ID}" /.cache; tail -f /dev/null"
docker exec -ti --user "${USER_ID}:${GROUP_ID}" grafana-build bash
run-with-local-source-copy:
docker run -d \
-e "CIRCLE_BRANCH=local" \
-e "CIRCLE_BUILD_NUM=472" \
-w "/go/src/github.com/grafana/grafana" \
--name grafana-build \
${TAG}:${VERSION} \
bash -c "/tmp/bootstrap.sh; tail -f /dev/null"
docker cp "${GOPATH}/src/github.com/grafana/grafana" grafana-build:/go/src/github.com/grafana/
docker exec -ti grafana-build bash
update-source:
docker cp "${GOPATH}/src/github.com/grafana/grafana" grafana-build:/go/src/github.com/grafana/
attach:
docker exec -ti grafana-build bash
attach-live:
docker exec -ti --user "${USER_ID}:${GROUP_ID}" grafana-build bash
stop:
docker kill grafana-build
docker rm grafana-build

View File

@@ -5,6 +5,19 @@ is in Dockerfile, but there are supporting scripts such as the Makefile, for bui
The image is based on Debian Stretch, since we want an older Linux distribution (Stretch has long-term support into 2022) to build binaries that are as portable as possible.
## Makefile targets
- `make run-with-local-source-copy`
- Starts the container locally and copies your local sources into the container
- `make run-with-local-source-live`
- Starts the container (as your user) locally and maps your Grafana project dir into the container
- `make update-source`
- Updates the sources in the container from your local sources
- `make stop`
- Kills the container
- `make attach`
- Opens bash within the running container
## Build/Publish Docker Image
In order to build and publish the Grafana build Docker image, execute the following:

View File

@@ -0,0 +1,53 @@
#!/usr/bin/env bash
PACKAGES=("@grafana/ui" "@grafana/data" "@grafana/toolkit" "@grafana/runtime" "@grafana/e2e" "@grafana/e2e-selectors" "@grafana/schema")
GRAFANA_TAG=${1:-}
RELEASE_CHANNEL="latest"
if echo "$GRAFANA_TAG" | grep -q "^v"; then
_grafana_version=$(echo "${GRAFANA_TAG}" | cut -d "v" -f 2)
else
echo "Provided tag is not a version tag, skipping packages release..."
exit
fi
if grep -q "beta" <<< "$GRAFANA_TAG"; then
RELEASE_CHANNEL="next"
fi
echo "$_grafana_version"
# lerna bootstrap might have created yarn.lock
git checkout .
# Get current version from lerna.json
# Since this happens on tagged branch, the lerna.json version and package.json file SHOULD be updated already
# as specified in release guideline
PACKAGE_VERSION=$(grep '"version"' lerna.json | cut -d '"' -f 4)
echo "Releasing grafana packages @ ${PACKAGE_VERSION} under ${RELEASE_CHANNEL} channel"
if [ $RELEASE_CHANNEL == "latest" ]; then
SCRIPT="publishLatest"
elif [ $RELEASE_CHANNEL == "next" ]; then
SCRIPT="publishNext"
else
echo "Unknown channel, skipping packages release"
exit
fi
# Publish to NPM registry
echo "//registry.npmjs.org/:_authToken=${NPM_TOKEN}" >> ~/.npmrc
echo $'\nPublishing packages to NPM registry'
yarn packages:${SCRIPT}
# When releasing stable(latest) version of packages we are updating previously published next tag(beta) to be the same version as latest
if [ $RELEASE_CHANNEL == "latest" ]; then
for i in "${PACKAGES[@]}"
do
:
npm dist-tag add "$i"@"$PACKAGE_VERSION" next
done
fi

View File

@@ -1,9 +1,7 @@
load(
'scripts/drone/steps/lib.star',
'build_image',
'yarn_install_step',
'identify_runner_step',
'gen_version_step',
'initialize_step',
'download_grabpl_step',
'lint_frontend_step',
'codespell_step',
@@ -27,17 +25,16 @@ load(
def docs_pipelines(edition, ver_mode, trigger):
steps = [
download_grabpl_step(),
identify_runner_step(),
gen_version_step(ver_mode),
yarn_install_step(),
steps = [download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode)
# Insert remaining steps
steps.extend([
codespell_step(),
lint_docs(),
build_frontend_package_step(edition=edition, ver_mode=ver_mode),
build_frontend_docs_step(edition=edition),
build_docs_website_step(),
]
])
return pipeline(
name='{}-docs'.format(ver_mode), edition=edition, trigger=trigger, services=[], steps=steps,
@@ -48,7 +45,7 @@ def lint_docs():
'name': 'lint-docs',
'image': build_image,
'depends_on': [
'yarn-install',
'initialize',
],
'environment': {
'NODE_OPTIONS': '--max_old_space_size=8192',

View File

@@ -2,10 +2,7 @@ load(
'scripts/drone/steps/lib.star',
'download_grabpl_step',
'build_image',
'identify_runner_step',
'gen_version_step',
'wire_install_step',
'yarn_install_step',
'initialize_step',
'lint_drone_step',
'lint_backend_step',
'lint_frontend_step',
@@ -41,8 +38,7 @@ load(
'upload_cdn_step',
'validate_scuemata_step',
'ensure_cuetsified_step',
'test_a11y_frontend_step',
'trigger_oss'
'test_a11y_frontend_step'
)
load(
@@ -70,16 +66,10 @@ load('scripts/drone/vault.star', 'from_secret')
ver_mode = 'main'
def get_steps(edition):
def get_steps(edition, is_downstream=False):
services = integration_test_services(edition)
publish = edition != 'enterprise' or is_downstream
include_enterprise2 = edition == 'enterprise'
init_steps = [
identify_runner_step(),
download_grabpl_step(),
gen_version_step(ver_mode),
wire_install_step(),
yarn_install_step(),
]
test_steps = [
lint_drone_step(),
codespell_step(),
@@ -93,9 +83,9 @@ def get_steps(edition):
build_steps = [
trigger_test_release(),
enterprise_downstream_step(edition=edition),
build_backend_step(edition=edition, ver_mode=ver_mode),
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_frontend_package_step(edition=edition, ver_mode=ver_mode),
build_backend_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
build_frontend_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
build_frontend_package_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
build_plugins_step(edition=edition, sign=True),
validate_scuemata_step(),
ensure_cuetsified_step(),
@@ -115,12 +105,12 @@ def get_steps(edition):
test_backend_integration_step(edition=edition2),
])
build_steps.extend([
build_backend_step(edition=edition2, ver_mode=ver_mode, variants=['linux-amd64']),
build_backend_step(edition=edition2, ver_mode=ver_mode, variants=['linux-amd64'], is_downstream=is_downstream),
])
# Insert remaining steps
build_steps.extend([
package_step(edition=edition, ver_mode=ver_mode, include_enterprise2=include_enterprise2),
package_step(edition=edition, ver_mode=ver_mode, include_enterprise2=include_enterprise2, is_downstream=is_downstream),
grafana_server_step(edition=edition),
e2e_tests_step('dashboards-suite', edition=edition),
e2e_tests_step('smoke-tests-suite', edition=edition),
@@ -128,42 +118,42 @@ def get_steps(edition):
e2e_tests_step('various-suite', edition=edition),
e2e_tests_artifacts(edition=edition),
build_storybook_step(edition=edition, ver_mode=ver_mode),
store_storybook_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss),
store_storybook_step(edition=edition, ver_mode=ver_mode),
test_a11y_frontend_step(ver_mode=ver_mode, edition=edition),
frontend_metrics_step(edition=edition, trigger=trigger_oss),
frontend_metrics_step(edition=edition),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, ver_mode=ver_mode, publish=False),
build_docker_images_step(edition=edition, ver_mode=ver_mode, ubuntu=True, publish=False),
publish_images_step(edition=edition, ver_mode=ver_mode, mode='', docker_repo='grafana', trigger=trigger_oss),
publish_images_step(edition=edition, ver_mode=ver_mode, mode='', docker_repo='grafana-oss', trigger=trigger_oss)
publish_images_step(edition=edition, ver_mode=ver_mode, mode='', docker_repo='grafana', ubuntu=False),
publish_images_step(edition=edition, ver_mode=ver_mode, mode='', docker_repo='grafana-oss', ubuntu=True)
])
if include_enterprise2:
integration_test_steps.extend([redis_integration_tests_step(edition=edition2, ver_mode=ver_mode), memcached_integration_tests_step(edition=edition2, ver_mode=ver_mode)])
build_steps.extend([
release_canary_npm_packages_step(edition, trigger=trigger_oss),
upload_packages_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss),
upload_cdn_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss)
release_canary_npm_packages_step(edition),
upload_packages_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
upload_cdn_step(edition=edition, ver_mode=ver_mode)
])
if include_enterprise2:
edition2 = 'enterprise2'
build_steps.extend([
package_step(edition=edition2, ver_mode=ver_mode, include_enterprise2=include_enterprise2, variants=['linux-amd64']),
upload_packages_step(edition=edition2, ver_mode=ver_mode),
package_step(edition=edition2, ver_mode=ver_mode, include_enterprise2=include_enterprise2, variants=['linux-amd64'], is_downstream=is_downstream),
upload_packages_step(edition=edition2, ver_mode=ver_mode, is_downstream=is_downstream),
upload_cdn_step(edition=edition2, ver_mode=ver_mode)
])
windows_steps = get_windows_steps(edition=edition, ver_mode=ver_mode)
if edition == 'enterprise':
windows_steps = get_windows_steps(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream)
if edition == 'enterprise' and not is_downstream:
store_steps = []
else:
store_steps = [
store_packages_step(edition=edition, ver_mode=ver_mode),
store_packages_step(edition=edition, ver_mode=ver_mode, is_downstream=is_downstream),
]
return init_steps, test_steps, build_steps, integration_test_steps, windows_steps, store_steps
return test_steps, build_steps, integration_test_steps, windows_steps, store_steps
def trigger_test_release():
return {
@@ -190,10 +180,7 @@ def trigger_test_release():
'include': [
'.drone.yml',
]
},
'repo': [
'grafana/grafana',
]
}
}
}
@@ -207,9 +194,6 @@ def main_pipelines(edition):
drone_change_trigger = {
'event': ['push',],
'branch': 'main',
'repo': [
'grafana/grafana',
],
'paths': {
'include': [
'.drone.yml',
@@ -219,39 +203,73 @@ def main_pipelines(edition):
],
},
}
init_steps, test_steps, build_steps, integration_test_steps, windows_steps, store_steps = get_steps(edition=edition)
test_steps, build_steps, integration_test_steps, windows_steps, store_steps = get_steps(edition=edition)
if edition == 'enterprise':
services.append(ldap_service())
integration_test_steps.append(benchmark_ldap_step())
pipelines = [docs_pipelines(edition, ver_mode, trigger), pipeline(
name='main-test', edition=edition, trigger=trigger, services=[],
steps=init_steps + test_steps,
volumes=[],
), pipeline(
name='main-build-e2e-publish', edition=edition, trigger=trigger, services=[],
steps=init_steps + build_steps,
volumes=volumes,
), pipeline(
name='main-integration-tests', edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step(), identify_runner_step(),] + integration_test_steps,
volumes=volumes,
), pipeline(
name='windows-main', edition=edition, trigger=dict(trigger, repo=['grafana/grafana']),
steps=[identify_runner_step('windows')] + windows_steps,
depends_on=['main-test', 'main-build-e2e-publish', 'main-integration-tests'], platform='windows',
), notify_pipeline(
name='notify-drone-changes', slack_channel='slack-webhooks-test', trigger=drone_change_trigger,
template=drone_change_template, secret='drone-changes-webhook',
), pipeline(
name='publish-main', edition=edition, trigger=dict(trigger, repo=['grafana/grafana']),
steps=[download_grabpl_step(), identify_runner_step(),] + store_steps,
depends_on=['main-test', 'main-build-e2e-publish', 'main-integration-tests', 'windows-main', ],
), notify_pipeline(
name='notify-main', slack_channel='grafana-ci-notifications', trigger=dict(trigger, status=['failure']),
depends_on=['main-test', 'main-build-e2e-publish', 'main-integration-tests', 'windows-main', 'publish-main'],
template=failure_template, secret='slack_webhook'
)]
pipelines = [
docs_pipelines(edition, ver_mode, trigger),
pipeline(
name='main-test', edition=edition, trigger=trigger, services=[],
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) + test_steps,
volumes=[],
),
pipeline(
name='main-build-e2e-publish', edition=edition, trigger=trigger, services=[],
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) + build_steps,
volumes=volumes,
),
pipeline(
name='main-integration-tests', edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step()] + integration_test_steps,
volumes=volumes,
),
pipeline(
name='windows-main', edition=edition, trigger=trigger,
steps=initialize_step(edition, platform='windows', ver_mode=ver_mode) + windows_steps,
depends_on=['main-test', 'main-build-e2e-publish', 'main-integration-tests'], platform='windows',
), notify_pipeline(
name='notify-drone-changes', slack_channel='slack-webhooks-test', trigger=drone_change_trigger, template=drone_change_template, secret='drone-changes-webhook',
),
]
if edition != 'enterprise':
pipelines.append(pipeline(
name='publish-main', edition=edition, trigger=trigger,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode, install_deps=False) + store_steps,
depends_on=['main-test', 'main-build-e2e-publish', 'main-integration-tests', 'windows-main',],
))
pipelines.append(notify_pipeline(
name='notify-main', slack_channel='grafana-ci-notifications', trigger=dict(trigger, status = ['failure']),
depends_on=['main-test', 'main-build-e2e-publish', 'main-integration-tests', 'windows-main', 'publish-main'], template=failure_template, secret='slack_webhook'
))
else:
# Add downstream enterprise pipelines triggerable from OSS builds
trigger = {
'event': ['custom',],
}
test_steps, build_steps, integration_test_steps, windows_steps, store_steps = get_steps(edition=edition, is_downstream=True)
pipelines.append(pipeline(
name='build-main-downstream', edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode, is_downstream=True) + test_steps + build_steps + integration_test_steps,
volumes=volumes,
))
pipelines.append(pipeline(
name='windows-main-downstream', edition=edition, trigger=trigger,
steps=[download_grabpl_step()] + initialize_step(edition, platform='windows', ver_mode=ver_mode, is_downstream=True) + windows_steps,
platform='windows', depends_on=['build-main-downstream'],
))
pipelines.append(pipeline(
name='publish-main-downstream', edition=edition, trigger=trigger,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode, is_downstream=True, install_deps=False) + store_steps,
depends_on=['build-main-downstream', 'windows-main-downstream'],
))
pipelines.append(notify_pipeline(
name='notify-main-downstream', slack_channel='grafana-enterprise-ci-notifications', trigger=dict(trigger, status = ['failure']),
depends_on=['build-main-downstream', 'windows-main-downstream', 'publish-main-downstream'], template=failure_template, secret='slack_webhook',
))
return pipelines

View File

@@ -1,10 +1,7 @@
load(
'scripts/drone/steps/lib.star',
'download_grabpl_step',
'gen_version_step',
'yarn_install_step',
'wire_install_step',
'identify_runner_step',
'initialize_step',
'lint_drone_step',
'lint_backend_step',
'lint_frontend_step',
@@ -62,13 +59,6 @@ def pr_pipelines(edition):
volumes = integration_test_services_volumes()
variants = ['linux-amd64', 'linux-amd64-musl', 'darwin-amd64', 'windows-amd64', 'armv6',]
include_enterprise2 = edition == 'enterprise'
init_steps = [
identify_runner_step(),
download_grabpl_step(),
gen_version_step(ver_mode),
wire_install_step(),
yarn_install_step(),
]
test_steps = [
lint_drone_step(),
codespell_step(),
@@ -142,12 +132,14 @@ def pr_pipelines(edition):
return [
pipeline(
name='pr-test', edition=edition, trigger=trigger, services=[], steps=init_steps + test_steps,
name='pr-test', edition=edition, trigger=trigger, services=[], steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode)
+ test_steps,
), pipeline(
name='pr-build-e2e', edition=edition, trigger=trigger, services=[], steps=init_steps + build_steps,
name='pr-build-e2e', edition=edition, trigger=trigger, services=[], steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode)
+ build_steps,
), pipeline(
name='pr-integration-tests', edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step(), identify_runner_step(),] + integration_test_steps,
steps=[download_grabpl_step()] + integration_test_steps,
volumes=volumes,
), docs_pipelines(edition, ver_mode, trigger_docs())
]

View File

@@ -1,15 +1,11 @@
load(
'scripts/drone/steps/lib.star',
'disable_tests',
'clone_enterprise_step',
'download_grabpl_step',
'gen_version_step',
'yarn_install_step',
'wire_install_step',
'init_enterprise_step',
'initialize_step',
'lint_drone_step',
'test_release_ver',
'build_image',
'identify_runner_step',
'publish_image',
'lint_backend_step',
'lint_frontend_step',
@@ -35,14 +31,14 @@ load(
'memcached_integration_tests_step',
'get_windows_steps',
'benchmark_ldap_step',
'frontend_metrics_step',
'store_storybook_step',
'upload_packages_step',
'store_packages_step',
'upload_cdn_step',
'validate_scuemata_step',
'ensure_cuetsified_step',
'publish_images_step',
'trigger_oss'
'publish_images_step'
)
load(
@@ -97,7 +93,7 @@ def retrieve_npm_packages_step():
'name': 'retrieve-npm-packages',
'image': publish_image,
'depends_on': [
'yarn-install',
'initialize',
],
'environment': {
'GCP_KEY': from_secret('gcp_key'),
@@ -170,26 +166,16 @@ def get_steps(edition, ver_mode):
should_upload = should_publish or ver_mode in ('release-branch',)
include_enterprise2 = edition == 'enterprise'
edition2 = 'enterprise2'
init_steps = [
identify_runner_step(),
download_grabpl_step(),
gen_version_step(ver_mode),
wire_install_step(),
yarn_install_step(),
]
test_steps = []
if edition != 'enterprise':
test_steps.extend([shellcheck_step()])
test_steps.extend([
test_steps = [
codespell_step(),
shellcheck_step(),
lint_backend_step(edition=edition),
lint_frontend_step(),
test_backend_step(edition=edition),
test_backend_integration_step(edition=edition),
test_frontend_step(),
])
]
build_steps = [
build_backend_step(edition=edition, ver_mode=ver_mode),
@@ -239,11 +225,11 @@ def get_steps(edition, ver_mode):
build_steps.append(build_storybook)
if include_enterprise2:
integration_test_steps.extend([redis_integration_tests_step(), memcached_integration_tests_step()])
integration_test_steps.extend([redis_integration_tests_step(edition=edition2, ver_mode=ver_mode), memcached_integration_tests_step(edition=edition2, ver_mode=ver_mode)])
if should_upload:
publish_steps.append(upload_cdn_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss))
publish_steps.append(upload_packages_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss))
publish_steps.append(upload_cdn_step(edition=edition, ver_mode=ver_mode))
publish_steps.append(upload_packages_step(edition=edition, ver_mode=ver_mode))
if should_publish:
publish_step = store_storybook_step(edition=edition, ver_mode=ver_mode)
build_npm_step = build_npm_packages_step(edition=edition, ver_mode=ver_mode)
@@ -265,16 +251,16 @@ def get_steps(edition, ver_mode):
if step:
publish_steps.append(step)
return init_steps, test_steps, build_steps, integration_test_steps, package_steps, windows_package_steps, publish_steps
return test_steps, build_steps, integration_test_steps, package_steps, windows_package_steps, publish_steps
def get_oss_pipelines(trigger, ver_mode):
edition = 'oss'
services = integration_test_services(edition=edition)
volumes = integration_test_services_volumes()
init_steps, test_steps, build_steps, integration_test_steps, package_steps, windows_package_steps, publish_steps = get_steps(edition=edition, ver_mode=ver_mode)
test_steps, build_steps, integration_test_steps, package_steps, windows_package_steps, publish_steps = get_steps(edition=edition, ver_mode=ver_mode)
windows_pipeline = pipeline(
name='oss-windows-{}'.format(ver_mode), edition=edition, trigger=trigger,
steps=[identify_runner_step('windows')] + windows_package_steps,
steps=initialize_step(edition, platform='windows', ver_mode=ver_mode) + windows_package_steps,
platform='windows', depends_on=[
'oss-build{}-publish-{}'.format(get_e2e_suffix(), ver_mode),
],
@@ -282,7 +268,8 @@ def get_oss_pipelines(trigger, ver_mode):
pipelines = [
pipeline(
name='oss-build{}-publish-{}'.format(get_e2e_suffix(), ver_mode), edition=edition, trigger=trigger, services=[],
steps=init_steps + build_steps + package_steps + publish_steps,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) +
build_steps + package_steps + publish_steps,
volumes=volumes,
),
]
@@ -290,12 +277,14 @@ def get_oss_pipelines(trigger, ver_mode):
pipelines.extend([
pipeline(
name='oss-test-{}'.format(ver_mode), edition=edition, trigger=trigger, services=[],
steps=init_steps + test_steps,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) +
test_steps,
volumes=[],
),
pipeline(
name='oss-integration-tests-{}'.format(ver_mode), edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step(), identify_runner_step(),] + integration_test_steps,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) +
integration_test_steps,
volumes=volumes,
)
])
@@ -315,28 +304,10 @@ def get_enterprise_pipelines(trigger, ver_mode):
edition = 'enterprise'
services = integration_test_services(edition=edition)
volumes = integration_test_services_volumes()
deps_on_clone_enterprise_step = {
'depends_on': [
'init-enterprise',
]
}
_, test_steps, build_steps, integration_test_steps, package_steps, windows_package_steps, publish_steps = get_steps(edition=edition, ver_mode=ver_mode)
init_steps = [
download_grabpl_step(),
identify_runner_step(),
clone_enterprise_step(ver_mode),
init_enterprise_step(ver_mode)
]
for step in [wire_install_step(), yarn_install_step(), gen_version_step(ver_mode)]:
step.update(deps_on_clone_enterprise_step)
init_steps.extend([step])
for step in integration_test_steps:
step.update(deps_on_clone_enterprise_step)
test_steps, build_steps, integration_test_steps, package_steps, windows_package_steps, publish_steps = get_steps(edition=edition, ver_mode=ver_mode)
windows_pipeline = pipeline(
name='enterprise-windows-{}'.format(ver_mode), edition=edition, trigger=trigger,
steps=[identify_runner_step('windows')] + windows_package_steps,
steps=initialize_step(edition, platform='windows', ver_mode=ver_mode) + windows_package_steps,
platform='windows', depends_on=[
'enterprise-build{}-publish-{}'.format(get_e2e_suffix(), ver_mode),
],
@@ -344,7 +315,8 @@ def get_enterprise_pipelines(trigger, ver_mode):
pipelines = [
pipeline(
name='enterprise-build{}-publish-{}'.format(get_e2e_suffix(), ver_mode), edition=edition, trigger=trigger, services=[],
steps=init_steps + build_steps + package_steps + publish_steps,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) +
build_steps + package_steps + publish_steps,
volumes=volumes,
),
]
@@ -352,12 +324,14 @@ def get_enterprise_pipelines(trigger, ver_mode):
pipelines.extend([
pipeline(
name='enterprise-test-{}'.format(ver_mode), edition=edition, trigger=trigger, services=[],
steps=init_steps + test_steps,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) +
test_steps,
volumes=[],
),
pipeline(
name='enterprise-integration-tests-{}'.format(ver_mode), edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step(), identify_runner_step(), clone_enterprise_step(ver_mode), init_enterprise_step(ver_mode),] + integration_test_steps,
steps=[download_grabpl_step()] + initialize_step(edition, platform='linux', ver_mode=ver_mode) +
integration_test_steps,
volumes=volumes,
),
])
@@ -383,9 +357,8 @@ def publish_artifacts_step(mode):
'image': publish_image,
'environment': {
'GCP_KEY': from_secret('gcp_key'),
'PRERELEASE_BUCKET': from_secret('prerelease_bucket'),
},
'commands': ['./bin/grabpl artifacts publish {}--tag ${{TAG}} --src-bucket $${{PRERELEASE_BUCKET}}'.format(security)],
'commands': ['./bin/grabpl artifacts publish {}--tag ${{TAG}} --src-bucket grafana-prerelease'.format(security)],
'depends_on': ['grabpl'],
}
@@ -419,25 +392,14 @@ def publish_packages_pipeline():
'event': ['promote'],
'target': ['public'],
}
oss_steps = [
steps = [
download_grabpl_step(),
store_packages_step(edition='oss', ver_mode='release'),
]
enterprise_steps = [
download_grabpl_step(),
store_packages_step(edition='enterprise', ver_mode='release'),
]
deps = [
'publish-artifacts-public',
'publish-docker-oss-public',
'publish-docker-enterprise-public'
]
return [pipeline(
name='publish-packages-oss', trigger=trigger, steps=oss_steps, edition="all", depends_on=deps
), pipeline(
name='publish-packages-enterprise', trigger=trigger, steps=enterprise_steps, edition="all", depends_on=deps
name='publish-packages', trigger=trigger, steps=steps, edition="all", depends_on=['publish-artifacts-public']
)]
def publish_npm_pipelines(mode):
@@ -447,17 +409,17 @@ def publish_npm_pipelines(mode):
}
steps = [
download_grabpl_step(),
yarn_install_step(),
retrieve_npm_packages_step(),
release_npm_packages_step()
]
return [pipeline(
name='publish-npm-packages-{}'.format(mode), trigger=trigger, steps = steps, edition="all"
name='publish-npm-packages-{}'.format(mode), trigger=trigger, steps = initialize_step(edition='oss', platform='linux', ver_mode='release') + steps, edition="all"
)]
def release_pipelines(ver_mode='release', trigger=None):
def release_pipelines(ver_mode='release', trigger=None, environment=None):
# 'enterprise' edition services contain both OSS and enterprise services
services = integration_test_services(edition='enterprise')
if not trigger:
trigger = {
'event': {
@@ -467,10 +429,12 @@ def release_pipelines(ver_mode='release', trigger=None):
},
'ref': ['refs/tags/v*',],
'repo': {
'exclude': ['grafana/grafana'],
'exclude': ['grafana/grafana'],
},
}
should_publish = ver_mode == 'release'
# The release pipelines include also enterprise ones, so both editions are built for a release.
# We could also solve this by triggering a downstream build for the enterprise repo, but by including enterprise
# in OSS release builds, we simplify the UX for the release engineer.
@@ -479,6 +443,13 @@ def release_pipelines(ver_mode='release', trigger=None):
pipelines = oss_pipelines + enterprise_pipelines
# if ver_mode == 'release':
# pipelines.append(publish_artifacts_pipelines())
#pipelines.append(notify_pipeline(
# name='notify-{}'.format(ver_mode), slack_channel='grafana-ci-notifications', trigger=dict(trigger, status = ['failure']),
# depends_on=[p['name'] for p in pipelines], template=failure_template, secret='slack_webhook',
#))
return pipelines
def get_e2e_suffix():

View File

@@ -1,23 +1,16 @@
load('scripts/drone/vault.star', 'from_secret', 'github_token', 'pull_secret', 'drone_token', 'prerelease_bucket')
grabpl_version = 'v2.9.41'
build_image = 'grafana/build-container:1.5.4'
grabpl_version = 'v2.9.27'
build_image = 'grafana/build-container:1.5.3'
publish_image = 'grafana/grafana-ci-deploy:1.3.1'
deploy_docker_image = 'us.gcr.io/kubernetes-dev/drone/plugins/deploy-image'
alpine_image = 'alpine:3.15'
curl_image = 'byrnedo/alpine-curl:0.1.8'
windows_image = 'mcr.microsoft.com/windows:1809'
wix_image = 'grafana/ci-wix:0.1.1'
test_release_ver = 'v7.3.0-test'
disable_tests = False
trigger_oss = {
'when': {
'repo': [
'grafana/grafana',
]
}
}
def slack_step(channel, template, secret):
return {
@@ -31,57 +24,98 @@ def slack_step(channel, template, secret):
}
def gen_version_step(ver_mode):
def initialize_step(edition, platform, ver_mode, is_downstream=False, install_deps=True):
if platform == 'windows':
return [
{
'name': 'identify-runner',
'image': windows_image,
'commands': [
'echo $env:DRONE_RUNNER_NAME',
],
},
]
common_cmds = [
# Generate Go code, will install Wire
# TODO: Install Wire in Docker image instead
'make gen-go',
]
if ver_mode == 'release':
args = '${DRONE_TAG}'
common_cmds.append('./bin/grabpl verify-version ${DRONE_TAG}')
else:
build_no = '${DRONE_BUILD_NUMBER}'
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
args = '--build-id {}'.format(build_no)
return {
'name': 'gen-version',
'image': build_image,
'depends_on': [
'grabpl',
],
'commands': [
identify_runner = identify_runner_step(platform)
if install_deps:
common_cmds.extend([
'./bin/grabpl gen-version {}'.format(args),
],
}
def yarn_install_step():
return {
'name': 'yarn-install',
'image': build_image,
'commands': [
'yarn install --immutable',
],
'depends_on': [
'grabpl',
],
}
])
if edition in ('enterprise', 'enterprise2'):
source_commit = ''
if ver_mode == 'release':
committish = '${DRONE_TAG}'
source_commit = ' ${DRONE_TAG}'
environment = {
'GITHUB_TOKEN': from_secret(github_token),
}
token = "--github-token $${GITHUB_TOKEN}"
elif ver_mode == 'release-branch':
committish = '${DRONE_BRANCH}'
environment = {}
token = ""
else:
environment = {}
if is_downstream:
source_commit = ' $${SOURCE_COMMIT}'
committish = '${DRONE_COMMIT}'
token = ""
steps = [
identify_runner,
clone_enterprise(committish),
{
'name': 'initialize',
'image': build_image,
'depends_on': [
'clone-enterprise',
],
'environment': environment,
'commands': [
'mv bin/grabpl /tmp/',
'rmdir bin',
'mv grafana-enterprise /tmp/',
'/tmp/grabpl init-enterprise {} /tmp/grafana-enterprise{}'.format(token, source_commit),
'mv /tmp/grafana-enterprise/deployment_tools_config.json deployment_tools_config.json',
'mkdir bin',
'mv /tmp/grabpl bin/'
] + common_cmds,
},
]
return steps
steps = [
identify_runner,
{
'name': 'initialize',
'image': build_image,
'commands': common_cmds,
},
]
return steps
def wire_install_step():
return {
'name': 'wire-install',
'image': build_image,
'commands': [
'make gen-go',
],
}
def identify_runner_step(platform='linux'):
if platform == 'windows':
return {
'name': 'identify-runner',
'image': windows_image,
'commands': [
'echo $env:DRONE_RUNNER_NAME',
],
}
else:
def identify_runner_step(platform):
if platform == 'linux':
return {
'name': 'identify-runner',
'image': alpine_image,
@@ -89,15 +123,17 @@ def identify_runner_step(platform='linux'):
'echo $DRONE_RUNNER_NAME',
],
}
def clone_enterprise_step(ver_mode):
if ver_mode == 'release':
committish = '${DRONE_TAG}'
elif ver_mode == 'release-branch':
committish = '${DRONE_BRANCH}'
else:
committish = '${DRONE_COMMIT}'
return {
'name': 'identify-runner',
'image': windows_image,
'commands': [
'echo $env:DRONE_RUNNER_NAME',
],
}
def clone_enterprise(committish):
return {
'name': 'clone-enterprise',
'image': build_image,
@@ -111,51 +147,8 @@ def clone_enterprise_step(ver_mode):
],
}
def init_enterprise_step(ver_mode):
source_commit = ''
if ver_mode == 'release':
source_commit = ' ${DRONE_TAG}'
environment = {
'GITHUB_TOKEN': from_secret(github_token),
}
token = "--github-token $${GITHUB_TOKEN}"
elif ver_mode == 'release-branch':
environment = {}
token = ""
else:
environment = {}
token = ""
return {
'name': 'init-enterprise',
'image': build_image,
'depends_on': [
'clone-enterprise',
],
'environment': environment,
'commands': [
'mv bin/grabpl /tmp/',
'rmdir bin',
'mv grafana-enterprise /tmp/',
'/tmp/grabpl init-enterprise {} /tmp/grafana-enterprise{}'.format(token, source_commit),
'mv /tmp/grafana-enterprise/deployment_tools_config.json deployment_tools_config.json',
'mkdir bin',
'mv /tmp/grabpl bin/'
],
}
def download_grabpl_step(platform="linux"):
if platform == 'windows':
return {
'name': 'grabpl',
'image': wix_image,
'commands': [
'$$ProgressPreference = "SilentlyContinue"',
'Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe -OutFile grabpl.exe'.format(
grabpl_version),
]
}
def download_grabpl_step():
return {
'name': 'grabpl',
'image': curl_image,
@@ -212,7 +205,7 @@ def lint_backend_step(edition):
'CGO_ENABLED': '1',
},
'depends_on': [
'wire-install',
'initialize',
],
'commands': [
# Don't use Make since it will re-download the linters
@@ -225,6 +218,9 @@ def benchmark_ldap_step():
return {
'name': 'benchmark-ldap',
'image': build_image,
'depends_on': [
'initialize',
],
'environment': {
'LDAP_HOSTNAME': 'ldap',
},
@@ -257,35 +253,35 @@ def build_storybook_step(edition, ver_mode):
}
def store_storybook_step(edition, ver_mode, trigger=None):
def store_storybook_step(edition, ver_mode):
if edition in ('enterprise', 'enterprise2'):
return None
commands = []
if ver_mode == 'release':
commands.extend([
'./bin/grabpl store-storybook --deployment latest --src-bucket grafana-prerelease --src-dir artifacts/storybook',
'./bin/grabpl store-storybook --deployment ${DRONE_TAG} --src-bucket grafana-prerelease --src-dir artifacts/storybook',
])
channels = ['latest', '${DRONE_TAG}', ]
else:
# main pipelines should deploy storybook to grafana-storybook/canary public bucket
commands = ['./bin/grabpl store-storybook --deployment canary --src-bucket grafana-storybook', ]
channels = ['canary', ]
commands.extend([
'printenv GCP_KEY | base64 -d > /tmp/gcpkey.json',
'gcloud auth activate-service-account --key-file=/tmp/gcpkey.json',
] + [
'gsutil -m rsync -d -r ./packages/grafana-ui/dist/storybook gs://$${{PRERELEASE_BUCKET}}/artifacts/storybook/{}'.format(
c)
for c in channels
])
step = {
return {
'name': 'store-storybook',
'image': publish_image,
'depends_on': ['build-storybook', ] + end_to_end_tests_deps(edition),
'depends_on': ['build-storybook',] + end_to_end_tests_deps(edition),
'environment': {
'GCP_KEY': from_secret('gcp_key'),
'PRERELEASE_BUCKET': from_secret(prerelease_bucket)
},
'commands': commands,
}
if trigger and ver_mode in ("release-branch", "main"):
step.update(trigger)
return step
def e2e_tests_artifacts(edition):
return {
@@ -326,7 +322,7 @@ def e2e_tests_artifacts(edition):
}
def upload_cdn_step(edition, ver_mode, trigger=None):
def upload_cdn_step(edition, ver_mode):
src_dir = ''
if ver_mode == "release":
bucket = "$${PRERELEASE_BUCKET}"
@@ -344,7 +340,7 @@ def upload_cdn_step(edition, ver_mode, trigger=None):
'grafana-server',
])
step = {
return {
'name': 'upload-cdn-assets' + enterprise2_suffix(edition),
'image': publish_image,
'depends_on': deps,
@@ -356,27 +352,31 @@ def upload_cdn_step(edition, ver_mode, trigger=None):
'./bin/grabpl upload-cdn --edition {} --src-bucket "{}"{}'.format(edition, bucket, src_dir),
],
}
if trigger and ver_mode in ("release-branch", "main"):
step.update(trigger)
return step
def build_backend_step(edition, ver_mode, variants=None):
def build_backend_step(edition, ver_mode, variants=None, is_downstream=False):
variants_str = ''
if variants:
variants_str = ' --variants {}'.format(','.join(variants))
# TODO: Convert number of jobs to percentage
if ver_mode == 'release':
env = {
'GITHUB_TOKEN': from_secret(github_token),
}
cmds = [
'./bin/grabpl build-backend --jobs 8 --edition {} ${{DRONE_TAG}}'.format(
'./bin/grabpl build-backend --jobs 8 --edition {} --github-token $${{GITHUB_TOKEN}} --no-pull-enterprise ${{DRONE_TAG}}'.format(
edition,
),
]
else:
build_no = '${DRONE_BUILD_NUMBER}'
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
env = {}
cmds = [
'./bin/grabpl build-backend --jobs 8 --edition {} --build-id {}{}'.format(
'./bin/grabpl build-backend --jobs 8 --edition {} --build-id {}{} --no-pull-enterprise'.format(
edition, build_no, variants_str,
),
]
@@ -385,67 +385,70 @@ def build_backend_step(edition, ver_mode, variants=None):
'name': 'build-backend' + enterprise2_suffix(edition),
'image': build_image,
'depends_on': [
'gen-version',
'wire-install',
'initialize',
],
'environment': env,
'commands': cmds,
}
def build_frontend_step(edition, ver_mode):
build_no = '${DRONE_BUILD_NUMBER}'
def build_frontend_step(edition, ver_mode, is_downstream=False):
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
# TODO: Use percentage for num jobs
if ver_mode == 'release':
cmds = [
'./bin/grabpl build-frontend --jobs 8 ' + \
'--edition {} ${{DRONE_TAG}}'.format(edition),
'./bin/grabpl build-frontend --jobs 8 --github-token $${GITHUB_TOKEN} --no-install-deps ' + \
'--edition {} --no-pull-enterprise ${{DRONE_TAG}}'.format(edition),
]
else:
cmds = [
'./bin/grabpl build-frontend --jobs 8 --edition {} '.format(edition) + \
'--build-id {}'.format(build_no),
'./bin/grabpl build-frontend --jobs 8 --no-install-deps --edition {} '.format(edition) + \
'--build-id {} --no-pull-enterprise'.format(build_no),
]
return {
'name': 'build-frontend',
'image': build_image,
'depends_on': [
'initialize',
],
'environment': {
'NODE_OPTIONS': '--max_old_space_size=8192',
},
'depends_on': [
'gen-version',
'yarn-install',
],
'commands': cmds,
}
def build_frontend_package_step(edition, ver_mode):
build_no = '${DRONE_BUILD_NUMBER}'
def build_frontend_package_step(edition, ver_mode, is_downstream=False):
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
# TODO: Use percentage for num jobs
if ver_mode == 'release':
cmds = [
'./bin/grabpl build-frontend-packages --jobs 8 ' + \
'--edition {} ${{DRONE_TAG}}'.format(edition),
]
'./bin/grabpl build-frontend-packages --jobs 8 --github-token $${GITHUB_TOKEN} ' + \
'--edition {} --no-pull-enterprise ${{DRONE_TAG}}'.format(edition),
]
else:
cmds = [
'./bin/grabpl build-frontend-packages --jobs 8 --edition {} '.format(edition) + \
'--build-id {}'.format(build_no),
]
'--build-id {} --no-pull-enterprise'.format(build_no),
]
return {
'name': 'build-frontend-packages',
'image': build_image,
'depends_on': [
'initialize',
],
'environment': {
'NODE_OPTIONS': '--max_old_space_size=8192',
},
'depends_on': [
'gen-version',
'yarn-install',
],
'commands': cmds,
}
@@ -475,14 +478,13 @@ def build_plugins_step(edition, sign=False):
return {
'name': 'build-plugins',
'image': build_image,
'environment': env,
'depends_on': [
'gen-version',
'yarn-install',
'initialize',
],
'environment': env,
'commands': [
# TODO: Use percentage for num jobs
'./bin/grabpl build-plugins --jobs 8 --edition {}{}'.format(edition, sign_args),
'./bin/grabpl build-plugins --jobs 8 --edition {} --no-install-deps{}'.format(edition, sign_args),
],
}
@@ -492,7 +494,7 @@ def test_backend_step(edition):
'name': 'test-backend' + enterprise2_suffix(edition),
'image': build_image,
'depends_on': [
'wire-install',
'initialize',
],
'commands': [
'./bin/grabpl test-backend --edition {}'.format(edition),
@@ -505,7 +507,7 @@ def test_backend_integration_step(edition):
'name': 'test-backend-integration' + enterprise2_suffix(edition),
'image': build_image,
'depends_on': [
'wire-install',
'initialize',
],
'commands': [
'./bin/grabpl integration-tests --edition {}'.format(edition),
@@ -517,12 +519,12 @@ def test_frontend_step():
return {
'name': 'test-frontend',
'image': build_image,
'depends_on': [
'initialize',
],
'environment': {
'TEST_MAX_WORKERS': '50%',
},
'depends_on': [
'yarn-install',
],
'commands': [
'yarn run ci:test-frontend',
],
@@ -533,16 +535,16 @@ def lint_frontend_step():
return {
'name': 'lint-frontend',
'image': build_image,
'depends_on': [
'initialize',
],
'environment': {
'TEST_MAX_WORKERS': '50%',
},
'depends_on': [
'yarn-install',
],
'commands': [
'yarn run prettier:check',
'yarn run lint',
'yarn run i18n:compile', # TODO: right place for this?
'yarn run i18n:compile', # TODO: right place for this?
'yarn run typecheck',
],
}
@@ -579,11 +581,11 @@ def test_a11y_frontend_step(ver_mode, edition, port=3001):
}
def frontend_metrics_step(edition, trigger=None):
def frontend_metrics_step(edition):
if edition in ('enterprise', 'enterprise2'):
return None
step = {
return {
'name': 'publish-frontend-metrics',
'image': build_image,
'depends_on': [
@@ -597,15 +599,15 @@ def frontend_metrics_step(edition, trigger=None):
'./scripts/ci-frontend-metrics.sh | ./bin/grabpl publish-metrics $${GRAFANA_MISC_STATS_API_KEY}',
],
}
if trigger:
step.update(trigger)
return step
def codespell_step():
return {
'name': 'codespell',
'image': build_image,
'depends_on': [
'initialize',
],
'commands': [
# Important: all words have to be in lowercase, and separated by "\n".
'echo -e "unknwon\nreferer\nerrorstring\neror\niam\nwan" > words_to_ignore.txt',
@@ -620,7 +622,7 @@ def shellcheck_step():
'name': 'shellcheck',
'image': build_image,
'depends_on': [
'grabpl',
'initialize',
],
'commands': [
'./bin/grabpl shellcheck',
@@ -628,7 +630,7 @@ def shellcheck_step():
}
def package_step(edition, ver_mode, include_enterprise2=False, variants=None):
def package_step(edition, ver_mode, include_enterprise2=False, variants=None, is_downstream=False):
deps = [
'build-plugins',
'build-backend',
@@ -649,6 +651,7 @@ def package_step(edition, ver_mode, include_enterprise2=False, variants=None):
sign_args = ' --sign'
env = {
'GRAFANA_API_KEY': from_secret('grafana_api_key'),
'GITHUB_TOKEN': from_secret(github_token),
'GPG_PRIV_KEY': from_secret('gpg_priv_key'),
'GPG_PUB_KEY': from_secret('gpg_pub_key'),
'GPG_KEY_PASSWORD': from_secret('gpg_key_password'),
@@ -663,15 +666,18 @@ def package_step(edition, ver_mode, include_enterprise2=False, variants=None):
if ver_mode == 'release':
cmds = [
'{}./bin/grabpl package --jobs 8 --edition {} '.format(test_args, edition) + \
'{} ${{DRONE_TAG}}'.format(
'--github-token $${{GITHUB_TOKEN}} --no-pull-enterprise{} ${{DRONE_TAG}}'.format(
sign_args
),
]
else:
build_no = '${DRONE_BUILD_NUMBER}'
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
cmds = [
'{}./bin/grabpl package --jobs 8 --edition {} '.format(test_args, edition) + \
'--build-id {}{}{}'.format(build_no, variants_str, sign_args),
'--build-id {} --no-pull-enterprise{}{}'.format(build_no, variants_str, sign_args),
]
return {
@@ -713,7 +719,6 @@ def grafana_server_step(edition, port=3001):
],
}
def e2e_tests_step(suite, edition, port=3001, tries=None):
cmd = './bin/grabpl e2e-tests --port {} --suite {}'.format(port, suite)
if tries:
@@ -793,15 +798,13 @@ def build_docker_images_step(edition, ver_mode, archs=None, ubuntu=False, publis
},
}
def publish_images_step(edition, ver_mode, mode, docker_repo, trigger=None):
def publish_images_step(edition, ver_mode, mode, docker_repo, ubuntu=False):
if mode == 'security':
mode = '--{} '.format(mode)
else:
mode = ''
cmd = './bin/grabpl artifacts docker publish {}--dockerhub-repo {} --base alpine --base ubuntu --arch amd64 --arch arm64 --arch armv7'.format(
mode, docker_repo)
cmd = './bin/grabpl artifacts docker publish {}--dockerhub-repo {} --base alpine --base ubuntu --arch amd64 --arch arm64 --arch armv7'.format(mode, docker_repo)
if ver_mode == 'release':
deps = ['fetch-images-{}'.format(edition)]
@@ -809,7 +812,7 @@ def publish_images_step(edition, ver_mode, mode, docker_repo, trigger=None):
else:
deps = ['build-docker-images', 'build-docker-images-ubuntu']
step = {
return {
'name': 'publish-images-{}'.format(docker_repo),
'image': 'google/cloud-sdk',
'environment': {
@@ -824,15 +827,14 @@ def publish_images_step(edition, ver_mode, mode, docker_repo, trigger=None):
'path': '/var/run/docker.sock'
}],
}
if trigger and ver_mode in ("release-branch", "main"):
step.update(trigger)
return step
def postgres_integration_tests_step(edition, ver_mode):
deps = []
deps.extend(['grabpl'])
if edition in ('enterprise', 'enterprise2') and ver_mode in ('release-branch', 'release'):
deps.extend(['initialize'])
else:
deps.extend(['grabpl'])
return {
'name': 'postgres-integration-tests',
'image': build_image,
@@ -857,7 +859,10 @@ def postgres_integration_tests_step(edition, ver_mode):
def mysql_integration_tests_step(edition, ver_mode):
deps = []
deps.extend(['grabpl'])
if edition in ('enterprise', 'enterprise2') and ver_mode in ('release-branch', 'release'):
deps.extend(['initialize'])
else:
deps.extend(['grabpl'])
return {
'name': 'mysql-integration-tests',
'image': build_image,
@@ -878,9 +883,12 @@ def mysql_integration_tests_step(edition, ver_mode):
}
def redis_integration_tests_step():
def redis_integration_tests_step(edition, ver_mode):
deps = []
deps.extend(['grabpl'])
if edition in ('enterprise', 'enterprise2') and ver_mode in ('release-branch', 'release'):
deps.extend(['initialize'])
else:
deps.extend(['grabpl'])
return {
'name': 'redis-integration-tests',
'image': build_image,
@@ -895,9 +903,12 @@ def redis_integration_tests_step():
}
def memcached_integration_tests_step():
def memcached_integration_tests_step(edition, ver_mode):
deps = []
deps.extend(['grabpl'])
if edition in ('enterprise', 'enterprise2') and ver_mode in ('release-branch', 'release'):
deps.extend(['initialize'])
else:
deps.extend(['grabpl'])
return {
'name': 'memcached-integration-tests',
'image': build_image,
@@ -912,11 +923,11 @@ def memcached_integration_tests_step():
}
def release_canary_npm_packages_step(edition, trigger=None):
def release_canary_npm_packages_step(edition):
if edition in ('enterprise', 'enterprise2'):
return None
step = {
return {
'name': 'release-canary-npm-packages',
'image': build_image,
'depends_on': end_to_end_tests_deps(edition),
@@ -927,9 +938,6 @@ def release_canary_npm_packages_step(edition, trigger=None):
'./scripts/circle-release-canary-packages.sh',
],
}
if trigger:
step.update(trigger)
return step
def enterprise2_suffix(edition):
@@ -938,16 +946,15 @@ def enterprise2_suffix(edition):
return ''
def upload_packages_step(edition, ver_mode, trigger=None):
if ver_mode == 'main' and edition in ('enterprise', 'enterprise2'):
def upload_packages_step(edition, ver_mode, is_downstream=False):
if ver_mode == 'main' and edition in ('enterprise', 'enterprise2') and not is_downstream:
return None
if ver_mode == 'release':
packages_bucket = '$${{PRERELEASE_BUCKET}}/artifacts/downloads{}'.format(enterprise2_suffix(edition))
cmd = './bin/grabpl upload-packages --edition {} --packages-bucket {}'.format(edition, packages_bucket)
elif edition == 'enterprise2':
cmd = './bin/grabpl upload-packages --edition {} --packages-bucket grafana-downloads-enterprise2'.format(
edition)
cmd = './bin/grabpl upload-packages --edition {} --packages-bucket grafana-downloads-enterprise2'.format(edition)
else:
cmd = './bin/grabpl upload-packages --edition {} --packages-bucket grafana-downloads'.format(edition)
@@ -955,11 +962,11 @@ def upload_packages_step(edition, ver_mode, trigger=None):
if edition in 'enterprise2' or not end_to_end_tests_deps(edition):
deps.extend([
'package' + enterprise2_suffix(edition),
])
])
else:
deps.extend(end_to_end_tests_deps(edition))
step = {
return {
'name': 'upload-packages' + enterprise2_suffix(edition),
'image': publish_image,
'depends_on': deps,
@@ -969,18 +976,18 @@ def upload_packages_step(edition, ver_mode, trigger=None):
},
'commands': [cmd, ],
}
if trigger and ver_mode in ("release-branch", "main"):
step.update(trigger)
return step
def store_packages_step(edition, ver_mode):
def store_packages_step(edition, ver_mode, is_downstream=False):
if ver_mode == 'release':
cmd = './bin/grabpl store-packages --edition {} --packages-bucket grafana-downloads --gcp-key /tmp/gcpkey.json ${{DRONE_TAG}}'.format(
edition,
)
elif ver_mode == 'main':
build_no = '${DRONE_BUILD_NUMBER}'
if not is_downstream:
build_no = '${DRONE_BUILD_NUMBER}'
else:
build_no = '$${SOURCE_BUILD_NUMBER}'
cmd = './bin/grabpl store-packages --edition {} --gcp-key /tmp/gcpkey.json --build-id {}'.format(
edition, build_no,
)
@@ -1006,7 +1013,12 @@ def store_packages_step(edition, ver_mode):
}
def get_windows_steps(edition, ver_mode):
def get_windows_steps(edition, ver_mode, is_downstream=False):
if not is_downstream:
source_commit = ''
else:
source_commit = ' $$env:SOURCE_COMMIT'
init_cmds = []
sfx = ''
if edition in ('enterprise', 'enterprise2'):
@@ -1019,12 +1031,12 @@ def get_windows_steps(edition, ver_mode):
])
steps = [
{
'name': 'windows-init',
'name': 'initialize',
'image': wix_image,
'commands': init_cmds,
},
]
if (ver_mode == 'main' and (edition not in ('enterprise', 'enterprise2'))) or ver_mode in (
if (ver_mode == 'main' and (edition not in ('enterprise', 'enterprise2') or is_downstream)) or ver_mode in (
'release', 'release-branch',
):
bucket_part = ''
@@ -1036,7 +1048,10 @@ def get_windows_steps(edition, ver_mode):
dir = 'main'
bucket = 'grafana-downloads'
bucket_part = ' --packages-bucket {}'.format(bucket)
build_no = 'DRONE_BUILD_NUMBER'
if not is_downstream:
build_no = 'DRONE_BUILD_NUMBER'
else:
build_no = 'SOURCE_BUILD_NUMBER'
ver_part = '--build-id $$env:{}'.format(build_no)
installer_commands = [
'$$gcpKey = $$env:GCP_KEY',
@@ -1047,7 +1062,7 @@ def get_windows_steps(edition, ver_mode):
'rm gcpkey.json',
'cp C:\\App\\nssm-2.24.zip .',
]
if (ver_mode == 'main' and (edition not in ('enterprise', 'enterprise2'))) or ver_mode in (
if (ver_mode == 'main' and (edition not in ('enterprise', 'enterprise2') or is_downstream)) or ver_mode in (
'release',
):
installer_commands.extend([
@@ -1068,15 +1083,15 @@ def get_windows_steps(edition, ver_mode):
steps.append({
'name': 'build-windows-installer',
'image': wix_image,
'depends_on': [
'windows-init',
],
'environment': {
'GCP_KEY': from_secret('gcp_key'),
'PRERELEASE_BUCKET': from_secret(prerelease_bucket),
'GITHUB_TOKEN': from_secret('github_token')
},
'commands': installer_commands,
'depends_on': [
'initialize',
],
})
if edition in ('enterprise', 'enterprise2'):
@@ -1095,10 +1110,11 @@ def get_windows_steps(edition, ver_mode):
clone_cmds = [
'git clone "https://$$env:GITHUB_TOKEN@github.com/grafana/grafana-enterprise.git"',
]
clone_cmds.extend([
'cd grafana-enterprise',
'git checkout {}'.format(committish),
])
if not is_downstream:
clone_cmds.extend([
'cd grafana-enterprise',
'git checkout {}'.format(committish),
])
steps.insert(0, {
'name': 'clone',
'image': wix_image,
@@ -1116,7 +1132,7 @@ def get_windows_steps(edition, ver_mode):
'rm -r -force grafana-enterprise',
'cp grabpl.exe C:\\App\\grabpl.exe',
'rm -force grabpl.exe',
'C:\\App\\grabpl.exe init-enterprise --github-token $$env:GITHUB_TOKEN C:\\App\\grafana-enterprise',
'C:\\App\\grabpl.exe init-enterprise --github-token $$env:GITHUB_TOKEN C:\\App\\grafana-enterprise{}'.format(source_commit),
'cp C:\\App\\grabpl.exe grabpl.exe',
])
if 'environment' in steps[1]:
@@ -1156,7 +1172,6 @@ def ensure_cuetsified_step():
],
}
def end_to_end_tests_deps(edition):
if disable_tests:
return []

View File

@@ -3100,10 +3100,10 @@ __metadata:
languageName: node
linkType: hard
"@braintree/sanitize-url@npm:6.0.0":
version: 6.0.0
resolution: "@braintree/sanitize-url@npm:6.0.0"
checksum: 409ce7709dc1a0c67bc887d20af1becd4145d5c62cc5124b1c4c1f3ea2a8d69b0ee9f582d446469c6f5294b56442b99048cbbba6861dd5c834d4e019b95e1f40
"@braintree/sanitize-url@npm:*, @braintree/sanitize-url@npm:5.0.2":
version: 5.0.2
resolution: "@braintree/sanitize-url@npm:5.0.2"
checksum: c033f9a0e6dd6fbd4022df2d3916a278510f759971b1e8ab278b3ce1123a3816d5fdd9d84c5c9fbcd6c94c05f8421c4c669f110c8db67eaf58f3018825af514e
languageName: node
linkType: hard
@@ -3633,9 +3633,9 @@ __metadata:
version: 0.0.0-use.local
resolution: "@grafana-plugins/input-datasource@workspace:plugins-bundled/internal/input-datasource"
dependencies:
"@grafana/data": 8.4.10
"@grafana/toolkit": 8.4.10
"@grafana/ui": 8.4.10
"@grafana/data": 8.4.5
"@grafana/toolkit": 8.4.5
"@grafana/ui": 8.4.5
"@types/jest": 26.0.15
"@types/lodash": 4.14.149
"@types/react": 17.0.30
@@ -3676,12 +3676,12 @@ __metadata:
languageName: node
linkType: hard
"@grafana/data@8.4.10, @grafana/data@workspace:*, @grafana/data@workspace:packages/grafana-data":
"@grafana/data@8.4.5, @grafana/data@workspace:*, @grafana/data@workspace:packages/grafana-data":
version: 0.0.0-use.local
resolution: "@grafana/data@workspace:packages/grafana-data"
dependencies:
"@braintree/sanitize-url": 6.0.0
"@grafana/schema": 8.4.10
"@braintree/sanitize-url": 5.0.2
"@grafana/schema": 8.4.5
"@grafana/tsconfig": ^1.0.0-rc1
"@rollup/plugin-commonjs": 21.0.1
"@rollup/plugin-json": 4.1.0
@@ -3692,6 +3692,7 @@ __metadata:
"@testing-library/react": 12.1.2
"@testing-library/react-hooks": 7.0.2
"@testing-library/user-event": 13.5.0
"@types/braintree__sanitize-url": 4.1.0
"@types/d3-interpolate": ^1.4.0
"@types/jest": 27.4.0
"@types/jquery": 3.5.11
@@ -3710,7 +3711,7 @@ __metadata:
eventemitter3: 4.0.7
lodash: 4.17.21
marked: 4.0.10
moment: 2.29.2
moment: 2.29.1
moment-timezone: 0.5.34
ol: 6.12.0
papaparse: 5.3.1
@@ -3732,7 +3733,7 @@ __metadata:
languageName: unknown
linkType: soft
"@grafana/e2e-selectors@8.4.10, @grafana/e2e-selectors@workspace:*, @grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors":
"@grafana/e2e-selectors@8.4.5, @grafana/e2e-selectors@workspace:*, @grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors":
version: 0.0.0-use.local
resolution: "@grafana/e2e-selectors@workspace:packages/grafana-e2e-selectors"
dependencies:
@@ -3756,7 +3757,7 @@ __metadata:
"@babel/core": 7.16.7
"@babel/preset-env": 7.16.7
"@cypress/webpack-preprocessor": 5.11.0
"@grafana/e2e-selectors": 8.4.10
"@grafana/e2e-selectors": 8.4.5
"@grafana/tsconfig": ^1.0.0-rc1
"@mochajs/json-file-reporter": ^1.2.0
"@rollup/plugin-commonjs": 21.0.1
@@ -3836,10 +3837,10 @@ __metadata:
version: 0.0.0-use.local
resolution: "@grafana/runtime@workspace:packages/grafana-runtime"
dependencies:
"@grafana/data": 8.4.10
"@grafana/e2e-selectors": 8.4.10
"@grafana/data": 8.4.5
"@grafana/e2e-selectors": 8.4.5
"@grafana/tsconfig": ^1.0.0-rc1
"@grafana/ui": 8.4.10
"@grafana/ui": 8.4.5
"@rollup/plugin-commonjs": 21.0.1
"@rollup/plugin-node-resolve": 13.1.3
"@sentry/browser": 6.17.2
@@ -3868,7 +3869,7 @@ __metadata:
languageName: unknown
linkType: soft
"@grafana/schema@8.4.10, @grafana/schema@workspace:*, @grafana/schema@workspace:packages/grafana-schema":
"@grafana/schema@8.4.5, @grafana/schema@workspace:*, @grafana/schema@workspace:packages/grafana-schema":
version: 0.0.0-use.local
resolution: "@grafana/schema@workspace:packages/grafana-schema"
dependencies:
@@ -3915,16 +3916,16 @@ __metadata:
languageName: node
linkType: hard
"@grafana/toolkit@8.4.10, @grafana/toolkit@workspace:*, @grafana/toolkit@workspace:packages/grafana-toolkit":
"@grafana/toolkit@8.4.5, @grafana/toolkit@workspace:*, @grafana/toolkit@workspace:packages/grafana-toolkit":
version: 0.0.0-use.local
resolution: "@grafana/toolkit@workspace:packages/grafana-toolkit"
dependencies:
"@babel/core": 7.13.14
"@babel/preset-env": 7.13.12
"@grafana/data": 8.4.10
"@grafana/data": 8.4.5
"@grafana/eslint-config": 2.5.2
"@grafana/tsconfig": ^1.0.0-rc1
"@grafana/ui": 8.4.10
"@grafana/ui": 8.4.5
"@jest/core": 26.6.3
"@rushstack/eslint-patch": 1.0.6
"@types/command-exists": ^1.2.0
@@ -4015,7 +4016,7 @@ __metadata:
languageName: node
linkType: hard
"@grafana/ui@8.4.10, @grafana/ui@workspace:*, @grafana/ui@workspace:packages/grafana-ui":
"@grafana/ui@8.4.5, @grafana/ui@workspace:*, @grafana/ui@workspace:packages/grafana-ui":
version: 0.0.0-use.local
resolution: "@grafana/ui@workspace:packages/grafana-ui"
dependencies:
@@ -4023,9 +4024,9 @@ __metadata:
"@emotion/css": 11.7.1
"@emotion/react": 11.7.1
"@grafana/aws-sdk": 0.0.31
"@grafana/data": 8.4.10
"@grafana/e2e-selectors": 8.4.10
"@grafana/schema": 8.4.10
"@grafana/data": 8.4.5
"@grafana/e2e-selectors": 8.4.5
"@grafana/schema": 8.4.5
"@grafana/slate-react": 0.22.10-grafana
"@grafana/tsconfig": ^1.0.0-rc1
"@mdx-js/react": 1.6.22
@@ -4112,7 +4113,7 @@ __metadata:
lodash: 4.17.21
memoize-one: 6.0.0
mock-raf: 1.0.1
moment: 2.29.2
moment: 2.29.1
monaco-editor: ^0.31.1
ol: 6.12.0
postcss: 8.4.5
@@ -4242,9 +4243,9 @@ __metadata:
resolution: "@jaegertracing/jaeger-ui-components@workspace:packages/jaeger-ui-components"
dependencies:
"@emotion/css": 11.7.1
"@grafana/data": 8.4.10
"@grafana/data": 8.4.5
"@grafana/tsconfig": ^1.0.0-rc1
"@grafana/ui": 8.4.10
"@grafana/ui": 8.4.5
"@types/classnames": ^2.2.7
"@types/deep-freeze": ^0.1.1
"@types/grafana__slate-react": "npm:@types/slate-react@0.22.5"
@@ -4267,7 +4268,7 @@ __metadata:
lodash: 4.17.21
lru-memoize: ^1.1.0
memoize-one: 6.0.0
moment: 2.29.2
moment: 2.29.1
moment-timezone: 0.5.34
prop-types: 15.8.1
react: 17.0.2
@@ -8954,6 +8955,15 @@ __metadata:
languageName: node
linkType: hard
"@types/braintree__sanitize-url@npm:4.1.0":
version: 4.1.0
resolution: "@types/braintree__sanitize-url@npm:4.1.0"
dependencies:
"@braintree/sanitize-url": "*"
checksum: 29b91a4c6923d5e52cabd263abff9eecd24c2cdc7a1f16d945a26fa599e370d490bf1a4c7080157836850736c962712d26fe83cae94d5a67a0a968d2ef14950f
languageName: node
linkType: hard
"@types/cheerio@npm:*, @types/cheerio@npm:^0.22.22":
version: 0.22.30
resolution: "@types/cheerio@npm:0.22.30"
@@ -9997,7 +10007,7 @@ __metadata:
languageName: node
linkType: hard
"@types/rc-time-picker@npm:3.4.1":
"@types/rc-time-picker@npm:^3":
version: 3.4.1
resolution: "@types/rc-time-picker@npm:3.4.1"
dependencies:
@@ -19655,7 +19665,7 @@ __metadata:
"@types/papaparse": 5.3.1
"@types/pluralize": ^0.0.29
"@types/prismjs": 1.26.0
"@types/rc-time-picker": 3.4.1
"@types/rc-time-picker": ^3
"@types/react": 17.0.38
"@types/react-beautiful-dnd": 13.1.2
"@types/react-dom": 17.0.11
@@ -19764,7 +19774,7 @@ __metadata:
lru-cache: 6.0.0
memoize-one: 6.0.0
mini-css-extract-plugin: 2.5.2
moment: 2.29.2
moment: 2.29.1
moment-timezone: 0.5.34
monaco-editor: ^0.31.1
monaco-promql: ^1.7.2
@@ -24992,9 +25002,9 @@ __metadata:
linkType: hard
"minimist@npm:^1.1.1, minimist@npm:^1.2.0, minimist@npm:^1.2.5":
version: 1.2.6
resolution: "minimist@npm:1.2.6"
checksum: d15428cd1e11eb14e1233bcfb88ae07ed7a147de251441d61158619dfb32c4d7e9061d09cab4825fdee18ecd6fce323228c8c47b5ba7cd20af378ca4048fb3fb
version: 1.2.5
resolution: "minimist@npm:1.2.5"
checksum: 86706ce5b36c16bfc35c5fe3dbb01d5acdc9a22f2b6cc810b6680656a1d2c0e44a0159c9a3ba51fb072bb5c203e49e10b51dcd0eec39c481f4c42086719bae52
languageName: node
linkType: hard
@@ -25227,10 +25237,10 @@ __metadata:
languageName: node
linkType: hard
"moment@npm:2.29.2, moment@npm:2.x, moment@npm:>= 2.9.0, moment@npm:^2.19.4, moment@npm:^2.20.1":
version: 2.29.2
resolution: "moment@npm:2.29.2"
checksum: ee850b5776485e2af0775ceb3cfebaa7d7638f0a750fe0678fcae24c310749f96c1938808384bd422a55e5703834a71fcb09c8a1d36d9cf847f6ed0205d7a3e5
"moment@npm:2.29.1, moment@npm:2.x, moment@npm:>= 2.9.0, moment@npm:^2.19.4, moment@npm:^2.20.1":
version: 2.29.1
resolution: "moment@npm:2.29.1"
checksum: 1e14d5f422a2687996be11dd2d50c8de3bd577c4a4ca79ba5d02c397242a933e5b941655de6c8cb90ac18f01cc4127e55b4a12ae3c527a6c0a274e455979345e
languageName: node
linkType: hard