Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 4f1f264657 | |||
| 49f78c15e8 | |||
| 76340a9741 | |||
| b15acdf1f2 | |||
| ca8402fbda | |||
| abb44794fe | |||
| c228eaa99d | |||
| f41cc1c0d6 | |||
| b557d71c9a | |||
| e404352a38 |
@@ -13,17 +13,29 @@ on:
|
||||
required: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
id-token: write
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
bump-version:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: grafana/shared-workflows/actions/get-vault-secrets@main
|
||||
with:
|
||||
repo_secrets: |
|
||||
GRAFANA_DELIVERY_BOT_APP_PEM=delivery-bot-app:PRIVATE_KEY
|
||||
- name: Generate token
|
||||
id: generate_token
|
||||
uses: tibdex/github-app-token@3beb63f4bd073e61482598c45c71c1019b59b73a
|
||||
with:
|
||||
app_id: ${{ vars.DELIVERY_BOT_APP_ID }}
|
||||
private_key: ${{ env.GRAFANA_DELIVERY_BOT_APP_PEM }}
|
||||
repositories: '["grafana"]'
|
||||
permissions: '{"contents": "write", "pull_requests": "write"}'
|
||||
- name: Checkout Grafana
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
token: ${{ steps.generate_token.outputs.token }}
|
||||
- name: Update package.json versions
|
||||
uses: ./pkg/build/actions/bump-version
|
||||
with:
|
||||
@@ -35,10 +47,10 @@ jobs:
|
||||
DRY_RUN: ${{ inputs.dry_run }}
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
RUN_ID: ${{ github.run_id }}
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
GH_TOKEN: ${{ steps.generate_token.outputs.token }}
|
||||
run: |
|
||||
git config --local user.name "github-actions[bot]"
|
||||
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
||||
git config --local user.name "grafana-delivery-bot[bot]"
|
||||
git config --local user.email "grafana-delivery-bot[bot]@users.noreply.github.com"
|
||||
git config --local --add --bool push.autoSetupRemote true
|
||||
git checkout -b "bump-version/${RUN_ID}/${VERSION}"
|
||||
git add .
|
||||
|
||||
@@ -298,16 +298,21 @@ groupByNode(summarize(movingAverage(apps.$app.$server.counters.requests.count, 5
|
||||
## Add ad hoc filters
|
||||
|
||||
_Ad hoc filters_ are one of the most complex and flexible variable options available.
|
||||
Instead of a regular list of variable options, this variable allows you to build a dashboard-wide ad hoc query.
|
||||
Instead of creating a variable for each dimension by which you want to filter, ad hoc filters automatically create variables (key/value pairs) for all the dimensions returned by your data source query.
|
||||
This allows you to apply filters dashboard-wide.
|
||||
|
||||
Ad hoc filters let you add label/value filters that are automatically added to all metric queries that use the specified data source.
|
||||
Unlike other variables, you don't use ad hoc filters in queries.
|
||||
Instead, you use ad hoc filters to write filters for existing queries.
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
Not all data sources support ad hoc filters.
|
||||
Examples of those that do include Prometheus, Loki, InfluxDB, and Elasticsearch.
|
||||
{{< /admonition >}}
|
||||
The following data sources support ad hoc filters:
|
||||
|
||||
- Dashboard - Use this special data source to [apply ad hoc filters to data from unsupported data sources](#filter-any-data-using-the-dashboard-data-source).
|
||||
- Prometheus
|
||||
- Loki
|
||||
- InfluxDB
|
||||
- Elasticsearch
|
||||
- OpenSearch
|
||||
|
||||
To create an ad hoc filter, follow these steps:
|
||||
|
||||
@@ -324,6 +329,60 @@ To create an ad hoc filter, follow these steps:
|
||||
|
||||
Now you can [filter data on the dashboard](ref:filter-dashboard).
|
||||
|
||||
### Filter any data using the Dashboard data source
|
||||
|
||||
In cases where a data source doesn't support the use of ad hoc filters, you can use the Dashboard data source to reference that data, and then filter it in a new panel.
|
||||
This allows you to bypass the limitations of the data source in the source panel.
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-dashboard-ds-v12.2.png" max-width="750px" alt="The query section of a panel with the Dashboard data source configured" >}}
|
||||
|
||||
To use ad hoc filters on data from an unsupported data source, follow these steps:
|
||||
|
||||
1. Navigate to the dashboard with the panel with the data you want to filter.
|
||||
1. Click **Edit** in the top-right corner of the dashboard.
|
||||
1. At the top of the dashboard, click **Add** and select **Visualization** in the drop-down list.
|
||||
1. In the **Queries** tab of the edit panel view, enter `Dashboard` in the **Data source** field and select **-- Dashboard --**.
|
||||
1. In the query configuration section, make the following selections:
|
||||
- **Source panel** - Choose the panel with the source data.
|
||||
- **Data** - Select **All Data** to use the data of the panel, and not just the annotations. This is the default selection.
|
||||
- **AdHoc Filters** - Toggle on the switch to make the data from the referenced panel filterable.
|
||||
|
||||
{{< admonition type="note">}}
|
||||
If you're referencing multiple panels in a dashboard with the Dashboard data source, you can only use one of those source panels at a time for ad hoc filtering.
|
||||
{{< /admonition >}}
|
||||
|
||||
1. Configure any other needed options for the panel.
|
||||
1. Click **Save dashboard**.
|
||||
|
||||
Now you can filter the data from the source panel by way of the Dashboard data source.
|
||||
Add as many panels as you need.
|
||||
|
||||
### Dashboard drilldown with ad hoc filters
|
||||
|
||||
In table and bar chart visualizations, you can apply ad hoc filters directly from the visualization.
|
||||
To quickly apply ad hoc filter variables, follow these steps:
|
||||
|
||||
1. To display the filter icons, hover your cursor over the table cell with the value for which you want to filter. In this example, the cell value is `ConfigMap Updated`, which is in the `alertname` column:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-icon-v12.2.png" max-width="550px" alt="Table and bar chart with ad hoc filter icon displayed on a table cell" >}}
|
||||
|
||||
In bar chart visualizations, hover and click the bar to display the filter button:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-icon-bar-v12.2.png" max-width="300px" alt="The ad hoc filter button in a bar chart tooltip">}}
|
||||
|
||||
1. Click the add filter icon.
|
||||
|
||||
The variable pair `alertname = ConfigMap Updated` is added to the ad hoc filter and all panels using the same data source that include that variable value are filtered by that value:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-applied-v12.2.png" max-width="550px" alt="Table and bar chart, filtered" >}}
|
||||
|
||||
If one of the panels in the dashboard using that data source doesn't include that variable value, the panel won't return any data. In this example, the variable pair `_name_ = ALERTS` has been added to the ad hoc filter so the bar chart doesn't return any results:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-no-data-v12.2.png" max-width="650px" alt="Table, filtered and bar chart returning no results" >}}
|
||||
|
||||
In cases where the data source you're using doesn't support ad hoc filtering, consider using the special Dashboard data source.
|
||||
For more information, refer to [Filter any data using the Dashboard data source](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/dashboards/variables/add-template-variables/#filter-any-data-using-the-dashboard-data-source).
|
||||
|
||||
<!-- vale Grafana.Spelling = YES -->
|
||||
<!-- vale Grafana.WordList = YES -->
|
||||
|
||||
|
||||
@@ -200,6 +200,8 @@ Each data link configuration consists of:
|
||||
|
||||
Use private data source connect (PDC) to connect to and query data within a secure network without opening that network to inbound traffic from Grafana Cloud. See [Private data source connect](https://grafana.com/docs/grafana-cloud/connect-externally-hosted/private-data-source-connect/) for more information on how PDC works and [Configure Grafana private data source connect (PDC)](https://grafana.com/docs/grafana-cloud/connect-externally-hosted/private-data-source-connect/configure-pdc/#configure-grafana-private-data-source-connect-pdc) for steps on setting up a PDC connection.
|
||||
|
||||
If you use PDC with SIGv4 (AWS Signature Version 4 Authentication), the PDC agent must allow internet egress to `sts.<region>.amazonaws.com:443`.
|
||||
|
||||
- **Private data source connect** - Click in the box to set the default PDC connection from the dropdown menu or create a new connection.
|
||||
|
||||
Once you have configured your Elasticsearch data source options, click **Save & test** at the bottom to test out your data source connection. You can also remove a connection by clicking **Delete**.
|
||||
|
||||
@@ -233,7 +233,9 @@ You can add multiple exemplars.
|
||||
|
||||
- **Private data source connect** - _Only for Grafana Cloud users._ Private data source connect, or PDC, allows you to establish a private, secured connection between a Grafana Cloud instance, or stack, and data sources secured within a private network. Click the drop-down to locate the URL for PDC. For more information regarding Grafana PDC refer to [Private data source connect (PDC)](ref:private-data-source-connect) and [Configure Grafana private data source connect (PDC)](https://grafana.com/docs/grafana-cloud/connect-externally-hosted/private-data-source-connect/configure-pdc/#configure-grafana-private-data-source-connect-pdc) for steps on setting up a PDC connection.
|
||||
|
||||
Click **Manage private data source connect** to be taken to your PDC connection page, where you’ll find your PDC configuration details.
|
||||
If you use PDC with SIGv4 (AWS Signature Version 4 Authentication), the PDC agent must allow internet egress to `sts.<region>.amazonaws.com:443`.
|
||||
|
||||
Click **Manage private data source connect** to open your PDC connection page and view your configuration details.
|
||||
|
||||
After you have configured your Prometheus data source options, click **Save & test** at the bottom to test out your data source connection.
|
||||
|
||||
|
||||
@@ -278,6 +278,17 @@ When linking to another dashboard that uses template variables, select variable
|
||||
|
||||
If you want to add all of the current dashboard's variables to the URL, then use `${__all_variables}`.
|
||||
|
||||
When you link to another dashboard, ensure that:
|
||||
|
||||
- The target dashboard has the same variable name. If it doesn't (for example, `server` in the source dashboard and `host` in the target), you must align them or explicitly map values (for example, `&var-host=${server}`).
|
||||
- You use the variable _name_, and not the label. Labels are only used as display text and aren't recognized in URLs.
|
||||
|
||||
For example, if you have a variable with the name `var-server` and the label `ChooseYourServer`, you must use `var-server` in the URL, as shown in the following table:
|
||||
|
||||
| Correct link | Incorrect link |
|
||||
| ---------------------------------------------- | -------------------------------------------------------- |
|
||||
| `/d/xxxx/dashboard-b?orgId=1&var-server=web02` | `/d/xxxx/dashboard-b?orgId=1&var-ChooseYourServer=web02` |
|
||||
|
||||
## Add data links or actions {#add-a-data-link}
|
||||
|
||||
The following tasks describe how to configure data links and actions.
|
||||
@@ -296,9 +307,7 @@ To add a data link, follow these steps:
|
||||
This is a human-readable label for the link displayed in the UI. This is a required field.
|
||||
|
||||
1. Enter the **URL** to which you want to link.
|
||||
|
||||
To add a data link variable, click in the **URL** field and enter `$` or press Ctrl+Space or Cmd+Space to see a list of available variables. This is a required field.
|
||||
|
||||
1. (Optional) To add a data link variable, click in the **URL** field and enter `$` or press Ctrl+Space or Cmd+Space to see a list of available variables.
|
||||
1. If you want the link to open in a new tab, toggle the **Open in a new tab** switch.
|
||||
1. If you want the data link to open with a single click on the visualization, toggle the **One click** switch.
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ refs:
|
||||
|
||||
# SQL expressions
|
||||
|
||||
{{< docs/private-preview product="SQL expressions" >}}
|
||||
{{< docs/public-preview product="SQL expressions" >}}
|
||||
|
||||
SQL Expressions are server-side expressions that manipulate and transform the results of data source queries using MySQL-like syntax. They allow you to easily query and transform your data after it has been queried, using SQL, which provides a familiar and powerful syntax that can handle everything from simple filters to highly complex, multi-step transformations.
|
||||
|
||||
@@ -60,11 +60,17 @@ A key capability of SQL expressions is the ability to JOIN data from multiple ta
|
||||
|
||||
To work with SQL expressions, you must use data from a backend data source. In Grafana, a backend data source refers to a data source plugin or integration that communicates with a database, service, or API through the Grafana server, rather than directly from the browser (frontend).
|
||||
|
||||
## Known limitations
|
||||
|
||||
- Currently, only one SQL expression is supported per panel or alert.
|
||||
- Grafana supports certain data sources. Refer to [compatible data sources](#compatible-data-sources) for a current list.
|
||||
- Autocomplete is available, but column/field autocomplete is only available after enabling the `sqlExpressionsColumnAutoComplete` feature toggle, which is provided on an experimental basis.
|
||||
|
||||
## Compatible data sources
|
||||
|
||||
The following are compatible data sources:
|
||||
|
||||
**Full support:** All query types for each data source are supported.
|
||||
**Full support:** Grafana supports all query types for each of these data sources.
|
||||
|
||||
- Elasticsearch
|
||||
- MySQL
|
||||
@@ -73,7 +79,7 @@ The following are compatible data sources:
|
||||
- Google Sheets
|
||||
- Amazon Athena
|
||||
|
||||
**Partial support:** The following data sources offer limited or conditional support. Some allow different types of queries, depending on the service being accessed. For example, Azure Monitor can query multiple services, each with its own query format. In some cases, you can also change the query type within a panel.
|
||||
**Partial support:** The following data sources have limited or conditional support. Some support multiple query types depending on the service. For example, Azure Monitor can query multiple services, each with its own query format. In some cases, you can also switch the query type within a panel.
|
||||
|
||||
- InfluxDB
|
||||
- Infinity
|
||||
@@ -97,6 +103,10 @@ To create a SQL expression, complete the following steps:
|
||||
|
||||
After you have added a SQL expression, you can select from other data source queries by referencing the RefIDs of the queries in your SQL expression as if they were tables in a SQL database.
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
The **RefID** is a unique identifier assigned to each query within a Grafana panel that serves as a reference name for that query's data.
|
||||
{{< /admonition >}}
|
||||
|
||||

|
||||
|
||||
## Workflow to build SQL expressions
|
||||
@@ -134,22 +144,65 @@ The SQL expression workflow in Grafana is designed with the following behaviors:
|
||||
|
||||
- **Non-tabular or incorrectly shaped data will not render in certain panels.** Visualizations such as graphs or gauges require properly structured data. Mismatched formats will result in rendering issues or missing data.
|
||||
|
||||
For data to be used in SQL expressions, it must be in a **tabular format**, specifically the **FullLong format**. This means all relevant data is contained within a single table, with values such as metric labels stored as columns and individual cells. Because not all data sources return results in this format by default, Grafana will automatically convert compatible query results to FullLong format when they are referenced in a SQL expression.
|
||||
|
||||
## SQL conversion rules
|
||||
|
||||
When a RefID is referenced within a SQL statement (e.g., `SELECT * FROM A`), the system invokes a distinct SQL conversion process.
|
||||
When you reference a RefID within a SQL statement (e.g., `SELECT * FROM A`), the system invokes a distinct SQL conversion process.
|
||||
|
||||
The SQL conversion path:
|
||||
|
||||
- The query result is treated as a single data frame, without labels, and is mapped directly to a tabular format.
|
||||
- If the frame type is present and is either numeric, wide time series, or multi-frame time series (for example, labeled formats), Grafana automatically converts the data into a table structure.
|
||||
- The query result appears as a single data frame, without labels, and is mapped directly to a tabular format.
|
||||
- If the frame type is present and is either numeric, wide time series, or multi-frame time series (for example: labeled formats), Grafana automatically converts the data into a table structure.
|
||||
|
||||
## Known limitations
|
||||
## Supported functions
|
||||
|
||||
- Currently, only one SQL expression is supported per panel or alert.
|
||||
- Grafana supports certain data sources. Refer to [compatible data sources](#compatible-data-sources) for a current list.
|
||||
- Autocomplete is available, but column/field autocomplete is only available after enabling the `sqlExpressionsColumnAutoComplete` feature toggle, which is provided on an experimental basis.
|
||||
Grafana maintains a complete list of supported SQL keywords, operators, and functions in the SQL expressions query validator implementation.
|
||||
|
||||
For the most up-to-date reference of all supported SQL functionality, refer to the `allowedNode` and `allowedFunction` definitions in the Grafana [codebase](https://github.com/grafana/grafana/blob/main/pkg/expr/sql/parser_allow.go).
|
||||
|
||||
## Alerting and recording rules
|
||||
|
||||
SQL expressions integrate with alerting and recording rules, allowing you to define complex conditions and metrics using standard SQL queries. The system processes your query results and automatically creates alert instances or recorded metrics based on the returned data structure.
|
||||
|
||||
For SQL Expressions to work properly with alerting and recording rules, your query must return:
|
||||
|
||||
- One numeric column - **_required_**. This contains the value that triggers alerts or gets recorded.
|
||||
- Unique string column combinations - **_required_**. Each row must have a unique combination of string column values.
|
||||
- One or more string columns - _optional_. These become **labels** for the alert instances or metrics. Examples: `service`, `region`.
|
||||
|
||||
Consider the following query results:
|
||||
|
||||
```sql
|
||||
error_count,service,region
|
||||
25,auth-service,us-east
|
||||
0,payment-service,us-west
|
||||
15,user-service,eu-west
|
||||
```
|
||||
|
||||
This query returns:
|
||||
|
||||
- the numeric column `error_count` (values: 25, 0, 15)
|
||||
- the string columns `service` and `region`
|
||||
|
||||
For alert rules, this creates three alert instances:
|
||||
|
||||
- First instance with labels {service=auth-service, region=us-east} and value 25 (triggers alert - high error count)
|
||||
- Second instance with labels {service=payment-service, region=us-west} and value 0 (no alert - zero errors)
|
||||
- Third instance with labels {service=user-service, region=eu-west} and value 15 (triggers alert - elevated error count)
|
||||
|
||||
For recording rules, this creates one metric with three series:
|
||||
|
||||
- First series: error_count_total{service=auth-service, region=us-east} 25
|
||||
- Second series: error_count_total{service=payment-service, region=us-west} 0
|
||||
- Third series: error_count_total{service=user-service, region=eu-west} 15
|
||||
|
||||
Following are some best practices for alerting and recording rules:
|
||||
|
||||
- Keep numeric values meaningful (for example: error counts, request duration).
|
||||
- Use clear, descriptive column names - these become your labels.
|
||||
- Keep string values short and consistent.
|
||||
- Avoid too many unique label combinations, as this can result in high cardinality.
|
||||
- Always use `GROUP BY` to avoid duplicate label errors.
|
||||
- Aggregate numeric values logically (for example: `SUM(error_count)`).
|
||||
|
||||
## Supported data source formats
|
||||
|
||||
@@ -202,3 +255,19 @@ During conversion:
|
||||
2. Add the SQL expression `SELECT * from A`. After you add a SQL expression that selects from RefID A, Grafana converts it to a table response:
|
||||
|
||||

|
||||
|
||||
## LLM integration
|
||||
|
||||
The Grafana LLM plugin seamlessly integrates AI-powered assistance into your SQL expressions workflow.
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
The Grafana LLM plugin is currently in public preview, meaning Grafana offers limited support, and breaking changes might occur prior to the feature being made generally available.
|
||||
{{< /admonition >}}
|
||||
|
||||
To use this integration, first [install and configure the LLM plugin](https://grafana.com/grafana/plugins/grafana-llm-app/). After installation, open your dashboard and select **Edit** to open the panel editor. Navigate to the **Queries** tab and scroll to the bottom where you'll find two new buttons positioned to the right of the **Run query** button in your SQL Expressions query.
|
||||
|
||||
{{< figure src="/media/docs/sql-expressions/sqlexpressions-LLM-integration-v12.2.png" caption="LLM integration" >}}
|
||||
|
||||
Click **Explain query** to open a drawer that displays a detailed explanation of your query, including its interpreted business meaning and performance statistics. Once the explanation is generated, the button changes to **View explanation**.
|
||||
|
||||
Click **Improve query** to open a suggestions drawer that contains performance and reliability enhancements, column naming best practices, and guidance on panel optimization. Click **Apply** to implement a suggestion. After you’ve interacted with the interface, you'll see a **Suggestions** button for quick access. Newer suggestions appear at the top, with older ones listed below, creating a history of improvements. If your SQL query has a parsing error, such as a syntax issue, the LLM will attempt to provide a corrected version. The LLM automatically identifies errors and helps you rewrite the query correctly.
|
||||
|
||||
@@ -88,6 +88,22 @@ While the first field can be time-based and you can use a bar chart to plot time
|
||||
|
||||
We recommend that you only use one dataset in a bar chart because using multiple datasets can result in unexpected behavior.
|
||||
|
||||
<!-- vale Grafana.WordList = NO -->
|
||||
<!-- vale Grafana.Spelling = NO -->
|
||||
|
||||
## Apply ad hoc filters from the bar chart
|
||||
|
||||
In bar charts, you can apply ad hoc filters directly from the visualization.
|
||||
|
||||
To display the filter button, hover your cursor over the bar that has the value for which you want to filter and click the bar:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-adhoc-filter-icon-bar-v12.2.png" max-width="300px" alt="The ad hoc filter button in a bar chart tooltip">}}
|
||||
|
||||
For more information about applying ad hoc filters this way, refer to [Dashboard drilldown with ad hoc filters](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/dashboards/variables/add-template-variables/#dashboard-drilldown-with-ad-hoc-filters).
|
||||
|
||||
<!-- vale Grafana.Spelling = YES -->
|
||||
<!-- vale Grafana.WordList = YES -->
|
||||
|
||||
## Configuration options
|
||||
|
||||
{{< docs/shared lookup="visualizations/config-options-intro.md" source="grafana" version="<GRAFANA_VERSION>" >}}
|
||||
|
||||
@@ -174,6 +174,22 @@ Columns with filters applied have a blue filter displayed next to the title.
|
||||
|
||||
To remove the filter, click the blue filter icon and then click **Clear filter**.
|
||||
|
||||
<!-- vale Grafana.WordList = NO -->
|
||||
<!-- vale Grafana.Spelling = NO -->
|
||||
|
||||
### Apply ad hoc filters from the table
|
||||
|
||||
In tables, you can apply ad hoc filters directly from the visualization with one click.
|
||||
|
||||
To display the filter icons, hover your cursor over the cell that has the value for which you want to filter:
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-adhoc-filter-v12.2.png" max-width="500px" alt="Table with ad hoc filter icon displayed on a cell" >}}
|
||||
|
||||
For more information about applying ad hoc filters this way, refer to [Dashboard drilldown with ad hoc filters](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/dashboards/variables/add-template-variables/#dashboard-drilldown-with-ad-hoc-filters).
|
||||
|
||||
<!-- vale Grafana.Spelling = YES -->
|
||||
<!-- vale Grafana.WordList = YES -->
|
||||
|
||||
## Sort columns
|
||||
|
||||
Click a column title to change the sort order from default to descending to ascending.
|
||||
@@ -408,7 +424,7 @@ However, you can switch back and forth between tabs.
|
||||
|
||||
The **Pill** cell type displays each item in a comma-separated string in a colored block.
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-pills-v12.1.png" max-width="750px" alt="Table using the pill cell type" >}}
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-pill-cells-v12.2.png" max-width="750px" alt="Table using the pill cell type" >}}
|
||||
|
||||
The colors applied to each piece of text are maintained throughout the table.
|
||||
For example, if the word "test" is first displayed in a red pill, it will always be displayed in a red pill.
|
||||
@@ -439,6 +455,8 @@ in these cells if the [`disable_sanitize_html`](https://grafana.com/docs/grafana
|
||||
Toggle on the **Tooltip from field** switch to use the values from another field (or column) in a tooltip.
|
||||
For more information, refer to [Tooltip from field](#tooltip-from-field).
|
||||
|
||||
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-table-markdown-v12.2.png" max-width="600px" alt="Table using the pill cell type" >}}
|
||||
|
||||
#### Image
|
||||
|
||||
If you have a field value that is an image URL or a base64 encoded image, this cell type displays it as an image.
|
||||
|
||||
@@ -0,0 +1,102 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"flag"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/cmd/grafana-cli/utils"
|
||||
)
|
||||
|
||||
func TestRemoveCommand_StaticFS_FailsWithImmutableError(t *testing.T) {
|
||||
t.Run("removeCommand fails with immutable error for plugins using StaticFS", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
pluginID := "test-plugin"
|
||||
pluginDir := filepath.Join(tmpDir, pluginID)
|
||||
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "test-plugin",
|
||||
"name": "Test Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
cmdLine := createCliContextWithArgs(t, []string{pluginID}, "pluginsDir", tmpDir)
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
// Verify plugin directory exists before attempting removal
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err, "Plugin directory should exist before removal attempt")
|
||||
|
||||
err = removeCommand(cmdLine)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Verify plugin directory has been removed
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.ErrorIs(t, err, os.ErrNotExist)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRemoveCommand_PluginNotFound(t *testing.T) {
|
||||
t.Run("removeCommand should handle missing plugin gracefully", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
cmdLine := createCliContextWithArgs(t, []string{"non-existent-plugin"}, "pluginsDir", tmpDir)
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := removeCommand(cmdLine)
|
||||
require.NoError(t, err)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRemoveCommand_MissingPluginParameter(t *testing.T) {
|
||||
t.Run("removeCommand should error when no plugin ID is provided", func(t *testing.T) {
|
||||
cmdLine := createCliContextWithArgs(t, []string{})
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := removeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "missing plugin parameter")
|
||||
})
|
||||
}
|
||||
|
||||
// createCliContextWithArgs creates a CLI context with the specified arguments and optional flag key-value pairs.
|
||||
// Usage: createCliContextWithArgs(t, []string{"plugin-id"}, "pluginsDir", "/path/to/plugins", "flag2", "value2")
|
||||
func createCliContextWithArgs(t *testing.T, args []string, flagPairs ...string) *utils.ContextCommandLine {
|
||||
if len(flagPairs)%2 != 0 {
|
||||
t.Fatalf("flagPairs must be provided in key-value pairs, got %d arguments", len(flagPairs))
|
||||
}
|
||||
|
||||
app := &cli.App{
|
||||
Name: "grafana",
|
||||
}
|
||||
|
||||
flagSet := flag.NewFlagSet("test", 0)
|
||||
|
||||
// Add flags from the key-value pairs
|
||||
for i := 0; i < len(flagPairs); i += 2 {
|
||||
key := flagPairs[i]
|
||||
value := flagPairs[i+1]
|
||||
flagSet.String(key, "", "")
|
||||
err := flagSet.Set(key, value)
|
||||
require.NoError(t, err, "Failed to set flag %s=%s", key, value)
|
||||
}
|
||||
|
||||
err := flagSet.Parse(args)
|
||||
require.NoError(t, err)
|
||||
|
||||
ctx := cli.NewContext(app, flagSet, nil)
|
||||
return &utils.ContextCommandLine{
|
||||
Context: ctx,
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,150 @@
|
||||
package commands
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"github.com/urfave/cli/v2"
|
||||
|
||||
"github.com/grafana/grafana/pkg/cmd/grafana-cli/models"
|
||||
)
|
||||
|
||||
func TestUpgradeCommand(t *testing.T) {
|
||||
t.Run("Plugin is removed even if upgrade fails", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
pluginID := "test-upgrade-plugin"
|
||||
pluginDir := filepath.Join(tmpDir, pluginID)
|
||||
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "test-upgrade-plugin",
|
||||
"name": "Test Upgrade Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
// Create a mock HTTP server that returns plugin info with a newer version
|
||||
mockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
// Handle plugin info request
|
||||
if r.URL.Path == "/repo/"+pluginID {
|
||||
plugin := models.Plugin{
|
||||
ID: pluginID,
|
||||
Versions: []models.Version{
|
||||
{
|
||||
Version: "2.0.0", // Newer than the local version (1.0.0)
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
err = json.NewEncoder(w).Encode(plugin)
|
||||
require.NoError(t, err)
|
||||
return
|
||||
}
|
||||
|
||||
// For any other request (like installation), return 500 to cause the upgrade to fail
|
||||
// after the removal attempt, which is what we want to test
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
_, err = w.Write([]byte("Server error"))
|
||||
require.NoError(t, err)
|
||||
}))
|
||||
defer mockServer.Close()
|
||||
|
||||
// Use our test implementation that properly implements GcomToken()
|
||||
cmdLine := newTestCommandLine([]string{pluginID}, tmpDir, mockServer.URL)
|
||||
|
||||
// Verify plugin directory exists before attempting upgrade
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
err = upgradeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "API returned invalid status: 500 Internal Server Error")
|
||||
|
||||
// Verify plugin directory was removed during the removal step
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.True(t, os.IsNotExist(err))
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpgradeCommand_PluginNotFound(t *testing.T) {
|
||||
t.Run("upgradeCommand should handle missing plugin gracefully", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
cmdLine := createCliContextWithArgs(t, []string{"non-existent-plugin"}, "pluginsDir", tmpDir)
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := upgradeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
// Should fail trying to find the local plugin
|
||||
require.Contains(t, err.Error(), "could not find plugin non-existent-plugin")
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpgradeCommand_MissingPluginParameter(t *testing.T) {
|
||||
t.Run("upgradeCommand should error when no plugin ID is provided", func(t *testing.T) {
|
||||
cmdLine := createCliContextWithArgs(t, []string{})
|
||||
require.NotNil(t, cmdLine)
|
||||
|
||||
err := upgradeCommand(cmdLine)
|
||||
require.Error(t, err)
|
||||
require.Contains(t, err.Error(), "please specify plugin to update")
|
||||
})
|
||||
}
|
||||
|
||||
// Simple args implementation
|
||||
type simpleArgs []string
|
||||
|
||||
func (a simpleArgs) First() string {
|
||||
if len(a) > 0 {
|
||||
return a[0]
|
||||
}
|
||||
return ""
|
||||
}
|
||||
func (a simpleArgs) Get(int) string { return "" }
|
||||
func (a simpleArgs) Tail() []string { return nil }
|
||||
func (a simpleArgs) Len() int { return len(a) }
|
||||
func (a simpleArgs) Present() bool { return len(a) > 0 }
|
||||
func (a simpleArgs) Slice() []string { return []string(a) }
|
||||
|
||||
// Base struct with default implementations for unused CommandLine methods
|
||||
type baseCommandLine struct{}
|
||||
|
||||
func (b baseCommandLine) ShowHelp() error { return nil }
|
||||
func (b baseCommandLine) ShowVersion() {}
|
||||
func (b baseCommandLine) Application() *cli.App { return nil }
|
||||
func (b baseCommandLine) Int(_ string) int { return 0 }
|
||||
func (b baseCommandLine) String(_ string) string { return "" }
|
||||
func (b baseCommandLine) StringSlice(_ string) []string { return nil }
|
||||
func (b baseCommandLine) FlagNames() []string { return nil }
|
||||
func (b baseCommandLine) Generic(_ string) any { return nil }
|
||||
func (b baseCommandLine) Bool(_ string) bool { return false }
|
||||
func (b baseCommandLine) PluginURL() string { return "" }
|
||||
func (b baseCommandLine) GcomToken() string { return "" }
|
||||
|
||||
// Test implementation - only implements what we actually need
|
||||
type testCommandLine struct {
|
||||
baseCommandLine // Embedded struct provides default implementations
|
||||
args simpleArgs
|
||||
pluginDir string
|
||||
repoURL string
|
||||
}
|
||||
|
||||
func newTestCommandLine(args []string, pluginDir, repoURL string) *testCommandLine {
|
||||
return &testCommandLine{args: simpleArgs(args), pluginDir: pluginDir, repoURL: repoURL}
|
||||
}
|
||||
|
||||
// Only implement the methods actually used by upgradeCommand
|
||||
func (t *testCommandLine) Args() cli.Args { return t.args }
|
||||
func (t *testCommandLine) PluginDirectory() string { return t.pluginDir }
|
||||
func (t *testCommandLine) PluginRepoURL() string { return t.repoURL }
|
||||
@@ -236,6 +236,15 @@ func (f StaticFS) Files() ([]string, error) {
|
||||
return files, nil
|
||||
}
|
||||
|
||||
func (f StaticFS) Remove() error {
|
||||
if remover, ok := f.FS.(FSRemover); ok {
|
||||
if err := remover.Remove(); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LocalFile implements a fs.File for accessing the local filesystem.
|
||||
type LocalFile struct {
|
||||
f *os.File
|
||||
|
||||
@@ -270,12 +270,27 @@ func TestStaticFS(t *testing.T) {
|
||||
require.Equal(t, []string{allowedFn, deniedFn}, files)
|
||||
})
|
||||
|
||||
t.Run("staticfs filters underelying fs's files", func(t *testing.T) {
|
||||
t.Run("staticfs filters underlying fs's files", func(t *testing.T) {
|
||||
files, err := staticFS.Files()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, []string{allowedFn}, files)
|
||||
})
|
||||
})
|
||||
|
||||
t.Run("FSRemover interface implementation verification", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
lfs := NewLocalFS(tmpDir)
|
||||
var localFSInterface FS = lfs
|
||||
_, isRemover := localFSInterface.(FSRemover)
|
||||
require.True(t, isRemover)
|
||||
|
||||
sfs, err := NewStaticFS(localFS)
|
||||
require.NoError(t, err)
|
||||
var staticFSInterface FS = sfs
|
||||
_, isRemover = staticFSInterface.(FSRemover)
|
||||
require.True(t, isRemover)
|
||||
})
|
||||
}
|
||||
|
||||
// TestFSTwoDotsInFileName ensures that LocalFS and StaticFS allow two dots in file names.
|
||||
|
||||
@@ -5,6 +5,8 @@ import (
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"testing"
|
||||
|
||||
@@ -422,3 +424,100 @@ func createPlugin(t *testing.T, pluginID string, class plugins.Class, managed, b
|
||||
func testCompatOpts() plugins.AddOpts {
|
||||
return plugins.NewAddOpts("10.0.0", runtime.GOOS, runtime.GOARCH, "")
|
||||
}
|
||||
|
||||
func TestPluginInstaller_Removal(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
|
||||
t.Run("LocalFS plugin removal succeeds via installer.Remove", func(t *testing.T) {
|
||||
pluginDir := filepath.Join(tmpDir, "localfs-plugin")
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "localfs-plugin",
|
||||
"name": "LocalFS Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
localFS := plugins.NewLocalFS(pluginDir)
|
||||
pluginV1 := createPlugin(t, "localfs-plugin", plugins.ClassExternal, true, true, func(plugin *plugins.Plugin) {
|
||||
plugin.Info.Version = "1.0.0"
|
||||
plugin.FS = localFS
|
||||
})
|
||||
|
||||
registry := &fakes.FakePluginRegistry{
|
||||
Store: map[string]*plugins.Plugin{
|
||||
"localfs-plugin": pluginV1,
|
||||
},
|
||||
}
|
||||
|
||||
loader := &fakes.FakeLoader{
|
||||
UnloadFunc: func(_ context.Context, p *plugins.Plugin) (*plugins.Plugin, error) {
|
||||
return p, nil
|
||||
},
|
||||
}
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
inst := New(&config.PluginManagementCfg{}, registry, loader, &fakes.FakePluginRepo{}, &fakes.FakePluginStorage{}, storage.SimpleDirNameGeneratorFunc, &fakes.FakeAuthService{})
|
||||
err = inst.Remove(context.Background(), "localfs-plugin", "1.0.0")
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.True(t, os.IsNotExist(err))
|
||||
})
|
||||
|
||||
t.Run("StaticFS plugin removal is skipped via installer.Remove", func(t *testing.T) {
|
||||
pluginDir := filepath.Join(tmpDir, "staticfs-plugin")
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "staticfs-plugin",
|
||||
"name": "StaticFS Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
localFS := plugins.NewLocalFS(pluginDir)
|
||||
staticFS, err := plugins.NewStaticFS(localFS)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginV1 := createPlugin(t, "staticfs-plugin", plugins.ClassExternal, true, true, func(plugin *plugins.Plugin) {
|
||||
plugin.Info.Version = "1.0.0"
|
||||
plugin.FS = staticFS
|
||||
})
|
||||
|
||||
registry := &fakes.FakePluginRegistry{
|
||||
Store: map[string]*plugins.Plugin{
|
||||
"staticfs-plugin": pluginV1,
|
||||
},
|
||||
}
|
||||
|
||||
loader := &fakes.FakeLoader{
|
||||
UnloadFunc: func(_ context.Context, p *plugins.Plugin) (*plugins.Plugin, error) {
|
||||
return p, nil
|
||||
},
|
||||
}
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.NoError(t, err)
|
||||
|
||||
inst := New(&config.PluginManagementCfg{}, registry, loader, &fakes.FakePluginRepo{}, &fakes.FakePluginStorage{}, storage.SimpleDirNameGeneratorFunc, &fakes.FakeAuthService{})
|
||||
err = inst.Remove(context.Background(), "staticfs-plugin", "1.0.0")
|
||||
require.NoError(t, err)
|
||||
|
||||
_, err = os.Stat(pluginDir)
|
||||
require.ErrorIs(t, err, os.ErrNotExist)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package sources
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
@@ -110,3 +111,32 @@ func TestDirAsLocalSources(t *testing.T) {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestLocalSource(t *testing.T) {
|
||||
t.Run("NewLocalSource should always return plugins with StaticFS", func(t *testing.T) {
|
||||
tmpDir := t.TempDir()
|
||||
pluginID := "test-plugin"
|
||||
pluginDir := filepath.Join(tmpDir, pluginID)
|
||||
|
||||
err := os.MkdirAll(pluginDir, 0750)
|
||||
require.NoError(t, err)
|
||||
|
||||
pluginJSON := `{
|
||||
"id": "test-plugin",
|
||||
"name": "Test Plugin",
|
||||
"type": "datasource",
|
||||
"info": {
|
||||
"version": "1.0.0"
|
||||
}
|
||||
}`
|
||||
err = os.WriteFile(filepath.Join(pluginDir, "plugin.json"), []byte(pluginJSON), 0644)
|
||||
require.NoError(t, err)
|
||||
|
||||
bundles, err := NewLocalSource(plugins.ClassExternal, []string{pluginDir}).Discover(t.Context())
|
||||
require.NoError(t, err)
|
||||
require.Len(t, bundles, 1, "Should discover exactly one plugin")
|
||||
require.Equal(t, pluginID, bundles[0].Primary.JSONData.ID)
|
||||
_, canRemove := bundles[0].Primary.FS.(plugins.FSRemover)
|
||||
require.True(t, canRemove)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -688,15 +688,15 @@ var validConfigWithAutogen = `{
|
||||
"receiver": "some email",
|
||||
"object_matchers": [["__grafana_autogenerated__", "=", "true"]],
|
||||
"routes": [{
|
||||
"receiver": "some email",
|
||||
"group_by": ["grafana_folder", "alertname"],
|
||||
"object_matchers": [["__grafana_receiver__", "=", "some email"]],
|
||||
"continue": false
|
||||
},{
|
||||
"receiver": "other email",
|
||||
"group_by": ["grafana_folder", "alertname"],
|
||||
"object_matchers": [["__grafana_receiver__", "=", "other email"]],
|
||||
"continue": false
|
||||
},{
|
||||
"receiver": "some email",
|
||||
"group_by": ["grafana_folder", "alertname"],
|
||||
"object_matchers": [["__grafana_receiver__", "=", "some email"]],
|
||||
"continue": false
|
||||
}]
|
||||
},{
|
||||
"receiver": "other email",
|
||||
|
||||
@@ -190,8 +190,12 @@ func (s *NotificationSettings) Fingerprint() data.Fingerprint {
|
||||
for _, interval := range s.MuteTimeIntervals {
|
||||
writeString(interval)
|
||||
}
|
||||
// Add a separator between the time intervals to avoid collisions
|
||||
// when all settings are the same including interval names except for the interval type (mute vs active).
|
||||
_, _ = h.Write([]byte{255})
|
||||
for _, interval := range s.ActiveTimeIntervals {
|
||||
writeString(interval)
|
||||
}
|
||||
|
||||
return data.Fingerprint(h.Sum64())
|
||||
}
|
||||
|
||||
@@ -113,6 +113,8 @@ func TestValidate(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestNotificationSettingsLabels(t *testing.T) {
|
||||
timeInterval := "time-interval-1"
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
notificationSettings NotificationSettings
|
||||
@@ -135,7 +137,7 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "6027cdeaff62ba3f",
|
||||
AutogeneratedRouteSettingsHashLabel: "c65d254ff4c279f2",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -151,7 +153,7 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "47164c92f2986a35",
|
||||
AutogeneratedRouteSettingsHashLabel: "634e52b238fc78f0",
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -168,7 +170,25 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "a173df6210e43af0",
|
||||
AutogeneratedRouteSettingsHashLabel: "9ac606ba0f6bcfb5",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "default notification settings with active time interval",
|
||||
notificationSettings: CopyNotificationSettings(NewDefaultNotificationSettings("receiver name"), NSMuts.WithActiveTimeIntervals(timeInterval)),
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "8304d9c06fda36e2",
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "default notification settings with mute time interval",
|
||||
notificationSettings: CopyNotificationSettings(NewDefaultNotificationSettings("receiver name"), NSMuts.WithMuteTimeIntervals(timeInterval)),
|
||||
labels: data.Labels{
|
||||
AutogeneratedRouteLabel: "true",
|
||||
AutogeneratedRouteReceiverNameLabel: "receiver name",
|
||||
AutogeneratedRouteSettingsHashLabel: "171cfd2d4e0810fa",
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -181,6 +201,27 @@ func TestNotificationSettingsLabels(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestNotificationSettings_TimeIntervals(t *testing.T) {
|
||||
// Create notification settings with default settings and usign the same
|
||||
// time interval, but in one case as a mute time interval and in another case
|
||||
// as an active time interval. They should produce different hashes.
|
||||
|
||||
receiver := "receiver name"
|
||||
timeInterval := "time interval name"
|
||||
|
||||
muteSettings := NotificationSettings{
|
||||
Receiver: receiver,
|
||||
MuteTimeIntervals: []string{timeInterval},
|
||||
}
|
||||
|
||||
activeSettings := NotificationSettings{
|
||||
Receiver: receiver,
|
||||
ActiveTimeIntervals: []string{timeInterval},
|
||||
}
|
||||
|
||||
require.NotEqual(t, activeSettings.Fingerprint(), muteSettings.Fingerprint())
|
||||
}
|
||||
|
||||
func TestNormalizedGroupBy(t *testing.T) {
|
||||
validNotificationSettings := NotificationSettingsGen()
|
||||
|
||||
|
||||
@@ -86,9 +86,9 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
basicContactRoute("receiver1"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver2"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver1"),
|
||||
},
|
||||
}),
|
||||
},
|
||||
@@ -100,9 +100,9 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
basicContactRoute("receiver1"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver2"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver1"),
|
||||
},
|
||||
}),
|
||||
},
|
||||
@@ -130,42 +130,42 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver5"), &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "030d6474aec0b553"),
|
||||
MuteTimeIntervals: []string{"maintenance"},
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "cd6cd2089632453c"),
|
||||
ActiveTimeIntervals: []string{"active"},
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "dde34b8127e68f31"),
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "f134b8faf7db083c"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "02466789dc88da23"),
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel, "custom"},
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
GroupWait: util.Pointer(model.Duration(2 * time.Minute)),
|
||||
RepeatInterval: util.Pointer(model.Duration(3 * time.Minute)),
|
||||
MuteTimeIntervals: []string{"maintenance"},
|
||||
ActiveTimeIntervals: []string{"active"},
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "efc87d76ccc550bc"),
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver2"), &definitions.Route{
|
||||
Receiver: "receiver2",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "27e1d1717c9ef621"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "63ad04d6c21c3aec"),
|
||||
GroupWait: util.Pointer(model.Duration(2 * time.Minute)),
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver5"), &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "8cd5f9adeac58123"),
|
||||
ActiveTimeIntervals: []string{"active"},
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver5",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "f0770544f1741cf6"),
|
||||
MuteTimeIntervals: []string{"maintenance"},
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver4"), &definitions.Route{
|
||||
Receiver: "receiver4",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "b3a2fa5e615dcc7e"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "9bbbec5f72627ae5"),
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel, "custom"},
|
||||
}),
|
||||
withChildRoutes(basicContactRoute("receiver3"), &definitions.Route{
|
||||
Receiver: "receiver3",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "9e282ef0193d830a"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "fbcacbfae385a901"),
|
||||
RepeatInterval: util.Pointer(model.Duration(3 * time.Minute)),
|
||||
}),
|
||||
},
|
||||
@@ -183,7 +183,7 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "dde34b8127e68f31"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "efc87d76ccc550bc"),
|
||||
GroupByStr: nil,
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
@@ -203,13 +203,13 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "dde34b8127e68f31"),
|
||||
GroupByStr: nil,
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "828092ed6f427a00"), // Different hash.
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel},
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}, &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "e1f3a275a8918385"), // Different hash.
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel},
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "efc87d76ccc550bc"),
|
||||
GroupByStr: nil,
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
},
|
||||
@@ -229,7 +229,7 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Routes: []*definitions.Route{
|
||||
withChildRoutes(basicContactRoute("receiver1"), &definitions.Route{
|
||||
Receiver: "receiver1",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "e1f3a275a8918385"),
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteSettingsHashLabel, "828092ed6f427a00"),
|
||||
GroupByStr: []string{models.FolderTitleLabel, model.AlertNameLabel},
|
||||
GroupInterval: util.Pointer(model.Duration(1 * time.Minute)),
|
||||
}),
|
||||
@@ -249,9 +249,9 @@ func TestAddAutogenConfig(t *testing.T) {
|
||||
Receiver: "default",
|
||||
ObjectMatchers: matcher(models.AutogeneratedRouteLabel, "true"),
|
||||
Routes: []*definitions.Route{
|
||||
basicContactRoute("receiver1"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver2"),
|
||||
basicContactRoute("receiver3"),
|
||||
basicContactRoute("receiver1"),
|
||||
},
|
||||
}),
|
||||
},
|
||||
|
||||
Reference in New Issue
Block a user