Compare commits
3 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| c558072c0c | |||
| 2ce89f099f | |||
| 829022d488 |
@@ -121,8 +121,6 @@ linters:
|
||||
- '**/pkg/tsdb/zipkin/**/*'
|
||||
- '**/pkg/tsdb/jaeger/*'
|
||||
- '**/pkg/tsdb/jaeger/**/*'
|
||||
- '**/pkg/tsdb/elasticsearch/*'
|
||||
- '**/pkg/tsdb/elasticsearch/**/*'
|
||||
deny:
|
||||
- pkg: github.com/grafana/grafana/pkg/api
|
||||
desc: Core plugins are not allowed to depend on Grafana core packages
|
||||
|
||||
apps/dashboard/pkg/migration/conversion/testdata/output/v2beta1.tabs-and-rows-repeated.v0alpha1.json
Vendored
-4
@@ -586,7 +586,6 @@
|
||||
},
|
||||
"id": -1,
|
||||
"panels": [],
|
||||
"repeat": "custom_var_tab",
|
||||
"title": "Repeated Tab by \"$custom_var_tab\"",
|
||||
"type": "row"
|
||||
},
|
||||
@@ -611,11 +610,8 @@
|
||||
"y": 22
|
||||
},
|
||||
"id": 6,
|
||||
"maxPerRow": 3,
|
||||
"options": {},
|
||||
"pluginVersion": "12.4.0-19736337744",
|
||||
"repeat": "custom_var_panel",
|
||||
"repeatDirection": "h",
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A"
|
||||
|
||||
Vendored
-4
@@ -586,7 +586,6 @@
|
||||
},
|
||||
"id": -1,
|
||||
"panels": [],
|
||||
"repeat": "custom_var_tab",
|
||||
"title": "Repeated Tab by \"$custom_var_tab\"",
|
||||
"type": "row"
|
||||
},
|
||||
@@ -611,11 +610,8 @@
|
||||
"y": 22
|
||||
},
|
||||
"id": 6,
|
||||
"maxPerRow": 3,
|
||||
"options": {},
|
||||
"pluginVersion": "12.4.0-19736337744",
|
||||
"repeat": "custom_var_panel",
|
||||
"repeatDirection": "h",
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A"
|
||||
|
||||
@@ -439,11 +439,6 @@ func processTabItem(elements map[string]dashv2alpha1.DashboardElement, tab *dash
|
||||
rowPanel["title"] = *tab.Spec.Title
|
||||
}
|
||||
|
||||
if tab.Spec.Repeat != nil && tab.Spec.Repeat.Value != "" {
|
||||
// We only use value here as V1 doesn't support mode
|
||||
rowPanel["repeat"] = tab.Spec.Repeat.Value
|
||||
}
|
||||
|
||||
rowPanel["gridPos"] = map[string]interface{}{
|
||||
"x": 0,
|
||||
"y": currentY,
|
||||
@@ -824,21 +819,6 @@ func convertAutoGridLayoutToPanelsWithOffset(elements map[string]dashv2alpha1.Da
|
||||
},
|
||||
}
|
||||
|
||||
// Convert AutoGridRepeatOptions to RepeatOptions if present
|
||||
// AutoGridRepeatOptions only has mode and value; infer direction and maxPerRow from AutoGrid settings:
|
||||
// - direction: always "h" (AutoGrid flows horizontally, left-to-right then wraps)
|
||||
// - maxPerRow: from AutoGrid's maxColumnCount
|
||||
if item.Spec.Repeat != nil {
|
||||
directionH := dashv2alpha1.DashboardRepeatOptionsDirectionH
|
||||
maxPerRow := int64(maxColumnCount)
|
||||
gridItem.Spec.Repeat = &dashv2alpha1.DashboardRepeatOptions{
|
||||
Mode: item.Spec.Repeat.Mode,
|
||||
Value: item.Spec.Repeat.Value,
|
||||
Direction: &directionH,
|
||||
MaxPerRow: &maxPerRow,
|
||||
}
|
||||
}
|
||||
|
||||
panel, err := convertPanelFromElement(&element, &gridItem)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to convert panel %s: %w", item.Spec.Element.Name, err)
|
||||
|
||||
Vendored
+3
-3
@@ -2117,7 +2117,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Numeric, no series",
|
||||
"type": "radialbar"
|
||||
"type": "gauge"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
@@ -2183,7 +2183,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Non-numeric",
|
||||
"type": "radialbar"
|
||||
"type": "gauge"
|
||||
}
|
||||
],
|
||||
"preload": false,
|
||||
@@ -2201,4 +2201,4 @@
|
||||
"title": "Panel tests - Gauge (new)",
|
||||
"uid": "panel-tests-gauge-new",
|
||||
"weekStart": ""
|
||||
}
|
||||
}
|
||||
@@ -2067,7 +2067,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Numeric, no series",
|
||||
"type": "radialbar"
|
||||
"type": "gauge"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
@@ -2131,7 +2131,7 @@
|
||||
}
|
||||
],
|
||||
"title": "Non-numeric",
|
||||
"type": "radialbar"
|
||||
"type": "gauge"
|
||||
}
|
||||
],
|
||||
"preload": false,
|
||||
|
||||
@@ -25,6 +25,10 @@ cards:
|
||||
height: 24
|
||||
href: ./foundation-sdk/
|
||||
description: The Grafana Foundation SDK is a set of tools, types, and libraries that let you define Grafana dashboards and resources using familiar programming languages like Go, TypeScript, Python, Java, and PHP. Use it in conjunction with `grafanactl` to push your programmatically generated resources.
|
||||
- title: JSON schema v2
|
||||
height: 24
|
||||
href: ./schema-v2/
|
||||
description: Grafana dashboards are represented as JSON objects that store metadata, panels, variables, and settings. Observability as Code works with all versions of the JSON model, and it's fully compatible with version 2.
|
||||
- title: Git Sync (private preview)
|
||||
height: 24
|
||||
href: ./provision-resources/intro-git-sync/
|
||||
@@ -64,7 +68,7 @@ Historically, managing Grafana as code involved various community and Grafana La
|
||||
|
||||
- This approach requires handling HTTP requests and responses but provides complete control over resource management.
|
||||
- `grafanactl`, Git Sync, and the Foundation SDK are all built on top of these APIs.
|
||||
- To understand Dashboard Schemas accepted by the APIs, refer to the [JSON models documentation](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/visualizations/dashboards/build-dashboards/view-dashboard-json-model/index.md).
|
||||
- To understand Dashboard Schemas accepted by the APIs, refer to the [JSON models documentation](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/).
|
||||
|
||||
## Explore
|
||||
|
||||
|
||||
@@ -0,0 +1,243 @@
|
||||
---
|
||||
description: A reference for the JSON dashboard schemas used with Observability as Code, including the experimental V2 schema.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
title: JSON schema v2
|
||||
weight: 500
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/
|
||||
aliases:
|
||||
- ../../observability-as-code/schema-v2/ # /docs/grafana/next/observability-as-code/schema-v2/
|
||||
---
|
||||
|
||||
# Dashboard JSON schema v2
|
||||
|
||||
{{< admonition type="caution" >}}
|
||||
|
||||
Dashboard JSON schema v2 is an [experimental](https://grafana.com/docs/release-life-cycle/) feature. Engineering and on-call support is not available. Documentation is either limited or not provided outside of code comments. No SLA is provided. To get early access to this feature, request it through [this form](https://docs.google.com/forms/d/e/1FAIpQLSd73nQzuhzcHJOrLFK4ef_uMxHAQiPQh1-rsQUT2MRqbeMLpg/viewform?usp=dialog).
|
||||
|
||||
**Do not enable this feature in production environments as it may result in the irreversible loss of data.**
|
||||
|
||||
{{< /admonition >}}
|
||||
|
||||
Grafana dashboards are represented as JSON objects that store metadata, panels, variables, and settings.
|
||||
|
||||
Observability as Code works with all versions of the JSON model, and it's fully compatible with version 2.
|
||||
|
||||
## Before you begin
|
||||
|
||||
Schema v2 is automatically enabled with the Dynamic Dashboards feature toggle.
|
||||
To get early access to this feature, request it through [this form](https://docs.google.com/forms/d/e/1FAIpQLSd73nQzuhzcHJOrLFK4ef_uMxHAQiPQh1-rsQUT2MRqbeMLpg/viewform?usp=dialog).
|
||||
It also requires the new dashboards API feature toggle, `kubernetesDashboards`, to be enabled.
|
||||
|
||||
For more information on how dashboards behave depending on your feature flag configuration, refer to [Notes and limitations](#notes-and-limitations).
|
||||
|
||||
## Accessing the JSON Model
|
||||
|
||||
To view the JSON representation of a dashboard:
|
||||
|
||||
1. Toggle on the edit mode switch in the top-right corner of the dashboard.
|
||||
1. Click the gear icon in the top navigation bar to go to **Settings**.
|
||||
1. Select the **JSON Model** tab.
|
||||
1. Copy or edit the JSON structure as needed.
|
||||
|
||||
## JSON fields
|
||||
|
||||
```json
|
||||
{
|
||||
"annotations": [],
|
||||
"cursorSync": "Off",
|
||||
"editable": true,
|
||||
"elements": {},
|
||||
"layout": {
|
||||
"kind": "GridLayout", // Can also be AutoGridLayout, RowsLayout, or TabsLayout
|
||||
"spec": {
|
||||
"items": []
|
||||
}
|
||||
},
|
||||
"links": [],
|
||||
"liveNow": false,
|
||||
"preload": false,
|
||||
"tags": [], // Tags associated with the dashboard.
|
||||
"timeSettings": {
|
||||
"autoRefresh": "",
|
||||
"autoRefreshIntervals": [
|
||||
"5s",
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"fiscalYearStartMonth": 0,
|
||||
"from": "now-6h",
|
||||
"hideTimepicker": false,
|
||||
"timezone": "browser",
|
||||
"to": "now"
|
||||
},
|
||||
"title": "",
|
||||
"variables": []
|
||||
},
|
||||
```
|
||||
|
||||
The dashboard JSON sample shown uses the default `GridLayoutKind`.
|
||||
The JSON in a new dashboard for the other three layout options, `AutoGridLayout`, `RowsLayout`, and `TabsLayout`, are as follows:
|
||||
|
||||
**`AutoGridLayout`**
|
||||
|
||||
```json
|
||||
"layout": {
|
||||
"kind": "AutoGridLayout",
|
||||
"spec": {
|
||||
"columnWidthMode": "standard",
|
||||
"items": [],
|
||||
"fillScreen": false,
|
||||
"maxColumnCount": 3,
|
||||
"rowHeightMode": "standard"
|
||||
}
|
||||
},
|
||||
```
|
||||
|
||||
**`RowsLayout`**
|
||||
|
||||
```json
|
||||
"layout": {
|
||||
"kind": "RowsLayout",
|
||||
"spec": {
|
||||
"rows": []
|
||||
}
},
|
||||
```
|
||||
|
||||
**`TabsLayout`**
|
||||
|
||||
```json
|
||||
"layout": {
|
||||
"kind": "TabsLayout",
|
||||
"spec": {
|
||||
"tabs": []
|
||||
}
},
|
||||
```
|
||||
|
||||
### `DashboardSpec`
|
||||
|
||||
The following table explains the usage of the dashboard JSON fields.
|
||||
The table includes default and other fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | ------------------------------------------------------------------------- |
|
||||
| annotations | Contains the list of annotations that are associated with the dashboard. |
|
||||
| cursorSync | Dashboard cursor sync behavior.<ul><li>`Off` - No shared crosshair or tooltip (default)</li><li>`Crosshair` - Shared crosshair</li><li>`Tooltip` - Shared crosshair and shared tooltip</li></ul> |
|
||||
| editable | bool. Whether or not a dashboard is editable. |
|
||||
| elements | Contains the list of elements included in the dashboard. Supported dashboard elements are: PanelKind and LibraryPanelKind. |
|
||||
| layout | The dashboard layout. Supported layouts are:<ul><li>GridLayoutKind</li><li>AutoGridLayoutKind</li><li>RowsLayoutKind</li><li>TabsLayoutKind</li></ul> |
|
||||
| links | Links with references to other dashboards or external websites. |
|
||||
| liveNow | bool. When set to `true`, the dashboard redraws panels at an interval matching the pixel width. This keeps data "moving left" regardless of the query refresh rate. This setting helps avoid dashboards presenting stale live data. |
|
||||
| preload | bool. When set to `true`, the dashboard loads all panels when the dashboard is loaded. |
|
||||
| tags | Contains the list of tags associated with dashboard. |
|
||||
| timeSettings | All time settings for the dashboard. |
|
||||
| title | Title of the dashboard. |
|
||||
| variables | Contains the list of configured template variables. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
### `annotations`
|
||||
|
||||
The configuration for the list of annotations that are associated with the dashboard.
|
||||
For the JSON and field usage notes, refer to the [annotations schema documentation](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/annotations-schema/).
|
||||
|
||||
### `elements`
|
||||
|
||||
Dashboards can contain the following elements:
|
||||
|
||||
- [PanelKind](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/panel-schema/)
|
||||
- [LibraryPanelKind](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/librarypanel-schema/)
|
||||
|
||||
### `layout`
|
||||
|
||||
Dashboards can have four layout options:
|
||||
|
||||
- [GridLayoutKind](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/layout-schema/#gridlayoutkind)
|
||||
- [AutoGridLayoutKind](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/layout-schema/#autogridlayoutkind)
|
||||
- [RowsLayoutKind](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/layout-schema/#rowslayoutkind)
|
||||
- [TabsLayoutKind](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/layout-schema/#tabslayoutkind)
|
||||
|
||||
For the JSON and field usage notes about each of these, refer to the [layout schema documentation](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/layout-schema/).
|
||||
|
||||
### `links`
|
||||
|
||||
The configuration for links with references to other dashboards or external websites.
|
||||
|
||||
For the JSON and field usage notes, refer to the [links schema documentation](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/links-schema/).
|
||||
|
||||
### `tags`
|
||||
|
||||
Tags associated with the dashboard. Each tag can be up to 50 characters long.
|
||||
|
||||
`[...string]`
|
||||
|
||||
### `timesettings`
|
||||
|
||||
The `TimeSettingsSpec` defines the default time configuration for the time picker and the refresh picker for the specific dashboard.
|
||||
For the JSON and field usage notes about the `TimeSettingsSpec`, refer to the [timesettings schema documentation](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/timesettings-schema/).
|
||||
|
||||
### `variables`
|
||||
|
||||
The `variables` schema defines which variables are used in the dashboard.
|
||||
|
||||
There are eight variable types:
|
||||
|
||||
- QueryVariableKind
|
||||
- TextVariableKind
|
||||
- ConstantVariableKind
|
||||
- DatasourceVariableKind
|
||||
- IntervalVariableKind
|
||||
- CustomVariableKind
|
||||
- GroupByVariableKind
|
||||
- AdhocVariableKind
|
||||
|
||||
For the JSON and field usage notes about the `variables` spec, refer to the [variables schema documentation](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/variables-schema/).
|
||||
|
||||
## Notes and limitations
|
||||
|
||||
### Existing dashboards
|
||||
|
||||
With schema v2 enabled, you can still open and view your pre-existing dashboards.
|
||||
Upon saving, they’ll be updated to the new schema where you can take advantage of the new features and functionalities.
|
||||
|
||||
### Dashboard behavior with disabled feature flags
|
||||
|
||||
If you disable the Dynamic dashboards or `kubernetesDashboards` feature flags, you should be aware of how dashboards will behave.
|
||||
|
||||
#### Disable Dynamic dashboards
|
||||
|
||||
If the Dynamic dashboards feature toggle is disabled, depending on how the dashboard was built, it will behave differently:
|
||||
|
||||
- Dashboards built on the new schema through the UI - View only
|
||||
- Dashboards built on Schema v1 - View and edit
|
||||
- Dashboards built on the new schema by way of Terraform or the CLI - View and edit
|
||||
- Provisioned dashboards built on the new schema - View and edit, but the edit experience will be the old experience
|
||||
|
||||
#### Disable Dynamic dashboards and `kubernetesDashboards`
|
||||
|
||||
You’ll be unable to view or edit dashboards created or updated in the new schema.
|
||||
|
||||
### Import and export
|
||||
|
||||
From the UI, dashboards created on schema v2 can be exported and imported like other dashboards.
|
||||
When you export them to use in another instance, references of data sources are not persisted but data source types are.
|
||||
You’ll have the option to select the data source of your choice in the import UI.
|
||||
@@ -0,0 +1,86 @@
|
||||
---
|
||||
description: A reference for the JSON annotations schema used with Observability as Code.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- as-code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
- annotations
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: annotations schema
|
||||
title: annotations
|
||||
weight: 100
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/annotations-schema/
|
||||
aliases:
|
||||
- ../../../observability-as-code/schema-v2/annotations-schema/ # /docs/grafana/next/observability-as-code/schema-v2/annotations-schema/
|
||||
---
|
||||
|
||||
# `annotations`
|
||||
|
||||
The configuration for the list of annotations that are associated with the dashboard.
|
||||
|
||||
```json
|
||||
"annotations": [
|
||||
{
|
||||
"kind": "AnnotationQuery",
|
||||
"spec": {
|
||||
"builtIn": false,
|
||||
"datasource": {
|
||||
"type": "",
|
||||
"uid": ""
|
||||
},
|
||||
"enable": false,
|
||||
"hide": false,
|
||||
"iconColor": "",
|
||||
"name": ""
|
||||
}
|
||||
}
|
||||
],
|
||||
```
|
||||
|
||||
`AnnotationsQueryKind` consists of:
|
||||
|
||||
- kind: "AnnotationQuery"
|
||||
- spec: [AnnotationQuerySpec](#annotationqueryspec)
|
||||
|
||||
## `AnnotationQuerySpec`
|
||||
|
||||
| Name | Type/Definition |
|
||||
| ---------- | ----------------------------------------------------------------- |
|
||||
| datasource | [`DataSourceRef`](#datasourceref) |
|
||||
| query | [`DataQueryKind`](#dataquerykind) |
|
||||
| enable | bool |
|
||||
| hide | bool |
|
||||
| iconColor | string |
|
||||
| name | string |
|
||||
| builtIn | bool. Default is `false`. |
|
||||
| filter | [`AnnotationPanelFilter`](#annotationpanelfilter) |
|
||||
| options | `[string]`: A catch-all field for datasource-specific properties. |
|
||||
|
||||
### `DataSourceRef`
|
||||
|
||||
| Name | Usage |
|
||||
| ----- | ---------------------------------- |
|
||||
| type? | string. The plugin type-id. |
|
||||
| uid?  | string. The specific data source instance. |
|
||||
|
||||
### `DataQueryKind`
|
||||
|
||||
| Name | Type |
|
||||
| ---- | ------ |
|
||||
| kind | string |
|
||||
| spec | string |
|
||||
|
||||
### `AnnotationPanelFilter`
|
||||
|
||||
| Name | Type/Definition |
|
||||
| -------- | ------------------------------------------------------------------------------ |
|
||||
| exclude? | bool. Should the specified panels be included or excluded. Default is `false`. |
|
||||
| ids | `[...uint8]`. Panel IDs that should be included or excluded. |
|
||||
@@ -0,0 +1,339 @@
|
||||
---
|
||||
description: A reference for the JSON layout schema used with Observability as Code.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- as-code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
- layout
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: layout schema
|
||||
title: layout
|
||||
weight: 400
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/layout-schema/
|
||||
aliases:
|
||||
- ../../../observability-as-code/schema-v2/layout-schema/ # /docs/grafana/next/observability-as-code/schema-v2/layout-schema/
|
||||
---
|
||||
|
||||
# `layout`
|
||||
|
||||
There are four layout options offering two types of panel control:
|
||||
|
||||
**Panel layout options**
|
||||
|
||||
These options control the size and position of panels:
|
||||
|
||||
- [GridLayoutKind](#gridlayoutkind) - Corresponds to the **Custom** option in the UI. You define panel size and panel positions using x- and y- settings.
|
||||
- [AutoGridLayoutKind](#autogridlayoutkind) - Corresponds to the **Auto grid** option in the UI. Panel size and position are automatically set based on column and row parameters.
|
||||
|
||||
**Panel grouping options**
|
||||
|
||||
These options control the grouping of panels:
|
||||
|
||||
- [RowsLayoutKind](#rowslayoutkind) - Groups panels into rows.
|
||||
- [TabsLayoutKind](#tabslayoutkind) - Groups panels into tabs.
|
||||
|
||||
## `GridLayoutKind`
|
||||
|
||||
The grid layout allows you to manually size and position grid items by setting the height, width, x, and y of each item.
|
||||
This layout corresponds to the **Custom** option in the UI.
|
||||
|
||||
Following is the JSON for a default grid layout, a grid layout item, and a grid layout row:
|
||||
|
||||
```json
|
||||
"kind": "GridLayout",
|
||||
"spec": {
|
||||
"items": [
|
||||
{
|
||||
"kind": "GridLayoutItem",
|
||||
"spec": {
|
||||
"element": {...},
|
||||
"height": 0,
|
||||
"width": 0,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "GridLayoutRow",
|
||||
"spec": {
|
||||
"collapsed": false,
|
||||
"elements": [],
|
||||
"title": "",
|
||||
"y": 0
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
`GridLayoutKind` consists of:
|
||||
|
||||
- kind: "GridLayout"
|
||||
- spec: GridLayoutSpec
|
||||
- items: `GridLayoutItemKind` or `GridLayoutRowKind`
|
||||
- GridLayoutItemKind
|
||||
- kind: "GridLayoutItem"
|
||||
- spec: [GridLayoutItemSpec](#gridlayoutitemspec)
|
||||
- GridLayoutRowKind
|
||||
- kind: "GridLayoutRow"
|
||||
- spec: [GridLayoutRowSpec](#gridlayoutrowspec)
|
||||
|
||||
### `GridLayoutItemSpec`
|
||||
|
||||
The following table explains the usage of the grid layout item JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| x | integer. Position of the item x-axis. |
|
||||
| y | integer. Position of the item y-axis. |
|
||||
| width | Width of the item in pixels. |
|
||||
| height | Height of the item in pixels. |
|
||||
| element | `ElementReference`. Reference to a [`PanelKind`](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/panel-schema/) from `dashboard.spec.elements` expressed as JSON Schema reference. |
|
||||
| repeat? | [RepeatOptions](#repeatoptions). Configured repeat options, if any |
|
||||
|
||||
#### `RepeatOptions`
|
||||
|
||||
The following table explains the usage of the repeat option JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ---------- | ---------------------------------------------------- |
|
||||
| mode | `RepeatMode` - "variable" |
|
||||
| value | string |
|
||||
| direction? | Options are `h` for horizontal and `v` for vertical. |
|
||||
| maxPerRow? | integer |
|
||||
|
||||
### `GridLayoutRowSpec`
|
||||
|
||||
The following table explains the usage of the grid layout row JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ----- |
|
||||
| y | integer. Position of the row y-axis |
|
||||
| collapsed | bool. Whether or not the row is collapsed |
|
||||
| title | Row title |
|
||||
| elements | [`[...GridLayoutItemKind]`](#gridlayoutitemspec). Grid items in the row will have their y value be relative to the row's y value. This means a panel positioned at `y: 0` in a row with `y: 10` will be positioned at `y: 11` (row header has a height of 1) in the dashboard. |
|
||||
| repeat? | [RowRepeatOptions](#rowrepeatoptions). Configured row repeat options, if any. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
#### `RowRepeatOptions`
|
||||
|
||||
| Name | Usage |
|
||||
| ----- | ------------------------- |
|
||||
| mode | `RepeatMode` - "variable" |
|
||||
| value | string |
|
||||
|
||||
## `AutoGridLayoutKind`
|
||||
|
||||
With an auto grid, Grafana sizes and positions your panels for the best fit based on the column and row constraints that you set.
|
||||
This layout corresponds to the **Auto grid** option in the UI.
|
||||
|
||||
Following is the JSON for a default auto grid layout and a grid layout item:
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
```json
|
||||
"kind": "AutoGridLayout",
|
||||
"spec": {
|
||||
"columnWidthMode": "standard",
|
||||
"fillScreen": false,
|
||||
"items": [
|
||||
{
|
||||
"kind": "AutoGridLayoutItem",
|
||||
"spec": {
|
||||
"element": {...}
|
||||
}
|
||||
}
|
||||
],
|
||||
"maxColumnCount": 3,
|
||||
"rowHeightMode": "standard"
|
||||
}
|
||||
```
|
||||
|
||||
`AutoGridLayoutKind` consists of:
|
||||
|
||||
- kind: "AutoGridLayout"
|
||||
- spec: [AutoGridLayoutSpec](#autogridlayoutspec)
|
||||
|
||||
### `AutoGridLayoutSpec`
|
||||
|
||||
The following table explains the usage of the auto grid layout JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ----- |
|
||||
| maxColumnCount? | number. Default is `3`. |
|
||||
| columnWidthMode | Options are: `narrow`, `standard`, `wide`, and `custom`. Default is `standard`. |
|
||||
| columnWidth? | number |
|
||||
| rowHeightMode | Options are: `short`, `standard`, `tall`, and `custom`. Default is `standard`. |
|
||||
| rowHeight? | number |
|
||||
| fillScreen? | bool. Default is `false`. |
|
||||
| items | `AutoGridLayoutItemKind`. Consists of:<ul><li>kind: "AutoGridLayoutItem"</li><li>spec: [AutoGridLayoutItemSpec](#autogridlayoutitemspec)</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
#### `AutoGridLayoutItemSpec`
|
||||
|
||||
The following table explains the usage of the auto grid layout item JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ----- |
|
||||
| element | `ElementReference`. Reference to a [`PanelKind`](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/observability-as-code/schema-v2/panel-schema/) from `dashboard.spec.elements` expressed as JSON Schema reference. |
|
||||
| repeat? | [AutoGridRepeatOptions](#autogridrepeatoptions). Configured repeat options, if any. |
|
||||
| conditionalRendering? | `ConditionalRenderingGroupKind`. Rules for hiding or showing panels, if any. Consists of:<ul><li>kind: "ConditionalRenderingGroup"</li><li>spec: [ConditionalRenderingGroupSpec](#conditionalrenderinggroupspec)</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
##### `AutoGridRepeatOptions`
|
||||
|
||||
The following table explains the usage of the auto grid repeat option JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ----- | ------------------------- |
|
||||
| mode | `RepeatMode` - "variable" |
|
||||
| value | string |
|
||||
|
||||
##### `ConditionalRenderingGroupSpec`
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ----- |
|
||||
| visibility | Options are `show` and `hide` |
|
||||
| condition | Options are `and` and `or` |
|
||||
| items | Options are:<ul><li>ConditionalRenderingVariableKind<ul><li>kind: "ConditionalRenderingVariable"</li><li>spec: [ConditionalRenderingVariableSpec](#conditionalrenderingvariablespec)</li></ul></li><li>ConditionalRenderingDataKind<ul><li>kind: "ConditionalRenderingData"</li><li>spec: [ConditionalRenderingDataSpec](#conditionalrenderingdataspec)</li></ul></li><li>ConditionalRenderingTimeRangeSizeKind<ul><li>kind: "ConditionalRenderingTimeRangeSize"</li><li>spec: [ConditionalRenderingTimeRangeSizeSpec](#conditionalrenderingtimerangesizespec)</li></ul></li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
###### `ConditionalRenderingVariableSpec`
|
||||
|
||||
| Name | Usage |
|
||||
| -------- | ------------------------------------ |
|
||||
| variable | string |
|
||||
| operator | Options are `equals` and `notEquals` |
|
||||
| value | string |
|
||||
|
||||
###### `ConditionalRenderingDataSpec`
|
||||
|
||||
| Name | Type |
|
||||
| ----- | ---- |
|
||||
| value | bool |
|
||||
|
||||
###### `ConditionalRenderingTimeRangeSizeSpec`
|
||||
|
||||
| Name | Type |
|
||||
| ----- | ------ |
|
||||
| value | string |
|
||||
|
||||
## `RowsLayoutKind`
|
||||
|
||||
The `RowsLayoutKind` is one of two options that you can use to group panels.
|
||||
You can nest any other kind of layout inside a layout row.
|
||||
Rows can also be nested in auto grids or tabs.
|
||||
|
||||
Following is the JSON for a default rows layout and a row:
|
||||
|
||||
```json
|
||||
"kind": "RowsLayout",
|
||||
"spec": {
|
||||
"rows": [
|
||||
{
|
||||
"kind": "RowsLayoutRow",
|
||||
"spec": {
|
||||
"layout": {
|
||||
"kind": "GridLayout", // Can also be AutoGridLayout or TabsLayout
|
||||
"spec": {...}
|
||||
},
|
||||
"title": ""
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
`RowsLayoutKind` consists of:
|
||||
|
||||
- kind: RowsLayout
|
||||
- spec: RowsLayoutSpec
|
||||
- rows: RowsLayoutRowKind
|
||||
- kind: RowsLayoutRow
|
||||
- spec: [RowsLayoutRowSpec](#rowslayoutrowspec)
|
||||
|
||||
### `RowsLayoutRowSpec`
|
||||
|
||||
The following table explains the usage of the rows layout row JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ----- |
|
||||
| title? | Title of the row. |
|
||||
| collapse | bool. Whether or not the row is collapsed. |
|
||||
| hideHeader? | bool. Whether the row header is hidden or shown. |
|
||||
| fullScreen? | bool. Whether or not the row takes up the full screen. |
|
||||
| conditionalRendering? | `ConditionalRenderingGroupKind`. Rules for hiding or showing rows, if any. Consists of:<ul><li>kind: "ConditionalRenderingGroup"</li><li>spec: [ConditionalRenderingGroupSpec](#conditionalrenderinggroupspec)</li></ul> |
|
||||
| repeat? | [RowRepeatOptions](#rowrepeatoptions). Configured repeat options, if any. |
|
||||
| layout | Supported layouts are:<ul><li>[GridLayoutKind](#gridlayoutkind)</li><li>[RowsLayoutKind](#rowslayoutkind)</li><li>[AutoGridLayoutKind](#autogridlayoutkind)</li><li>[TabsLayoutKind](#tabslayoutkind)</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
## `TabsLayoutKind`
|
||||
|
||||
The `TabsLayoutKind` is one of two options that you can use to group panels.
|
||||
You can nest any other kind of layout inside a tab.
|
||||
Tabs can also be nested in auto grids or rows.
|
||||
|
||||
Following is the JSON for a default tabs layout and a tab:
|
||||
|
||||
```json
|
||||
"kind": "TabsLayout",
|
||||
"spec": {
|
||||
"tabs": [
|
||||
{
|
||||
"kind": "TabsLayoutTab",
|
||||
"spec": {
|
||||
"layout": {
|
||||
"kind": "GridLayout", // Can also be AutoGridLayout or RowsLayout
|
||||
"spec": {...}
|
||||
},
|
||||
"title": "New tab"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
`TabsLayoutKind` consists of:
|
||||
|
||||
- kind: TabsLayout
|
||||
- spec: TabsLayoutSpec
|
||||
- tabs: TabsLayoutTabKind
|
||||
- kind: TabsLayoutTab
|
||||
- spec: [TabsLayoutTabSpec](#tabslayouttabspec)
|
||||
|
||||
### `TabsLayoutTabSpec`
|
||||
|
||||
The following table explains the usage of the tabs layout tab JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ----- |
|
||||
| title? | The title of the tab. |
|
||||
| layout | Supported layouts are:<ul><li>[GridLayoutKind](#gridlayoutkind)</li><li>[RowsLayoutKind](#rowslayoutkind)</li><li>[AutoGridLayoutKind](#autogridlayoutkind)</li><li>[TabsLayoutKind](#tabslayoutkind)</li></ul> |
|
||||
| conditionalRendering? | `ConditionalRenderingGroupKind`. Rules for hiding or showing panels, if any. Consists of:<ul><li>kind: "ConditionalRenderingGroup"</li><li>spec: [ConditionalRenderingGroupSpec](#conditionalrenderinggroupspec)</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
@@ -0,0 +1,68 @@
|
||||
---
|
||||
description: A reference for the JSON library panel schema used with Observability as Code.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- as-code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
- library panel
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: LibraryPanelKind schema
|
||||
title: LibraryPanelKind
|
||||
weight: 300
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/librarypanel-schema/
|
||||
aliases:
|
||||
- ../../../observability-as-code/schema-v2/librarypanel-schema/ # /docs/grafana/next/observability-as-code/schema-v2/librarypanel-schema/
|
||||
---
|
||||
|
||||
# `LibraryPanelKind`
|
||||
|
||||
A library panel is a reusable panel that you can use in any dashboard.
|
||||
When you make a change to a library panel, that change propagates to all instances where the panel is used.
|
||||
Library panels streamline reuse of panels across multiple dashboards.
|
||||
|
||||
Following is the default library panel element JSON:
|
||||
|
||||
```json
|
||||
"kind": "LibraryPanel",
|
||||
"spec": {
|
||||
"id": 0,
|
||||
"libraryPanel": {
|
||||
name: "",
|
||||
uid: "",
|
||||
}
|
||||
"title": ""
|
||||
}
|
||||
```
|
||||
|
||||
The `LibraryPanelKind` consists of:
|
||||
|
||||
- kind: "LibraryPanel"
|
||||
- spec: [LibraryPanelKindSpec](#librarypanelkindspec)
|
||||
- libraryPanel: [LibraryPanelRef](#librarypanelref)
|
||||
|
||||
## `LibraryPanelKindSpec`
|
||||
|
||||
The following table explains the usage of the library panel element JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | ------------------------------------------------ |
|
||||
| id | Panel ID for the library panel in the dashboard. |
|
||||
| libraryPanel | [`LibraryPanelRef`](#librarypanelref) |
|
||||
| title | Title for the library panel in the dashboard. |
|
||||
|
||||
### `LibraryPanelRef`
|
||||
|
||||
The following table explains the usage of the library panel reference JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ------------------ |
|
||||
| name | Library panel name |
|
||||
| uid | Library panel uid |
|
||||
@@ -0,0 +1,67 @@
|
||||
---
|
||||
description: A reference for the JSON links schema used with Observability as Code.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- as-code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
- links
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: links schema
|
||||
title: links
|
||||
weight: 500
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/links-schema/
|
||||
aliases:
|
||||
- ../../../observability-as-code/schema-v2/links-schema/ # /docs/grafana/next/observability-as-code/schema-v2/links-schema/
|
||||
---
|
||||
|
||||
# `links`
|
||||
|
||||
The `links` schema is the configuration for links with references to other dashboards or external websites.
|
||||
Following are the default JSON fields:
|
||||
|
||||
```json
|
||||
"links": [
|
||||
{
|
||||
"asDropdown": false,
|
||||
"icon": "",
|
||||
"includeVars": false,
|
||||
"keepTime": false,
|
||||
"tags": [],
|
||||
"targetBlank": false,
|
||||
"title": "",
|
||||
"tooltip": "",
|
||||
"type": "link",
|
||||
},
|
||||
],
|
||||
```
|
||||
|
||||
## `DashboardLink`
|
||||
|
||||
The following table explains the usage of the dashboard link JSON fields.
|
||||
The table includes default and other fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ----------- | --------------------------------------- |
|
||||
| title | string. Title to display with the link. |
|
||||
| type | `DashboardLinkType`. Link type. Accepted values are:<ul><li>dashboards - To refer to another dashboard</li><li>link - To refer to an external resource</li></ul> |
|
||||
| icon | string. Icon name to be displayed with the link. |
|
||||
| tooltip | string. Tooltip to display when the user hovers their mouse over it. |
|
||||
| url? | string. Link URL. Only required/valid if the type is link. |
|
||||
| tags | string. List of tags to limit the linked dashboards. If empty, all dashboards will be displayed. Only valid if the type is dashboards. |
|
||||
| asDropdown | bool. If true, all dashboards links will be displayed in a dropdown. If false, all dashboards links will be displayed side by side. Only valid if the type is dashboards. Default is `false`. |
|
||||
| targetBlank | bool. If true, the link will be opened in a new tab. Default is `false`. |
|
||||
| includeVars | bool. If true, includes current template variables values in the link as query params. Default is `false`. |
|
||||
| keepTime | bool. If true, includes current time range in the link as query params. Default is `false`. |
|
||||
| placement? | string. Use placement to display the link somewhere else on the dashboard other than above the visualizations. Use the `inControlsMenu` parameter to render the link in the dashboard controls dropdown menu. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
@@ -0,0 +1,305 @@
|
||||
---
|
||||
description: A reference for the JSON panel schema used with Observability as Code.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- as-code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
- panels
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: PanelKind schema
|
||||
title: PanelKind
|
||||
weight: 200
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/panel-schema/
|
||||
aliases:
|
||||
- ../../../observability-as-code/schema-v2/panel-schema/ # /docs/grafana/next/observability-as-code/schema-v2/panel-schema/
|
||||
---
|
||||
|
||||
# `PanelKind`
|
||||
|
||||
The panel element contains all the information about the panel including the visualization type, panel and visualization configuration, queries, and transformations.
|
||||
There's a panel element for each panel contained in the dashboard.
|
||||
|
||||
Following is the default panel element JSON:
|
||||
|
||||
```json
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"data": {
|
||||
"kind": "QueryGroup",
|
||||
"spec": {...},
|
||||
"description": "",
|
||||
"id": 0,
|
||||
"links": [],
|
||||
"title": "",
|
||||
"vizConfig": {
|
||||
"kind": "",
|
||||
"spec": {...},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The `PanelKind` consists of:
|
||||
|
||||
- kind: "Panel"
|
||||
- spec: [PanelSpec](#panelspec)
|
||||
|
||||
## `PanelSpec`
|
||||
|
||||
The following table explains the usage of the panel element JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | --------------------------------------------------------------------- |
|
||||
| data | `QueryGroupKind`, which includes queries and transformations. Consists of:<ul><li>kind: "QueryGroup"</li><li>spec: [QueryGroupSpec](#querygroupspec)</li></ul> |
|
||||
| description | The panel description. |
|
||||
| id | The panel ID. |
|
||||
| links | Links with references to other dashboards or external websites. |
|
||||
| title | The panel title. |
|
||||
| vizConfig | `VizConfigKind`. Includes visualization type, field configuration options, and all other visualization options. Consists of:<ul><li>kind: string. Plugin ID.</li><li>spec: [VizConfigSpec](#vizconfigspec)</li></ul> |
|
||||
| transparent? | bool. Controls whether or not the panel background is transparent. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
### `QueryGroupSpec`
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| --------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| queries | `PanelQueryKind`. Consists of:<ul><li>kind: PanelQuery</li><li>spec: [PanelQuerySpec](#panelqueryspec)</li></ul> |
|
||||
| transformations | `TransformationKind`. Consists of:<ul><li>kind: string. The transformation ID.</li><li>spec: [DataTransformerConfig](#datatransformerconfig)</li></ul> |
|
||||
| queryOptions | [`QueryOptionsSpec`](#queryoptionsspec) |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
#### `PanelQuerySpec`
|
||||
|
||||
| Name | Usage |
|
||||
| ----------- | --------------------------------- |
|
||||
| query | [`DataQueryKind`](#dataquerykind) |
|
||||
| datasource? | [`DataSourceRef`](#datasourceref) |
|
||||
|
||||
##### `DataQueryKind`
|
||||
|
||||
| Name | Type |
|
||||
| ---- | ------ |
|
||||
| kind | string |
|
||||
| spec | string |
|
||||
|
||||
##### `DataSourceRef`
|
||||
|
||||
| Name | Usage |
|
||||
| ----- | ---------------------------------- |
|
||||
| type? | string. The plugin type-id. |
|
||||
| uid? | The specific data source instance. |
|
||||
|
||||
#### `DataTransformerConfig`
|
||||
|
||||
Transformations allow you to manipulate data returned by a query before the system applies a visualization.
|
||||
Using transformations you can: rename fields, join time series data, perform mathematical operations across queries, or use the output of one transformation as the input to another transformation.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| --------- | ------------------------------------------- |
|
||||
| id | string. Unique identifier of transformer. |
|
||||
| disabled? | bool. Disabled transformations are skipped. |
|
||||
| filter? | [`MatcherConfig`](#matcherconfig). Optional frame matcher. When missing it will be applied to all results. |
|
||||
| topic? | `DataTopic`. Where to pull `DataFrames` from as input to transformation. Options are: `series`, `annotations`, and `alertStates`. |
|
||||
| options | Options to be passed to the transformer. Valid options depend on the transformer id. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
##### `MatcherConfig`
|
||||
|
||||
Matcher is a predicate configuration.
|
||||
Based on the configuration, a set of fields or values is filtered in order to apply an override or transformation.
|
||||
It comes with an id (to resolve the implementation from the registry) and a configuration that's specific to a particular matcher type.
|
||||
|
||||
| Name | Usage |
|
||||
| -------- | -------------------------------------------------------------------------------------- |
|
||||
| id | string. The matcher id. This is used to find the matcher implementation from registry. |
|
||||
| options? | The matcher options. This is specific to the matcher implementation. |
|
||||
|
||||
#### `QueryOptionsSpec`
|
||||
|
||||
| Name | Type |
|
||||
| ----------------- | ------- |
|
||||
| timeFrom? | string |
|
||||
| maxDataPoints? | integer |
|
||||
| timeShift? | string |
|
||||
| queryCachingTTL? | integer |
|
||||
| interval? | string |
|
||||
| cacheTimeout? | string |
|
||||
| hideTimeOverride? | bool |
|
||||
|
||||
### `VizConfigSpec`
|
||||
|
||||
| Name | Type/Definition |
|
||||
| ------------- | --------------------------------------- |
|
||||
| pluginVersion | string |
|
||||
| options | string |
|
||||
| fieldConfig | [FieldConfigSource](#fieldconfigsource) |
|
||||
|
||||
#### `FieldConfigSource`
|
||||
|
||||
The data model used in Grafana, namely the _data frame_, is a columnar-oriented table structure that unifies both time series and table query results.
|
||||
Each column within this structure is called a field.
|
||||
A field can represent a single time series or table column.
|
||||
Field options allow you to change how the data is displayed in your visualizations.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Type/Definition |
|
||||
| ---------- | ------------------------------------- |
|
||||
| defaults | [`FieldConfig`](#fieldconfig). Defaults are the options applied to all fields. |
|
||||
| overrides | The options applied to specific fields overriding the defaults. |
|
||||
| matcher | [`MatcherConfig`](#matcherconfig). Optional frame matcher. When missing it will be applied to all results. |
|
||||
| properties | `DynamicConfigValue`. Consists of:<ul><li>`id` - string</li><li>value?</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
##### `FieldConfig`
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Type/Definition |
|
||||
| ------------------ | --------------------------------------- |
|
||||
| displayName? | string. The display value for this field. This supports template variables where empty is auto. |
|
||||
| displayNameFromDS? | string. This can be used by data sources that return an explicit naming structure for values and labels. When this property is configured, this value is used rather than the default naming strategy. |
|
||||
| description? | string. Human readable field metadata. |
|
||||
| path? | string. An explicit path to the field in the data source. When the frame meta includes a path, this will default to `${frame.meta.path}/${field.name}`. When defined, this value can be used as an identifier within the data source scope, and may be used to update the results. |
|
||||
| writeable? | bool. True if the data source can write a value to the path. Auth/authz are supported separately. |
|
||||
| filterable? | bool. True if the data source field supports ad-hoc filters. |
|
||||
| unit? | string. Unit a field should use. The unit you select is applied to all fields except time. You can use the unit's ID available in Grafana or a custom unit. [Available units in Grafana](https://github.com/grafana/grafana/blob/main/packages/grafana-data/src/valueFormats/categories.ts). As custom units, you can use the following formats:<ul><li>`suffix:<suffix>` for a custom unit that should go after the value.</li><li>`prefix:<prefix>` for a custom unit that should go before the value.</li><li>`time:<format>` for custom date/time formats, for example: `time:YYYY-MM-DD`.</li><li>`si:<base scale><unit characters>` for custom SI units. For example: `si: mF`. You can specify both a unit and the source data scale, so if your source data is represented as milli (thousandths of) something, prefix the unit with that SI scale character.</li><li>`count:<unit>` for a custom count unit.</li><li>`currency:<unit>` for a custom currency unit.</li></ul> |
|
||||
| decimals? | number. Specify the number of decimals Grafana includes in the rendered value. If you leave this field blank, Grafana automatically truncates the number of decimals based on the value. For example 1.1234 will display as 1.12 and 100.456 will display as 100. To display all decimals, set the unit to `string`. |
|
||||
| min? | number. The minimum value used in percentage threshold calculations. Leave empty for auto calculation based on all series and fields. |
|
||||
| max? | number. The maximum value used in percentage threshold calculations. Leave empty for auto calculation based on all series and fields. |
|
||||
| mappings? | `[...ValueMapping]`. Convert input values into a display string. Options are: [`ValueMap`](#valuemap), [`RangeMap`](#rangemap), [`RegexMap`](#regexmap), [`SpecialValueMap`](#specialvaluemap). |
|
||||
| thresholds? | `ThresholdsConfig`. Map numeric values to states. Consists of:<ul><li>`mode` - `ThresholdsMode`. Options are: `absolute` and `percentage`.</li><li>`steps` - `[...Threshold]`</li></ul> |
|
||||
| color? | [`FieldColor`](#fieldcolor). Panel color configuration. |
|
||||
| links? | `[...]`. The behavior when clicking a result. |
|
||||
| noValue? | string. Alternative to an empty string. |
|
||||
| custom? | `{...}`. Specified by the `FieldConfig` field in panel plugin schemas. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
###### `ValueMap`
|
||||
|
||||
Maps text values to a color or different display text and color.
|
||||
For example, you can configure a value mapping so that all instances of the value 10 appear as Perfection! rather than the number.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ------- | -------- |
|
||||
| type | `MappingType` & "value". `MappingType` options are: `value`, `range`, `regex`, and `special`. |
|
||||
| options | string. [`ValueMappingResult`](#valuemappingresult). Map with `<value_to_match>`: `ValueMappingResult`. For example: `{ "10": { text: "Perfection!", color: "green" } }`. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
###### `RangeMap`
|
||||
|
||||
Maps numerical ranges to a display text and color.
|
||||
For example, if a value is within a certain range, you can configure a range value mapping to display Low or High rather than the number.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ------- | ---------------------------------------------------------------------------------------------------- |
|
||||
| type | `MappingType` & "range". `MappingType` options are: `value`, `range`, `regex`, and `special`. |
|
||||
| options | Range to match against and the result to apply when the value is within the range. Spec:<ul><li>`from` - `float64` or `null`. Min value of the range. It can be null which means `-Infinity`.</li><li>`to` - `float64` or `null`. Max value of the range. It can be null which means `+Infinity`.</li><li>`result` - [`ValueMappingResult`](#valuemappingresult)</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
###### `RegexMap`
|
||||
|
||||
Maps regular expressions to replacement text and a color.
|
||||
For example, if a value is `www.example.com`, you can configure a regex value mapping so that Grafana displays www and truncates the domain.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ------- | --------------------------------------------------------------------------------------------- |
|
||||
| type | `MappingType` & "regex". `MappingType` options are: `value`, `range`, `regex`, and `special`. |
|
||||
| options | Regular expression to match against and the result to apply when the value matches the regex. Spec:<ul><li>`pattern` - string. Regular expression to match against.</li><li>`result` - [`ValueMappingResult`](#valuemappingresult)</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
###### `SpecialValueMap`
|
||||
|
||||
Maps special values like Null, NaN (not a number), and boolean values like true and false to a display text and color.
|
||||
See `SpecialValueMatch` in the following table to see the list of special values.
|
||||
For example, you can configure a special value mapping so that null values appear as N/A.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ------- | ----------------------------------------------------------------------------------------------- |
|
||||
| type | `MappingType` & "special". `MappingType` options are: `value`, `range`, `regex`, and `special`. |
|
||||
| options | Spec:<ul><li>`match` - `SpecialValueMatch`. Special value to match against. Types are:<ul><li>true</li><li>false</li><li>null</li><li>nan</li><li>empty</li></ul></li><li>`result` - [`ValueMappingResult`](#valuemappingresult)</li></ul> |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
###### `ValueMappingResult`
|
||||
|
||||
Result used as replacement with text and color when the value matches.
|
||||
|
||||
| Name | Usage |
|
||||
| ----- | ----------------------------------------------------------------------------- |
|
||||
| text | string. Text to display when the value matches. |
|
||||
| color | string. Color to use when the value matches. |
|
||||
| icon | string. Icon to display when the value matches. Only specific visualizations. |
|
||||
| index | int32. Position in the mapping array. Only used internally. |
|
||||
|
||||
###### `FieldColor`
|
||||
|
||||
Map a field to a color.
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ----------- | -------------------------------------------------------------------- |
|
||||
| mode | [`FieldColorModeId`](#fieldcolormodeid). The main color scheme mode. |
|
||||
| fixedColor? | string. The fixed color value for fixed or shades color modes. |
|
||||
| seriesBy? | `FieldColorSeriesByMode`. Defines how to assign a series color from "by value" color schemes. For example, for aggregated data points like a time series, the color can be assigned by the min, max, or last value. Options are: `min`, `max`, and `last`. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
###### `FieldColorModeId`
|
||||
|
||||
Color mode for a field.
|
||||
You can specify a single color, or select a continuous (gradient) color scheme, based on a value.
|
||||
Continuous color interpolates a color using the percentage of a value relative to min and max.
|
||||
Accepted values are:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Description |
|
||||
| --- | ---- |
|
||||
| thresholds | From thresholds. Informs Grafana to take the color from the matching threshold. |
|
||||
| palette-classic | Classic palette. Grafana will assign color by looking up a color in a palette by series index. Useful for graphs and pie charts and other categorical data visualizations. |
|
||||
| palette-classic-by-name | Classic palette (by name). Grafana will assign color by looking up a color in a palette by series name. Useful for Graphs and pie charts and other categorical data visualizations |
|
||||
| continuous-GrYlRd | Continuous Green-Yellow-Red palette mode |
|
||||
| continuous-RdYlGr | Continuous Red-Yellow-Green palette mode |
|
||||
| continuous-BlYlRd | Continuous Blue-Yellow-Red palette mode |
|
||||
| continuous-YlRd | Continuous Yellow-Red palette mode |
|
||||
| continuous-BlPu | Continuous Blue-Purple palette mode |
|
||||
| continuous-YlBl | Continuous Yellow-Blue palette mode |
|
||||
| continuous-blues | Continuous Blue palette mode |
|
||||
| continuous-reds | Continuous Red palette mode |
|
||||
| continuous-greens | Continuous Green palette mode |
|
||||
| continuous-purples | Continuous Purple palette mode |
|
||||
| shades | Shades of a single color. Specify a single color, useful in an override rule. |
|
||||
| fixed | Fixed color mode. Specify a single color, useful in an override rule. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
@@ -0,0 +1,87 @@
|
||||
---
|
||||
description: A reference for the JSON timesettings schema used with Observability as Code.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- as-code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
- time settings
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: timesettings schema
|
||||
title: timesettings
|
||||
weight: 600
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/timesettings-schema/
|
||||
aliases:
|
||||
- ../../../observability-as-code/schema-v2/timesettings-schema/ # /docs/grafana/next/observability-as-code/schema-v2/timesettings-schema/
|
||||
---
|
||||
|
||||
# `timeSettings`
|
||||
|
||||
The `TimeSettingsSpec` defines the default time configuration for the time picker and the refresh picker for the specific dashboard.
|
||||
|
||||
Following is the JSON for default time settings:
|
||||
|
||||
```json
|
||||
"timeSettings": {
|
||||
"autoRefresh": "",
|
||||
"autoRefreshIntervals": [
|
||||
"5s",
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"fiscalYearStartMonth": 0,
|
||||
"from": "now-6h",
|
||||
"hideTimepicker": false,
|
||||
"timezone": "browser",
|
||||
"to": "now"
|
||||
},
|
||||
```
|
||||
|
||||
`timeSettings` consists of:
|
||||
|
||||
- [TimeSettingsSpec](#timesettingsspec)
|
||||
|
||||
## `TimeSettingsSpec`
|
||||
|
||||
The following table explains the usage of the time settings JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ---- | ----- |
|
||||
| timezone? | string. Timezone of dashboard. Accepted values are IANA TZDB zone ID, `browser`, or `utc`. Default is `browser`. |
|
||||
| from | string. Start time range for dashboard. Accepted values are relative time strings like `now-6h` or absolute time strings like `2020-07-10T08:00:00.000Z`. Default is `now-6h`. |
|
||||
| to | string. End time range for dashboard. Accepted values are relative time strings like `now-6h` or absolute time strings like `2020-07-10T08:00:00.000Z`. Default is `now`. |
|
||||
| autoRefresh | string. Refresh rate of dashboard. Represented by interval string. For example: `5s`, `1m`, `1h`, `1d`. No default. In schema v1: `refresh`. |
|
||||
| autoRefreshIntervals | array of strings. Interval options available in the refresh picker drop-down menu. The default array is `["5s", "10s", "30s", "1m", "5m", "15m", "30m", "1h", "2h", "1d"]`. |
|
||||
| quickRanges? | Selectable options available in the time picker drop-down menu. Has no effect on provisioned dashboards. Defined in the [`TimeRangeOption`](#timerangeoption) spec. In schema v1: `timepicker.quick_ranges`, not exposed in the UI. |
|
||||
| hideTimepicker | bool. Whether or not the time picker is visible. Default is `false`. In schema v1: `timepicker.hidden`. |
|
||||
| weekStart? | Day when the week starts. Expressed by the name of the day in lowercase. For example: `monday`. Options are `saturday`, `monday`, and `sunday`. |
|
||||
| fiscalYearStartMonth | The month that the fiscal year starts on. `0` = January, `11` = December |
|
||||
| nowDelay? | string. Override the "now" time by entering a time delay. Use this option to accommodate known delays in data aggregation to avoid null values. In schema v1: `timepicker.nowDelay`. |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
### `TimeRangeOption`
|
||||
|
||||
The following table explains the usage of the time range option JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------- | ---------------------------------- |
|
||||
| display | string. Default is `Last 6 hours`. |
|
||||
| from | string. Default is `now-6h`. |
|
||||
| to | string. Default is `now`. |
|
||||
@@ -0,0 +1,501 @@
|
||||
---
|
||||
description: A reference for the JSON variables schema used with Observability as Code.
|
||||
keywords:
|
||||
- configuration
|
||||
- as code
|
||||
- as-code
|
||||
- dashboards
|
||||
- git integration
|
||||
- git sync
|
||||
- github
|
||||
- variables
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: variables schema
|
||||
title: variables
|
||||
weight: 700
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/schema-v2/variables-schema/
|
||||
aliases:
|
||||
- ../../../observability-as-code/schema-v2/variables-schema/ # /docs/grafana/next/observability-as-code/schema-v2/variables-schema/
|
||||
---
|
||||
|
||||
# `variables`
|
||||
|
||||
The available variable types are described in the following sections:
|
||||
|
||||
- [QueryVariableKind](#queryvariablekind)
|
||||
- [TextVariableKind](#textvariablekind)
|
||||
- [ConstantVariableKind](#constantvariablekind)
|
||||
- [DatasourceVariableKind](#datasourcevariablekind)
|
||||
- [IntervalVariableKind](#intervalvariablekind)
|
||||
- [CustomVariableKind](#customvariablekind)
|
||||
- [SwitchVariableKind](#switchvariablekind)
|
||||
- [GroupByVariableKind](#groupbyvariablekind)
|
||||
- [AdhocVariableKind](#adhocvariablekind)
|
||||
|
||||
## `QueryVariableKind`
|
||||
|
||||
Following is the JSON for a default query variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "QueryVariable",
|
||||
"spec": {
|
||||
"current": {
|
||||
"text": "",
|
||||
"value": ""
|
||||
},
|
||||
"hide": "dontHide",
|
||||
"includeAll": false,
|
||||
"multi": false,
|
||||
"name": "",
|
||||
"options": [],
|
||||
"query": defaultDataQueryKind(),
|
||||
"refresh": "never",
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": "disabled"
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`QueryVariableKind` consists of:
|
||||
|
||||
- kind: "QueryVariable"
|
||||
- spec: [QueryVariableSpec](#queryvariablespec)
|
||||
|
||||
### `QueryVariableSpec`
|
||||
|
||||
The following table explains the usage of the query variable JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | ---------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| current | "Text" and a "value" or [`VariableOption`](#variableoption) |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| refresh | `VariableRefresh`. Options are `never`, `onDashboardLoad`, and `onTimeChanged`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string |
|
||||
| datasource? | [`DataSourceRef`](#datasourceref) |
|
||||
| query | `DataQueryKind`. Consists of:<ul><li>kind: string</li><li>spec: string</li></ul> |
|
||||
| regex | string |
|
||||
| sort | `VariableSort`. Options are:<ul><li>disabled</li><li>alphabeticalAsc</li><li>alphabeticalDesc</li><li>numericalAsc</li><li>numericalDesc</li><li>alphabeticalCaseInsensitiveAsc</li><li>alphabeticalCaseInsensitiveDesc</li><li>naturalAsc</li><li>naturalDesc</li></ul> |
|
||||
| definition? | string |
|
||||
| options | [`VariableOption`](#variableoption) |
|
||||
| multi | bool. Default is `false`. |
|
||||
| includeAll | bool. Default is `false`. |
|
||||
| allValue? | string |
|
||||
| placeholder? | string |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
#### `VariableOption`
|
||||
|
||||
| Name | Usage |
|
||||
| -------- | -------------------------------------------- |
|
||||
| selected | bool. Whether or not the option is selected. |
|
||||
| text | string. Text to be displayed for the option. |
|
||||
| value | string. Value of the option. |
|
||||
|
||||
#### `DataSourceRef`
|
||||
|
||||
| Name | Usage |
|
||||
| ----- | ---------------------------------- |
|
||||
| type? | string. The plugin type-id. |
|
||||
| uid?  | string. The specific data source instance. |
|
||||
|
||||
## `TextVariableKind`
|
||||
|
||||
Following is the JSON for a default text variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "TextVariable",
|
||||
"spec": {
|
||||
"current": {
|
||||
"text": "",
|
||||
"value": ""
|
||||
},
|
||||
"hide": "dontHide",
|
||||
"name": "",
|
||||
"query": "",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`TextVariableKind` consists of:
|
||||
|
||||
- kind: TextVariableKind
|
||||
- spec: [TextVariableSpec](#textvariablespec)
|
||||
|
||||
### `TextVariableSpec`
|
||||
|
||||
The following table explains the usage of the text variable JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| current | "Text" and a "value" or `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| query | string |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string |
|
||||
|
||||
## `ConstantVariableKind`
|
||||
|
||||
Following is the JSON for a default constant variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "ConstantVariable",
|
||||
"spec": {
|
||||
"current": {
|
||||
"text": "",
|
||||
"value": ""
|
||||
},
|
||||
"hide": "hideVariable",
|
||||
"name": "",
|
||||
"query": "",
|
||||
"skipUrlSync": true
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`ConstantVariableKind` consists of:
|
||||
|
||||
- kind: "ConstantVariable"
|
||||
- spec: [ConstantVariableSpec](#constantvariablespec)
|
||||
|
||||
### `ConstantVariableSpec`
|
||||
|
||||
The following table explains the usage of the constant variable JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| query | string |
|
||||
| current | "Text" and a "value" or `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string |
|
||||
|
||||
## `DatasourceVariableKind`
|
||||
|
||||
Following is the JSON for a default data source variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "DatasourceVariable",
|
||||
"spec": {
|
||||
"current": {
|
||||
"text": "",
|
||||
"value": ""
|
||||
},
|
||||
"hide": "dontHide",
|
||||
"includeAll": false,
|
||||
"multi": false,
|
||||
"name": "",
|
||||
"options": [],
|
||||
"pluginId": "",
|
||||
"refresh": "never",
|
||||
"regex": "",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`DatasourceVariableKind` consists of:
|
||||
|
||||
- kind: "DatasourceVariable"
|
||||
- spec: [DatasourceVariableSpec](#datasourcevariablespec)
|
||||
|
||||
### `DatasourceVariableSpec`
|
||||
|
||||
The following table explains the usage of the data source variable JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| pluginId | string |
|
||||
| refresh | `VariableRefresh`. Options are `never`, `onDashboardLoad`, and `onTimeChanged`. |
|
||||
| regex | string |
|
||||
| current | `Text` and a `value` or `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| options | `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| multi | bool. Default is `false`. |
|
||||
| includeAll | bool. Default is `false`. |
|
||||
| allValue? | string |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string |
|
||||
|
||||
## `IntervalVariableKind`
|
||||
|
||||
Following is the JSON for a default interval variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "IntervalVariable",
|
||||
"spec": {
|
||||
"auto": false,
|
||||
"auto_count": 0,
|
||||
"auto_min": "",
|
||||
"current": {
|
||||
"text": "",
|
||||
"value": ""
|
||||
},
|
||||
"hide": "dontHide",
|
||||
"name": "",
|
||||
"options": [],
|
||||
"query": "",
|
||||
"refresh": "never",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`IntervalVariableKind` consists of:
|
||||
|
||||
- kind: "IntervalVariable"
|
||||
- spec: [IntervalVariableSpec](#intervalvariablespec)
|
||||
|
||||
### `IntervalVariableSpec`
|
||||
|
||||
The following table explains the usage of the interval variable JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| query | string |
|
||||
| current | `Text` and a `value` or `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| options | `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| auto | bool. Default is `false`. |
|
||||
| auto_count | integer. Default is `0`. |
|
||||
| refresh | `VariableRefresh`. Options are `never`, `onDashboardLoad`, and `onTimeChanged`. |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false` |
|
||||
| description? | string |
|
||||
|
||||
## `CustomVariableKind`
|
||||
|
||||
Following is the JSON for a default custom variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "CustomVariable",
|
||||
"spec": {
|
||||
"current": defaultVariableOption(),
|
||||
"hide": "dontHide",
|
||||
"includeAll": false,
|
||||
"multi": false,
|
||||
"name": "",
|
||||
"options": [],
|
||||
"query": "",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`CustomVariableKind` consists of:
|
||||
|
||||
- kind: "CustomVariable"
|
||||
- spec: [CustomVariableSpec](#customvariablespec)
|
||||
|
||||
### `CustomVariableSpec`
|
||||
|
||||
The following table explains the usage of the custom variable JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| query | string |
|
||||
| current | `Text` and a `value` or `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| options | `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| multi | bool. Default is `false`. |
|
||||
| includeAll | bool. Default is `false`. |
|
||||
| allValue? | string |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string |
|
||||
|
||||
## `SwitchVariableKind`
|
||||
|
||||
Following is the JSON for a default switch variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "SwitchVariable",
|
||||
"spec": {
|
||||
"current": "false",
|
||||
"enabledValue": "true",
|
||||
"disabledValue": "false",
|
||||
"hide": "dontHide",
|
||||
"name": "",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`SwitchVariableKind` consists of:
|
||||
|
||||
- kind: "SwitchVariable"
|
||||
- spec: [SwitchVariableSpec](#switchvariablespec)
|
||||
|
||||
### `SwitchVariableSpec`
|
||||
|
||||
The following table explains the usage of the switch variable JSON fields:
|
||||
|
||||
<!-- prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| -------------- | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| current | string. Current value of the switch variable (either `enabledValue` or `disabledValue`). |
|
||||
| enabledValue | string. Value when the switch is in the enabled state. |
|
||||
| disabledValue | string. Value when the switch is in the disabled state. |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string |
|
||||
|
||||
<!-- prettier-ignore-end -->
|
||||
|
||||
## `GroupByVariableKind`
|
||||
|
||||
Following is the JSON for a default group by variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "GroupByVariable",
|
||||
"spec": {
|
||||
"current": {
|
||||
"text": [
|
||||
""
|
||||
],
|
||||
"value": [
|
||||
""
|
||||
]
|
||||
},
|
||||
"datasource": {},
|
||||
"hide": "dontHide",
|
||||
"multi": false,
|
||||
"name": "",
|
||||
"options": [],
|
||||
"skipUrlSync": false
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`GroupByVariableKind` consists of:
|
||||
|
||||
- kind: "GroupByVariable"
|
||||
- spec: [GroupByVariableSpec](#groupbyvariablespec)
|
||||
|
||||
### `GroupByVariableSpec`
|
||||
|
||||
The following table explains the usage of the group by variable JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable |
|
||||
| datasource? | `DataSourceRef`. Refer to the [`DataSourceRef` definition](#datasourceref) under `QueryVariableKind`. |
|
||||
| current | `Text` and a `value` or `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| options | `VariableOption`. Refer to the [`VariableOption` definition](#variableoption) under `QueryVariableKind`. |
|
||||
| multi | bool. Default is `false`. |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string. |
|
||||
|
||||
## `AdhocVariableKind`
|
||||
|
||||
Following is the JSON for a default ad hoc variable:
|
||||
|
||||
```json
|
||||
"variables": [
|
||||
{
|
||||
"kind": "AdhocVariable",
|
||||
"spec": {
|
||||
"baseFilters": [],
|
||||
"defaultKeys": [],
|
||||
"filters": [],
|
||||
"hide": "dontHide",
|
||||
"name": "",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
`AdhocVariableKind` consists of:
|
||||
|
||||
- kind: "AdhocVariable"
|
||||
- spec: [AdhocVariableSpec](#adhocvariablespec)
|
||||
|
||||
### `AdhocVariableSpec`
|
||||
|
||||
The following table explains the usage of the ad hoc variable JSON fields:
|
||||
|
||||
| Name | Usage |
|
||||
| ------------ | -------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| name | string. Name of the variable. |
|
||||
| datasource? | `DataSourceRef`. Consists of:<ul><li>type? - string. The plugin type-id.</li><li>uid? - string. The specific data source instance.</li></ul> |
|
||||
| baseFilters  | [AdHocFiltersWithLabels](#adhocfilterswithlabels) |
|
||||
| filters      | [AdHocFiltersWithLabels](#adhocfilterswithlabels) |
|
||||
| defaultKeys | [MetricFindValue](#metricfindvalue) |
|
||||
| label? | string |
|
||||
| hide | `VariableHide`. Options are: `dontHide`, `hideLabel`, and `hideVariable`. |
|
||||
| skipUrlSync | bool. Default is `false`. |
|
||||
| description? | string |
|
||||
|
||||
#### `AdHocFiltersWithLabels`
|
||||
|
||||
The following table explains the usage of the ad hoc variable with labels JSON fields:
|
||||
|
||||
| Name | Type |
|
||||
| ------------ | ------------- |
|
||||
| key | string |
|
||||
| operator | string |
|
||||
| value | string |
|
||||
| values? | `[...string]` |
|
||||
| keyLabel | string |
|
||||
| valueLabels? | `[...string]` |
|
||||
| forceEdit? | bool |
|
||||
|
||||
#### `MetricFindValue`
|
||||
|
||||
The following table explains the usage of the metric find value JSON fields:
|
||||
|
||||
| Name | Type |
|
||||
| ----------- | ---------------- |
|
||||
| text | string |
|
||||
| value? | string or number |
|
||||
| group? | string |
|
||||
| expandable? | bool |
|
||||
@@ -103,11 +103,10 @@ To configure basic settings for the data source, complete the following steps:
|
||||
|
||||
1. Set the data source's basic configuration options:
|
||||
|
||||
| Name | Description |
|
||||
| ------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **Name** | Sets the name you use to refer to the data source in panels and queries. |
|
||||
| **Default** | Sets whether the data source is pre-selected for new panels. |
|
||||
| **Universe Domain** | The universe domain to connect to. For more information, refer to [Documentation on universe domains](https://docs.cloud.google.com/python/docs/reference/monitoring/latest/google.cloud.monitoring_v3.services.service_monitoring_service.ServiceMonitoringServiceAsyncClient#google_cloud_monitoring_v3_services_service_monitoring_service_ServiceMonitoringServiceAsyncClient_universe_domain). Defaults to `googleapis.com`. |
|
||||
| Name | Description |
|
||||
| ----------- | ------------------------------------------------------------------------ |
|
||||
| **Name** | Sets the name you use to refer to the data source in panels and queries. |
|
||||
| **Default** | Sets whether the data source is pre-selected for new panels. |
|
||||
|
||||
### Provision the data source
|
||||
|
||||
@@ -130,7 +129,6 @@ datasources:
|
||||
clientEmail: stackdriver@myproject.iam.gserviceaccount.com
|
||||
authenticationType: jwt
|
||||
defaultProject: my-project-name
|
||||
universeDomain: googleapis.com
|
||||
secureJsonData:
|
||||
privateKey: |
|
||||
-----BEGIN PRIVATE KEY-----
|
||||
@@ -154,7 +152,6 @@ datasources:
|
||||
clientEmail: stackdriver@myproject.iam.gserviceaccount.com
|
||||
authenticationType: jwt
|
||||
defaultProject: my-project-name
|
||||
universeDomain: googleapis.com
|
||||
privateKeyPath: /etc/secrets/gce.pem
|
||||
```
|
||||
|
||||
@@ -169,7 +166,6 @@ datasources:
|
||||
access: proxy
|
||||
jsonData:
|
||||
authenticationType: gce
|
||||
universeDomain: googleapis.com
|
||||
```
|
||||
|
||||
## Import pre-configured dashboards
|
||||
|
||||
@@ -171,3 +171,146 @@ Status Codes:
|
||||
|
||||
**Example response (JSON diff)**:
|
||||
|
||||
```http
|
||||
HTTP/1.1 200 OK
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
|
||||
```
|
||||
|
||||
The response is a textual representation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
|
||||
|
||||
Status Codes:
|
||||
|
||||
- **200** - Ok
|
||||
- **400** - Bad request (invalid JSON sent)
|
||||
- **401** - Unauthorized
|
||||
- **404** - Not found
|
||||
|
||||
**Example response (basic diff)**:
|
||||
|
||||
```http
|
||||
HTTP/1.1 200 OK
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
|
||||
```
|
||||
|
||||
The response here is a summary of the changes, derived from the diff between the two JSON objects.
|
||||
|
||||
Status Codes:
|
||||
|
||||
- **200** - OK
|
||||
- **400** - Bad request (invalid JSON sent)
|
||||
- **401** - Unauthorized
|
||||
- **404** - Not found
|
||||
{
|
||||
"id": 70,
|
||||
"slug": "my-dashboard",
|
||||
"status": "success",
|
||||
"uid": "QA7wKklGz",
|
||||
"url": "/d/QA7wKklGz/my-dashboard",
|
||||
"version": 3
|
||||
}
|
||||
```
|
||||
|
||||
JSON response body schema:
|
||||
|
||||
- **slug** - the URL friendly slug of the dashboard's title
|
||||
- **status** - whether the restoration was successful or not
|
||||
- **version** - the new dashboard version, following the restoration
|
||||
|
||||
Status codes:
|
||||
|
||||
- **200** - OK
|
||||
- **400** - Bad request (specified version has the same content as the current dashboard)
|
||||
- **401** - Unauthorized
|
||||
- **404** - Not found (dashboard not found or dashboard version not found)
|
||||
- **500** - Internal server error (indicates issue retrieving dashboard tags from database)
|
||||
|
||||
**Example error response**
|
||||
|
||||
```http
|
||||
HTTP/1.1 404 Not Found
|
||||
Content-Type: application/json; charset=UTF-8
|
||||
Content-Length: 46
|
||||
|
||||
{
|
||||
"message": "Dashboard version not found"
|
||||
}
|
||||
```
|
||||
|
||||
JSON response body schema:
|
||||
|
||||
- **message** - Message explaining the reason for the request failure.
|
||||
|
||||
## Compare dashboard versions
|
||||
|
||||
`POST /api/dashboards/calculate-diff`
|
||||
|
||||
Compares two dashboard versions by calculating the JSON diff of them.
|
||||
|
||||
**Example request**:
|
||||
|
||||
```http
|
||||
POST /api/dashboards/calculate-diff HTTP/1.1
|
||||
Accept: text/html
|
||||
Content-Type: application/json
|
||||
Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk
|
||||
|
||||
{
|
||||
"base": {
|
||||
"dashboardId": 1,
|
||||
"version": 1
|
||||
},
|
||||
"new": {
|
||||
"dashboardId": 1,
|
||||
"version": 2
|
||||
},
|
||||
"diffType": "json"
|
||||
}
|
||||
```
|
||||
|
||||
JSON body schema:
|
||||
|
||||
- **base** - an object representing the base dashboard version
|
||||
- **new** - an object representing the new dashboard version
|
||||
- **diffType** - the type of diff to return. Can be "json" or "basic".
|
||||
|
||||
**Example response (JSON diff)**:
|
||||
|
||||
```http
|
||||
HTTP/1.1 200 OK
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
|
||||
<p id="l1" class="diff-line diff-json-same">
|
||||
<!-- Diff omitted -->
|
||||
</p>
|
||||
```
|
||||
|
||||
The response is a textual representation of the diff, with the dashboard values being in JSON, similar to the diffs seen on sites like GitHub or GitLab.
|
||||
|
||||
Status Codes:
|
||||
|
||||
- **200** - Ok
|
||||
- **400** - Bad request (invalid JSON sent)
|
||||
- **401** - Unauthorized
|
||||
- **404** - Not found
|
||||
|
||||
**Example response (basic diff)**:
|
||||
|
||||
```http
|
||||
HTTP/1.1 200 OK
|
||||
Content-Type: text/html; charset=UTF-8
|
||||
|
||||
<div class="diff-group">
|
||||
<!-- Diff omitted -->
|
||||
</div>
|
||||
```
|
||||
|
||||
The response here is a summary of the changes, derived from the diff between the two JSON objects.
|
||||
|
||||
Status Codes:
|
||||
|
||||
- **200** - OK
|
||||
- **400** - Bad request (invalid JSON sent)
|
||||
- **401** - Unauthorized
|
||||
- **404** - Not found
|
||||
|
||||
@@ -87,7 +87,6 @@ With a Grafana Enterprise license, you also get access to premium data sources,
|
||||
- [CockroachDB](/grafana/plugins/grafana-cockroachdb-datasource)
|
||||
- [Databricks](/grafana/plugins/grafana-databricks-datasource)
|
||||
- [DataDog](/grafana/plugins/grafana-datadog-datasource)
|
||||
- [IBM Db2](/grafana/plugins/grafana-ibmdb2-datasource)
|
||||
- [Drone](/grafana/plugins/grafana-drone-datasource)
|
||||
- [DynamoDB](/grafana/plugins/grafana-dynamodb-datasource/)
|
||||
- [Dynatrace](/grafana/plugins/grafana-dynatrace-datasource)
|
||||
|
||||
+45
-162
@@ -3,75 +3,45 @@ aliases:
|
||||
- ../../../reference/dashboard/ # /docs/grafana/next/reference/dashboard/
|
||||
- ../../../dashboards/json-model/ # /docs/grafana/next/dashboards/json-model/
|
||||
- ../../../dashboards/build-dashboards/view-dashboard-json-model/ # /docs/grafana/next/dashboards/build-dashboards/view-dashboard-json-model/
|
||||
- ../../../as-code/observability-as-code/schema-v2/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/
|
||||
- ../../../as-code/observability-as-code/schema-v2/annotations-schema/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/annotations-schema/
|
||||
- ../../../as-code/observability-as-code/schema-v2/panel-schema/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/panel-schema/
|
||||
- ../../../as-code/observability-as-code/schema-v2/librarypanel-schema/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/librarypanel-schema/
|
||||
- ../../../as-code/observability-as-code/schema-v2/layout-schema/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/layout-schema/
|
||||
- ../../../as-code/observability-as-code/schema-v2/links-schema/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/links-schema/
|
||||
- ../../../as-code/observability-as-code/schema-v2/timesettings-schema/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/timesettings-schema/
|
||||
- ../../../as-code/observability-as-code/schema-v2/variables-schema/ # /docs/grafana/latest/as-code/observability-as-code/schema-v2/variables-schema/
|
||||
- ../../../observability-as-code/schema-v2/ # /docs/grafana/latest/observability-as-code/schema-v2/
|
||||
- ../../../../next/observability-as-code/schema-v2/annotations-schema/ # /docs/grafana/next/observability-as-code/schema-v2/annotations-schema/
|
||||
- ../../../../next/observability-as-code/schema-v2/panel-schema/ # /docs/grafana/next/observability-as-code/schema-v2/panel-schema/
|
||||
- ../../../../next/observability-as-code/schema-v2/librarypanel-schema/ # /docs/grafana/next/observability-as-code/schema-v2/librarypanel-schema/
|
||||
- ../../../../next/observability-as-code/schema-v2/layout-schema/ # /docs/grafana/next/observability-as-code/schema-v2/layout-schema/
|
||||
- ../../../../next/observability-as-code/schema-v2/links-schema/ # /docs/grafana/next/observability-as-code/schema-v2/links-schema/
|
||||
- ../../../../next/observability-as-code/schema-v2/timesettings-schema/ # /docs/grafana/next/observability-as-code/schema-v2/timesettings-schema/
|
||||
- ../../../../next/observability-as-code/schema-v2/variables-schema/ # /docs/grafana/next/observability-as-code/schema-v2/variables-schema/
|
||||
keywords:
|
||||
- grafana
|
||||
- dashboard
|
||||
- documentation
|
||||
- json
|
||||
- model
|
||||
- schema v2
|
||||
- v1 resource
|
||||
- v2 resource
|
||||
- classic
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
title: JSON model
|
||||
description: View and update your Grafana dashboard JSON object
|
||||
description: View your Grafana dashboard JSON object
|
||||
weight: 700
|
||||
refs:
|
||||
annotations:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/dashboards/build-dashboards/annotate-visualizations/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/visualizations/dashboards/build-dashboards/annotate-visualizations/
|
||||
---
|
||||
|
||||
# Dashboard JSON model
|
||||
|
||||
Grafana dashboards are represented as JSON objects that store metadata, panels, variables, and settings.
|
||||
A dashboard in Grafana is represented by a JSON object, which stores metadata of its dashboard. Dashboard metadata includes dashboard properties, metadata from panels, template variables, panel queries, etc.
|
||||
|
||||
## Different dashboard schema models
|
||||
|
||||
There are currently three dashboard JSON schema models:
|
||||
|
||||
- [Classic](#classic-model) - A non-Kubernetes resource used before the adoption of the Kubernetes API by Grafana in v12.2.0. It's been widely used for exporting, importing, and sharing dashboards in the Grafana dashboards collection at [grafana.com/dashboards](https://grafana.com/grafana/dashboards/).
|
||||
- [V1 Resource](#v1-resource-model) - The Classic dashboard schema formatted as a Kubernetes-style resource. Its `spec` property contains the Classic model of the schema. This is the default format for API communication after Grafana v12.2.0, which enabled the Kubernetes Platform API as default backend for Grafana dashboards. Dashboards created using the Classic model can be exported using either the Classic or the V1 Resource format.
|
||||
- [V2 Resource](#v2-resource-model) - The latest format, supporting new features such as advanced layouts and conditional rendering. It models all dashboard elements as Kubernetes kinds, following Kubernetes conventions for declaring dashboard components. This format is future-proof and represents the evolving standard for dashboards.
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
[Observability as Code](https://grafana.com/docs/grafana/latest/as-code/observability-as-code/) works with all versions of the JSON model, and it's fully compatible with version 2.
|
||||
{{< /admonition >}}
|
||||
|
||||
## Access and update the JSON model (#view-json)
|
||||
|
||||
To access the JSON representation of a dashboard:
|
||||
To view the JSON of a dashboard:
|
||||
|
||||
1. Click **Edit** in the top-right corner of the dashboard.
|
||||
1. Click the gear icon in the right sidebar and click **Settings** in the secondary sidebar.
|
||||
1. Select the **JSON Model** tab.
|
||||
1. Update the JSON structure as needed.
|
||||
1. Click **Save changes**.
|
||||
1. Click **Settings**.
|
||||
1. Go to the **JSON Model** tab.
|
||||
1. When you've finished viewing the JSON, click **Back to dashboard** and **Exit edit**.
|
||||
|
||||
## Classic model
|
||||
## JSON fields
|
||||
|
||||
When you create a new dashboard in self-managed Grafana, a new dashboard JSON object was initialized with the following fields:
|
||||
When a user creates a new dashboard, a new dashboard JSON object is initialized with the following fields:
|
||||
|
||||
{{< admonition type="note" >}}
|
||||
In the following JSON, id is shown as null which is the default value assigned to it until a dashboard is saved.
|
||||
After a dashboard is saved, an integer value is assigned to the `id` field.
|
||||
In the following JSON, id is shown as null which is the default value assigned to it until a dashboard is saved. Once a dashboard is saved, an integer value is assigned to the `id` field.
|
||||
{{< /admonition >}}
|
||||
|
||||
```json
|
||||
@@ -106,30 +76,26 @@ After a dashboard is saved, an integer value is assigned to the `id` field.
|
||||
|
||||
Each field in the dashboard JSON is explained below with its usage:
|
||||
|
||||
<!--prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| ----------------- | ------------------------------------------------------------------------------------------ |
|
||||
| **id** | unique numeric identifier for the dashboard. (generated by the db) |
|
||||
| **uid** | unique dashboard identifier that can be generated by anyone. string (8-40) |
|
||||
| **title** | current title of dashboard |
|
||||
| **tags** | tags associated with dashboard, an array of strings |
|
||||
| **style** | theme of dashboard, i.e. `dark` or `light` |
|
||||
| **timezone** | timezone of dashboard, i.e. `utc` or `browser` |
|
||||
| **editable** | whether a dashboard is editable or not |
|
||||
| Name | Usage |
|
||||
| ----------------- | ----------------------------------------------------------------------------------------------------------------- |
|
||||
| **id** | unique numeric identifier for the dashboard. (generated by the db) |
|
||||
| **uid** | unique dashboard identifier that can be generated by anyone. string (8-40) |
|
||||
| **title** | current title of dashboard |
|
||||
| **tags** | tags associated with dashboard, an array of strings |
|
||||
| **style** | theme of dashboard, i.e. `dark` or `light` |
|
||||
| **timezone** | timezone of dashboard, i.e. `utc` or `browser` |
|
||||
| **editable** | whether a dashboard is editable or not |
|
||||
| **graphTooltip** | 0 for no shared crosshair or tooltip (default), 1 for shared crosshair, 2 for shared crosshair AND shared tooltip |
|
||||
| **time** | time range for dashboard, i.e. last 6 hours, last 7 days, etc |
|
||||
| **timepicker** | timepicker metadata, see [timepicker section](#timepicker) for details |
|
||||
| **templating** | templating metadata, see [templating section](#templating) for details |
|
||||
| **annotations** | annotations metadata, see [annotations](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/dashboards/build-dashboards/annotate-visualizations/) for how to add them |
|
||||
| **refresh** | auto-refresh interval|
|
||||
| **schemaVersion** | version of the JSON schema (integer), incremented each time a Grafana update brings changes to said schema |
|
||||
| **version** | version of the dashboard (integer), incremented each time the dashboard is updated |
|
||||
| **panels** | panels array, see below for detail. |
|
||||
| **time** | time range for dashboard, i.e. last 6 hours, last 7 days, etc |
|
||||
| **timepicker** | timepicker metadata, see [timepicker section](#timepicker) for details |
|
||||
| **templating** | templating metadata, see [templating section](#templating) for details |
|
||||
| **annotations** | annotations metadata, see [annotations](ref:annotations) for how to add them |
|
||||
| **refresh** | auto-refresh interval |
|
||||
| **schemaVersion** | version of the JSON schema (integer), incremented each time a Grafana update brings changes to said schema |
|
||||
| **version** | version of the dashboard (integer), incremented each time the dashboard is updated |
|
||||
| **panels** | panels array, see below for detail. |
|
||||
|
||||
<!--prettier-ignore-end -->
|
||||
|
||||
### Panels
|
||||
## Panels
|
||||
|
||||
Panels are the building blocks of a dashboard. It consists of data source queries, type of graphs, aliases, etc. Panel JSON consists of an array of JSON objects, each representing a different panel. Most of the fields are common for all panels but some fields depend on the panel type. Following is an example of panel JSON of a text panel.
|
||||
|
||||
@@ -202,22 +168,18 @@ The grid has a negative gravity that moves panels up if there is empty space abo
|
||||
|
||||
Usage of the fields is explained below:
|
||||
|
||||
<!--prettier-ignore-start -->
|
||||
|
||||
| Name | Usage |
|
||||
| --------------------- | --------------------------------------------------------- |
|
||||
| **collapse** | whether timepicker is collapsed or not |
|
||||
| **enable** | whether timepicker is enabled or not |
|
||||
| **notice** | |
|
||||
| **now** | |
|
||||
| **hidden** | whether timepicker is hidden or not |
|
||||
| **nowDelay** | override the now time by entering a time delay. Use this option to accommodate known delays in data aggregation to avoid null values. |
|
||||
| **quick_ranges** | custom quick ranges |
|
||||
| **refresh_intervals** | interval options available in the refresh picker dropdown |
|
||||
| **status** | |
|
||||
| **type** | |
|
||||
|
||||
<!--prettier-ignore-end -->
|
||||
| Name | Usage |
|
||||
| --------------------- | ------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| **collapse** | whether timepicker is collapsed or not |
|
||||
| **enable** | whether timepicker is enabled or not |
|
||||
| **notice** | |
|
||||
| **now** | |
|
||||
| **hidden** | whether timepicker is hidden or not |
|
||||
| **nowDelay** | override the now time by entering a time delay. Use this option to accommodate known delays in data aggregation to avoid null values. |
|
||||
| **quick_ranges** | custom quick ranges |
|
||||
| **refresh_intervals** | interval options available in the refresh picker dropdown |
|
||||
| **status** | |
|
||||
| **type** | |
|
||||
|
||||
### templating
|
||||
|
||||
@@ -308,82 +270,3 @@ Usage of the above mentioned fields in the templating section is explained below
|
||||
| **refresh** | configures when to refresh a variable |
|
||||
| **regex** | extracts part of a series name or metric node segment |
|
||||
| **type** | type of variable, i.e. `custom`, `query` or `interval` |
|
||||
|
||||
## V1 Resource model
|
||||
|
||||
The V1 Resource schema model formats the [Classic JSON model](#classic-model) schema as a Kubernetes-style resource.
|
||||
The `spec` property of the schema contains the Classic-style model of the schema.
|
||||
|
||||
Dashboards created using the Classic model can be exported using either this model or the Classic one.
|
||||
|
||||
The following code snippet shows the fields included in the V1 Resource model.
|
||||
|
||||
```json
|
||||
{
|
||||
"apiVersion": "dashboard.grafana.app/v1beta1",
|
||||
"kind": "Dashboard",
|
||||
"metadata": {
|
||||
"name": "isnt5ss",
|
||||
"namespace": "stacks-521104",
|
||||
"uid": "92674c0e-0360-4bb4-99ab-fb150581376d",
|
||||
"resourceVersion": "1764705030717045",
|
||||
"generation": 1,
|
||||
"creationTimestamp": "2025-12-02T19:50:30Z",
|
||||
"labels": {
|
||||
"grafana.app/deprecatedInternalID": "1329"
|
||||
},
|
||||
"annotations": {
|
||||
"grafana.app/createdBy": "user:u000000002",
|
||||
"grafana.app/folder": "",
|
||||
"grafana.app/saved-from-ui": "Grafana Cloud (instant)"
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": {
|
||||
"type": "grafana",
|
||||
"uid": "-- Grafana --"
|
||||
},
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"id": 1329,
|
||||
"links": [],
|
||||
"panels": [],
|
||||
"preload": false,
|
||||
"schemaVersion": 42,
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": []
|
||||
},
|
||||
"time": {
|
||||
"from": "now-6h",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {},
|
||||
"timezone": "Africa/Abidjan",
|
||||
"title": "Graphite suggestions",
|
||||
"uid": "isnt5ss",
|
||||
"version": 1,
|
||||
"weekStart": ""
|
||||
},
|
||||
"status": {}
|
||||
}
|
||||
```
|
||||
|
||||
## V2 Resource model
|
||||
|
||||
{{< docs/public-preview product="Dashboard JSON schema v2" >}}
|
||||
|
||||
For the detailed V2 Resource model schema, refer to the [Swagger documentation](https://play.grafana.org/swagger?api=dashboard.grafana.app-v2beta1).
|
||||
|
||||
@@ -3743,21 +3743,46 @@
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/DateHistogramSettingsEditor.tsx": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/SettingsEditor/TermsSettingsEditor.tsx": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/aggregations.ts": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/BucketAggregationsEditor/state/reducer.ts": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/MetricEditor.tsx": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/SettingsEditor/SettingField.tsx": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 2
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/aggregations.ts": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/components/QueryEditor/MetricAggregationsEditor/state/reducer.ts": {
|
||||
"@typescript-eslint/consistent-type-assertions": {
|
||||
"count": 1
|
||||
}
|
||||
},
|
||||
"public/app/plugins/datasource/elasticsearch/configuration/DataLinks.tsx": {
|
||||
"no-restricted-syntax": {
|
||||
"count": 1
|
||||
|
||||
@@ -82,7 +82,6 @@ module.exports = {
|
||||
// Decoupled plugins run their own tests so ignoring them here.
|
||||
'<rootDir>/public/app/plugins/datasource/azuremonitor',
|
||||
'<rootDir>/public/app/plugins/datasource/cloud-monitoring',
|
||||
'<rootDir>/public/app/plugins/datasource/elasticsearch',
|
||||
'<rootDir>/public/app/plugins/datasource/grafana-postgresql-datasource',
|
||||
'<rootDir>/public/app/plugins/datasource/grafana-pyroscope-datasource',
|
||||
'<rootDir>/public/app/plugins/datasource/grafana-testdata-datasource',
|
||||
|
||||
@@ -727,6 +727,17 @@ const injectedRtkApi = api
|
||||
}),
|
||||
invalidatesTags: ['dashboards', 'permissions'],
|
||||
}),
|
||||
restoreDashboardVersionByUid: build.mutation<
|
||||
RestoreDashboardVersionByUidApiResponse,
|
||||
RestoreDashboardVersionByUidApiArg
|
||||
>({
|
||||
query: (queryArg) => ({
|
||||
url: `/dashboards/uid/${queryArg.uid}/restore`,
|
||||
method: 'POST',
|
||||
body: queryArg.restoreDashboardVersionCommand,
|
||||
}),
|
||||
invalidatesTags: ['dashboards', 'versions'],
|
||||
}),
|
||||
getDashboardVersionsByUid: build.query<GetDashboardVersionsByUidApiResponse, GetDashboardVersionsByUidApiArg>({
|
||||
query: (queryArg) => ({
|
||||
url: `/dashboards/uid/${queryArg.uid}/versions`,
|
||||
@@ -2617,6 +2628,26 @@ export type UpdateDashboardPermissionsByUidApiArg = {
|
||||
uid: string;
|
||||
updateDashboardAclCommand: UpdateDashboardAclCommand;
|
||||
};
|
||||
export type RestoreDashboardVersionByUidApiResponse = /** status 200 (empty) */ {
|
||||
/** FolderUID The unique identifier (uid) of the folder the dashboard belongs to. */
|
||||
folderUid?: string;
|
||||
/** ID The unique identifier (id) of the created/updated dashboard. */
|
||||
id: number;
|
||||
/** Status status of the response. */
|
||||
status: string;
|
||||
/** Slug The slug of the dashboard. */
|
||||
title: string;
|
||||
/** UID The unique identifier (uid) of the created/updated dashboard. */
|
||||
uid: string;
|
||||
/** URL The relative URL for accessing the created/updated dashboard. */
|
||||
url: string;
|
||||
/** Version The version of the dashboard. */
|
||||
version: number;
|
||||
};
|
||||
export type RestoreDashboardVersionByUidApiArg = {
|
||||
uid: string;
|
||||
restoreDashboardVersionCommand: RestoreDashboardVersionCommand;
|
||||
};
|
||||
export type GetDashboardVersionsByUidApiResponse = /** status 200 (empty) */ DashboardVersionResponseMeta;
|
||||
export type GetDashboardVersionsByUidApiArg = {
|
||||
uid: string;
|
||||
@@ -4537,6 +4568,9 @@ export type DashboardAclUpdateItem = {
|
||||
export type UpdateDashboardAclCommand = {
|
||||
items?: DashboardAclUpdateItem[];
|
||||
};
|
||||
export type RestoreDashboardVersionCommand = {
|
||||
version?: number;
|
||||
};
|
||||
export type DashboardVersionMeta = {
|
||||
created?: string;
|
||||
createdBy?: string;
|
||||
@@ -6599,6 +6633,7 @@ export const {
|
||||
useGetDashboardPermissionsListByUidQuery,
|
||||
useLazyGetDashboardPermissionsListByUidQuery,
|
||||
useUpdateDashboardPermissionsByUidMutation,
|
||||
useRestoreDashboardVersionByUidMutation,
|
||||
useGetDashboardVersionsByUidQuery,
|
||||
useLazyGetDashboardVersionsByUidQuery,
|
||||
useGetDashboardVersionByUidQuery,
|
||||
|
||||
+1
-1
@@ -10,7 +10,7 @@
|
||||
|
||||
import * as common from '@grafana/schema';
|
||||
|
||||
export const pluginVersion = "%VERSION%";
|
||||
export const pluginVersion = "12.4.0-pre";
|
||||
|
||||
export type BucketAggregation = (DateHistogram | Histogram | Terms | Filters | GeoHashGrid | Nested);
|
||||
|
||||
|
||||
Generated
-6
@@ -29,14 +29,11 @@ export interface Options extends common.SingleStatBaseOptions {
|
||||
barWidthFactor: number;
|
||||
effects: GaugePanelEffects;
|
||||
endpointMarker?: ('point' | 'glow' | 'none');
|
||||
minVizHeight: number;
|
||||
minVizWidth: number;
|
||||
segmentCount: number;
|
||||
segmentSpacing: number;
|
||||
shape: ('circle' | 'gauge');
|
||||
showThresholdLabels: boolean;
|
||||
showThresholdMarkers: boolean;
|
||||
sizing: common.BarGaugeSizing;
|
||||
sparkline?: boolean;
|
||||
textMode?: ('auto' | 'value_and_name' | 'value' | 'name' | 'none');
|
||||
}
|
||||
@@ -46,14 +43,11 @@ export const defaultOptions: Partial<Options> = {
|
||||
barWidthFactor: 0.5,
|
||||
effects: {},
|
||||
endpointMarker: 'point',
|
||||
minVizHeight: 75,
|
||||
minVizWidth: 75,
|
||||
segmentCount: 1,
|
||||
segmentSpacing: 0.3,
|
||||
shape: 'gauge',
|
||||
showThresholdLabels: false,
|
||||
showThresholdMarkers: true,
|
||||
sizing: common.BarGaugeSizing.Auto,
|
||||
sparkline: true,
|
||||
textMode: 'auto',
|
||||
};
|
||||
|
||||
@@ -795,10 +795,6 @@ func (hs *HTTPServer) GetDashboardVersion(c *contextmodel.ReqContext) response.R
|
||||
// swagger:route POST /dashboards/uid/{uid}/restore dashboards versions restoreDashboardVersionByUID
|
||||
//
|
||||
// Restore a dashboard to a given dashboard version using UID.
|
||||
// This API will be removed when /apis/dashboards.grafana.app/v1 is released.
|
||||
// You can restore a dashboard by reading it from history, then creating it again.
|
||||
//
|
||||
// Deprecated: true
|
||||
//
|
||||
// Responses:
|
||||
// 200: postDashboardResponse
|
||||
|
||||
@@ -13,8 +13,6 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
|
||||
"github.com/open-feature/go-sdk/openfeature"
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
|
||||
@@ -49,7 +47,6 @@ type PluginsService struct {
|
||||
updateStrategy string
|
||||
|
||||
features featuremgmt.FeatureToggles
|
||||
cfg *setting.Cfg
|
||||
}
|
||||
|
||||
func ProvidePluginsService(cfg *setting.Cfg,
|
||||
@@ -92,7 +89,6 @@ func ProvidePluginsService(cfg *setting.Cfg,
|
||||
features: features,
|
||||
updateChecker: updateChecker,
|
||||
updateStrategy: cfg.PluginUpdateStrategy,
|
||||
cfg: cfg,
|
||||
}, nil
|
||||
}
|
||||
|
||||
@@ -140,7 +136,7 @@ func (s *PluginsService) HasUpdate(ctx context.Context, pluginID string) (string
|
||||
// checkAndUpdate checks for updates and applies them if auto-update is enabled.
|
||||
func (s *PluginsService) checkAndUpdate(ctx context.Context) {
|
||||
s.instrumentedCheckForUpdates(ctx)
|
||||
if s.checkFlagPluginsAutoUpdate(ctx) {
|
||||
if openfeature.NewDefaultClient().Boolean(ctx, featuremgmt.FlagPluginsAutoUpdate, false, openfeature.TransactionContext(ctx)) {
|
||||
s.updateAll(ctx)
|
||||
}
|
||||
}
|
||||
@@ -222,17 +218,6 @@ func (s *PluginsService) checkForUpdates(ctx context.Context) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *PluginsService) checkFlagPluginsAutoUpdate(ctx context.Context) bool {
|
||||
ns := request.GetNamespaceMapper(s.cfg)(1)
|
||||
ctx = identity.WithServiceIdentityForSingleNamespaceContext(ctx, ns)
|
||||
flag, err := openfeature.NewDefaultClient().BooleanValueDetails(ctx, featuremgmt.FlagPluginsAutoUpdate, false, openfeature.TransactionContext(ctx))
|
||||
if err != nil {
|
||||
s.log.Error("flag evaluation error", "flag", featuremgmt.FlagPluginsAutoUpdate, "error", err)
|
||||
}
|
||||
|
||||
return flag.Value
|
||||
}
|
||||
|
||||
func (s *PluginsService) canUpdate(ctx context.Context, plugin pluginstore.Plugin, gcomVersion string) bool {
|
||||
if !s.updateChecker.IsUpdatable(ctx, plugin) {
|
||||
return false
|
||||
@@ -242,7 +227,7 @@ func (s *PluginsService) canUpdate(ctx context.Context, plugin pluginstore.Plugi
|
||||
return false
|
||||
}
|
||||
|
||||
if s.checkFlagPluginsAutoUpdate(ctx) {
|
||||
if openfeature.NewDefaultClient().Boolean(ctx, featuremgmt.FlagPluginsAutoUpdate, false, openfeature.TransactionContext(ctx)) {
|
||||
return s.updateChecker.CanUpdate(plugin.ID, plugin.Info.Version, gcomVersion, s.updateStrategy == setting.PluginUpdateStrategyMinor)
|
||||
}
|
||||
|
||||
|
||||
@@ -24,24 +24,6 @@ func TestMain(m *testing.M) {
|
||||
testsuite.Run(m)
|
||||
}
|
||||
|
||||
// mockElasticsearchHandler returns a handler that mocks Elasticsearch endpoints.
|
||||
// It responds to GET / with cluster info (required for datasource initialization)
|
||||
// and returns 401 Unauthorized for all other requests.
|
||||
func mockElasticsearchHandler(onRequest func(r *http.Request)) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
switch {
|
||||
case r.Method == http.MethodGet && r.URL.Path == "/":
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
_, _ = w.Write([]byte(`{"version":{"build_flavor":"default","number":"8.0.0"}}`))
|
||||
default:
|
||||
if onRequest != nil {
|
||||
onRequest(r)
|
||||
}
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestIntegrationElasticsearch(t *testing.T) {
|
||||
testutil.SkipIntegrationTestInShortMode(t)
|
||||
|
||||
@@ -53,8 +35,9 @@ func TestIntegrationElasticsearch(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
var outgoingRequest *http.Request
|
||||
outgoingServer := httptest.NewServer(mockElasticsearchHandler(func(r *http.Request) {
|
||||
outgoingServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
outgoingRequest = r
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
}))
|
||||
t.Cleanup(outgoingServer.Close)
|
||||
|
||||
|
||||
@@ -639,7 +639,7 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"grafanaDependency": "\u003e=11.6.0",
|
||||
"grafanaDependency": "",
|
||||
"grafanaVersion": "*",
|
||||
"plugins": [],
|
||||
"extensions": {
|
||||
|
||||
@@ -92,7 +92,7 @@ func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthReque
|
||||
}, nil
|
||||
}
|
||||
|
||||
url := fmt.Sprintf("%s/v3/projects/%s/metricDescriptors", dsInfo.services[cloudMonitor].url, defaultProject)
|
||||
url := fmt.Sprintf("%v/v3/projects/%v/metricDescriptors", dsInfo.services[cloudMonitor].url, defaultProject)
|
||||
request, err := http.NewRequest(http.MethodGet, url, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -139,7 +139,6 @@ type datasourceInfo struct {
|
||||
defaultProject string
|
||||
clientEmail string
|
||||
tokenUri string
|
||||
universeDomain string
|
||||
services map[string]datasourceService
|
||||
privateKey string
|
||||
usingImpersonation bool
|
||||
@@ -151,7 +150,6 @@ type datasourceJSONData struct {
|
||||
DefaultProject string `json:"defaultProject"`
|
||||
ClientEmail string `json:"clientEmail"`
|
||||
TokenURI string `json:"tokenUri"`
|
||||
UniverseDomain string `json:"universeDomain"`
|
||||
UsingImpersonation bool `json:"usingImpersonation"`
|
||||
ServiceAccountToImpersonate string `json:"serviceAccountToImpersonate"`
|
||||
}
|
||||
@@ -181,7 +179,6 @@ func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.Inst
|
||||
defaultProject: jsonData.DefaultProject,
|
||||
clientEmail: jsonData.ClientEmail,
|
||||
tokenUri: jsonData.TokenURI,
|
||||
universeDomain: jsonData.UniverseDomain,
|
||||
usingImpersonation: jsonData.UsingImpersonation,
|
||||
serviceAccountToImpersonate: jsonData.ServiceAccountToImpersonate,
|
||||
services: map[string]datasourceService{},
|
||||
@@ -197,13 +194,13 @@ func newInstanceSettings(httpClientProvider httpclient.Provider) datasource.Inst
|
||||
return nil, err
|
||||
}
|
||||
|
||||
for name := range routes {
|
||||
for name, info := range routes {
|
||||
client, err := newHTTPClient(dsInfo, opts, &httpClientProvider, name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
dsInfo.services[name] = datasourceService{
|
||||
url: buildURL(name, dsInfo.universeDomain),
|
||||
url: info.url,
|
||||
client: client,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,12 +23,12 @@ type routeInfo struct {
|
||||
var routes = map[string]routeInfo{
|
||||
cloudMonitor: {
|
||||
method: "GET",
|
||||
url: "https://monitoring.",
|
||||
url: "https://monitoring.googleapis.com",
|
||||
scopes: []string{cloudMonitorScope},
|
||||
},
|
||||
resourceManager: {
|
||||
method: "GET",
|
||||
url: "https://cloudresourcemanager.",
|
||||
url: "https://cloudresourcemanager.googleapis.com",
|
||||
scopes: []string{resourceManagerScope},
|
||||
},
|
||||
}
|
||||
@@ -68,13 +68,6 @@ func getMiddleware(model *datasourceInfo, routePath string) (httpclient.Middlewa
|
||||
return tokenprovider.AuthMiddleware(provider), nil
|
||||
}
|
||||
|
||||
func buildURL(route string, universeDomain string) string {
|
||||
if universeDomain == "" {
|
||||
universeDomain = "googleapis.com"
|
||||
}
|
||||
return routes[route].url + universeDomain
|
||||
}
|
||||
|
||||
func newHTTPClient(model *datasourceInfo, opts httpclient.Options, clientProvider *httpclient.Provider, route string) (*http.Client, error) {
|
||||
m, err := getMiddleware(model, route)
|
||||
if err != nil {
|
||||
|
||||
@@ -111,7 +111,7 @@ func Test_setRequestVariables(t *testing.T) {
|
||||
im: &fakeInstance{
|
||||
services: map[string]datasourceService{
|
||||
cloudMonitor: {
|
||||
url: buildURL(cloudMonitor, "googleapis.com"),
|
||||
url: routes[cloudMonitor].url,
|
||||
client: &http.Client{},
|
||||
},
|
||||
},
|
||||
|
||||
@@ -3,8 +3,8 @@ package elasticsearch
|
||||
import (
|
||||
"regexp"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
)
|
||||
|
||||
// addDateHistogramAgg adds a date histogram aggregation to the aggregation builder
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
)
|
||||
|
||||
// Used in logging to mark a stage
|
||||
@@ -34,7 +35,6 @@ type DatasourceInfo struct {
|
||||
Interval string
|
||||
MaxConcurrentShardRequests int64
|
||||
IncludeFrozen bool
|
||||
ClusterInfo ClusterInfo
|
||||
}
|
||||
|
||||
type ConfiguredFields struct {
|
||||
@@ -159,7 +159,7 @@ func (c *baseClientImpl) ExecuteMultisearch(r *MultiSearchRequest) (*MultiSearch
|
||||
resSpan.End()
|
||||
}()
|
||||
|
||||
improvedParsingEnabled := isFeatureEnabled(c.ctx, "elasticsearchImprovedParsing")
|
||||
improvedParsingEnabled := isFeatureEnabled(c.ctx, featuremgmt.FlagElasticsearchImprovedParsing)
|
||||
msr, err := c.parser.parseMultiSearchResponse(res.Body, improvedParsingEnabled)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -197,11 +197,7 @@ func (c *baseClientImpl) createMultiSearchRequests(searchRequests []*SearchReque
|
||||
|
||||
func (c *baseClientImpl) getMultiSearchQueryParameters() string {
|
||||
var qs []string
|
||||
// if the build flavor is not serverless, we can use the max concurrent shard requests
|
||||
// this is because serverless clusters do not support max concurrent shard requests
|
||||
if !c.ds.ClusterInfo.IsServerless() && c.ds.MaxConcurrentShardRequests > 0 {
|
||||
qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", c.ds.MaxConcurrentShardRequests))
|
||||
}
|
||||
qs = append(qs, fmt.Sprintf("max_concurrent_shard_requests=%d", c.ds.MaxConcurrentShardRequests))
|
||||
|
||||
if c.ds.IncludeFrozen {
|
||||
qs = append(qs, "ignore_throttled=false")
|
||||
|
||||
@@ -15,7 +15,7 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
func TestClient_ExecuteMultisearch(t *testing.T) {
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type VersionInfo struct {
|
||||
BuildFlavor string `json:"build_flavor"`
|
||||
}
|
||||
|
||||
// ClusterInfo represents Elasticsearch cluster information returned from the root endpoint.
|
||||
// It is used to determine cluster capabilities and configuration like whether the cluster is serverless.
|
||||
type ClusterInfo struct {
|
||||
Version VersionInfo `json:"version"`
|
||||
}
|
||||
|
||||
const (
|
||||
BuildFlavorServerless = "serverless"
|
||||
)
|
||||
|
||||
// GetClusterInfo fetches cluster information from the Elasticsearch root endpoint.
|
||||
// It returns the cluster build flavor which is used to determine if the cluster is serverless.
|
||||
func GetClusterInfo(httpCli *http.Client, url string) (clusterInfo ClusterInfo, err error) {
|
||||
resp, err := httpCli.Get(url)
|
||||
if err != nil {
|
||||
return ClusterInfo{}, fmt.Errorf("error getting ES cluster info: %w", err)
|
||||
}
|
||||
|
||||
if resp.StatusCode != http.StatusOK {
|
||||
return ClusterInfo{}, fmt.Errorf("unexpected status code %d getting ES cluster info", resp.StatusCode)
|
||||
}
|
||||
|
||||
defer func() {
|
||||
if closeErr := resp.Body.Close(); closeErr != nil && err == nil {
|
||||
err = fmt.Errorf("error closing response body: %w", closeErr)
|
||||
}
|
||||
}()
|
||||
|
||||
err = json.NewDecoder(resp.Body).Decode(&clusterInfo)
|
||||
if err != nil {
|
||||
return ClusterInfo{}, fmt.Errorf("error decoding ES cluster info: %w", err)
|
||||
}
|
||||
|
||||
return clusterInfo, nil
|
||||
}
|
||||
|
||||
func (ci ClusterInfo) IsServerless() bool {
|
||||
return ci.Version.BuildFlavor == BuildFlavorServerless
|
||||
}
|
||||
@@ -1,188 +0,0 @@
|
||||
package es
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestGetClusterInfo(t *testing.T) {
|
||||
t.Run("Should successfully get cluster info", func(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.Header().Set("Content-Type", "application/json")
|
||||
_, err := rw.Write([]byte(`{
|
||||
"name": "test-cluster",
|
||||
"cluster_name": "elasticsearch",
|
||||
"cluster_uuid": "abc123",
|
||||
"version": {
|
||||
"number": "8.0.0",
|
||||
"build_flavor": "default",
|
||||
"build_type": "tar",
|
||||
"build_hash": "abc123",
|
||||
"build_date": "2023-01-01T00:00:00.000Z",
|
||||
"build_snapshot": false,
|
||||
"lucene_version": "9.0.0"
|
||||
}
|
||||
}`))
|
||||
require.NoError(t, err)
|
||||
}))
|
||||
|
||||
t.Cleanup(func() {
|
||||
ts.Close()
|
||||
})
|
||||
|
||||
clusterInfo, err := GetClusterInfo(ts.Client(), ts.URL)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clusterInfo)
|
||||
assert.Equal(t, "default", clusterInfo.Version.BuildFlavor)
|
||||
})
|
||||
|
||||
t.Run("Should successfully get serverless cluster info", func(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.Header().Set("Content-Type", "application/json")
|
||||
_, err := rw.Write([]byte(`{
|
||||
"name": "serverless-cluster",
|
||||
"cluster_name": "elasticsearch",
|
||||
"cluster_uuid": "def456",
|
||||
"version": {
|
||||
"number": "8.11.0",
|
||||
"build_flavor": "serverless",
|
||||
"build_type": "docker",
|
||||
"build_hash": "def456",
|
||||
"build_date": "2023-11-01T00:00:00.000Z",
|
||||
"build_snapshot": false,
|
||||
"lucene_version": "9.8.0"
|
||||
}
|
||||
}`))
|
||||
require.NoError(t, err)
|
||||
}))
|
||||
|
||||
t.Cleanup(func() {
|
||||
ts.Close()
|
||||
})
|
||||
|
||||
clusterInfo, err := GetClusterInfo(ts.Client(), ts.URL)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.NotNil(t, clusterInfo)
|
||||
assert.Equal(t, "serverless", clusterInfo.Version.BuildFlavor)
|
||||
assert.True(t, clusterInfo.IsServerless())
|
||||
})
|
||||
|
||||
t.Run("Should return error when HTTP request fails", func(t *testing.T) {
|
||||
clusterInfo, err := GetClusterInfo(http.DefaultClient, "http://invalid-url-that-does-not-exist.local:9999")
|
||||
|
||||
require.Error(t, err)
|
||||
require.Equal(t, ClusterInfo{}, clusterInfo)
|
||||
assert.Contains(t, err.Error(), "error getting ES cluster info")
|
||||
})
|
||||
|
||||
t.Run("Should return error when response body is invalid JSON", func(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.Header().Set("Content-Type", "application/json")
|
||||
_, err := rw.Write([]byte(`{"invalid json`))
|
||||
require.NoError(t, err)
|
||||
}))
|
||||
|
||||
t.Cleanup(func() {
|
||||
ts.Close()
|
||||
})
|
||||
|
||||
clusterInfo, err := GetClusterInfo(ts.Client(), ts.URL)
|
||||
|
||||
require.Error(t, err)
|
||||
require.Equal(t, ClusterInfo{}, clusterInfo)
|
||||
assert.Contains(t, err.Error(), "error decoding ES cluster info")
|
||||
})
|
||||
|
||||
t.Run("Should handle empty version object", func(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.Header().Set("Content-Type", "application/json")
|
||||
_, err := rw.Write([]byte(`{
|
||||
"name": "test-cluster",
|
||||
"version": {}
|
||||
}`))
|
||||
require.NoError(t, err)
|
||||
}))
|
||||
|
||||
t.Cleanup(func() {
|
||||
ts.Close()
|
||||
})
|
||||
|
||||
clusterInfo, err := GetClusterInfo(ts.Client(), ts.URL)
|
||||
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, ClusterInfo{}, clusterInfo)
|
||||
assert.Equal(t, "", clusterInfo.Version.BuildFlavor)
|
||||
assert.False(t, clusterInfo.IsServerless())
|
||||
})
|
||||
|
||||
t.Run("Should handle HTTP error status codes", func(t *testing.T) {
|
||||
ts := httptest.NewServer(http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
|
||||
rw.WriteHeader(http.StatusUnauthorized)
|
||||
_, err := rw.Write([]byte(`{"error": "Unauthorized"}`))
|
||||
require.NoError(t, err)
|
||||
}))
|
||||
|
||||
t.Cleanup(func() {
|
||||
ts.Close()
|
||||
})
|
||||
|
||||
clusterInfo, err := GetClusterInfo(ts.Client(), ts.URL)
|
||||
|
||||
require.Error(t, err)
|
||||
require.Equal(t, ClusterInfo{}, clusterInfo)
|
||||
assert.Contains(t, err.Error(), "unexpected status code 401 getting ES cluster info")
|
||||
})
|
||||
}
|
||||
|
||||
func TestClusterInfo_IsServerless(t *testing.T) {
|
||||
t.Run("Should return true when build_flavor is serverless", func(t *testing.T) {
|
||||
clusterInfo := ClusterInfo{
|
||||
Version: VersionInfo{
|
||||
BuildFlavor: BuildFlavorServerless,
|
||||
},
|
||||
}
|
||||
|
||||
assert.True(t, clusterInfo.IsServerless())
|
||||
})
|
||||
|
||||
t.Run("Should return false when build_flavor is default", func(t *testing.T) {
|
||||
clusterInfo := ClusterInfo{
|
||||
Version: VersionInfo{
|
||||
BuildFlavor: "default",
|
||||
},
|
||||
}
|
||||
|
||||
assert.False(t, clusterInfo.IsServerless())
|
||||
})
|
||||
|
||||
t.Run("Should return false when build_flavor is empty", func(t *testing.T) {
|
||||
clusterInfo := ClusterInfo{
|
||||
Version: VersionInfo{
|
||||
BuildFlavor: "",
|
||||
},
|
||||
}
|
||||
|
||||
assert.False(t, clusterInfo.IsServerless())
|
||||
})
|
||||
|
||||
t.Run("Should return false when build_flavor is unknown value", func(t *testing.T) {
|
||||
clusterInfo := ClusterInfo{
|
||||
Version: VersionInfo{
|
||||
BuildFlavor: "unknown",
|
||||
},
|
||||
}
|
||||
|
||||
assert.False(t, clusterInfo.IsServerless())
|
||||
})
|
||||
|
||||
t.Run("should return false when cluster info is empty", func(t *testing.T) {
|
||||
clusterInfo := ClusterInfo{}
|
||||
assert.False(t, clusterInfo.IsServerless())
|
||||
})
|
||||
}
|
||||
@@ -8,7 +8,7 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
func TestSearchRequest(t *testing.T) {
|
||||
|
||||
@@ -6,8 +6,8 @@ import (
|
||||
"strconv"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
)
|
||||
|
||||
// processQuery processes a single query and adds it to the multi-search request builder
|
||||
|
||||
@@ -3,7 +3,7 @@ package elasticsearch
|
||||
import (
|
||||
"strconv"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
// setFloatPath converts a string value at the specified path to float64
|
||||
|
||||
@@ -88,14 +88,6 @@ func newInstanceSettings(httpClientProvider *httpclient.Provider) datasource.Ins
|
||||
httpCliOpts.SigV4.Service = "es"
|
||||
}
|
||||
|
||||
apiKeyAuth, ok := jsonData["apiKeyAuth"].(bool)
|
||||
if ok && apiKeyAuth {
|
||||
apiKey := settings.DecryptedSecureJSONData["apiKey"]
|
||||
if apiKey != "" {
|
||||
httpCliOpts.Header.Add("Authorization", "ApiKey "+apiKey)
|
||||
}
|
||||
}
|
||||
|
||||
httpCli, err := httpClientProvider.New(httpCliOpts)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -159,11 +151,6 @@ func newInstanceSettings(httpClientProvider *httpclient.Provider) datasource.Ins
|
||||
includeFrozen = false
|
||||
}
|
||||
|
||||
clusterInfo, err := es.GetClusterInfo(httpCli, settings.URL)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
configuredFields := es.ConfiguredFields{
|
||||
TimeField: timeField,
|
||||
LogLevelField: logLevelField,
|
||||
@@ -179,7 +166,6 @@ func newInstanceSettings(httpClientProvider *httpclient.Provider) datasource.Ins
|
||||
ConfiguredFields: configuredFields,
|
||||
Interval: interval,
|
||||
IncludeFrozen: includeFrozen,
|
||||
ClusterInfo: clusterInfo,
|
||||
}
|
||||
return model, nil
|
||||
}
|
||||
|
||||
@@ -3,8 +3,6 @@ package elasticsearch
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
@@ -20,26 +18,8 @@ type datasourceInfo struct {
|
||||
Interval string `json:"interval"`
|
||||
}
|
||||
|
||||
// mockElasticsearchServer creates a test HTTP server that mocks Elasticsearch cluster info endpoint
|
||||
func mockElasticsearchServer() *httptest.Server {
|
||||
return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusOK)
|
||||
// Return a mock Elasticsearch cluster info response
|
||||
_ = json.NewEncoder(w).Encode(map[string]interface{}{
|
||||
"version": map[string]interface{}{
|
||||
"build_flavor": "serverless",
|
||||
"number": "8.0.0",
|
||||
},
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
func TestNewInstanceSettings(t *testing.T) {
|
||||
t.Run("fields exist", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
MaxConcurrentShardRequests: 5,
|
||||
@@ -48,7 +28,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -58,9 +37,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
|
||||
t.Run("timeField", func(t *testing.T) {
|
||||
t.Run("is nil", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
MaxConcurrentShardRequests: 5,
|
||||
Interval: "Daily",
|
||||
@@ -70,7 +46,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -79,9 +54,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("is empty", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
MaxConcurrentShardRequests: 5,
|
||||
Interval: "Daily",
|
||||
@@ -92,7 +64,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -103,9 +74,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
|
||||
t.Run("maxConcurrentShardRequests", func(t *testing.T) {
|
||||
t.Run("no maxConcurrentShardRequests", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
}
|
||||
@@ -113,7 +81,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -123,9 +90,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("string maxConcurrentShardRequests", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
MaxConcurrentShardRequests: "10",
|
||||
@@ -134,7 +98,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -144,9 +107,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("number maxConcurrentShardRequests", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
MaxConcurrentShardRequests: 10,
|
||||
@@ -155,7 +115,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -165,9 +124,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("zero maxConcurrentShardRequests", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
MaxConcurrentShardRequests: 0,
|
||||
@@ -176,7 +132,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -186,9 +141,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("negative maxConcurrentShardRequests", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
MaxConcurrentShardRequests: -10,
|
||||
@@ -197,7 +149,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -207,9 +158,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("float maxConcurrentShardRequests", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
MaxConcurrentShardRequests: 10.5,
|
||||
@@ -218,7 +166,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
@@ -228,9 +175,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
})
|
||||
|
||||
t.Run("invalid maxConcurrentShardRequests", func(t *testing.T) {
|
||||
server := mockElasticsearchServer()
|
||||
defer server.Close()
|
||||
|
||||
dsInfo := datasourceInfo{
|
||||
TimeField: "@timestamp",
|
||||
MaxConcurrentShardRequests: "invalid",
|
||||
@@ -239,7 +183,6 @@ func TestNewInstanceSettings(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
dsSettings := backend.DataSourceInstanceSettings{
|
||||
URL: server.URL,
|
||||
JSONData: json.RawMessage(settingsJSON),
|
||||
}
|
||||
|
||||
|
||||
@@ -28,6 +28,7 @@ func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthReque
|
||||
Message: "Health check failed: Failed to get data source info",
|
||||
}, nil
|
||||
}
|
||||
|
||||
healthStatusUrl, err := url.Parse(ds.URL)
|
||||
if err != nil {
|
||||
logger.Error("Failed to parse data source URL", "error", err)
|
||||
@@ -37,14 +38,6 @@ func (s *Service) CheckHealth(ctx context.Context, req *backend.CheckHealthReque
|
||||
}, nil
|
||||
}
|
||||
|
||||
// If the cluster is serverless, return a healthy result
|
||||
if ds.ClusterInfo.IsServerless() {
|
||||
return &backend.CheckHealthResult{
|
||||
Status: backend.HealthStatusOk,
|
||||
Message: "Elasticsearch Serverless data source is healthy.",
|
||||
}, nil
|
||||
}
|
||||
|
||||
// check that ES is healthy
|
||||
healthStatusUrl.Path = path.Join(healthStatusUrl.Path, "_cluster/health")
|
||||
healthStatusUrl.RawQuery = "wait_for_status=yellow"
|
||||
|
||||
@@ -9,7 +9,7 @@ import (
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
// metricsResponseProcessor handles processing of metrics query responses
|
||||
|
||||
@@ -4,7 +4,7 @@ import (
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
// Query represents the time series query model of the datasource
|
||||
|
||||
@@ -6,7 +6,7 @@ import (
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
func parseQuery(tsdbQuery []backend.DataQuery, logger log.Logger) ([]*Query, error) {
|
||||
|
||||
@@ -5,7 +5,7 @@ import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
)
|
||||
|
||||
// AggregationParser parses raw Elasticsearch DSL aggregations
|
||||
|
||||
@@ -15,9 +15,9 @@ import (
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/instrumentation"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
)
|
||||
|
||||
const (
|
||||
|
||||
@@ -7,8 +7,8 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/components/simplejson"
|
||||
es "github.com/grafana/grafana/pkg/tsdb/elasticsearch/client"
|
||||
"github.com/grafana/grafana/pkg/tsdb/elasticsearch/simplejson"
|
||||
)
|
||||
|
||||
// flatten flattens multi-level objects to single level objects. It uses dot notation to join keys.
|
||||
|
||||
@@ -1,582 +0,0 @@
|
||||
// Package simplejson provides a wrapper for arbitrary JSON objects that adds methods to access properties.
|
||||
// Use of this package in place of types and the standard library's encoding/json package is strongly discouraged.
|
||||
//
|
||||
// Don't lint for stale code, since it's a copied library and we might as well keep the whole thing.
|
||||
// nolint:unused
|
||||
package simplejson
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"database/sql/driver"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
)
|
||||
|
||||
// returns the current implementation version
|
||||
func Version() string {
|
||||
return "0.5.0"
|
||||
}
|
||||
|
||||
type Json struct {
|
||||
data any
|
||||
}
|
||||
|
||||
func (j *Json) FromDB(data []byte) error {
|
||||
j.data = make(map[string]any)
|
||||
|
||||
dec := json.NewDecoder(bytes.NewBuffer(data))
|
||||
dec.UseNumber()
|
||||
return dec.Decode(&j.data)
|
||||
}
|
||||
|
||||
func (j *Json) ToDB() ([]byte, error) {
|
||||
if j == nil || j.data == nil {
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
return j.Encode()
|
||||
}
|
||||
|
||||
func (j *Json) Scan(val any) error {
|
||||
switch v := val.(type) {
|
||||
case []byte:
|
||||
if len(v) == 0 {
|
||||
return nil
|
||||
}
|
||||
return json.Unmarshal(v, &j)
|
||||
case string:
|
||||
if len(v) == 0 {
|
||||
return nil
|
||||
}
|
||||
return json.Unmarshal([]byte(v), &j)
|
||||
default:
|
||||
return fmt.Errorf("unsupported type: %T", v)
|
||||
}
|
||||
}
|
||||
|
||||
func (j *Json) Value() (driver.Value, error) {
|
||||
return j.ToDB()
|
||||
}
|
||||
|
||||
// DeepCopyInto creates a copy by serializing JSON
|
||||
func (j *Json) DeepCopyInto(out *Json) {
|
||||
b, err := j.Encode()
|
||||
if err == nil {
|
||||
_ = out.UnmarshalJSON(b)
|
||||
}
|
||||
}
|
||||
|
||||
// DeepCopy will make a deep copy of the JSON object
|
||||
func (j *Json) DeepCopy() *Json {
|
||||
if j == nil {
|
||||
return nil
|
||||
}
|
||||
out := new(Json)
|
||||
j.DeepCopyInto(out)
|
||||
return out
|
||||
}
|
||||
|
||||
// NewJson returns a pointer to a new `Json` object
|
||||
// after unmarshaling `body` bytes
|
||||
func NewJson(body []byte) (*Json, error) {
|
||||
j := new(Json)
|
||||
err := j.UnmarshalJSON(body)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return j, nil
|
||||
}
|
||||
|
||||
// MustJson returns a pointer to a new `Json` object, panicking if `body` cannot be parsed.
|
||||
func MustJson(body []byte) *Json {
|
||||
j, err := NewJson(body)
|
||||
|
||||
if err != nil {
|
||||
panic(fmt.Sprintf("could not unmarshal JSON: %q", err))
|
||||
}
|
||||
|
||||
return j
|
||||
}
|
||||
|
||||
// New returns a pointer to a new, empty `Json` object
|
||||
func New() *Json {
|
||||
return &Json{
|
||||
data: make(map[string]any),
|
||||
}
|
||||
}
|
||||
|
||||
// NewFromAny returns a pointer to a new `Json` object with provided data.
|
||||
func NewFromAny(data any) *Json {
|
||||
return &Json{data: data}
|
||||
}
|
||||
|
||||
// Interface returns the underlying data
|
||||
func (j *Json) Interface() any {
|
||||
return j.data
|
||||
}
|
||||
|
||||
// Encode returns its marshaled data as `[]byte`
|
||||
func (j *Json) Encode() ([]byte, error) {
|
||||
return j.MarshalJSON()
|
||||
}
|
||||
|
||||
// EncodePretty returns its marshaled data as `[]byte` with indentation
|
||||
func (j *Json) EncodePretty() ([]byte, error) {
|
||||
return json.MarshalIndent(&j.data, "", " ")
|
||||
}
|
||||
|
||||
// Implements the json.Marshaler interface.
|
||||
func (j *Json) MarshalJSON() ([]byte, error) {
|
||||
return json.Marshal(&j.data)
|
||||
}
|
||||
|
||||
// Set modifies `Json` map by `key` and `value`
|
||||
// Useful for changing single key/value in a `Json` object easily.
|
||||
func (j *Json) Set(key string, val any) {
|
||||
m, err := j.Map()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
m[key] = val
|
||||
}
|
||||
|
||||
// SetPath modifies `Json`, recursively checking/creating map keys for the supplied path,
|
||||
// and then finally writing in the value
|
||||
func (j *Json) SetPath(branch []string, val any) {
|
||||
if len(branch) == 0 {
|
||||
j.data = val
|
||||
return
|
||||
}
|
||||
|
||||
// in order to insert our branch, we need map[string]any
|
||||
if _, ok := (j.data).(map[string]any); !ok {
|
||||
// have to replace with something suitable
|
||||
j.data = make(map[string]any)
|
||||
}
|
||||
curr := j.data.(map[string]any)
|
||||
|
||||
for i := 0; i < len(branch)-1; i++ {
|
||||
b := branch[i]
|
||||
// key exists?
|
||||
if _, ok := curr[b]; !ok {
|
||||
n := make(map[string]any)
|
||||
curr[b] = n
|
||||
curr = n
|
||||
continue
|
||||
}
|
||||
|
||||
// make sure the value is the right sort of thing
|
||||
if _, ok := curr[b].(map[string]any); !ok {
|
||||
// have to replace with something suitable
|
||||
n := make(map[string]any)
|
||||
curr[b] = n
|
||||
}
|
||||
|
||||
curr = curr[b].(map[string]any)
|
||||
}
|
||||
|
||||
// add remaining k/v
|
||||
curr[branch[len(branch)-1]] = val
|
||||
}
|
||||
|
||||
// Del modifies `Json` map by deleting `key` if it is present.
|
||||
func (j *Json) Del(key string) {
|
||||
m, err := j.Map()
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
delete(m, key)
|
||||
}
|
||||
|
||||
// Get returns a pointer to a new `Json` object
|
||||
// for `key` in its `map` representation
|
||||
//
|
||||
// useful for chaining operations (to traverse a nested JSON):
|
||||
//
|
||||
// js.Get("top_level").Get("dict").Get("value").Int()
|
||||
func (j *Json) Get(key string) *Json {
|
||||
m, err := j.Map()
|
||||
if err == nil {
|
||||
if val, ok := m[key]; ok {
|
||||
return &Json{val}
|
||||
}
|
||||
}
|
||||
return &Json{nil}
|
||||
}
|
||||
|
||||
// GetPath searches for the item as specified by the branch
|
||||
// without the need to deep dive using Get()'s.
|
||||
//
|
||||
// js.GetPath("top_level", "dict")
|
||||
func (j *Json) GetPath(branch ...string) *Json {
|
||||
jin := j
|
||||
for _, p := range branch {
|
||||
jin = jin.Get(p)
|
||||
}
|
||||
return jin
|
||||
}
|
||||
|
||||
// GetIndex returns a pointer to a new `Json` object
|
||||
// for `index` in its `array` representation
|
||||
//
|
||||
// this is the analog to Get when accessing elements of
|
||||
// a json array instead of a json object:
|
||||
//
|
||||
// js.Get("top_level").Get("array").GetIndex(1).Get("key").Int()
|
||||
func (j *Json) GetIndex(index int) *Json {
|
||||
a, err := j.Array()
|
||||
if err == nil {
|
||||
if len(a) > index {
|
||||
return &Json{a[index]}
|
||||
}
|
||||
}
|
||||
return &Json{nil}
|
||||
}
|
||||
|
||||
// CheckGetIndex returns a pointer to a new `Json` object
|
||||
// for `index` in its `array` representation, and a `bool`
|
||||
// indicating success or failure
|
||||
//
|
||||
// useful for chained operations when success is important:
|
||||
//
|
||||
// if data, ok := js.Get("top_level").CheckGetIndex(0); ok {
|
||||
// log.Println(data)
|
||||
// }
|
||||
func (j *Json) CheckGetIndex(index int) (*Json, bool) {
|
||||
a, err := j.Array()
|
||||
if err == nil {
|
||||
if len(a) > index {
|
||||
return &Json{a[index]}, true
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// SetIndex modifies `Json` array by `index` and `value`
|
||||
// for `index` in its `array` representation
|
||||
func (j *Json) SetIndex(index int, val any) {
|
||||
a, err := j.Array()
|
||||
if err == nil {
|
||||
if len(a) > index {
|
||||
a[index] = val
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// CheckGet returns a pointer to a new `Json` object and
|
||||
// a `bool` identifying success or failure
|
||||
//
|
||||
// useful for chained operations when success is important:
|
||||
//
|
||||
// if data, ok := js.Get("top_level").CheckGet("inner"); ok {
|
||||
// log.Println(data)
|
||||
// }
|
||||
func (j *Json) CheckGet(key string) (*Json, bool) {
|
||||
m, err := j.Map()
|
||||
if err == nil {
|
||||
if val, ok := m[key]; ok {
|
||||
return &Json{val}, true
|
||||
}
|
||||
}
|
||||
return nil, false
|
||||
}
|
||||
|
||||
// Map type asserts to `map`
|
||||
func (j *Json) Map() (map[string]any, error) {
|
||||
if m, ok := (j.data).(map[string]any); ok {
|
||||
return m, nil
|
||||
}
|
||||
return nil, errors.New("type assertion to map[string]any failed")
|
||||
}
|
||||
|
||||
// Array type asserts to an `array`
|
||||
func (j *Json) Array() ([]any, error) {
|
||||
if a, ok := (j.data).([]any); ok {
|
||||
return a, nil
|
||||
}
|
||||
return nil, errors.New("type assertion to []any failed")
|
||||
}
|
||||
|
||||
// Bool type asserts to `bool`
|
||||
func (j *Json) Bool() (bool, error) {
|
||||
if s, ok := (j.data).(bool); ok {
|
||||
return s, nil
|
||||
}
|
||||
return false, errors.New("type assertion to bool failed")
|
||||
}
|
||||
|
||||
// String type asserts to `string`
|
||||
func (j *Json) String() (string, error) {
|
||||
if s, ok := (j.data).(string); ok {
|
||||
return s, nil
|
||||
}
|
||||
return "", errors.New("type assertion to string failed")
|
||||
}
|
||||
|
||||
// Bytes type asserts to `[]byte`
|
||||
func (j *Json) Bytes() ([]byte, error) {
|
||||
if s, ok := (j.data).(string); ok {
|
||||
return []byte(s), nil
|
||||
}
|
||||
return nil, errors.New("type assertion to []byte failed")
|
||||
}
|
||||
|
||||
// StringArray type asserts to an `array` of `string`
|
||||
func (j *Json) StringArray() ([]string, error) {
|
||||
arr, err := j.Array()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
retArr := make([]string, 0, len(arr))
|
||||
for _, a := range arr {
|
||||
if a == nil {
|
||||
retArr = append(retArr, "")
|
||||
continue
|
||||
}
|
||||
s, ok := a.(string)
|
||||
if !ok {
|
||||
return nil, err
|
||||
}
|
||||
retArr = append(retArr, s)
|
||||
}
|
||||
return retArr, nil
|
||||
}
|
||||
|
||||
// MustArray guarantees the return of a `[]any` (with optional default)
|
||||
//
|
||||
// useful when you want to iterate over array values in a succinct manner:
|
||||
//
|
||||
// for i, v := range js.Get("results").MustArray() {
|
||||
// fmt.Println(i, v)
|
||||
// }
|
||||
func (j *Json) MustArray(args ...[]any) []any {
|
||||
var def []any
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustArray() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
a, err := j.Array()
|
||||
if err == nil {
|
||||
return a
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustMap guarantees the return of a `map[string]any` (with optional default)
|
||||
//
|
||||
// useful when you want to iterate over map values in a succinct manner:
|
||||
//
|
||||
// for k, v := range js.Get("dictionary").MustMap() {
|
||||
// fmt.Println(k, v)
|
||||
// }
|
||||
func (j *Json) MustMap(args ...map[string]any) map[string]any {
|
||||
var def map[string]any
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustMap() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
a, err := j.Map()
|
||||
if err == nil {
|
||||
return a
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustString guarantees the return of a `string` (with optional default)
|
||||
//
|
||||
// useful when you explicitly want a `string` in a single value return context:
|
||||
//
|
||||
// myFunc(js.Get("param1").MustString(), js.Get("optional_param").MustString("my_default"))
|
||||
func (j *Json) MustString(args ...string) string {
|
||||
var def string
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustString() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
s, err := j.String()
|
||||
if err == nil {
|
||||
return s
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustStringArray guarantees the return of a `[]string` (with optional default)
|
||||
//
|
||||
// useful when you want to iterate over array values in a succinct manner:
|
||||
//
|
||||
// for i, s := range js.Get("results").MustStringArray() {
|
||||
// fmt.Println(i, s)
|
||||
// }
|
||||
func (j *Json) MustStringArray(args ...[]string) []string {
|
||||
var def []string
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustStringArray() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
a, err := j.StringArray()
|
||||
if err == nil {
|
||||
return a
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustInt guarantees the return of an `int` (with optional default)
|
||||
//
|
||||
// useful when you explicitly want an `int` in a single value return context:
|
||||
//
|
||||
// myFunc(js.Get("param1").MustInt(), js.Get("optional_param").MustInt(5150))
|
||||
func (j *Json) MustInt(args ...int) int {
|
||||
var def int
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustInt() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
i, err := j.Int()
|
||||
if err == nil {
|
||||
return i
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustFloat64 guarantees the return of a `float64` (with optional default)
|
||||
//
|
||||
// useful when you explicitly want a `float64` in a single value return context:
|
||||
//
|
||||
// myFunc(js.Get("param1").MustFloat64(), js.Get("optional_param").MustFloat64(5.150))
|
||||
func (j *Json) MustFloat64(args ...float64) float64 {
|
||||
var def float64
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustFloat64() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
f, err := j.Float64()
|
||||
if err == nil {
|
||||
return f
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustBool guarantees the return of a `bool` (with optional default)
|
||||
//
|
||||
// useful when you explicitly want a `bool` in a single value return context:
|
||||
//
|
||||
// myFunc(js.Get("param1").MustBool(), js.Get("optional_param").MustBool(true))
|
||||
func (j *Json) MustBool(args ...bool) bool {
|
||||
var def bool
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustBool() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
b, err := j.Bool()
|
||||
if err == nil {
|
||||
return b
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustInt64 guarantees the return of an `int64` (with optional default)
|
||||
//
|
||||
// useful when you explicitly want an `int64` in a single value return context:
|
||||
//
|
||||
// myFunc(js.Get("param1").MustInt64(), js.Get("optional_param").MustInt64(5150))
|
||||
func (j *Json) MustInt64(args ...int64) int64 {
|
||||
var def int64
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustInt64() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
i, err := j.Int64()
|
||||
if err == nil {
|
||||
return i
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MustUInt64 guarantees the return of an `uint64` (with optional default)
|
||||
//
|
||||
// useful when you explicitly want an `uint64` in a single value return context:
|
||||
//
|
||||
// myFunc(js.Get("param1").MustUint64(), js.Get("optional_param").MustUint64(5150))
|
||||
func (j *Json) MustUint64(args ...uint64) uint64 {
|
||||
var def uint64
|
||||
|
||||
switch len(args) {
|
||||
case 0:
|
||||
case 1:
|
||||
def = args[0]
|
||||
default:
|
||||
log.Panicf("MustUint64() received too many arguments %d", len(args))
|
||||
}
|
||||
|
||||
i, err := j.Uint64()
|
||||
if err == nil {
|
||||
return i
|
||||
}
|
||||
|
||||
return def
|
||||
}
|
||||
|
||||
// MarshalYAML implements yaml.Marshaller.
|
||||
func (j *Json) MarshalYAML() (any, error) {
|
||||
return j.data, nil
|
||||
}
|
||||
|
||||
// UnmarshalYAML implements yaml.Unmarshaller.
|
||||
func (j *Json) UnmarshalYAML(unmarshal func(any) error) error {
|
||||
var data any
|
||||
if err := unmarshal(&data); err != nil {
|
||||
return err
|
||||
}
|
||||
j.data = data
|
||||
return nil
|
||||
}
|
||||
@@ -1,90 +0,0 @@
|
||||
package simplejson
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"reflect"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// Implements the json.Unmarshaler interface.
|
||||
func (j *Json) UnmarshalJSON(p []byte) error {
|
||||
dec := json.NewDecoder(bytes.NewBuffer(p))
|
||||
dec.UseNumber()
|
||||
return dec.Decode(&j.data)
|
||||
}
|
||||
|
||||
// NewFromReader returns a *Json by decoding from an io.Reader
|
||||
func NewFromReader(r io.Reader) (*Json, error) {
|
||||
j := new(Json)
|
||||
dec := json.NewDecoder(r)
|
||||
dec.UseNumber()
|
||||
err := dec.Decode(&j.data)
|
||||
return j, err
|
||||
}
|
||||
|
||||
// Float64 coerces into a float64
|
||||
func (j *Json) Float64() (float64, error) {
|
||||
switch n := j.data.(type) {
|
||||
case json.Number:
|
||||
return n.Float64()
|
||||
case float32, float64:
|
||||
return reflect.ValueOf(j.data).Float(), nil
|
||||
case int, int8, int16, int32, int64:
|
||||
return float64(reflect.ValueOf(j.data).Int()), nil
|
||||
case uint, uint8, uint16, uint32, uint64:
|
||||
return float64(reflect.ValueOf(j.data).Uint()), nil
|
||||
}
|
||||
return 0, errors.New("invalid value type")
|
||||
}
|
||||
|
||||
// Int coerces into an int
|
||||
func (j *Json) Int() (int, error) {
|
||||
switch n := j.data.(type) {
|
||||
case json.Number:
|
||||
i, err := n.Int64()
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
return int(i), nil
|
||||
case float32, float64:
|
||||
return int(reflect.ValueOf(j.data).Float()), nil
|
||||
case int, int8, int16, int32, int64:
|
||||
return int(reflect.ValueOf(j.data).Int()), nil
|
||||
case uint, uint8, uint16, uint32, uint64:
|
||||
return int(reflect.ValueOf(j.data).Uint()), nil
|
||||
}
|
||||
return 0, errors.New("invalid value type")
|
||||
}
|
||||
|
||||
// Int64 coerces into an int64
|
||||
func (j *Json) Int64() (int64, error) {
|
||||
switch n := j.data.(type) {
|
||||
case json.Number:
|
||||
return n.Int64()
|
||||
case float32, float64:
|
||||
return int64(reflect.ValueOf(j.data).Float()), nil
|
||||
case int, int8, int16, int32, int64:
|
||||
return reflect.ValueOf(j.data).Int(), nil
|
||||
case uint, uint8, uint16, uint32, uint64:
|
||||
return int64(reflect.ValueOf(j.data).Uint()), nil
|
||||
}
|
||||
return 0, errors.New("invalid value type")
|
||||
}
|
||||
|
||||
// Uint64 coerces into an uint64
|
||||
func (j *Json) Uint64() (uint64, error) {
|
||||
switch n := j.data.(type) {
|
||||
case json.Number:
|
||||
return strconv.ParseUint(n.String(), 10, 64)
|
||||
case float32, float64:
|
||||
return uint64(reflect.ValueOf(j.data).Float()), nil
|
||||
case int, int8, int16, int32, int64:
|
||||
return uint64(reflect.ValueOf(j.data).Int()), nil
|
||||
case uint, uint8, uint16, uint32, uint64:
|
||||
return reflect.ValueOf(j.data).Uint(), nil
|
||||
}
|
||||
return 0, errors.New("invalid value type")
|
||||
}
|
||||
@@ -1,274 +0,0 @@
|
||||
package simplejson
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestSimplejson(t *testing.T) {
|
||||
var ok bool
|
||||
var err error
|
||||
|
||||
js, err := NewJson([]byte(`{
|
||||
"test": {
|
||||
"string_array": ["asdf", "ghjk", "zxcv"],
|
||||
"string_array_null": ["abc", null, "efg"],
|
||||
"array": [1, "2", 3],
|
||||
"arraywithsubs": [{"subkeyone": 1},
|
||||
{"subkeytwo": 2, "subkeythree": 3}],
|
||||
"int": 10,
|
||||
"float": 5.150,
|
||||
"string": "simplejson",
|
||||
"bool": true,
|
||||
"sub_obj": {"a": 1}
|
||||
}
|
||||
}`))
|
||||
|
||||
assert.NotEqual(t, nil, js)
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
_, ok = js.CheckGet("test")
|
||||
assert.Equal(t, true, ok)
|
||||
|
||||
_, ok = js.CheckGet("missing_key")
|
||||
assert.Equal(t, false, ok)
|
||||
|
||||
aws := js.Get("test").Get("arraywithsubs")
|
||||
assert.NotEqual(t, nil, aws)
|
||||
var awsval int
|
||||
awsval, _ = aws.GetIndex(0).Get("subkeyone").Int()
|
||||
assert.Equal(t, 1, awsval)
|
||||
awsval, _ = aws.GetIndex(1).Get("subkeytwo").Int()
|
||||
assert.Equal(t, 2, awsval)
|
||||
awsval, _ = aws.GetIndex(1).Get("subkeythree").Int()
|
||||
assert.Equal(t, 3, awsval)
|
||||
|
||||
arr := js.Get("test").Get("array")
|
||||
assert.NotEqual(t, nil, arr)
|
||||
val, ok := arr.CheckGetIndex(0)
|
||||
assert.Equal(t, ok, true)
|
||||
valInt, _ := val.Int()
|
||||
assert.Equal(t, valInt, 1)
|
||||
val, ok = arr.CheckGetIndex(1)
|
||||
assert.Equal(t, ok, true)
|
||||
valStr, _ := val.String()
|
||||
assert.Equal(t, valStr, "2")
|
||||
val, ok = arr.CheckGetIndex(2)
|
||||
assert.Equal(t, ok, true)
|
||||
valInt, _ = val.Int()
|
||||
assert.Equal(t, valInt, 3)
|
||||
_, ok = arr.CheckGetIndex(3)
|
||||
assert.Equal(t, ok, false)
|
||||
|
||||
i, _ := js.Get("test").Get("int").Int()
|
||||
assert.Equal(t, 10, i)
|
||||
|
||||
f, _ := js.Get("test").Get("float").Float64()
|
||||
assert.Equal(t, 5.150, f)
|
||||
|
||||
s, _ := js.Get("test").Get("string").String()
|
||||
assert.Equal(t, "simplejson", s)
|
||||
|
||||
b, _ := js.Get("test").Get("bool").Bool()
|
||||
assert.Equal(t, true, b)
|
||||
|
||||
mi := js.Get("test").Get("int").MustInt()
|
||||
assert.Equal(t, 10, mi)
|
||||
|
||||
mi2 := js.Get("test").Get("missing_int").MustInt(5150)
|
||||
assert.Equal(t, 5150, mi2)
|
||||
|
||||
ms := js.Get("test").Get("string").MustString()
|
||||
assert.Equal(t, "simplejson", ms)
|
||||
|
||||
ms2 := js.Get("test").Get("missing_string").MustString("fyea")
|
||||
assert.Equal(t, "fyea", ms2)
|
||||
|
||||
ma2 := js.Get("test").Get("missing_array").MustArray([]any{"1", 2, "3"})
|
||||
assert.Equal(t, ma2, []any{"1", 2, "3"})
|
||||
|
||||
msa := js.Get("test").Get("string_array").MustStringArray()
|
||||
assert.Equal(t, msa[0], "asdf")
|
||||
assert.Equal(t, msa[1], "ghjk")
|
||||
assert.Equal(t, msa[2], "zxcv")
|
||||
|
||||
msa2 := js.Get("test").Get("string_array").MustStringArray([]string{"1", "2", "3"})
|
||||
assert.Equal(t, msa2[0], "asdf")
|
||||
assert.Equal(t, msa2[1], "ghjk")
|
||||
assert.Equal(t, msa2[2], "zxcv")
|
||||
|
||||
msa3 := js.Get("test").Get("missing_array").MustStringArray([]string{"1", "2", "3"})
|
||||
assert.Equal(t, msa3, []string{"1", "2", "3"})
|
||||
|
||||
mm2 := js.Get("test").Get("missing_map").MustMap(map[string]any{"found": false})
|
||||
assert.Equal(t, mm2, map[string]any{"found": false})
|
||||
|
||||
strs, err := js.Get("test").Get("string_array").StringArray()
|
||||
assert.Equal(t, err, nil)
|
||||
assert.Equal(t, strs[0], "asdf")
|
||||
assert.Equal(t, strs[1], "ghjk")
|
||||
assert.Equal(t, strs[2], "zxcv")
|
||||
|
||||
strs2, err := js.Get("test").Get("string_array_null").StringArray()
|
||||
assert.Equal(t, err, nil)
|
||||
assert.Equal(t, strs2[0], "abc")
|
||||
assert.Equal(t, strs2[1], "")
|
||||
assert.Equal(t, strs2[2], "efg")
|
||||
|
||||
gp, _ := js.GetPath("test", "string").String()
|
||||
assert.Equal(t, "simplejson", gp)
|
||||
|
||||
gp2, _ := js.GetPath("test", "int").Int()
|
||||
assert.Equal(t, 10, gp2)
|
||||
|
||||
assert.Equal(t, js.Get("test").Get("bool").MustBool(), true)
|
||||
|
||||
js.Set("float2", 300.0)
|
||||
assert.Equal(t, js.Get("float2").MustFloat64(), 300.0)
|
||||
|
||||
js.Set("test2", "setTest")
|
||||
assert.Equal(t, "setTest", js.Get("test2").MustString())
|
||||
|
||||
js.Del("test2")
|
||||
assert.NotEqual(t, "setTest", js.Get("test2").MustString())
|
||||
|
||||
js.Get("test").Get("sub_obj").Set("a", 2)
|
||||
assert.Equal(t, 2, js.Get("test").Get("sub_obj").Get("a").MustInt())
|
||||
|
||||
js.GetPath("test", "sub_obj").Set("a", 3)
|
||||
assert.Equal(t, 3, js.GetPath("test", "sub_obj", "a").MustInt())
|
||||
}
|
||||
|
||||
func TestStdlibInterfaces(t *testing.T) {
|
||||
val := new(struct {
|
||||
Name string `json:"name"`
|
||||
Params *Json `json:"params"`
|
||||
})
|
||||
val2 := new(struct {
|
||||
Name string `json:"name"`
|
||||
Params *Json `json:"params"`
|
||||
})
|
||||
|
||||
raw := `{"name":"myobject","params":{"string":"simplejson"}}`
|
||||
|
||||
assert.Equal(t, nil, json.Unmarshal([]byte(raw), val))
|
||||
|
||||
assert.Equal(t, "myobject", val.Name)
|
||||
assert.NotEqual(t, nil, val.Params.data)
|
||||
s, _ := val.Params.Get("string").String()
|
||||
assert.Equal(t, "simplejson", s)
|
||||
|
||||
p, err := json.Marshal(val)
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, nil, json.Unmarshal(p, val2))
|
||||
assert.Equal(t, val, val2) // stable
|
||||
}
|
||||
|
||||
func TestSet(t *testing.T) {
|
||||
js, err := NewJson([]byte(`{}`))
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
js.Set("baz", "bing")
|
||||
|
||||
s, err := js.GetPath("baz").String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "bing", s)
|
||||
}
|
||||
|
||||
func TestReplace(t *testing.T) {
|
||||
js, err := NewJson([]byte(`{}`))
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
err = js.UnmarshalJSON([]byte(`{"baz":"bing"}`))
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
s, err := js.GetPath("baz").String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "bing", s)
|
||||
}
|
||||
|
||||
func TestSetPath(t *testing.T) {
|
||||
js, err := NewJson([]byte(`{}`))
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
js.SetPath([]string{"foo", "bar"}, "baz")
|
||||
|
||||
s, err := js.GetPath("foo", "bar").String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "baz", s)
|
||||
}
|
||||
|
||||
func TestSetPathNoPath(t *testing.T) {
|
||||
js, err := NewJson([]byte(`{"some":"data","some_number":1.0,"some_bool":false}`))
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
f := js.GetPath("some_number").MustFloat64(99.0)
|
||||
assert.Equal(t, f, 1.0)
|
||||
|
||||
js.SetPath([]string{}, map[string]any{"foo": "bar"})
|
||||
|
||||
s, err := js.GetPath("foo").String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "bar", s)
|
||||
|
||||
f = js.GetPath("some_number").MustFloat64(99.0)
|
||||
assert.Equal(t, f, 99.0)
|
||||
}
|
||||
|
||||
func TestPathWillAugmentExisting(t *testing.T) {
|
||||
js, err := NewJson([]byte(`{"this":{"a":"aa","b":"bb","c":"cc"}}`))
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
js.SetPath([]string{"this", "d"}, "dd")
|
||||
|
||||
cases := []struct {
|
||||
path []string
|
||||
outcome string
|
||||
}{
|
||||
{
|
||||
path: []string{"this", "a"},
|
||||
outcome: "aa",
|
||||
},
|
||||
{
|
||||
path: []string{"this", "b"},
|
||||
outcome: "bb",
|
||||
},
|
||||
{
|
||||
path: []string{"this", "c"},
|
||||
outcome: "cc",
|
||||
},
|
||||
{
|
||||
path: []string{"this", "d"},
|
||||
outcome: "dd",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range cases {
|
||||
s, err := js.GetPath(tc.path...).String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, tc.outcome, s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestPathWillOverwriteExisting(t *testing.T) {
|
||||
// notice how "a" is 0.1 - but then we'll try to set at path a, foo
|
||||
js, err := NewJson([]byte(`{"this":{"a":0.1,"b":"bb","c":"cc"}}`))
|
||||
assert.Equal(t, nil, err)
|
||||
|
||||
js.SetPath([]string{"this", "a", "foo"}, "bar")
|
||||
|
||||
s, err := js.GetPath("this", "a", "foo").String()
|
||||
assert.Equal(t, nil, err)
|
||||
assert.Equal(t, "bar", s)
|
||||
}
|
||||
|
||||
func TestMustJson(t *testing.T) {
|
||||
js := MustJson([]byte(`{"foo": "bar"}`))
|
||||
assert.Equal(t, js.Get("foo").MustString(), "bar")
|
||||
|
||||
assert.PanicsWithValue(t, "could not unmarshal JSON: \"unexpected EOF\"", func() {
|
||||
MustJson([]byte(`{`))
|
||||
})
|
||||
}
|
||||
@@ -1,48 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/instancemgmt"
|
||||
elasticsearch "github.com/grafana/grafana/pkg/tsdb/elasticsearch"
|
||||
)
|
||||
|
||||
var (
|
||||
_ backend.QueryDataHandler = (*Datasource)(nil)
|
||||
_ backend.CheckHealthHandler = (*Datasource)(nil)
|
||||
_ backend.CallResourceHandler = (*Datasource)(nil)
|
||||
)
|
||||
|
||||
func NewDatasource(context.Context, backend.DataSourceInstanceSettings) (instancemgmt.Instance, error) {
|
||||
return &Datasource{
|
||||
Service: elasticsearch.ProvideService(httpclient.NewProvider()),
|
||||
}, nil
|
||||
}
|
||||
|
||||
type Datasource struct {
|
||||
Service *elasticsearch.Service
|
||||
}
|
||||
|
||||
func contextualMiddlewares(ctx context.Context) context.Context {
|
||||
cfg := backend.GrafanaConfigFromContext(ctx)
|
||||
responseLimitMiddleware := httpclient.ResponseLimitMiddleware(cfg.ResponseLimit())
|
||||
ctx = httpclient.WithContextualMiddleware(ctx, responseLimitMiddleware)
|
||||
return ctx
|
||||
}
|
||||
|
||||
func (d *Datasource) QueryData(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
|
||||
ctx = contextualMiddlewares(ctx)
|
||||
return d.Service.QueryData(ctx, req)
|
||||
}
|
||||
|
||||
func (d *Datasource) CallResource(ctx context.Context, req *backend.CallResourceRequest, sender backend.CallResourceResponseSender) error {
|
||||
ctx = contextualMiddlewares(ctx)
|
||||
return d.Service.CallResource(ctx, req, sender)
|
||||
}
|
||||
|
||||
func (d *Datasource) CheckHealth(ctx context.Context, req *backend.CheckHealthRequest) (*backend.CheckHealthResult, error) {
|
||||
ctx = contextualMiddlewares(ctx)
|
||||
return d.Service.CheckHealth(ctx, req)
|
||||
}
|
||||
@@ -1,23 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/datasource"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/log"
|
||||
)
|
||||
|
||||
func main() {
|
||||
// Start listening to requests sent from Grafana. This call is blocking so
|
||||
// it won't finish until Grafana shuts down the process or the plugin choose
|
||||
// to exit by itself using os.Exit. Manage automatically manages life cycle
|
||||
// of datasource instances. It accepts datasource instance factory as first
|
||||
// argument. This factory will be automatically called on incoming request
|
||||
// from Grafana to create different instances of SampleDatasource (per datasource
|
||||
// ID). When datasource configuration changed Dispose method will be called and
|
||||
// new datasource instance created using NewSampleDatasource factory.
|
||||
if err := datasource.Manage("elasticsearch", NewDatasource, datasource.ManageOpts{}); err != nil {
|
||||
log.DefaultLogger.Error(err.Error())
|
||||
os.Exit(1)
|
||||
}
|
||||
}
|
||||
Generated
-2
@@ -4024,14 +4024,12 @@
|
||||
},
|
||||
"/dashboards/uid/{uid}/restore": {
|
||||
"post": {
|
||||
"description": "This API will be removed when /apis/dashboards.grafana.app/v1 is released.\nYou can restore a dashboard by reading it from history, then creating it again.",
|
||||
"tags": [
|
||||
"dashboards",
|
||||
"versions"
|
||||
],
|
||||
"summary": "Restore a dashboard to a given dashboard version using UID.",
|
||||
"operationId": "restoreDashboardVersionByUID",
|
||||
"deprecated": true,
|
||||
"parameters": [
|
||||
{
|
||||
"name": "Body",
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { DataQuery } from '@grafana/data';
|
||||
import { createMonitoringLogger, MonitoringLogger } from '@grafana/runtime';
|
||||
import store from 'app/core/store';
|
||||
import { RichHistoryQuery } from 'app/types/explore';
|
||||
|
||||
@@ -26,8 +27,15 @@ jest.mock('@grafana/runtime', () => ({
|
||||
...jest.requireActual('@grafana/runtime'),
|
||||
getBackendSrv: () => backendSrv,
|
||||
getDataSourceSrv: () => dsMock,
|
||||
createMonitoringLogger: jest.fn().mockReturnValue({ logWarning: jest.fn() }),
|
||||
}));
|
||||
|
||||
// logger is created at import so we cannot initialize inside the test
|
||||
const loggerIndex = (createMonitoringLogger as jest.Mock).mock.calls.findIndex(
|
||||
(args) => args[0] === 'features.query-history.local-storage'
|
||||
);
|
||||
const loggerMock: MonitoringLogger = (createMonitoringLogger as jest.Mock).mock.results[loggerIndex]?.value;
|
||||
|
||||
interface MockQuery extends DataQuery {
|
||||
query: string;
|
||||
}
|
||||
@@ -75,6 +83,8 @@ describe('RichHistoryLocalStorage', () => {
|
||||
jest.setSystemTime(now);
|
||||
storage = new RichHistoryLocalStorage();
|
||||
await storage.deleteAll();
|
||||
|
||||
(loggerMock.logWarning as jest.Mock).mockReset();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
@@ -223,6 +233,90 @@ describe('RichHistoryLocalStorage', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('quota errors and retries', () => {
|
||||
it('should rotate and retry saving when QuotaExceededError occurs once', async () => {
|
||||
const initial = [
|
||||
{ ts: Date.now(), starred: true, comment: 'starred1', queries: [], datasourceName: 'name-of-dev-test' },
|
||||
{ ts: Date.now(), starred: false, comment: 'notStarred1', queries: [], datasourceName: 'name-of-dev-test' },
|
||||
{ ts: Date.now(), starred: true, comment: 'starred2', queries: [], datasourceName: 'name-of-dev-test' },
|
||||
];
|
||||
store.setObject(key, initial);
|
||||
|
||||
// Spy on setObject to throw once with QuotaExceededError, then call through
|
||||
const originalSetObject = store.setObject.bind(store);
|
||||
jest
|
||||
.spyOn(store, 'setObject')
|
||||
// first attempt throws and errors
|
||||
.mockImplementationOnce(() => {
|
||||
const err = new Error('quota hit');
|
||||
err.name = 'QuotaExceededError';
|
||||
throw err;
|
||||
})
|
||||
// second attempt calls through
|
||||
.mockImplementation((k: string, value: unknown) => {
|
||||
return originalSetObject(k, value);
|
||||
});
|
||||
|
||||
const result = await storage.addToRichHistory({
|
||||
starred: false,
|
||||
datasourceUid: 'dev-test',
|
||||
datasourceName: 'name-of-dev-test',
|
||||
comment: 'new',
|
||||
queries: [{ refId: 'A' }],
|
||||
});
|
||||
expect(result.richHistoryQuery).toBeDefined();
|
||||
|
||||
// After one failure, rotation removes one unstarred entry
|
||||
const saved = store.getObject<RichHistoryQuery[]>(key)!;
|
||||
expect(saved).toHaveLength(3);
|
||||
expect(saved).toMatchObject([
|
||||
expect.objectContaining({ comment: 'new' }),
|
||||
expect.objectContaining({ comment: 'starred1' }),
|
||||
expect.objectContaining({ comment: 'starred2' }),
|
||||
]);
|
||||
|
||||
// Ensure logger was called for the failure, with expected flags
|
||||
expect(loggerMock.logWarning).toHaveBeenCalled();
|
||||
const [message, payload] = (loggerMock.logWarning as jest.Mock).mock.calls[0];
|
||||
expect(message).toContain('Failed to save rich history to local storage');
|
||||
expect(payload.saveRetriesLeft).toBe('3');
|
||||
expect(payload.quotaExceededError).toBe('true');
|
||||
});
|
||||
|
||||
it('should throw StorageFull when QuotaExceededError persists for all retries and track attempts', async () => {
|
||||
store.setObject(key, [
|
||||
{ ts: Date.now(), starred: false, comment: 'notStarred1', queries: [], datasourceName: 'name-of-dev-test' },
|
||||
]);
|
||||
|
||||
const setSpy = jest.spyOn(store, 'setObject').mockImplementation(() => {
|
||||
const err = new Error('quota still hit');
|
||||
err.name = 'QuotaExceededError';
|
||||
throw err;
|
||||
});
|
||||
|
||||
await expect(
|
||||
storage.addToRichHistory({
|
||||
starred: false,
|
||||
datasourceUid: 'dev-test',
|
||||
datasourceName: 'name-of-dev-test',
|
||||
comment: 'new',
|
||||
queries: [{ refId: 'B' }],
|
||||
})
|
||||
).rejects.toMatchObject({ name: 'StorageFull' });
|
||||
|
||||
// 4 failed tracking attempts (1 save + 3 retries) should be logged (for each failed try)
|
||||
expect(loggerMock.logWarning).toHaveBeenCalledTimes(4);
|
||||
const calls = (loggerMock.logWarning as jest.Mock).mock.calls;
|
||||
expect(calls[0][0]).toContain('Failed to save rich history to local storage');
|
||||
expect(calls[0][1].saveRetriesLeft).toBe('3');
|
||||
expect(calls[1][1].saveRetriesLeft).toBe('2');
|
||||
expect(calls[2][1].saveRetriesLeft).toBe('1');
|
||||
expect(calls[3][1].saveRetriesLeft).toBe('0');
|
||||
|
||||
setSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
|
||||
describe('migration', () => {
|
||||
afterEach(() => {
|
||||
storage.deleteAll();
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import { find, isEqual, omit } from 'lodash';
|
||||
|
||||
import { DataQuery, SelectableValue } from '@grafana/data';
|
||||
import { RichHistorySearchFilters, RichHistorySettings } from 'app/core/utils/richHistoryTypes';
|
||||
import { createMonitoringLogger } from '@grafana/runtime';
|
||||
import { RichHistorySearchFilters, RichHistorySettings, SortOrder } from 'app/core/utils/richHistoryTypes';
|
||||
import { RichHistoryQuery } from 'app/types/explore';
|
||||
|
||||
import store from '../store';
|
||||
@@ -26,10 +27,18 @@ export type RichHistoryLocalStorageDTO = {
|
||||
queries: DataQuery[];
|
||||
};
|
||||
|
||||
const logger = createMonitoringLogger('features.query-history.local-storage');
|
||||
|
||||
/**
|
||||
* Local storage implementation for Rich History. It keeps all entries in browser's local storage.
|
||||
*/
|
||||
export default class RichHistoryLocalStorage implements RichHistoryStorage {
|
||||
public static getLocalStorageUsageInBytes(): number {
|
||||
const richHistory: RichHistoryLocalStorageDTO[] = store.get(RICH_HISTORY_KEY) || '';
|
||||
// each character is 2 bytes
|
||||
return richHistory.length * 2;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return history entries based on provided filters, perform migration and clean up entries not matching retention policy.
|
||||
*/
|
||||
@@ -77,21 +86,43 @@ export default class RichHistoryLocalStorage implements RichHistoryStorage {
|
||||
throw error;
|
||||
}
|
||||
|
||||
const { queriesToKeep, limitExceeded } = checkLimits(currentRichHistoryDTOs);
|
||||
let { queriesToKeep, limitExceeded } = cleanUpUnstarredQuery(currentRichHistoryDTOs, MAX_HISTORY_ITEMS);
|
||||
|
||||
const updatedHistory: RichHistoryLocalStorageDTO[] = [newRichHistoryQueryDTO, ...queriesToKeep];
|
||||
let updatedHistory: RichHistoryLocalStorageDTO[] = [newRichHistoryQueryDTO, ...queriesToKeep];
|
||||
|
||||
try {
|
||||
store.setObject(RICH_HISTORY_KEY, updatedHistory);
|
||||
} catch (error) {
|
||||
if (error instanceof Error && error.name === 'QuotaExceededError') {
|
||||
throwError(RichHistoryServiceError.StorageFull, `Saving rich history failed: ${error.message}`);
|
||||
} else {
|
||||
throw error;
|
||||
let saveRetriesLeft = 3;
|
||||
let saved = false;
|
||||
|
||||
while (!saved && saveRetriesLeft >= 0) {
|
||||
try {
|
||||
store.setObject(RICH_HISTORY_KEY, updatedHistory);
|
||||
saved = true;
|
||||
} catch (error) {
|
||||
await this.trackLocalStorageUsage('Failed to save rich history to local storage', {
|
||||
saveRetriesLeft: saveRetriesLeft.toString(),
|
||||
quotaExceededError: error instanceof Error && error.name === 'QuotaExceededError' ? 'true' : 'false',
|
||||
errorMessage: error instanceof Error ? error?.message : 'unknown',
|
||||
});
|
||||
|
||||
if (saveRetriesLeft >= 1) {
|
||||
saveRetriesLeft--;
|
||||
const { queriesToKeep: newQueriesToKeep } = cleanUpUnstarredQuery(queriesToKeep, queriesToKeep.length - 1);
|
||||
updatedHistory = [newRichHistoryQueryDTO, ...newQueriesToKeep];
|
||||
queriesToKeep = newQueriesToKeep;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (error instanceof Error && error.name === 'QuotaExceededError') {
|
||||
throwError(RichHistoryServiceError.StorageFull, `Saving rich history failed: ${error.message}`);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (limitExceeded) {
|
||||
await this.trackLocalStorageUsage('Rich history query limit exceeded.');
|
||||
|
||||
return {
|
||||
warning: {
|
||||
type: RichHistoryStorageWarning.LimitExceeded,
|
||||
@@ -148,6 +179,33 @@ export default class RichHistoryLocalStorage implements RichHistoryStorage {
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
private async trackLocalStorageUsage(message: string, additionalInfo?: Record<string, string>) {
|
||||
const allQueriesCount =
|
||||
(
|
||||
await this.getRichHistory({
|
||||
search: '',
|
||||
sortOrder: SortOrder.Ascending,
|
||||
datasourceFilters: [],
|
||||
starred: false,
|
||||
})
|
||||
).total || -1;
|
||||
|
||||
const allQueriesSizeInBytes = RichHistoryLocalStorage.getLocalStorageUsageInBytes();
|
||||
|
||||
const totalLocalStorageSize = calculateTotalLocalStorageSize();
|
||||
|
||||
const localStats = {
|
||||
totalLocalStorageSize: totalLocalStorageSize?.toString(),
|
||||
allQueriesSizeInBytes: allQueriesSizeInBytes?.toString(),
|
||||
allQueriesCount: allQueriesCount?.toString(),
|
||||
};
|
||||
|
||||
logger.logWarning(message, {
|
||||
...localStats,
|
||||
...additionalInfo,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function updateRichHistory(
|
||||
@@ -185,17 +243,20 @@ function cleanUp(richHistory: RichHistoryLocalStorageDTO[]): RichHistoryLocalSto
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensures the entry can be added. Throws an error if current limit has been hit.
|
||||
* Ensures the entry can be added.
|
||||
* Returns queries that should be saved back giving space for one extra query.
|
||||
*/
|
||||
export function checkLimits(queriesToKeep: RichHistoryLocalStorageDTO[]): {
|
||||
export function cleanUpUnstarredQuery(
|
||||
queriesToKeep: RichHistoryLocalStorageDTO[],
|
||||
max: number
|
||||
): {
|
||||
queriesToKeep: RichHistoryLocalStorageDTO[];
|
||||
limitExceeded: boolean;
|
||||
} {
|
||||
// remove oldest non-starred items to give space for the recent query
|
||||
let limitExceeded = false;
|
||||
let current = queriesToKeep.length - 1;
|
||||
while (current >= 0 && queriesToKeep.length >= MAX_HISTORY_ITEMS) {
|
||||
while (current >= 0 && queriesToKeep.length >= max) {
|
||||
if (!queriesToKeep[current].starred) {
|
||||
queriesToKeep.splice(current, 1);
|
||||
limitExceeded = true;
|
||||
@@ -247,3 +308,26 @@ function throwError(name: string, message: string) {
|
||||
error.name = name;
|
||||
throw error;
|
||||
}
|
||||
|
||||
function calculateTotalLocalStorageSize() {
|
||||
try {
|
||||
let total = 0;
|
||||
|
||||
// eslint-disable-next-line
|
||||
const ls = window.localStorage;
|
||||
|
||||
for (let i = 0; i < ls.length; i++) {
|
||||
const key = ls.key(i);
|
||||
if (key) {
|
||||
const value = ls.getItem(key);
|
||||
if (value) {
|
||||
total += key.length + value.length;
|
||||
}
|
||||
}
|
||||
}
|
||||
// each character is 2 bytes
|
||||
return total * 2;
|
||||
} catch (e) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -28,6 +28,7 @@ export type RichHistorySearchFilters = {
|
||||
// so the resulting timerange from this will be [now - from, now - to].
|
||||
from?: number;
|
||||
to?: number;
|
||||
// true if only starred entries should be returned, false if ALL entries should be returned,
|
||||
starred: boolean;
|
||||
page?: number;
|
||||
};
|
||||
|
||||
@@ -1,67 +0,0 @@
|
||||
import { render, screen } from 'test/test-utils';
|
||||
|
||||
import { KnownProvenance } from '../types/knownProvenance';
|
||||
|
||||
import { ProvisioningBadge } from './Provisioning';
|
||||
|
||||
describe('ProvisioningBadge', () => {
|
||||
describe('when the provenance is file', () => {
|
||||
it('should render the badge with the correct text', () => {
|
||||
render(<ProvisioningBadge provenance={KnownProvenance.File} />);
|
||||
|
||||
expect(screen.getByText('Provisioned')).toBeInTheDocument();
|
||||
expect(screen.queryByText('Imported')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render correct tooltip text', async () => {
|
||||
const { user } = render(<ProvisioningBadge tooltip provenance={KnownProvenance.File} />);
|
||||
|
||||
const badge = screen.getByText('Provisioned');
|
||||
await user.hover(badge);
|
||||
|
||||
expect(
|
||||
screen.getByText('This resource has been provisioned via file and cannot be edited through the UI')
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when the provenance is ConvertedPrometheus', () => {
|
||||
it('should render the badge with the correct text', () => {
|
||||
render(<ProvisioningBadge provenance={KnownProvenance.ConvertedPrometheus} />);
|
||||
|
||||
expect(screen.getByText('Imported')).toBeInTheDocument();
|
||||
expect(screen.queryByText('Provisioned')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render correct tooltip text', async () => {
|
||||
const { user } = render(<ProvisioningBadge tooltip provenance={KnownProvenance.ConvertedPrometheus} />);
|
||||
|
||||
const badge = screen.getByText('Imported');
|
||||
await user.hover(badge);
|
||||
|
||||
expect(
|
||||
screen.getByText('This resource has been provisioned via Prometheus/Mimir and cannot be edited through the UI')
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('when the provenance is API', () => {
|
||||
it('should render the badge with the correct text', () => {
|
||||
render(<ProvisioningBadge provenance={KnownProvenance.API} />);
|
||||
|
||||
expect(screen.getByText('Provisioned')).toBeInTheDocument();
|
||||
expect(screen.queryByText('Imported')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should render correct tooltip text', async () => {
|
||||
const { user } = render(<ProvisioningBadge tooltip provenance={KnownProvenance.API} />);
|
||||
|
||||
const badge = screen.getByText('Provisioned');
|
||||
await user.hover(badge);
|
||||
|
||||
expect(
|
||||
screen.getByText('This resource has been provisioned via api and cannot be edited through the UI')
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -3,8 +3,6 @@ import { ComponentPropsWithoutRef } from 'react';
|
||||
import { Trans, t } from '@grafana/i18n';
|
||||
import { Alert, Badge, Tooltip } from '@grafana/ui';
|
||||
|
||||
import { KnownProvenance } from '../types/knownProvenance';
|
||||
|
||||
export enum ProvisionedResource {
|
||||
ContactPoint = 'contact point',
|
||||
Template = 'template',
|
||||
@@ -66,17 +64,11 @@ export const ProvisioningBadge = ({
|
||||
*/
|
||||
provenance?: string;
|
||||
}) => {
|
||||
const isConvertedPrometheus = provenance === KnownProvenance.ConvertedPrometheus;
|
||||
const badgeText = isConvertedPrometheus
|
||||
? t('alerting.provisioning-badge.badge.text-converted-prometheus', 'Imported')
|
||||
: t('alerting.provisioning-badge.badge.text-provisioned', 'Provisioned');
|
||||
const badgeColor = isConvertedPrometheus ? 'blue' : 'purple';
|
||||
const badge = <Badge text={badgeText} color={badgeColor} />;
|
||||
const badge = <Badge text={t('alerting.provisioning-badge.badge.text-provisioned', 'Provisioned')} color="purple" />;
|
||||
|
||||
if (tooltip) {
|
||||
const provenanceText = isConvertedPrometheus ? 'Prometheus/Mimir' : provenance;
|
||||
const provenanceTooltip = (
|
||||
<Trans i18nKey="alerting.provisioning.badge-tooltip-provenance" values={{ provenance: provenanceText }}>
|
||||
<Trans i18nKey="alerting.provisioning.badge-tooltip-provenance" values={{ provenance }}>
|
||||
This resource has been provisioned via {{ provenance }} and cannot be edited through the UI
|
||||
</Trans>
|
||||
);
|
||||
|
||||
-60
@@ -1,60 +0,0 @@
|
||||
import { render, screen } from 'test/test-utils';
|
||||
|
||||
import { AccessControlAction } from 'app/types/accessControl';
|
||||
|
||||
import { setupMswServer } from '../../mockApi';
|
||||
import { grantUserPermissions } from '../../mocks';
|
||||
import { AlertmanagerProvider } from '../../state/AlertmanagerContext';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
|
||||
import { ContactPointHeader } from './ContactPointHeader';
|
||||
import { ContactPointWithMetadata } from './utils';
|
||||
|
||||
setupMswServer();
|
||||
|
||||
const renderWithProvider = (component: React.ReactElement, alertmanagerSourceName?: string) => {
|
||||
return render(
|
||||
<AlertmanagerProvider accessType="notification" alertmanagerSourceName={alertmanagerSourceName}>
|
||||
{component}
|
||||
</AlertmanagerProvider>
|
||||
);
|
||||
};
|
||||
|
||||
describe('ContactPointHeader', () => {
|
||||
beforeEach(() => {
|
||||
grantUserPermissions([
|
||||
AccessControlAction.AlertingNotificationsRead,
|
||||
AccessControlAction.AlertingNotificationsWrite,
|
||||
]);
|
||||
});
|
||||
|
||||
const mockContactPoint: ContactPointWithMetadata = {
|
||||
id: 'test-contact-point',
|
||||
name: 'Test Contact Point',
|
||||
provenance: KnownProvenance.API,
|
||||
policies: [],
|
||||
grafana_managed_receiver_configs: [],
|
||||
};
|
||||
|
||||
it('shows Provisioned badge when contact point has file provenance via K8s annotations', () => {
|
||||
const contactPointWithFile = {
|
||||
...mockContactPoint,
|
||||
provenance: KnownProvenance.File,
|
||||
};
|
||||
|
||||
renderWithProvider(<ContactPointHeader contactPoint={contactPointWithFile} onDelete={jest.fn()} />);
|
||||
|
||||
expect(screen.getByText('Provisioned')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows correct badge when contact point has converted_prometheus provenance', () => {
|
||||
const contactPointWithConvertedPrometheus = {
|
||||
...mockContactPoint,
|
||||
provenance: KnownProvenance.ConvertedPrometheus,
|
||||
};
|
||||
|
||||
renderWithProvider(<ContactPointHeader contactPoint={contactPointWithConvertedPrometheus} onDelete={jest.fn()} />);
|
||||
|
||||
expect(screen.getByText('Imported')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
+8
-9
@@ -13,7 +13,6 @@ import {
|
||||
canDeleteEntity,
|
||||
canEditEntity,
|
||||
getAnnotation,
|
||||
isProvisionedResource,
|
||||
shouldUseK8sApi,
|
||||
} from 'app/features/alerting/unified/utils/k8s/utils';
|
||||
|
||||
@@ -32,15 +31,13 @@ interface ContactPointHeaderProps {
|
||||
}
|
||||
|
||||
export const ContactPointHeader = ({ contactPoint, onDelete }: ContactPointHeaderProps) => {
|
||||
const { name, id, provenance, policies = [] } = contactPoint;
|
||||
const { name, id, provisioned, policies = [] } = contactPoint;
|
||||
const styles = useStyles2(getStyles);
|
||||
const [showPermissionsDrawer, setShowPermissionsDrawer] = useState(false);
|
||||
const { selectedAlertmanager } = useAlertmanager();
|
||||
|
||||
const usingK8sApi = shouldUseK8sApi(selectedAlertmanager!);
|
||||
|
||||
const isProvisioned = isProvisionedResource(provenance);
|
||||
|
||||
const [exportSupported, exportAllowed] = useAlertmanagerAbility(AlertmanagerAction.ExportContactPoint);
|
||||
const [editSupported, editAllowed] = useAlertmanagerAbility(AlertmanagerAction.UpdateContactPoint);
|
||||
const [deleteSupported, deleteAllowed] = useAlertmanagerAbility(AlertmanagerAction.UpdateContactPoint);
|
||||
@@ -73,14 +70,14 @@ export const ContactPointHeader = ({ contactPoint, onDelete }: ContactPointHeade
|
||||
/** Does the current user have permissions to edit the contact point? */
|
||||
const hasAbilityToEdit = usingK8sApi ? canEditEntity(contactPoint) : editAllowed;
|
||||
/** Can the contact point actually be edited via the UI? */
|
||||
const contactPointIsEditable = !isProvisioned;
|
||||
const contactPointIsEditable = !provisioned;
|
||||
/** Given the alertmanager, the user's permissions, and the state of the contact point - can it actually be edited? */
|
||||
const canEdit = editSupported && hasAbilityToEdit && contactPointIsEditable;
|
||||
|
||||
/** Does the current user have permissions to delete the contact point? */
|
||||
const hasAbilityToDelete = usingK8sApi ? canDeleteEntity(contactPoint) : deleteAllowed;
|
||||
/** Can the contact point actually be deleted, regardless of permissions? i.e. ensuring it isn't provisioned and isn't referenced elsewhere */
|
||||
const contactPointIsDeleteable = !isProvisioned && !numberOfPoliciesPreventingDeletion && !numberOfRules;
|
||||
const contactPointIsDeleteable = !provisioned && !numberOfPoliciesPreventingDeletion && !numberOfRules;
|
||||
/** Given the alertmanager, the user's permissions, and the state of the contact point - can it actually be deleted? */
|
||||
const canBeDeleted = deleteSupported && hasAbilityToDelete && contactPointIsDeleteable;
|
||||
|
||||
@@ -133,7 +130,7 @@ export const ContactPointHeader = ({ contactPoint, onDelete }: ContactPointHeade
|
||||
|
||||
const reasonsDeleteIsDisabled = [
|
||||
!hasAbilityToDelete ? cannotDeleteNoPermissions : '',
|
||||
isProvisioned ? cannotDeleteProvisioned : '',
|
||||
provisioned ? cannotDeleteProvisioned : '',
|
||||
numberOfPoliciesPreventingDeletion > 0 ? cannotDeletePolicies : '',
|
||||
numberOfRules ? cannotDeleteRules : '',
|
||||
].filter(Boolean);
|
||||
@@ -212,13 +209,15 @@ export const ContactPointHeader = ({ contactPoint, onDelete }: ContactPointHeade
|
||||
{referencedByRulesText}
|
||||
</TextLink>
|
||||
)}
|
||||
{isProvisioned && <ProvisioningBadge tooltip provenance={provenance} />}
|
||||
{provisioned && (
|
||||
<ProvisioningBadge tooltip provenance={getAnnotation(contactPoint, K8sAnnotations.Provenance)} />
|
||||
)}
|
||||
{!isReferencedByAnything && <UnusedContactPointBadge />}
|
||||
<Spacer />
|
||||
<LinkButton
|
||||
tooltipPlacement="top"
|
||||
tooltip={
|
||||
isProvisioned
|
||||
provisioned
|
||||
? t(
|
||||
'alerting.contact-point-header.tooltip-provisioned-contact-points',
|
||||
'Provisioned contact points cannot be edited in the UI'
|
||||
|
||||
+1
-4
@@ -13,7 +13,6 @@ import { setupMswServer } from '../../mockApi';
|
||||
import { grantUserPermissions, mockDataSource } from '../../mocks';
|
||||
import { AlertmanagerProvider } from '../../state/AlertmanagerContext';
|
||||
import { setupDataSources } from '../../testSetup/datasources';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { DataSourceType, GRAFANA_RULES_SOURCE_NAME } from '../../utils/datasource';
|
||||
|
||||
import { ContactPoint } from './ContactPoint';
|
||||
@@ -306,9 +305,7 @@ describe('contact points', () => {
|
||||
});
|
||||
|
||||
it('should disable buttons when provisioned', async () => {
|
||||
const { user } = renderWithProvider(
|
||||
<ContactPoint contactPoint={{ ...basicContactPoint, provenance: KnownProvenance.File }} />
|
||||
);
|
||||
const { user } = renderWithProvider(<ContactPoint contactPoint={{ ...basicContactPoint, provisioned: true }} />);
|
||||
|
||||
expect(screen.getByText(/provisioned/i)).toBeInTheDocument();
|
||||
|
||||
|
||||
+10
-10
@@ -50,7 +50,7 @@ exports[`useContactPoints should return contact points with status 1`] = `
|
||||
},
|
||||
},
|
||||
],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -93,7 +93,7 @@ exports[`useContactPoints should return contact points with status 1`] = `
|
||||
},
|
||||
"name": "lotsa-emails",
|
||||
"policies": [],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -129,7 +129,7 @@ exports[`useContactPoints should return contact points with status 1`] = `
|
||||
},
|
||||
"name": "OnCall Conctact point",
|
||||
"policies": [],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -178,7 +178,7 @@ exports[`useContactPoints should return contact points with status 1`] = `
|
||||
},
|
||||
},
|
||||
],
|
||||
"provenance": "api",
|
||||
"provisioned": true,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -243,7 +243,7 @@ exports[`useContactPoints should return contact points with status 1`] = `
|
||||
},
|
||||
"name": "Slack with multiple channels",
|
||||
"policies": [],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
],
|
||||
"error": undefined,
|
||||
@@ -301,7 +301,7 @@ exports[`useContactPoints when having oncall plugin installed and no alert manag
|
||||
},
|
||||
},
|
||||
],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -344,7 +344,7 @@ exports[`useContactPoints when having oncall plugin installed and no alert manag
|
||||
},
|
||||
"name": "lotsa-emails",
|
||||
"policies": [],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -383,7 +383,7 @@ exports[`useContactPoints when having oncall plugin installed and no alert manag
|
||||
},
|
||||
"name": "OnCall Conctact point",
|
||||
"policies": [],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -432,7 +432,7 @@ exports[`useContactPoints when having oncall plugin installed and no alert manag
|
||||
},
|
||||
},
|
||||
],
|
||||
"provenance": "api",
|
||||
"provisioned": true,
|
||||
},
|
||||
{
|
||||
"grafana_managed_receiver_configs": [
|
||||
@@ -497,7 +497,7 @@ exports[`useContactPoints when having oncall plugin installed and no alert manag
|
||||
},
|
||||
"name": "Slack with multiple channels",
|
||||
"policies": [],
|
||||
"provenance": undefined,
|
||||
"provisioned": false,
|
||||
},
|
||||
],
|
||||
"error": undefined,
|
||||
|
||||
-234
@@ -6,13 +6,10 @@ import { disablePlugin } from 'app/features/alerting/unified/mocks/server/config
|
||||
import { setOnCallIntegrations } from 'app/features/alerting/unified/mocks/server/handlers/plugins/configure-plugins';
|
||||
import { SupportedPlugin } from 'app/features/alerting/unified/types/pluginBridges';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import { AlertManagerCortexConfig } from 'app/plugins/datasource/alertmanager/types';
|
||||
import { AccessControlAction } from 'app/types/accessControl';
|
||||
|
||||
import { setupMswServer } from '../../mockApi';
|
||||
import { grantUserPermissions } from '../../mocks';
|
||||
import { setAlertmanagerConfig } from '../../mocks/server/entities/alertmanagers';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
|
||||
import { useContactPointsWithStatus } from './useContactPoints';
|
||||
|
||||
@@ -72,235 +69,4 @@ describe('useContactPoints', () => {
|
||||
expect(snapshot).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Provenance handling', () => {
|
||||
it('should extract provenance when provenance is "api"', async () => {
|
||||
// Set up alertmanager config with a receiver that has API provenance
|
||||
const config: AlertManagerCortexConfig = {
|
||||
template_files: {},
|
||||
alertmanager_config: {
|
||||
receivers: [
|
||||
{
|
||||
name: 'api-provenance-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid-1',
|
||||
name: 'api-provenance-contact-point',
|
||||
type: 'email',
|
||||
disableResolveMessage: false,
|
||||
settings: {
|
||||
addresses: 'test@example.com',
|
||||
},
|
||||
secureFields: {},
|
||||
provenance: 'api', // This will be used by the K8s mock handler
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
setAlertmanagerConfig(GRAFANA_RULES_SOURCE_NAME, config);
|
||||
|
||||
const { result } = renderHook(
|
||||
() =>
|
||||
useContactPointsWithStatus({
|
||||
alertmanager: GRAFANA_RULES_SOURCE_NAME,
|
||||
fetchPolicies: false,
|
||||
fetchStatuses: false,
|
||||
}),
|
||||
{ wrapper }
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
const contactPoint = result.current.contactPoints?.find((cp) => cp.name === 'api-provenance-contact-point');
|
||||
expect(contactPoint).toBeDefined();
|
||||
expect(contactPoint?.provenance).toBe(KnownProvenance.API);
|
||||
});
|
||||
|
||||
it('should extract provenance when provenance is "file"', async () => {
|
||||
const config: AlertManagerCortexConfig = {
|
||||
template_files: {},
|
||||
alertmanager_config: {
|
||||
receivers: [
|
||||
{
|
||||
name: 'file-provenance-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid-2',
|
||||
name: 'file-provenance-contact-point',
|
||||
type: 'email',
|
||||
disableResolveMessage: false,
|
||||
settings: {
|
||||
addresses: 'test@example.com',
|
||||
},
|
||||
secureFields: {},
|
||||
provenance: 'file',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
setAlertmanagerConfig(GRAFANA_RULES_SOURCE_NAME, config);
|
||||
|
||||
const { result } = renderHook(
|
||||
() =>
|
||||
useContactPointsWithStatus({
|
||||
alertmanager: GRAFANA_RULES_SOURCE_NAME,
|
||||
fetchPolicies: false,
|
||||
fetchStatuses: false,
|
||||
}),
|
||||
{ wrapper }
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
const contactPoint = result.current.contactPoints?.find((cp) => cp.name === 'file-provenance-contact-point');
|
||||
expect(contactPoint).toBeDefined();
|
||||
expect(contactPoint?.provenance).toBe(KnownProvenance.File);
|
||||
});
|
||||
|
||||
it('should extract provenance when provenance is "converted_prometheus"', async () => {
|
||||
const config: AlertManagerCortexConfig = {
|
||||
template_files: {},
|
||||
alertmanager_config: {
|
||||
receivers: [
|
||||
{
|
||||
name: 'mimir-provenance-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid-3',
|
||||
name: 'mimir-provenance-contact-point',
|
||||
type: 'email',
|
||||
disableResolveMessage: false,
|
||||
settings: {
|
||||
addresses: 'test@example.com',
|
||||
},
|
||||
secureFields: {},
|
||||
provenance: 'converted_prometheus',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
setAlertmanagerConfig(GRAFANA_RULES_SOURCE_NAME, config);
|
||||
|
||||
const { result } = renderHook(
|
||||
() =>
|
||||
useContactPointsWithStatus({
|
||||
alertmanager: GRAFANA_RULES_SOURCE_NAME,
|
||||
fetchPolicies: false,
|
||||
fetchStatuses: false,
|
||||
}),
|
||||
{ wrapper }
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
const contactPoint = result.current.contactPoints?.find((cp) => cp.name === 'mimir-provenance-contact-point');
|
||||
expect(contactPoint).toBeDefined();
|
||||
expect(contactPoint?.provenance).toBe(KnownProvenance.ConvertedPrometheus);
|
||||
});
|
||||
|
||||
it('should map "none" provenance annotation to undefined', async () => {
|
||||
const config: AlertManagerCortexConfig = {
|
||||
template_files: {},
|
||||
alertmanager_config: {
|
||||
receivers: [
|
||||
{
|
||||
name: 'none-provenance-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid-4',
|
||||
name: 'none-provenance-contact-point',
|
||||
type: 'email',
|
||||
disableResolveMessage: false,
|
||||
settings: {
|
||||
addresses: 'test@example.com',
|
||||
},
|
||||
secureFields: {},
|
||||
// No provenance field - will default to PROVENANCE_NONE in mock handler
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
setAlertmanagerConfig(GRAFANA_RULES_SOURCE_NAME, config);
|
||||
|
||||
const { result } = renderHook(
|
||||
() =>
|
||||
useContactPointsWithStatus({
|
||||
alertmanager: GRAFANA_RULES_SOURCE_NAME,
|
||||
fetchPolicies: false,
|
||||
fetchStatuses: false,
|
||||
}),
|
||||
{ wrapper }
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
const contactPoint = result.current.contactPoints?.find((cp) => cp.name === 'none-provenance-contact-point');
|
||||
expect(contactPoint).toBeDefined();
|
||||
// The mock handler sets PROVENANCE_NONE ('none') when no provenance is found
|
||||
// parseK8sReceiver converts 'none' to undefined
|
||||
expect(contactPoint?.provenance).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle missing annotations gracefully', async () => {
|
||||
// This test verifies that when annotations are undefined, provenance is handled correctly
|
||||
const config: AlertManagerCortexConfig = {
|
||||
template_files: {},
|
||||
alertmanager_config: {
|
||||
receivers: [
|
||||
{
|
||||
name: 'no-annotations-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid-5',
|
||||
name: 'no-annotations-contact-point',
|
||||
type: 'email',
|
||||
disableResolveMessage: false,
|
||||
settings: {
|
||||
addresses: 'test@example.com',
|
||||
},
|
||||
secureFields: {},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
setAlertmanagerConfig(GRAFANA_RULES_SOURCE_NAME, config);
|
||||
|
||||
const { result } = renderHook(
|
||||
() =>
|
||||
useContactPointsWithStatus({
|
||||
alertmanager: GRAFANA_RULES_SOURCE_NAME,
|
||||
fetchPolicies: false,
|
||||
fetchStatuses: false,
|
||||
}),
|
||||
{ wrapper }
|
||||
);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
const contactPoint = result.current.contactPoints?.find((cp) => cp.name === 'no-annotations-contact-point');
|
||||
expect(contactPoint).toBeDefined();
|
||||
// When annotations are missing, the mock handler should set provenance to undefined
|
||||
expect(contactPoint?.provenance).toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -11,7 +11,7 @@ import { ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1Receiver } f
|
||||
import { BaseAlertmanagerArgs, Skippable } from 'app/features/alerting/unified/types/hooks';
|
||||
import { cloudNotifierTypes } from 'app/features/alerting/unified/utils/cloud-alertmanager-notifier-types';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import { shouldUseK8sApi } from 'app/features/alerting/unified/utils/k8s/utils';
|
||||
import { isK8sEntityProvisioned, shouldUseK8sApi } from 'app/features/alerting/unified/utils/k8s/utils';
|
||||
import { GrafanaManagedContactPoint, Receiver } from 'app/plugins/datasource/alertmanager/types';
|
||||
|
||||
import { getAPINamespace } from '../../../../../api/utils';
|
||||
@@ -21,9 +21,7 @@ import { useAsync } from '../../hooks/useAsync';
|
||||
import { usePluginBridge } from '../../hooks/usePluginBridge';
|
||||
import { useProduceNewAlertmanagerConfiguration } from '../../hooks/useProduceNewAlertmanagerConfig';
|
||||
import { addReceiverAction, deleteReceiverAction, updateReceiverAction } from '../../reducers/alertmanager/receivers';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { getIrmIfPresentOrOnCallPluginId } from '../../utils/config';
|
||||
import { K8sAnnotations } from '../../utils/k8s/constants';
|
||||
|
||||
import { enhanceContactPointsWithMetadata } from './utils';
|
||||
|
||||
@@ -80,13 +78,10 @@ const useOnCallIntegrations = ({ skip }: Skippable = {}) => {
|
||||
type K8sReceiver = ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1Receiver;
|
||||
|
||||
const parseK8sReceiver = (item: K8sReceiver): GrafanaManagedContactPoint => {
|
||||
const metadataProvenance = item.metadata.annotations?.[K8sAnnotations.Provenance];
|
||||
const provenance = metadataProvenance === KnownProvenance.None ? undefined : metadataProvenance;
|
||||
|
||||
return {
|
||||
id: item.metadata.name || item.metadata.uid || item.spec.title,
|
||||
name: item.spec.title,
|
||||
provenance: provenance,
|
||||
provisioned: isK8sEntityProvisioned(item),
|
||||
grafana_managed_receiver_configs: item.spec.integrations,
|
||||
metadata: item.metadata,
|
||||
};
|
||||
|
||||
+7
-8
@@ -16,8 +16,7 @@ import {
|
||||
deleteNotificationTemplateAction,
|
||||
updateNotificationTemplateAction,
|
||||
} from '../../reducers/alertmanager/notificationTemplates';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { K8sAnnotations } from '../../utils/k8s/constants';
|
||||
import { K8sAnnotations, PROVENANCE_NONE } from '../../utils/k8s/constants';
|
||||
import { getAnnotation, shouldUseK8sApi } from '../../utils/k8s/utils';
|
||||
import { ensureDefine } from '../../utils/templates';
|
||||
import { TemplateFormValues } from '../receivers/TemplateForm';
|
||||
@@ -80,7 +79,7 @@ function templateGroupsToTemplates(
|
||||
function templateGroupToTemplate(
|
||||
templateGroup: ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1TemplateGroup
|
||||
): NotificationTemplate {
|
||||
const provenance = getAnnotation(templateGroup, K8sAnnotations.Provenance) ?? KnownProvenance.None;
|
||||
const provenance = getAnnotation(templateGroup, K8sAnnotations.Provenance) ?? PROVENANCE_NONE;
|
||||
return {
|
||||
// K8s entities should always have a metadata.name property. The type is marked as optional because it's also used in other places
|
||||
uid: templateGroup.metadata.name ?? templateGroup.spec.title,
|
||||
@@ -97,8 +96,8 @@ function amConfigToTemplates(config: AlertManagerCortexConfig): NotificationTemp
|
||||
uid: title,
|
||||
title,
|
||||
content,
|
||||
// Undefined, null or empty string should be converted to KnownProvenance.None
|
||||
provenance: (config.template_file_provenances ?? {})[title] || KnownProvenance.None,
|
||||
// Undefined, null or empty string should be converted to PROVENANCE_NONE
|
||||
provenance: (config.template_file_provenances ?? {})[title] || PROVENANCE_NONE,
|
||||
missing: !templates.includes(title),
|
||||
}));
|
||||
}
|
||||
@@ -273,7 +272,7 @@ export function useValidateNotificationTemplate({
|
||||
}
|
||||
|
||||
interface NotificationTemplateMetadata {
|
||||
provenance?: string;
|
||||
isProvisioned: boolean;
|
||||
}
|
||||
|
||||
export function useNotificationTemplateMetadata(
|
||||
@@ -281,11 +280,11 @@ export function useNotificationTemplateMetadata(
|
||||
): NotificationTemplateMetadata {
|
||||
if (!template) {
|
||||
return {
|
||||
provenance: KnownProvenance.None,
|
||||
isProvisioned: false,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
provenance: template.provenance,
|
||||
isProvisioned: Boolean(template.provenance) && template.provenance !== PROVENANCE_NONE,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,12 +1,8 @@
|
||||
import { GrafanaManagedContactPoint } from 'app/plugins/datasource/alertmanager/types';
|
||||
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { ReceiverTypes } from '../receivers/grafanaAppReceivers/onCall/onCall';
|
||||
|
||||
import { RECEIVER_META_KEY, RECEIVER_PLUGIN_META_KEY } from './constants';
|
||||
import {
|
||||
ReceiverConfigWithMetadata,
|
||||
enhanceContactPointsWithMetadata,
|
||||
getReceiverDescription,
|
||||
isAutoGeneratedPolicy,
|
||||
summarizeEmailAddresses,
|
||||
@@ -132,110 +128,3 @@ describe('summarizeEmailAddresses', () => {
|
||||
expect(summarizeEmailAddresses('foo@foo.com\n bar@bar.com ')).toBe(output);
|
||||
});
|
||||
});
|
||||
|
||||
describe('enhanceContactPointsWithMetadata', () => {
|
||||
it('should extract provenance from receiver configs when contact point has no provenance', () => {
|
||||
const contactPoint: GrafanaManagedContactPoint = {
|
||||
name: 'test-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid',
|
||||
name: 'test-contact-point',
|
||||
type: 'email',
|
||||
settings: { addresses: 'test@example.com' },
|
||||
secureFields: {},
|
||||
provenance: KnownProvenance.API,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const enhanced = enhanceContactPointsWithMetadata({
|
||||
contactPoints: [contactPoint],
|
||||
notifiers: [],
|
||||
status: [],
|
||||
});
|
||||
|
||||
expect(enhanced[0].provenance).toBe(KnownProvenance.API);
|
||||
});
|
||||
|
||||
it('should prefer contact point provenance over receiver config provenance', () => {
|
||||
const contactPoint: GrafanaManagedContactPoint = {
|
||||
name: 'test-contact-point',
|
||||
provenance: KnownProvenance.File, // Provenance on contact point (from K8s)
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid',
|
||||
name: 'test-contact-point',
|
||||
type: 'email',
|
||||
settings: { addresses: 'test@example.com' },
|
||||
secureFields: {},
|
||||
provenance: KnownProvenance.API, // Different provenance on receiver config
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const enhanced = enhanceContactPointsWithMetadata({
|
||||
contactPoints: [contactPoint],
|
||||
notifiers: [],
|
||||
status: [],
|
||||
});
|
||||
|
||||
expect(enhanced[0].provenance).toBe(KnownProvenance.File);
|
||||
});
|
||||
|
||||
it('should extract provenance from first receiver config that has it', () => {
|
||||
const contactPoint: GrafanaManagedContactPoint = {
|
||||
name: 'test-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid-1',
|
||||
name: 'test-contact-point',
|
||||
type: 'email',
|
||||
settings: { addresses: 'test@example.com' },
|
||||
secureFields: {},
|
||||
// No provenance on first receiver
|
||||
},
|
||||
{
|
||||
uid: 'test-uid-2',
|
||||
name: 'test-contact-point',
|
||||
type: 'slack',
|
||||
settings: { recipient: '#channel' },
|
||||
secureFields: {},
|
||||
provenance: KnownProvenance.ConvertedPrometheus, // Provenance on second receiver
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const enhanced = enhanceContactPointsWithMetadata({
|
||||
contactPoints: [contactPoint],
|
||||
notifiers: [],
|
||||
status: [],
|
||||
});
|
||||
|
||||
expect(enhanced[0].provenance).toBe(KnownProvenance.ConvertedPrometheus);
|
||||
});
|
||||
|
||||
it('should have undefined provenance when neither contact point nor receiver configs have provenance', () => {
|
||||
const contactPoint: GrafanaManagedContactPoint = {
|
||||
name: 'test-contact-point',
|
||||
grafana_managed_receiver_configs: [
|
||||
{
|
||||
uid: 'test-uid',
|
||||
name: 'test-contact-point',
|
||||
type: 'email',
|
||||
settings: { addresses: 'test@example.com' },
|
||||
secureFields: {},
|
||||
// No provenance
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const enhanced = enhanceContactPointsWithMetadata({
|
||||
contactPoints: [contactPoint],
|
||||
notifiers: [],
|
||||
status: [],
|
||||
});
|
||||
|
||||
expect(enhanced[0].provenance).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -146,16 +146,9 @@ export function enhanceContactPointsWithMetadata({
|
||||
|
||||
const id = getContactPointIdentifier(contactPoint);
|
||||
|
||||
// Extract provenance from contactPoint first; else, search in its receivers
|
||||
const contactPointProvenance =
|
||||
'provenance' in contactPoint && contactPoint.provenance !== undefined
|
||||
? contactPoint.provenance
|
||||
: receivers.find((receiver) => Boolean(receiver.provenance))?.provenance;
|
||||
|
||||
return {
|
||||
...contactPoint,
|
||||
id,
|
||||
provenance: contactPointProvenance,
|
||||
policies:
|
||||
alertmanagerConfiguration && usedContactPointsByName && (usedContactPointsByName[contactPoint.name] ?? []),
|
||||
grafana_managed_receiver_configs: receivers.map((receiver, index) => {
|
||||
|
||||
@@ -9,7 +9,7 @@ import {
|
||||
IoK8SApimachineryPkgApisMetaV1ObjectMeta,
|
||||
} from 'app/features/alerting/unified/openapi/timeIntervalsApi.gen';
|
||||
import { BaseAlertmanagerArgs, Skippable } from 'app/features/alerting/unified/types/hooks';
|
||||
import { KnownProvenance } from 'app/features/alerting/unified/types/knownProvenance';
|
||||
import { PROVENANCE_NONE } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
import {
|
||||
isK8sEntityProvisioned,
|
||||
shouldUseK8sApi,
|
||||
@@ -62,7 +62,7 @@ const parseAmTimeInterval: (interval: MuteTimeInterval, provenance: string) => M
|
||||
return {
|
||||
...interval,
|
||||
id: interval.name,
|
||||
provisioned: Boolean(provenance && provenance !== KnownProvenance.None),
|
||||
provisioned: Boolean(provenance && provenance !== PROVENANCE_NONE),
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
+2
-6
@@ -11,7 +11,7 @@ import { AlertmanagerAction, useAlertmanagerAbility } from 'app/features/alertin
|
||||
import { FormAmRoute } from 'app/features/alerting/unified/types/amroutes';
|
||||
import { addUniqueIdentifierToRoute } from 'app/features/alerting/unified/utils/amroutes';
|
||||
import { getErrorCode, stringifyErrorLike } from 'app/features/alerting/unified/utils/misc';
|
||||
import { ObjectMatcher, RouteWithID } from 'app/plugins/datasource/alertmanager/types';
|
||||
import { ObjectMatcher, ROUTES_META_SYMBOL, RouteWithID } from 'app/plugins/datasource/alertmanager/types';
|
||||
|
||||
import { anyOfRequestState, isError } from '../../hooks/useAsync';
|
||||
import { useAlertmanager } from '../../state/AlertmanagerContext';
|
||||
@@ -27,7 +27,6 @@ import { useAddPolicyModal, useAlertGroupsModal, useDeletePolicyModal, useEditPo
|
||||
import { Policy } from './Policy';
|
||||
import { TIMING_OPTIONS_DEFAULTS } from './timingOptions';
|
||||
import {
|
||||
isRouteProvisioned,
|
||||
useAddNotificationPolicy,
|
||||
useDeleteNotificationPolicy,
|
||||
useNotificationPolicyRoute,
|
||||
@@ -100,8 +99,6 @@ export const NotificationPoliciesList = () => {
|
||||
}
|
||||
return;
|
||||
}, [defaultPolicy]);
|
||||
const routeProvenance = defaultPolicy?.provenance;
|
||||
const isRootRouteProvisioned = rootRoute ? isRouteProvisioned(rootRoute) : false;
|
||||
|
||||
// useAsync could also work but it's hard to wait until it's done in the tests
|
||||
// Combining with useEffect gives more predictable results because the condition is in useEffect
|
||||
@@ -247,8 +244,7 @@ export const NotificationPoliciesList = () => {
|
||||
currentRoute={defaults(rootRoute, TIMING_OPTIONS_DEFAULTS)}
|
||||
contactPointsState={contactPointsState.receivers}
|
||||
readOnly={!hasConfigurationAPI}
|
||||
provisioned={isRootRouteProvisioned}
|
||||
provenance={routeProvenance}
|
||||
provisioned={rootRoute[ROUTES_META_SYMBOL]?.provisioned}
|
||||
alertManagerSourceName={selectedAlertmanager}
|
||||
onAddPolicy={openAddModal}
|
||||
onEditPolicy={openEditModal}
|
||||
|
||||
@@ -17,7 +17,6 @@ import {
|
||||
import { useAlertmanagerAbilities } from '../../hooks/useAbilities';
|
||||
import { mockReceiversState } from '../../mocks';
|
||||
import { AlertmanagerProvider } from '../../state/AlertmanagerContext';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from '../../utils/datasource';
|
||||
|
||||
import {
|
||||
@@ -332,60 +331,6 @@ describe('Policy', () => {
|
||||
const customPolicy = screen.getByTestId('am-route-container');
|
||||
expect(within(customPolicy).getByTestId('matches-all')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows correct badge when policy has file provenance', () => {
|
||||
const mockRoute: RouteWithID = {
|
||||
id: 'test-route',
|
||||
receiver: 'test-receiver',
|
||||
routes: [],
|
||||
};
|
||||
|
||||
renderPolicy(
|
||||
<Policy
|
||||
readOnly
|
||||
isDefaultPolicy
|
||||
currentRoute={mockRoute}
|
||||
contactPointsState={mockReceiversState()}
|
||||
alertManagerSourceName={GRAFANA_RULES_SOURCE_NAME}
|
||||
onEditPolicy={noop}
|
||||
onAddPolicy={noop}
|
||||
onDeletePolicy={noop}
|
||||
onShowAlertInstances={noop}
|
||||
provisioned
|
||||
provenance={KnownProvenance.File}
|
||||
/>
|
||||
);
|
||||
|
||||
const badge = screen.getByText('Provisioned');
|
||||
expect(badge).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows correct badge when policy has converted_prometheus provenance', () => {
|
||||
const mockRoute: RouteWithID = {
|
||||
id: 'test-route',
|
||||
receiver: 'test-receiver',
|
||||
routes: [],
|
||||
};
|
||||
|
||||
renderPolicy(
|
||||
<Policy
|
||||
readOnly
|
||||
isDefaultPolicy
|
||||
currentRoute={mockRoute}
|
||||
contactPointsState={mockReceiversState()}
|
||||
alertManagerSourceName={GRAFANA_RULES_SOURCE_NAME}
|
||||
onEditPolicy={noop}
|
||||
onAddPolicy={noop}
|
||||
onDeletePolicy={noop}
|
||||
onShowAlertInstances={noop}
|
||||
provisioned
|
||||
provenance={KnownProvenance.ConvertedPrometheus}
|
||||
/>
|
||||
);
|
||||
|
||||
const badge = screen.getByText('Imported');
|
||||
expect(badge).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
// Doesn't matter which path the routes use, it just needs to match the initialEntries history entry to render the element
|
||||
|
||||
@@ -61,7 +61,6 @@ interface PolicyComponentProps {
|
||||
contactPointsState?: ReceiversState;
|
||||
readOnly?: boolean;
|
||||
provisioned?: boolean;
|
||||
provenance?: string;
|
||||
inheritedProperties?: InheritableProperties;
|
||||
routesMatchingFilters?: RoutesMatchingFilters;
|
||||
|
||||
@@ -90,7 +89,6 @@ const Policy = (props: PolicyComponentProps) => {
|
||||
contactPointsState,
|
||||
readOnly = false,
|
||||
provisioned = false,
|
||||
provenance,
|
||||
alertManagerSourceName,
|
||||
currentRoute,
|
||||
inheritedProperties,
|
||||
@@ -257,7 +255,7 @@ const Policy = (props: PolicyComponentProps) => {
|
||||
<Spacer />
|
||||
{/* TODO maybe we should move errors to the gutter instead? */}
|
||||
{errors.length > 0 && <Errors errors={errors} />}
|
||||
{provisioned && <ProvisioningBadge tooltip provenance={provenance} />}
|
||||
{provisioned && <ProvisioningBadge />}
|
||||
<Stack direction="row" gap={0.5}>
|
||||
{!isAutoGenerated && !readOnly && (
|
||||
<Authorize actions={[AlertmanagerAction.CreateNotificationPolicy]}>
|
||||
|
||||
+1
-90
@@ -1,15 +1,9 @@
|
||||
import { MatcherOperator, ROUTES_META_SYMBOL, Route } from 'app/plugins/datasource/alertmanager/types';
|
||||
|
||||
import { ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1Route } from '../../openapi/routesApi.gen';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { ROOT_ROUTE_NAME } from '../../utils/k8s/constants';
|
||||
|
||||
import {
|
||||
createKubernetesRoutingTreeSpec,
|
||||
isRouteProvisioned,
|
||||
k8sSubRouteToRoute,
|
||||
routeToK8sSubRoute,
|
||||
} from './useNotificationPolicyRoute';
|
||||
import { createKubernetesRoutingTreeSpec, k8sSubRouteToRoute, routeToK8sSubRoute } from './useNotificationPolicyRoute';
|
||||
|
||||
test('k8sSubRouteToRoute', () => {
|
||||
const input: ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1Route = {
|
||||
@@ -121,86 +115,3 @@ test('createKubernetesRoutingTreeSpec', () => {
|
||||
expect(tree.metadata.name).toBe(ROOT_ROUTE_NAME);
|
||||
expect(tree).toMatchSnapshot();
|
||||
});
|
||||
|
||||
describe('isRouteProvisioned', () => {
|
||||
it('returns false when route has no provenance', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
};
|
||||
|
||||
expect(isRouteProvisioned(route)).toBeFalsy();
|
||||
});
|
||||
|
||||
it('returns false when route has KnownProvenance.None in metadata', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
[ROUTES_META_SYMBOL]: {
|
||||
provenance: KnownProvenance.None,
|
||||
},
|
||||
};
|
||||
|
||||
expect(isRouteProvisioned(route)).toBeFalsy();
|
||||
});
|
||||
|
||||
it('returns false when route has KnownProvenance.None at top level', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
provenance: KnownProvenance.None,
|
||||
};
|
||||
expect(isRouteProvisioned(route)).toBeFalsy();
|
||||
});
|
||||
|
||||
it('returns true when route has file provenance in metadata', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
[ROUTES_META_SYMBOL]: {
|
||||
provenance: KnownProvenance.File,
|
||||
},
|
||||
};
|
||||
|
||||
expect(isRouteProvisioned(route)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('returns true when route has api provenance in metadata', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
[ROUTES_META_SYMBOL]: {
|
||||
provenance: KnownProvenance.API,
|
||||
},
|
||||
};
|
||||
|
||||
expect(isRouteProvisioned(route)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('returns true when route has converted_prometheus provenance in metadata', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
[ROUTES_META_SYMBOL]: {
|
||||
provenance: KnownProvenance.ConvertedPrometheus,
|
||||
},
|
||||
};
|
||||
|
||||
expect(isRouteProvisioned(route)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('returns true when route has file provenance at top level', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
provenance: KnownProvenance.File,
|
||||
};
|
||||
|
||||
expect(isRouteProvisioned(route)).toBeTruthy();
|
||||
});
|
||||
|
||||
it('falls back to top-level provenance when metadata provenance is missing', () => {
|
||||
const route: Route = {
|
||||
receiver: 'test-receiver',
|
||||
provenance: KnownProvenance.File,
|
||||
[ROUTES_META_SYMBOL]: {
|
||||
provenance: undefined,
|
||||
},
|
||||
};
|
||||
|
||||
expect(isRouteProvisioned(route)).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
+4
-10
@@ -22,8 +22,8 @@ import {
|
||||
} from '../../reducers/alertmanager/notificationPolicyRoutes';
|
||||
import { FormAmRoute } from '../../types/amroutes';
|
||||
import { addUniqueIdentifierToRoute } from '../../utils/amroutes';
|
||||
import { K8sAnnotations, ROOT_ROUTE_NAME } from '../../utils/k8s/constants';
|
||||
import { getAnnotation, isProvisionedResource, shouldUseK8sApi } from '../../utils/k8s/utils';
|
||||
import { PROVENANCE_NONE, ROOT_ROUTE_NAME } from '../../utils/k8s/constants';
|
||||
import { isK8sEntityProvisioned, shouldUseK8sApi } from '../../utils/k8s/utils';
|
||||
import { routeAdapter } from '../../utils/routeAdapter';
|
||||
import {
|
||||
InsertPosition,
|
||||
@@ -33,11 +33,6 @@ import {
|
||||
omitRouteFromRouteTree,
|
||||
} from '../../utils/routeTree';
|
||||
|
||||
export function isRouteProvisioned(route: Route): boolean {
|
||||
const provenance = route[ROUTES_META_SYMBOL]?.provenance ?? route.provenance;
|
||||
return isProvisionedResource(provenance);
|
||||
}
|
||||
|
||||
const k8sRoutesToRoutesMemoized = memoize(k8sRoutesToRoutes, { maxSize: 1 });
|
||||
|
||||
const {
|
||||
@@ -87,7 +82,7 @@ const parseAmConfigRoute = memoize((route: Route): Route => {
|
||||
return {
|
||||
...route,
|
||||
[ROUTES_META_SYMBOL]: {
|
||||
provenance: route.provenance,
|
||||
provisioned: Boolean(route.provenance && route.provenance !== PROVENANCE_NONE),
|
||||
},
|
||||
};
|
||||
});
|
||||
@@ -237,11 +232,10 @@ function k8sRoutesToRoutes(routes: ComGithubGrafanaGrafanaPkgApisAlertingNotific
|
||||
...route.spec.defaults,
|
||||
routes: route.spec.routes?.map(k8sSubRouteToRoute),
|
||||
[ROUTES_META_SYMBOL]: {
|
||||
provenance: getAnnotation(route, K8sAnnotations.Provenance),
|
||||
provisioned: isK8sEntityProvisioned(route),
|
||||
resourceVersion: route.metadata.resourceVersion,
|
||||
name: route.metadata.name,
|
||||
},
|
||||
provenance: getAnnotation(route, K8sAnnotations.Provenance),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
@@ -33,7 +33,6 @@ import { AccessControlAction } from 'app/types/accessControl';
|
||||
|
||||
import { AITemplateButtonComponent } from '../../enterprise-components/AI/AIGenTemplateButton/addAITemplateButton';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from '../../utils/datasource';
|
||||
import { isProvisionedResource } from '../../utils/k8s/utils';
|
||||
import { makeAMLink, stringifyErrorLike } from '../../utils/misc';
|
||||
import { EditorColumnHeader } from '../EditorColumnHeader';
|
||||
import { ProvisionedResource, ProvisioningAlert } from '../Provisioning';
|
||||
@@ -123,8 +122,7 @@ export const TemplateForm = ({ originalTemplate, prefill, alertmanager }: Props)
|
||||
// AI feedback state
|
||||
const [aiGeneratedTemplate, setAiGeneratedTemplate] = useState(false);
|
||||
|
||||
const { provenance } = useNotificationTemplateMetadata(originalTemplate);
|
||||
const isProvisioned = isProvisionedResource(provenance);
|
||||
const { isProvisioned } = useNotificationTemplateMetadata(originalTemplate);
|
||||
const originalTemplatePrefill: TemplateFormValues | undefined = originalTemplate
|
||||
? { title: originalTemplate.title, content: originalTemplate.content }
|
||||
: undefined;
|
||||
|
||||
@@ -1,98 +0,0 @@
|
||||
import { render, screen, within } from 'test/test-utils';
|
||||
|
||||
import { AppNotificationList } from 'app/core/components/AppNotifications/AppNotificationList';
|
||||
import { AccessControlAction } from 'app/types/accessControl';
|
||||
|
||||
import { setupMswServer } from '../../mockApi';
|
||||
import { grantUserPermissions } from '../../mocks';
|
||||
import { AlertmanagerProvider } from '../../state/AlertmanagerContext';
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from '../../utils/datasource';
|
||||
import { NotificationTemplate } from '../contact-points/useNotificationTemplates';
|
||||
|
||||
import { TemplatesTable } from './TemplatesTable';
|
||||
|
||||
const mockTemplates: Array<Partial<NotificationTemplate>> = [
|
||||
{
|
||||
uid: 'mimir-template',
|
||||
title: 'mimir-template',
|
||||
content: '{{ define "mimir-template" }}Template from Mimir{{ end }}',
|
||||
provenance: KnownProvenance.ConvertedPrometheus,
|
||||
},
|
||||
{
|
||||
uid: 'file-template',
|
||||
title: 'file-template',
|
||||
content: '{{ define "file-template" }}File provisioned template{{ end }}',
|
||||
provenance: KnownProvenance.File,
|
||||
},
|
||||
{
|
||||
uid: 'api-template',
|
||||
title: 'api-template',
|
||||
content: '{{ define "api-template" }}API provisioned template{{ end }}',
|
||||
provenance: KnownProvenance.API,
|
||||
},
|
||||
{
|
||||
uid: 'no-provenance-template',
|
||||
title: 'no-provenance-template',
|
||||
content: '{{ define "no-provenance-template" }}No provenance template{{ end }}',
|
||||
provenance: KnownProvenance.None,
|
||||
},
|
||||
{
|
||||
uid: 'undefined-provenance-template',
|
||||
title: 'undefined-provenance-template',
|
||||
content: '{{ define "undefined-provenance-template" }}Undefined provenance template{{ end }}',
|
||||
provenance: undefined,
|
||||
},
|
||||
];
|
||||
|
||||
const renderWithProvider = (templates: Array<Partial<NotificationTemplate>>) => {
|
||||
return render(
|
||||
<AlertmanagerProvider accessType={'notification'}>
|
||||
<TemplatesTable alertManagerName={GRAFANA_RULES_SOURCE_NAME} templates={templates as NotificationTemplate[]} />
|
||||
<AppNotificationList />
|
||||
</AlertmanagerProvider>
|
||||
);
|
||||
};
|
||||
|
||||
setupMswServer();
|
||||
|
||||
describe('TemplatesTable', () => {
|
||||
beforeEach(() => {
|
||||
grantUserPermissions([
|
||||
AccessControlAction.AlertingNotificationsRead,
|
||||
AccessControlAction.AlertingNotificationsWrite,
|
||||
AccessControlAction.AlertingNotificationsExternalRead,
|
||||
AccessControlAction.AlertingNotificationsExternalWrite,
|
||||
]);
|
||||
});
|
||||
|
||||
it('shows "Imported" badge for templates with converted_prometheus provenance', () => {
|
||||
const templates = [mockTemplates[0]]; // mimir-template
|
||||
renderWithProvider(templates);
|
||||
|
||||
const templateRow = screen.getByRole('row', { name: /mimir-template/i });
|
||||
const badge = within(templateRow).getByText('Imported');
|
||||
expect(badge).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows "Provisioned" badge for templates with other provenance', () => {
|
||||
// api and file templates
|
||||
[mockTemplates[1], mockTemplates[2]].forEach((template) => {
|
||||
renderWithProvider([template]);
|
||||
|
||||
const templateRow = screen.getByRole('row', { name: new RegExp(template.title ?? '', 'i') });
|
||||
const badge = within(templateRow).getByText('Provisioned');
|
||||
expect(badge).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('does not show badge for templates with KnownProvenance.None or empty string provenance', () => {
|
||||
// no-provenance-template and undefined-provenance-template
|
||||
[mockTemplates[3], mockTemplates[4]].forEach((template) => {
|
||||
renderWithProvider([template]);
|
||||
|
||||
const templateRow = screen.getByRole('row', { name: new RegExp(template.title ?? '', 'i') });
|
||||
expect(within(templateRow).queryByText('Provisioned')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -10,7 +10,6 @@ import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/d
|
||||
import { Authorize } from '../../components/Authorize';
|
||||
import { AlertmanagerAction } from '../../hooks/useAbilities';
|
||||
import { getAlertTableStyles } from '../../styles/table';
|
||||
import { isProvisionedResource } from '../../utils/k8s/utils';
|
||||
import { makeAMLink, stringifyErrorLike } from '../../utils/misc';
|
||||
import { CollapseToggle } from '../CollapseToggle';
|
||||
import { DetailsField } from '../DetailsField';
|
||||
@@ -129,8 +128,7 @@ function TemplateRow({ notificationTemplate, idx, alertManagerName, onDeleteClic
|
||||
const isGrafanaAlertmanager = alertManagerName === GRAFANA_RULES_SOURCE_NAME;
|
||||
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
const { provenance } = useNotificationTemplateMetadata(notificationTemplate);
|
||||
const isProvisioned = isProvisionedResource(provenance);
|
||||
const { isProvisioned } = useNotificationTemplateMetadata(notificationTemplate);
|
||||
|
||||
const { uid, title: name, content: template, missing } = notificationTemplate;
|
||||
const misconfiguredBadgeText = t('alerting.templates.misconfigured-badge-text', 'Misconfigured');
|
||||
@@ -141,7 +139,7 @@ function TemplateRow({ notificationTemplate, idx, alertManagerName, onDeleteClic
|
||||
<CollapseToggle isCollapsed={!isExpanded} onToggle={() => setIsExpanded(!isExpanded)} />
|
||||
</td>
|
||||
<td>
|
||||
{name} {isProvisioned && <ProvisioningBadge tooltip provenance={provenance} />}{' '}
|
||||
{name} {isProvisioned && <ProvisioningBadge />}{' '}
|
||||
{missing && !isGrafanaAlertmanager && (
|
||||
<Tooltip
|
||||
content={
|
||||
|
||||
+6
-9
@@ -9,11 +9,7 @@ import {
|
||||
} from 'app/features/alerting/unified/components/contact-points/useContactPoints';
|
||||
import { showManageContactPointPermissions } from 'app/features/alerting/unified/components/contact-points/utils';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import {
|
||||
canEditEntity,
|
||||
canModifyProtectedEntity,
|
||||
isProvisionedResource,
|
||||
} from 'app/features/alerting/unified/utils/k8s/utils';
|
||||
import { canEditEntity, canModifyProtectedEntity } from 'app/features/alerting/unified/utils/k8s/utils';
|
||||
import {
|
||||
GrafanaManagedContactPoint,
|
||||
GrafanaManagedReceiverConfig,
|
||||
@@ -131,8 +127,7 @@ export const GrafanaReceiverForm = ({ contactPoint, readOnly = false, editMode }
|
||||
// If there is no contact point it means we're creating a new one, so scoped permissions doesn't exist yet
|
||||
const hasScopedEditPermissions = contactPoint ? canEditEntity(contactPoint) : true;
|
||||
const hasScopedEditProtectedPermissions = contactPoint ? canModifyProtectedEntity(contactPoint) : true;
|
||||
const isProvisioned = isProvisionedResource(contactPoint?.provenance);
|
||||
const isEditable = !readOnly && hasScopedEditPermissions && !isProvisioned;
|
||||
const isEditable = !readOnly && hasScopedEditPermissions && !contactPoint?.provisioned;
|
||||
const isTestable = !readOnly;
|
||||
const canEditProtectedFields = editMode ? hasScopedEditProtectedPermissions : true;
|
||||
|
||||
@@ -175,8 +170,10 @@ export const GrafanaReceiverForm = ({ contactPoint, readOnly = false, editMode }
|
||||
</Alert>
|
||||
)}
|
||||
|
||||
{isProvisioned && hasLegacyIntegrations(contactPoint, grafanaNotifiers) && <ImportedContactPointAlert />}
|
||||
{isProvisioned && !hasLegacyIntegrations(contactPoint, grafanaNotifiers) && (
|
||||
{contactPoint?.provisioned && hasLegacyIntegrations(contactPoint, grafanaNotifiers) && (
|
||||
<ImportedContactPointAlert />
|
||||
)}
|
||||
{contactPoint?.provisioned && !hasLegacyIntegrations(contactPoint, grafanaNotifiers) && (
|
||||
<ProvisioningAlert resource={ProvisionedResource.ContactPoint} />
|
||||
)}
|
||||
|
||||
|
||||
+2
-2
@@ -7,8 +7,8 @@ import { grantUserPermissions } from 'app/features/alerting/unified/mocks';
|
||||
import { getAlertmanagerConfig } from 'app/features/alerting/unified/mocks/server/entities/alertmanagers';
|
||||
import { AlertmanagerProvider } from 'app/features/alerting/unified/state/AlertmanagerContext';
|
||||
import { NotificationChannelOption } from 'app/features/alerting/unified/types/alerting';
|
||||
import { KnownProvenance } from 'app/features/alerting/unified/types/knownProvenance';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import { PROVENANCE_NONE } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
import { DEFAULT_TEMPLATES } from 'app/features/alerting/unified/utils/template-constants';
|
||||
import { AccessControlAction } from 'app/types/accessControl';
|
||||
|
||||
@@ -68,7 +68,7 @@ describe('getTemplateOptions function', () => {
|
||||
uid: title,
|
||||
title,
|
||||
content,
|
||||
provenance: KnownProvenance.None,
|
||||
provenance: PROVENANCE_NONE,
|
||||
};
|
||||
});
|
||||
const defaultTemplates = parseTemplates(DEFAULT_TEMPLATES);
|
||||
|
||||
@@ -4,8 +4,7 @@ import {
|
||||
ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1Route,
|
||||
ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1RoutingTree,
|
||||
} from 'app/features/alerting/unified/openapi/routesApi.gen';
|
||||
import { KnownProvenance } from 'app/features/alerting/unified/types/knownProvenance';
|
||||
import { K8sAnnotations, ROOT_ROUTE_NAME } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
import { K8sAnnotations, PROVENANCE_NONE, ROOT_ROUTE_NAME } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
import { AlertManagerCortexConfig, MatcherOperator, Route } from 'app/plugins/datasource/alertmanager/types';
|
||||
|
||||
/**
|
||||
@@ -67,7 +66,7 @@ export const getUserDefinedRoutingTree: (
|
||||
name: ROOT_ROUTE_NAME,
|
||||
namespace: 'default',
|
||||
annotations: {
|
||||
[K8sAnnotations.Provenance]: KnownProvenance.None,
|
||||
[K8sAnnotations.Provenance]: PROVENANCE_NONE,
|
||||
},
|
||||
// Resource versions are much shorter than this in reality, but this is an easy way
|
||||
// for us to mock the concurrency logic and check if the policies have updated since the last fetch
|
||||
|
||||
@@ -6,9 +6,8 @@ import {
|
||||
} from 'app/features/alerting/unified/mocks/server/entities/alertmanagers';
|
||||
import { ALERTING_API_SERVER_BASE_URL, getK8sResponse } from 'app/features/alerting/unified/mocks/server/utils';
|
||||
import { ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1Receiver } from 'app/features/alerting/unified/openapi/receiversApi.gen';
|
||||
import { KnownProvenance } from 'app/features/alerting/unified/types/knownProvenance';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import { K8sAnnotations } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
import { K8sAnnotations, PROVENANCE_NONE } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
|
||||
const usedByPolicies = ['grafana-default-email'];
|
||||
const usedByRules = ['grafana-default-email'];
|
||||
@@ -24,7 +23,7 @@ const getReceiversList = () => {
|
||||
const provenance =
|
||||
contactPoint.grafana_managed_receiver_configs?.find((integration) => {
|
||||
return integration.provenance;
|
||||
})?.provenance || KnownProvenance.None;
|
||||
})?.provenance || PROVENANCE_NONE;
|
||||
return {
|
||||
metadata: {
|
||||
// This isn't exactly accurate, but its the cleanest way to use the same data for AM config and K8S responses
|
||||
|
||||
@@ -3,9 +3,8 @@ import { HttpResponse, http } from 'msw';
|
||||
import { getAlertmanagerConfig } from 'app/features/alerting/unified/mocks/server/entities/alertmanagers';
|
||||
import { ALERTING_API_SERVER_BASE_URL, getK8sResponse } from 'app/features/alerting/unified/mocks/server/utils';
|
||||
import { ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1TemplateGroup } from 'app/features/alerting/unified/openapi/templatesApi.gen';
|
||||
import { KnownProvenance } from 'app/features/alerting/unified/types/knownProvenance';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import { PROVENANCE_ANNOTATION } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
import { PROVENANCE_ANNOTATION, PROVENANCE_NONE } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
|
||||
const config = getAlertmanagerConfig(GRAFANA_RULES_SOURCE_NAME);
|
||||
|
||||
@@ -15,7 +14,7 @@ const mappedTemplates = Object.entries(
|
||||
).map<ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1TemplateGroup>(([title, template]) => ({
|
||||
metadata: {
|
||||
name: titleToK8sResourceName(title), // K8s uses unique identifiers for resources
|
||||
annotations: { [PROVENANCE_ANNOTATION]: config.template_file_provenances?.[title] || KnownProvenance.None },
|
||||
annotations: { [PROVENANCE_ANNOTATION]: config.template_file_provenances?.[title] || PROVENANCE_NONE },
|
||||
},
|
||||
spec: {
|
||||
title: title,
|
||||
|
||||
@@ -4,8 +4,7 @@ import { base64UrlEncode } from '@grafana/alerting';
|
||||
import { filterBySelector } from 'app/features/alerting/unified/mocks/server/handlers/k8s/utils';
|
||||
import { ALERTING_API_SERVER_BASE_URL, getK8sResponse } from 'app/features/alerting/unified/mocks/server/utils';
|
||||
import { ComGithubGrafanaGrafanaPkgApisAlertingNotificationsV0Alpha1TimeInterval } from 'app/features/alerting/unified/openapi/timeIntervalsApi.gen';
|
||||
import { KnownProvenance } from 'app/features/alerting/unified/types/knownProvenance';
|
||||
import { K8sAnnotations } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
import { K8sAnnotations, PROVENANCE_NONE } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
|
||||
/** UID of a time interval that we expect to follow all happy paths within tests/mocks */
|
||||
export const TIME_INTERVAL_UID_HAPPY_PATH = 'f4eae7a4895fa786';
|
||||
@@ -22,7 +21,7 @@ const allTimeIntervals = getK8sResponse<ComGithubGrafanaGrafanaPkgApisAlertingNo
|
||||
{
|
||||
metadata: {
|
||||
annotations: {
|
||||
[K8sAnnotations.Provenance]: KnownProvenance.None,
|
||||
[K8sAnnotations.Provenance]: PROVENANCE_NONE,
|
||||
},
|
||||
name: base64UrlEncode(TIME_INTERVAL_NAME_HAPPY_PATH),
|
||||
uid: TIME_INTERVAL_UID_HAPPY_PATH,
|
||||
|
||||
@@ -1,6 +0,0 @@
|
||||
export enum KnownProvenance {
|
||||
None = 'none' /** Provenance value given for entities that were not provisioned */,
|
||||
API = 'api',
|
||||
File = 'file',
|
||||
ConvertedPrometheus = 'converted_prometheus',
|
||||
}
|
||||
@@ -4,6 +4,9 @@
|
||||
* */
|
||||
export const PROVENANCE_ANNOTATION = 'grafana.com/provenance';
|
||||
|
||||
/** Value of {@link PROVENANCE_ANNOTATION} given for entities that were not provisioned */
|
||||
export const PROVENANCE_NONE = 'none';
|
||||
|
||||
export enum K8sAnnotations {
|
||||
Provenance = 'grafana.com/provenance',
|
||||
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
|
||||
import { encodeFieldSelector, isProvisionedResource } from './utils';
|
||||
import { encodeFieldSelector } from './utils';
|
||||
|
||||
describe('encodeFieldSelector', () => {
|
||||
it('should escape backslashes', () => {
|
||||
@@ -27,29 +25,3 @@ describe('encodeFieldSelector', () => {
|
||||
expect(encodeFieldSelector('foo=bar,bar=baz,qux\\foo')).toBe('foo\\=bar\\,bar\\=baz\\,qux\\\\foo');
|
||||
});
|
||||
});
|
||||
|
||||
describe('isProvisionedResource', () => {
|
||||
it('should return true when provenance is API', () => {
|
||||
expect(isProvisionedResource(KnownProvenance.API)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when provenance is File', () => {
|
||||
expect(isProvisionedResource(KnownProvenance.File)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return true when provenance is ConvertedPrometheus', () => {
|
||||
expect(isProvisionedResource(KnownProvenance.ConvertedPrometheus)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when provenance is none', () => {
|
||||
expect(isProvisionedResource(KnownProvenance.None)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when provenance is undefined', () => {
|
||||
expect(isProvisionedResource(undefined)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true for any other non-empty string', () => {
|
||||
expect(isProvisionedResource('custom-provenance')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
import { IoK8SApimachineryPkgApisMetaV1ObjectMeta } from 'app/features/alerting/unified/openapi/receiversApi.gen';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import { K8sAnnotations } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
|
||||
import { KnownProvenance } from '../../types/knownProvenance';
|
||||
import { K8sAnnotations, PROVENANCE_NONE } from 'app/features/alerting/unified/utils/k8s/constants';
|
||||
|
||||
/**
|
||||
* Should we call the kubernetes-style API for managing alertmanager entities?
|
||||
@@ -24,7 +22,7 @@ type EntityToCheck = {
|
||||
*/
|
||||
export const isK8sEntityProvisioned = (k8sEntity: EntityToCheck) => {
|
||||
const provenance = getAnnotation(k8sEntity, K8sAnnotations.Provenance);
|
||||
return isProvisionedResource(provenance);
|
||||
return Boolean(provenance && provenance !== PROVENANCE_NONE);
|
||||
};
|
||||
|
||||
export const ANNOTATION_PREFIX_ACCESS = 'grafana.com/access/';
|
||||
@@ -61,7 +59,3 @@ export const stringifyFieldSelector = (fieldSelectors: FieldSelector[]): string
|
||||
.map(([key, value, operator = '=']) => `${key}${operator}${encodeFieldSelector(value)}`)
|
||||
.join(',');
|
||||
};
|
||||
|
||||
export function isProvisionedResource(provenance?: string): boolean {
|
||||
return Boolean(provenance && provenance !== KnownProvenance.None);
|
||||
}
|
||||
|
||||
@@ -781,10 +781,6 @@ export function tabItemToSaveModel(
|
||||
panels: [],
|
||||
};
|
||||
|
||||
if (tab.state.repeatByVariable) {
|
||||
rowPanel.repeat = tab.state.repeatByVariable;
|
||||
}
|
||||
|
||||
panelsArray.push(rowPanel);
|
||||
|
||||
// The base Y position for panels in this tab (after the row panel)
|
||||
@@ -916,15 +912,6 @@ function autoGridLayoutToPanels(layout: AutoGridLayoutManager, isSnapshot = fals
|
||||
},
|
||||
isSnapshot
|
||||
);
|
||||
|
||||
// Handle repeat properties for AutoGridItem
|
||||
// AutoGrid always uses horizontal direction, and maxPerRow is derived from maxColumnCount
|
||||
if (item.state.variableName) {
|
||||
panel.repeat = item.state.variableName;
|
||||
panel.repeatDirection = 'h';
|
||||
panel.maxPerRow = maxColumnCount;
|
||||
}
|
||||
|
||||
panels.push(panel);
|
||||
|
||||
// Move to next position
|
||||
|
||||
@@ -53,7 +53,7 @@ describe('buildCategories', () => {
|
||||
it('should add enterprise phantom plugins', () => {
|
||||
const enterprisePluginsCategory = categories[3];
|
||||
expect(enterprisePluginsCategory.title).toBe('Enterprise plugins');
|
||||
expect(enterprisePluginsCategory.plugins.length).toBe(32);
|
||||
expect(enterprisePluginsCategory.plugins.length).toBe(31);
|
||||
expect(enterprisePluginsCategory.plugins[0].name).toBe('Adobe Analytics');
|
||||
expect(enterprisePluginsCategory.plugins[enterprisePluginsCategory.plugins.length - 1].name).toBe('Zendesk');
|
||||
});
|
||||
|
||||
@@ -13,7 +13,6 @@ import catchpointSvg from 'img/plugins/catchpoint.svg';
|
||||
import cloudflareJpg from 'img/plugins/cloudflare.jpg';
|
||||
import cockroachdbJpg from 'img/plugins/cockroachdb.jpg';
|
||||
import datadogPng from 'img/plugins/datadog.png';
|
||||
import db2Svg from 'img/plugins/db2.svg';
|
||||
import droneSvg from 'img/plugins/drone.svg';
|
||||
import dynatracePng from 'img/plugins/dynatrace.png';
|
||||
import gitlabSvg from 'img/plugins/gitlab.svg';
|
||||
@@ -419,12 +418,6 @@ function getEnterprisePhantomPlugins(): DataSourcePluginMeta[] {
|
||||
name: 'SolarWinds',
|
||||
imgUrl: solarWindsSvg,
|
||||
}),
|
||||
getPhantomPlugin({
|
||||
id: 'grafana-ibmdb2-datasource',
|
||||
description: t('datasources.get-enterprise-phantom-plugins.description.ibmdb2-datasource', 'IBM Db2 data source'),
|
||||
name: 'IBM Db2',
|
||||
imgUrl: db2Svg,
|
||||
}),
|
||||
];
|
||||
}
|
||||
|
||||
|
||||
@@ -200,7 +200,7 @@ describe('Explore: Query History', () => {
|
||||
await waitForExplore();
|
||||
await openQueryHistory();
|
||||
|
||||
jest.spyOn(localStorage, 'checkLimits').mockImplementationOnce((queries) => {
|
||||
jest.spyOn(localStorage, 'cleanUpUnstarredQuery').mockImplementationOnce((queries) => {
|
||||
return { queriesToKeep: queries, limitExceeded: true };
|
||||
});
|
||||
|
||||
|
||||
@@ -4,6 +4,8 @@ const cloudwatchPlugin = async () =>
|
||||
await import(/* webpackChunkName: "cloudwatchPlugin" */ 'app/plugins/datasource/cloudwatch/module');
|
||||
const dashboardDSPlugin = async () =>
|
||||
await import(/* webpackChunkName "dashboardDSPlugin" */ 'app/plugins/datasource/dashboard/module');
|
||||
const elasticsearchPlugin = async () =>
|
||||
await import(/* webpackChunkName: "elasticsearchPlugin" */ 'app/plugins/datasource/elasticsearch/module');
|
||||
const grafanaPlugin = async () =>
|
||||
await import(/* webpackChunkName: "grafanaPlugin" */ 'app/plugins/datasource/grafana/module');
|
||||
const influxdbPlugin = async () =>
|
||||
@@ -73,6 +75,7 @@ const builtInPlugins: Record<string, System.Module | (() => Promise<System.Modul
|
||||
// datasources
|
||||
'core:plugin/cloudwatch': cloudwatchPlugin,
|
||||
'core:plugin/dashboard': dashboardDSPlugin,
|
||||
'core:plugin/elasticsearch': elasticsearchPlugin,
|
||||
'core:plugin/grafana': grafanaPlugin,
|
||||
'core:plugin/influxdb': influxdbPlugin,
|
||||
'core:plugin/mixed': mixedPlugin,
|
||||
|
||||
@@ -1,9 +1,6 @@
|
||||
import { isEmpty } from 'lodash';
|
||||
|
||||
import {
|
||||
API_GROUP as DASHBOARD_API_GROUP,
|
||||
BASE_URL as v0alphaBaseURL,
|
||||
} from '@grafana/api-clients/rtkq/dashboard/v0alpha1';
|
||||
import { BASE_URL as v0alphaBaseURL } from '@grafana/api-clients/rtkq/dashboard/v0alpha1';
|
||||
import { generatedAPI as legacyUserAPI } from '@grafana/api-clients/rtkq/legacy/user';
|
||||
import { DataFrame, DataFrameView, getDisplayProcessor, SelectableValue, toDataFrame } from '@grafana/data';
|
||||
import { t } from '@grafana/i18n';
|
||||
@@ -88,11 +85,10 @@ export class UnifiedSearcher implements GrafanaSearcher {
|
||||
fieldSelector: `metadata.name=${name}`,
|
||||
})
|
||||
);
|
||||
const items = result.data.items;
|
||||
starsIds = items?.length
|
||||
? items[0].spec.resource.find(({ group, kind }) => group === DASHBOARD_API_GROUP && kind === 'Dashboard')
|
||||
?.names || []
|
||||
: [];
|
||||
starsIds =
|
||||
result.data.items?.[0].spec.resource.find(
|
||||
(info) => info.group === 'dashboard.grafana.app' && info.kind === 'Dashboard'
|
||||
)?.names || [];
|
||||
} else {
|
||||
starsIds = await dispatch(legacyUserAPI.endpoints.getStars.initiate()).unwrap();
|
||||
}
|
||||
@@ -335,7 +331,7 @@ export class UnifiedSearcher implements GrafanaSearcher {
|
||||
}
|
||||
|
||||
if (query.deleted) {
|
||||
uri = `${getAPIBaseURL(DASHBOARD_API_GROUP, 'v1beta1')}/dashboards/?labelSelector=grafana.app/get-trash=true`;
|
||||
uri = `${getAPIBaseURL('dashboard.grafana.app', 'v1beta1')}/dashboards/?labelSelector=grafana.app/get-trash=true`;
|
||||
}
|
||||
return uri;
|
||||
}
|
||||
|
||||
@@ -108,7 +108,7 @@ export interface GrafanaManagedContactPoint {
|
||||
/** If parsed from k8s API, we'll have an ID property */
|
||||
id?: string;
|
||||
metadata?: IoK8SApimachineryPkgApisMetaV1ObjectMeta;
|
||||
provenance?: string;
|
||||
provisioned?: boolean;
|
||||
grafana_managed_receiver_configs?: GrafanaManagedReceiverConfig[];
|
||||
}
|
||||
|
||||
@@ -148,7 +148,7 @@ export type Route = {
|
||||
provenance?: string;
|
||||
/** this is used to add additional metadata to the routes without interfering with original route definition (symbols aren't iterable) */
|
||||
[ROUTES_META_SYMBOL]?: {
|
||||
provenance?: string;
|
||||
provisioned?: boolean;
|
||||
resourceVersion?: string;
|
||||
name?: string;
|
||||
};
|
||||
|
||||
-247
@@ -1,247 +0,0 @@
|
||||
import { render, screen, waitFor, cleanup } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
|
||||
import { CoreApp, LoadingState, PanelData } from '@grafana/data';
|
||||
import { config, reportInteraction } from '@grafana/runtime';
|
||||
|
||||
import { AzureQueryType, LogsEditorMode } from '../../dataquery.gen';
|
||||
import { selectors } from '../../e2e/selectors';
|
||||
import createMockQuery from '../../mocks/query';
|
||||
import { AzureMonitorQuery } from '../../types/query';
|
||||
import { selectOptionInTest } from '../../utils/testUtils';
|
||||
|
||||
import { QueryHeader } from './QueryHeader';
|
||||
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...jest.requireActual('@grafana/runtime'),
|
||||
reportInteraction: jest.fn(),
|
||||
}));
|
||||
|
||||
describe('Azure Monitor QueryHeader', () => {
|
||||
const setAzureLogsCheatSheetModalOpen = jest.fn();
|
||||
const onRunQuery = jest.fn();
|
||||
|
||||
const renderComponent = (query: AzureMonitorQuery, props?: Partial<React.ComponentProps<typeof QueryHeader>>) => {
|
||||
return render(
|
||||
<QueryHeader
|
||||
query={query}
|
||||
onQueryChange={props?.onQueryChange ?? jest.fn()}
|
||||
setAzureLogsCheatSheetModalOpen={setAzureLogsCheatSheetModalOpen}
|
||||
data={props?.data}
|
||||
onRunQuery={onRunQuery}
|
||||
app={props?.app}
|
||||
/>
|
||||
);
|
||||
};
|
||||
|
||||
beforeEach(() => {
|
||||
config.featureToggles = {};
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
cleanup();
|
||||
jest.clearAllMocks();
|
||||
});
|
||||
|
||||
it('renders the service selector', async () => {
|
||||
const query = createMockQuery();
|
||||
|
||||
renderComponent(query);
|
||||
|
||||
expect(screen.getByTestId(selectors.components.queryEditor.header.select)).toBeInTheDocument();
|
||||
expect(screen.getByLabelText(/Service/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('changes query type when a new service is selected', async () => {
|
||||
const query = createMockQuery();
|
||||
const onQueryChange = jest.fn();
|
||||
|
||||
renderComponent(query, { onQueryChange });
|
||||
|
||||
const serviceSelect = await screen.findByLabelText(/Service/i);
|
||||
|
||||
await selectOptionInTest(serviceSelect, 'Logs');
|
||||
|
||||
await waitFor(() => {
|
||||
expect(onQueryChange).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
const lastCall = onQueryChange.mock.calls[onQueryChange.mock.calls.length - 1][0];
|
||||
|
||||
expect(lastCall).toEqual(
|
||||
expect.objectContaining({
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('initializes logs editor mode to Raw when a raw query exists and builder is enabled', async () => {
|
||||
config.featureToggles.azureMonitorLogsBuilderEditor = true;
|
||||
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
query: 'SecurityEvent | take 10',
|
||||
},
|
||||
};
|
||||
|
||||
const onQueryChange = jest.fn();
|
||||
|
||||
renderComponent(query, { onQueryChange });
|
||||
|
||||
await waitFor(() =>
|
||||
expect(onQueryChange).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
azureLogAnalytics: expect.objectContaining({
|
||||
mode: LogsEditorMode.Raw,
|
||||
}),
|
||||
})
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
it('renders the logs editor mode radio buttons when builder is enabled', async () => {
|
||||
config.featureToggles.azureMonitorLogsBuilderEditor = true;
|
||||
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
mode: LogsEditorMode.Builder,
|
||||
},
|
||||
};
|
||||
|
||||
renderComponent(query);
|
||||
|
||||
expect(screen.getByRole('radiogroup')).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByLabelText('Builder')).toBeInTheDocument();
|
||||
expect(screen.getByLabelText('KQL')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows the kick start button when in Logs + Raw mode', async () => {
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
mode: LogsEditorMode.Raw,
|
||||
},
|
||||
};
|
||||
|
||||
renderComponent(query);
|
||||
|
||||
expect(screen.getByRole('button', { name: /Kick start your query/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('opens the logs cheat sheet modal and reports interaction when kick start button is clicked', async () => {
|
||||
const user = userEvent.setup();
|
||||
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
mode: LogsEditorMode.Raw,
|
||||
},
|
||||
};
|
||||
|
||||
renderComponent(query);
|
||||
|
||||
await user.click(screen.getByRole('button', { name: /Kick start your query/i }));
|
||||
|
||||
expect(setAzureLogsCheatSheetModalOpen).toHaveBeenCalled();
|
||||
expect(reportInteraction).toHaveBeenCalledWith(
|
||||
'grafana_azure_logs_query_patterns_opened',
|
||||
expect.objectContaining({
|
||||
version: 'v2',
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it('shows confirmation modal when switching from Raw to Builder with existing KQL', async () => {
|
||||
const user = userEvent.setup();
|
||||
config.featureToggles.azureMonitorLogsBuilderEditor = true;
|
||||
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
mode: LogsEditorMode.Raw,
|
||||
query: 'SecurityEvent | take 10',
|
||||
},
|
||||
};
|
||||
|
||||
renderComponent(query);
|
||||
|
||||
await user.click(screen.getByLabelText('Builder'));
|
||||
|
||||
expect(screen.getByText(/Switch editor mode\?/i)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('applies mode change when confirming the switch modal', async () => {
|
||||
const user = userEvent.setup();
|
||||
config.featureToggles.azureMonitorLogsBuilderEditor = true;
|
||||
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
mode: LogsEditorMode.Raw,
|
||||
query: 'SecurityEvent | take 10',
|
||||
},
|
||||
};
|
||||
|
||||
const onQueryChange = jest.fn();
|
||||
|
||||
renderComponent(query, { onQueryChange });
|
||||
|
||||
await user.click(screen.getByLabelText('Builder'));
|
||||
await user.click(screen.getByText(/Switch to Builder/i));
|
||||
|
||||
await waitFor(() =>
|
||||
expect(onQueryChange).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
azureLogAnalytics: expect.objectContaining({
|
||||
mode: LogsEditorMode.Builder,
|
||||
query: undefined,
|
||||
}),
|
||||
})
|
||||
)
|
||||
);
|
||||
});
|
||||
|
||||
it('renders the Run query button in Builder mode when not in Explore', async () => {
|
||||
config.featureToggles.azureMonitorLogsBuilderEditor = true;
|
||||
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
mode: LogsEditorMode.Builder,
|
||||
},
|
||||
};
|
||||
|
||||
renderComponent(query, { app: CoreApp.Dashboard });
|
||||
|
||||
expect(screen.getByTestId(selectors.components.queryEditor.logsQueryEditor.runQuery.button)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('disables the Run query button spinner while loading', async () => {
|
||||
config.featureToggles.azureMonitorLogsBuilderEditor = true;
|
||||
|
||||
const query: AzureMonitorQuery = {
|
||||
...createMockQuery(),
|
||||
queryType: AzureQueryType.LogAnalytics,
|
||||
azureLogAnalytics: {
|
||||
mode: LogsEditorMode.Builder,
|
||||
},
|
||||
};
|
||||
|
||||
renderComponent(query, {
|
||||
app: CoreApp.Dashboard,
|
||||
data: { state: LoadingState.Loading } as PanelData,
|
||||
});
|
||||
|
||||
expect(screen.getByTestId(selectors.components.queryEditor.logsQueryEditor.runQuery.button)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
@@ -84,9 +84,12 @@ export const QueryHeader = ({
|
||||
}
|
||||
|
||||
const goingToBuilder = newMode === LogsEditorMode.Builder;
|
||||
const hasRawKql = !!query.azureLogAnalytics?.query;
|
||||
const goingToRaw = newMode === LogsEditorMode.Raw;
|
||||
|
||||
if (goingToBuilder && hasRawKql) {
|
||||
const hasRawKql = !!query.azureLogAnalytics?.query;
|
||||
const hasBuilderQuery = !!query.azureLogAnalytics?.builderQuery;
|
||||
|
||||
if ((goingToBuilder && hasRawKql) || (goingToRaw && hasBuilderQuery)) {
|
||||
setPendingModeChange(newMode);
|
||||
setShowModeSwitchWarning(true);
|
||||
} else {
|
||||
@@ -100,7 +103,7 @@ export const QueryHeader = ({
|
||||
azureLogAnalytics: {
|
||||
...query.azureLogAnalytics,
|
||||
mode,
|
||||
query: mode === LogsEditorMode.Builder ? undefined : query.azureLogAnalytics?.query,
|
||||
query: '',
|
||||
builderQuery: mode === LogsEditorMode.Raw ? undefined : query.azureLogAnalytics?.builderQuery,
|
||||
dashboardTime: mode === LogsEditorMode.Builder ? true : undefined,
|
||||
},
|
||||
@@ -120,7 +123,10 @@ export const QueryHeader = ({
|
||||
'components.query-header.body-switching-to-builder',
|
||||
'Switching to Builder will discard your current KQL query and clear the KQL editor. Are you sure?'
|
||||
)
|
||||
: null
|
||||
: t(
|
||||
'components.query-header.body-switching-to-kql',
|
||||
'Switching to KQL will discard your current builder settings. Are you sure?'
|
||||
)
|
||||
}
|
||||
confirmText={t('components.query-header.confirmText-switch-to', 'Switch to {{newMode}}', {
|
||||
newMode: pendingModeChange === LogsEditorMode.Builder ? 'Builder' : 'KQL',
|
||||
|
||||
+1
@@ -204,6 +204,7 @@
|
||||
"query-header": {
|
||||
"aria-label-kick-start": "Azure logs kick start your query button",
|
||||
"body-switching-to-builder": "Switching to Builder will discard your current KQL query and clear the KQL editor. Are you sure?",
|
||||
"body-switching-to-kql": "Switching to KQL will discard your current builder settings. Are you sure?",
|
||||
"button-kick-start-your-query": "Kick start your query",
|
||||
"button-run-query": "Run query",
|
||||
"confirmText-switch-to": "Switch to {{newMode}}",
|
||||
|
||||
+6
-25
@@ -1,10 +1,10 @@
|
||||
import { memo } from 'react';
|
||||
|
||||
import { DataSourcePluginOptionsEditorProps, updateDatasourcePluginJsonDataOption } from '@grafana/data';
|
||||
import { DataSourcePluginOptionsEditorProps } from '@grafana/data';
|
||||
import { ConnectionConfig } from '@grafana/google-sdk';
|
||||
import { ConfigSection, DataSourceDescription } from '@grafana/plugin-ui';
|
||||
import { config, reportInteraction } from '@grafana/runtime';
|
||||
import { Divider, Field, Input, SecureSocksProxySettings, Stack } from '@grafana/ui';
|
||||
import { reportInteraction, config } from '@grafana/runtime';
|
||||
import { Divider, SecureSocksProxySettings } from '@grafana/ui';
|
||||
|
||||
import { CloudMonitoringOptions, CloudMonitoringSecureJsonData } from '../../types/types';
|
||||
|
||||
@@ -36,33 +36,14 @@ export const ConfigEditor = memo(({ options, onOptionsChange }: Props) => {
|
||||
<Divider />
|
||||
<ConfigSection
|
||||
title="Additional settings"
|
||||
description="Additional settings are optional settings that can be configured for more control over your data source. This includes Secure Socks Proxy and Universe Domain."
|
||||
description="Additional settings are optional settings that can be configured for more control over your data source. This includes Secure Socks Proxy."
|
||||
isCollapsible
|
||||
isInitiallyOpen={
|
||||
options.jsonData.enableSecureSocksProxy !== undefined || options.jsonData.universeDomain !== undefined
|
||||
}
|
||||
isInitiallyOpen={options.jsonData.enableSecureSocksProxy !== undefined}
|
||||
>
|
||||
<Stack direction={'column'}>
|
||||
<Field noMargin label="Universe Domain">
|
||||
<Input
|
||||
width={50}
|
||||
value={options.jsonData.universeDomain}
|
||||
onChange={(event) =>
|
||||
updateDatasourcePluginJsonDataOption(
|
||||
{ options, onOptionsChange },
|
||||
'universeDomain',
|
||||
event.currentTarget.value
|
||||
)
|
||||
}
|
||||
placeholder="googleapis.com"
|
||||
></Input>
|
||||
</Field>
|
||||
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
|
||||
</Stack>
|
||||
<SecureSocksProxySettings options={options} onOptionsChange={onOptionsChange} />
|
||||
</ConfigSection>
|
||||
</>
|
||||
)}
|
||||
<Divider />
|
||||
</>
|
||||
);
|
||||
});
|
||||
|
||||
@@ -38,7 +38,6 @@ export interface Aggregation {
|
||||
export interface CloudMonitoringOptions extends DataSourceOptions {
|
||||
gceDefaultProject?: string;
|
||||
enableSecureSocksProxy?: boolean;
|
||||
universeDomain?: string;
|
||||
}
|
||||
|
||||
export interface CloudMonitoringSecureJsonData extends DataSourceSecureJsonData {}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user