Compare commits
22 Commits
colin-stua
...
sriram/SQL
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
2f0764d1a0 | ||
|
|
05ad955c7b | ||
|
|
2b82490e88 | ||
|
|
c5bff2df50 | ||
|
|
c621dbc325 | ||
|
|
ecd3f0b490 | ||
|
|
2efcc88e62 | ||
|
|
6fea614106 | ||
|
|
c0c05a65fd | ||
|
|
41ed2aeb23 | ||
|
|
9e9233051e | ||
|
|
a5faedbe68 | ||
|
|
6fee200327 | ||
|
|
0b9046be15 | ||
|
|
20eeff3e7b | ||
|
|
ec55871b9b | ||
|
|
0bfcc55411 | ||
|
|
016301c304 | ||
|
|
5e05289bc8 | ||
|
|
be734e970e | ||
|
|
05681efee3 | ||
|
|
844a7332b9 |
@@ -129,7 +129,7 @@ DashboardLink: {
|
||||
placement?: DashboardLinkPlacement
|
||||
}
|
||||
|
||||
// Dashboard Link placement. Defines where the link should be displayed.
|
||||
// Dashboard Link placement. Defines where the link should be displayed.
|
||||
// - "inControlsMenu" renders the link in bottom part of the dashboard controls dropdown menu
|
||||
DashboardLinkPlacement: "inControlsMenu"
|
||||
|
||||
@@ -932,6 +932,7 @@ CustomVariableSpec: {
|
||||
skipUrlSync: bool | *false
|
||||
description?: string
|
||||
allowCustomValue: bool | *true
|
||||
valuesFormat?: "csv" | "json"
|
||||
}
|
||||
|
||||
// Custom variable kind
|
||||
|
||||
@@ -935,6 +935,7 @@ CustomVariableSpec: {
|
||||
skipUrlSync: bool | *false
|
||||
description?: string
|
||||
allowCustomValue: bool | *true
|
||||
valuesFormat?: "csv" | "json"
|
||||
}
|
||||
|
||||
// Custom variable kind
|
||||
|
||||
@@ -222,8 +222,10 @@ lineage: schemas: [{
|
||||
// Optional field, if you want to extract part of a series name or metric node segment.
|
||||
// Named capture groups can be used to separate the display text and value.
|
||||
regex?: string
|
||||
// Determine whether regex applies to variable value or display text
|
||||
regexApplyTo?: #VariableRegexApplyTo
|
||||
// Optional, indicates whether a custom type variable uses CSV or JSON to define its values
|
||||
valuesFormat?: "csv" | "json" | *"csv"
|
||||
// Determine whether regex applies to variable value or display text
|
||||
regexApplyTo?: #VariableRegexApplyTo
|
||||
// Additional static options for query variable
|
||||
staticOptions?: [...#VariableOption]
|
||||
// Ordering of static options in relation to options returned from data source for query variable
|
||||
|
||||
@@ -222,8 +222,10 @@ lineage: schemas: [{
|
||||
// Optional field, if you want to extract part of a series name or metric node segment.
|
||||
// Named capture groups can be used to separate the display text and value.
|
||||
regex?: string
|
||||
// Determine whether regex applies to variable value or display text
|
||||
regexApplyTo?: #VariableRegexApplyTo
|
||||
// Optional, indicates whether a custom type variable uses CSV or JSON to define its values
|
||||
valuesFormat?: "csv" | "json" | *"csv"
|
||||
// Determine whether regex applies to variable value or display text
|
||||
regexApplyTo?: #VariableRegexApplyTo
|
||||
// Additional static options for query variable
|
||||
staticOptions?: [...#VariableOption]
|
||||
// Ordering of static options in relation to options returned from data source for query variable
|
||||
|
||||
@@ -133,7 +133,7 @@ DashboardLink: {
|
||||
placement?: DashboardLinkPlacement
|
||||
}
|
||||
|
||||
// Dashboard Link placement. Defines where the link should be displayed.
|
||||
// Dashboard Link placement. Defines where the link should be displayed.
|
||||
// - "inControlsMenu" renders the link in bottom part of the dashboard controls dropdown menu
|
||||
DashboardLinkPlacement: "inControlsMenu"
|
||||
|
||||
@@ -936,6 +936,7 @@ CustomVariableSpec: {
|
||||
skipUrlSync: bool | *false
|
||||
description?: string
|
||||
allowCustomValue: bool | *true
|
||||
valuesFormat?: "csv" | "json"
|
||||
}
|
||||
|
||||
// Custom variable kind
|
||||
|
||||
@@ -1703,18 +1703,19 @@ func NewDashboardCustomVariableKind() *DashboardCustomVariableKind {
|
||||
// Custom variable specification
|
||||
// +k8s:openapi-gen=true
|
||||
type DashboardCustomVariableSpec struct {
|
||||
Name string `json:"name"`
|
||||
Query string `json:"query"`
|
||||
Current DashboardVariableOption `json:"current"`
|
||||
Options []DashboardVariableOption `json:"options"`
|
||||
Multi bool `json:"multi"`
|
||||
IncludeAll bool `json:"includeAll"`
|
||||
AllValue *string `json:"allValue,omitempty"`
|
||||
Label *string `json:"label,omitempty"`
|
||||
Hide DashboardVariableHide `json:"hide"`
|
||||
SkipUrlSync bool `json:"skipUrlSync"`
|
||||
Description *string `json:"description,omitempty"`
|
||||
AllowCustomValue bool `json:"allowCustomValue"`
|
||||
Name string `json:"name"`
|
||||
Query string `json:"query"`
|
||||
Current DashboardVariableOption `json:"current"`
|
||||
Options []DashboardVariableOption `json:"options"`
|
||||
Multi bool `json:"multi"`
|
||||
IncludeAll bool `json:"includeAll"`
|
||||
AllValue *string `json:"allValue,omitempty"`
|
||||
Label *string `json:"label,omitempty"`
|
||||
Hide DashboardVariableHide `json:"hide"`
|
||||
SkipUrlSync bool `json:"skipUrlSync"`
|
||||
Description *string `json:"description,omitempty"`
|
||||
AllowCustomValue bool `json:"allowCustomValue"`
|
||||
ValuesFormat *DashboardCustomVariableSpecValuesFormat `json:"valuesFormat,omitempty"`
|
||||
}
|
||||
|
||||
// NewDashboardCustomVariableSpec creates a new DashboardCustomVariableSpec object.
|
||||
@@ -2098,6 +2099,14 @@ const (
|
||||
DashboardQueryVariableSpecStaticOptionsOrderSorted DashboardQueryVariableSpecStaticOptionsOrder = "sorted"
|
||||
)
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type DashboardCustomVariableSpecValuesFormat string
|
||||
|
||||
const (
|
||||
DashboardCustomVariableSpecValuesFormatCsv DashboardCustomVariableSpecValuesFormat = "csv"
|
||||
DashboardCustomVariableSpecValuesFormatJson DashboardCustomVariableSpecValuesFormat = "json"
|
||||
)
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type DashboardPanelKindOrLibraryPanelKind struct {
|
||||
PanelKind *DashboardPanelKind `json:"PanelKind,omitempty"`
|
||||
|
||||
@@ -1548,6 +1548,12 @@ func schema_pkg_apis_dashboard_v2alpha1_DashboardCustomVariableSpec(ref common.R
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"valuesFormat": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Required: []string{"name", "query", "current", "options", "multi", "includeAll", "hide", "skipUrlSync", "allowCustomValue"},
|
||||
},
|
||||
|
||||
@@ -939,6 +939,7 @@ CustomVariableSpec: {
|
||||
skipUrlSync: bool | *false
|
||||
description?: string
|
||||
allowCustomValue: bool | *true
|
||||
valuesFormat?: "csv" | "json"
|
||||
}
|
||||
|
||||
// Custom variable kind
|
||||
|
||||
@@ -1707,18 +1707,19 @@ func NewDashboardCustomVariableKind() *DashboardCustomVariableKind {
|
||||
// Custom variable specification
|
||||
// +k8s:openapi-gen=true
|
||||
type DashboardCustomVariableSpec struct {
|
||||
Name string `json:"name"`
|
||||
Query string `json:"query"`
|
||||
Current DashboardVariableOption `json:"current"`
|
||||
Options []DashboardVariableOption `json:"options"`
|
||||
Multi bool `json:"multi"`
|
||||
IncludeAll bool `json:"includeAll"`
|
||||
AllValue *string `json:"allValue,omitempty"`
|
||||
Label *string `json:"label,omitempty"`
|
||||
Hide DashboardVariableHide `json:"hide"`
|
||||
SkipUrlSync bool `json:"skipUrlSync"`
|
||||
Description *string `json:"description,omitempty"`
|
||||
AllowCustomValue bool `json:"allowCustomValue"`
|
||||
Name string `json:"name"`
|
||||
Query string `json:"query"`
|
||||
Current DashboardVariableOption `json:"current"`
|
||||
Options []DashboardVariableOption `json:"options"`
|
||||
Multi bool `json:"multi"`
|
||||
IncludeAll bool `json:"includeAll"`
|
||||
AllValue *string `json:"allValue,omitempty"`
|
||||
Label *string `json:"label,omitempty"`
|
||||
Hide DashboardVariableHide `json:"hide"`
|
||||
SkipUrlSync bool `json:"skipUrlSync"`
|
||||
Description *string `json:"description,omitempty"`
|
||||
AllowCustomValue bool `json:"allowCustomValue"`
|
||||
ValuesFormat *DashboardCustomVariableSpecValuesFormat `json:"valuesFormat,omitempty"`
|
||||
}
|
||||
|
||||
// NewDashboardCustomVariableSpec creates a new DashboardCustomVariableSpec object.
|
||||
@@ -2133,6 +2134,14 @@ const (
|
||||
DashboardQueryVariableSpecStaticOptionsOrderSorted DashboardQueryVariableSpecStaticOptionsOrder = "sorted"
|
||||
)
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type DashboardCustomVariableSpecValuesFormat string
|
||||
|
||||
const (
|
||||
DashboardCustomVariableSpecValuesFormatCsv DashboardCustomVariableSpecValuesFormat = "csv"
|
||||
DashboardCustomVariableSpecValuesFormatJson DashboardCustomVariableSpecValuesFormat = "json"
|
||||
)
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type DashboardPanelKindOrLibraryPanelKind struct {
|
||||
PanelKind *DashboardPanelKind `json:"PanelKind,omitempty"`
|
||||
|
||||
@@ -1560,6 +1560,12 @@ func schema_pkg_apis_dashboard_v2beta1_DashboardCustomVariableSpec(ref common.Re
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
"valuesFormat": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
Required: []string{"name", "query", "current", "options", "multi", "includeAll", "hide", "skipUrlSync", "allowCustomValue"},
|
||||
},
|
||||
|
||||
4
apps/dashboard/pkg/apis/dashboard_manifest.go
generated
4
apps/dashboard/pkg/apis/dashboard_manifest.go
generated
File diff suppressed because one or more lines are too long
@@ -1336,6 +1336,17 @@ func buildCustomVariable(varMap map[string]interface{}, commonProps CommonVariab
|
||||
customVar.Spec.AllValue = &allValue
|
||||
}
|
||||
|
||||
if valuesFormat := schemaversion.GetStringValue(varMap, "valuesFormat"); valuesFormat != "" {
|
||||
switch valuesFormat {
|
||||
case string(dashv2alpha1.DashboardCustomVariableSpecValuesFormatJson):
|
||||
format := dashv2alpha1.DashboardCustomVariableSpecValuesFormatJson
|
||||
customVar.Spec.ValuesFormat = &format
|
||||
case string(dashv2alpha1.DashboardCustomVariableSpecValuesFormatCsv):
|
||||
format := dashv2alpha1.DashboardCustomVariableSpecValuesFormatCsv
|
||||
customVar.Spec.ValuesFormat = &format
|
||||
}
|
||||
}
|
||||
|
||||
return dashv2alpha1.DashboardVariableKind{
|
||||
CustomVariableKind: customVar,
|
||||
}, nil
|
||||
|
||||
@@ -685,6 +685,7 @@ func convertVariable_V2alpha1_to_V2beta1(in *dashv2alpha1.DashboardVariableKind,
|
||||
SkipUrlSync: in.CustomVariableKind.Spec.SkipUrlSync,
|
||||
Description: in.CustomVariableKind.Spec.Description,
|
||||
AllowCustomValue: in.CustomVariableKind.Spec.AllowCustomValue,
|
||||
ValuesFormat: convertCustomValuesFormat_V2alpha1_to_V2beta1(in.CustomVariableKind.Spec.ValuesFormat),
|
||||
},
|
||||
}
|
||||
}
|
||||
@@ -758,6 +759,23 @@ func convertVariable_V2alpha1_to_V2beta1(in *dashv2alpha1.DashboardVariableKind,
|
||||
return nil
|
||||
}
|
||||
|
||||
func convertCustomValuesFormat_V2alpha1_to_V2beta1(in *dashv2alpha1.DashboardCustomVariableSpecValuesFormat) *dashv2beta1.DashboardCustomVariableSpecValuesFormat {
|
||||
if in == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch *in {
|
||||
case dashv2alpha1.DashboardCustomVariableSpecValuesFormatJson:
|
||||
v := dashv2beta1.DashboardCustomVariableSpecValuesFormatJson
|
||||
return &v
|
||||
case dashv2alpha1.DashboardCustomVariableSpecValuesFormatCsv:
|
||||
v := dashv2beta1.DashboardCustomVariableSpecValuesFormatCsv
|
||||
return &v
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
func convertQueryVariableSpec_V2alpha1_to_V2beta1(in *dashv2alpha1.DashboardQueryVariableSpec, out *dashv2beta1.DashboardQueryVariableSpec, scope conversion.Scope) error {
|
||||
out.Name = in.Name
|
||||
out.Current = convertVariableOption_V2alpha1_to_V2beta1(in.Current)
|
||||
|
||||
@@ -82,8 +82,8 @@ cloud.google.com/go/storage v1.55.0 h1:NESjdAToN9u1tmhVqhXCaCwYBuvEhZLLv0gBr+2zn
|
||||
cloud.google.com/go/storage v1.55.0/go.mod h1:ztSmTTwzsdXe5syLVS0YsbFxXuvEmEyZj7v7zChEmuY=
|
||||
cloud.google.com/go/trace v1.11.6 h1:2O2zjPzqPYAHrn3OKl029qlqG6W8ZdYaOWRyr8NgMT4=
|
||||
cloud.google.com/go/trace v1.11.6/go.mod h1:GA855OeDEBiBMzcckLPE2kDunIpC72N+Pq8WFieFjnI=
|
||||
connectrpc.com/connect v1.18.1 h1:PAg7CjSAGvscaf6YZKUefjoih5Z/qYkyaTrBW8xvYPw=
|
||||
connectrpc.com/connect v1.18.1/go.mod h1:0292hj1rnx8oFrStN7cB4jjVBeqs+Yx5yDIC2prWDO8=
|
||||
connectrpc.com/connect v1.19.1 h1:R5M57z05+90EfEvCY1b7hBxDVOUl45PrtXtAV2fOC14=
|
||||
connectrpc.com/connect v1.19.1/go.mod h1:tN20fjdGlewnSFeZxLKb0xwIZ6ozc3OQs2hTXy4du9w=
|
||||
cuelabs.dev/go/oci/ociregistry v0.0.0-20251212221603-3adeb8663819 h1:Zh+Ur3OsoWpvALHPLT45nOekHkgOt+IOfutBbPqM17I=
|
||||
cuelabs.dev/go/oci/ociregistry v0.0.0-20251212221603-3adeb8663819/go.mod h1:WjmQxb+W6nVNCgj8nXrF24lIz95AHwnSl36tpjDZSU8=
|
||||
cuelang.org/go v0.11.1 h1:pV+49MX1mmvDm8Qh3Za3M786cty8VKPWzQ1Ho4gZRP0=
|
||||
@@ -749,6 +749,8 @@ github.com/google/cel-go v0.26.1 h1:iPbVVEdkhTX++hpe3lzSk7D3G3QSYqLGoHOcEio+UXQ=
|
||||
github.com/google/cel-go v0.26.1/go.mod h1:A9O8OU9rdvrK5MQyrqfIxo1a0u4g3sF8KB6PUIaryMM=
|
||||
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
|
||||
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/gnostic v0.7.1 h1:t5Kc7j/8kYr8t2u11rykRrPPovlEMG4+xdc/SpekATs=
|
||||
github.com/google/gnostic v0.7.1/go.mod h1:KSw6sxnxEBFM8jLPfJd46xZP+yQcfE8XkiqfZx5zR28=
|
||||
github.com/google/gnostic-models v0.7.1 h1:SisTfuFKJSKM5CPZkffwi6coztzzeYUhc3v4yxLWH8c=
|
||||
github.com/google/gnostic-models v0.7.1/go.mod h1:whL5G0m6dmc5cPxKc5bdKdEN3UjI7OUGxBlw57miDrQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
@@ -887,8 +889,8 @@ github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604
|
||||
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604/go.mod h1:O/QP1BCm0HHIzbKvgMzqb5sSyH88rzkFk84F4TfJjBU=
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 h1:c1Us8i6eSmkW+Ez05d3co8kasnuOY813tbMN8i/a3Og=
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.9/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20250415190842-3ff7247547ae h1:35W3Wjp9KWnSoV/DuymmyIj5aHE0CYlDQ5m2KeXUPAc=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20250415190842-3ff7247547ae/go.mod h1:6CJ1uXmLZ13ufpO9xE4pST+DyaBt0uszzrV0YnoaVLQ=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20251118081820-ace37f973a0f h1:fTlIj5n4x5dU63XHItug7GLjtnaeJdPqBlqg4zlABq0=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20251118081820-ace37f973a0f/go.mod h1:VBNcIhunCZsJ3/mcYx+j7uFf0P/108eiWa+8+Z9ll3o=
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc h1:GN2Lv3MGO7AS6PrRoT6yV5+wkrOpcszoIsO4+4ds248=
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc/go.mod h1:+JKpmjMGhpgPL+rXZ5nsZieVzvarn86asRlBg4uNGnk=
|
||||
github.com/grafana/sqlds/v5 v5.0.3 h1:+yUMUxfa0WANQsmS9xtTFSRX1Q55Iv1B9EjlrW4VlBU=
|
||||
|
||||
@@ -217,6 +217,13 @@ metaV0Alpha1: {
|
||||
title: string
|
||||
description?: string
|
||||
}]
|
||||
// +listType=atomic
|
||||
addedFunctions?: [...{
|
||||
// +listType=set
|
||||
targets: [...string]
|
||||
title: string
|
||||
description?: string
|
||||
}]
|
||||
// +listType=set
|
||||
// +listMapKey=id
|
||||
exposedComponents?: [...{
|
||||
|
||||
@@ -193,6 +193,8 @@ type MetaExtensions struct {
|
||||
AddedComponents []MetaV0alpha1ExtensionsAddedComponents `json:"addedComponents,omitempty"`
|
||||
// +listType=atomic
|
||||
AddedLinks []MetaV0alpha1ExtensionsAddedLinks `json:"addedLinks,omitempty"`
|
||||
// +listType=atomic
|
||||
AddedFunctions []MetaV0alpha1ExtensionsAddedFunctions `json:"addedFunctions,omitempty"`
|
||||
// +listType=set
|
||||
// +listMapKey=id
|
||||
ExposedComponents []MetaV0alpha1ExtensionsExposedComponents `json:"exposedComponents,omitempty"`
|
||||
@@ -396,6 +398,21 @@ func NewMetaV0alpha1ExtensionsAddedLinks() *MetaV0alpha1ExtensionsAddedLinks {
|
||||
}
|
||||
}
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type MetaV0alpha1ExtensionsAddedFunctions struct {
|
||||
// +listType=set
|
||||
Targets []string `json:"targets"`
|
||||
Title string `json:"title"`
|
||||
Description *string `json:"description,omitempty"`
|
||||
}
|
||||
|
||||
// NewMetaV0alpha1ExtensionsAddedFunctions creates a new MetaV0alpha1ExtensionsAddedFunctions object.
|
||||
func NewMetaV0alpha1ExtensionsAddedFunctions() *MetaV0alpha1ExtensionsAddedFunctions {
|
||||
return &MetaV0alpha1ExtensionsAddedFunctions{
|
||||
Targets: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type MetaV0alpha1ExtensionsExposedComponents struct {
|
||||
Id string `json:"id"`
|
||||
|
||||
2
apps/plugins/pkg/apis/plugins_manifest.go
generated
2
apps/plugins/pkg/apis/plugins_manifest.go
generated
File diff suppressed because one or more lines are too long
@@ -367,7 +367,8 @@ func jsonDataToMetaJSONData(jsonData plugins.JSONData) pluginsv0alpha1.MetaJSOND
|
||||
|
||||
// Map Extensions
|
||||
if len(jsonData.Extensions.AddedLinks) > 0 || len(jsonData.Extensions.AddedComponents) > 0 ||
|
||||
len(jsonData.Extensions.ExposedComponents) > 0 || len(jsonData.Extensions.ExtensionPoints) > 0 {
|
||||
len(jsonData.Extensions.ExposedComponents) > 0 || len(jsonData.Extensions.ExtensionPoints) > 0 ||
|
||||
len(jsonData.Extensions.AddedFunctions) > 0 {
|
||||
extensions := &pluginsv0alpha1.MetaExtensions{}
|
||||
|
||||
if len(jsonData.Extensions.AddedLinks) > 0 {
|
||||
@@ -398,6 +399,20 @@ func jsonDataToMetaJSONData(jsonData plugins.JSONData) pluginsv0alpha1.MetaJSOND
|
||||
}
|
||||
}
|
||||
|
||||
if len(jsonData.Extensions.AddedFunctions) > 0 {
|
||||
extensions.AddedFunctions = make([]pluginsv0alpha1.MetaV0alpha1ExtensionsAddedFunctions, 0, len(jsonData.Extensions.AddedFunctions))
|
||||
for _, comp := range jsonData.Extensions.AddedFunctions {
|
||||
v0Comp := pluginsv0alpha1.MetaV0alpha1ExtensionsAddedFunctions{
|
||||
Targets: comp.Targets,
|
||||
Title: comp.Title,
|
||||
}
|
||||
if comp.Description != "" {
|
||||
v0Comp.Description = &comp.Description
|
||||
}
|
||||
extensions.AddedFunctions = append(extensions.AddedFunctions, v0Comp)
|
||||
}
|
||||
}
|
||||
|
||||
if len(jsonData.Extensions.ExposedComponents) > 0 {
|
||||
extensions.ExposedComponents = make([]pluginsv0alpha1.MetaV0alpha1ExtensionsExposedComponents, 0, len(jsonData.Extensions.ExposedComponents))
|
||||
for _, comp := range jsonData.Extensions.ExposedComponents {
|
||||
|
||||
@@ -48,6 +48,14 @@ Recording rules can be helpful in various scenarios, such as:
|
||||
|
||||
The evaluation group of the recording rule determines how often the metric is pre-computed.
|
||||
|
||||
## Recommendations
|
||||
|
||||
- **Use frequent evaluation intervals**. Set frequent evaluation intervals for recording rules. Long intervals, such as an hour, can cause the recorded metric to be stale and lead to misaligned alert rule evaluations, especially when combined with a long pending period.
|
||||
- **Align alert evaluation with recording frequency**. The evaluation interval of an alert rule that depends on a recorded metric should be aligned with the recording rule's interval. If a recording rule runs every 3 minutes, the alert rule should also be evaluated at a similar frequency to ensure it acts on fresh data.
|
||||
- **Use `_over_time` functions for instant queries**. Since all alert rules are ultimately executed as an instant query, you can use functions like `max_over_time(my_metric[5m])` as an instant query. This allows you to get an aggregated value over a period without using a range query and a reduce expression.
|
||||
|
||||
## Types of recording rules
|
||||
|
||||
Similar to alert rules, Grafana supports two types of recording rules:
|
||||
|
||||
1. [Grafana-managed recording rules](ref:grafana-managed-recording-rules), which can query any Grafana data source supported by alerting. It's the recommended option.
|
||||
|
||||
@@ -1,57 +0,0 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/
|
||||
description: This section provides a set of guides for useful alerting practices and recommendations
|
||||
keywords:
|
||||
- grafana
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Best practices
|
||||
title: Grafana Alerting best practices
|
||||
weight: 170
|
||||
---
|
||||
|
||||
# Grafana Alerting best practices
|
||||
|
||||
This section provides a set of guides and examples of best practices for Grafana Alerting. Here you can learn more about how to handle common alert management problems and you can see examples of more advanced usage of Grafana Alerting.
|
||||
|
||||
{{< section >}}
|
||||
|
||||
Designing and configuring an alert management set up that works takes time. Here are some additional tips on how to create an effective alert management set up:
|
||||
|
||||
{{< shared id="alert-planning-fundamentals" >}}
|
||||
|
||||
**Which are the key metrics for your business that you want to monitor and alert on?**
|
||||
|
||||
- Find events that are important to know about and not so trivial or frequent that recipients ignore them.
|
||||
- Alerts should only be created for big events that require immediate attention or intervention.
|
||||
- Consider quality over quantity.
|
||||
|
||||
**How do you want to organize your alerts and notifications?**
|
||||
|
||||
- Be selective about who you set to receive alerts. Consider sending them to the right teams, whoever is on call, and the specific channels.
|
||||
- Think carefully about priority and severity levels.
|
||||
- Automate as far as possible provisioning Alerting resources with the API or Terraform.
|
||||
|
||||
**Which information should you include in notifications?**
|
||||
|
||||
- Consider who the alert receivers and responders are.
|
||||
- Share information that helps responders identify and address potential issues.
|
||||
- Link alerts to dashboards to guide responders on which data to investigate.
|
||||
|
||||
**How can you reduce alert fatigue?**
|
||||
|
||||
- Avoid noisy, unnecessary alerts by using silences, mute timings, or pausing alert rule evaluation.
|
||||
- Continually tune your alert rules to review effectiveness. Remove alert rules to avoid duplication or ineffective alerts.
|
||||
- Continually review your thresholds and evaluation rules.
|
||||
|
||||
**How should you configure recording rules?**
|
||||
|
||||
- Use frequent evaluation intervals. It is recommended to set a frequent evaluation interval for recording rules. Long intervals, such as an hour, can cause the recorded metric to be stale and lead to misaligned alert rule evaluations, especially when combined with a long pending period.
|
||||
- Understand query types. Grafana Alerting uses both **Instant** and **Range** queries. Instant queries fetch a single data point, while Range queries fetch a series of data points over time. When using a Range query in an alert condition, you must use a Reduce expression to aggregate the series into a single value.
|
||||
- Align alert evaluation with recording frequency. The evaluation interval of an alert rule that depends on a recorded metric should be aligned with the recording rule's interval. If a recording rule runs every 3 minutes, the alert rule should also be evaluated at a similar frequency to ensure it acts on fresh data.
|
||||
- Use `_over_time` functions for instant queries. Since all alert rules are ultimately executed as an instant query, you can use functions like `max_over_time(my_metric[1h])` as an instant query. This allows you to get an aggregated value over a period without using a range query and a reduce expression.
|
||||
|
||||
{{< /shared >}}
|
||||
22
docs/sources/alerting/examples/_index.md
Normal file
22
docs/sources/alerting/examples/_index.md
Normal file
@@ -0,0 +1,22 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/
|
||||
description: This section provides a set of guides for useful alerting practices and recommendations
|
||||
keywords:
|
||||
- grafana
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Examples
|
||||
title: Examples
|
||||
weight: 180
|
||||
---
|
||||
|
||||
# Examples
|
||||
|
||||
This section provides practical examples that show how to work with different types of alerting data, apply alert design patterns, reuse alert logic, and take advantage of specific Grafana Alerting features.
|
||||
|
||||
This section includes:
|
||||
|
||||
{{< section >}}
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/dynamic-labels
|
||||
aliases:
|
||||
- ../best-practices/dynamic-labels/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/dynamic-labels/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/dynamic-labels
|
||||
description: This example shows how to define dynamic labels based on query values, along with important behavior to keep in mind when using them.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -10,7 +12,7 @@ labels:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Examples of dynamic labels
|
||||
menuTitle: Dynamic labels
|
||||
title: Example of dynamic labels in alert instances
|
||||
weight: 1104
|
||||
refs:
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/dynamic-thresholds
|
||||
aliases:
|
||||
- ../best-practices/dynamic-thresholds/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/dynamic-thresholds/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/dynamic-thresholds
|
||||
description: This example shows how to use a distinct threshold value per dimension using multi-dimensional alerts and a Math expression.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -10,7 +12,7 @@ labels:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Examples of dynamic thresholds
|
||||
menuTitle: Dynamic thresholds
|
||||
title: Example of dynamic thresholds per dimension
|
||||
weight: 1105
|
||||
refs:
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/high-cardinality-alerts/
|
||||
aliases:
|
||||
- ../best-practices/high-cardinality-alerts/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/high-cardinality-alerts/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/high-cardinality-alerts/
|
||||
description: Learn how to detect and alert on high-cardinality metrics that can overload your metrics backend and increase observability costs.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -8,7 +10,7 @@ labels:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Examples of high-cardinality alerts
|
||||
menuTitle: High-cardinality alerts
|
||||
title: Examples of high-cardinality alerts
|
||||
weight: 1105
|
||||
refs:
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/multi-dimensional-alerts/
|
||||
aliases:
|
||||
- ../best-practices/multi-dimensional-alerts/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/multi-dimensional-alerts/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/multi-dimensional-alerts/
|
||||
description: This example shows how a single alert rule can generate multiple alert instances using time series data.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -8,7 +10,7 @@ labels:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Examples of multi-dimensional alerts
|
||||
menuTitle: Multi-dimensional alerts
|
||||
title: Example of multi-dimensional alerts on time series data
|
||||
weight: 1101
|
||||
refs:
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/table-data
|
||||
aliases:
|
||||
- ../best-practices/table-data/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/table-data/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/table-data
|
||||
description: This example shows how to create an alert rule using table data.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -8,7 +10,7 @@ labels:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Examples of table data
|
||||
menuTitle: Table data
|
||||
title: Example of alerting on tabular data
|
||||
weight: 1102
|
||||
refs:
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/trace-based-alerts/
|
||||
aliases:
|
||||
- ../best-practices/trace-based-alerts/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/trace-based-alerts/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/trace-based-alerts/
|
||||
description: This guide provides introductory examples and distinct approaches for setting up trace-based alerts in Grafana.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -8,7 +10,7 @@ labels:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
title: Examples of trace-based alerts
|
||||
title: Trace-based alerts
|
||||
weight: 1103
|
||||
refs:
|
||||
testdata-data-source:
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/tutorials/
|
||||
aliases:
|
||||
- ../best-practices/tutorials/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/tutorials/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/examples/tutorials/
|
||||
description: This section provides a set of step-by-step tutorials guides to get started with Grafana Aletings.
|
||||
keywords:
|
||||
- grafana
|
||||
35
docs/sources/alerting/guides/_index.md
Normal file
35
docs/sources/alerting/guides/_index.md
Normal file
@@ -0,0 +1,35 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/guides/
|
||||
description: This section provides a set of guides for useful alerting practices and recommendations
|
||||
keywords:
|
||||
- grafana
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Guides
|
||||
title: Guides
|
||||
weight: 170
|
||||
refs:
|
||||
examples:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/examples/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/examples/
|
||||
tutorials:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/examples/tutorials/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/examples/tutorials/
|
||||
---
|
||||
|
||||
# Guides
|
||||
|
||||
Guides in the Grafana Alerting documentation provide best practices and practical recommendations to help you move from a basic alerting setup to real-world use cases.
|
||||
|
||||
These guides cover topics such as:
|
||||
|
||||
{{< section >}}
|
||||
|
||||
For more hands-on examples, refer to [Examples](ref:examples) and [Tutorials](ref:tutorials).
|
||||
201
docs/sources/alerting/guides/best-practices.md
Normal file
201
docs/sources/alerting/guides/best-practices.md
Normal file
@@ -0,0 +1,201 @@
|
||||
---
|
||||
aliases:
|
||||
- ../best-practices/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/guides/best-practices/
|
||||
description: Designing and configuring an effective alerting system takes time. This guide focuses on building alerting systems that scale with real-world operations.
|
||||
keywords:
|
||||
- grafana
|
||||
- alerting
|
||||
- guide
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Best practices
|
||||
title: Best practices
|
||||
weight: 1010
|
||||
refs:
|
||||
recovery-threshold:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/fundamentals/alert-rules/queries-conditions/#recovery-threshold
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/fundamentals/alert-rules/queries-conditions/#recovery-threshold
|
||||
keep-firing-for:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/fundamentals/alert-rule-evaluation/#keep-firing-for
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/fundamentals/alert-rule-evaluation/#keep-firing-for
|
||||
pending-period:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/fundamentals/alert-rule-evaluation/#pending-period
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/fundamentals/alert-rule-evaluation/#pending-period
|
||||
silences:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/configure-notifications/create-silence/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/configure-notifications/create-silence/
|
||||
timing-options:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/fundamentals/notifications/group-alert-notifications/#timing-options
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/fundamentals/notifications/group-alert-notifications/#timing-options
|
||||
group-alert-notifications:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/fundamentals/notifications/group-alert-notifications/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/fundamentals/notifications/group-alert-notifications/
|
||||
notification-policies:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/fundamentals/notifications/notification-policies/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/fundamentals/notifications/notification-policies/
|
||||
annotations:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/fundamentals/alert-rules/annotation-label/#annotations
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/fundamentals/alert-rules/annotation-label/#annotations
|
||||
multi-dimensional-alerts:
|
||||
- pattern: /docs/grafana/
|
||||
destination: /docs/grafana/<GRAFANA_VERSION>/alerting/examples/multi-dimensional-alerts/
|
||||
- pattern: /docs/grafana-cloud/
|
||||
destination: /docs/grafana-cloud/alerting-and-irm/alerting/examples/multi-dimensional-alerts/
|
||||
---
|
||||
|
||||
# Alerting best practices
|
||||
|
||||
Designing and configuring an effective alerting system takes time. This guide focuses on building alerting systems that scale with real-world operations.
|
||||
|
||||
The practices described here are intentionally high-level and apply regardless of tooling. Whether you use Prometheus, Grafana Alerting, or another stack, the same constraints apply: complex systems, imperfect signals, and humans on call.
|
||||
|
||||
Alerting is never finished. It evolves with incidents, organizational changes, and the systems it’s meant to protect.
|
||||
|
||||
{{< shared id="alert-planning-fundamentals" >}}
|
||||
|
||||
## Prioritize symptoms, but don’t ignore infrastructure signals
|
||||
|
||||
Alerts should primarily detect user-facing failures, not internal component behavior. Users don't care that a pod restarted; they care when the application is slow or failing. Symptom-based alerts tie directly to user impact.
|
||||
|
||||
Reliability metrics that impact users—latency, errors, availability—are better paging signals than infrastructure events or internal errors.
|
||||
|
||||
That said, infrastructure signals still matter. They can act as early warning indicators and are often useful when alerting maturity is low. A sustained spike in CPU or memory usage might not justify a page, but it can help explain or anticipate symptom-based failures.
|
||||
|
||||
Infrastructure alerts tend to be noisy and are often ignored when treated like paging signals. They are usually better suited for lower-severity channels such as dashboards, alert lists, or non-paging destinations like a dedicated Slack channel, where they can be monitored without interrupting on-call.
|
||||
|
||||
The key is balance as your alerting matures. Use infrastructure alerts to support diagnosis and prevention, not as a replacement for symptom-based alerts.
|
||||
|
||||
## Escalate priority based on confidence
|
||||
|
||||
Alert priority is often tied to user impact and the urgency to respond, but confidence should determine when escalation is necessary.
|
||||
|
||||
In this context, escalation defines how responders are notified as confidence grows. This can include increasing alert priority, widening notification, paging additional responders, or opening an incident once intervention is clearly required.
|
||||
|
||||
Early signals are often ambiguous, and confidence in a non-transient failure is usually low. Paging too early creates noise; paging too late means users are impacted for longer before anyone acts. A small or sudden increase in latency may not justify immediate action, but it can indicate a failure in progress.
|
||||
|
||||
Confidence increases as signals become stronger or begin to correlate.
|
||||
|
||||
Escalation is justified when issues are sustained or reinforced by multiple signals. For example, high latency combined with a rising error rate, or the same event firing over a sustained period. These patterns reduce the chance of transient noise and increase the likelihood of real impact.
|
||||
|
||||
Use confidence in user impact to drive escalation and avoid unnecessary pages.
|
||||
|
||||
## Scope alerts for scalability and actionability
|
||||
|
||||
In distributed systems, avoid creating separate alert rules for every host, service, or endpoint. Instead, define alert rules that scale automatically using [multi-dimensional alert rules](ref:multi-dimensional-alerts). This reduces rule duplication and allows alerting to scale as the system grows.
|
||||
|
||||
Start simple. Default to a single dimension such as `service` or `endpoint` to keep alerts manageable. Add dimensions only when they improve actionability. For example, when missing a dimension like `region` hides failures or doesn't provide enough information to act quickly.
|
||||
|
||||
Additional dimensions like `region` or `instance` can help identify the root cause, but more isn't always better.
|
||||
|
||||
## Design alerts for first responders and clear actions
|
||||
|
||||
Alerts should be designed for the first responder, not the person who created the alert. Anyone on call should be able to understand what's wrong and what to do next without deep knowledge of the system or alert configuration.
|
||||
|
||||
Avoid vague alerts that force responders to spend time figuring out context. Every alert should clearly explain why it exists, what triggered it, and how to investigate. Use [annotations](ref:annotations) to link to relevant dashboards and runbooks, which are essential for faster resolution.
|
||||
|
||||
Alerts should indicate a real problem and be actionable, even if the impact is low. Informational alerts add noise without improving reliability.
|
||||
|
||||
If no action is possible, it shouldn't be an alert—consider using a dashboard instead. Over time, alerts behave like technical debt: easy to create, costly to maintain, and hard to remove.
|
||||
|
||||
Review alerts often and remove those that don’t lead to action.
|
||||
|
||||
## Alerts should have an owner and system scope
|
||||
|
||||
Alerts without ownership are often ignored. Every alert must have an owner: a team responsible for maintaining the alert and responding when it fires.
|
||||
|
||||
Alerts must also define a system scope, such as a service or infrastructure component. Scope provides organizational context and connects alerts with ownership. Defining clear scopes is easier when services are treated as first-class entities, and organizations are built around service ownership.
|
||||
|
||||
> [Service Center in Grafana Cloud](/docs/grafana-cloud/alerting-and-irm/service-center/) can help operate a service-oriented view of your system and align alert scope with ownership.
|
||||
|
||||
After scope, ownership, and alert priority are defined, routing determines where alerts go and how they escalate. **Notification routing is as important as the alerts**.
|
||||
|
||||
Alerts should be delivered to the right team and channel based on priority, ownership, and team workflows. Use [notification policies](ref:notification-policies) to define a routing tree that matches the context of your service or scope:
|
||||
|
||||
- Define a parent policy for default routing within the scope.
|
||||
- Define nested policies for specific cases or higher-priority issues.
|
||||
|
||||
## Prevent notification overload with alert grouping
|
||||
|
||||
Without alert grouping, responders can receive many notifications for the same underlying problem.
|
||||
|
||||
For example, a database failure can trigger several alerts at the same time like increased latency, higher error rates, and internal errors. Paging separately for each symptom quickly turns into notification spam, even though there is a single root cause.
|
||||
|
||||
[Notification grouping](ref:group-alert-notifications) consolidates related alerts into a single notification. Instead of receiving multiple pages for the same issue, responders get one alert that represents the incident and includes all related firing alerts.
|
||||
|
||||
Grouping should follow operational boundaries such as service or owner, as defined by notification policies. Downstream or cascading failures should be grouped together so they surface as one issue rather than many.
|
||||
|
||||
## Mitigate flapping alerts
|
||||
|
||||
Short-lived failure spikes often trigger alerts that auto-resolve quickly. Alerting on transient failures creates noise and leads responders to ignore them.
|
||||
|
||||
Require issues to persist before alerting. Set a [pending period](ref:pending-period) to define how long a condition must remain true before firing. For example, instead of alerting immediately on a high error rate, require it to stay above the threshold for several minutes.
|
||||
|
||||
Also, stabilize alerts by tuning query ranges and aggregations. Using raw data makes alerts sensitive to noise. Instead, evaluate over a time window and aggregate the data to smooth short spikes.
|
||||
|
||||
```promql
|
||||
# Reacts to transient spikes. Avoid this.
|
||||
cpu_usage > 90
|
||||
|
||||
# Smooth fluctuations.
|
||||
avg_over_time(cpu_usage[5m]) > 90
|
||||
```
|
||||
|
||||
For latency and error-based alerts, percentiles are often more useful than averages:
|
||||
|
||||
```promql
|
||||
quantile_over_time(0.95, http_duration_seconds[5m]) > 3
|
||||
```
|
||||
|
||||
Finally, avoid rapid resolve-and-fire notifications by using [`keep_firing_for`](ref:keep-firing-for) or [recovery thresholds](ref:recovery-threshold) to keep alerts active briefly during recovery. Both options reduce flapping and unnecessary notifications.
|
||||
|
||||
## Graduate symptom-based alerts into SLOs
|
||||
|
||||
When a symptom-based alert fires frequently, it usually indicates a reliability concern that should be measured and managed more deliberately. This is often a sign that the alert could evolve into an [SLO](/docs/grafana-cloud/alerting-and-irm/slo/).
|
||||
|
||||
Traditional alerts create pressure to react immediately, while error budgets introduce a buffer of time to act, changing how urgency is handled. Alerts can then be defined in terms of error budget burn rate rather than reacting to every minor deviation.
|
||||
|
||||
SLOs also align distinct teams around common reliability goals by providing a shared definition of what "good" looks like. They help consolidate multiple symptom alerts into a single user-facing objective.
|
||||
|
||||
For example, instead of several teams alerting on high latency, a single SLO can be used across teams to capture overall API performance.
|
||||
|
||||
## Integrate alerting into incident post-mortems
|
||||
|
||||
Every incident is an opportunity to improve alerting. After each incident, evaluate whether alerts helped responders act quickly or added unnecessary noise.
|
||||
|
||||
Assess which alerts fired, and how they influenced incident response. Review whether alerts triggered too late, too early, or without enough context, and adjust thresholds, priority, or escalation based on what actually happened.
|
||||
|
||||
Use [silences](ref:silences) during active incidents to reduce repeated notifications, but scope them carefully to avoid silencing unrelated alerts.
|
||||
|
||||
Post-mortems should evaluate alerts alongside root causes and lessons learned. If responders lacked key information during the incident, enrich alerts with additional context, dashboards, or better guidance.
|
||||
|
||||
## Alerts should be continuously improved
|
||||
|
||||
Alerting is an iterative process. Alerts that aren’t reviewed and refined lose effectiveness as systems and traffic patterns change.
|
||||
|
||||
Schedule regular reviews of existing alerts. Remove alerts that don’t lead to action, and tune alerts or thresholds that fire too often without providing useful signal. Reduce false positives to combat alert fatigue.
|
||||
|
||||
Prioritize clarity and simplicity in alert design. Simpler alerts are easier to understand, maintain, and trust under pressure. Favor fewer high-quality, actionable alerts over a large number of low-value ones.
|
||||
|
||||
Use dashboards and observability tools for investigation, not alerts.
|
||||
|
||||
{{< /shared >}}
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/connectivity-errors/
|
||||
aliases:
|
||||
- ../best-practices/connectivity-errors/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/connectivity-errors/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/guides/connectivity-errors/
|
||||
description: Learn how to detect and handle connectivity issues in alerts using Prometheus, Grafana Alerting, or both.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -14,7 +16,7 @@ labels:
|
||||
- oss
|
||||
menuTitle: Handle connectivity errors
|
||||
title: Handle connectivity errors in alerts
|
||||
weight: 1010
|
||||
weight: 1020
|
||||
refs:
|
||||
pending-period:
|
||||
- pattern: /docs/grafana/
|
||||
@@ -1,5 +1,7 @@
|
||||
---
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/best-practices/missing-data/
|
||||
aliases:
|
||||
- ../best-practices/missing-data/ # /docs/grafana/<GRAFANA_VERSION>/alerting/best-practices/missing-data/
|
||||
canonical: https://grafana.com/docs/grafana/latest/alerting/guides/missing-data/
|
||||
description: Learn how to detect missing metrics and design alerts that handle gaps in data in Prometheus and Grafana Alerting.
|
||||
keywords:
|
||||
- grafana
|
||||
@@ -14,7 +16,7 @@ labels:
|
||||
- oss
|
||||
menuTitle: Handle missing data
|
||||
title: Handle missing data in Grafana Alerting
|
||||
weight: 1020
|
||||
weight: 1030
|
||||
refs:
|
||||
connectivity-errors-guide:
|
||||
- pattern: /docs/grafana/
|
||||
@@ -41,9 +41,13 @@ Select a group to expand it and view the list of alert rules within that group.
|
||||
|
||||
The list view includes a number of filters to simplify managing large volumes of alerts.
|
||||
|
||||
## Filter and save searches
|
||||
|
||||
Click the **Filter** button to open the filter popup. You can filter by name, label, folder/namespace, evaluation group, data source, contact point, rule source, rule state, rule type, and the health of the alert rule from the popup menu. Click **Apply** at the bottom of the filter popup to apply the filters to your search.
|
||||
|
||||
{{< figure src="/media/docs/alerting/alerting-list-view-filter.png" max-width="750px" alt="Alert rule filter options" >}}
|
||||
Click the **Saved searches** button to open the list of previously saved searches, or click **+ Save current search** to add your current search to the saved searches list. You can also rename a saved search or set it as a default search. When you set a saved search as the default search, the Alert rules page opens with the search applied.
|
||||
|
||||
{{< figure src="/media/docs/alerting/alerting-saved-searches.png" max-width="750px" alt="Alert rule filter options" >}}
|
||||
|
||||
## Change alert rules list view
|
||||
|
||||
|
||||
@@ -23,6 +23,8 @@ killercoda:
|
||||
|
||||
This tutorial is a continuation of the [Get started with Grafana Alerting - Route alerts using dynamic labels](https://grafana.com/tutorials/alerting-get-started-pt5/) tutorial.
|
||||
|
||||
{{< youtube id="mqj_hN24zLU" >}}
|
||||
|
||||
<!-- USE CASE -->
|
||||
|
||||
In this tutorial you will learn how to:
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { test, expect } from '@grafana/plugin-e2e';
|
||||
|
||||
import { flows, type Variable } from './utils';
|
||||
import { flows, saveDashboard, type Variable } from './utils';
|
||||
|
||||
test.use({
|
||||
featureToggles: {
|
||||
@@ -64,20 +64,7 @@ test.describe(
|
||||
label: 'VariableUnderTest',
|
||||
};
|
||||
|
||||
// common steps to add a new variable
|
||||
await flows.newEditPaneVariableClick(dashboardPage, selectors);
|
||||
await flows.newEditPanelCommonVariableInputs(dashboardPage, selectors, variable);
|
||||
|
||||
// set the textbox variable value
|
||||
const type = 'variable-type Value';
|
||||
const fieldLabel = dashboardPage.getByGrafanaSelector(
|
||||
selectors.components.PanelEditor.OptionsPane.fieldLabel(type)
|
||||
);
|
||||
await expect(fieldLabel).toBeVisible();
|
||||
const inputField = fieldLabel.locator('input');
|
||||
await expect(inputField).toBeVisible();
|
||||
await inputField.fill(variable.value);
|
||||
await inputField.blur();
|
||||
await flows.addNewTextBoxVariable(dashboardPage, variable);
|
||||
|
||||
// select the variable in the dashboard and confirm the variable value is set
|
||||
await dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItem).click();
|
||||
@@ -140,5 +127,94 @@ test.describe(
|
||||
await expect(panelContent).toBeVisible();
|
||||
await expect(markdownContent).toContainText('VariableUnderTest: 10m');
|
||||
});
|
||||
test('can hide a variable', async ({ dashboardPage, selectors, page }) => {
|
||||
const variable: Variable = {
|
||||
type: 'textbox',
|
||||
name: 'VariableUnderTest',
|
||||
value: 'foo',
|
||||
label: 'VariableUnderTest',
|
||||
};
|
||||
|
||||
await saveDashboard(dashboardPage, page, selectors, 'can hide a variable');
|
||||
await flows.addNewTextBoxVariable(dashboardPage, variable);
|
||||
|
||||
// check the variable is visible in the dashboard
|
||||
const variableLabel = dashboardPage.getByGrafanaSelector(
|
||||
selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label)
|
||||
);
|
||||
await expect(variableLabel).toBeVisible();
|
||||
// hide the variable
|
||||
await dashboardPage
|
||||
.getByGrafanaSelector(selectors.pages.Dashboard.Settings.Variables.Edit.General.generalDisplaySelect)
|
||||
.click();
|
||||
await page.getByText('Hidden', { exact: true }).click();
|
||||
|
||||
// check that the variable is still visible
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeVisible();
|
||||
|
||||
// save dashboard and exit edit mode and check variable is not visible
|
||||
await saveDashboard(dashboardPage, page, selectors);
|
||||
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeHidden();
|
||||
// refresh and check that variable isn't visible
|
||||
await page.reload();
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeHidden();
|
||||
// check that the variable is visible in edit mode
|
||||
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeVisible();
|
||||
});
|
||||
|
||||
test('can hide variable under the controls menu', async ({ dashboardPage, selectors, page }) => {
|
||||
const variable: Variable = {
|
||||
type: 'textbox',
|
||||
name: 'VariableUnderTest',
|
||||
value: 'foo',
|
||||
label: 'VariableUnderTest',
|
||||
};
|
||||
await saveDashboard(dashboardPage, page, selectors, 'can hide a variable in controls menu');
|
||||
|
||||
await flows.addNewTextBoxVariable(dashboardPage, variable);
|
||||
|
||||
// check the variable is visible in the dashboard
|
||||
const variableLabel = dashboardPage.getByGrafanaSelector(
|
||||
selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label)
|
||||
);
|
||||
await expect(variableLabel).toBeVisible();
|
||||
// hide the variable
|
||||
await dashboardPage
|
||||
.getByGrafanaSelector(selectors.pages.Dashboard.Settings.Variables.Edit.General.generalDisplaySelect)
|
||||
.click();
|
||||
await page.getByText('Controls menu', { exact: true }).click();
|
||||
|
||||
// check that the variable is hidden under the controls menu
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeHidden();
|
||||
await dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.ControlsButton).click();
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeVisible();
|
||||
|
||||
// save dashboard and refresh
|
||||
await saveDashboard(dashboardPage, page, selectors);
|
||||
await page.reload();
|
||||
|
||||
//check that the variable is hidden under the controls menu
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeHidden();
|
||||
await dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.ControlsButton).click();
|
||||
await expect(
|
||||
dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels(variable.label!))
|
||||
).toBeVisible();
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
@@ -79,6 +79,20 @@ export const flows = {
|
||||
await variableLabelInput.blur();
|
||||
}
|
||||
},
|
||||
async addNewTextBoxVariable(dashboardPage: DashboardPage, variable: Variable) {
|
||||
await flows.newEditPaneVariableClick(dashboardPage, selectors);
|
||||
await flows.newEditPanelCommonVariableInputs(dashboardPage, selectors, variable);
|
||||
// set the textbox variable value
|
||||
const type = 'variable-type Value';
|
||||
const fieldLabel = dashboardPage.getByGrafanaSelector(
|
||||
selectors.components.PanelEditor.OptionsPane.fieldLabel(type)
|
||||
);
|
||||
await expect(fieldLabel).toBeVisible();
|
||||
const inputField = fieldLabel.locator('input');
|
||||
await expect(inputField).toBeVisible();
|
||||
await inputField.fill(variable.value);
|
||||
await inputField.blur();
|
||||
},
|
||||
};
|
||||
|
||||
export type Variable = {
|
||||
@@ -89,8 +103,16 @@ export type Variable = {
|
||||
value: string;
|
||||
};
|
||||
|
||||
export async function saveDashboard(dashboardPage: DashboardPage, page: Page, selectors: E2ESelectorGroups) {
|
||||
export async function saveDashboard(
|
||||
dashboardPage: DashboardPage,
|
||||
page: Page,
|
||||
selectors: E2ESelectorGroups,
|
||||
title?: string
|
||||
) {
|
||||
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.saveButton).click();
|
||||
if (title) {
|
||||
await page.getByTestId(selectors.components.Drawer.DashboardSaveDrawer.saveAsTitleInput).fill(title);
|
||||
}
|
||||
await dashboardPage.getByGrafanaSelector(selectors.components.Drawer.DashboardSaveDrawer.saveButton).click();
|
||||
await expect(page.getByText('Dashboard saved')).toBeVisible();
|
||||
}
|
||||
|
||||
5
go.mod
5
go.mod
@@ -7,7 +7,7 @@ require (
|
||||
buf.build/gen/go/parca-dev/parca/protocolbuffers/go v1.36.2-20250703125925-3f0fcf4bff96.1 // @grafana/observability-traces-and-profiling
|
||||
cloud.google.com/go/kms v1.22.0 // @grafana/grafana-backend-group
|
||||
cloud.google.com/go/storage v1.55.0 // @grafana/grafana-backend-group
|
||||
connectrpc.com/connect v1.18.1 // @grafana/observability-traces-and-profiling
|
||||
connectrpc.com/connect v1.19.1 // @grafana/observability-traces-and-profiling
|
||||
cuelang.org/go v0.11.1 // @grafana/grafana-as-code
|
||||
dario.cat/mergo v1.0.2 // @grafana/grafana-app-platform-squad
|
||||
filippo.io/age v1.2.1 // @grafana/identity-access-team
|
||||
@@ -111,7 +111,7 @@ require (
|
||||
github.com/grafana/nanogit v0.3.0 // indirect; @grafana/grafana-git-ui-sync-team
|
||||
github.com/grafana/otel-profiling-go v0.5.1 // @grafana/grafana-backend-group
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 // @grafana/observability-traces-and-profiling
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20250415190842-3ff7247547ae // @grafana/observability-traces-and-profiling
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20251118081820-ace37f973a0f // @grafana/observability-traces-and-profiling
|
||||
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // @grafana/grafana-search-and-storage
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 // @grafana/plugins-platform-backend
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // @grafana/grafana-backend-group
|
||||
@@ -681,6 +681,7 @@ require (
|
||||
github.com/go-openapi/swag/stringutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/typeutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/yamlutils v0.25.4 // indirect
|
||||
github.com/google/gnostic v0.7.1 // indirect
|
||||
github.com/gophercloud/gophercloud/v2 v2.9.0 // indirect
|
||||
github.com/grafana/sqlds/v5 v5.0.3 // indirect
|
||||
github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 // indirect
|
||||
|
||||
10
go.sum
10
go.sum
@@ -627,8 +627,8 @@ cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoIS
|
||||
cloud.google.com/go/workflows v1.8.0/go.mod h1:ysGhmEajwZxGn1OhGOGKsTXc5PyxOc0vfKf5Af+to4M=
|
||||
cloud.google.com/go/workflows v1.9.0/go.mod h1:ZGkj1aFIOd9c8Gerkjjq7OW7I5+l6cSvT3ujaO/WwSA=
|
||||
cloud.google.com/go/workflows v1.10.0/go.mod h1:fZ8LmRmZQWacon9UCX1r/g/DfAXx5VcPALq2CxzdePw=
|
||||
connectrpc.com/connect v1.18.1 h1:PAg7CjSAGvscaf6YZKUefjoih5Z/qYkyaTrBW8xvYPw=
|
||||
connectrpc.com/connect v1.18.1/go.mod h1:0292hj1rnx8oFrStN7cB4jjVBeqs+Yx5yDIC2prWDO8=
|
||||
connectrpc.com/connect v1.19.1 h1:R5M57z05+90EfEvCY1b7hBxDVOUl45PrtXtAV2fOC14=
|
||||
connectrpc.com/connect v1.19.1/go.mod h1:tN20fjdGlewnSFeZxLKb0xwIZ6ozc3OQs2hTXy4du9w=
|
||||
contrib.go.opencensus.io/exporter/ocagent v0.6.0/go.mod h1:zmKjrJcdo0aYcVS7bmEeSEBLPA9YJp5bjrofdU3pIXs=
|
||||
cuelabs.dev/go/oci/ociregistry v0.0.0-20251212221603-3adeb8663819 h1:Zh+Ur3OsoWpvALHPLT45nOekHkgOt+IOfutBbPqM17I=
|
||||
cuelabs.dev/go/oci/ociregistry v0.0.0-20251212221603-3adeb8663819/go.mod h1:WjmQxb+W6nVNCgj8nXrF24lIz95AHwnSl36tpjDZSU8=
|
||||
@@ -1503,6 +1503,8 @@ github.com/google/cel-go v0.26.1/go.mod h1:A9O8OU9rdvrK5MQyrqfIxo1a0u4g3sF8KB6PU
|
||||
github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q=
|
||||
github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
|
||||
github.com/google/gnostic v0.7.1 h1:t5Kc7j/8kYr8t2u11rykRrPPovlEMG4+xdc/SpekATs=
|
||||
github.com/google/gnostic v0.7.1/go.mod h1:KSw6sxnxEBFM8jLPfJd46xZP+yQcfE8XkiqfZx5zR28=
|
||||
github.com/google/gnostic-models v0.7.1 h1:SisTfuFKJSKM5CPZkffwi6coztzzeYUhc3v4yxLWH8c=
|
||||
github.com/google/gnostic-models v0.7.1/go.mod h1:whL5G0m6dmc5cPxKc5bdKdEN3UjI7OUGxBlw57miDrQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
@@ -1685,8 +1687,8 @@ github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604
|
||||
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604/go.mod h1:O/QP1BCm0HHIzbKvgMzqb5sSyH88rzkFk84F4TfJjBU=
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 h1:c1Us8i6eSmkW+Ez05d3co8kasnuOY813tbMN8i/a3Og=
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.9/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20250415190842-3ff7247547ae h1:35W3Wjp9KWnSoV/DuymmyIj5aHE0CYlDQ5m2KeXUPAc=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20250415190842-3ff7247547ae/go.mod h1:6CJ1uXmLZ13ufpO9xE4pST+DyaBt0uszzrV0YnoaVLQ=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20251118081820-ace37f973a0f h1:fTlIj5n4x5dU63XHItug7GLjtnaeJdPqBlqg4zlABq0=
|
||||
github.com/grafana/pyroscope/api v1.2.1-0.20251118081820-ace37f973a0f/go.mod h1:VBNcIhunCZsJ3/mcYx+j7uFf0P/108eiWa+8+Z9ll3o=
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc h1:GN2Lv3MGO7AS6PrRoT6yV5+wkrOpcszoIsO4+4ds248=
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc/go.mod h1:+JKpmjMGhpgPL+rXZ5nsZieVzvarn86asRlBg4uNGnk=
|
||||
github.com/grafana/saml v0.4.15-0.20240917091248-ae3bbdad8a56 h1:SDGrP81Vcd102L3UJEryRd1eestRw73wt+b8vnVEFe0=
|
||||
|
||||
@@ -755,6 +755,8 @@ github.com/felixge/fgprof v0.9.4/go.mod h1:yKl+ERSa++RYOs32d8K6WEXCB4uXdLls4ZaZP
|
||||
github.com/felixge/httpsnoop v1.0.3/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U=
|
||||
github.com/flosch/pongo2/v4 v4.0.2 h1:gv+5Pe3vaSVmiJvh/BZa82b7/00YUGm0PIyVVLop0Hw=
|
||||
github.com/flosch/pongo2/v4 v4.0.2/go.mod h1:B5ObFANs/36VwxxlgKpdchIJHMvHB562PW+BWPhwZD8=
|
||||
github.com/flowstack/go-jsonschema v0.1.1 h1:dCrjGJRXIlbDsLAgTJZTjhwUJnnxVWl1OgNyYh5nyDc=
|
||||
github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0=
|
||||
github.com/fluent/fluent-bit-go v0.0.0-20230731091245-a7a013e2473c h1:yKN46XJHYC/gvgH2UsisJ31+n4K3S7QYZSfU2uAWjuI=
|
||||
github.com/fluent/fluent-bit-go v0.0.0-20230731091245-a7a013e2473c/go.mod h1:L92h+dgwElEyUuShEwjbiHjseW410WIcNz+Bjutc8YQ=
|
||||
github.com/fogleman/gg v1.3.0 h1:/7zJX8F6AaYQc57WQCyN9cAIz+4bCJGO9B+dyW29am8=
|
||||
|
||||
@@ -218,8 +218,10 @@ lineage: schemas: [{
|
||||
// Optional field, if you want to extract part of a series name or metric node segment.
|
||||
// Named capture groups can be used to separate the display text and value.
|
||||
regex?: string
|
||||
// Determine whether regex applies to variable value or display text
|
||||
regexApplyTo?: #VariableRegexApplyTo
|
||||
// Optional, indicates whether a custom type variable uses CSV or JSON to define its values
|
||||
valuesFormat?: "csv" | "json" | *"csv"
|
||||
// Determine whether regex applies to variable value or display text
|
||||
regexApplyTo?: #VariableRegexApplyTo
|
||||
// Additional static options for query variable
|
||||
staticOptions?: [...#VariableOption]
|
||||
// Ordering of static options in relation to options returned from data source for query variable
|
||||
|
||||
@@ -295,8 +295,8 @@
|
||||
"@grafana/plugin-ui": "^0.11.1",
|
||||
"@grafana/prometheus": "workspace:*",
|
||||
"@grafana/runtime": "workspace:*",
|
||||
"@grafana/scenes": "6.52.0",
|
||||
"@grafana/scenes-react": "6.52.0",
|
||||
"@grafana/scenes": "v6.52.1",
|
||||
"@grafana/scenes-react": "v6.52.1",
|
||||
"@grafana/schema": "workspace:*",
|
||||
"@grafana/sql": "workspace:*",
|
||||
"@grafana/ui": "workspace:*",
|
||||
|
||||
@@ -400,10 +400,6 @@ export interface FeatureToggles {
|
||||
*/
|
||||
tableSharedCrosshair?: boolean;
|
||||
/**
|
||||
* Use the kubernetes API for feature toggle management in the frontend
|
||||
*/
|
||||
kubernetesFeatureToggles?: boolean;
|
||||
/**
|
||||
* Enabled grafana cloud specific RBAC roles
|
||||
*/
|
||||
cloudRBACRoles?: boolean;
|
||||
@@ -1263,4 +1259,8 @@ export interface FeatureToggles {
|
||||
* Enables the creation of keepers that manage secrets stored on AWS secrets manager
|
||||
*/
|
||||
secretsManagementAppPlatformAwsKeeper?: boolean;
|
||||
/**
|
||||
* Enables profiles exemplars support in profiles drilldown
|
||||
*/
|
||||
profilesExemplars?: boolean;
|
||||
}
|
||||
|
||||
@@ -103,6 +103,7 @@ export interface IntervalVariableModel extends VariableWithOptions {
|
||||
|
||||
export interface CustomVariableModel extends VariableWithMultiSupport {
|
||||
type: 'custom';
|
||||
valuesFormat?: 'csv' | 'json';
|
||||
}
|
||||
|
||||
export interface DataSourceVariableModel extends VariableWithMultiSupport {
|
||||
|
||||
@@ -266,6 +266,9 @@ export const versionedPages = {
|
||||
Controls: {
|
||||
'11.1.0': 'data-testid dashboard controls',
|
||||
},
|
||||
ControlsButton: {
|
||||
'12.3.0': 'data-testid dashboard controls button',
|
||||
},
|
||||
SubMenu: {
|
||||
submenu: {
|
||||
[MIN_GRAFANA_VERSION]: 'Dashboard submenu',
|
||||
|
||||
@@ -25,6 +25,10 @@ export interface GrafanaPyroscopeDataQuery extends common.DataQuery {
|
||||
* Allows to group the results.
|
||||
*/
|
||||
groupBy: Array<string>;
|
||||
/**
|
||||
* If set to true, exemplars will be requested
|
||||
*/
|
||||
includeExemplars: boolean;
|
||||
/**
|
||||
* Specifies the query label selectors.
|
||||
*/
|
||||
@@ -49,6 +53,7 @@ export interface GrafanaPyroscopeDataQuery extends common.DataQuery {
|
||||
|
||||
export const defaultGrafanaPyroscopeDataQuery: Partial<GrafanaPyroscopeDataQuery> = {
|
||||
groupBy: [],
|
||||
includeExemplars: false,
|
||||
labelSelector: '{}',
|
||||
spanSelector: [],
|
||||
};
|
||||
|
||||
@@ -211,6 +211,10 @@ export interface VariableModel {
|
||||
* Type of variable
|
||||
*/
|
||||
type: VariableType;
|
||||
/**
|
||||
* Optional, indicates whether a custom type variable uses CSV or JSON to define its values
|
||||
*/
|
||||
valuesFormat?: ('csv' | 'json');
|
||||
}
|
||||
|
||||
export const defaultVariableModel: Partial<VariableModel> = {
|
||||
@@ -220,6 +224,7 @@ export const defaultVariableModel: Partial<VariableModel> = {
|
||||
options: [],
|
||||
skipUrlSync: false,
|
||||
staticOptions: [],
|
||||
valuesFormat: 'csv',
|
||||
};
|
||||
|
||||
/**
|
||||
|
||||
@@ -317,6 +317,7 @@ export const handyTestingSchema: Spec = {
|
||||
query: 'option1, option2',
|
||||
skipUrlSync: false,
|
||||
allowCustomValue: true,
|
||||
valuesFormat: 'csv',
|
||||
},
|
||||
},
|
||||
{
|
||||
|
||||
@@ -300,7 +300,7 @@ export interface FieldConfig {
|
||||
description?: string;
|
||||
// An explicit path to the field in the datasource. When the frame meta includes a path,
|
||||
// This will default to `${frame.meta.path}/${field.name}
|
||||
//
|
||||
//
|
||||
// When defined, this value can be used as an identifier within the datasource scope, and
|
||||
// may be used to update the results
|
||||
path?: string;
|
||||
@@ -1353,6 +1353,7 @@ export interface CustomVariableSpec {
|
||||
skipUrlSync: boolean;
|
||||
description?: string;
|
||||
allowCustomValue: boolean;
|
||||
valuesFormat?: "csv" | "json";
|
||||
}
|
||||
|
||||
export const defaultCustomVariableSpec = (): CustomVariableSpec => ({
|
||||
@@ -1365,6 +1366,7 @@ export const defaultCustomVariableSpec = (): CustomVariableSpec => ({
|
||||
hide: "dontHide",
|
||||
skipUrlSync: false,
|
||||
allowCustomValue: true,
|
||||
valuesFormat: undefined,
|
||||
});
|
||||
|
||||
// Group variable kind
|
||||
@@ -1549,4 +1551,3 @@ export const defaultSpec = (): Spec => ({
|
||||
title: "",
|
||||
variables: [],
|
||||
});
|
||||
|
||||
|
||||
@@ -1359,6 +1359,7 @@ export interface CustomVariableSpec {
|
||||
skipUrlSync: boolean;
|
||||
description?: string;
|
||||
allowCustomValue: boolean;
|
||||
valuesFormat?: "csv" | "json";
|
||||
}
|
||||
|
||||
export const defaultCustomVariableSpec = (): CustomVariableSpec => ({
|
||||
|
||||
199
packages/grafana-sql/src/SQLVariableSupport.test.tsx
Normal file
199
packages/grafana-sql/src/SQLVariableSupport.test.tsx
Normal file
@@ -0,0 +1,199 @@
|
||||
import { Field, FieldType } from '@grafana/data';
|
||||
import { EditorMode } from '@grafana/plugin-ui';
|
||||
|
||||
import { migrateVariableQuery, convertOriginalFieldsToVariableFields } from './SQLVariableSupport';
|
||||
import { QueryFormat, SQLQuery, SQLQueryMeta } from './types';
|
||||
|
||||
const refId = 'SQLVariableQueryEditor-VariableQuery';
|
||||
const sampleQuery = 'SELECT * FROM users';
|
||||
|
||||
describe('migrateVariableQuery', () => {
|
||||
it('should handle string query', () => {
|
||||
const result = migrateVariableQuery(sampleQuery);
|
||||
expect(result).toMatchObject({
|
||||
refId,
|
||||
rawSql: sampleQuery,
|
||||
query: sampleQuery,
|
||||
editorMode: EditorMode.Code,
|
||||
format: QueryFormat.Table,
|
||||
});
|
||||
});
|
||||
it('should handle empty string query', () => {
|
||||
const result = migrateVariableQuery('');
|
||||
expect(result).toMatchObject({
|
||||
refId,
|
||||
rawSql: '',
|
||||
query: '',
|
||||
editorMode: EditorMode.Builder,
|
||||
format: QueryFormat.Table,
|
||||
});
|
||||
});
|
||||
it('should handle SQLQuery object with rawSql', () => {
|
||||
const rawQuery: SQLQuery = {
|
||||
refId: 'A',
|
||||
rawSql: sampleQuery,
|
||||
format: QueryFormat.Timeseries,
|
||||
editorMode: EditorMode.Code,
|
||||
};
|
||||
const result = migrateVariableQuery(rawQuery);
|
||||
expect(result).toStrictEqual({ ...rawQuery, query: sampleQuery });
|
||||
});
|
||||
it('should preserve all other properties from SQLQuery', () => {
|
||||
const rawQuery: SQLQuery = {
|
||||
refId: 'C',
|
||||
rawSql: sampleQuery,
|
||||
alias: 'test_alias',
|
||||
dataset: 'test_dataset',
|
||||
table: 'test_table',
|
||||
meta: { textField: 'name', valueField: 'id' },
|
||||
};
|
||||
const result = migrateVariableQuery(rawQuery);
|
||||
expect(result).toStrictEqual({ ...rawQuery, query: sampleQuery });
|
||||
});
|
||||
});
|
||||
|
||||
const field = (name: string, type: FieldType = FieldType.string, values: unknown[] = [1, 2, 3]): Field => ({
|
||||
name,
|
||||
type,
|
||||
values,
|
||||
config: {},
|
||||
});
|
||||
|
||||
describe('convertOriginalFieldsToVariableFields', () => {
|
||||
it('should throw error when no fields provided', () => {
|
||||
expect(() => convertOriginalFieldsToVariableFields([])).toThrow('at least one field expected for variable');
|
||||
});
|
||||
|
||||
it('should handle fields with __text and __value names', () => {
|
||||
const fields = [field('__text'), field('__value'), field('other_field')];
|
||||
expect(convertOriginalFieldsToVariableFields(fields).map((r) => r.name)).toStrictEqual([
|
||||
'text',
|
||||
'value',
|
||||
'__text',
|
||||
'__value',
|
||||
'other_field',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle fields with only __text', () => {
|
||||
const fields = [field('__text'), field('other_field')];
|
||||
expect(convertOriginalFieldsToVariableFields(fields).map((r) => r.name)).toStrictEqual([
|
||||
'text',
|
||||
'value',
|
||||
'__text',
|
||||
'other_field',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle fields with only __value', () => {
|
||||
const fields = [field('__value'), field('other_field')];
|
||||
expect(convertOriginalFieldsToVariableFields(fields).map((r) => r.name)).toStrictEqual([
|
||||
'text',
|
||||
'value',
|
||||
'__value',
|
||||
'other_field',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should use first field when no __text or __value present', () => {
|
||||
const fields = [field('id'), field('name'), field('category')];
|
||||
expect(convertOriginalFieldsToVariableFields(fields).map((r) => r.name)).toStrictEqual([
|
||||
'text',
|
||||
'value',
|
||||
'id',
|
||||
'name',
|
||||
'category',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should respect meta.textField and meta.valueField', () => {
|
||||
const fields = [field('id', FieldType.number, [3, 4]), field('display_name'), field('category')];
|
||||
const meta: SQLQueryMeta = {
|
||||
textField: 'display_name',
|
||||
valueField: 'id',
|
||||
};
|
||||
const result = convertOriginalFieldsToVariableFields(fields, meta);
|
||||
expect(convertOriginalFieldsToVariableFields(fields).map((r) => r.name)).toStrictEqual([
|
||||
'text',
|
||||
'value',
|
||||
'id',
|
||||
'display_name',
|
||||
'category',
|
||||
]);
|
||||
expect(result[0]).toStrictEqual({ ...fields[1], name: 'text' }); // display_name -> text
|
||||
expect(result[1]).toStrictEqual({ ...fields[0], name: 'value' }); // id -> value
|
||||
});
|
||||
|
||||
it('should handle meta with non-existent field names', () => {
|
||||
const fields = [field('id'), field('name')];
|
||||
const meta: SQLQueryMeta = {
|
||||
textField: 'non_existent_field',
|
||||
valueField: 'also_non_existent',
|
||||
};
|
||||
const result = convertOriginalFieldsToVariableFields(fields, meta);
|
||||
expect(result.map((r) => r.name)).toStrictEqual(['text', 'value', 'id', 'name']);
|
||||
expect(result[0]).toStrictEqual({ ...fields[0], name: 'text' });
|
||||
expect(result[1]).toStrictEqual({ ...fields[0], name: 'value' });
|
||||
});
|
||||
|
||||
it('should handle partial meta (only textField)', () => {
|
||||
const fields = [field('id'), field('label'), field('description')];
|
||||
const meta: SQLQueryMeta = {
|
||||
textField: 'label',
|
||||
};
|
||||
const result = convertOriginalFieldsToVariableFields(fields, meta);
|
||||
expect(result.map((r) => r.name)).toStrictEqual(['text', 'value', 'id', 'label', 'description']);
|
||||
expect(result[0]).toStrictEqual({ ...fields[1], name: 'text' }); // label -> text
|
||||
expect(result[1]).toStrictEqual({ ...fields[0], name: 'value' }); // fallback to text field
|
||||
});
|
||||
|
||||
it('should handle partial meta (only valueField)', () => {
|
||||
const fields = [field('name'), field('id', FieldType.number), field('type')];
|
||||
const meta: SQLQueryMeta = {
|
||||
valueField: 'id',
|
||||
};
|
||||
const result = convertOriginalFieldsToVariableFields(fields, meta);
|
||||
expect(result.map((r) => r.name)).toStrictEqual(['text', 'value', 'name', 'id', 'type']);
|
||||
expect(result[0]).toStrictEqual({ ...fields[0], name: 'text', type: FieldType.number }); // fallback to value field
|
||||
expect(result[1]).toStrictEqual({ ...fields[1], name: 'value' }); // id -> value
|
||||
});
|
||||
|
||||
it('should not include duplicate "value" or "text" fields in otherFields', () => {
|
||||
const fields = [field('value'), field('text'), field('other')];
|
||||
expect(convertOriginalFieldsToVariableFields(fields).map((r) => r.name)).toStrictEqual(['text', 'value', 'other']);
|
||||
});
|
||||
|
||||
it('should preserve field types and configurations', () => {
|
||||
const fields = [
|
||||
{
|
||||
name: 'id',
|
||||
type: FieldType.number,
|
||||
config: { unit: 'short', displayName: 'ID' },
|
||||
values: [1, 2, 3],
|
||||
},
|
||||
{
|
||||
name: 'name',
|
||||
type: FieldType.string,
|
||||
config: { displayName: 'Name' },
|
||||
values: ['A', 'B', 'C'],
|
||||
},
|
||||
];
|
||||
const meta: SQLQueryMeta = {
|
||||
textField: 'name',
|
||||
valueField: 'id',
|
||||
};
|
||||
const result = convertOriginalFieldsToVariableFields(fields, meta);
|
||||
expect(result[0]).toStrictEqual({
|
||||
name: 'text',
|
||||
type: FieldType.string,
|
||||
config: { displayName: 'Name' },
|
||||
values: ['A', 'B', 'C'],
|
||||
});
|
||||
expect(result[1]).toStrictEqual({
|
||||
name: 'value',
|
||||
type: FieldType.number,
|
||||
config: { unit: 'short', displayName: 'ID' },
|
||||
values: [1, 2, 3],
|
||||
});
|
||||
});
|
||||
});
|
||||
155
packages/grafana-sql/src/SQLVariableSupport.tsx
Normal file
155
packages/grafana-sql/src/SQLVariableSupport.tsx
Normal file
@@ -0,0 +1,155 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import { Observable } from 'rxjs';
|
||||
import { map } from 'rxjs/operators';
|
||||
|
||||
import {
|
||||
CustomVariableSupport,
|
||||
DataQueryRequest,
|
||||
DataQueryResponse,
|
||||
QueryEditorProps,
|
||||
Field,
|
||||
DataFrame,
|
||||
} from '@grafana/data';
|
||||
import { t } from '@grafana/i18n';
|
||||
import { EditorMode, EditorRows, EditorRow, EditorField } from '@grafana/plugin-ui';
|
||||
import { Combobox, ComboboxOption } from '@grafana/ui';
|
||||
|
||||
import { SqlQueryEditorLazy } from './components/QueryEditorLazy';
|
||||
import { SqlDatasource } from './datasource/SqlDatasource';
|
||||
import { applyQueryDefaults } from './defaults';
|
||||
import { QueryFormat, type SQLQuery, type SQLOptions, type SQLQueryMeta } from './types';
|
||||
|
||||
type SQLVariableQuery = { query: string } & SQLQuery;
|
||||
|
||||
const refId = 'SQLVariableQueryEditor-VariableQuery';
|
||||
|
||||
export class SQLVariableSupport extends CustomVariableSupport<SqlDatasource, SQLQuery> {
|
||||
constructor(readonly datasource: SqlDatasource) {
|
||||
super();
|
||||
}
|
||||
editor = SQLVariablesQueryEditor;
|
||||
query(request: DataQueryRequest<SQLQuery>): Observable<DataQueryResponse> {
|
||||
if (request.targets.length < 1) {
|
||||
throw new Error('no variable query found');
|
||||
}
|
||||
const updatedQuery = migrateVariableQuery(request.targets[0]);
|
||||
return this.datasource.query({ ...request, targets: [updatedQuery] }).pipe(
|
||||
map((d: DataQueryResponse) => {
|
||||
return {
|
||||
...d,
|
||||
data: (d.data || []).map((frame: DataFrame) => ({
|
||||
...frame,
|
||||
fields: convertOriginalFieldsToVariableFields(frame.fields, updatedQuery.meta),
|
||||
})),
|
||||
};
|
||||
})
|
||||
);
|
||||
}
|
||||
getDefaultQuery(): Partial<SQLQuery> {
|
||||
return applyQueryDefaults({ refId, editorMode: EditorMode.Builder, format: QueryFormat.Table });
|
||||
}
|
||||
}
|
||||
|
||||
type SQLVariableQueryEditorProps = QueryEditorProps<SqlDatasource, SQLQuery, SQLOptions>;
|
||||
|
||||
const SQLVariablesQueryEditor = (props: SQLVariableQueryEditorProps) => {
|
||||
const query = migrateVariableQuery(props.query);
|
||||
return (
|
||||
<>
|
||||
<SqlQueryEditorLazy {...props} query={query} />
|
||||
<FieldMapping {...props} query={query} />
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
const FieldMapping = (props: SQLVariableQueryEditorProps) => {
|
||||
const { query, datasource, onChange } = props;
|
||||
const [choices, setChoices] = useState<ComboboxOption[]>([]);
|
||||
useEffect(() => {
|
||||
let isActive = true;
|
||||
// eslint-disable-next-line
|
||||
const subscription = datasource.query({ targets: [query] } as DataQueryRequest<SQLQuery>).subscribe({
|
||||
next: (response) => {
|
||||
if (!isActive) {
|
||||
return;
|
||||
}
|
||||
const fieldNames = (response.data[0] || { fields: [] }).fields.map((f: Field) => f.name);
|
||||
setChoices(fieldNames.map((f: Field) => ({ value: f, label: f })));
|
||||
},
|
||||
error: () => {
|
||||
if (isActive) {
|
||||
setChoices([]);
|
||||
}
|
||||
},
|
||||
});
|
||||
return () => {
|
||||
isActive = false;
|
||||
subscription.unsubscribe();
|
||||
};
|
||||
}, [datasource, query]);
|
||||
const onMetaPropChange = <Key extends keyof SQLQueryMeta, Value extends SQLQueryMeta[Key]>(
|
||||
key: Key,
|
||||
value: Value,
|
||||
meta = query.meta || {}
|
||||
) => {
|
||||
onChange({ ...query, meta: { ...meta, [key]: value } });
|
||||
};
|
||||
return (
|
||||
<EditorRows>
|
||||
<EditorRow>
|
||||
<EditorField label={t('grafana-sql.components.query-meta.variables.valueField', 'Value Field')}>
|
||||
<Combobox
|
||||
isClearable
|
||||
value={query.meta?.valueField}
|
||||
onChange={(e) => onMetaPropChange('valueField', e?.value)}
|
||||
width={40}
|
||||
options={choices}
|
||||
/>
|
||||
</EditorField>
|
||||
<EditorField label={t('grafana-sql.components.query-meta.variables.textField', 'Text Field')}>
|
||||
<Combobox
|
||||
isClearable
|
||||
value={query.meta?.textField}
|
||||
onChange={(e) => onMetaPropChange('textField', e?.value)}
|
||||
width={40}
|
||||
options={choices}
|
||||
/>
|
||||
</EditorField>
|
||||
</EditorRow>
|
||||
</EditorRows>
|
||||
);
|
||||
};
|
||||
|
||||
export const migrateVariableQuery = (rawQuery: string | SQLQuery): SQLVariableQuery => {
|
||||
if (typeof rawQuery !== 'string') {
|
||||
return {
|
||||
...rawQuery,
|
||||
refId: rawQuery.refId || refId,
|
||||
query: rawQuery.rawSql || '',
|
||||
};
|
||||
}
|
||||
return {
|
||||
...applyQueryDefaults({
|
||||
refId,
|
||||
rawSql: rawQuery,
|
||||
editorMode: rawQuery ? EditorMode.Code : EditorMode.Builder,
|
||||
}),
|
||||
query: rawQuery,
|
||||
};
|
||||
};
|
||||
|
||||
export const convertOriginalFieldsToVariableFields = (original_fields: Field[], meta?: SQLQueryMeta): Field[] => {
|
||||
if (original_fields.length < 1) {
|
||||
throw new Error('at least one field expected for variable');
|
||||
}
|
||||
let tf = original_fields.find((f) => f.name === '__text');
|
||||
let vf = original_fields.find((f) => f.name === '__value');
|
||||
if (meta) {
|
||||
tf = meta.textField ? original_fields.find((f) => f.name === meta.textField) : undefined;
|
||||
vf = meta.valueField ? original_fields.find((f) => f.name === meta.valueField) : undefined;
|
||||
}
|
||||
const textField = tf || vf || original_fields[0];
|
||||
const valueField = vf || tf || original_fields[0];
|
||||
const otherFields = original_fields.filter((f: Field) => f.name !== 'value' && f.name !== 'text');
|
||||
return [{ ...textField, name: 'text' }, { ...valueField, name: 'value' }, ...otherFields];
|
||||
};
|
||||
@@ -21,6 +21,7 @@ export { TLSSecretsConfig } from './components/configuration/TLSSecretsConfig';
|
||||
export { useMigrateDatabaseFields } from './components/configuration/useMigrateDatabaseFields';
|
||||
export { SqlQueryEditorLazy } from './components/QueryEditorLazy';
|
||||
export type { QueryHeaderProps } from './components/QueryHeader';
|
||||
export { SQLVariableSupport } from './SQLVariableSupport';
|
||||
export { createSelectClause, haveColumns } from './utils/sql.utils';
|
||||
export { applyQueryDefaults } from './defaults';
|
||||
export { makeVariable } from './utils/testHelpers';
|
||||
|
||||
@@ -69,6 +69,12 @@
|
||||
"placeholder-select-format": "Select format",
|
||||
"run-query": "Run query"
|
||||
},
|
||||
"query-meta": {
|
||||
"variables": {
|
||||
"textField": "Text Field",
|
||||
"valueField": "Value Field"
|
||||
}
|
||||
},
|
||||
"query-toolbox": {
|
||||
"content-hit-ctrlcmdreturn-to-run-query": "Hit CTRL/CMD+Return to run query",
|
||||
"tooltip-collapse": "Collapse editor",
|
||||
|
||||
@@ -50,6 +50,8 @@ export enum QueryFormat {
|
||||
Table = 'table',
|
||||
}
|
||||
|
||||
export type SQLQueryMeta = { valueField?: string; textField?: string };
|
||||
|
||||
export interface SQLQuery extends DataQuery {
|
||||
alias?: string;
|
||||
format?: QueryFormat;
|
||||
@@ -59,6 +61,7 @@ export interface SQLQuery extends DataQuery {
|
||||
sql?: SQLExpression;
|
||||
editorMode?: EditorMode;
|
||||
rawQuery?: boolean;
|
||||
meta?: SQLQueryMeta;
|
||||
}
|
||||
|
||||
export interface NameValue {
|
||||
|
||||
@@ -20,7 +20,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/pluginfakes"
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
"github.com/grafana/grafana/pkg/services/caching"
|
||||
@@ -28,6 +27,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/oauthtoken/oauthtokentest"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginconfig"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/plugincontext"
|
||||
|
||||
10
pkg/kinds/dashboard/dashboard_spec_gen.go
generated
10
pkg/kinds/dashboard/dashboard_spec_gen.go
generated
@@ -837,6 +837,8 @@ type VariableModel struct {
|
||||
// Optional field, if you want to extract part of a series name or metric node segment.
|
||||
// Named capture groups can be used to separate the display text and value.
|
||||
Regex *string `json:"regex,omitempty"`
|
||||
// Optional, indicates whether a custom type variable uses CSV or JSON to define its values
|
||||
ValuesFormat *VariableModelValuesFormat `json:"valuesFormat,omitempty"`
|
||||
// Determine whether regex applies to variable value or display text
|
||||
RegexApplyTo *VariableRegexApplyTo `json:"regexApplyTo,omitempty"`
|
||||
// Additional static options for query variable
|
||||
@@ -852,6 +854,7 @@ func NewVariableModel() *VariableModel {
|
||||
Multi: (func(input bool) *bool { return &input })(false),
|
||||
AllowCustomValue: (func(input bool) *bool { return &input })(true),
|
||||
IncludeAll: (func(input bool) *bool { return &input })(false),
|
||||
ValuesFormat: (func(input VariableModelValuesFormat) *VariableModelValuesFormat { return &input })(VariableModelValuesFormatCsv),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1191,6 +1194,13 @@ const (
|
||||
DataTransformerConfigTopicAlertStates DataTransformerConfigTopic = "alertStates"
|
||||
)
|
||||
|
||||
type VariableModelValuesFormat string
|
||||
|
||||
const (
|
||||
VariableModelValuesFormatCsv VariableModelValuesFormat = "csv"
|
||||
VariableModelValuesFormatJson VariableModelValuesFormat = "json"
|
||||
)
|
||||
|
||||
type VariableModelStaticOptionsOrder string
|
||||
|
||||
const (
|
||||
|
||||
@@ -5,7 +5,6 @@ import (
|
||||
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/grpcplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/pluginextensionv2"
|
||||
"github.com/grafana/grafana/pkg/plugins/log"
|
||||
@@ -27,10 +26,6 @@ func New(providers ...PluginBackendProvider) *Service {
|
||||
}
|
||||
}
|
||||
|
||||
func ProvideService(coreRegistry *coreplugin.Registry) *Service {
|
||||
return New(coreRegistry.BackendFactoryProvider(), DefaultProvider)
|
||||
}
|
||||
|
||||
func (s *Service) BackendFactory(ctx context.Context, p *plugins.Plugin) backendplugin.PluginFactoryFunc {
|
||||
for _, provider := range s.providerChain {
|
||||
if factory := provider(ctx, p); factory != nil {
|
||||
|
||||
@@ -276,7 +276,7 @@ func (b *APIBuilder) oneFlagHandler(w http.ResponseWriter, r *http.Request) {
|
||||
return
|
||||
}
|
||||
|
||||
if b.providerType == setting.GOFFProviderType || b.providerType == setting.OFREPProviderType {
|
||||
if b.providerType == setting.FeaturesServiceProviderType || b.providerType == setting.OFREPProviderType {
|
||||
b.proxyFlagReq(ctx, flagKey, isAuthedReq, w, r)
|
||||
return
|
||||
}
|
||||
@@ -304,7 +304,7 @@ func (b *APIBuilder) allFlagsHandler(w http.ResponseWriter, r *http.Request) {
|
||||
isAuthedReq := b.isAuthenticatedRequest(r)
|
||||
span.SetAttributes(attribute.Bool("authenticated", isAuthedReq))
|
||||
|
||||
if b.providerType == setting.GOFFProviderType || b.providerType == setting.OFREPProviderType {
|
||||
if b.providerType == setting.FeaturesServiceProviderType || b.providerType == setting.OFREPProviderType {
|
||||
b.proxyAllFlagReq(ctx, isAuthedReq, w, r)
|
||||
return
|
||||
}
|
||||
|
||||
11
pkg/server/wire_gen.go
generated
11
pkg/server/wire_gen.go
generated
@@ -37,8 +37,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/login/social/socialimpl"
|
||||
"github.com/grafana/grafana/pkg/middleware/csrf"
|
||||
"github.com/grafana/grafana/pkg/middleware/loggermw"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
provider2 "github.com/grafana/grafana/pkg/plugins/backendplugin/provider"
|
||||
manager4 "github.com/grafana/grafana/pkg/plugins/manager"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/filestore"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/process"
|
||||
@@ -178,6 +176,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/angulardetectorsprovider"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/angularinspector"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/angularpatternsstore"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/installsync"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/keyretriever"
|
||||
@@ -557,7 +556,7 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
|
||||
zipkinService := zipkin.ProvideService(httpclientProvider)
|
||||
jaegerService := jaeger.ProvideService(httpclientProvider)
|
||||
corepluginRegistry := coreplugin.ProvideCoreRegistry(tracer, azuremonitorService, cloudwatchService, cloudmonitoringService, elasticsearchService, graphiteService, influxdbService, lokiService, opentsdbService, prometheusService, tempoService, testdatasourceService, postgresService, mysqlService, mssqlService, grafanadsService, pyroscopeService, parcaService, zipkinService, jaegerService)
|
||||
providerService := provider2.ProvideService(corepluginRegistry)
|
||||
backendFactoryProvider := coreplugin.ProvideCoreProvider(corepluginRegistry)
|
||||
processService := process.ProvideService()
|
||||
retrieverService := retriever.ProvideService(sqlStore, apikeyService, kvStore, userService, orgService)
|
||||
serviceAccountPermissionsService, err := ossaccesscontrol.ProvideServiceAccountPermissions(cfg, featureToggles, routeRegisterImpl, sqlStore, accessControl, ossLicensingService, retrieverService, acimplService, teamService, userService, actionSetService)
|
||||
@@ -573,7 +572,7 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
|
||||
service13 := service6.ProvideService(sqlStore, secretsService)
|
||||
serviceregistrationService := serviceregistration.ProvideService(cfg, featureToggles, registryRegistry, service13)
|
||||
noop := provisionedplugins.NewNoop()
|
||||
initialize := pipeline.ProvideInitializationStage(pluginManagementCfg, inMemory, providerService, processService, serviceregistrationService, acimplService, actionSetService, envVarsProvider, tracingService, noop)
|
||||
initialize := pipeline.ProvideInitializationStage(pluginManagementCfg, inMemory, backendFactoryProvider, processService, serviceregistrationService, acimplService, actionSetService, envVarsProvider, tracingService, noop)
|
||||
terminate, err := pipeline.ProvideTerminationStage(pluginManagementCfg, inMemory, processService)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -1217,7 +1216,7 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
|
||||
zipkinService := zipkin.ProvideService(httpclientProvider)
|
||||
jaegerService := jaeger.ProvideService(httpclientProvider)
|
||||
corepluginRegistry := coreplugin.ProvideCoreRegistry(tracer, azuremonitorService, cloudwatchService, cloudmonitoringService, elasticsearchService, graphiteService, influxdbService, lokiService, opentsdbService, prometheusService, tempoService, testdatasourceService, postgresService, mysqlService, mssqlService, grafanadsService, pyroscopeService, parcaService, zipkinService, jaegerService)
|
||||
providerService := provider2.ProvideService(corepluginRegistry)
|
||||
backendFactoryProvider := coreplugin.ProvideCoreProvider(corepluginRegistry)
|
||||
processService := process.ProvideService()
|
||||
retrieverService := retriever.ProvideService(sqlStore, apikeyService, kvStore, userService, orgService)
|
||||
serviceAccountPermissionsService, err := ossaccesscontrol.ProvideServiceAccountPermissions(cfg, featureToggles, routeRegisterImpl, sqlStore, accessControl, ossLicensingService, retrieverService, acimplService, teamService, userService, actionSetService)
|
||||
@@ -1233,7 +1232,7 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
|
||||
service13 := service6.ProvideService(sqlStore, secretsService)
|
||||
serviceregistrationService := serviceregistration.ProvideService(cfg, featureToggles, registryRegistry, service13)
|
||||
noop := provisionedplugins.NewNoop()
|
||||
initialize := pipeline.ProvideInitializationStage(pluginManagementCfg, inMemory, providerService, processService, serviceregistrationService, acimplService, actionSetService, envVarsProvider, tracingService, noop)
|
||||
initialize := pipeline.ProvideInitializationStage(pluginManagementCfg, inMemory, backendFactoryProvider, processService, serviceregistrationService, acimplService, actionSetService, envVarsProvider, tracingService, noop)
|
||||
terminate, err := pipeline.ProvideTerminationStage(pluginManagementCfg, inMemory, processService)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -3,11 +3,13 @@ package dualwrite
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
|
||||
claims "github.com/grafana/authlib/types"
|
||||
|
||||
dashboardV1 "github.com/grafana/grafana/apps/dashboard/pkg/apis/dashboard/v1beta1"
|
||||
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
|
||||
"github.com/grafana/grafana/pkg/services/authz/zanzana"
|
||||
)
|
||||
@@ -19,14 +21,30 @@ type legacyTupleCollector func(ctx context.Context, orgID int64) (map[string]map
|
||||
type zanzanaTupleCollector func(ctx context.Context, client zanzana.Client, object string, namespace string) (map[string]*openfgav1.TupleKey, error)
|
||||
|
||||
type resourceReconciler struct {
|
||||
name string
|
||||
legacy legacyTupleCollector
|
||||
zanzana zanzanaTupleCollector
|
||||
client zanzana.Client
|
||||
name string
|
||||
legacy legacyTupleCollector
|
||||
zanzana zanzanaTupleCollector
|
||||
client zanzana.Client
|
||||
orphanObjectPrefix string
|
||||
orphanRelations []string
|
||||
}
|
||||
|
||||
func newResourceReconciler(name string, legacy legacyTupleCollector, zanzana zanzanaTupleCollector, client zanzana.Client) resourceReconciler {
|
||||
return resourceReconciler{name, legacy, zanzana, client}
|
||||
func newResourceReconciler(name string, legacy legacyTupleCollector, zanzanaCollector zanzanaTupleCollector, client zanzana.Client) resourceReconciler {
|
||||
r := resourceReconciler{name: name, legacy: legacy, zanzana: zanzanaCollector, client: client}
|
||||
|
||||
// we only need to worry about orphaned tuples for reconcilers that use the managed permissions collector (i.e. dashboards & folders)
|
||||
switch name {
|
||||
case "managed folder permissions":
|
||||
// prefix for folders is `folder:`
|
||||
r.orphanObjectPrefix = zanzana.NewObjectEntry(zanzana.TypeFolder, "", "", "", "")
|
||||
r.orphanRelations = append([]string{}, zanzana.RelationsFolder...)
|
||||
case "managed dashboard permissions":
|
||||
// prefix for dashboards will be `resource:dashboard.grafana.app/dashboards/`
|
||||
r.orphanObjectPrefix = fmt.Sprintf("%s/", zanzana.NewObjectEntry(zanzana.TypeResource, dashboardV1.APIGroup, dashboardV1.DASHBOARD_RESOURCE, "", ""))
|
||||
r.orphanRelations = append([]string{}, zanzana.RelationsResouce...)
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func (r resourceReconciler) reconcile(ctx context.Context, namespace string) error {
|
||||
@@ -35,6 +53,15 @@ func (r resourceReconciler) reconcile(ctx context.Context, namespace string) err
|
||||
return err
|
||||
}
|
||||
|
||||
// 0. Fetch all tuples currently stored in Zanzana. This will be used later on
|
||||
// to cleanup orphaned tuples.
|
||||
// This order needs to be kept (fetching from Zanzana first) to avoid accidentally
|
||||
// cleaning up new tuples that were added after the legacy tuples were fetched.
|
||||
allTuplesInZanzana, err := r.readAllTuples(ctx, namespace)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to read all tuples from zanzana for %s: %w", r.name, err)
|
||||
}
|
||||
|
||||
// 1. Fetch grafana resources stored in grafana db.
|
||||
res, err := r.legacy(ctx, info.OrgID)
|
||||
if err != nil {
|
||||
@@ -87,6 +114,14 @@ func (r resourceReconciler) reconcile(ctx context.Context, namespace string) err
|
||||
}
|
||||
}
|
||||
|
||||
// when the last managed permission for a resource is removed, the legacy results will no
|
||||
// longer contain any tuples for that resource. this process cleans it up when applicable.
|
||||
orphans, err := r.collectOrphanDeletes(ctx, namespace, allTuplesInZanzana, res)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to collect orphan deletes (%s): %w", r.name, err)
|
||||
}
|
||||
deletes = append(deletes, orphans...)
|
||||
|
||||
if len(writes) == 0 && len(deletes) == 0 {
|
||||
return nil
|
||||
}
|
||||
@@ -119,3 +154,79 @@ func (r resourceReconciler) reconcile(ctx context.Context, namespace string) err
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// collectOrphanDeletes collects tuples that are no longer present in the legacy results
|
||||
// but still are present in zanzana. when that is the case, we need to delete the tuple from
|
||||
// zanzana. this will happen when the last managed permission for a resource is removed.
|
||||
// this is only used for dashboards and folders, as those are the only resources that use the managed permissions collector.
|
||||
func (r resourceReconciler) collectOrphanDeletes(
|
||||
ctx context.Context,
|
||||
namespace string,
|
||||
allTuplesInZanzana []*authzextv1.Tuple,
|
||||
legacyReturnedTuples map[string]map[string]*openfgav1.TupleKey,
|
||||
) ([]*openfgav1.TupleKeyWithoutCondition, error) {
|
||||
if r.orphanObjectPrefix == "" || len(r.orphanRelations) == 0 {
|
||||
return []*openfgav1.TupleKeyWithoutCondition{}, nil
|
||||
}
|
||||
|
||||
seen := map[string]struct{}{}
|
||||
out := []*openfgav1.TupleKeyWithoutCondition{}
|
||||
|
||||
// what relation types we are interested in cleaning up
|
||||
relationsToCleanup := map[string]struct{}{}
|
||||
for _, rel := range r.orphanRelations {
|
||||
relationsToCleanup[rel] = struct{}{}
|
||||
}
|
||||
|
||||
for _, tuple := range allTuplesInZanzana {
|
||||
if tuple == nil || tuple.Key == nil {
|
||||
continue
|
||||
}
|
||||
// only cleanup the particular relation types we are interested in
|
||||
if _, ok := relationsToCleanup[tuple.Key.Relation]; !ok {
|
||||
continue
|
||||
}
|
||||
// only cleanup the particular object types we are interested in (either dashboards or folders)
|
||||
if !strings.HasPrefix(tuple.Key.Object, r.orphanObjectPrefix) {
|
||||
continue
|
||||
}
|
||||
// if legacy returned this object, it's not orphaned
|
||||
if _, ok := legacyReturnedTuples[tuple.Key.Object]; ok {
|
||||
continue
|
||||
}
|
||||
// keep track of the tuples we have already seen and marked for deletion
|
||||
key := fmt.Sprintf("%s|%s|%s", tuple.Key.User, tuple.Key.Relation, tuple.Key.Object)
|
||||
if _, ok := seen[key]; ok {
|
||||
continue
|
||||
}
|
||||
seen[key] = struct{}{}
|
||||
out = append(out, &openfgav1.TupleKeyWithoutCondition{
|
||||
User: tuple.Key.User,
|
||||
Relation: tuple.Key.Relation,
|
||||
Object: tuple.Key.Object,
|
||||
})
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (r resourceReconciler) readAllTuples(ctx context.Context, namespace string) ([]*authzextv1.Tuple, error) {
|
||||
var (
|
||||
out []*authzextv1.Tuple
|
||||
continueToken string
|
||||
)
|
||||
for {
|
||||
res, err := r.client.Read(ctx, &authzextv1.ReadRequest{
|
||||
Namespace: namespace,
|
||||
ContinuationToken: continueToken,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
out = append(out, res.Tuples...)
|
||||
continueToken = res.ContinuationToken
|
||||
if continueToken == "" {
|
||||
return out, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,110 @@
|
||||
package dualwrite
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
authlib "github.com/grafana/authlib/types"
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
|
||||
"github.com/grafana/grafana/pkg/services/authz/zanzana"
|
||||
)
|
||||
|
||||
type fakeZanzanaClient struct {
|
||||
readTuples []*authzextv1.Tuple
|
||||
writeReqs []*authzextv1.WriteRequest
|
||||
}
|
||||
|
||||
func (f *fakeZanzanaClient) Read(ctx context.Context, req *authzextv1.ReadRequest) (*authzextv1.ReadResponse, error) {
|
||||
return &authzextv1.ReadResponse{
|
||||
Tuples: f.readTuples,
|
||||
ContinuationToken: "",
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (f *fakeZanzanaClient) Write(ctx context.Context, req *authzextv1.WriteRequest) error {
|
||||
f.writeReqs = append(f.writeReqs, req)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *fakeZanzanaClient) BatchCheck(ctx context.Context, req *authzextv1.BatchCheckRequest) (*authzextv1.BatchCheckResponse, error) {
|
||||
return &authzextv1.BatchCheckResponse{}, nil
|
||||
}
|
||||
|
||||
func (f *fakeZanzanaClient) Mutate(ctx context.Context, req *authzextv1.MutateRequest) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *fakeZanzanaClient) Query(ctx context.Context, req *authzextv1.QueryRequest) (*authzextv1.QueryResponse, error) {
|
||||
return &authzextv1.QueryResponse{}, nil
|
||||
}
|
||||
|
||||
func (f *fakeZanzanaClient) Check(ctx context.Context, info authlib.AuthInfo, req authlib.CheckRequest, folder string) (authlib.CheckResponse, error) {
|
||||
return authlib.CheckResponse{Allowed: true}, nil
|
||||
}
|
||||
|
||||
func (f *fakeZanzanaClient) Compile(ctx context.Context, info authlib.AuthInfo, req authlib.ListRequest) (authlib.ItemChecker, authlib.Zookie, error) {
|
||||
return func(name, folder string) bool { return true }, authlib.NoopZookie{}, nil
|
||||
}
|
||||
|
||||
func TestResourceReconciler_OrphanedManagedDashboardTuplesAreDeleted(t *testing.T) {
|
||||
legacy := func(ctx context.Context, orgID int64) (map[string]map[string]*openfgav1.TupleKey, error) {
|
||||
return map[string]map[string]*openfgav1.TupleKey{}, nil
|
||||
}
|
||||
zCollector := func(ctx context.Context, client zanzana.Client, object string, namespace string) (map[string]*openfgav1.TupleKey, error) {
|
||||
return map[string]*openfgav1.TupleKey{}, nil
|
||||
}
|
||||
|
||||
fake := &fakeZanzanaClient{}
|
||||
r := newResourceReconciler("managed dashboard permissions", legacy, zCollector, fake)
|
||||
|
||||
require.NotEmpty(t, r.orphanObjectPrefix)
|
||||
require.NotEmpty(t, r.orphanRelations)
|
||||
|
||||
relAllowed := r.orphanRelations[0]
|
||||
objAllowed := r.orphanObjectPrefix + "dash-uid-1"
|
||||
|
||||
fake.readTuples = []*authzextv1.Tuple{
|
||||
// should be removed
|
||||
{
|
||||
Key: &authzextv1.TupleKey{
|
||||
User: "user:1",
|
||||
Relation: relAllowed,
|
||||
Object: objAllowed,
|
||||
},
|
||||
},
|
||||
|
||||
// same relation but different object type/prefix - should stay
|
||||
{
|
||||
Key: &authzextv1.TupleKey{
|
||||
User: "user:1",
|
||||
Relation: relAllowed,
|
||||
Object: "folder:some-folder",
|
||||
},
|
||||
},
|
||||
// same prefix but different relation - should stay
|
||||
{
|
||||
Key: &authzextv1.TupleKey{
|
||||
User: "user:1",
|
||||
Relation: zanzana.RelationParent,
|
||||
Object: objAllowed,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
err := r.reconcile(context.Background(), authlib.OrgNamespaceFormatter(1))
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Len(t, fake.writeReqs, 1)
|
||||
wr := fake.writeReqs[0]
|
||||
require.NotNil(t, wr.Deletes)
|
||||
require.Nil(t, wr.Writes)
|
||||
|
||||
require.Len(t, wr.Deletes.TupleKeys, 1)
|
||||
del := wr.Deletes.TupleKeys[0]
|
||||
require.Equal(t, "user:1", del.User)
|
||||
require.Equal(t, relAllowed, del.Relation)
|
||||
require.Equal(t, objAllowed, del.Object)
|
||||
}
|
||||
@@ -32,6 +32,8 @@ func NewOpenFGAServer(cfg setting.ZanzanaServerSettings, store storage.OpenFGADa
|
||||
opts := []server.OpenFGAServiceV1Option{
|
||||
server.WithDatastore(store),
|
||||
server.WithLogger(zlogger.New(logger)),
|
||||
|
||||
// Cache settings
|
||||
server.WithCheckCacheLimit(cfg.CacheSettings.CheckCacheLimit),
|
||||
server.WithCacheControllerEnabled(cfg.CacheSettings.CacheControllerEnabled),
|
||||
server.WithCacheControllerTTL(cfg.CacheSettings.CacheControllerTTL),
|
||||
@@ -40,16 +42,25 @@ func NewOpenFGAServer(cfg setting.ZanzanaServerSettings, store storage.OpenFGADa
|
||||
server.WithCheckIteratorCacheEnabled(cfg.CacheSettings.CheckIteratorCacheEnabled),
|
||||
server.WithCheckIteratorCacheMaxResults(cfg.CacheSettings.CheckIteratorCacheMaxResults),
|
||||
server.WithCheckIteratorCacheTTL(cfg.CacheSettings.CheckIteratorCacheTTL),
|
||||
|
||||
// ListObjects settings
|
||||
server.WithListObjectsMaxResults(cfg.ListObjectsMaxResults),
|
||||
server.WithListObjectsIteratorCacheEnabled(cfg.CacheSettings.ListObjectsIteratorCacheEnabled),
|
||||
server.WithListObjectsIteratorCacheMaxResults(cfg.CacheSettings.ListObjectsIteratorCacheMaxResults),
|
||||
server.WithListObjectsIteratorCacheTTL(cfg.CacheSettings.ListObjectsIteratorCacheTTL),
|
||||
server.WithListObjectsDeadline(cfg.ListObjectsDeadline),
|
||||
|
||||
// Shared iterator settings
|
||||
server.WithSharedIteratorEnabled(cfg.CacheSettings.SharedIteratorEnabled),
|
||||
server.WithSharedIteratorLimit(cfg.CacheSettings.SharedIteratorLimit),
|
||||
server.WithSharedIteratorTTL(cfg.CacheSettings.SharedIteratorTTL),
|
||||
server.WithListObjectsDeadline(cfg.ListObjectsDeadline),
|
||||
|
||||
server.WithContextPropagationToDatastore(true),
|
||||
}
|
||||
|
||||
openfgaOpts := withOpenFGAOptions(cfg)
|
||||
opts = append(opts, openfgaOpts...)
|
||||
|
||||
srv, err := server.NewServerWithOpts(opts...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -58,6 +69,129 @@ func NewOpenFGAServer(cfg setting.ZanzanaServerSettings, store storage.OpenFGADa
|
||||
return srv, nil
|
||||
}
|
||||
|
||||
func withOpenFGAOptions(cfg setting.ZanzanaServerSettings) []server.OpenFGAServiceV1Option {
|
||||
opts := make([]server.OpenFGAServiceV1Option, 0)
|
||||
|
||||
listOpts := withListOptions(cfg)
|
||||
opts = append(opts, listOpts...)
|
||||
|
||||
// Check settings
|
||||
if cfg.OpenFgaServerSettings.MaxConcurrentReadsForCheck != 0 {
|
||||
opts = append(opts, server.WithMaxConcurrentReadsForCheck(cfg.OpenFgaServerSettings.MaxConcurrentReadsForCheck))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.CheckDatabaseThrottleThreshold != 0 || cfg.OpenFgaServerSettings.CheckDatabaseThrottleDuration != 0 {
|
||||
opts = append(opts, server.WithCheckDatabaseThrottle(cfg.OpenFgaServerSettings.CheckDatabaseThrottleThreshold, cfg.OpenFgaServerSettings.CheckDatabaseThrottleDuration))
|
||||
}
|
||||
|
||||
// Batch check settings
|
||||
if cfg.OpenFgaServerSettings.MaxConcurrentChecksPerBatchCheck != 0 {
|
||||
opts = append(opts, server.WithMaxConcurrentChecksPerBatchCheck(cfg.OpenFgaServerSettings.MaxConcurrentChecksPerBatchCheck))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.MaxChecksPerBatchCheck != 0 {
|
||||
opts = append(opts, server.WithMaxChecksPerBatchCheck(cfg.OpenFgaServerSettings.MaxChecksPerBatchCheck))
|
||||
}
|
||||
|
||||
// Resolve node settings
|
||||
if cfg.OpenFgaServerSettings.ResolveNodeLimit != 0 {
|
||||
opts = append(opts, server.WithResolveNodeLimit(cfg.OpenFgaServerSettings.ResolveNodeLimit))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ResolveNodeBreadthLimit != 0 {
|
||||
opts = append(opts, server.WithResolveNodeBreadthLimit(cfg.OpenFgaServerSettings.ResolveNodeBreadthLimit))
|
||||
}
|
||||
|
||||
// Dispatch throttling settings
|
||||
if cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverEnabled {
|
||||
opts = append(opts, server.WithDispatchThrottlingCheckResolverEnabled(cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverEnabled))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverFrequency != 0 {
|
||||
opts = append(opts, server.WithDispatchThrottlingCheckResolverFrequency(cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverFrequency))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverThreshold != 0 {
|
||||
opts = append(opts, server.WithDispatchThrottlingCheckResolverThreshold(cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverThreshold))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverMaxThreshold != 0 {
|
||||
opts = append(opts, server.WithDispatchThrottlingCheckResolverMaxThreshold(cfg.OpenFgaServerSettings.DispatchThrottlingCheckResolverMaxThreshold))
|
||||
}
|
||||
|
||||
// Shadow check/query settings
|
||||
if cfg.OpenFgaServerSettings.ShadowCheckResolverTimeout != 0 {
|
||||
opts = append(opts, server.WithShadowCheckResolverTimeout(cfg.OpenFgaServerSettings.ShadowCheckResolverTimeout))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ShadowListObjectsQueryTimeout != 0 {
|
||||
opts = append(opts, server.WithShadowListObjectsQueryTimeout(cfg.OpenFgaServerSettings.ShadowListObjectsQueryTimeout))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ShadowListObjectsQueryMaxDeltaItems != 0 {
|
||||
opts = append(opts, server.WithShadowListObjectsQueryMaxDeltaItems(cfg.OpenFgaServerSettings.ShadowListObjectsQueryMaxDeltaItems))
|
||||
}
|
||||
|
||||
if cfg.OpenFgaServerSettings.RequestTimeout != 0 {
|
||||
opts = append(opts, server.WithRequestTimeout(cfg.OpenFgaServerSettings.RequestTimeout))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.MaxAuthorizationModelSizeInBytes != 0 {
|
||||
opts = append(opts, server.WithMaxAuthorizationModelSizeInBytes(cfg.OpenFgaServerSettings.MaxAuthorizationModelSizeInBytes))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.AuthorizationModelCacheSize != 0 {
|
||||
opts = append(opts, server.WithAuthorizationModelCacheSize(cfg.OpenFgaServerSettings.AuthorizationModelCacheSize))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ChangelogHorizonOffset != 0 {
|
||||
opts = append(opts, server.WithChangelogHorizonOffset(cfg.OpenFgaServerSettings.ChangelogHorizonOffset))
|
||||
}
|
||||
|
||||
return opts
|
||||
}
|
||||
|
||||
func withListOptions(cfg setting.ZanzanaServerSettings) []server.OpenFGAServiceV1Option {
|
||||
opts := make([]server.OpenFGAServiceV1Option, 0)
|
||||
|
||||
// ListObjects settings
|
||||
if cfg.OpenFgaServerSettings.MaxConcurrentReadsForListObjects != 0 {
|
||||
opts = append(opts, server.WithMaxConcurrentReadsForListObjects(cfg.OpenFgaServerSettings.MaxConcurrentReadsForListObjects))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingEnabled {
|
||||
opts = append(opts, server.WithListObjectsDispatchThrottlingEnabled(cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingEnabled))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingFrequency != 0 {
|
||||
opts = append(opts, server.WithListObjectsDispatchThrottlingFrequency(cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingFrequency))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingThreshold != 0 {
|
||||
opts = append(opts, server.WithListObjectsDispatchThrottlingThreshold(cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingThreshold))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingMaxThreshold != 0 {
|
||||
opts = append(opts, server.WithListObjectsDispatchThrottlingMaxThreshold(cfg.OpenFgaServerSettings.ListObjectsDispatchThrottlingMaxThreshold))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListObjectsDatabaseThrottleThreshold != 0 || cfg.OpenFgaServerSettings.ListObjectsDatabaseThrottleDuration != 0 {
|
||||
opts = append(opts, server.WithListObjectsDatabaseThrottle(cfg.OpenFgaServerSettings.ListObjectsDatabaseThrottleThreshold, cfg.OpenFgaServerSettings.ListObjectsDatabaseThrottleDuration))
|
||||
}
|
||||
|
||||
// ListUsers settings
|
||||
if cfg.OpenFgaServerSettings.ListUsersDeadline != 0 {
|
||||
opts = append(opts, server.WithListUsersDeadline(cfg.OpenFgaServerSettings.ListUsersDeadline))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListUsersMaxResults != 0 {
|
||||
opts = append(opts, server.WithListUsersMaxResults(cfg.OpenFgaServerSettings.ListUsersMaxResults))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.MaxConcurrentReadsForListUsers != 0 {
|
||||
opts = append(opts, server.WithMaxConcurrentReadsForListUsers(cfg.OpenFgaServerSettings.MaxConcurrentReadsForListUsers))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingEnabled {
|
||||
opts = append(opts, server.WithListUsersDispatchThrottlingEnabled(cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingEnabled))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingFrequency != 0 {
|
||||
opts = append(opts, server.WithListUsersDispatchThrottlingFrequency(cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingFrequency))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingThreshold != 0 {
|
||||
opts = append(opts, server.WithListUsersDispatchThrottlingThreshold(cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingThreshold))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingMaxThreshold != 0 {
|
||||
opts = append(opts, server.WithListUsersDispatchThrottlingMaxThreshold(cfg.OpenFgaServerSettings.ListUsersDispatchThrottlingMaxThreshold))
|
||||
}
|
||||
if cfg.OpenFgaServerSettings.ListUsersDatabaseThrottleThreshold != 0 || cfg.OpenFgaServerSettings.ListUsersDatabaseThrottleDuration != 0 {
|
||||
opts = append(opts, server.WithListUsersDatabaseThrottle(cfg.OpenFgaServerSettings.ListUsersDatabaseThrottleThreshold, cfg.OpenFgaServerSettings.ListUsersDatabaseThrottleDuration))
|
||||
}
|
||||
|
||||
return opts
|
||||
}
|
||||
|
||||
func NewOpenFGAHttpServer(cfg setting.ZanzanaServerSettings, srv grpcserver.Provider) (*http.Server, error) {
|
||||
dialOpts := []grpc.DialOption{
|
||||
grpc.WithTransportCredentials(insecure.NewCredentials()),
|
||||
|
||||
@@ -7,7 +7,7 @@ import (
|
||||
"github.com/open-feature/go-sdk/openfeature"
|
||||
)
|
||||
|
||||
func newGOFFProvider(url string, client *http.Client) (openfeature.FeatureProvider, error) {
|
||||
func newFeaturesServiceProvider(url string, client *http.Client) (openfeature.FeatureProvider, error) {
|
||||
options := gofeatureflag.ProviderOptions{
|
||||
Endpoint: url,
|
||||
// consider using github.com/grafana/grafana/pkg/infra/httpclient/provider.go
|
||||
@@ -19,11 +19,11 @@ const (
|
||||
|
||||
// OpenFeatureConfig holds configuration for initializing OpenFeature
|
||||
type OpenFeatureConfig struct {
|
||||
// ProviderType is either "static", "goff", or "ofrep"
|
||||
// ProviderType is either "static", "features-service", or "ofrep"
|
||||
ProviderType string
|
||||
// URL is the GOFF or OFREP service URL (required for GOFF + OFREP providers)
|
||||
// URL is the remote provider's URL (required for features-service + OFREP providers)
|
||||
URL *url.URL
|
||||
// HTTPClient is a pre-configured HTTP client (optional, used for GOFF + OFREP providers)
|
||||
// HTTPClient is a pre-configured HTTP client (optional, used by features-service + OFREP providers)
|
||||
HTTPClient *http.Client
|
||||
// StaticFlags are the feature flags to use with static provider
|
||||
StaticFlags map[string]bool
|
||||
@@ -35,9 +35,9 @@ type OpenFeatureConfig struct {
|
||||
|
||||
// InitOpenFeature initializes OpenFeature with the provided configuration
|
||||
func InitOpenFeature(config OpenFeatureConfig) error {
|
||||
// For GOFF + OFREP providers, ensure we have a URL
|
||||
if (config.ProviderType == setting.GOFFProviderType || config.ProviderType == setting.OFREPProviderType) && (config.URL == nil || config.URL.String() == "") {
|
||||
return fmt.Errorf("URL is required for GOFF + OFREP providers")
|
||||
// For remote providers, ensure we have a URL
|
||||
if (config.ProviderType == setting.FeaturesServiceProviderType || config.ProviderType == setting.OFREPProviderType) && (config.URL == nil || config.URL.String() == "") {
|
||||
return fmt.Errorf("URL is required for remote providers")
|
||||
}
|
||||
|
||||
p, err := createProvider(config.ProviderType, config.URL, config.StaticFlags, config.HTTPClient)
|
||||
@@ -66,10 +66,10 @@ func InitOpenFeatureWithCfg(cfg *setting.Cfg) error {
|
||||
}
|
||||
|
||||
var httpcli *http.Client
|
||||
if cfg.OpenFeature.ProviderType == setting.GOFFProviderType || cfg.OpenFeature.ProviderType == setting.OFREPProviderType {
|
||||
if cfg.OpenFeature.ProviderType == setting.FeaturesServiceProviderType || cfg.OpenFeature.ProviderType == setting.OFREPProviderType {
|
||||
var m *clientauthmiddleware.TokenExchangeMiddleware
|
||||
|
||||
if cfg.OpenFeature.ProviderType == setting.GOFFProviderType {
|
||||
if cfg.OpenFeature.ProviderType == setting.FeaturesServiceProviderType {
|
||||
m, err = clientauthmiddleware.NewTokenExchangeMiddleware(cfg)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to create token exchange middleware: %w", err)
|
||||
@@ -103,13 +103,13 @@ func createProvider(
|
||||
staticFlags map[string]bool,
|
||||
httpClient *http.Client,
|
||||
) (openfeature.FeatureProvider, error) {
|
||||
if providerType == setting.GOFFProviderType || providerType == setting.OFREPProviderType {
|
||||
if providerType == setting.FeaturesServiceProviderType || providerType == setting.OFREPProviderType {
|
||||
if u == nil || u.String() == "" {
|
||||
return nil, fmt.Errorf("feature provider url is required for GOFFProviderType + OFREPProviderType")
|
||||
return nil, fmt.Errorf("feature provider url is required for FeaturesServiceProviderType + OFREPProviderType")
|
||||
}
|
||||
|
||||
if providerType == setting.GOFFProviderType {
|
||||
return newGOFFProvider(u.String(), httpClient)
|
||||
if providerType == setting.FeaturesServiceProviderType {
|
||||
return newFeaturesServiceProvider(u.String(), httpClient)
|
||||
}
|
||||
|
||||
if providerType == setting.OFREPProviderType {
|
||||
|
||||
@@ -35,9 +35,9 @@ func TestCreateProvider(t *testing.T) {
|
||||
expectedProvider: setting.StaticProviderType,
|
||||
},
|
||||
{
|
||||
name: "goff provider",
|
||||
name: "features-service provider",
|
||||
cfg: setting.OpenFeatureSettings{
|
||||
ProviderType: setting.GOFFProviderType,
|
||||
ProviderType: setting.FeaturesServiceProviderType,
|
||||
URL: u,
|
||||
TargetingKey: "grafana",
|
||||
},
|
||||
@@ -45,12 +45,12 @@ func TestCreateProvider(t *testing.T) {
|
||||
Namespace: "*",
|
||||
Audiences: []string{"features.grafana.app"},
|
||||
},
|
||||
expectedProvider: setting.GOFFProviderType,
|
||||
expectedProvider: setting.FeaturesServiceProviderType,
|
||||
},
|
||||
{
|
||||
name: "goff provider with failing token exchange",
|
||||
name: "features-service provider with failing token exchange",
|
||||
cfg: setting.OpenFeatureSettings{
|
||||
ProviderType: setting.GOFFProviderType,
|
||||
ProviderType: setting.FeaturesServiceProviderType,
|
||||
URL: u,
|
||||
TargetingKey: "grafana",
|
||||
},
|
||||
@@ -58,7 +58,7 @@ func TestCreateProvider(t *testing.T) {
|
||||
Namespace: "*",
|
||||
Audiences: []string{"features.grafana.app"},
|
||||
},
|
||||
expectedProvider: setting.GOFFProviderType,
|
||||
expectedProvider: setting.FeaturesServiceProviderType,
|
||||
failSigning: true,
|
||||
},
|
||||
{
|
||||
@@ -107,7 +107,7 @@ func TestCreateProvider(t *testing.T) {
|
||||
|
||||
tokenExchangeMiddleware := middleware.TestingTokenExchangeMiddleware(tokenExchangeClient)
|
||||
httpClient, err := createHTTPClient(tokenExchangeMiddleware)
|
||||
require.NoError(t, err, "failed to create goff http client")
|
||||
require.NoError(t, err, "failed to create features-service http client")
|
||||
provider, err := createProvider(tc.cfg.ProviderType, tc.cfg.URL, nil, httpClient)
|
||||
require.NoError(t, err)
|
||||
|
||||
@@ -115,7 +115,7 @@ func TestCreateProvider(t *testing.T) {
|
||||
require.NoError(t, err, "failed to set provider")
|
||||
|
||||
switch tc.expectedProvider {
|
||||
case setting.GOFFProviderType:
|
||||
case setting.FeaturesServiceProviderType:
|
||||
_, ok := provider.(*gofeatureflag.Provider)
|
||||
assert.True(t, ok, "expected provider to be of type goff.Provider")
|
||||
|
||||
@@ -141,10 +141,10 @@ func testGoFFProvider(t *testing.T, failSigning bool) {
|
||||
_, err := openfeature.NewDefaultClient().BooleanValueDetails(ctx, "test", false, openfeature.NewEvaluationContext("test", map[string]interface{}{"test": "test"}))
|
||||
|
||||
// Error related to the token exchange should be returned if signing fails
|
||||
// otherwise, it should return a connection refused error since the goff URL is not set
|
||||
// otherwise, it should return a connection refused error since the features-service URL is not set
|
||||
if failSigning {
|
||||
assert.ErrorContains(t, err, "failed to exchange token: error signing token", "should return an error when signing fails")
|
||||
} else {
|
||||
assert.ErrorContains(t, err, "connect: connection refused", "should return an error when goff url is not set")
|
||||
assert.ErrorContains(t, err, "connect: connection refused", "should return an error when features-service url is not set")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -650,13 +650,6 @@ var (
|
||||
Stage: FeatureStageExperimental,
|
||||
Owner: grafanaDatavizSquad,
|
||||
},
|
||||
{
|
||||
Name: "kubernetesFeatureToggles",
|
||||
Description: "Use the kubernetes API for feature toggle management in the frontend",
|
||||
Stage: FeatureStageExperimental,
|
||||
FrontendOnly: true,
|
||||
Owner: grafanaOperatorExperienceSquad,
|
||||
},
|
||||
{
|
||||
Name: "cloudRBACRoles",
|
||||
Description: "Enabled grafana cloud specific RBAC roles",
|
||||
@@ -2090,6 +2083,13 @@ var (
|
||||
FrontendOnly: false,
|
||||
Owner: grafanaOperatorExperienceSquad,
|
||||
},
|
||||
{
|
||||
Name: "profilesExemplars",
|
||||
Description: "Enables profiles exemplars support in profiles drilldown",
|
||||
Stage: FeatureStageExperimental,
|
||||
Owner: grafanaObservabilityTracesAndProfilingSquad,
|
||||
FrontendOnly: false,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
2
pkg/services/featuremgmt/toggles_gen.csv
generated
2
pkg/services/featuremgmt/toggles_gen.csv
generated
@@ -90,7 +90,6 @@ pdfTables,preview,@grafana/grafana-operator-experience-squad,false,false,false
|
||||
canvasPanelPanZoom,preview,@grafana/dataviz-squad,false,false,true
|
||||
timeComparison,experimental,@grafana/dataviz-squad,false,false,true
|
||||
tableSharedCrosshair,experimental,@grafana/dataviz-squad,false,false,true
|
||||
kubernetesFeatureToggles,experimental,@grafana/grafana-operator-experience-squad,false,false,true
|
||||
cloudRBACRoles,preview,@grafana/identity-access-team,false,true,false
|
||||
alertingQueryOptimization,GA,@grafana/alerting-squad,false,false,false
|
||||
jitterAlertRulesWithinGroups,preview,@grafana/alerting-squad,false,true,false
|
||||
@@ -283,3 +282,4 @@ useMTPlugins,experimental,@grafana/plugins-platform-backend,false,false,true
|
||||
multiPropsVariables,experimental,@grafana/dashboards-squad,false,false,true
|
||||
smoothingTransformation,experimental,@grafana/datapro,false,false,true
|
||||
secretsManagementAppPlatformAwsKeeper,experimental,@grafana/grafana-operator-experience-squad,false,false,false
|
||||
profilesExemplars,experimental,@grafana/observability-traces-and-profiling,false,false,false
|
||||
|
||||
|
4
pkg/services/featuremgmt/toggles_gen.go
generated
4
pkg/services/featuremgmt/toggles_gen.go
generated
@@ -785,4 +785,8 @@ const (
|
||||
// FlagSecretsManagementAppPlatformAwsKeeper
|
||||
// Enables the creation of keepers that manage secrets stored on AWS secrets manager
|
||||
FlagSecretsManagementAppPlatformAwsKeeper = "secretsManagementAppPlatformAwsKeeper"
|
||||
|
||||
// FlagProfilesExemplars
|
||||
// Enables profiles exemplars support in profiles drilldown
|
||||
FlagProfilesExemplars = "profilesExemplars"
|
||||
)
|
||||
|
||||
15
pkg/services/featuremgmt/toggles_gen.json
generated
15
pkg/services/featuremgmt/toggles_gen.json
generated
@@ -2044,7 +2044,8 @@
|
||||
"metadata": {
|
||||
"name": "kubernetesFeatureToggles",
|
||||
"resourceVersion": "1764664939750",
|
||||
"creationTimestamp": "2024-01-18T05:32:44Z"
|
||||
"creationTimestamp": "2024-01-18T05:32:44Z",
|
||||
"deletionTimestamp": "2026-01-07T12:02:51Z"
|
||||
},
|
||||
"spec": {
|
||||
"description": "Use the kubernetes API for feature toggle management in the frontend",
|
||||
@@ -2866,6 +2867,18 @@
|
||||
"expression": "true"
|
||||
}
|
||||
},
|
||||
{
|
||||
"metadata": {
|
||||
"name": "profilesExemplars",
|
||||
"resourceVersion": "1767777507980",
|
||||
"creationTimestamp": "2026-01-07T09:18:27Z"
|
||||
},
|
||||
"spec": {
|
||||
"description": "Enables profiles exemplars support in profiles drilldown",
|
||||
"stage": "experimental",
|
||||
"codeowner": "@grafana/observability-traces-and-profiling"
|
||||
}
|
||||
},
|
||||
{
|
||||
"metadata": {
|
||||
"name": "prometheusAzureOverrideAudience",
|
||||
|
||||
@@ -14,6 +14,8 @@ import (
|
||||
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/provider"
|
||||
"github.com/grafana/grafana/pkg/plugins/log"
|
||||
"github.com/grafana/grafana/pkg/tsdb/azuremonitor"
|
||||
cloudmonitoring "github.com/grafana/grafana/pkg/tsdb/cloud-monitoring"
|
||||
@@ -92,6 +94,10 @@ func NewRegistry(store map[string]backendplugin.PluginFactoryFunc) *Registry {
|
||||
}
|
||||
}
|
||||
|
||||
func ProvideCoreProvider(coreRegistry *Registry) plugins.BackendFactoryProvider {
|
||||
return provider.New(coreRegistry.BackendFactoryProvider(), provider.DefaultProvider)
|
||||
}
|
||||
|
||||
func ProvideCoreRegistry(tracer trace.Tracer, am *azuremonitor.Service, cw *cloudwatch.Service, cm *cloudmonitoring.Service,
|
||||
es *elasticsearch.Service, grap *graphite.Service, idb *influxdb.Service, lk *loki.Service, otsdb *opentsdb.Service,
|
||||
pr *prometheus.Service, t *tempo.Service, td *testdatasource.Service, pg *postgres.Service, my *mysql.Service,
|
||||
@@ -156,7 +162,7 @@ func asBackendPlugin(svc any) backendplugin.PluginFactoryFunc {
|
||||
|
||||
if opts.QueryDataHandler != nil || opts.CallResourceHandler != nil ||
|
||||
opts.CheckHealthHandler != nil || opts.StreamHandler != nil {
|
||||
return New(opts)
|
||||
return coreplugin.New(opts)
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -6,7 +6,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/auth"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/config"
|
||||
"github.com/grafana/grafana/pkg/plugins/envvars"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/loader/angular/angularinspector"
|
||||
@@ -19,6 +18,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/registry"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/signature"
|
||||
"github.com/grafana/grafana/pkg/plugins/pluginassets"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/provisionedplugins"
|
||||
)
|
||||
|
||||
@@ -10,8 +10,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/auth"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/provider"
|
||||
"github.com/grafana/grafana/pkg/plugins/envvars"
|
||||
"github.com/grafana/grafana/pkg/plugins/log"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/client"
|
||||
@@ -39,6 +37,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/angularinspector"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/angularpatternsstore"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/clientmiddleware"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/installsync"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/keyretriever"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/keyretriever/dynamic"
|
||||
@@ -146,8 +145,7 @@ var WireSet = wire.NewSet(
|
||||
// WireExtensionSet provides a wire.ProviderSet of plugin providers that can be
|
||||
// extended.
|
||||
var WireExtensionSet = wire.NewSet(
|
||||
provider.ProvideService,
|
||||
wire.Bind(new(plugins.BackendFactoryProvider), new(*provider.Service)),
|
||||
coreplugin.ProvideCoreProvider,
|
||||
signature.ProvideOSSAuthorizer,
|
||||
wire.Bind(new(plugins.PluginLoaderAuthorizer), new(*signature.UnsignedPluginAuthorizer)),
|
||||
ProvideClientWithMiddlewares,
|
||||
|
||||
@@ -19,10 +19,10 @@ import (
|
||||
"github.com/grafana/grafana/pkg/infra/fs"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/org"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
|
||||
"github.com/grafana/grafana/pkg/services/searchV2"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore"
|
||||
|
||||
@@ -8,8 +8,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/plugins/backendplugin/provider"
|
||||
pluginsCfg "github.com/grafana/grafana/pkg/plugins/config"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/client"
|
||||
"github.com/grafana/grafana/pkg/plugins/manager/loader"
|
||||
@@ -27,6 +25,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/plugins/pluginassets"
|
||||
"github.com/grafana/grafana/pkg/plugins/pluginerrs"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/coreplugin"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pipeline"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginconfig"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginsources"
|
||||
@@ -52,7 +51,7 @@ func CreateIntegrationTestCtx(t *testing.T, cfg *setting.Cfg, coreRegistry *core
|
||||
disc := pipeline.ProvideDiscoveryStage(pCfg, reg)
|
||||
boot := pipeline.ProvideBootstrapStage(pCfg, signature.ProvideService(pCfg, statickey.New()), pluginassets.NewLocalProvider())
|
||||
valid := pipeline.ProvideValidationStage(pCfg, signature.NewValidator(signature.NewUnsignedAuthorizer(pCfg)), angularInspector)
|
||||
init := pipeline.ProvideInitializationStage(pCfg, reg, provider.ProvideService(coreRegistry), proc, &pluginfakes.FakeAuthService{}, pluginfakes.NewFakeRoleRegistry(), pluginfakes.NewFakeActionSetRegistry(), nil, tracing.InitializeTracerForTest(), provisionedplugins.NewNoop())
|
||||
init := pipeline.ProvideInitializationStage(pCfg, reg, coreplugin.ProvideCoreProvider(coreRegistry), proc, &pluginfakes.FakeAuthService{}, pluginfakes.NewFakeRoleRegistry(), pluginfakes.NewFakeActionSetRegistry(), nil, tracing.InitializeTracerForTest(), provisionedplugins.NewNoop())
|
||||
term, err := pipeline.ProvideTerminationStage(pCfg, reg, proc)
|
||||
require.NoError(t, err)
|
||||
|
||||
@@ -98,7 +97,7 @@ func CreateTestLoader(t *testing.T, cfg *pluginsCfg.PluginManagementCfg, opts Lo
|
||||
if opts.Initializer == nil {
|
||||
reg := registry.ProvideService()
|
||||
coreRegistry := coreplugin.NewRegistry(make(map[string]backendplugin.PluginFactoryFunc))
|
||||
opts.Initializer = pipeline.ProvideInitializationStage(cfg, reg, provider.ProvideService(coreRegistry), process.ProvideService(), &pluginfakes.FakeAuthService{}, pluginfakes.NewFakeRoleRegistry(), pluginfakes.NewFakeActionSetRegistry(), nil, tracing.InitializeTracerForTest(), provisionedplugins.NewNoop())
|
||||
opts.Initializer = pipeline.ProvideInitializationStage(cfg, reg, coreplugin.ProvideCoreProvider(coreRegistry), process.ProvideService(), &pluginfakes.FakeAuthService{}, pluginfakes.NewFakeRoleRegistry(), pluginfakes.NewFakeActionSetRegistry(), nil, tracing.InitializeTracerForTest(), provisionedplugins.NewNoop())
|
||||
}
|
||||
|
||||
if opts.Terminator == nil {
|
||||
|
||||
@@ -6,9 +6,9 @@ import (
|
||||
)
|
||||
|
||||
const (
|
||||
StaticProviderType = "static"
|
||||
GOFFProviderType = "goff"
|
||||
OFREPProviderType = "ofrep"
|
||||
StaticProviderType = "static"
|
||||
FeaturesServiceProviderType = "features-service"
|
||||
OFREPProviderType = "ofrep"
|
||||
)
|
||||
|
||||
type OpenFeatureSettings struct {
|
||||
@@ -34,7 +34,7 @@ func (cfg *Cfg) readOpenFeatureSettings() error {
|
||||
|
||||
cfg.OpenFeature.TargetingKey = config.Key("targetingKey").MustString(defaultTargetingKey)
|
||||
|
||||
if strURL != "" && (cfg.OpenFeature.ProviderType == GOFFProviderType || cfg.OpenFeature.ProviderType == OFREPProviderType) {
|
||||
if strURL != "" && (cfg.OpenFeature.ProviderType == FeaturesServiceProviderType || cfg.OpenFeature.ProviderType == OFREPProviderType) {
|
||||
u, err := url.Parse(strURL)
|
||||
if err != nil {
|
||||
return fmt.Errorf("invalid feature provider url: %w", err)
|
||||
|
||||
@@ -37,6 +37,8 @@ type ZanzanaServerSettings struct {
|
||||
OpenFGAHttpAddr string
|
||||
// Cache settings
|
||||
CacheSettings OpenFgaCacheSettings
|
||||
// OpenFGA server settings
|
||||
OpenFgaServerSettings OpenFgaServerSettings
|
||||
// Max number of results returned by ListObjects() query. Default is 1000.
|
||||
ListObjectsMaxResults uint32
|
||||
// Deadline for the ListObjects() query. Default is 3 seconds.
|
||||
@@ -50,6 +52,92 @@ type ZanzanaServerSettings struct {
|
||||
AllowInsecure bool
|
||||
}
|
||||
|
||||
type OpenFgaServerSettings struct {
|
||||
// ListObjects settings
|
||||
// Max number of concurrent datastore reads for ListObjects queries
|
||||
MaxConcurrentReadsForListObjects uint32
|
||||
// Enable dispatch throttling for ListObjects queries
|
||||
ListObjectsDispatchThrottlingEnabled bool
|
||||
// Frequency for dispatch throttling in ListObjects queries
|
||||
ListObjectsDispatchThrottlingFrequency time.Duration
|
||||
// Threshold for dispatch throttling in ListObjects queries
|
||||
ListObjectsDispatchThrottlingThreshold uint32
|
||||
// Max threshold for dispatch throttling in ListObjects queries
|
||||
ListObjectsDispatchThrottlingMaxThreshold uint32
|
||||
// Database throttle threshold for ListObjects queries
|
||||
ListObjectsDatabaseThrottleThreshold int
|
||||
// Database throttle duration for ListObjects queries
|
||||
ListObjectsDatabaseThrottleDuration time.Duration
|
||||
|
||||
// ListUsers settings
|
||||
// Deadline for ListUsers queries
|
||||
ListUsersDeadline time.Duration
|
||||
// Max number of results returned by ListUsers queries
|
||||
ListUsersMaxResults uint32
|
||||
// Max number of concurrent datastore reads for ListUsers queries
|
||||
MaxConcurrentReadsForListUsers uint32
|
||||
// Enable dispatch throttling for ListUsers queries
|
||||
ListUsersDispatchThrottlingEnabled bool
|
||||
// Frequency for dispatch throttling in ListUsers queries
|
||||
ListUsersDispatchThrottlingFrequency time.Duration
|
||||
// Threshold for dispatch throttling in ListUsers queries
|
||||
ListUsersDispatchThrottlingThreshold uint32
|
||||
// Max threshold for dispatch throttling in ListUsers queries
|
||||
ListUsersDispatchThrottlingMaxThreshold uint32
|
||||
// Database throttle threshold for ListUsers queries
|
||||
ListUsersDatabaseThrottleThreshold int
|
||||
// Database throttle duration for ListUsers queries
|
||||
ListUsersDatabaseThrottleDuration time.Duration
|
||||
|
||||
// Check settings
|
||||
// Max number of concurrent datastore reads for Check queries
|
||||
MaxConcurrentReadsForCheck uint32
|
||||
// Database throttle threshold for Check queries
|
||||
CheckDatabaseThrottleThreshold int
|
||||
// Database throttle duration for Check queries
|
||||
CheckDatabaseThrottleDuration time.Duration
|
||||
|
||||
// Batch check settings
|
||||
// Max number of concurrent checks per batch check request
|
||||
MaxConcurrentChecksPerBatchCheck uint32
|
||||
// Max number of checks per batch check request
|
||||
MaxChecksPerBatchCheck uint32
|
||||
|
||||
// Resolve node settings
|
||||
// Max number of nodes that can be resolved in a single query
|
||||
ResolveNodeLimit uint32
|
||||
// Max breadth of nodes that can be resolved in a single query
|
||||
ResolveNodeBreadthLimit uint32
|
||||
|
||||
// Dispatch throttling settings for Check resolver
|
||||
// Enable dispatch throttling for Check resolver
|
||||
DispatchThrottlingCheckResolverEnabled bool
|
||||
// Frequency for dispatch throttling in Check resolver
|
||||
DispatchThrottlingCheckResolverFrequency time.Duration
|
||||
// Threshold for dispatch throttling in Check resolver
|
||||
DispatchThrottlingCheckResolverThreshold uint32
|
||||
// Max threshold for dispatch throttling in Check resolver
|
||||
DispatchThrottlingCheckResolverMaxThreshold uint32
|
||||
|
||||
// Shadow check/query settings
|
||||
// Timeout for shadow check resolver
|
||||
ShadowCheckResolverTimeout time.Duration
|
||||
// Timeout for shadow ListObjects query
|
||||
ShadowListObjectsQueryTimeout time.Duration
|
||||
// Max delta items for shadow ListObjects query
|
||||
ShadowListObjectsQueryMaxDeltaItems int
|
||||
|
||||
// Request settings
|
||||
// Global request timeout
|
||||
RequestTimeout time.Duration
|
||||
// Max size in bytes for authorization model
|
||||
MaxAuthorizationModelSizeInBytes int
|
||||
// Size of the authorization model cache
|
||||
AuthorizationModelCacheSize int
|
||||
// Offset for changelog horizon
|
||||
ChangelogHorizonOffset int
|
||||
}
|
||||
|
||||
// Parameters to configure OpenFGA cache.
|
||||
type OpenFgaCacheSettings struct {
|
||||
// Number of items that will be kept in the in-memory cache used to resolve Check queries.
|
||||
@@ -156,5 +244,56 @@ func (cfg *Cfg) readZanzanaSettings() {
|
||||
zs.CacheSettings.SharedIteratorLimit = uint32(serverSec.Key("shared_iterator_limit").MustUint(1000))
|
||||
zs.CacheSettings.SharedIteratorTTL = serverSec.Key("shared_iterator_ttl").MustDuration(10 * time.Second)
|
||||
|
||||
openfgaSec := cfg.SectionWithEnvOverrides("openfga")
|
||||
|
||||
// ListObjects settings
|
||||
zs.OpenFgaServerSettings.MaxConcurrentReadsForListObjects = uint32(openfgaSec.Key("max_concurrent_reads_for_list_objects").MustUint(0))
|
||||
zs.OpenFgaServerSettings.ListObjectsDispatchThrottlingEnabled = openfgaSec.Key("list_objects_dispatch_throttling_enabled").MustBool(false)
|
||||
zs.OpenFgaServerSettings.ListObjectsDispatchThrottlingFrequency = openfgaSec.Key("list_objects_dispatch_throttling_frequency").MustDuration(0)
|
||||
zs.OpenFgaServerSettings.ListObjectsDispatchThrottlingThreshold = uint32(openfgaSec.Key("list_objects_dispatch_throttling_threshold").MustUint(0))
|
||||
zs.OpenFgaServerSettings.ListObjectsDispatchThrottlingMaxThreshold = uint32(openfgaSec.Key("list_objects_dispatch_throttling_max_threshold").MustUint(0))
|
||||
zs.OpenFgaServerSettings.ListObjectsDatabaseThrottleThreshold = openfgaSec.Key("list_objects_database_throttle_threshold").MustInt(0)
|
||||
zs.OpenFgaServerSettings.ListObjectsDatabaseThrottleDuration = openfgaSec.Key("list_objects_database_throttle_duration").MustDuration(0)
|
||||
|
||||
// ListUsers settings
|
||||
zs.OpenFgaServerSettings.ListUsersDeadline = openfgaSec.Key("list_users_deadline").MustDuration(0)
|
||||
zs.OpenFgaServerSettings.ListUsersMaxResults = uint32(openfgaSec.Key("list_users_max_results").MustUint(0))
|
||||
zs.OpenFgaServerSettings.MaxConcurrentReadsForListUsers = uint32(openfgaSec.Key("max_concurrent_reads_for_list_users").MustUint(0))
|
||||
zs.OpenFgaServerSettings.ListUsersDispatchThrottlingEnabled = openfgaSec.Key("list_users_dispatch_throttling_enabled").MustBool(false)
|
||||
zs.OpenFgaServerSettings.ListUsersDispatchThrottlingFrequency = openfgaSec.Key("list_users_dispatch_throttling_frequency").MustDuration(0)
|
||||
zs.OpenFgaServerSettings.ListUsersDispatchThrottlingThreshold = uint32(openfgaSec.Key("list_users_dispatch_throttling_threshold").MustUint(0))
|
||||
zs.OpenFgaServerSettings.ListUsersDispatchThrottlingMaxThreshold = uint32(openfgaSec.Key("list_users_dispatch_throttling_max_threshold").MustUint(0))
|
||||
zs.OpenFgaServerSettings.ListUsersDatabaseThrottleThreshold = openfgaSec.Key("list_users_database_throttle_threshold").MustInt(0)
|
||||
zs.OpenFgaServerSettings.ListUsersDatabaseThrottleDuration = openfgaSec.Key("list_users_database_throttle_duration").MustDuration(0)
|
||||
|
||||
// Check settings
|
||||
zs.OpenFgaServerSettings.MaxConcurrentReadsForCheck = uint32(openfgaSec.Key("max_concurrent_reads_for_check").MustUint(0))
|
||||
zs.OpenFgaServerSettings.CheckDatabaseThrottleThreshold = openfgaSec.Key("check_database_throttle_threshold").MustInt(0)
|
||||
zs.OpenFgaServerSettings.CheckDatabaseThrottleDuration = openfgaSec.Key("check_database_throttle_duration").MustDuration(0)
|
||||
|
||||
// Batch check settings
|
||||
zs.OpenFgaServerSettings.MaxConcurrentChecksPerBatchCheck = uint32(openfgaSec.Key("max_concurrent_checks_per_batch_check").MustUint(0))
|
||||
zs.OpenFgaServerSettings.MaxChecksPerBatchCheck = uint32(openfgaSec.Key("max_checks_per_batch_check").MustUint(0))
|
||||
|
||||
// Resolve node settings
|
||||
zs.OpenFgaServerSettings.ResolveNodeLimit = uint32(openfgaSec.Key("resolve_node_limit").MustUint(0))
|
||||
zs.OpenFgaServerSettings.ResolveNodeBreadthLimit = uint32(openfgaSec.Key("resolve_node_breadth_limit").MustUint(0))
|
||||
|
||||
// Dispatch throttling settings for Check resolver
|
||||
zs.OpenFgaServerSettings.DispatchThrottlingCheckResolverEnabled = openfgaSec.Key("dispatch_throttling_check_resolver_enabled").MustBool(false)
|
||||
zs.OpenFgaServerSettings.DispatchThrottlingCheckResolverFrequency = openfgaSec.Key("dispatch_throttling_check_resolver_frequency").MustDuration(0)
|
||||
zs.OpenFgaServerSettings.DispatchThrottlingCheckResolverThreshold = uint32(openfgaSec.Key("dispatch_throttling_check_resolver_threshold").MustUint(0))
|
||||
zs.OpenFgaServerSettings.DispatchThrottlingCheckResolverMaxThreshold = uint32(openfgaSec.Key("dispatch_throttling_check_resolver_max_threshold").MustUint(0))
|
||||
|
||||
// Shadow check/query settings
|
||||
zs.OpenFgaServerSettings.ShadowCheckResolverTimeout = openfgaSec.Key("shadow_check_resolver_timeout").MustDuration(0)
|
||||
zs.OpenFgaServerSettings.ShadowListObjectsQueryTimeout = openfgaSec.Key("shadow_list_objects_query_timeout").MustDuration(0)
|
||||
zs.OpenFgaServerSettings.ShadowListObjectsQueryMaxDeltaItems = openfgaSec.Key("shadow_list_objects_query_max_delta_items").MustInt(0)
|
||||
|
||||
zs.OpenFgaServerSettings.RequestTimeout = openfgaSec.Key("request_timeout").MustDuration(0)
|
||||
zs.OpenFgaServerSettings.MaxAuthorizationModelSizeInBytes = openfgaSec.Key("max_authorization_model_size_in_bytes").MustInt(0)
|
||||
zs.OpenFgaServerSettings.AuthorizationModelCacheSize = openfgaSec.Key("authorization_model_cache_size").MustInt(0)
|
||||
zs.OpenFgaServerSettings.ChangelogHorizonOffset = openfgaSec.Key("changelog_horizon_offset").MustInt(0)
|
||||
|
||||
cfg.ZanzanaServer = zs
|
||||
}
|
||||
|
||||
@@ -1786,6 +1786,13 @@
|
||||
"skipUrlSync": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"valuesFormat": {
|
||||
"enum": [
|
||||
"csv",
|
||||
"json"
|
||||
],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
|
||||
@@ -1801,6 +1801,13 @@
|
||||
"skipUrlSync": {
|
||||
"type": "boolean",
|
||||
"default": false
|
||||
},
|
||||
"valuesFormat": {
|
||||
"type": "string",
|
||||
"enum": [
|
||||
"csv",
|
||||
"json"
|
||||
]
|
||||
}
|
||||
},
|
||||
"additionalProperties": false
|
||||
|
||||
@@ -321,7 +321,7 @@ func CreateGrafDir(t *testing.T, opts GrafanaOpts) (string, string) {
|
||||
_, err = openFeatureSect.NewKey("enable_api", strconv.FormatBool(opts.OpenFeatureAPIEnabled))
|
||||
require.NoError(t, err)
|
||||
if !opts.OpenFeatureAPIEnabled {
|
||||
_, err = openFeatureSect.NewKey("provider", "static") // in practice, APIEnabled being false goes with goff type, but trying to make tests work
|
||||
_, err = openFeatureSect.NewKey("provider", "static") // in practice, APIEnabled being false goes with features-service type, but trying to make tests work
|
||||
require.NoError(t, err)
|
||||
_, err = openFeatureSect.NewKey("targetingKey", "grafana")
|
||||
require.NoError(t, err)
|
||||
|
||||
43
pkg/tsdb/grafana-pyroscope-datasource/exemplar/exemplar.go
Normal file
43
pkg/tsdb/grafana-pyroscope-datasource/exemplar/exemplar.go
Normal file
@@ -0,0 +1,43 @@
|
||||
package exemplar
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
)
|
||||
|
||||
type Exemplar struct {
|
||||
Id string
|
||||
Value float64
|
||||
Timestamp int64
|
||||
}
|
||||
|
||||
func CreateExemplarFrame(labels map[string]string, exemplars []*Exemplar) *data.Frame {
|
||||
frame := data.NewFrame("exemplar")
|
||||
frame.Meta = &data.FrameMeta{
|
||||
DataTopic: data.DataTopicAnnotations,
|
||||
}
|
||||
fields := []*data.Field{
|
||||
data.NewField("Time", nil, []time.Time{}),
|
||||
data.NewField("Value", labels, []float64{}), // add labels here?
|
||||
data.NewField("Id", nil, []string{}),
|
||||
}
|
||||
fields[2].Config = &data.FieldConfig{
|
||||
DisplayName: "Profile ID",
|
||||
}
|
||||
for name := range labels {
|
||||
fields = append(fields, data.NewField(name, nil, []string{}))
|
||||
}
|
||||
frame.Fields = fields
|
||||
|
||||
for _, e := range exemplars {
|
||||
frame.AppendRow(time.UnixMilli(e.Timestamp), e.Value, e.Id)
|
||||
for name, value := range labels {
|
||||
field, _ := frame.FieldByName(name)
|
||||
if field != nil {
|
||||
field.Append(value)
|
||||
}
|
||||
}
|
||||
}
|
||||
return frame
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
package exemplar
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestCreateExemplarFrame(t *testing.T) {
|
||||
exemplars := []*Exemplar{
|
||||
{Id: "1", Value: 1.0, Timestamp: 100},
|
||||
{Id: "2", Value: 2.0, Timestamp: 200},
|
||||
}
|
||||
labels := map[string]string{
|
||||
"foo": "bar",
|
||||
}
|
||||
frame := CreateExemplarFrame(labels, exemplars)
|
||||
|
||||
require.Equal(t, "exemplar", frame.Name)
|
||||
require.Equal(t, 4, len(frame.Fields))
|
||||
require.Equal(t, "Time", frame.Fields[0].Name)
|
||||
require.Equal(t, "Value", frame.Fields[1].Name)
|
||||
require.Equal(t, "Id", frame.Fields[2].Name)
|
||||
require.Equal(t, "foo", frame.Fields[3].Name)
|
||||
|
||||
rows, err := frame.RowLen()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 2, rows)
|
||||
row := frame.RowCopy(0)
|
||||
require.Equal(t, 4, len(row))
|
||||
require.Equal(t, 1.0, row[1])
|
||||
require.Equal(t, "1", row[2])
|
||||
require.Equal(t, "bar", row[3])
|
||||
}
|
||||
@@ -18,6 +18,8 @@ import (
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
|
||||
typesv1 "github.com/grafana/pyroscope/api/gen/proto/go/types/v1"
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -31,7 +33,7 @@ type ProfilingClient interface {
|
||||
ProfileTypes(ctx context.Context, start int64, end int64) ([]*ProfileType, error)
|
||||
LabelNames(ctx context.Context, labelSelector string, start int64, end int64) ([]string, error)
|
||||
LabelValues(ctx context.Context, label string, labelSelector string, start int64, end int64) ([]string, error)
|
||||
GetSeries(ctx context.Context, profileTypeID string, labelSelector string, start int64, end int64, groupBy []string, limit *int64, step float64) (*SeriesResponse, error)
|
||||
GetSeries(ctx context.Context, profileTypeID string, labelSelector string, start int64, end int64, groupBy []string, limit *int64, step float64, exemplarType typesv1.ExemplarType) (*SeriesResponse, error)
|
||||
GetProfile(ctx context.Context, profileTypeID string, labelSelector string, start int64, end int64, maxNodes *int64) (*ProfileResponse, error)
|
||||
GetSpanProfile(ctx context.Context, profileTypeID string, labelSelector string, spanSelector []string, start int64, end int64, maxNodes *int64) (*ProfileResponse, error)
|
||||
}
|
||||
|
||||
@@ -32,6 +32,8 @@ type GrafanaPyroscopeDataQuery struct {
|
||||
Limit *int64 `json:"limit,omitempty"`
|
||||
// Sets the maximum number of nodes in the flamegraph.
|
||||
MaxNodes *int64 `json:"maxNodes,omitempty"`
|
||||
// If set to true, the response will contain annotations
|
||||
Annotations *bool `json:"annotations,omitempty"`
|
||||
// A unique identifier for the query within the list of targets.
|
||||
// In server side expressions, the refId is used as a variable name to identify results.
|
||||
// By default, the UI will assign A->Z; however setting meaningful names may be useful.
|
||||
@@ -41,8 +43,8 @@ type GrafanaPyroscopeDataQuery struct {
|
||||
// Specify the query flavor
|
||||
// TODO make this required and give it a default
|
||||
QueryType *string `json:"queryType,omitempty"`
|
||||
// If set to true, the response will contain annotations
|
||||
Annotations *bool `json:"annotations,omitempty"`
|
||||
// If set to true, exemplars will be requested
|
||||
IncludeExemplars bool `json:"includeExemplars"`
|
||||
// For mixed data sources the selected datasource is on the query level.
|
||||
// For non mixed scenarios this is undefined.
|
||||
// TODO find a better way to do this ^ that's friendly to schema
|
||||
@@ -53,7 +55,8 @@ type GrafanaPyroscopeDataQuery struct {
|
||||
// NewGrafanaPyroscopeDataQuery creates a new GrafanaPyroscopeDataQuery object.
|
||||
func NewGrafanaPyroscopeDataQuery() *GrafanaPyroscopeDataQuery {
|
||||
return &GrafanaPyroscopeDataQuery{
|
||||
LabelSelector: "{}",
|
||||
GroupBy: []string{},
|
||||
LabelSelector: "{}",
|
||||
GroupBy: []string{},
|
||||
IncludeExemplars: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,14 +8,16 @@ import (
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
|
||||
|
||||
typesv1 "github.com/grafana/pyroscope/api/gen/proto/go/types/v1"
|
||||
|
||||
"connectrpc.com/connect"
|
||||
querierv1 "github.com/grafana/pyroscope/api/gen/proto/go/querier/v1"
|
||||
"github.com/grafana/pyroscope/api/gen/proto/go/querier/v1/querierv1connect"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
|
||||
querierv1 "github.com/grafana/pyroscope/api/gen/proto/go/querier/v1"
|
||||
"github.com/grafana/pyroscope/api/gen/proto/go/querier/v1/querierv1connect"
|
||||
)
|
||||
|
||||
type ProfileType struct {
|
||||
@@ -49,6 +51,13 @@ type Point struct {
|
||||
// Milliseconds unix timestamp
|
||||
Timestamp int64
|
||||
Annotations []*typesv1.ProfileAnnotation
|
||||
Exemplars []*Exemplar
|
||||
}
|
||||
|
||||
type Exemplar struct {
|
||||
Id string
|
||||
Value uint64
|
||||
Timestamp int64
|
||||
}
|
||||
|
||||
type ProfileResponse struct {
|
||||
@@ -99,7 +108,7 @@ func (c *PyroscopeClient) ProfileTypes(ctx context.Context, start int64, end int
|
||||
}
|
||||
}
|
||||
|
||||
func (c *PyroscopeClient) GetSeries(ctx context.Context, profileTypeID string, labelSelector string, start int64, end int64, groupBy []string, limit *int64, step float64) (*SeriesResponse, error) {
|
||||
func (c *PyroscopeClient) GetSeries(ctx context.Context, profileTypeID string, labelSelector string, start int64, end int64, groupBy []string, limit *int64, step float64, exemplarType typesv1.ExemplarType) (*SeriesResponse, error) {
|
||||
ctx, span := tracing.DefaultTracer().Start(ctx, "datasource.pyroscope.GetSeries", trace.WithAttributes(attribute.String("profileTypeID", profileTypeID), attribute.String("labelSelector", labelSelector)))
|
||||
defer span.End()
|
||||
req := connect.NewRequest(&querierv1.SelectSeriesRequest{
|
||||
@@ -110,6 +119,7 @@ func (c *PyroscopeClient) GetSeries(ctx context.Context, profileTypeID string, l
|
||||
Step: step,
|
||||
GroupBy: groupBy,
|
||||
Limit: limit,
|
||||
ExemplarType: exemplarType,
|
||||
})
|
||||
|
||||
resp, err := c.connectClient.SelectSeries(ctx, req)
|
||||
@@ -137,6 +147,16 @@ func (c *PyroscopeClient) GetSeries(ctx context.Context, profileTypeID string, l
|
||||
Timestamp: p.Timestamp,
|
||||
Annotations: p.Annotations,
|
||||
}
|
||||
if len(p.Exemplars) > 0 {
|
||||
points[i].Exemplars = make([]*Exemplar, len(p.Exemplars))
|
||||
for j, e := range p.Exemplars {
|
||||
points[i].Exemplars[j] = &Exemplar{
|
||||
Id: e.ProfileId,
|
||||
Value: e.Value,
|
||||
Timestamp: e.Timestamp,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
series[i] = &Series{
|
||||
|
||||
@@ -5,10 +5,11 @@ import (
|
||||
"testing"
|
||||
|
||||
"connectrpc.com/connect"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
googlev1 "github.com/grafana/pyroscope/api/gen/proto/go/google/v1"
|
||||
querierv1 "github.com/grafana/pyroscope/api/gen/proto/go/querier/v1"
|
||||
typesv1 "github.com/grafana/pyroscope/api/gen/proto/go/types/v1"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_PyroscopeClient(t *testing.T) {
|
||||
@@ -19,7 +20,7 @@ func Test_PyroscopeClient(t *testing.T) {
|
||||
|
||||
t.Run("GetSeries", func(t *testing.T) {
|
||||
limit := int64(42)
|
||||
resp, err := client.GetSeries(context.Background(), "memory:alloc_objects:count:space:bytes", "{}", 0, 100, []string{}, &limit, 15)
|
||||
resp, err := client.GetSeries(context.Background(), "memory:alloc_objects:count:space:bytes", "{}", 0, 100, []string{}, &limit, 15, typesv1.ExemplarType_EXEMPLAR_TYPE_NONE)
|
||||
require.Nil(t, err)
|
||||
|
||||
series := &SeriesResponse{
|
||||
@@ -32,6 +33,21 @@ func Test_PyroscopeClient(t *testing.T) {
|
||||
require.Equal(t, series, resp)
|
||||
})
|
||||
|
||||
t.Run("GetSeriesWithExemplars", func(t *testing.T) {
|
||||
limit := int64(42)
|
||||
resp, err := client.GetSeries(context.Background(), "memory:alloc_objects:count:space:bytes", "{}", 0, 100, []string{}, &limit, 15, typesv1.ExemplarType_EXEMPLAR_TYPE_INDIVIDUAL)
|
||||
require.Nil(t, err)
|
||||
|
||||
series := &SeriesResponse{
|
||||
Series: []*Series{
|
||||
{Labels: []*LabelPair{{Name: "foo", Value: "bar"}}, Points: []*Point{{Timestamp: int64(1000), Value: 30, Exemplars: []*Exemplar{{Id: "id1", Value: 3, Timestamp: 1000}}}, {Timestamp: int64(2000), Value: 10, Exemplars: []*Exemplar{{Id: "id2", Value: 1, Timestamp: 2000}}}}},
|
||||
},
|
||||
Units: "short",
|
||||
Label: "alloc_objects",
|
||||
}
|
||||
require.Equal(t, series, resp)
|
||||
})
|
||||
|
||||
t.Run("GetProfile", func(t *testing.T) {
|
||||
maxNodes := int64(-1)
|
||||
resp, err := client.GetProfile(context.Background(), "memory:alloc_objects:count:space:bytes", "{}", 0, 100, &maxNodes)
|
||||
@@ -115,6 +131,21 @@ func (f *FakePyroscopeConnectClient) SelectMergeStacktraces(ctx context.Context,
|
||||
|
||||
func (f *FakePyroscopeConnectClient) SelectSeries(ctx context.Context, req *connect.Request[querierv1.SelectSeriesRequest]) (*connect.Response[querierv1.SelectSeriesResponse], error) {
|
||||
f.Req = req
|
||||
if req.Msg.ExemplarType == typesv1.ExemplarType_EXEMPLAR_TYPE_INDIVIDUAL {
|
||||
return &connect.Response[querierv1.SelectSeriesResponse]{
|
||||
Msg: &querierv1.SelectSeriesResponse{
|
||||
Series: []*typesv1.Series{
|
||||
{
|
||||
Labels: []*typesv1.LabelPair{{Name: "foo", Value: "bar"}},
|
||||
Points: []*typesv1.Point{
|
||||
{Timestamp: int64(1000), Value: 30, Exemplars: []*typesv1.Exemplar{{Timestamp: int64(1000), Value: 3, ProfileId: "id1"}}},
|
||||
{Timestamp: int64(2000), Value: 10, Exemplars: []*typesv1.Exemplar{{Timestamp: int64(2000), Value: 1, ProfileId: "id2"}}},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}, nil
|
||||
}
|
||||
return &connect.Response[querierv1.SelectSeriesResponse]{
|
||||
Msg: &querierv1.SelectSeriesResponse{
|
||||
Series: []*typesv1.Series{
|
||||
|
||||
@@ -13,6 +13,7 @@ import (
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/tracing"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/live"
|
||||
"github.com/grafana/grafana/pkg/tsdb/grafana-pyroscope-datasource/exemplar"
|
||||
"github.com/xlab/treeprint"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/codes"
|
||||
@@ -21,6 +22,8 @@ import (
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/grafana-pyroscope-datasource/annotation"
|
||||
"github.com/grafana/grafana/pkg/tsdb/grafana-pyroscope-datasource/kinds/dataquery"
|
||||
|
||||
typesv1 "github.com/grafana/pyroscope/api/gen/proto/go/types/v1"
|
||||
)
|
||||
|
||||
type queryModel struct {
|
||||
@@ -36,8 +39,12 @@ const (
|
||||
queryTypeProfile = string(dataquery.PyroscopeQueryTypeProfile)
|
||||
queryTypeMetrics = string(dataquery.PyroscopeQueryTypeMetrics)
|
||||
queryTypeBoth = string(dataquery.PyroscopeQueryTypeBoth)
|
||||
|
||||
exemplarsFeatureToggle = "profilesExemplars"
|
||||
)
|
||||
|
||||
var identityTransformation = func(value float64) float64 { return value }
|
||||
|
||||
// query processes single Pyroscope query transforming the response to data.Frame packaged in DataResponse
|
||||
func (d *PyroscopeDatasource) query(ctx context.Context, pCtx backend.PluginContext, query backend.DataQuery) backend.DataResponse {
|
||||
ctx, span := tracing.DefaultTracer().Start(ctx, "datasource.pyroscope.query", trace.WithAttributes(attribute.String("query_type", query.QueryType)))
|
||||
@@ -77,6 +84,10 @@ func (d *PyroscopeDatasource) query(ctx context.Context, pCtx backend.PluginCont
|
||||
logger.Error("Failed to parse the MinStep using default", "MinStep", dsJson.MinStep, "function", logEntrypoint())
|
||||
}
|
||||
}
|
||||
exemplarType := typesv1.ExemplarType_EXEMPLAR_TYPE_NONE
|
||||
if qm.IncludeExemplars && backend.GrafanaConfigFromContext(ctx).FeatureToggles().IsEnabled(exemplarsFeatureToggle) {
|
||||
exemplarType = typesv1.ExemplarType_EXEMPLAR_TYPE_INDIVIDUAL
|
||||
}
|
||||
seriesResp, err := d.client.GetSeries(
|
||||
gCtx,
|
||||
profileTypeId,
|
||||
@@ -86,6 +97,7 @@ func (d *PyroscopeDatasource) query(ctx context.Context, pCtx backend.PluginCont
|
||||
qm.GroupBy,
|
||||
qm.Limit,
|
||||
math.Max(query.Interval.Seconds(), parsedInterval.Seconds()),
|
||||
exemplarType,
|
||||
)
|
||||
if err != nil {
|
||||
span.RecordError(err)
|
||||
@@ -475,6 +487,7 @@ func seriesToDataFrames(resp *SeriesResponse, withAnnotations bool, stepDuration
|
||||
annotations := make([]*annotation.TimedAnnotation, 0)
|
||||
|
||||
for _, series := range resp.Series {
|
||||
exemplars := make([]*exemplar.Exemplar, 0)
|
||||
// We create separate data frames as the series may not have the same length
|
||||
frame := data.NewFrame("series")
|
||||
frameMeta := &data.FrameMeta{PreferredVisualization: "graph"}
|
||||
@@ -516,14 +529,20 @@ func seriesToDataFrames(resp *SeriesResponse, withAnnotations bool, stepDuration
|
||||
|
||||
// Apply rate calculation for cumulative profiles
|
||||
value := point.Value
|
||||
transformation := identityTransformation
|
||||
if isCumulativeProfile(profileTypeID) && stepDurationSec > 0 {
|
||||
value = value / stepDurationSec
|
||||
transformation = func(value float64) float64 {
|
||||
return value / stepDurationSec
|
||||
}
|
||||
|
||||
// Convert CPU nanoseconds to cores
|
||||
if isCPUTimeProfile(profileTypeID) {
|
||||
value = value / 1e9
|
||||
transformation = func(value float64) float64 {
|
||||
return value / stepDurationSec / 1e9
|
||||
}
|
||||
}
|
||||
}
|
||||
value = transformation(value)
|
||||
valueField.Append(value)
|
||||
if withAnnotations {
|
||||
for _, a := range point.Annotations {
|
||||
@@ -533,10 +552,22 @@ func seriesToDataFrames(resp *SeriesResponse, withAnnotations bool, stepDuration
|
||||
})
|
||||
}
|
||||
}
|
||||
for _, e := range point.Exemplars {
|
||||
exemplars = append(exemplars, &exemplar.Exemplar{
|
||||
Id: e.Id,
|
||||
Value: transformation(float64(e.Value)),
|
||||
Timestamp: e.Timestamp,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
frame.Fields = fields
|
||||
frames = append(frames, frame)
|
||||
|
||||
if len(exemplars) > 0 {
|
||||
frame := exemplar.CreateExemplarFrame(labels, exemplars)
|
||||
frames = append(frames, frame)
|
||||
}
|
||||
}
|
||||
|
||||
if len(annotations) > 0 {
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
|
||||
typesv1 "github.com/grafana/pyroscope/api/gen/proto/go/types/v1"
|
||||
|
||||
"github.com/grafana/grafana/pkg/tsdb/grafana-pyroscope-datasource/annotation"
|
||||
@@ -487,10 +488,21 @@ func Test_seriesToDataFrame(t *testing.T) {
|
||||
require.Nil(t, frames[0].Meta.Custom)
|
||||
})
|
||||
|
||||
t.Run("CPU time conversion to cores", func(t *testing.T) {
|
||||
t.Run("CPU time conversion to cores with exemplars", func(t *testing.T) {
|
||||
series := &SeriesResponse{
|
||||
Series: []*Series{
|
||||
{Labels: []*LabelPair{}, Points: []*Point{{Timestamp: int64(1000), Value: 3000000000}, {Timestamp: int64(2000), Value: 1500000000}}}, // 3s and 1.5s in nanoseconds
|
||||
{
|
||||
Labels: []*LabelPair{}, Points: []*Point{
|
||||
{
|
||||
Timestamp: int64(1000), Value: 3000000000, // 3s in nanoseconds
|
||||
Exemplars: []*Exemplar{{Value: 300000000, Timestamp: 1000}}, // 0.3s in nanoseconds
|
||||
},
|
||||
{
|
||||
Timestamp: int64(2000), Value: 1500000000, // 1.5s in nanoseconds
|
||||
Exemplars: []*Exemplar{{Value: 150000000, Timestamp: 1000}}, // 0.15s in nanoseconds
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Units: "ns",
|
||||
Label: "cpu",
|
||||
@@ -498,19 +510,32 @@ func Test_seriesToDataFrame(t *testing.T) {
|
||||
// should convert nanoseconds to cores and set unit to "cores"
|
||||
frames, err := seriesToDataFrames(series, false, 15.0, "process_cpu:cpu:nanoseconds:cpu:nanoseconds")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(frames))
|
||||
require.Equal(t, 2, len(frames))
|
||||
|
||||
require.Equal(t, "cores", frames[0].Fields[1].Config.Unit)
|
||||
|
||||
// Check values were converted: 3000000000/15/1e9 = 0.2 cores/sec, 1500000000/15/1e9 = 0.1 cores/sec
|
||||
values := fieldValues[float64](frames[0].Fields[1])
|
||||
require.Equal(t, []float64{0.2, 0.1}, values)
|
||||
// Check exemplar values were converted: 300000000/15/1e9 = 0.02 cores/sec, 150000000/15/1e9 = 0.01 cores/sec
|
||||
exemplarValues := fieldValues[float64](frames[1].Fields[1])
|
||||
require.Equal(t, []float64{0.02, 0.01}, exemplarValues)
|
||||
})
|
||||
|
||||
t.Run("Memory allocation unit conversion to bytes/sec", func(t *testing.T) {
|
||||
series := &SeriesResponse{
|
||||
Series: []*Series{
|
||||
{Labels: []*LabelPair{}, Points: []*Point{{Timestamp: int64(1000), Value: 150000000}, {Timestamp: int64(2000), Value: 300000000}}}, // 150 MB, 300 MB
|
||||
{
|
||||
Labels: []*LabelPair{}, Points: []*Point{
|
||||
{
|
||||
Timestamp: int64(1000), Value: 150000000, // 150 MB
|
||||
Exemplars: []*Exemplar{{Value: 15000000, Timestamp: 1000}}, // 15 MB
|
||||
}, {
|
||||
Timestamp: int64(2000), Value: 300000000, // 300 MB
|
||||
Exemplars: []*Exemplar{{Value: 30000000, Timestamp: 1000}}, // 30 MB
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Units: "bytes",
|
||||
Label: "memory_alloc",
|
||||
@@ -518,19 +543,33 @@ func Test_seriesToDataFrame(t *testing.T) {
|
||||
// should convert bytes to binBps and apply rate calculation
|
||||
frames, err := seriesToDataFrames(series, false, 15.0, "memory:alloc_space:bytes:space:bytes")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(frames))
|
||||
require.Equal(t, 2, len(frames))
|
||||
|
||||
require.Equal(t, "binBps", frames[0].Fields[1].Config.Unit)
|
||||
|
||||
// Check values were rate calculated: 150000000/15 = 10000000, 300000000/15 = 20000000
|
||||
values := fieldValues[float64](frames[0].Fields[1])
|
||||
require.Equal(t, []float64{10000000, 20000000}, values)
|
||||
// Check exemplar values were rate calculated: 15000000/15 = 1000000, 30000000/15 = 2000000
|
||||
exemplarValues := fieldValues[float64](frames[1].Fields[1])
|
||||
require.Equal(t, []float64{1000000, 2000000}, exemplarValues)
|
||||
})
|
||||
|
||||
t.Run("Count-based profile unit conversion to ops/sec", func(t *testing.T) {
|
||||
series := &SeriesResponse{
|
||||
Series: []*Series{
|
||||
{Labels: []*LabelPair{}, Points: []*Point{{Timestamp: int64(1000), Value: 1500}, {Timestamp: int64(2000), Value: 3000}}}, // 1500, 3000 contentions
|
||||
{
|
||||
Labels: []*LabelPair{}, Points: []*Point{
|
||||
{
|
||||
Timestamp: int64(1000), Value: 1500, // 1500 contentions
|
||||
Exemplars: []*Exemplar{{Value: 150, Timestamp: 1000}}, // 150 contentions
|
||||
|
||||
}, {
|
||||
Timestamp: int64(2000), Value: 3000, // 3000 contentions
|
||||
Exemplars: []*Exemplar{{Value: 300, Timestamp: 1000}}, // 300 contentions
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Units: "short",
|
||||
Label: "contentions",
|
||||
@@ -538,13 +577,16 @@ func Test_seriesToDataFrame(t *testing.T) {
|
||||
// should convert short to ops and apply rate calculation
|
||||
frames, err := seriesToDataFrames(series, false, 15.0, "mutex:contentions:count:contentions:count")
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, 1, len(frames))
|
||||
require.Equal(t, 2, len(frames))
|
||||
|
||||
require.Equal(t, "ops", frames[0].Fields[1].Config.Unit)
|
||||
|
||||
// Check values were rate calculated: 1500/15 = 100, 3000/15 = 200
|
||||
values := fieldValues[float64](frames[0].Fields[1])
|
||||
require.Equal(t, []float64{100, 200}, values)
|
||||
// Check exemplar values were rate calculated: 150/15 = 10, 300/15 = 20
|
||||
exemplarValues := fieldValues[float64](frames[1].Fields[1])
|
||||
require.Equal(t, []float64{10, 20}, exemplarValues)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -605,7 +647,7 @@ func (f *FakeClient) GetSpanProfile(ctx context.Context, profileTypeID, labelSel
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (f *FakeClient) GetSeries(ctx context.Context, profileTypeID, labelSelector string, start, end int64, groupBy []string, limit *int64, step float64) (*SeriesResponse, error) {
|
||||
func (f *FakeClient) GetSeries(ctx context.Context, profileTypeID, labelSelector string, start, end int64, groupBy []string, limit *int64, step float64, exemplarType typesv1.ExemplarType) (*SeriesResponse, error) {
|
||||
f.Args = []any{profileTypeID, labelSelector, start, end, groupBy, step}
|
||||
return &SeriesResponse{
|
||||
Series: []*Series{
|
||||
|
||||
@@ -3,7 +3,8 @@ import { render, screen, userEvent, waitFor } from 'test/test-utils';
|
||||
import { byLabelText, byRole, byText } from 'testing-library-selector';
|
||||
|
||||
import { setPluginLinksHook } from '@grafana/runtime';
|
||||
import { setupMswServer } from 'app/features/alerting/unified/mockApi';
|
||||
import server from '@grafana/test-utils/server';
|
||||
import { mockAlertRuleApi, setupMswServer } from 'app/features/alerting/unified/mockApi';
|
||||
import { AlertManagerDataSourceJsonData } from 'app/plugins/datasource/alertmanager/types';
|
||||
import { AccessControlAction } from 'app/types/accessControl';
|
||||
import { CombinedRule, RuleIdentifier } from 'app/types/unified-alerting';
|
||||
@@ -22,6 +23,7 @@ import {
|
||||
mockPluginLinkExtension,
|
||||
mockPromAlertingRule,
|
||||
mockRulerGrafanaRecordingRule,
|
||||
mockRulerGrafanaRule,
|
||||
} from '../../mocks';
|
||||
import { grafanaRulerRule } from '../../mocks/grafanaRulerApi';
|
||||
import { grantPermissionsHelper } from '../../test/test-utils';
|
||||
@@ -130,6 +132,8 @@ const dataSources = {
|
||||
};
|
||||
|
||||
describe('RuleViewer', () => {
|
||||
const api = mockAlertRuleApi(server);
|
||||
|
||||
beforeEach(() => {
|
||||
setupDataSources(...Object.values(dataSources));
|
||||
});
|
||||
@@ -249,19 +253,22 @@ describe('RuleViewer', () => {
|
||||
|
||||
expect(screen.getAllByRole('row')).toHaveLength(7);
|
||||
expect(screen.getAllByRole('row')[1]).toHaveTextContent(/6Provisioning2025-01-18 04:35:17/i);
|
||||
expect(screen.getAllByRole('row')[1]).toHaveTextContent('+3-3Latest');
|
||||
expect(screen.getAllByRole('row')[1]).toHaveTextContent('Updated by provisioning service');
|
||||
expect(screen.getAllByRole('row')[1]).toHaveTextContent('+4-3Latest');
|
||||
|
||||
expect(screen.getAllByRole('row')[2]).toHaveTextContent(/5Alerting2025-01-17 04:35:17/i);
|
||||
expect(screen.getAllByRole('row')[2]).toHaveTextContent('+5-5');
|
||||
expect(screen.getAllByRole('row')[2]).toHaveTextContent('+5-6');
|
||||
|
||||
expect(screen.getAllByRole('row')[3]).toHaveTextContent(/4different user2025-01-16 04:35:17/i);
|
||||
expect(screen.getAllByRole('row')[3]).toHaveTextContent('+5-5');
|
||||
expect(screen.getAllByRole('row')[3]).toHaveTextContent('Changed alert title and thresholds');
|
||||
expect(screen.getAllByRole('row')[3]).toHaveTextContent('+6-5');
|
||||
|
||||
expect(screen.getAllByRole('row')[4]).toHaveTextContent(/3user12025-01-15 04:35:17/i);
|
||||
expect(screen.getAllByRole('row')[4]).toHaveTextContent('+5-9');
|
||||
expect(screen.getAllByRole('row')[4]).toHaveTextContent('+5-10');
|
||||
|
||||
expect(screen.getAllByRole('row')[5]).toHaveTextContent(/2User ID foo2025-01-14 04:35:17/i);
|
||||
expect(screen.getAllByRole('row')[5]).toHaveTextContent('+11-7');
|
||||
expect(screen.getAllByRole('row')[5]).toHaveTextContent('Updated evaluation interval and routing');
|
||||
expect(screen.getAllByRole('row')[5]).toHaveTextContent('+12-7');
|
||||
|
||||
expect(screen.getAllByRole('row')[6]).toHaveTextContent(/1Unknown 2025-01-13 04:35:17/i);
|
||||
|
||||
@@ -275,9 +282,10 @@ describe('RuleViewer', () => {
|
||||
await renderRuleViewer(mockRule, mockRuleIdentifier, ActiveTab.VersionHistory);
|
||||
expect(await screen.findByRole('button', { name: /Compare versions/i })).toBeDisabled();
|
||||
|
||||
expect(screen.getByRole('cell', { name: /provisioning/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('cell', { name: /alerting/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('cell', { name: /Unknown/i })).toBeInTheDocument();
|
||||
// Check for special updated_by values - use getAllByRole since some text appears in multiple columns
|
||||
expect(screen.getAllByRole('cell', { name: /provisioning/i }).length).toBeGreaterThan(0);
|
||||
expect(screen.getByRole('cell', { name: /^alerting$/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('cell', { name: /^Unknown$/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('cell', { name: /user id foo/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
@@ -321,6 +329,47 @@ describe('RuleViewer', () => {
|
||||
await renderRuleViewer(rule, ruleIdentifier);
|
||||
expect(screen.queryByText('Labels')).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('shows Notes column when versions have messages', async () => {
|
||||
await renderRuleViewer(mockRule, mockRuleIdentifier, ActiveTab.VersionHistory);
|
||||
|
||||
expect(await screen.findByRole('columnheader', { name: /Notes/i })).toBeInTheDocument();
|
||||
expect(screen.getAllByRole('row')).toHaveLength(7); // 1 header + 6 data rows
|
||||
expect(screen.getByRole('cell', { name: /Updated by provisioning service/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('cell', { name: /Changed alert title and thresholds/i })).toBeInTheDocument();
|
||||
expect(screen.getByRole('cell', { name: /Updated evaluation interval and routing/i })).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('does not show Notes column when no versions have messages', async () => {
|
||||
const versionsWithoutMessages = [
|
||||
mockRulerGrafanaRule(
|
||||
{},
|
||||
{
|
||||
uid: grafanaRulerRule.grafana_alert.uid,
|
||||
version: 2,
|
||||
updated: '2025-01-14T09:35:17.000Z',
|
||||
updated_by: { uid: 'foo', name: '' },
|
||||
}
|
||||
),
|
||||
mockRulerGrafanaRule(
|
||||
{},
|
||||
{
|
||||
uid: grafanaRulerRule.grafana_alert.uid,
|
||||
version: 1,
|
||||
updated: '2025-01-13T09:35:17.000Z',
|
||||
updated_by: null,
|
||||
}
|
||||
),
|
||||
];
|
||||
api.getAlertRuleVersionHistory(grafanaRulerRule.grafana_alert.uid, versionsWithoutMessages);
|
||||
|
||||
await renderRuleViewer(mockRule, mockRuleIdentifier, ActiveTab.VersionHistory);
|
||||
|
||||
await screen.findByRole('button', { name: /Compare versions/i });
|
||||
|
||||
expect(screen.getAllByRole('row')).toHaveLength(3); // 1 header + 2 data rows
|
||||
expect(screen.queryByRole('columnheader', { name: /Notes/i })).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { css } from '@emotion/css';
|
||||
import { useMemo, useState } from 'react';
|
||||
|
||||
import { dateTimeFormat, dateTimeFormatTimeAgo } from '@grafana/data';
|
||||
import { Trans, t } from '@grafana/i18n';
|
||||
import { Badge, Button, Checkbox, Column, InteractiveTable, Stack, Text } from '@grafana/ui';
|
||||
import { Badge, Button, Checkbox, Column, InteractiveTable, Stack, Text, useStyles2 } from '@grafana/ui';
|
||||
import { GRAFANA_RULES_SOURCE_NAME } from 'app/features/alerting/unified/utils/datasource';
|
||||
import { computeVersionDiff } from 'app/features/alerting/unified/utils/diff';
|
||||
import { RuleIdentifier } from 'app/types/unified-alerting';
|
||||
@@ -33,6 +34,7 @@ export function VersionHistoryTable({
|
||||
onRestoreError,
|
||||
canRestore,
|
||||
}: VersionHistoryTableProps) {
|
||||
const styles = useStyles2(getStyles);
|
||||
const [showConfirmModal, setShowConfirmModal] = useState(false);
|
||||
const [ruleToRestore, setRuleToRestore] = useState<RulerGrafanaRuleDTO<GrafanaRuleDefinition>>();
|
||||
const ruleToRestoreUid = ruleToRestore?.grafana_alert?.uid ?? '';
|
||||
@@ -41,6 +43,8 @@ export function VersionHistoryTable({
|
||||
[ruleToRestoreUid]
|
||||
);
|
||||
|
||||
const hasAnyNotes = useMemo(() => ruleVersions.some((v) => v.grafana_alert.message), [ruleVersions]);
|
||||
|
||||
const showConfirmation = (ruleToRestore: RulerGrafanaRuleDTO<GrafanaRuleDefinition>) => {
|
||||
setShowConfirmModal(true);
|
||||
setRuleToRestore(ruleToRestore);
|
||||
@@ -52,6 +56,15 @@ export function VersionHistoryTable({
|
||||
|
||||
const unknown = t('alerting.alertVersionHistory.unknown', 'Unknown');
|
||||
|
||||
const notesColumn: Column<RulerGrafanaRuleDTO<GrafanaRuleDefinition>> = {
|
||||
id: 'notes',
|
||||
header: t('core.versionHistory.table.notes', 'Notes'),
|
||||
cell: ({ row }) => {
|
||||
const message = row.original.grafana_alert.message;
|
||||
return message || null;
|
||||
},
|
||||
};
|
||||
|
||||
const columns: Array<Column<RulerGrafanaRuleDTO<GrafanaRuleDefinition>>> = [
|
||||
{
|
||||
disableGrow: true,
|
||||
@@ -91,9 +104,12 @@ export function VersionHistoryTable({
|
||||
if (!value) {
|
||||
return unknown;
|
||||
}
|
||||
return dateTimeFormat(value) + ' (' + dateTimeFormatTimeAgo(value) + ')';
|
||||
return (
|
||||
<span className={styles.nowrap}>{dateTimeFormat(value) + ' (' + dateTimeFormatTimeAgo(value) + ')'}</span>
|
||||
);
|
||||
},
|
||||
},
|
||||
...(hasAnyNotes ? [notesColumn] : []),
|
||||
{
|
||||
id: 'diff',
|
||||
disableGrow: true,
|
||||
@@ -179,3 +195,9 @@ export function VersionHistoryTable({
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
const getStyles = () => ({
|
||||
nowrap: css({
|
||||
whiteSpace: 'nowrap',
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -154,6 +154,7 @@ export const rulerRuleVersionHistoryHandler = () => {
|
||||
uid: 'service',
|
||||
name: '',
|
||||
};
|
||||
draft.grafana_alert.message = 'Updated by provisioning service';
|
||||
}),
|
||||
produce(grafanaRulerRule, (draft: RulerGrafanaRuleDTO<GrafanaRuleDefinition>) => {
|
||||
draft.grafana_alert.version = 5;
|
||||
@@ -171,6 +172,7 @@ export const rulerRuleVersionHistoryHandler = () => {
|
||||
uid: 'different',
|
||||
name: 'different user',
|
||||
};
|
||||
draft.grafana_alert.message = 'Changed alert title and thresholds';
|
||||
}),
|
||||
produce(grafanaRulerRule, (draft: RulerGrafanaRuleDTO<GrafanaRuleDefinition>) => {
|
||||
draft.grafana_alert.version = 3;
|
||||
@@ -193,6 +195,7 @@ export const rulerRuleVersionHistoryHandler = () => {
|
||||
uid: 'foo',
|
||||
name: '',
|
||||
};
|
||||
draft.grafana_alert.message = 'Updated evaluation interval and routing';
|
||||
}),
|
||||
produce(grafanaRulerRule, (draft: RulerGrafanaRuleDTO<GrafanaRuleDefinition>) => {
|
||||
draft.grafana_alert.version = 1;
|
||||
|
||||
@@ -19,6 +19,8 @@ import { AddVariableButton } from './VariableControlsAddButton';
|
||||
|
||||
export function VariableControls({ dashboard }: { dashboard: DashboardScene }) {
|
||||
const { variables } = sceneGraph.getVariables(dashboard)!.useState();
|
||||
const { isEditing } = dashboard.useState();
|
||||
const isEditingNewLayouts = isEditing && config.featureToggles.dashboardNewLayouts;
|
||||
|
||||
// Get visible variables for drilldown layout
|
||||
const visibleVariables = variables.filter((v) => v.state.hide !== VariableHide.inControlsMenu);
|
||||
@@ -35,13 +37,22 @@ export function VariableControls({ dashboard }: { dashboard: DashboardScene }) {
|
||||
// Variables to render (exclude adhoc/groupby when drilldown controls are shown in top row)
|
||||
const variablesToRender = hasDrilldownControls
|
||||
? restVariables.filter((v) => v.state.hide !== VariableHide.inControlsMenu)
|
||||
: variables.filter((v) => v.state.hide !== VariableHide.inControlsMenu);
|
||||
: variables.filter(
|
||||
(v) =>
|
||||
// if we're editing in dynamic dashboards, still shows hidden variable but greyed out
|
||||
(isEditingNewLayouts && v.state.hide === VariableHide.hideVariable) ||
|
||||
v.state.hide !== VariableHide.inControlsMenu
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
{variablesToRender.length > 0 &&
|
||||
variablesToRender.map((variable) => (
|
||||
<VariableValueSelectWrapper key={variable.state.key} variable={variable} />
|
||||
<VariableValueSelectWrapper
|
||||
key={variable.state.key}
|
||||
variable={variable}
|
||||
isEditingNewLayouts={isEditingNewLayouts}
|
||||
/>
|
||||
))}
|
||||
|
||||
{config.featureToggles.dashboardNewLayouts ? <AddVariableButton dashboard={dashboard} /> : null}
|
||||
@@ -52,14 +63,17 @@ export function VariableControls({ dashboard }: { dashboard: DashboardScene }) {
|
||||
interface VariableSelectProps {
|
||||
variable: SceneVariable;
|
||||
inMenu?: boolean;
|
||||
isEditingNewLayouts?: boolean;
|
||||
}
|
||||
|
||||
export function VariableValueSelectWrapper({ variable, inMenu }: VariableSelectProps) {
|
||||
export function VariableValueSelectWrapper({ variable, inMenu, isEditingNewLayouts }: VariableSelectProps) {
|
||||
const state = useSceneObjectState<SceneVariableState>(variable, { shouldActivateOrKeepAlive: true });
|
||||
const { isSelected, onSelect, isSelectable } = useElementSelection(variable.state.key);
|
||||
const isHidden = state.hide === VariableHide.hideVariable;
|
||||
const shouldShowHiddenVariables = isEditingNewLayouts && isHidden;
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
if (state.hide === VariableHide.hideVariable) {
|
||||
if (isHidden && !isEditingNewLayouts) {
|
||||
if (variable.UNSAFE_renderAsHidden) {
|
||||
return <variable.Component model={variable} />;
|
||||
}
|
||||
@@ -97,6 +111,7 @@ export function VariableValueSelectWrapper({ variable, inMenu }: VariableSelectP
|
||||
<div
|
||||
className={cx(
|
||||
styles.switchMenuContainer,
|
||||
shouldShowHiddenVariables && styles.hidden,
|
||||
isSelected && 'dashboard-selected-element',
|
||||
isSelectable && !isSelected && 'dashboard-selectable-element'
|
||||
)}
|
||||
@@ -120,6 +135,7 @@ export function VariableValueSelectWrapper({ variable, inMenu }: VariableSelectP
|
||||
<div
|
||||
className={cx(
|
||||
styles.verticalContainer,
|
||||
shouldShowHiddenVariables && styles.hidden,
|
||||
isSelected && 'dashboard-selected-element',
|
||||
isSelectable && !isSelected && 'dashboard-selectable-element'
|
||||
)}
|
||||
@@ -136,6 +152,7 @@ export function VariableValueSelectWrapper({ variable, inMenu }: VariableSelectP
|
||||
<div
|
||||
className={cx(
|
||||
styles.container,
|
||||
shouldShowHiddenVariables && styles.hidden,
|
||||
isSelected && 'dashboard-selected-element',
|
||||
isSelectable && !isSelected && 'dashboard-selectable-element'
|
||||
)}
|
||||
@@ -223,4 +240,13 @@ const getStyles = (theme: GrafanaTheme2) => ({
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
}),
|
||||
hidden: css({
|
||||
opacity: 0.6,
|
||||
'&:hover': css({
|
||||
opacity: 1,
|
||||
}),
|
||||
label: css({
|
||||
textDecoration: 'line-through',
|
||||
}),
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import { css } from '@emotion/css';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { selectors } from '@grafana/e2e-selectors';
|
||||
import { t } from '@grafana/i18n';
|
||||
import { Dropdown, ToolbarButton, useStyles2 } from '@grafana/ui';
|
||||
|
||||
@@ -33,6 +34,7 @@ export function DashboardControlsButton({ dashboard }: { dashboard: DashboardSce
|
||||
<ToolbarButton
|
||||
aria-label={t('dashboard.controls.menu.aria-label', DASHBOARD_CONTROLS_MENU_ARIA_LABEL)}
|
||||
title={t('dashboard.controls.menu.title', DASHBOARD_CONTROLS_MENU_TITLE)}
|
||||
data-testid={selectors.pages.Dashboard.ControlsButton}
|
||||
icon="sliders-v-alt"
|
||||
iconSize="md"
|
||||
variant="canvas"
|
||||
|
||||
@@ -345,6 +345,16 @@ describe('DashboardSceneSerializer', () => {
|
||||
type: 'textbox',
|
||||
name: 'search',
|
||||
},
|
||||
{
|
||||
name: 'custom_csv',
|
||||
type: 'custom',
|
||||
valuesFormat: 'csv',
|
||||
},
|
||||
{
|
||||
name: 'custom_json',
|
||||
type: 'custom',
|
||||
valuesFormat: 'json',
|
||||
},
|
||||
],
|
||||
},
|
||||
});
|
||||
@@ -359,6 +369,9 @@ describe('DashboardSceneSerializer', () => {
|
||||
panel_type_row_count: 1,
|
||||
variable_type_query_count: 2,
|
||||
variable_type_textbox_count: 1,
|
||||
variable_type_custom_count: 2,
|
||||
variable_type_custom_csv_count: 1,
|
||||
variable_type_custom_json_count: 1,
|
||||
settings_nowdelay: undefined,
|
||||
settings_livenow: true,
|
||||
varsWithDataSource: [
|
||||
@@ -701,7 +714,9 @@ describe('DashboardSceneSerializer', () => {
|
||||
panel_type_timeseries_count: 6,
|
||||
variable_type_adhoc_count: 1,
|
||||
variable_type_datasource_count: 1,
|
||||
variable_type_custom_count: 1,
|
||||
variable_type_custom_count: 3,
|
||||
variable_type_custom_csv_count: 2,
|
||||
variable_type_custom_json_count: 1,
|
||||
variable_type_query_count: 1,
|
||||
varsWithDataSource: [
|
||||
{ type: 'query', datasource: 'cloudwatch' },
|
||||
@@ -714,7 +729,7 @@ describe('DashboardSceneSerializer', () => {
|
||||
panelCount: 6,
|
||||
rowCount: 6,
|
||||
tabCount: 4,
|
||||
templateVariableCount: 4,
|
||||
templateVariableCount: 6,
|
||||
maxNestingLevel: 3,
|
||||
dashStructure:
|
||||
'[{"kind":"row","children":[{"kind":"row","children":[{"kind":"tab","children":[{"kind":"panel"},{"kind":"panel"},{"kind":"panel"}]},{"kind":"tab","children":[]}]},{"kind":"row","children":[{"kind":"row","children":[{"kind":"panel"}]}]}]},{"kind":"row","children":[{"kind":"row","children":[{"kind":"tab","children":[{"kind":"panel"}]},{"kind":"tab","children":[{"kind":"panel"}]}]}]}]',
|
||||
@@ -866,6 +881,7 @@ describe('DashboardSceneSerializer', () => {
|
||||
query: 'app1',
|
||||
skipUrlSync: false,
|
||||
allowCustomValue: true,
|
||||
valuesFormat: 'csv',
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
@@ -294,6 +294,7 @@ exports[`Given a scene with custom quick ranges should save quick ranges to save
|
||||
"options": [],
|
||||
"query": "a, b, c",
|
||||
"type": "custom",
|
||||
"valuesFormat": "csv",
|
||||
},
|
||||
{
|
||||
"current": {
|
||||
@@ -680,6 +681,7 @@ exports[`transformSceneToSaveModel Given a scene with rows Should transform back
|
||||
"options": [],
|
||||
"query": "A,B,C,D,E,F,E,G,H,I,J,K,L",
|
||||
"type": "custom",
|
||||
"valuesFormat": "csv",
|
||||
},
|
||||
{
|
||||
"current": {
|
||||
@@ -698,6 +700,7 @@ exports[`transformSceneToSaveModel Given a scene with rows Should transform back
|
||||
"options": [],
|
||||
"query": "Bob : 1, Rob : 2,Sod : 3, Hod : 4, Cod : 5",
|
||||
"type": "custom",
|
||||
"valuesFormat": "csv",
|
||||
},
|
||||
],
|
||||
},
|
||||
@@ -1021,6 +1024,7 @@ exports[`transformSceneToSaveModel Given a simple scene with custom settings Sho
|
||||
"options": [],
|
||||
"query": "a, b, c",
|
||||
"type": "custom",
|
||||
"valuesFormat": "csv",
|
||||
},
|
||||
{
|
||||
"current": {
|
||||
@@ -1381,6 +1385,7 @@ exports[`transformSceneToSaveModel Given a simple scene with variables Should tr
|
||||
"options": [],
|
||||
"query": "a, b, c",
|
||||
"type": "custom",
|
||||
"valuesFormat": "csv",
|
||||
},
|
||||
{
|
||||
"current": {
|
||||
|
||||
@@ -196,6 +196,7 @@ exports[`transformSceneToSaveModelSchemaV2 should transform scene to save model
|
||||
"options": [],
|
||||
"query": "option1, option2",
|
||||
"skipUrlSync": false,
|
||||
"valuesFormat": "csv",
|
||||
},
|
||||
},
|
||||
{
|
||||
|
||||
@@ -376,6 +376,7 @@ describe('sceneVariablesSetToVariables', () => {
|
||||
"options": [],
|
||||
"query": "test,test1,test2",
|
||||
"type": "custom",
|
||||
"valuesFormat": "csv",
|
||||
}
|
||||
`);
|
||||
});
|
||||
@@ -1180,6 +1181,7 @@ describe('sceneVariablesSetToVariables', () => {
|
||||
"options": [],
|
||||
"query": "test,test1,test2",
|
||||
"skipUrlSync": false,
|
||||
"valuesFormat": "csv",
|
||||
},
|
||||
}
|
||||
`);
|
||||
|
||||
@@ -120,6 +120,9 @@ export function sceneVariablesSetToVariables(set: SceneVariables, keepQueryOptio
|
||||
allValue: variable.state.allValue,
|
||||
includeAll: variable.state.includeAll,
|
||||
...(variable.state.allowCustomValue !== undefined && { allowCustomValue: variable.state.allowCustomValue }),
|
||||
// Ensure we persist the backend default when not specified to stay aligned with
|
||||
// transformSaveModelSchemaV2ToScene which injects 'csv' on load.
|
||||
valuesFormat: variable.state.valuesFormat ?? 'csv',
|
||||
};
|
||||
variables.push(customVariable);
|
||||
} else if (sceneUtils.isDataSourceVariable(variable)) {
|
||||
@@ -408,6 +411,7 @@ export function sceneVariablesSetToSchemaV2Variables(
|
||||
allValue: variable.state.allValue,
|
||||
includeAll: variable.state.includeAll ?? false,
|
||||
allowCustomValue: variable.state.allowCustomValue ?? true,
|
||||
valuesFormat: variable.state.valuesFormat ?? 'csv',
|
||||
},
|
||||
};
|
||||
variables.push(customVariable);
|
||||
|
||||
@@ -1169,6 +1169,57 @@
|
||||
"skipUrlSync": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "CustomVariable",
|
||||
"spec": {
|
||||
"allowCustomValue": true,
|
||||
"current": {
|
||||
"text": "test",
|
||||
"value": "test"
|
||||
},
|
||||
"hide": "dontHide",
|
||||
"includeAll": false,
|
||||
"multi": false,
|
||||
"name": "custom0",
|
||||
"options": [
|
||||
{
|
||||
"selected": true,
|
||||
"text": "test",
|
||||
"value": "test"
|
||||
}
|
||||
],
|
||||
"valuesFormat": "csv",
|
||||
"query": "test",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "CustomVariable",
|
||||
"spec": {
|
||||
"allowCustomValue": true,
|
||||
"current": {
|
||||
"text": "test",
|
||||
"value": "test"
|
||||
},
|
||||
"hide": "dontHide",
|
||||
"includeAll": false,
|
||||
"multi": false,
|
||||
"name": "custom0",
|
||||
"options": [
|
||||
{
|
||||
"selected": true,
|
||||
"text": "test",
|
||||
"value": "test",
|
||||
"properties": {
|
||||
"testProp": "test"
|
||||
}
|
||||
}
|
||||
],
|
||||
"valuesFormat": "json",
|
||||
"query": "test",
|
||||
"skipUrlSync": false
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "DatasourceVariable",
|
||||
"spec": {
|
||||
|
||||
@@ -343,12 +343,12 @@ function createSceneVariableFromVariableModel(variable: TypedVariableModelV2): S
|
||||
}
|
||||
return new AdHocFiltersVariable(adhocVariableState);
|
||||
}
|
||||
|
||||
if (variable.kind === defaultCustomVariableKind().kind) {
|
||||
return new CustomVariable({
|
||||
...commonProperties,
|
||||
value: variable.spec.current?.value ?? '',
|
||||
text: variable.spec.current?.text ?? '',
|
||||
|
||||
query: variable.spec.query,
|
||||
isMulti: variable.spec.multi,
|
||||
allValue: variable.spec.allValue || undefined,
|
||||
@@ -357,6 +357,7 @@ function createSceneVariableFromVariableModel(variable: TypedVariableModelV2): S
|
||||
skipUrlSync: variable.spec.skipUrlSync,
|
||||
hide: transformVariableHideToEnumV1(variable.spec.hide),
|
||||
...(variable.spec.allowCustomValue !== undefined && { allowCustomValue: variable.spec.allowCustomValue }),
|
||||
valuesFormat: variable.spec.valuesFormat || 'csv',
|
||||
});
|
||||
} else if (variable.kind === defaultQueryVariableKind().kind) {
|
||||
return new QueryVariable({
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user