Compare commits
30 Commits
change-hel
...
docs/add-t
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e83c88bf75 | ||
|
|
e81769147f | ||
|
|
70e49947a1 | ||
|
|
5156177079 | ||
|
|
4817ecf6a3 | ||
|
|
c73cab8eef | ||
|
|
a37ebf609e | ||
|
|
b29e8ccb45 | ||
|
|
644f7b7001 | ||
|
|
629570926d | ||
|
|
1b59c82b74 | ||
|
|
f35447435f | ||
|
|
c0dc92e8cd | ||
|
|
7114b9cd3b | ||
|
|
b40d0e6ff4 | ||
|
|
584615cf3f | ||
|
|
5f80a29a28 | ||
|
|
eab5d2b30e | ||
|
|
f3421b9718 | ||
|
|
1addfd69b4 | ||
|
|
d4a627c5fc | ||
|
|
46ef9aaa0a | ||
|
|
6ce672dd00 | ||
|
|
403f4d41de | ||
|
|
6512259acc | ||
|
|
b2dd095bd8 | ||
|
|
e525b529a8 | ||
|
|
7805e18368 | ||
|
|
7a07a49ecc | ||
|
|
9a4e13800d |
3
.github/CODEOWNERS
vendored
3
.github/CODEOWNERS
vendored
@@ -520,7 +520,7 @@ i18next.config.ts @grafana/grafana-frontend-platform
|
||||
/e2e-playwright/various-suite/solo-route.spec.ts @grafana/dashboards-squad
|
||||
/e2e-playwright/various-suite/trace-view-scrolling.spec.ts @grafana/observability-traces-and-profiling
|
||||
/e2e-playwright/various-suite/verify-i18n.spec.ts @grafana/grafana-frontend-platform
|
||||
/e2e-playwright/various-suite/visualization-suggestions.spec.ts @grafana/dashboards-squad
|
||||
/e2e-playwright/various-suite/visualization-suggestions.spec.ts @grafana/dataviz-squad
|
||||
/e2e-playwright/various-suite/perf-test.spec.ts @grafana/grafana-frontend-platform
|
||||
|
||||
# Packages
|
||||
@@ -956,6 +956,7 @@ playwright.storybook.config.ts @grafana/grafana-frontend-platform
|
||||
/public/app/features/notifications/ @grafana/grafana-search-navigate-organise
|
||||
/public/app/features/org/ @grafana/grafana-search-navigate-organise
|
||||
/public/app/features/panel/ @grafana/dashboards-squad
|
||||
/public/app/features/panel/components/VizTypePicker/VisualizationSuggestions.tsx @grafana/dataviz-squad
|
||||
/public/app/features/panel/suggestions/ @grafana/dataviz-squad
|
||||
/public/app/features/playlist/ @grafana/dashboards-squad
|
||||
/public/app/features/plugins/ @grafana/plugins-platform-frontend
|
||||
|
||||
@@ -1603,7 +1603,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1671,7 +1670,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 98,
|
||||
"min": 5,
|
||||
"noise": 22,
|
||||
@@ -1689,7 +1687,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1757,7 +1754,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 98,
|
||||
"min": 5,
|
||||
"noise": 22,
|
||||
@@ -1788,7 +1784,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1857,7 +1852,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 8,
|
||||
"min": 1,
|
||||
"noise": 2,
|
||||
@@ -1875,7 +1869,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1944,7 +1937,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 12,
|
||||
"min": 1,
|
||||
"noise": 2,
|
||||
@@ -1962,7 +1954,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -2030,7 +2021,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 100,
|
||||
"min": 10,
|
||||
"noise": 22,
|
||||
@@ -2048,7 +2038,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -2116,7 +2105,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 100,
|
||||
"min": 10,
|
||||
"noise": 22,
|
||||
@@ -2129,6 +2117,147 @@
|
||||
],
|
||||
"title": "Backend",
|
||||
"type": "radialbar"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 66
|
||||
},
|
||||
"id": 35,
|
||||
"panels": [],
|
||||
"title": "Empty data",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": 0
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 67
|
||||
},
|
||||
"id": 36,
|
||||
"options": {
|
||||
"barWidthFactor": 0.5,
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"segmentCount": 1,
|
||||
"segmentSpacing": 0.3,
|
||||
"shape": "gauge",
|
||||
"showThresholdLabels": false,
|
||||
"showThresholdMarkers": true,
|
||||
"sparkline": true
|
||||
},
|
||||
"pluginVersion": "13.0.0-pre",
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A",
|
||||
"scenarioId": "random_walk",
|
||||
"seriesCount": 0
|
||||
}
|
||||
],
|
||||
"title": "Numeric, no series",
|
||||
"type": "gauge"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": 0
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 67
|
||||
},
|
||||
"id": 37,
|
||||
"options": {
|
||||
"barWidthFactor": 0.5,
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"segmentCount": 1,
|
||||
"segmentSpacing": 0.3,
|
||||
"shape": "gauge",
|
||||
"showThresholdLabels": false,
|
||||
"showThresholdMarkers": true,
|
||||
"sparkline": true
|
||||
},
|
||||
"pluginVersion": "13.0.0-pre",
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A",
|
||||
"scenarioId": "logs"
|
||||
}
|
||||
],
|
||||
"title": "Non-numeric",
|
||||
"type": "gauge"
|
||||
}
|
||||
],
|
||||
"preload": false,
|
||||
|
||||
@@ -198,6 +198,7 @@ type JobStatus struct {
|
||||
Finished int64 `json:"finished,omitempty"`
|
||||
Message string `json:"message,omitempty"`
|
||||
Errors []string `json:"errors,omitempty"`
|
||||
Warnings []string `json:"warnings,omitempty"`
|
||||
|
||||
// Optional value 0-100 that can be set while running
|
||||
Progress float64 `json:"progress,omitempty"`
|
||||
@@ -225,18 +226,20 @@ type JobResourceSummary struct {
|
||||
Kind string `json:"kind,omitempty"`
|
||||
Total int64 `json:"total,omitempty"` // the count (if known)
|
||||
|
||||
Create int64 `json:"create,omitempty"`
|
||||
Update int64 `json:"update,omitempty"`
|
||||
Delete int64 `json:"delete,omitempty"`
|
||||
Write int64 `json:"write,omitempty"` // Create or update (export)
|
||||
Error int64 `json:"error,omitempty"` // The error count
|
||||
Create int64 `json:"create,omitempty"`
|
||||
Update int64 `json:"update,omitempty"`
|
||||
Delete int64 `json:"delete,omitempty"`
|
||||
Write int64 `json:"write,omitempty"` // Create or update (export)
|
||||
Error int64 `json:"error,omitempty"` // The error count
|
||||
Warning int64 `json:"warning,omitempty"` // The warning count
|
||||
|
||||
// No action required (useful for sync)
|
||||
Noop int64 `json:"noop,omitempty"`
|
||||
|
||||
// Report errors for this resource type
|
||||
// Report errors/warnings for this resource type
|
||||
// This may not be an exhaustive list and recommend looking at the logs for more info
|
||||
Errors []string `json:"errors,omitempty"`
|
||||
Errors []string `json:"errors,omitempty"`
|
||||
Warnings []string `json:"warnings,omitempty"`
|
||||
}
|
||||
|
||||
// HistoricJob is an append only log, saving all jobs that have been processed.
|
||||
|
||||
@@ -401,6 +401,11 @@ func (in *JobResourceSummary) DeepCopyInto(out *JobResourceSummary) {
|
||||
*out = make([]string, len(*in))
|
||||
copy(*out, *in)
|
||||
}
|
||||
if in.Warnings != nil {
|
||||
in, out := &in.Warnings, &out.Warnings
|
||||
*out = make([]string, len(*in))
|
||||
copy(*out, *in)
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
@@ -468,6 +473,11 @@ func (in *JobStatus) DeepCopyInto(out *JobStatus) {
|
||||
*out = make([]string, len(*in))
|
||||
copy(*out, *in)
|
||||
}
|
||||
if in.Warnings != nil {
|
||||
in, out := &in.Warnings, &out.Warnings
|
||||
*out = make([]string, len(*in))
|
||||
copy(*out, *in)
|
||||
}
|
||||
if in.Summary != nil {
|
||||
in, out := &in.Summary, &out.Summary
|
||||
*out = make([]*JobResourceSummary, len(*in))
|
||||
|
||||
@@ -889,6 +889,13 @@ func schema_pkg_apis_provisioning_v0alpha1_JobResourceSummary(ref common.Referen
|
||||
Format: "int64",
|
||||
},
|
||||
},
|
||||
"warning": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "The error count",
|
||||
Type: []string{"integer"},
|
||||
Format: "int64",
|
||||
},
|
||||
},
|
||||
"noop": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "No action required (useful for sync)",
|
||||
@@ -898,7 +905,7 @@ func schema_pkg_apis_provisioning_v0alpha1_JobResourceSummary(ref common.Referen
|
||||
},
|
||||
"errors": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "Report errors for this resource type This may not be an exhaustive list and recommend looking at the logs for more info",
|
||||
Description: "Report errors/warnings for this resource type This may not be an exhaustive list and recommend looking at the logs for more info",
|
||||
Type: []string{"array"},
|
||||
Items: &spec.SchemaOrArray{
|
||||
Schema: &spec.Schema{
|
||||
@@ -911,6 +918,20 @@ func schema_pkg_apis_provisioning_v0alpha1_JobResourceSummary(ref common.Referen
|
||||
},
|
||||
},
|
||||
},
|
||||
"warnings": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"array"},
|
||||
Items: &spec.SchemaOrArray{
|
||||
Schema: &spec.Schema{
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Default: "",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
@@ -1029,6 +1050,20 @@ func schema_pkg_apis_provisioning_v0alpha1_JobStatus(ref common.ReferenceCallbac
|
||||
},
|
||||
},
|
||||
},
|
||||
"warnings": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Type: []string{"array"},
|
||||
Items: &spec.SchemaOrArray{
|
||||
Schema: &spec.Schema{
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Default: "",
|
||||
Type: []string{"string"},
|
||||
Format: "",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"progress": {
|
||||
SchemaProps: spec.SchemaProps{
|
||||
Description: "Optional value 0-100 that can be set while running",
|
||||
|
||||
@@ -3,8 +3,10 @@ API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioni
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,FileList,Items
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,HistoryList,Items
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,JobResourceSummary,Errors
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,JobResourceSummary,Warnings
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,JobStatus,Errors
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,JobStatus,Summary
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,JobStatus,Warnings
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,ManagerStats,Stats
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,MoveJobOptions,Paths
|
||||
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,MoveJobOptions,Resources
|
||||
|
||||
@@ -7,16 +7,18 @@ package v0alpha1
|
||||
// JobResourceSummaryApplyConfiguration represents a declarative configuration of the JobResourceSummary type for use
|
||||
// with apply.
|
||||
type JobResourceSummaryApplyConfiguration struct {
|
||||
Group *string `json:"group,omitempty"`
|
||||
Kind *string `json:"kind,omitempty"`
|
||||
Total *int64 `json:"total,omitempty"`
|
||||
Create *int64 `json:"create,omitempty"`
|
||||
Update *int64 `json:"update,omitempty"`
|
||||
Delete *int64 `json:"delete,omitempty"`
|
||||
Write *int64 `json:"write,omitempty"`
|
||||
Error *int64 `json:"error,omitempty"`
|
||||
Noop *int64 `json:"noop,omitempty"`
|
||||
Errors []string `json:"errors,omitempty"`
|
||||
Group *string `json:"group,omitempty"`
|
||||
Kind *string `json:"kind,omitempty"`
|
||||
Total *int64 `json:"total,omitempty"`
|
||||
Create *int64 `json:"create,omitempty"`
|
||||
Update *int64 `json:"update,omitempty"`
|
||||
Delete *int64 `json:"delete,omitempty"`
|
||||
Write *int64 `json:"write,omitempty"`
|
||||
Error *int64 `json:"error,omitempty"`
|
||||
Warning *int64 `json:"warning,omitempty"`
|
||||
Noop *int64 `json:"noop,omitempty"`
|
||||
Errors []string `json:"errors,omitempty"`
|
||||
Warnings []string `json:"warnings,omitempty"`
|
||||
}
|
||||
|
||||
// JobResourceSummaryApplyConfiguration constructs a declarative configuration of the JobResourceSummary type for use with
|
||||
@@ -89,6 +91,14 @@ func (b *JobResourceSummaryApplyConfiguration) WithError(value int64) *JobResour
|
||||
return b
|
||||
}
|
||||
|
||||
// WithWarning sets the Warning field in the declarative configuration to the given value
|
||||
// and returns the receiver, so that objects can be built by chaining "With" function invocations.
|
||||
// If called multiple times, the Warning field is set to the value of the last call.
|
||||
func (b *JobResourceSummaryApplyConfiguration) WithWarning(value int64) *JobResourceSummaryApplyConfiguration {
|
||||
b.Warning = &value
|
||||
return b
|
||||
}
|
||||
|
||||
// WithNoop sets the Noop field in the declarative configuration to the given value
|
||||
// and returns the receiver, so that objects can be built by chaining "With" function invocations.
|
||||
// If called multiple times, the Noop field is set to the value of the last call.
|
||||
@@ -106,3 +116,13 @@ func (b *JobResourceSummaryApplyConfiguration) WithErrors(values ...string) *Job
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// WithWarnings adds the given value to the Warnings field in the declarative configuration
|
||||
// and returns the receiver, so that objects can be build by chaining "With" function invocations.
|
||||
// If called multiple times, values provided by each call will be appended to the Warnings field.
|
||||
func (b *JobResourceSummaryApplyConfiguration) WithWarnings(values ...string) *JobResourceSummaryApplyConfiguration {
|
||||
for i := range values {
|
||||
b.Warnings = append(b.Warnings, values[i])
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
@@ -16,6 +16,7 @@ type JobStatusApplyConfiguration struct {
|
||||
Finished *int64 `json:"finished,omitempty"`
|
||||
Message *string `json:"message,omitempty"`
|
||||
Errors []string `json:"errors,omitempty"`
|
||||
Warnings []string `json:"warnings,omitempty"`
|
||||
Progress *float64 `json:"progress,omitempty"`
|
||||
Summary []*provisioningv0alpha1.JobResourceSummary `json:"summary,omitempty"`
|
||||
URLs *RepositoryURLsApplyConfiguration `json:"url,omitempty"`
|
||||
@@ -69,6 +70,16 @@ func (b *JobStatusApplyConfiguration) WithErrors(values ...string) *JobStatusApp
|
||||
return b
|
||||
}
|
||||
|
||||
// WithWarnings adds the given value to the Warnings field in the declarative configuration
|
||||
// and returns the receiver, so that objects can be build by chaining "With" function invocations.
|
||||
// If called multiple times, values provided by each call will be appended to the Warnings field.
|
||||
func (b *JobStatusApplyConfiguration) WithWarnings(values ...string) *JobStatusApplyConfiguration {
|
||||
for i := range values {
|
||||
b.Warnings = append(b.Warnings, values[i])
|
||||
}
|
||||
return b
|
||||
}
|
||||
|
||||
// WithProgress sets the Progress field in the declarative configuration to the given value
|
||||
// and returns the receiver, so that objects can be built by chaining "With" function invocations.
|
||||
// If called multiple times, the Progress field is set to the value of the last call.
|
||||
|
||||
@@ -75,9 +75,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": false,
|
||||
"gradient": false
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -152,9 +152,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": false,
|
||||
"gradient": false
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -229,9 +229,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": false,
|
||||
"gradient": false
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -306,9 +306,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -383,9 +383,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -460,9 +460,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": false,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -537,9 +537,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": false,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -627,9 +627,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -704,9 +704,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -781,9 +781,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -858,9 +858,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": false,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": false
|
||||
"spotlight": true
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -952,9 +952,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": false,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": false
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1029,9 +1029,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": false,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": false
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1106,9 +1106,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": true
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1183,9 +1183,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": false,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": false
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1260,9 +1260,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": false,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": false
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1354,9 +1354,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": true
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1435,9 +1435,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": true
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1516,9 +1516,9 @@
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false,
|
||||
"gradient": true
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
@@ -1565,7 +1565,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1606,9 +1605,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": true,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": true
|
||||
"spotlight": true
|
||||
},
|
||||
"glow": "both",
|
||||
"orientation": "auto",
|
||||
@@ -1631,7 +1630,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 98,
|
||||
"min": 5,
|
||||
"noise": 22,
|
||||
@@ -1649,7 +1647,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1690,9 +1687,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": true,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": true
|
||||
"spotlight": true
|
||||
},
|
||||
"glow": "both",
|
||||
"orientation": "auto",
|
||||
@@ -1715,7 +1712,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 98,
|
||||
"min": 5,
|
||||
"noise": 22,
|
||||
@@ -1746,7 +1742,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1788,9 +1783,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": true,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": true
|
||||
"spotlight": true
|
||||
},
|
||||
"glow": "both",
|
||||
"orientation": "auto",
|
||||
@@ -1813,7 +1808,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 8,
|
||||
"min": 1,
|
||||
"noise": 2,
|
||||
@@ -1831,7 +1825,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1873,9 +1866,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": true,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": true
|
||||
"spotlight": true
|
||||
},
|
||||
"glow": "both",
|
||||
"orientation": "auto",
|
||||
@@ -1898,7 +1891,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 12,
|
||||
"min": 1,
|
||||
"noise": 2,
|
||||
@@ -1916,7 +1908,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -1957,9 +1948,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": true,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": true
|
||||
"spotlight": true
|
||||
},
|
||||
"glow": "both",
|
||||
"orientation": "auto",
|
||||
@@ -1982,7 +1973,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 100,
|
||||
"min": 10,
|
||||
"noise": 22,
|
||||
@@ -2000,7 +1990,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"description": "",
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
@@ -2041,9 +2030,9 @@
|
||||
"effects": {
|
||||
"barGlow": true,
|
||||
"centerGlow": true,
|
||||
"gradient": true,
|
||||
"rounded": true,
|
||||
"spotlight": true,
|
||||
"gradient": true
|
||||
"spotlight": true
|
||||
},
|
||||
"glow": "both",
|
||||
"orientation": "auto",
|
||||
@@ -2066,7 +2055,6 @@
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"hide": false,
|
||||
"max": 100,
|
||||
"min": 10,
|
||||
"noise": 22,
|
||||
@@ -2079,6 +2067,147 @@
|
||||
],
|
||||
"title": "Backend",
|
||||
"type": "radialbar"
|
||||
},
|
||||
{
|
||||
"collapsed": false,
|
||||
"gridPos": {
|
||||
"h": 1,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 66
|
||||
},
|
||||
"id": 35,
|
||||
"panels": [],
|
||||
"title": "Empty data",
|
||||
"type": "row"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": 0
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 6,
|
||||
"x": 0,
|
||||
"y": 67
|
||||
},
|
||||
"id": 36,
|
||||
"options": {
|
||||
"barWidthFactor": 0.5,
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"segmentCount": 1,
|
||||
"segmentSpacing": 0.3,
|
||||
"shape": "gauge",
|
||||
"showThresholdLabels": false,
|
||||
"showThresholdMarkers": true,
|
||||
"sparkline": true
|
||||
},
|
||||
"pluginVersion": "13.0.0-pre",
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A",
|
||||
"scenarioId": "random_walk",
|
||||
"seriesCount": 0
|
||||
}
|
||||
],
|
||||
"title": "Numeric, no series",
|
||||
"type": "gauge"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "grafana-testdata-datasource"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": 0
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 6,
|
||||
"x": 6,
|
||||
"y": 67
|
||||
},
|
||||
"id": 37,
|
||||
"options": {
|
||||
"barWidthFactor": 0.5,
|
||||
"effects": {
|
||||
"barGlow": false,
|
||||
"centerGlow": false,
|
||||
"gradient": true,
|
||||
"rounded": false,
|
||||
"spotlight": false
|
||||
},
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": ["lastNotNull"],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"segmentCount": 1,
|
||||
"segmentSpacing": 0.3,
|
||||
"shape": "gauge",
|
||||
"showThresholdLabels": false,
|
||||
"showThresholdMarkers": true,
|
||||
"sparkline": true
|
||||
},
|
||||
"pluginVersion": "13.0.0-pre",
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A",
|
||||
"scenarioId": "logs"
|
||||
}
|
||||
],
|
||||
"title": "Non-numeric",
|
||||
"type": "gauge"
|
||||
}
|
||||
],
|
||||
"preload": false,
|
||||
@@ -2095,5 +2224,5 @@
|
||||
"timezone": "browser",
|
||||
"title": "Panel tests - Gauge (new)",
|
||||
"uid": "panel-tests-gauge-new",
|
||||
"version": 6
|
||||
"version": 9
|
||||
}
|
||||
|
||||
@@ -59,9 +59,9 @@ For more details on contact points, including how to test them and enable notifi
|
||||
|
||||
## Alertmanager settings
|
||||
|
||||
| Option | Description |
|
||||
| ------ | ---------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| URL | The Alertmanager URL. This field is [protected](ref:configure-contact-points#protected-fields) from modification in Grafana Cloud. |
|
||||
| Option | Description |
|
||||
| ------ | ----------------------------------------------------------------------------------------------------------------- |
|
||||
| URL | The Alertmanager URL. This field is [protected](ref:configure-contact-points) from modification in Grafana Cloud. |
|
||||
|
||||
#### Optional settings
|
||||
|
||||
|
||||
@@ -49,14 +49,14 @@ For more details on contact points, including how to test them and enable notifi
|
||||
|
||||
### Required Settings
|
||||
|
||||
| Key | Description |
|
||||
| ------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| URL | The URL of the REST API of your Jira instance. Supported versions: `2` and `3` (e.g., `https://your-domain.atlassian.net/rest/api/3`). This field is [protected](ref:configure-contact-points#protected-fields) from modification in Grafana Cloud. |
|
||||
| Basic Auth User | Username for authentication. For Jira Cloud, use your email address. |
|
||||
| Basic Auth Password | Password or personal token. For Jira Cloud, you need to obtain a personal token [here](https://id.atlassian.com/manage-profile/security/api-tokens) and use it as the password. |
|
||||
| API Token | An alternative to basic authentication, a bearer token is used to authorize the API requests. See [Jira documentation](https://confluence.atlassian.com/enterprise/using-personal-access-tokens-1026032365.html) for more information. |
|
||||
| Project Key | The project key identifying the project where issues will be created. Project keys are unique identifiers for a project. |
|
||||
| Issue Type | The type of issue to create (e.g., `Task`, `Bug`, `Incident`). Make sure that you specify a type that is available in your project. |
|
||||
| Key | Description |
|
||||
| ------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| URL | The URL of the REST API of your Jira instance. Supported versions: `2` and `3` (e.g., `https://your-domain.atlassian.net/rest/api/3`). This field is [protected](ref:configure-contact-points) from modification in Grafana Cloud. |
|
||||
| Basic Auth User | Username for authentication. For Jira Cloud, use your email address. |
|
||||
| Basic Auth Password | Password or personal token. For Jira Cloud, you need to obtain a personal token [here](https://id.atlassian.com/manage-profile/security/api-tokens) and use it as the password. |
|
||||
| API Token | An alternative to basic authentication, a bearer token is used to authorize the API requests. See [Jira documentation](https://confluence.atlassian.com/enterprise/using-personal-access-tokens-1026032365.html) for more information. |
|
||||
| Project Key | The project key identifying the project where issues will be created. Project keys are unique identifiers for a project. |
|
||||
| Issue Type | The type of issue to create (e.g., `Task`, `Bug`, `Incident`). Make sure that you specify a type that is available in your project. |
|
||||
|
||||
### Optional Settings
|
||||
|
||||
|
||||
@@ -54,10 +54,10 @@ For more details on contact points, including how to test them and enable notifi
|
||||
|
||||
### Required Settings
|
||||
|
||||
| Option | Description |
|
||||
| ---------- | ---------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| Broker URL | The URL of the MQTT broker. This field is [protected](ref:configure-contact-points#protected-fields) from modification in Grafana Cloud. |
|
||||
| Topic | The topic to which the message will be sent. |
|
||||
| Option | Description |
|
||||
| ---------- | ----------------------------------------------------------------------------------------------------------------------- |
|
||||
| Broker URL | The URL of the MQTT broker. This field is [protected](ref:configure-contact-points) from modification in Grafana Cloud. |
|
||||
| Topic | The topic to which the message will be sent. |
|
||||
|
||||
### Optional Settings
|
||||
|
||||
|
||||
@@ -51,8 +51,8 @@ You can customize the `title` and `body` of the Slack message using [notificatio
|
||||
|
||||
If you are using a Slack API Token, complete the following steps.
|
||||
|
||||
1. Follow steps 1 and 2 of the [Slack API Quickstart](https://api.slack.com/start/quickstart).
|
||||
1. Add the [chat:write.public](https://api.slack.com/scopes/chat:write.public) scope to give your app the ability to post in all public channels without joining.
|
||||
1. Follow step 1 of the [Slack API Quickstart](https://docs.slack.dev/app-management/quickstart-app-settings/#creating) to create the app.
|
||||
1. Continue onto the second step of the [Slack API Quickstart](https://docs.slack.dev/app-management/quickstart-app-settings/#scopes) and add the [chat:write.public](https://api.slack.com/scopes/chat:write.public) scope as described to give your app the ability to post in all public channels without joining.
|
||||
1. In OAuth Tokens for Your Workspace, copy the Bot User OAuth Token.
|
||||
1. Open your Slack workplace.
|
||||
1. Right click the channel you want to receive notifications in.
|
||||
|
||||
@@ -62,9 +62,9 @@ For more details on contact points, including how to test them and enable notifi
|
||||
|
||||
## Webhook settings
|
||||
|
||||
| Option | Description |
|
||||
| ------ | ----------------------------------------------------------------------------------------------------------------------------- |
|
||||
| URL | The Webhook URL. This field is [protected](ref:configure-contact-points#protected-fields) from modification in Grafana Cloud. |
|
||||
| Option | Description |
|
||||
| ------ | ------------------------------------------------------------------------------------------------------------ |
|
||||
| URL | The Webhook URL. This field is [protected](ref:configure-contact-points) from modification in Grafana Cloud. |
|
||||
|
||||
#### Optional settings
|
||||
|
||||
|
||||
@@ -81,7 +81,7 @@ Replace the placeholders with your values:
|
||||
|
||||
In your `grafana` directory, create a sub-folder called `dashboards`.
|
||||
|
||||
This guide shows you how to creates three separate dashboards. For all dashboard configurations, replace the placeholders with your values:
|
||||
This guide shows you how to create three separate dashboards. For all dashboard configurations, replace the placeholders with your values:
|
||||
|
||||
- _`<GRAFANA_CLOUD_STACK_NAME>`_: Name of your Grafana Cloud Stack
|
||||
- _`<GRAFANA_OPERATOR_NAMESPACE>`_: Namespace where the `grafana-operator` is deployed in your Kubernetes cluster
|
||||
|
||||
@@ -0,0 +1,147 @@
|
||||
---
|
||||
title: Git Sync deployment scenarios
|
||||
menuTitle: Deployment scenarios
|
||||
description: Learn about common Git Sync deployment patterns and configurations for different organizational needs
|
||||
weight: 450
|
||||
keywords:
|
||||
- git sync
|
||||
- deployment patterns
|
||||
- scenarios
|
||||
- multi-environment
|
||||
- teams
|
||||
---
|
||||
|
||||
# Git Sync deployment scenarios
|
||||
|
||||
This guide shows practical deployment scenarios for Grafana’s Git Sync. Learn how to configure bidirectional synchronization between Grafana and Git repositories for teams, environments, and regions.
|
||||
|
||||
{{< admonition type="caution" >}}
|
||||
Git Sync is an experimental feature. It reflects Grafana’s approach to Observability as Code and might include limitations or breaking changes. For current status and known limitations, refer to the [Git Sync introduction](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/intro-git-sync/).
|
||||
{{< /admonition >}}
|
||||
|
||||
## Understand the relationship between key Git Sync components
|
||||
|
||||
Before you explore the scenarios, understand how the key Git Sync components relate:
|
||||
|
||||
- [Grafana instance](#grafana-instance)
|
||||
- [Git repository structure](#git-repository-structure)
|
||||
- [Git Sync repository resource](#git-sync-repository-resource)
|
||||
|
||||
### Grafana instance
|
||||
|
||||
A Grafana instance is a running Grafana server. Multiple instances can:
|
||||
|
||||
- Connect to the same Git repository using different Repository configurations.
|
||||
- Sync from different branches of the same repository.
|
||||
- Sync from different paths within the same repository.
|
||||
- Sync from different repositories.
|
||||
|
||||
### Git repository structure
|
||||
|
||||
You can organize your Git repository in several ways:
|
||||
|
||||
- Single branch, multiple paths: Use different directories for different purposes (for example, `dev/`, `prod/`, `team-a/`).
|
||||
- Multiple branches: Use different branches for different environments or teams (for example, `main`, `develop`, `team-a`).
|
||||
- Multiple repositories: Use separate repositories for different teams or environments.
|
||||
|
||||
### Git Sync repository resource
|
||||
|
||||
A repository resource is a Grafana configuration object that defines:
|
||||
|
||||
- Which Git repository to sync with.
|
||||
- Which branch to use.
|
||||
- Which directory path to synchronize.
|
||||
- Sync behavior and workflows.
|
||||
|
||||
Each repository resource creates bidirectional synchronization between a Grafana instance and a specific location in Git.
|
||||
|
||||
## How does repository sync behave?
|
||||
|
||||
With Git Sync you configure a repository resource to sync with your Grafana instance:
|
||||
|
||||
1. Grafana monitors the specified Git location (repository, branch, and path).
|
||||
2. Grafana creates a folder in Dashboards (typically named after the repository).
|
||||
3. Grafana creates dashboards from dashboard JSON files in Git within this folder.
|
||||
4. Grafana commits dashboard changes made in the UI back to Git.
|
||||
5. Grafana pulls dashboard changes made in Git and updates dashboards in the UI.
|
||||
6. Synchronization occurs at regular intervals (configurable), or instantly if you use webhooks.
|
||||
|
||||
You can find the provisioned dashboards organized in folders under **Dashboards**.
|
||||
|
||||
## Example: Relationship between repository, branch, and path
|
||||
|
||||
Here's a concrete example showing how the three parameters work together:
|
||||
|
||||
**Configuration:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `team-platform/grafana/`
|
||||
|
||||
**In Git (on branch `main`):**
|
||||
|
||||
```
|
||||
your-org/grafana-manifests/
|
||||
├── .git/
|
||||
├── README.md
|
||||
├── team-platform/
|
||||
│ └── grafana/
|
||||
│ ├── cpu-metrics.json ← Synced
|
||||
│ ├── memory-usage.json ← Synced
|
||||
│ └── disk-io.json ← Synced
|
||||
├── team-data/
|
||||
│ └── grafana/
|
||||
│ └── pipeline-stats.json ← Not synced (different path)
|
||||
└── other-files.txt ← Not synced (outside path)
|
||||
```
|
||||
|
||||
**In Grafana Dashboards view:**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
└── 📁 grafana-manifests/
|
||||
├── CPU Metrics Dashboard
|
||||
├── Memory Usage Dashboard
|
||||
└── Disk I/O Dashboard
|
||||
```
|
||||
|
||||
**Key points:**
|
||||
|
||||
- Grafana only synchronizes files within the specified path (`team-platform/grafana/`).
|
||||
- Grafana ignores files in other paths or at the repository root.
|
||||
- The folder name in Grafana comes from the repository name.
|
||||
- Dashboard titles come from the JSON file content, not the filename.
|
||||
|
||||
## Repository configuration flexibility
|
||||
|
||||
Git Sync repositories support different combinations of repository URL, branch, and path:
|
||||
|
||||
- Different Git repositories: Each environment or team can use its own repository.
|
||||
- Instance A: `repository: your-org/grafana-prod`.
|
||||
- Instance B: `repository: your-org/grafana-dev`.
|
||||
- Different branches: Use separate branches within the same repository.
|
||||
- Instance A: `repository: your-org/grafana-manifests, branch: main`.
|
||||
- Instance B: `repository: your-org/grafana-manifests, branch: develop`.
|
||||
- Different paths: Use different directory paths within the same repository.
|
||||
- Instance A: `repository: your-org/grafana-manifests, branch: main, path: production/`.
|
||||
- Instance B: `repository: your-org/grafana-manifests, branch: main, path: development/`.
|
||||
- Any combination: Mix and match based on your workflow requirements.
|
||||
|
||||
## Scenarios
|
||||
|
||||
Use these deployment scenarios to plan your Git Sync setup:
|
||||
|
||||
- [Single instance](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-deployment-scenarios/single-instance/)
|
||||
- [Git Sync for development and production environments](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-deployment-scenarios/dev-prod/)
|
||||
- [Git Sync with regional replication](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-deployment-scenarios/multi-region/)
|
||||
- [High availability](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-deployment-scenarios/high-availability/)
|
||||
- [Git Sync in a shared Grafana instance](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-deployment-scenarios/multi-team/)
|
||||
|
||||
## Learn more
|
||||
|
||||
Refer to the following documents to learn more:
|
||||
|
||||
- [Git Sync introduction](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/intro-git-sync/)
|
||||
- [Git Sync setup guide](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-setup/)
|
||||
- [Dashboard provisioning](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/administration/provisioning/)
|
||||
- [Observability as Code](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/)
|
||||
@@ -0,0 +1,147 @@
|
||||
---
|
||||
title: Git Sync for development and production environments
|
||||
menuTitle: Across environments
|
||||
description: Use separate Grafana instances for development and production with Git-controlled promotion
|
||||
weight: 20
|
||||
---
|
||||
|
||||
# Git Sync for development and production environments
|
||||
|
||||
Use separate Grafana instances for development and production. Each instance syncs with a different Git location so you can test dashboards before promoting them to production.
|
||||
|
||||
## Use it for
|
||||
|
||||
- **Staged deployments**: You need to test dashboard changes before production deployment.
|
||||
- **Change control**: You require approvals before dashboards reach production.
|
||||
- **Quality assurance**: You verify dashboard functionality in a non-production environment.
|
||||
- **Risk mitigation**: You minimize the risk of breaking production dashboards.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌────────────────────────────────────────────────────────────┐
|
||||
│ GitHub Repository │
|
||||
│ Repository: your-org/grafana-manifests │
|
||||
│ Branch: main │
|
||||
│ │
|
||||
│ grafana-manifests/ │
|
||||
│ ├── dev/ │
|
||||
│ │ ├── dashboard-new.json ← Development dashboards │
|
||||
│ │ └── dashboard-test.json │
|
||||
│ │ │
|
||||
│ └── prod/ │
|
||||
│ ├── dashboard-stable.json ← Production dashboards │
|
||||
│ └── dashboard-approved.json │
|
||||
└────────────────────────────────────────────────────────────┘
|
||||
↕ ↕
|
||||
Git Sync (dev/) Git Sync (prod/)
|
||||
↕ ↕
|
||||
┌─────────────────────┐ ┌─────────────────────┐
|
||||
│ Dev Grafana │ │ Prod Grafana │
|
||||
│ │ │ │
|
||||
│ Repository: │ │ Repository: │
|
||||
│ - path: dev/ │ │ - path: prod/ │
|
||||
│ │ │ │
|
||||
│ Creates folder: │ │ Creates folder: │
|
||||
│ "grafana-manifests"│ │ "grafana-manifests"│
|
||||
└─────────────────────┘ └─────────────────────┘
|
||||
```
|
||||
|
||||
## Repository structure
|
||||
|
||||
**In Git:**
|
||||
|
||||
```
|
||||
your-org/grafana-manifests
|
||||
├── dev/
|
||||
│ ├── dashboard-new.json
|
||||
│ └── dashboard-test.json
|
||||
└── prod/
|
||||
├── dashboard-stable.json
|
||||
└── dashboard-approved.json
|
||||
```
|
||||
|
||||
**In Grafana Dashboards view:**
|
||||
|
||||
**Dev instance:**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
└── 📁 grafana-manifests/
|
||||
├── New Dashboard
|
||||
└── Test Dashboard
|
||||
```
|
||||
|
||||
**Prod instance:**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
└── 📁 grafana-manifests/
|
||||
├── Stable Dashboard
|
||||
└── Approved Dashboard
|
||||
```
|
||||
|
||||
- Both instances create a folder named "grafana-manifests" (from repository name)
|
||||
- Each instance only shows dashboards from its configured path (`dev/` or `prod/`)
|
||||
- Dashboards appear with their titles from the JSON files
|
||||
|
||||
## Configuration parameters
|
||||
|
||||
Development:
|
||||
|
||||
- Repository: `your-org/grafana-manifests`
|
||||
- Branch: `main`
|
||||
- Path: `dev/`
|
||||
|
||||
Production:
|
||||
|
||||
- Repository: `your-org/grafana-manifests`
|
||||
- Branch: `main`
|
||||
- Path: `prod/`
|
||||
|
||||
## How it works
|
||||
|
||||
1. Developers create and modify dashboards in development.
|
||||
2. Git Sync commits changes to `dev/`.
|
||||
3. You review changes in Git.
|
||||
4. You promote approved dashboards from `dev/` to `prod/`.
|
||||
5. Production syncs from `prod/`.
|
||||
6. Production dashboards update.
|
||||
|
||||
## Alternative: Use branches
|
||||
|
||||
Instead of using different paths, you can configure instances to use different branches:
|
||||
|
||||
**Development instance:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `develop`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
**Production instance:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
With this approach:
|
||||
|
||||
- Development changes go to the `develop` branch
|
||||
- Use Git merge or pull request workflows to promote changes from `develop` to `main`
|
||||
- Production automatically syncs from the `main` branch
|
||||
|
||||
## Alternative: Use separate repositories for stricter isolation
|
||||
|
||||
For stricter isolation, use completely separate repositories:
|
||||
|
||||
**Development instance:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests-dev`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
**Production instance:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests-prod`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `grafana/`
|
||||
@@ -0,0 +1,217 @@
|
||||
---
|
||||
title: Git Sync for high availability environments
|
||||
menuTitle: High availability
|
||||
description: Run multiple Grafana instances serving traffic simultaneously, synchronized via Git Sync
|
||||
weight: 50
|
||||
---
|
||||
|
||||
# Git Sync for high availability environments
|
||||
|
||||
## Primary–replica scenario
|
||||
|
||||
Use a primary Grafana instance and one or more replicas synchronized with the same Git location to enable failover.
|
||||
|
||||
### Use it for
|
||||
|
||||
- **Automatic failover**: You need service continuity when the primary instance fails.
|
||||
- **High availability**: Your organization requires guaranteed dashboard availability.
|
||||
- **Simple HA setup**: You want high availability without the complexity of active–active.
|
||||
- **Maintenance windows**: You perform updates while another instance serves traffic.
|
||||
- **Business continuity**: Dashboard access can't tolerate downtime.
|
||||
|
||||
### Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────┐
|
||||
│ GitHub Repository │
|
||||
│ Repository: your-org/grafana-manifests │
|
||||
│ Branch: main │
|
||||
│ │
|
||||
│ grafana-manifests/ │
|
||||
│ └── shared/ │
|
||||
│ ├── dashboard-metrics.json │
|
||||
│ ├── dashboard-alerts.json │
|
||||
│ └── dashboard-logs.json │
|
||||
└─────────────────────────────────────────────────────┘
|
||||
↕ ↕
|
||||
Git Sync (shared/) Git Sync (shared/)
|
||||
↕ ↕
|
||||
┌────────────────────┐ ┌────────────────────┐
|
||||
│ Master Grafana │ │ Replica Grafana │
|
||||
│ (Active) │ │ (Standby) │
|
||||
│ │ │ │
|
||||
│ Repository: │ │ Repository: │
|
||||
│ - path: shared/ │ │ - path: shared/ │
|
||||
└────────────────────┘ └────────────────────┘
|
||||
│ │
|
||||
└───────────┬───────────────────┘
|
||||
↓
|
||||
┌──────────────────────┐
|
||||
│ Reverse Proxy │
|
||||
│ (Failover) │
|
||||
└──────────────────────┘
|
||||
```
|
||||
|
||||
### Repository structure
|
||||
|
||||
**In Git:**
|
||||
|
||||
```
|
||||
your-org/grafana-manifests
|
||||
└── shared/
|
||||
├── dashboard-metrics.json
|
||||
├── dashboard-alerts.json
|
||||
└── dashboard-logs.json
|
||||
```
|
||||
|
||||
**In Grafana Dashboards view (both instances):**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
└── 📁 grafana-manifests/
|
||||
├── Metrics Dashboard
|
||||
├── Alerts Dashboard
|
||||
└── Logs Dashboard
|
||||
```
|
||||
|
||||
- Primary and replica instances show an identical folder structure.
|
||||
- Both sync from the same `shared/` path.
|
||||
- The reverse proxy routes traffic to the primary (active) instance.
|
||||
- If the primary fails, the proxy automatically fails over to the replica (standby).
|
||||
- Users see the same dashboards regardless of which instance is serving traffic.
|
||||
|
||||
### Configuration parameters
|
||||
|
||||
Both primary and replica instances use identical parameters:
|
||||
|
||||
**Primary instance:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `shared/`
|
||||
|
||||
**Replica instance:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `shared/`
|
||||
|
||||
### How it works
|
||||
|
||||
1. Both instances stay synchronized through Git.
|
||||
2. The reverse proxy routes traffic to the primary instance.
|
||||
3. Users edit dashboards on the primary instance. Git Sync commits the changes.
|
||||
4. Both instances pull latest changes to keep replica in sync.
|
||||
5. If the primary instance fails, the proxy fails over to the replica.
|
||||
|
||||
### Failover considerations
|
||||
|
||||
- Health checks and monitoring.
|
||||
- Continuous syncing to minimize data loss.
|
||||
- Plan failback (automatic or manual).
|
||||
|
||||
## Load balancer scenario
|
||||
|
||||
Run multiple active Grafana instances behind a load balancer. All instances sync from the same Git location.
|
||||
|
||||
### Use it for
|
||||
|
||||
- **High traffic**: Your deployment needs to handle significant user load.
|
||||
- **Load distribution**: You want to distribute user requests across instances.
|
||||
- **Maximum availability**: You need service continuity during maintenance or failures.
|
||||
- **Scalability**: You want to add instances as load increases.
|
||||
- **Performance**: Users need fast response times under heavy load.
|
||||
|
||||
### Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────┐
|
||||
│ GitHub Repository │
|
||||
│ Repository: your-org/grafana-manifests │
|
||||
│ Branch: main │
|
||||
│ │
|
||||
│ grafana-manifests/ │
|
||||
│ └── shared/ │
|
||||
│ ├── dashboard-metrics.json │
|
||||
│ ├── dashboard-alerts.json │
|
||||
│ └── dashboard-logs.json │
|
||||
└─────────────────────────────────────────────────────┘
|
||||
↕ ↕
|
||||
Git Sync (shared/) Git Sync (shared/)
|
||||
↕ ↕
|
||||
┌────────────────────┐ ┌────────────────────┐
|
||||
│ Grafana Instance 1│ │ Grafana Instance 2│
|
||||
│ (Active) │ │ (Active) │
|
||||
│ │ │ │
|
||||
│ Repository: │ │ Repository: │
|
||||
│ - path: shared/ │ │ - path: shared/ │
|
||||
└────────────────────┘ └────────────────────┘
|
||||
│ │
|
||||
└───────────┬───────────────────┘
|
||||
↓
|
||||
┌──────────────────────┐
|
||||
│ Load Balancer │
|
||||
│ (Round Robin) │
|
||||
└──────────────────────┘
|
||||
```
|
||||
|
||||
### Repository structure
|
||||
|
||||
**In Git:**
|
||||
|
||||
```
|
||||
your-org/grafana-manifests
|
||||
└── shared/
|
||||
├── dashboard-metrics.json
|
||||
├── dashboard-alerts.json
|
||||
└── dashboard-logs.json
|
||||
```
|
||||
|
||||
**In Grafana Dashboards view (all instances):**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
└── 📁 grafana-manifests/
|
||||
├── Metrics Dashboard
|
||||
├── Alerts Dashboard
|
||||
└── Logs Dashboard
|
||||
```
|
||||
|
||||
- All instances show identical folder structure.
|
||||
- All instances sync from the same `shared/` path.
|
||||
- Load balancer distributes requests across all active instances.
|
||||
- Any instance can serve read requests.
|
||||
- Any instance can accept dashboard modifications.
|
||||
- Changes propagate to all instances through Git.
|
||||
|
||||
### Configuration parameters
|
||||
|
||||
All instances use identical parameters:
|
||||
|
||||
**Instance 1:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `shared/`
|
||||
|
||||
**Instance 2:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `shared/`
|
||||
|
||||
### How it works
|
||||
|
||||
1. All instances stay synchronized through Git.
|
||||
2. Load balancer distributes incoming traffic across all active instances.
|
||||
3. Users can view dashboards from any instance.
|
||||
4. When a user modifies a dashboard on any instance, Git Sync commits the change.
|
||||
5. All other instances pull the updated dashboard during their next sync cycle, or instantly if webhooks are configured.
|
||||
6. If one instance fails, load balancer stops routing traffic to it and remaining instances continue serving.
|
||||
|
||||
### Important considerations
|
||||
|
||||
- **Eventually consistent**: Due to sync intervals, instances may briefly have different dashboard versions.
|
||||
- **Concurrent edits**: Multiple users editing the same dashboard on different instances can cause conflicts.
|
||||
- **Database sharing**: Instances should share the same backend database for user sessions, preferences, and annotations.
|
||||
- **Stateless design**: Design for stateless operation where possible to maximize load balancing effectiveness.
|
||||
@@ -0,0 +1,93 @@
|
||||
---
|
||||
title: Git Sync with regional replication
|
||||
menuTitle: Regional replication
|
||||
description: Synchronize multiple regional Grafana instances from a shared Git location
|
||||
weight: 30
|
||||
---
|
||||
|
||||
# Git Sync with regional replication
|
||||
|
||||
Deploy multiple Grafana instances across regions. Synchronize them with the same Git location to ensure consistent dashboards everywhere.
|
||||
|
||||
## Use it for
|
||||
|
||||
- **Geographic distribution**: You deploy Grafana close to users in different regions.
|
||||
- **Latency reduction**: Users need fast dashboard access from their location.
|
||||
- **Data sovereignty**: You keep dashboard data in specific regions.
|
||||
- **High availability**: You need dashboard availability across regions.
|
||||
- **Consistent experience**: All users see the same dashboards regardless of region.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────┐
|
||||
│ GitHub Repository │
|
||||
│ Repository: your-org/grafana-manifests │
|
||||
│ Branch: main │
|
||||
│ │
|
||||
│ grafana-manifests/ │
|
||||
│ └── shared/ │
|
||||
│ ├── dashboard-global.json │
|
||||
│ ├── dashboard-metrics.json │
|
||||
│ └── dashboard-logs.json │
|
||||
└─────────────────────────────────────────────────────┘
|
||||
↕ ↕
|
||||
Git Sync (shared/) Git Sync (shared/)
|
||||
↕ ↕
|
||||
┌────────────────────┐ ┌────────────────────┐
|
||||
│ US Region │ │ EU Region │
|
||||
│ Grafana │ │ Grafana │
|
||||
│ │ │ │
|
||||
│ Repository: │ │ Repository: │
|
||||
│ - path: shared/ │ │ - path: shared/ │
|
||||
└────────────────────┘ └────────────────────┘
|
||||
```
|
||||
|
||||
## Repository structure
|
||||
|
||||
**In Git:**
|
||||
|
||||
```
|
||||
your-org/grafana-manifests
|
||||
└── shared/
|
||||
├── dashboard-global.json
|
||||
├── dashboard-metrics.json
|
||||
└── dashboard-logs.json
|
||||
```
|
||||
|
||||
**In Grafana Dashboards view (all regions):**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
└── 📁 grafana-manifests/
|
||||
├── Global Dashboard
|
||||
├── Metrics Dashboard
|
||||
└── Logs Dashboard
|
||||
```
|
||||
|
||||
- All regional instances (US, EU, etc.) show identical folder structure
|
||||
- Same folder name "grafana-manifests" in every region
|
||||
- Same dashboards synced from the `shared/` path appear everywhere
|
||||
- Users in any region see the exact same dashboards with the same titles
|
||||
|
||||
## Configuration parameters
|
||||
|
||||
All regions:
|
||||
|
||||
- Repository: `your-org/grafana-manifests`
|
||||
- Branch: `main`
|
||||
- Path: `shared/`
|
||||
|
||||
## How it works
|
||||
|
||||
1. All regional instances pull dashboards from `shared/`.
|
||||
2. When a dashboard changes in any region, Git Sync commits the change to Git.
|
||||
3. Other regions pull updates during the next sync (or via webhooks).
|
||||
4. Changes propagate across regions per sync interval.
|
||||
|
||||
## Considerations
|
||||
|
||||
- **Write conflicts**: If users in different regions modify the same dashboard simultaneously, Git uses last-write-wins.
|
||||
- **Primary region**: Consider designating one region as the primary location for making dashboard changes.
|
||||
- **Propagation time**: Changes propagate to all regions within the configured sync interval, or instantly if webhooks are configured.
|
||||
- **Network reliability**: Ensure all regions have reliable connectivity to the Git repository.
|
||||
@@ -0,0 +1,169 @@
|
||||
---
|
||||
title: Multiple team Git Sync
|
||||
menuTitle: Shared instance
|
||||
description: Use multiple Git repositories with one Grafana instance, one repository per team
|
||||
weight: 60
|
||||
---
|
||||
|
||||
# Git Sync in a Grafana instance shared by multiple teams
|
||||
|
||||
Use a single Grafana instance with multiple Repository resources, one per team. Each team manages its own dashboards while sharing Grafana.
|
||||
|
||||
## Use it for
|
||||
|
||||
- **Team autonomy**: Different teams manage their own dashboards independently.
|
||||
- **Organizational structure**: Dashboard organization aligns with team structure.
|
||||
- **Resource efficiency**: Multiple teams share Grafana infrastructure.
|
||||
- **Cost optimization**: You reduce infrastructure costs while maintaining team separation.
|
||||
- **Collaboration**: Teams can view each other’s dashboards while managing their own.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────┐ ┌─────────────────────────┐
|
||||
│ Platform Team Repo │ │ Data Team Repo │
|
||||
│ platform-dashboards │ │ data-dashboards │
|
||||
│ │ │ │
|
||||
│ platform-dashboards/ │ │ data-dashboards/ │
|
||||
│ └── grafana/ │ │ └── grafana/ │
|
||||
│ ├── k8s.json │ │ ├── pipeline.json │
|
||||
│ └── infra.json │ │ └── analytics.json │
|
||||
└─────────────────────────┘ └─────────────────────────┘
|
||||
↕ ↕
|
||||
Git Sync (grafana/) Git Sync (grafana/)
|
||||
↕ ↕
|
||||
┌──────────────────────────────────────┐
|
||||
│ Grafana Instance │
|
||||
│ │
|
||||
│ Repository 1: │
|
||||
│ - repo: platform-dashboards │
|
||||
│ → Creates "platform-dashboards" │
|
||||
│ │
|
||||
│ Repository 2: │
|
||||
│ - repo: data-dashboards │
|
||||
│ → Creates "data-dashboards" │
|
||||
└──────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## Repository structure
|
||||
|
||||
**In Git (separate repositories):**
|
||||
|
||||
**Platform team repository:**
|
||||
|
||||
```
|
||||
your-org/platform-dashboards
|
||||
└── grafana/
|
||||
├── dashboard-k8s.json
|
||||
└── dashboard-infra.json
|
||||
```
|
||||
|
||||
**Data team repository:**
|
||||
|
||||
```
|
||||
your-org/data-dashboards
|
||||
└── grafana/
|
||||
├── dashboard-pipeline.json
|
||||
└── dashboard-analytics.json
|
||||
```
|
||||
|
||||
**In Grafana Dashboards view:**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
├── 📁 platform-dashboards/
|
||||
│ ├── Kubernetes Dashboard
|
||||
│ └── Infrastructure Dashboard
|
||||
└── 📁 data-dashboards/
|
||||
├── Pipeline Dashboard
|
||||
└── Analytics Dashboard
|
||||
```
|
||||
|
||||
- Two separate folders created (one per Repository resource).
|
||||
- Folder names derived from repository names.
|
||||
- Each team has complete control over their own repository.
|
||||
- Teams can independently manage permissions, branches, and workflows in their repos.
|
||||
- All teams can view each other's dashboards in Grafana but manage only their own.
|
||||
|
||||
## Configuration parameters
|
||||
|
||||
**Platform team repository:**
|
||||
|
||||
- **Repository**: `your-org/platform-dashboards`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
**Data team repository:**
|
||||
|
||||
- **Repository**: `your-org/data-dashboards`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
## How it works
|
||||
|
||||
1. Each team has their own Git repository for complete autonomy.
|
||||
2. Each repository resource in Grafana creates a separate folder.
|
||||
3. Platform team dashboards sync from `your-org/platform-dashboards` repository.
|
||||
4. Data team dashboards sync from `your-org/data-dashboards` repository.
|
||||
5. Teams can independently manage their repository settings, access controls, and workflows.
|
||||
6. All teams can view each other's dashboards in Grafana but edit only their own.
|
||||
|
||||
## Scale to more teams
|
||||
|
||||
Adding more teams is straightforward. For a third team, create a new repository and configure:
|
||||
|
||||
- **Repository**: `your-org/security-dashboards`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
This creates a new "security-dashboards" folder in the same Grafana instance.
|
||||
|
||||
## Alternative: Shared repository with different paths
|
||||
|
||||
For teams that prefer sharing a single repository, use different paths to separate team dashboards:
|
||||
|
||||
**In Git:**
|
||||
|
||||
```
|
||||
your-org/grafana-manifests
|
||||
├── team-platform/
|
||||
│ ├── dashboard-k8s.json
|
||||
│ └── dashboard-infra.json
|
||||
└── team-data/
|
||||
├── dashboard-pipeline.json
|
||||
└── dashboard-analytics.json
|
||||
```
|
||||
|
||||
**Configuration:**
|
||||
|
||||
**Platform team:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `team-platform/`
|
||||
|
||||
**Data team:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `team-data/`
|
||||
|
||||
This approach provides simpler repository management but less isolation between teams.
|
||||
|
||||
## Alternative: Different branches per team
|
||||
|
||||
For teams wanting their own branch in a shared repository:
|
||||
|
||||
**Platform team:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `team-platform`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
**Data team:**
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `team-data`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
This allows teams to use Git branch workflows for collaboration while sharing the same repository.
|
||||
@@ -0,0 +1,86 @@
|
||||
---
|
||||
title: Single instance Git Sync
|
||||
menuTitle: Single instance
|
||||
description: Synchronize a single Grafana instance with a Git repository
|
||||
weight: 10
|
||||
---
|
||||
|
||||
# Single instance Git Sync
|
||||
|
||||
Use a single Grafana instance synchronized with a Git repository. This is the foundation for Git Sync and helps you understand bidirectional synchronization.
|
||||
|
||||
## Use it for
|
||||
|
||||
- **Getting started**: You want to learn how Git Sync works before implementing complex scenarios.
|
||||
- **Personal projects**: Individual developers manage their own dashboards.
|
||||
- **Small teams**: You have a simple setup without multiple environments or complex workflows.
|
||||
- **Development environments**: You need quick prototyping and testing.
|
||||
|
||||
## Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────┐
|
||||
│ GitHub Repository │
|
||||
│ Repository: your-org/grafana-manifests │
|
||||
│ Branch: main │
|
||||
│ │
|
||||
│ grafana-manifests/ │
|
||||
│ └── grafana/ │
|
||||
│ ├── dashboard-1.json │
|
||||
│ ├── dashboard-2.json │
|
||||
│ └── dashboard-3.json │
|
||||
└─────────────────────────────────────────────────────┘
|
||||
↕
|
||||
Git Sync (bidirectional)
|
||||
↕
|
||||
┌─────────────────────────────┐
|
||||
│ Grafana Instance │
|
||||
│ │
|
||||
│ Repository Resource: │
|
||||
│ - url: grafana-manifests │
|
||||
│ - branch: main │
|
||||
│ - path: grafana/ │
|
||||
│ │
|
||||
│ Creates folder: │
|
||||
│ "grafana-manifests" │
|
||||
└─────────────────────────────┘
|
||||
```
|
||||
|
||||
## Repository structure
|
||||
|
||||
**In Git:**
|
||||
|
||||
```
|
||||
your-org/grafana-manifests
|
||||
└── grafana/
|
||||
├── dashboard-1.json
|
||||
├── dashboard-2.json
|
||||
└── dashboard-3.json
|
||||
```
|
||||
|
||||
**In Grafana Dashboards view:**
|
||||
|
||||
```
|
||||
Dashboards
|
||||
└── 📁 grafana-manifests/
|
||||
├── Dashboard 1
|
||||
├── Dashboard 2
|
||||
└── Dashboard 3
|
||||
```
|
||||
|
||||
- A folder named "grafana-manifests" (from repository name) contains all synced dashboards.
|
||||
- Each JSON file becomes a dashboard with its title displayed in the folder.
|
||||
- Users browse dashboards organized under this folder structure.
|
||||
|
||||
## Configuration parameters
|
||||
|
||||
Configure your Grafana instance to synchronize with:
|
||||
|
||||
- **Repository**: `your-org/grafana-manifests`
|
||||
- **Branch**: `main`
|
||||
- **Path**: `grafana/`
|
||||
|
||||
## How it works
|
||||
|
||||
1. **From Grafana to Git**: When users create or modify dashboards in Grafana, Git Sync commits changes to the `grafana/` directory on the `main` branch.
|
||||
2. **From Git to Grafana**: When dashboard JSON files are added or modified in the `grafana/` directory, Git Sync pulls these changes into Grafana.
|
||||
@@ -367,5 +367,6 @@ To learn more about using Git Sync:
|
||||
|
||||
- [Work with provisioned dashboards](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/provisioned-dashboards/)
|
||||
- [Manage provisioned repositories with Git Sync](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/use-git-sync/)
|
||||
- [Git Sync deployment scenarios](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-deployment-scenarios)
|
||||
- [Export resources](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/export-resources/)
|
||||
- [grafanactl documentation](https://grafana.github.io/grafanactl/)
|
||||
|
||||
@@ -127,7 +127,13 @@ An instance can be in one of the following Git Sync states:
|
||||
|
||||
## Common use cases
|
||||
|
||||
You can use Git Sync in the following scenarios.
|
||||
{{< admonition type="note" >}}
|
||||
|
||||
Refer to [Git Sync deployment scenarios](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/as-code/observability-as-code/provision-resources/git-sync-deployment-scenarios) for sample scenarios, including architecture and configuration details.
|
||||
|
||||
{{< /admonition >}}
|
||||
|
||||
You can use Git Sync for the following use cases:
|
||||
|
||||
### Version control and auditing
|
||||
|
||||
|
||||
@@ -14,7 +14,7 @@ labels:
|
||||
- cloud
|
||||
title: Manage provisioned repositories with Git Sync
|
||||
menuTitle: Manage repositories with Git Sync
|
||||
weight: 120
|
||||
weight: 400
|
||||
canonical: https://grafana.com/docs/grafana/latest/as-code/observability-as-code/provision-resources/use-git-sync/
|
||||
aliases:
|
||||
- ../../../observability-as-code/provision-resources/use-git-sync/ # /docs/grafana/next/observability-as-code/provision-resources/use-git-sync/
|
||||
|
||||
@@ -113,6 +113,7 @@ The following documentation will help you get started working with Prometheus an
|
||||
- [Configure the Prometheus data source](ref:configure-prometheus-data-source)
|
||||
- [Prometheus query editor](query-editor/)
|
||||
- [Template variables](template-variables/)
|
||||
- [Troubleshooting](troubleshooting/)
|
||||
|
||||
## Exemplars
|
||||
|
||||
|
||||
313
docs/sources/datasources/prometheus/troubleshooting/index.md
Normal file
313
docs/sources/datasources/prometheus/troubleshooting/index.md
Normal file
@@ -0,0 +1,313 @@
|
||||
---
|
||||
aliases:
|
||||
- ../../data-sources/prometheus/troubleshooting/
|
||||
description: Troubleshooting the Prometheus data source in Grafana
|
||||
keywords:
|
||||
- grafana
|
||||
- prometheus
|
||||
- troubleshooting
|
||||
- errors
|
||||
- promql
|
||||
labels:
|
||||
products:
|
||||
- cloud
|
||||
- enterprise
|
||||
- oss
|
||||
menuTitle: Troubleshooting
|
||||
title: Troubleshoot Prometheus data source issues
|
||||
weight: 600
|
||||
---
|
||||
|
||||
# Troubleshoot Prometheus data source issues
|
||||
|
||||
This document provides troubleshooting information for common errors you may encounter when using the Prometheus data source in Grafana.
|
||||
|
||||
## Connection errors
|
||||
|
||||
The following errors occur when Grafana cannot establish or maintain a connection to Prometheus.
|
||||
|
||||
### Failed to connect to Prometheus
|
||||
|
||||
**Error message:** "There was an error returned querying the Prometheus API"
|
||||
|
||||
**Cause:** Grafana cannot establish a network connection to the Prometheus server.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify that the Prometheus server URL is correct in the data source configuration.
|
||||
1. Check that Prometheus is running and accessible from the Grafana server.
|
||||
1. Ensure the URL includes the protocol (`http://` or `https://`).
|
||||
1. Verify the port is correct (the Prometheus default port is `9090`).
|
||||
1. Ensure there are no firewall rules blocking the connection.
|
||||
1. If Grafana and Prometheus are running in separate containers, use the container IP address or hostname instead of `localhost`.
|
||||
1. For Grafana Cloud, ensure you have configured [Private data source connect](https://grafana.com/docs/grafana-cloud/connect-externally-hosted/private-data-source-connect/) if your Prometheus instance is not publicly accessible.
|
||||
|
||||
### Request timed out
|
||||
|
||||
**Error message:** "context deadline exceeded" or "request timeout"
|
||||
|
||||
**Cause:** The connection to Prometheus timed out before receiving a response.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Check the network latency between Grafana and Prometheus.
|
||||
1. Verify that Prometheus is not overloaded or experiencing performance issues.
|
||||
1. Increase the **Query timeout** setting in the data source configuration under **Interval behavior**.
|
||||
1. Check the [Grafana server timeout configuration](https://grafana.com/docs/grafana/latest/setup-grafana/configure-grafana#timeout) for server-level timeout settings.
|
||||
1. Reduce the time range or complexity of your query.
|
||||
1. Check if any network devices (load balancers, proxies) are timing out the connection.
|
||||
|
||||
### Failed to parse data source URL
|
||||
|
||||
**Error message:** "Failed to parse data source URL"
|
||||
|
||||
**Cause:** The URL entered in the data source configuration is not valid.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify the URL format is correct (for example, `http://localhost:9090` or `https://prometheus.example.com:9090`).
|
||||
1. Ensure the URL includes the protocol (`http://` or `https://`).
|
||||
1. Remove any trailing slashes or invalid characters from the URL.
|
||||
|
||||
## Authentication errors
|
||||
|
||||
The following errors occur when there are issues with authentication credentials or permissions.
|
||||
|
||||
### Unauthorized (401)
|
||||
|
||||
**Error message:** "401 Unauthorized" or "Authorization failed"
|
||||
|
||||
**Cause:** The authentication credentials are invalid or missing.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify that the username and password are correct if using basic authentication.
|
||||
1. Check that the authentication method selected matches your Prometheus configuration.
|
||||
1. If using a reverse proxy with authentication, verify the credentials are correct.
|
||||
1. For AWS SigV4 authentication, verify the IAM credentials and permissions. Alternatively, consider using the [Amazon Managed Service for Prometheus data source](https://grafana.com/grafana/plugins/grafana-amazonprometheus-datasource/) for simplified AWS authentication.
|
||||
|
||||
### Forbidden (403)
|
||||
|
||||
**Error message:** "403 Forbidden" or "Access denied"
|
||||
|
||||
**Cause:** The authenticated user does not have permission to access the requested resource.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify the user has read access to the Prometheus API.
|
||||
1. Check Prometheus security settings and access control configuration.
|
||||
1. If using a reverse proxy, verify the proxy is not blocking the request.
|
||||
1. For AWS Managed Prometheus, verify the IAM policy grants the required permissions. Alternatively, consider using the [Amazon Managed Service for Prometheus data source](https://grafana.com/grafana/plugins/grafana-amazonprometheus-datasource/) for simplified AWS authentication.
|
||||
|
||||
## Query errors
|
||||
|
||||
The following errors occur when there are issues with PromQL syntax or query execution.
|
||||
|
||||
### Query syntax error
|
||||
|
||||
**Error message:** "parse error: unexpected character" or "bad_data: 1:X: parse error"
|
||||
|
||||
**Cause:** The PromQL query contains invalid syntax.
|
||||
|
||||
**Alternative cause:** A proxy between Grafana and Prometheus requires authentication. When proxy authentication fails, the proxy redirects the request to an HTML authentication page. Grafana cannot parse the HTML response, which results in a parse error. This appears to be a query issue but is actually a proxy authentication issue.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Check your query syntax for typos or invalid characters.
|
||||
1. Verify that metric names and label names are valid identifiers.
|
||||
1. Ensure string values in label matchers are enclosed in quotes.
|
||||
1. Use the Prometheus expression browser to test your query directly.
|
||||
1. Refer to the [Prometheus querying documentation](https://prometheus.io/docs/prometheus/latest/querying/basics/) for syntax guidance.
|
||||
1. If you have a proxy between Grafana and Prometheus, verify that proxy authentication is correctly configured. Check your proxy logs for authentication failures or redirects.
|
||||
|
||||
### Query returns no data for a metric
|
||||
|
||||
**Symptom:** The query returns no data and the visualization is empty.
|
||||
|
||||
**Cause:** The specified metric does not exist in Prometheus, or there is no data for the selected time range.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify the metric name is spelled correctly.
|
||||
1. Check that the metric is being scraped by Prometheus.
|
||||
1. Use the Prometheus API to browse available metrics at `/api/v1/label/__name__/values`.
|
||||
1. Use the [target metadata API](https://prometheus.io/docs/prometheus/latest/querying/api#querying-target-metadata) to verify which metrics a target exposes.
|
||||
1. Verify the time range includes data for the metric.
|
||||
|
||||
### Query timeout limit exceeded
|
||||
|
||||
**Error message:** "query timed out in expression evaluation" or "query processing would load too many samples"
|
||||
|
||||
**Cause:** The query took longer than the configured timeout limit or would return too many samples.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Reduce the time range of your query.
|
||||
1. Add more specific label filters to limit the data scanned.
|
||||
1. Increase the **Query timeout** setting in the data source configuration.
|
||||
1. Use aggregation functions like `sum()`, `avg()`, or `rate()` to reduce the number of time series.
|
||||
1. Increase the `query.timeout` or `query.max-samples` settings in Prometheus if you have admin access.
|
||||
|
||||
### Too many time series
|
||||
|
||||
**Error message:** "exceeded maximum resolution of 11,000 points per timeseries" or "maximum number of series limit exceeded"
|
||||
|
||||
**Cause:** The query is returning more time series or data points than the configured limits allow.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Reduce the time range of your query.
|
||||
1. Add label filters to limit the number of time series returned.
|
||||
1. Increase the **Min interval** or **Resolution** in the query options to reduce the number of data points.
|
||||
1. Use aggregation functions to combine time series.
|
||||
1. Adjust the **Series limit** setting in the data source configuration under **Other settings**.
|
||||
|
||||
### Invalid function or aggregation
|
||||
|
||||
**Error message:** "unknown function" or "parse error: unexpected aggregation"
|
||||
|
||||
**Cause:** The query uses an invalid or unsupported PromQL function.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify the function name is spelled correctly and is a valid PromQL function.
|
||||
1. Check that you are using the correct syntax for the function.
|
||||
1. Ensure your Prometheus version supports the function you are using.
|
||||
1. Refer to the [PromQL functions documentation](https://prometheus.io/docs/prometheus/latest/querying/functions/) for available functions.
|
||||
|
||||
## Configuration errors
|
||||
|
||||
The following errors occur when the data source is not configured correctly.
|
||||
|
||||
### Invalid Prometheus type
|
||||
|
||||
**Error message:** Unexpected behavior when querying metrics or labels
|
||||
|
||||
**Cause:** The **Prometheus type** setting does not match your actual Prometheus-compatible database.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Open the data source configuration in Grafana.
|
||||
1. Under **Performance**, select the correct **Prometheus type** (Prometheus, Cortex, Mimir, or Thanos).
|
||||
1. Different database types support different APIs, so setting this incorrectly may cause unexpected behavior.
|
||||
|
||||
### Scrape interval mismatch
|
||||
|
||||
**Error message:** Data appears sparse or aggregated incorrectly
|
||||
|
||||
**Cause:** The **Scrape interval** setting in Grafana does not match the actual scrape interval in Prometheus.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Check your Prometheus configuration file for the `scrape_interval` setting.
|
||||
1. Update the **Scrape interval** in the Grafana data source configuration under **Interval behavior** to match.
|
||||
1. If the Grafana interval is higher than the Prometheus interval, you may see fewer data points than expected.
|
||||
|
||||
## TLS and certificate errors
|
||||
|
||||
The following errors occur when there are issues with TLS configuration.
|
||||
|
||||
### Certificate verification failed
|
||||
|
||||
**Error message:** "x509: certificate signed by unknown authority" or "certificate verify failed"
|
||||
|
||||
**Cause:** Grafana cannot verify the TLS certificate presented by Prometheus.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. If using a self-signed certificate, enable **Add self-signed certificate** in the TLS settings and add your CA certificate.
|
||||
1. Verify the certificate chain is complete and valid.
|
||||
1. Ensure the certificate has not expired.
|
||||
1. As a temporary workaround for testing, enable **Skip TLS verify** (not recommended for production).
|
||||
|
||||
### TLS handshake error
|
||||
|
||||
**Error message:** "TLS: handshake failure" or "connection reset"
|
||||
|
||||
**Cause:** The TLS handshake between Grafana and Prometheus failed.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify that Prometheus is configured to use TLS.
|
||||
1. Check that the TLS version and cipher suites are compatible.
|
||||
1. If using client certificates, ensure they are correctly configured in the **TLS client authentication** section.
|
||||
1. Verify the server name matches the certificate's Common Name or Subject Alternative Name.
|
||||
|
||||
## Other common issues
|
||||
|
||||
The following issues don't produce specific error messages but are commonly encountered.
|
||||
|
||||
### Empty query results
|
||||
|
||||
**Cause:** The query returns no data.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify the time range includes data in Prometheus.
|
||||
1. Check that the metric and label names are correct.
|
||||
1. Test the query directly in the Prometheus expression browser.
|
||||
1. Ensure label filters are not excluding all data.
|
||||
1. For rate or increase functions, ensure the time range is at least twice the scrape interval.
|
||||
|
||||
### Slow query performance
|
||||
|
||||
**Cause:** Queries take a long time to execute.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Reduce the time range of your query.
|
||||
1. Add more specific label filters to limit the data scanned.
|
||||
1. Increase the **Min interval** in the query options.
|
||||
1. Check Prometheus server performance and resource utilization.
|
||||
1. Enable **Disable metrics lookup** in the data source configuration for large Prometheus instances.
|
||||
1. Enable **Incremental querying (beta)** to cache query results.
|
||||
1. Consider using recording rules to pre-aggregate frequently queried data.
|
||||
|
||||
### Data appears delayed or missing recent points
|
||||
|
||||
**Cause:** The visualization doesn't show the most recent data.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Check the dashboard time range and refresh settings.
|
||||
1. Verify the **Scrape interval** is configured correctly.
|
||||
1. Ensure Prometheus has finished scraping the target.
|
||||
1. Check for clock synchronization issues between Grafana and Prometheus.
|
||||
1. For `rate()` and similar functions, remember that they need at least two data points to calculate.
|
||||
|
||||
### Exemplars not showing
|
||||
|
||||
**Cause:** Exemplar data is not appearing in visualizations.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify that exemplars are enabled in the data source configuration under **Exemplars**.
|
||||
1. Check that your Prometheus version supports exemplars (2.26+).
|
||||
1. Ensure your instrumented application is sending exemplar data.
|
||||
1. Verify the tracing data source is correctly configured for the exemplar link.
|
||||
1. Enable the **Exemplars** toggle in the query editor.
|
||||
|
||||
### Alerting rules not visible
|
||||
|
||||
**Cause:** Prometheus alerting rules are not appearing in the Grafana Alerting UI.
|
||||
|
||||
**Solution:**
|
||||
|
||||
1. Verify that **Manage alerts via Alerting UI** is enabled in the data source configuration.
|
||||
1. Check that Prometheus has alerting rules configured.
|
||||
1. Ensure Grafana can access the Prometheus rules API endpoint.
|
||||
1. Note that for Prometheus (unlike Mimir), the Alerting UI only supports viewing existing rules, not creating new ones.
|
||||
|
||||
## Get additional help
|
||||
|
||||
If you continue to experience issues after following this troubleshooting guide:
|
||||
|
||||
1. Check the [Prometheus documentation](https://prometheus.io/docs/) for API and PromQL guidance.
|
||||
1. Review the [Grafana community forums](https://community.grafana.com/) for similar issues.
|
||||
1. Contact Grafana Support if you are a Cloud Pro, Cloud Contracted, or Enterprise user.
|
||||
1. When reporting issues, include:
|
||||
- Grafana version
|
||||
- Prometheus version and type (Prometheus, Mimir, Cortex, Thanos)
|
||||
- Error messages (redact sensitive information)
|
||||
- Steps to reproduce
|
||||
- Relevant configuration such as data source settings, query timeout, and TLS settings (redact tokens, passwords, and other credentials)
|
||||
@@ -1138,7 +1138,7 @@ export type JobResourceSummary = {
|
||||
delete?: number;
|
||||
/** Create or update (export) */
|
||||
error?: number;
|
||||
/** Report errors for this resource type This may not be an exhaustive list and recommend looking at the logs for more info */
|
||||
/** Report errors/warnings for this resource type This may not be an exhaustive list and recommend looking at the logs for more info */
|
||||
errors?: string[];
|
||||
group?: string;
|
||||
kind?: string;
|
||||
@@ -1146,6 +1146,9 @@ export type JobResourceSummary = {
|
||||
noop?: number;
|
||||
total?: number;
|
||||
update?: number;
|
||||
/** The error count */
|
||||
warning?: number;
|
||||
warnings?: string[];
|
||||
write?: number;
|
||||
};
|
||||
export type RepositoryUrLs = {
|
||||
@@ -1176,6 +1179,7 @@ export type JobStatus = {
|
||||
summary?: JobResourceSummary[];
|
||||
/** URLs contains URLs for the reference branch or commit if applicable. */
|
||||
url?: RepositoryUrLs;
|
||||
warnings?: string[];
|
||||
};
|
||||
export type Job = {
|
||||
/** APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources */
|
||||
|
||||
@@ -106,6 +106,11 @@ export function RadialGauge(props: RadialGaugeProps) {
|
||||
const gaugeId = useId();
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
let effectiveTextMode = textMode;
|
||||
if (effectiveTextMode === 'auto') {
|
||||
effectiveTextMode = vizCount === 1 ? 'value' : 'value_and_name';
|
||||
}
|
||||
|
||||
const startAngle = shape === 'gauge' ? 250 : 0;
|
||||
const endAngle = shape === 'gauge' ? 110 : 360;
|
||||
|
||||
@@ -188,7 +193,7 @@ export function RadialGauge(props: RadialGaugeProps) {
|
||||
// These elements are only added for first value / bar
|
||||
if (barIndex === 0) {
|
||||
if (glowBar) {
|
||||
defs.push(<GlowGradient key="glow-filter" id={glowFilterId} radius={dimensions.radius} />);
|
||||
defs.push(<GlowGradient key="glow-filter" id={glowFilterId} barWidth={dimensions.barWidth} />);
|
||||
}
|
||||
|
||||
if (glowCenter) {
|
||||
@@ -198,14 +203,14 @@ export function RadialGauge(props: RadialGaugeProps) {
|
||||
graphics.push(
|
||||
<RadialText
|
||||
key="radial-text"
|
||||
vizCount={vizCount}
|
||||
textMode={textMode}
|
||||
textMode={effectiveTextMode}
|
||||
displayValue={displayValue.display}
|
||||
dimensions={dimensions}
|
||||
theme={theme}
|
||||
valueManualFontSize={props.valueManualFontSize}
|
||||
nameManualFontSize={props.nameManualFontSize}
|
||||
shape={shape}
|
||||
sparkline={displayValue.sparkline}
|
||||
/>
|
||||
);
|
||||
|
||||
@@ -254,6 +259,7 @@ export function RadialGauge(props: RadialGaugeProps) {
|
||||
theme={theme}
|
||||
color={color}
|
||||
shape={shape}
|
||||
textMode={effectiveTextMode}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { css } from '@emotion/css';
|
||||
|
||||
import { FieldDisplay, GrafanaTheme2, FieldConfig } from '@grafana/data';
|
||||
import { GraphFieldConfig, GraphGradientMode, LineInterpolation } from '@grafana/schema';
|
||||
|
||||
import { Sparkline } from '../Sparkline/Sparkline';
|
||||
|
||||
import { RadialShape } from './RadialGauge';
|
||||
import { RadialShape, RadialTextMode } from './RadialGauge';
|
||||
import { GaugeDimensions } from './utils';
|
||||
|
||||
interface RadialSparklineProps {
|
||||
@@ -14,23 +12,22 @@ interface RadialSparklineProps {
|
||||
theme: GrafanaTheme2;
|
||||
color?: string;
|
||||
shape?: RadialShape;
|
||||
textMode: Exclude<RadialTextMode, 'auto'>;
|
||||
}
|
||||
export function RadialSparkline({ sparkline, dimensions, theme, color, shape }: RadialSparklineProps) {
|
||||
export function RadialSparkline({ sparkline, dimensions, theme, color, shape, textMode }: RadialSparklineProps) {
|
||||
const { radius, barWidth } = dimensions;
|
||||
|
||||
if (!sparkline) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const { radius, barWidth } = dimensions;
|
||||
|
||||
const height = radius / 4;
|
||||
const widthFactor = shape === 'gauge' ? 1.6 : 1.4;
|
||||
const width = radius * widthFactor - barWidth;
|
||||
const topPos = shape === 'gauge' ? `${dimensions.gaugeBottomY - height}px` : `calc(50% + ${radius / 2.8}px)`;
|
||||
|
||||
const styles = css({
|
||||
position: 'absolute',
|
||||
top: topPos,
|
||||
});
|
||||
const showNameAndValue = textMode === 'value_and_name';
|
||||
const height = radius / (showNameAndValue ? 4 : 3);
|
||||
const width = radius * (shape === 'gauge' ? 1.6 : 1.4) - barWidth;
|
||||
const topPos =
|
||||
shape === 'gauge'
|
||||
? `${dimensions.gaugeBottomY - height}px`
|
||||
: `calc(50% + ${radius / (showNameAndValue ? 3.3 : 4)}px)`;
|
||||
|
||||
const config: FieldConfig<GraphFieldConfig> = {
|
||||
color: {
|
||||
@@ -45,7 +42,7 @@ export function RadialSparkline({ sparkline, dimensions, theme, color, shape }:
|
||||
};
|
||||
|
||||
return (
|
||||
<div className={styles}>
|
||||
<div style={{ position: 'absolute', top: topPos }}>
|
||||
<Sparkline height={height} width={width} sparkline={sparkline} theme={theme} config={config} />
|
||||
</div>
|
||||
);
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
import { css } from '@emotion/css';
|
||||
|
||||
import { DisplayValue, DisplayValueAlignmentFactors, formattedValueToString, GrafanaTheme2 } from '@grafana/data';
|
||||
import {
|
||||
DisplayValue,
|
||||
DisplayValueAlignmentFactors,
|
||||
FieldSparkline,
|
||||
formattedValueToString,
|
||||
GrafanaTheme2,
|
||||
} from '@grafana/data';
|
||||
|
||||
import { useStyles2 } from '../../themes/ThemeContext';
|
||||
import { calculateFontSize } from '../../utils/measureText';
|
||||
@@ -8,21 +14,13 @@ import { calculateFontSize } from '../../utils/measureText';
|
||||
import { RadialShape, RadialTextMode } from './RadialGauge';
|
||||
import { GaugeDimensions } from './utils';
|
||||
|
||||
// function toCartesian(centerX: number, centerY: number, radius: number, angleInDegrees: number) {
|
||||
// let radian = ((angleInDegrees - 90) * Math.PI) / 180.0;
|
||||
// return {
|
||||
// x: centerX + radius * Math.cos(radian),
|
||||
// y: centerY + radius * Math.sin(radian),
|
||||
// };
|
||||
// }
|
||||
|
||||
interface RadialTextProps {
|
||||
displayValue: DisplayValue;
|
||||
theme: GrafanaTheme2;
|
||||
dimensions: GaugeDimensions;
|
||||
textMode: RadialTextMode;
|
||||
vizCount: number;
|
||||
textMode: Exclude<RadialTextMode, 'auto'>;
|
||||
shape: RadialShape;
|
||||
sparkline?: FieldSparkline;
|
||||
alignmentFactors?: DisplayValueAlignmentFactors;
|
||||
valueManualFontSize?: number;
|
||||
nameManualFontSize?: number;
|
||||
@@ -33,8 +31,8 @@ export function RadialText({
|
||||
theme,
|
||||
dimensions,
|
||||
textMode,
|
||||
vizCount,
|
||||
shape,
|
||||
sparkline,
|
||||
alignmentFactors,
|
||||
valueManualFontSize,
|
||||
nameManualFontSize,
|
||||
@@ -46,10 +44,6 @@ export function RadialText({
|
||||
return null;
|
||||
}
|
||||
|
||||
if (textMode === 'auto') {
|
||||
textMode = vizCount === 1 ? 'value' : 'value_and_name';
|
||||
}
|
||||
|
||||
const nameToAlignTo = (alignmentFactors ? alignmentFactors.title : displayValue.title) ?? '';
|
||||
const valueToAlignTo = formattedValueToString(alignmentFactors ? alignmentFactors : displayValue);
|
||||
|
||||
@@ -59,7 +53,7 @@ export function RadialText({
|
||||
|
||||
// Not sure where this comes from but svg text is not using body line-height
|
||||
const lineHeight = 1.21;
|
||||
const valueWidthToRadiusFactor = 0.85;
|
||||
const valueWidthToRadiusFactor = 0.82;
|
||||
const nameToHeightFactor = 0.45;
|
||||
const largeRadiusScalingDecay = 0.86;
|
||||
|
||||
@@ -98,18 +92,23 @@ export function RadialText({
|
||||
const valueHeight = valueFontSize * lineHeight;
|
||||
const nameHeight = nameFontSize * lineHeight;
|
||||
|
||||
const valueY = showName ? centerY - nameHeight / 2 : centerY;
|
||||
const valueNameSpacing = valueHeight / 3.5;
|
||||
const nameY = showValue ? valueY + valueHeight / 2 + valueNameSpacing : centerY;
|
||||
const valueY = showName ? centerY - nameHeight * 0.3 : centerY;
|
||||
const nameY = showValue ? valueY + valueHeight * 0.7 : centerY;
|
||||
const nameColor = showValue ? theme.colors.text.secondary : theme.colors.text.primary;
|
||||
const suffixShift = (valueFontSize - unitFontSize * 1.2) / 2;
|
||||
|
||||
// For gauge shape we shift text up a bit
|
||||
const valueDy = shape === 'gauge' ? -valueFontSize * 0.3 : 0;
|
||||
const nameDy = shape === 'gauge' ? -nameFontSize * 0.7 : 0;
|
||||
// adjust the text up on gauges and when sparklines are present
|
||||
let yOffset = 0;
|
||||
if (shape === 'gauge') {
|
||||
// we render from the center of the gauge, so move up by half of half of the total height
|
||||
yOffset -= (valueHeight + nameHeight) / 4;
|
||||
}
|
||||
if (sparkline) {
|
||||
yOffset -= 8;
|
||||
}
|
||||
|
||||
return (
|
||||
<g>
|
||||
<g transform={`translate(0, ${yOffset})`}>
|
||||
{showValue && (
|
||||
<text
|
||||
x={centerX}
|
||||
@@ -119,7 +118,6 @@ export function RadialText({
|
||||
className={styles.text}
|
||||
textAnchor="middle"
|
||||
dominantBaseline="middle"
|
||||
dy={valueDy}
|
||||
>
|
||||
<tspan fontSize={unitFontSize}>{displayValue.prefix ?? ''}</tspan>
|
||||
<tspan>{displayValue.text}</tspan>
|
||||
@@ -133,7 +131,6 @@ export function RadialText({
|
||||
fontSize={nameFontSize}
|
||||
x={centerX}
|
||||
y={nameY}
|
||||
dy={nameDy}
|
||||
textAnchor="middle"
|
||||
dominantBaseline="middle"
|
||||
fill={nameColor}
|
||||
|
||||
@@ -4,11 +4,12 @@ import { GaugeDimensions } from './utils';
|
||||
|
||||
export interface GlowGradientProps {
|
||||
id: string;
|
||||
radius: number;
|
||||
barWidth: number;
|
||||
}
|
||||
|
||||
export function GlowGradient({ id, radius }: GlowGradientProps) {
|
||||
const glowSize = 0.02 * radius;
|
||||
export function GlowGradient({ id, barWidth }: GlowGradientProps) {
|
||||
// 0.75 is the minimum glow size, and it scales with bar width
|
||||
const glowSize = 0.75 + barWidth * 0.08;
|
||||
|
||||
return (
|
||||
<filter id={id} filterUnits="userSpaceOnUse">
|
||||
@@ -82,7 +83,7 @@ export function MiddleCircleGlow({ dimensions, gaugeId, color }: CenterGlowProps
|
||||
<>
|
||||
<defs>
|
||||
<radialGradient id={gradientId} r={'50%'} fr={'0%'}>
|
||||
<stop offset="0%" stopColor={color} stopOpacity={0.2} />
|
||||
<stop offset="0%" stopColor={color} stopOpacity={0.15} />
|
||||
<stop offset="90%" stopColor={color} stopOpacity={0} />
|
||||
</radialGradient>
|
||||
</defs>
|
||||
|
||||
@@ -16,7 +16,7 @@ export interface SparklineProps extends Themeable2 {
|
||||
sparkline: FieldSparkline;
|
||||
}
|
||||
|
||||
export const Sparkline: React.FC<SparklineProps> = memo((props) => {
|
||||
const SparklineFn: React.FC<SparklineProps> = memo((props) => {
|
||||
const { sparkline, config: fieldConfig, theme, width, height } = props;
|
||||
|
||||
const { frame: alignedDataFrame, warning } = prepareSeries(sparkline, fieldConfig);
|
||||
@@ -30,4 +30,14 @@ export const Sparkline: React.FC<SparklineProps> = memo((props) => {
|
||||
return <UPlotChart data={data} config={configBuilder} width={width} height={height} />;
|
||||
});
|
||||
|
||||
Sparkline.displayName = 'Sparkline';
|
||||
SparklineFn.displayName = 'Sparkline';
|
||||
|
||||
// we converted to function component above, but some apps extend Sparkline, so we need
|
||||
// to keep exporting a class component until those apps are all rolled out.
|
||||
// see https://github.com/grafana/app-observability-plugin/pull/2079
|
||||
// eslint-disable-next-line react-prefer-function-component/react-prefer-function-component
|
||||
export class Sparkline extends React.PureComponent<SparklineProps> {
|
||||
render() {
|
||||
return <SparklineFn {...this.props} />;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -119,7 +119,14 @@ describe('Get y range', () => {
|
||||
values: [2, 1.999999999999999, 2.000000000000001, 2, 2],
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
state: { range: { min: 1.999999999999999, max: 2.000000000000001, delta: 0 } },
|
||||
state: { range: { min: 1.9999999999999999999, max: 2.000000000000000001, delta: 0 } },
|
||||
};
|
||||
const decimalsNotCloseYField: Field = {
|
||||
name: 'y',
|
||||
values: [2, 0.0094, 0.0053, 0.0078, 0.0061],
|
||||
type: FieldType.number,
|
||||
config: {},
|
||||
state: { range: { min: 0.0053, max: 0.0094, delta: 0.0041 } },
|
||||
};
|
||||
const xField: Field = {
|
||||
name: 'x',
|
||||
@@ -183,6 +190,11 @@ describe('Get y range', () => {
|
||||
field: decimalsCloseYField,
|
||||
expected: [2, 4],
|
||||
},
|
||||
{
|
||||
description: 'decimal values which are not close to equal should not be rounded out',
|
||||
field: decimalsNotCloseYField,
|
||||
expected: [0.0053, 0.0094],
|
||||
},
|
||||
])(`should return correct range for $description`, ({ field, expected }) => {
|
||||
const actual = getYRange(getAlignedFrame(field));
|
||||
expect(actual).toEqual(expected);
|
||||
|
||||
@@ -8,6 +8,7 @@ import {
|
||||
FieldType,
|
||||
getFieldColorModeForField,
|
||||
GrafanaTheme2,
|
||||
guessDecimals,
|
||||
isLikelyAscendingVector,
|
||||
nullToValue,
|
||||
roundDecimals,
|
||||
@@ -76,8 +77,6 @@ export function getYRange(alignedFrame: DataFrame): Range.MinMax {
|
||||
min = Math.min(min!, field.config.min ?? Infinity);
|
||||
max = Math.max(max!, field.config.max ?? -Infinity);
|
||||
|
||||
// console.log({ min, max });
|
||||
|
||||
// if noValue is set, ensure that it is included in the range as well
|
||||
const noValue = +field.config?.noValue!;
|
||||
if (!Number.isNaN(noValue)) {
|
||||
@@ -85,9 +84,11 @@ export function getYRange(alignedFrame: DataFrame): Range.MinMax {
|
||||
max = Math.max(max, noValue);
|
||||
}
|
||||
|
||||
const decimals = field.config.decimals ?? Math.max(guessDecimals(min), guessDecimals(max));
|
||||
|
||||
// call roundDecimals to mirror what is going to eventually happen in uplot
|
||||
let roundedMin = roundDecimals(min, field.config.decimals ?? 0);
|
||||
let roundedMax = roundDecimals(max, field.config.decimals ?? 0);
|
||||
let roundedMin = roundDecimals(min, decimals);
|
||||
let roundedMax = roundDecimals(max, decimals);
|
||||
|
||||
// if the rounded min and max are different,
|
||||
// we can return the real min and max.
|
||||
@@ -102,11 +103,9 @@ export function getYRange(alignedFrame: DataFrame): Range.MinMax {
|
||||
roundedMax = 1;
|
||||
} else if (roundedMin < 0) {
|
||||
// both are negative
|
||||
// max = 0;
|
||||
roundedMin *= 2;
|
||||
} else {
|
||||
// both are positive
|
||||
// min = 0;
|
||||
roundedMax *= 2;
|
||||
}
|
||||
|
||||
|
||||
29
pkg/apiserver/auditing/noop.go
Normal file
29
pkg/apiserver/auditing/noop.go
Normal file
@@ -0,0 +1,29 @@
|
||||
package auditing
|
||||
|
||||
import (
|
||||
auditinternal "k8s.io/apiserver/pkg/apis/audit"
|
||||
"k8s.io/apiserver/pkg/audit"
|
||||
"k8s.io/apiserver/pkg/authorization/authorizer"
|
||||
)
|
||||
|
||||
// NoopBackend is a no-op implementation of audit.Backend
|
||||
type NoopBackend struct{}
|
||||
|
||||
func ProvideNoopBackend() audit.Backend { return &NoopBackend{} }
|
||||
|
||||
func (b *NoopBackend) ProcessEvents(k8sEvents ...*auditinternal.Event) bool { return false }
|
||||
|
||||
func (NoopBackend) Run(stopCh <-chan struct{}) error { return nil }
|
||||
|
||||
func (NoopBackend) Shutdown() {}
|
||||
|
||||
func (NoopBackend) String() string { return "" }
|
||||
|
||||
// NoopPolicyRuleEvaluator is a no-op implementation of audit.PolicyRuleEvaluator
|
||||
type NoopPolicyRuleEvaluator struct{}
|
||||
|
||||
func ProvideNoopPolicyRuleEvaluator() audit.PolicyRuleEvaluator { return &NoopPolicyRuleEvaluator{} }
|
||||
|
||||
func (NoopPolicyRuleEvaluator) EvaluatePolicyRule(authorizer.Attributes) audit.RequestAuditConfig {
|
||||
return audit.RequestAuditConfig{Level: auditinternal.LevelNone}
|
||||
}
|
||||
@@ -61,20 +61,24 @@ func (s *legacyStorage) List(ctx context.Context, options *internalversion.ListO
|
||||
}
|
||||
|
||||
func (s *legacyStorage) Get(ctx context.Context, name string, options *metav1.GetOptions) (runtime.Object, error) {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Get"), time.Since(start).Seconds())
|
||||
}()
|
||||
if s.dsConfigHandlerRequestsDuration != nil {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Get"), time.Since(start).Seconds())
|
||||
}()
|
||||
}
|
||||
|
||||
return s.datasources.GetDataSource(ctx, name)
|
||||
}
|
||||
|
||||
// Create implements rest.Creater.
|
||||
func (s *legacyStorage) Create(ctx context.Context, obj runtime.Object, createValidation rest.ValidateObjectFunc, options *metav1.CreateOptions) (runtime.Object, error) {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Create"), time.Since(start).Seconds())
|
||||
}()
|
||||
if s.dsConfigHandlerRequestsDuration != nil {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Create"), time.Since(start).Seconds())
|
||||
}()
|
||||
}
|
||||
|
||||
ds, ok := obj.(*v0alpha1.DataSource)
|
||||
if !ok {
|
||||
@@ -85,10 +89,12 @@ func (s *legacyStorage) Create(ctx context.Context, obj runtime.Object, createVa
|
||||
|
||||
// Update implements rest.Updater.
|
||||
func (s *legacyStorage) Update(ctx context.Context, name string, objInfo rest.UpdatedObjectInfo, createValidation rest.ValidateObjectFunc, updateValidation rest.ValidateObjectUpdateFunc, forceAllowCreate bool, options *metav1.UpdateOptions) (runtime.Object, bool, error) {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Create"), time.Since(start).Seconds())
|
||||
}()
|
||||
if s.dsConfigHandlerRequestsDuration != nil {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Create"), time.Since(start).Seconds())
|
||||
}()
|
||||
}
|
||||
|
||||
old, err := s.Get(ctx, name, &metav1.GetOptions{})
|
||||
if err != nil {
|
||||
@@ -126,10 +132,12 @@ func (s *legacyStorage) Update(ctx context.Context, name string, objInfo rest.Up
|
||||
|
||||
// Delete implements rest.GracefulDeleter.
|
||||
func (s *legacyStorage) Delete(ctx context.Context, name string, deleteValidation rest.ValidateObjectFunc, options *metav1.DeleteOptions) (runtime.Object, bool, error) {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Create"), time.Since(start).Seconds())
|
||||
}()
|
||||
if s.dsConfigHandlerRequestsDuration != nil {
|
||||
start := time.Now()
|
||||
defer func() {
|
||||
metricutil.ObserveWithExemplar(ctx, s.dsConfigHandlerRequestsDuration.WithLabelValues("new", "Create"), time.Since(start).Seconds())
|
||||
}()
|
||||
}
|
||||
|
||||
err := s.datasources.DeleteDataSource(ctx, name)
|
||||
return nil, false, err
|
||||
|
||||
@@ -3,6 +3,7 @@ package datasource
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"maps"
|
||||
|
||||
@@ -38,14 +39,14 @@ var (
|
||||
// DataSourceAPIBuilder is used just so wire has something unique to return
|
||||
type DataSourceAPIBuilder struct {
|
||||
datasourceResourceInfo utils.ResourceInfo
|
||||
|
||||
pluginJSON plugins.JSONData
|
||||
client PluginClient // will only ever be called with the same plugin id!
|
||||
datasources PluginDatasourceProvider
|
||||
contextProvider PluginContextWrapper
|
||||
accessControl accesscontrol.AccessControl
|
||||
queryTypes *queryV0.QueryTypeDefinitionList
|
||||
configCrudUseNewApis bool
|
||||
pluginJSON plugins.JSONData
|
||||
client PluginClient // will only ever be called with the same plugin id!
|
||||
datasources PluginDatasourceProvider
|
||||
contextProvider PluginContextWrapper
|
||||
accessControl accesscontrol.AccessControl
|
||||
queryTypes *queryV0.QueryTypeDefinitionList
|
||||
configCrudUseNewApis bool
|
||||
dataSourceCRUDMetric *prometheus.HistogramVec
|
||||
}
|
||||
|
||||
func RegisterAPIService(
|
||||
@@ -66,6 +67,16 @@ func RegisterAPIService(
|
||||
var err error
|
||||
var builder *DataSourceAPIBuilder
|
||||
|
||||
dataSourceCRUDMetric := metricutil.NewHistogramVec(prometheus.HistogramOpts{
|
||||
Namespace: "grafana",
|
||||
Name: "ds_config_handler_requests_duration_seconds",
|
||||
Help: "Duration of requests handled by datasource configuration handlers",
|
||||
}, []string{"code_path", "handler"})
|
||||
regErr := reg.Register(dataSourceCRUDMetric)
|
||||
if regErr != nil && !errors.As(regErr, &prometheus.AlreadyRegisteredError{}) {
|
||||
return nil, regErr
|
||||
}
|
||||
|
||||
pluginJSONs, err := getDatasourcePlugins(pluginSources)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error getting list of datasource plugins: %s", err)
|
||||
@@ -91,6 +102,7 @@ func RegisterAPIService(
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
builder.SetDataSourceCRUDMetrics(dataSourceCRUDMetric)
|
||||
|
||||
apiRegistrar.RegisterAPI(builder)
|
||||
}
|
||||
@@ -161,6 +173,10 @@ func (b *DataSourceAPIBuilder) GetGroupVersion() schema.GroupVersion {
|
||||
return b.datasourceResourceInfo.GroupVersion()
|
||||
}
|
||||
|
||||
func (b *DataSourceAPIBuilder) SetDataSourceCRUDMetrics(datasourceCRUDMetric *prometheus.HistogramVec) {
|
||||
b.dataSourceCRUDMetric = datasourceCRUDMetric
|
||||
}
|
||||
|
||||
func addKnownTypes(scheme *runtime.Scheme, gv schema.GroupVersion) {
|
||||
scheme.AddKnownTypes(gv,
|
||||
&datasourceV0.DataSource{},
|
||||
@@ -218,13 +234,9 @@ func (b *DataSourceAPIBuilder) UpdateAPIGroupInfo(apiGroupInfo *genericapiserver
|
||||
|
||||
if b.configCrudUseNewApis {
|
||||
legacyStore := &legacyStorage{
|
||||
datasources: b.datasources,
|
||||
resourceInfo: &ds,
|
||||
dsConfigHandlerRequestsDuration: metricutil.NewHistogramVec(prometheus.HistogramOpts{
|
||||
Namespace: "grafana",
|
||||
Name: "ds_config_handler_requests_duration_seconds",
|
||||
Help: "Duration of requests handled by datasource configuration handlers",
|
||||
}, []string{"code_path", "handler"}),
|
||||
datasources: b.datasources,
|
||||
resourceInfo: &ds,
|
||||
dsConfigHandlerRequestsDuration: b.dataSourceCRUDMetric,
|
||||
}
|
||||
unified, err := grafanaregistry.NewRegistryStore(opts.Scheme, ds, opts.OptsGetter)
|
||||
if err != nil {
|
||||
|
||||
@@ -35,12 +35,13 @@ func maybeNotifyProgress(threshold time.Duration, fn ProgressFn) ProgressFn {
|
||||
|
||||
// FIXME: ProgressRecorder should be initialized in the queue
|
||||
type JobResourceResult struct {
|
||||
Name string
|
||||
Group string
|
||||
Kind string
|
||||
Path string
|
||||
Action repository.FileAction
|
||||
Error error
|
||||
Name string
|
||||
Group string
|
||||
Kind string
|
||||
Path string
|
||||
Action repository.FileAction
|
||||
Error error
|
||||
Warning error
|
||||
}
|
||||
|
||||
type jobProgressRecorder struct {
|
||||
@@ -193,6 +194,10 @@ func (r *jobProgressRecorder) updateSummary(result JobResourceResult) {
|
||||
errorMsg := fmt.Sprintf("%s (file: %s, name: %s, action: %s)", result.Error.Error(), result.Path, result.Name, result.Action)
|
||||
summary.Errors = append(summary.Errors, errorMsg)
|
||||
summary.Error++
|
||||
} else if result.Warning != nil {
|
||||
warningMsg := fmt.Sprintf("%s (file: %s, name: %s, action: %s)", result.Warning.Error(), result.Path, result.Name, result.Action)
|
||||
summary.Warnings = append(summary.Warnings, warningMsg)
|
||||
summary.Warning++
|
||||
} else {
|
||||
switch result.Action {
|
||||
case repository.FileActionDeleted:
|
||||
@@ -266,8 +271,17 @@ func (r *jobProgressRecorder) Complete(ctx context.Context, err error) provision
|
||||
jobStatus.Message = err.Error()
|
||||
}
|
||||
|
||||
jobStatus.Summary = r.summary()
|
||||
summaries := r.summary()
|
||||
jobStatus.Summary = summaries
|
||||
jobStatus.Errors = r.errors
|
||||
|
||||
// Extract warnings from summaries
|
||||
warnings := make([]string, 0)
|
||||
for _, summary := range summaries {
|
||||
warnings = append(warnings, summary.Warnings...)
|
||||
}
|
||||
jobStatus.Warnings = warnings
|
||||
|
||||
jobStatus.URLs = r.refURLs
|
||||
|
||||
tooManyErrors := r.maxErrors > 0 && r.errorCount >= r.maxErrors
|
||||
@@ -283,6 +297,9 @@ func (r *jobProgressRecorder) Complete(ctx context.Context, err error) provision
|
||||
jobStatus.Message = "completed with errors"
|
||||
jobStatus.State = provisioning.JobStateWarning
|
||||
}
|
||||
} else if len(jobStatus.Warnings) > 0 {
|
||||
jobStatus.State = provisioning.JobStateWarning
|
||||
jobStatus.Message = "completed with warnings"
|
||||
}
|
||||
|
||||
// Override message if progress have a more explicit message
|
||||
|
||||
@@ -2,9 +2,11 @@ package jobs
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
|
||||
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
@@ -83,3 +85,167 @@ func TestJobProgressRecorderCompleteIncludesRefURLs(t *testing.T) {
|
||||
assert.Equal(t, provisioning.JobStateSuccess, finalStatus.State)
|
||||
assert.Equal(t, "completed successfully", finalStatus.Message)
|
||||
}
|
||||
|
||||
func TestJobProgressRecorderWarningStatus(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create a progress recorder
|
||||
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
|
||||
return nil
|
||||
}
|
||||
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
|
||||
|
||||
// Record a result with a warning
|
||||
warningErr := errors.New("deprecated API used")
|
||||
result := JobResourceResult{
|
||||
Name: "test-resource",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test.json",
|
||||
Action: repository.FileActionUpdated,
|
||||
Warning: warningErr,
|
||||
}
|
||||
recorder.Record(ctx, result)
|
||||
|
||||
// Record another result with a different warning
|
||||
warningErr2 := errors.New("missing optional field")
|
||||
result2 := JobResourceResult{
|
||||
Name: "test-resource-2",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test2.json",
|
||||
Action: repository.FileActionCreated,
|
||||
Warning: warningErr2,
|
||||
}
|
||||
recorder.Record(ctx, result2)
|
||||
|
||||
// Record a result with a warning from a different resource type
|
||||
warningErr3 := errors.New("validation warning")
|
||||
result3 := JobResourceResult{
|
||||
Name: "test-resource-3",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "DataSource",
|
||||
Path: "datasources/test.yaml",
|
||||
Action: repository.FileActionCreated,
|
||||
Warning: warningErr3,
|
||||
}
|
||||
recorder.Record(ctx, result3)
|
||||
|
||||
// Verify warnings are stored in summaries
|
||||
recorder.mu.RLock()
|
||||
require.Len(t, recorder.summaries, 2) // Dashboard and DataSource
|
||||
dashboardSummary := recorder.summaries["test.grafana.app:Dashboard"]
|
||||
require.NotNil(t, dashboardSummary)
|
||||
assert.Equal(t, int64(2), dashboardSummary.Warning)
|
||||
assert.Len(t, dashboardSummary.Warnings, 2)
|
||||
assert.Contains(t, dashboardSummary.Warnings[0], "deprecated API used")
|
||||
assert.Contains(t, dashboardSummary.Warnings[1], "missing optional field")
|
||||
|
||||
datasourceSummary := recorder.summaries["test.grafana.app:DataSource"]
|
||||
require.NotNil(t, datasourceSummary)
|
||||
assert.Equal(t, int64(1), datasourceSummary.Warning)
|
||||
assert.Len(t, datasourceSummary.Warnings, 1)
|
||||
assert.Contains(t, datasourceSummary.Warnings[0], "validation warning")
|
||||
recorder.mu.RUnlock()
|
||||
|
||||
// Complete the job
|
||||
finalStatus := recorder.Complete(ctx, nil)
|
||||
|
||||
// Verify the final status includes warnings
|
||||
require.NotNil(t, finalStatus.Warnings)
|
||||
assert.Len(t, finalStatus.Warnings, 3)
|
||||
assert.Contains(t, finalStatus.Warnings[0], "deprecated API used")
|
||||
assert.Contains(t, finalStatus.Warnings[1], "missing optional field")
|
||||
assert.Contains(t, finalStatus.Warnings[2], "validation warning")
|
||||
|
||||
// Verify the state is set to Warning
|
||||
assert.Equal(t, provisioning.JobStateWarning, finalStatus.State)
|
||||
assert.Equal(t, "completed with warnings", finalStatus.Message)
|
||||
|
||||
// Verify summaries are included
|
||||
require.Len(t, finalStatus.Summary, 2)
|
||||
|
||||
// Verify no errors were recorded
|
||||
assert.Empty(t, finalStatus.Errors)
|
||||
}
|
||||
|
||||
func TestJobProgressRecorderWarningWithErrors(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create a progress recorder
|
||||
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
|
||||
return nil
|
||||
}
|
||||
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
|
||||
|
||||
// Record a result with an error (errors take precedence)
|
||||
errorErr := errors.New("failed to process")
|
||||
result := JobResourceResult{
|
||||
Name: "test-resource",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test.json",
|
||||
Action: repository.FileActionUpdated,
|
||||
Error: errorErr,
|
||||
}
|
||||
recorder.Record(ctx, result)
|
||||
|
||||
// Record a result with only a warning
|
||||
warningErr := errors.New("deprecated API used")
|
||||
result2 := JobResourceResult{
|
||||
Name: "test-resource-2",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test2.json",
|
||||
Action: repository.FileActionCreated,
|
||||
Warning: warningErr,
|
||||
}
|
||||
recorder.Record(ctx, result2)
|
||||
|
||||
// Complete the job
|
||||
finalStatus := recorder.Complete(ctx, nil)
|
||||
|
||||
// When there are errors, the state should be Warning (not Error unless too many)
|
||||
// and warnings should still be included
|
||||
assert.Equal(t, provisioning.JobStateWarning, finalStatus.State)
|
||||
assert.Equal(t, "completed with errors", finalStatus.Message)
|
||||
assert.Len(t, finalStatus.Errors, 1)
|
||||
assert.Contains(t, finalStatus.Errors[0], "failed to process")
|
||||
|
||||
// Warnings should still be extracted from summaries
|
||||
require.NotNil(t, finalStatus.Warnings)
|
||||
assert.Len(t, finalStatus.Warnings, 1)
|
||||
assert.Contains(t, finalStatus.Warnings[0], "deprecated API used")
|
||||
}
|
||||
|
||||
func TestJobProgressRecorderWarningOnlyNoErrors(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
|
||||
// Create a progress recorder
|
||||
mockProgressFn := func(ctx context.Context, status provisioning.JobStatus) error {
|
||||
return nil
|
||||
}
|
||||
recorder := newJobProgressRecorder(mockProgressFn).(*jobProgressRecorder)
|
||||
|
||||
// Record only warnings, no errors
|
||||
warningErr := errors.New("deprecated API used")
|
||||
result := JobResourceResult{
|
||||
Name: "test-resource",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test.json",
|
||||
Action: repository.FileActionUpdated,
|
||||
Warning: warningErr,
|
||||
}
|
||||
recorder.Record(ctx, result)
|
||||
|
||||
// Complete the job
|
||||
finalStatus := recorder.Complete(ctx, nil)
|
||||
|
||||
// Verify the state is Warning (not Error) when only warnings exist
|
||||
assert.Equal(t, provisioning.JobStateWarning, finalStatus.State)
|
||||
assert.Equal(t, "completed with warnings", finalStatus.Message)
|
||||
assert.Empty(t, finalStatus.Errors)
|
||||
require.NotNil(t, finalStatus.Warnings)
|
||||
assert.Len(t, finalStatus.Warnings, 1)
|
||||
}
|
||||
|
||||
@@ -38,6 +38,7 @@ func RegisterAPIService(features featuremgmt.FeatureToggles, apiregistration bui
|
||||
}
|
||||
|
||||
func (b *ServiceAPIBuilder) GetAuthorizer() authorizer.Authorizer {
|
||||
//nolint:staticcheck // not yet migrated to Resource Authorizer
|
||||
return roleauthorizer.NewRoleAuthorizer()
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ package apiregistry
|
||||
import (
|
||||
"github.com/google/wire"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apiserver/auditing"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/collections"
|
||||
dashboardinternal "github.com/grafana/grafana/pkg/registry/apis/dashboard"
|
||||
"github.com/grafana/grafana/pkg/registry/apis/datasource"
|
||||
@@ -33,6 +34,10 @@ var WireSetExts = wire.NewSet(
|
||||
|
||||
externalgroupmapping.ProvideNoopTeamGroupsREST,
|
||||
wire.Bind(new(externalgroupmapping.TeamGroupsHandler), new(*externalgroupmapping.NoopTeamGroupsREST)),
|
||||
|
||||
// Auditing Options
|
||||
auditing.ProvideNoopBackend,
|
||||
auditing.ProvideNoopPolicyRuleEvaluator,
|
||||
)
|
||||
|
||||
var provisioningExtras = wire.NewSet(
|
||||
|
||||
@@ -5,6 +5,7 @@ import (
|
||||
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/apiserver/pkg/authorization/authorizer"
|
||||
restclient "k8s.io/client-go/rest"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/app"
|
||||
@@ -16,6 +17,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/apiserver/rest"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/appinstaller"
|
||||
roleauthorizer "github.com/grafana/grafana/pkg/services/apiserver/auth/authorizer"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
|
||||
"github.com/grafana/grafana/pkg/services/correlations"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
@@ -60,6 +62,11 @@ func RegisterAppInstaller(
|
||||
return installer, nil
|
||||
}
|
||||
|
||||
func (a *AppInstaller) GetAuthorizer() authorizer.Authorizer {
|
||||
//nolint:staticcheck // not yet migrated to Resource Authorizer
|
||||
return roleauthorizer.NewRoleAuthorizer()
|
||||
}
|
||||
|
||||
func (a *AppInstaller) GetLegacyStorage(requested schema.GroupVersionResource) rest.Storage {
|
||||
kind := correlationsV0.CorrelationKind()
|
||||
gvr := schema.GroupVersionResource{
|
||||
|
||||
@@ -6,17 +6,20 @@ import (
|
||||
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/apiserver/pkg/authorization/authorizer"
|
||||
restclient "k8s.io/client-go/rest"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/app"
|
||||
appsdkapiserver "github.com/grafana/grafana-app-sdk/k8s/apiserver"
|
||||
"github.com/grafana/grafana-app-sdk/simple"
|
||||
|
||||
"github.com/grafana/grafana/apps/playlist/pkg/apis"
|
||||
playlistv0alpha1 "github.com/grafana/grafana/apps/playlist/pkg/apis/playlist/v0alpha1"
|
||||
playlistapp "github.com/grafana/grafana/apps/playlist/pkg/app"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/appinstaller"
|
||||
roleauthorizer "github.com/grafana/grafana/pkg/services/apiserver/auth/authorizer"
|
||||
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
playlistsvc "github.com/grafana/grafana/pkg/services/playlist"
|
||||
@@ -63,6 +66,11 @@ func RegisterAppInstaller(
|
||||
return installer, nil
|
||||
}
|
||||
|
||||
func (p *PlaylistAppInstaller) GetAuthorizer() authorizer.Authorizer {
|
||||
//nolint:staticcheck // not yet migrated to Resource Authorizer
|
||||
return roleauthorizer.NewRoleAuthorizer()
|
||||
}
|
||||
|
||||
// GetLegacyStorage returns the legacy storage for the playlist app.
|
||||
func (p *PlaylistAppInstaller) GetLegacyStorage(requested schema.GroupVersionResource) grafanarest.Storage {
|
||||
gvr := playlistv0alpha1.PlaylistKind().GroupVersionResource()
|
||||
|
||||
@@ -3,12 +3,14 @@ package quotas
|
||||
import (
|
||||
"github.com/grafana/grafana/apps/quotas/pkg/apis"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/resource"
|
||||
"k8s.io/apiserver/pkg/authorization/authorizer"
|
||||
restclient "k8s.io/client-go/rest"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/app"
|
||||
appsdkapiserver "github.com/grafana/grafana-app-sdk/k8s/apiserver"
|
||||
"github.com/grafana/grafana-app-sdk/simple"
|
||||
quotasapp "github.com/grafana/grafana/apps/quotas/pkg/app"
|
||||
roleauthorizer "github.com/grafana/grafana/pkg/services/apiserver/auth/authorizer"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
@@ -22,6 +24,11 @@ type QuotasAppInstaller struct {
|
||||
cfg *setting.Cfg
|
||||
}
|
||||
|
||||
func (a *QuotasAppInstaller) GetAuthorizer() authorizer.Authorizer {
|
||||
//nolint:staticcheck // not yet migrated to Resource Authorizer
|
||||
return roleauthorizer.NewRoleAuthorizer()
|
||||
}
|
||||
|
||||
func RegisterAppInstaller(
|
||||
cfg *setting.Cfg,
|
||||
features featuremgmt.FeatureToggles,
|
||||
|
||||
9
pkg/server/wire_gen.go
generated
9
pkg/server/wire_gen.go
generated
@@ -14,6 +14,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/api"
|
||||
"github.com/grafana/grafana/pkg/api/avatar"
|
||||
"github.com/grafana/grafana/pkg/api/routing"
|
||||
"github.com/grafana/grafana/pkg/apiserver/auditing"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/configprovider"
|
||||
"github.com/grafana/grafana/pkg/expr"
|
||||
@@ -831,7 +832,9 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
|
||||
}
|
||||
v2 := appregistry.ProvideAppInstallers(featureToggles, playlistAppInstaller, appInstaller, shortURLAppInstaller, alertingRulesAppInstaller, correlationsAppInstaller, alertingNotificationsAppInstaller, logsDrilldownAppInstaller, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, alertingHistorianAppInstaller, quotasAppInstaller)
|
||||
builderMetrics := builder.ProvideBuilderMetrics(registerer)
|
||||
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, serverLockService, sqlStore, kvStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics)
|
||||
backend := auditing.ProvideNoopBackend()
|
||||
policyRuleEvaluator := auditing.ProvideNoopPolicyRuleEvaluator()
|
||||
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, serverLockService, sqlStore, kvStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics, backend, policyRuleEvaluator)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@@ -1489,7 +1492,9 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
|
||||
}
|
||||
v2 := appregistry.ProvideAppInstallers(featureToggles, playlistAppInstaller, appInstaller, shortURLAppInstaller, alertingRulesAppInstaller, correlationsAppInstaller, alertingNotificationsAppInstaller, logsDrilldownAppInstaller, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, alertingHistorianAppInstaller, quotasAppInstaller)
|
||||
builderMetrics := builder.ProvideBuilderMetrics(registerer)
|
||||
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, serverLockService, sqlStore, kvStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics)
|
||||
backend := auditing.ProvideNoopBackend()
|
||||
policyRuleEvaluator := auditing.ProvideNoopPolicyRuleEvaluator()
|
||||
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, serverLockService, sqlStore, kvStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics, backend, policyRuleEvaluator)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -114,6 +114,8 @@ func RegisterAuthorizers(
|
||||
registrar.Register(gv, authorizer)
|
||||
logger.Debug("Registered authorizer", "group", gv.Group, "version", gv.Version, "app")
|
||||
}
|
||||
} else {
|
||||
panic("authorizer cannot be nil for api group: " + installer.GroupVersions()[0].Group)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ func TestRegisterAuthorizers(t *testing.T) {
|
||||
name string
|
||||
appInstallers []appsdkapiserver.AppInstaller
|
||||
expectedRegisters int
|
||||
expectedPanic bool
|
||||
}{
|
||||
{
|
||||
name: "empty installers list",
|
||||
@@ -30,7 +31,7 @@ func TestRegisterAuthorizers(t *testing.T) {
|
||||
},
|
||||
},
|
||||
},
|
||||
expectedRegisters: 0,
|
||||
expectedPanic: true,
|
||||
},
|
||||
{
|
||||
name: "single installer with authorizer provider",
|
||||
@@ -46,6 +47,20 @@ func TestRegisterAuthorizers(t *testing.T) {
|
||||
},
|
||||
expectedRegisters: 1,
|
||||
},
|
||||
{
|
||||
name: "single installer with invalid authorizer provider",
|
||||
appInstallers: []appsdkapiserver.AppInstaller{
|
||||
&mockAppInstallerWithAuth{
|
||||
mockAppInstaller: &mockAppInstaller{
|
||||
groupVersions: []schema.GroupVersion{
|
||||
{Group: "test.example.com", Version: "v1"},
|
||||
},
|
||||
},
|
||||
mockAuthorizer: nil,
|
||||
},
|
||||
},
|
||||
expectedPanic: true,
|
||||
},
|
||||
{
|
||||
name: "installer with multiple group versions",
|
||||
appInstallers: []appsdkapiserver.AppInstaller{
|
||||
@@ -63,7 +78,7 @@ func TestRegisterAuthorizers(t *testing.T) {
|
||||
expectedRegisters: 3,
|
||||
},
|
||||
{
|
||||
name: "multiple installers with mixed authorizer support",
|
||||
name: "multiple installers with authorizer support",
|
||||
appInstallers: []appsdkapiserver.AppInstaller{
|
||||
&mockAppInstallerWithAuth{
|
||||
mockAppInstaller: &mockAppInstaller{
|
||||
@@ -73,11 +88,6 @@ func TestRegisterAuthorizers(t *testing.T) {
|
||||
},
|
||||
mockAuthorizer: &mockAuthorizer{},
|
||||
},
|
||||
&mockAppInstaller{
|
||||
groupVersions: []schema.GroupVersion{
|
||||
{Group: "other.example.com", Version: "v1"},
|
||||
},
|
||||
},
|
||||
&mockAppInstallerWithAuth{
|
||||
mockAppInstaller: &mockAppInstaller{
|
||||
groupVersions: []schema.GroupVersion{
|
||||
@@ -88,7 +98,7 @@ func TestRegisterAuthorizers(t *testing.T) {
|
||||
mockAuthorizer: &mockAuthorizer{},
|
||||
},
|
||||
},
|
||||
expectedRegisters: 3, // 1 from first installer + 2 from third installer
|
||||
expectedRegisters: 3, // 1 from first installer + 2 from second installer
|
||||
},
|
||||
}
|
||||
|
||||
@@ -96,6 +106,13 @@ func TestRegisterAuthorizers(t *testing.T) {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
ctx := context.Background()
|
||||
registrar := &mockAuthorizerRegistrar{}
|
||||
if tt.expectedPanic {
|
||||
defer func() {
|
||||
if r := recover(); r == nil {
|
||||
t.Errorf("%s case did not panic as expected", t.Name())
|
||||
}
|
||||
}()
|
||||
}
|
||||
RegisterAuthorizers(ctx, tt.appInstallers, registrar)
|
||||
require.Equal(t, tt.expectedRegisters, len(registrar.registrations))
|
||||
})
|
||||
|
||||
@@ -38,12 +38,12 @@ func NewGrafanaBuiltInSTAuthorizer(cfg *setting.Cfg) *GrafanaAuthorizer {
|
||||
|
||||
// Individual services may have explicit implementations
|
||||
apis := make(map[string]authorizer.Authorizer)
|
||||
// The apiVersion flavors will run first and can return early when FGAC has appropriate rules
|
||||
authorizers = append(authorizers, &authorizerForAPI{apis})
|
||||
|
||||
// org role is last -- and will return allow for verbs that match expectations
|
||||
// The apiVersion flavors will run first and can return early when FGAC has appropriate rules
|
||||
// NOTE: role authorizer is now used by some api groups as their specific authorizer
|
||||
// but there are still some apis not directly registered in the embedded delegate that benefit from including it here
|
||||
// org role authorizer is last -- and will return allow for verbs that match expectations
|
||||
// it is only helpful here for remote APIs in some cloud use-cases.
|
||||
//nolint:staticcheck // remove once build handler chains are untangled between local and remote APIs handling
|
||||
authorizers = append(authorizers, NewRoleAuthorizer())
|
||||
return &GrafanaAuthorizer{
|
||||
apis: apis,
|
||||
|
||||
@@ -19,6 +19,7 @@ var orgRoleNoneAsViewerAPIGroups = []string{
|
||||
|
||||
type roleAuthorizer struct{}
|
||||
|
||||
// Deprecated: NewRoleAuthorizer exists for apps that were launched with simplistic authorization requirements. Consider using NewResourceAuthorizer instead.
|
||||
func NewRoleAuthorizer() *roleAuthorizer {
|
||||
return &roleAuthorizer{}
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/apimachinery/pkg/runtime/serializer"
|
||||
"k8s.io/apiserver/pkg/audit"
|
||||
genericapifilters "k8s.io/apiserver/pkg/endpoints/filters"
|
||||
"k8s.io/apiserver/pkg/endpoints/responsewriter"
|
||||
genericapiserver "k8s.io/apiserver/pkg/server"
|
||||
@@ -113,6 +114,9 @@ type service struct {
|
||||
appInstallers []appsdkapiserver.AppInstaller
|
||||
builderMetrics *builder.BuilderMetrics
|
||||
dualWriterMetrics *grafanarest.DualWriterMetrics
|
||||
|
||||
auditBackend audit.Backend
|
||||
auditPolicyRuleEvaluator audit.PolicyRuleEvaluator
|
||||
}
|
||||
|
||||
func ProvideService(
|
||||
@@ -137,6 +141,8 @@ func ProvideService(
|
||||
aggregatorRunner aggregatorrunner.AggregatorRunner,
|
||||
appInstallers []appsdkapiserver.AppInstaller,
|
||||
builderMetrics *builder.BuilderMetrics,
|
||||
auditBackend audit.Backend,
|
||||
auditPolicyRuleEvaluator audit.PolicyRuleEvaluator,
|
||||
) (*service, error) {
|
||||
scheme := builder.ProvideScheme()
|
||||
codecs := builder.ProvideCodecFactory(scheme)
|
||||
@@ -167,6 +173,8 @@ func ProvideService(
|
||||
appInstallers: appInstallers,
|
||||
builderMetrics: builderMetrics,
|
||||
dualWriterMetrics: grafanarest.NewDualWriterMetrics(reg),
|
||||
auditBackend: auditBackend,
|
||||
auditPolicyRuleEvaluator: auditPolicyRuleEvaluator,
|
||||
}
|
||||
// This will be used when running as a dskit service
|
||||
s.NamedService = services.NewBasicService(s.start, s.running, nil).WithName(modules.GrafanaAPIServer)
|
||||
@@ -355,6 +363,10 @@ func (s *service) start(ctx context.Context) error {
|
||||
appinstaller.BuildOpenAPIDefGetter(s.appInstallers),
|
||||
}
|
||||
|
||||
// Auditing Options
|
||||
serverConfig.AuditBackend = s.auditBackend
|
||||
serverConfig.AuditPolicyRuleEvaluator = s.auditPolicyRuleEvaluator
|
||||
|
||||
// Add OpenAPI specs for each group+version (existing builders)
|
||||
err = builder.SetupConfig(
|
||||
s.scheme,
|
||||
|
||||
@@ -77,6 +77,10 @@ var (
|
||||
"user.sync.user-externalUID-mismatch",
|
||||
errutil.WithPublicMessage("User externalUID mismatch"),
|
||||
)
|
||||
errSCIMAuthModuleMismatch = errutil.Unauthorized(
|
||||
"user.sync.scim-auth-module-mismatch",
|
||||
errutil.WithPublicMessage("User was provisioned via SCIM and must login via SAML"),
|
||||
)
|
||||
)
|
||||
|
||||
var (
|
||||
@@ -308,6 +312,21 @@ func (s *UserSync) SyncUserHook(ctx context.Context, id *authn.Identity, _ *auth
|
||||
// just try to fetch the user one more to make the other request work.
|
||||
if errors.Is(err, user.ErrUserAlreadyExists) {
|
||||
usr, _, err = s.getUser(ctx, id)
|
||||
|
||||
// Check if this is a SCIM-provisioned user trying to login via an auth module that is not SAML or GCOM
|
||||
if err == nil && usr != nil && usr.IsProvisioned && id.AuthenticatedBy != login.GrafanaComAuthModule {
|
||||
_, authErr := s.authInfoService.GetAuthInfo(ctx, &login.GetAuthInfoQuery{
|
||||
UserId: usr.ID,
|
||||
AuthModule: id.AuthenticatedBy,
|
||||
})
|
||||
if errors.Is(authErr, user.ErrUserNotFound) {
|
||||
s.log.FromContext(ctx).Error("SCIM-provisioned user attempted login via non-SAML auth module",
|
||||
"user_id", usr.ID,
|
||||
"attempted_module", id.AuthenticatedBy,
|
||||
)
|
||||
return errSCIMAuthModuleMismatch.Errorf("user was provisioned via SCIM but attempted login via %s", id.AuthenticatedBy)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
|
||||
@@ -1926,3 +1926,100 @@ func TestUserSync_SCIMLoginUsageStatSet(t *testing.T) {
|
||||
finalCount := finalStats["stats.features.scim.has_successful_login.count"].(int)
|
||||
require.Equal(t, int(1), finalCount)
|
||||
}
|
||||
|
||||
func TestUserSync_SyncUserHook_SCIMAuthModuleMismatch(t *testing.T) {
|
||||
userSrv := usertest.NewMockService(t)
|
||||
authInfoSrv := authinfotest.NewMockAuthInfoService(t)
|
||||
|
||||
userSrv.On("GetByEmail", mock.Anything, mock.Anything).Return(nil, user.ErrUserNotFound).Once()
|
||||
|
||||
userSrv.On("Create", mock.Anything, mock.Anything).Return(nil, user.ErrUserAlreadyExists).Once()
|
||||
|
||||
userSrv.On("GetByEmail", mock.Anything, mock.Anything).Return(&user.User{
|
||||
ID: 1,
|
||||
Email: "test@test.com",
|
||||
IsProvisioned: true,
|
||||
}, nil).Once()
|
||||
|
||||
authInfoSrv.On("GetAuthInfo", mock.Anything, mock.MatchedBy(func(q *login.GetAuthInfoQuery) bool {
|
||||
return q.AuthModule == "oauth_azuread"
|
||||
})).Return(nil, user.ErrUserNotFound).Once()
|
||||
|
||||
s := ProvideUserSync(
|
||||
userSrv,
|
||||
authinfoimpl.ProvideOSSUserProtectionService(),
|
||||
authInfoSrv,
|
||||
"atest.FakeQuotaService{},
|
||||
tracing.NewNoopTracerService(),
|
||||
featuremgmt.WithFeatures(),
|
||||
setting.NewCfg(),
|
||||
nil,
|
||||
)
|
||||
|
||||
email := "test@test.com"
|
||||
|
||||
err := s.SyncUserHook(context.Background(), &authn.Identity{
|
||||
AuthenticatedBy: "oauth_azuread",
|
||||
ClientParams: authn.ClientParams{
|
||||
SyncUser: true,
|
||||
AllowSignUp: true,
|
||||
LookUpParams: login.UserLookupParams{
|
||||
Email: &email,
|
||||
},
|
||||
},
|
||||
}, nil)
|
||||
|
||||
require.Error(t, err)
|
||||
assert.ErrorIs(t, err, errSCIMAuthModuleMismatch)
|
||||
assert.Contains(t, err.Error(), "SCIM")
|
||||
assert.Contains(t, err.Error(), "oauth_azuread")
|
||||
}
|
||||
|
||||
func TestUserSync_SyncUserHook_SCIMUserAllowsGCOMLogin(t *testing.T) {
|
||||
userSrv := usertest.NewMockService(t)
|
||||
authInfoSrv := authinfotest.NewMockAuthInfoService(t)
|
||||
|
||||
authInfoSrv.On("GetAuthInfo", mock.Anything, mock.MatchedBy(func(q *login.GetAuthInfoQuery) bool {
|
||||
return q.AuthModule == login.GrafanaComAuthModule && q.AuthId == "gcom-user-123"
|
||||
})).Return(nil, user.ErrUserNotFound).Once()
|
||||
|
||||
userSrv.On("GetByEmail", mock.Anything, mock.Anything).Return(nil, user.ErrUserNotFound).Once()
|
||||
userSrv.On("Create", mock.Anything, mock.Anything).Return(nil, user.ErrUserAlreadyExists).Once()
|
||||
|
||||
authInfoSrv.On("GetAuthInfo", mock.Anything, mock.MatchedBy(func(q *login.GetAuthInfoQuery) bool {
|
||||
return q.AuthModule == login.GrafanaComAuthModule && q.AuthId == "gcom-user-123"
|
||||
})).Return(nil, user.ErrUserNotFound).Once()
|
||||
|
||||
userSrv.On("GetByEmail", mock.Anything, mock.Anything).Return(&user.User{
|
||||
ID: 1,
|
||||
Email: "test@test.com",
|
||||
IsProvisioned: true,
|
||||
}, nil).Once()
|
||||
|
||||
s := ProvideUserSync(
|
||||
userSrv,
|
||||
authinfoimpl.ProvideOSSUserProtectionService(),
|
||||
authInfoSrv,
|
||||
"atest.FakeQuotaService{},
|
||||
tracing.NewNoopTracerService(),
|
||||
featuremgmt.WithFeatures(),
|
||||
setting.NewCfg(),
|
||||
nil,
|
||||
)
|
||||
|
||||
email := "test@test.com"
|
||||
|
||||
err := s.SyncUserHook(context.Background(), &authn.Identity{
|
||||
AuthenticatedBy: login.GrafanaComAuthModule,
|
||||
AuthID: "gcom-user-123",
|
||||
ClientParams: authn.ClientParams{
|
||||
SyncUser: true,
|
||||
AllowSignUp: true,
|
||||
LookUpParams: login.UserLookupParams{
|
||||
Email: &email,
|
||||
},
|
||||
},
|
||||
}, nil)
|
||||
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
@@ -4,8 +4,12 @@ import (
|
||||
"google.golang.org/protobuf/types/known/structpb"
|
||||
|
||||
authzv1 "github.com/grafana/authlib/authz/proto/v1"
|
||||
|
||||
dashboardV1 "github.com/grafana/grafana/apps/dashboard/pkg/apis/dashboard/v1beta1"
|
||||
folders "github.com/grafana/grafana/apps/folder/pkg/apis/folder/v1beta1"
|
||||
iamv0alpha1 "github.com/grafana/grafana/apps/iam/pkg/apis/iam/v0alpha1"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
|
||||
)
|
||||
|
||||
@@ -44,7 +48,8 @@ func getTypeInfo(group, resource string) (typeInfo, bool) {
|
||||
|
||||
func NewResourceInfoFromCheck(r *authzv1.CheckRequest) ResourceInfo {
|
||||
typ, relations := getTypeAndRelations(r.GetGroup(), r.GetResource())
|
||||
return newResource(
|
||||
|
||||
resource := newResource(
|
||||
typ,
|
||||
r.GetGroup(),
|
||||
r.GetResource(),
|
||||
@@ -53,6 +58,19 @@ func NewResourceInfoFromCheck(r *authzv1.CheckRequest) ResourceInfo {
|
||||
r.GetSubresource(),
|
||||
relations,
|
||||
)
|
||||
|
||||
// Special case for creating folders and resources in the root folder
|
||||
if r.GetVerb() == utils.VerbCreate {
|
||||
if resource.IsFolderResource() && resource.name == "" {
|
||||
resource.name = accesscontrol.GeneralFolderUID
|
||||
} else if resource.HasFolderSupport() && resource.folder == "" {
|
||||
resource.folder = accesscontrol.GeneralFolderUID
|
||||
}
|
||||
|
||||
return resource
|
||||
}
|
||||
|
||||
return resource
|
||||
}
|
||||
|
||||
func NewResourceInfoFromBatchItem(i *authzextv1.BatchCheckItem) ResourceInfo {
|
||||
@@ -164,3 +182,15 @@ func (r ResourceInfo) IsValidRelation(relation string) bool {
|
||||
func (r ResourceInfo) HasSubresource() bool {
|
||||
return r.subresource != ""
|
||||
}
|
||||
|
||||
var resourcesWithFolderSupport = map[string]bool{
|
||||
dashboardV1.DashboardResourceInfo.GroupResource().Group: true,
|
||||
}
|
||||
|
||||
func (r ResourceInfo) HasFolderSupport() bool {
|
||||
return resourcesWithFolderSupport[r.group]
|
||||
}
|
||||
|
||||
func (r ResourceInfo) IsFolderResource() bool {
|
||||
return r.group == folders.FolderResourceInfo.GroupResource().Group
|
||||
}
|
||||
|
||||
@@ -228,6 +228,9 @@ func TranslateToResourceTuple(subject string, action, kind, name string) (*openf
|
||||
}
|
||||
|
||||
if name == "*" {
|
||||
if m.group != "" && m.resource != "" {
|
||||
return NewGroupResourceTuple(subject, m.relation, m.group, m.resource, m.subresource), true
|
||||
}
|
||||
return NewGroupResourceTuple(subject, m.relation, translation.group, translation.resource, m.subresource), true
|
||||
}
|
||||
|
||||
|
||||
89
pkg/services/authz/zanzana/common/tuple_test.go
Normal file
89
pkg/services/authz/zanzana/common/tuple_test.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package common
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
openfgav1 "github.com/openfga/api/proto/openfga/v1"
|
||||
"github.com/stretchr/testify/require"
|
||||
"google.golang.org/protobuf/types/known/structpb"
|
||||
)
|
||||
|
||||
type translationTestCase struct {
|
||||
testName string
|
||||
subject string
|
||||
action string
|
||||
kind string
|
||||
name string
|
||||
expected *openfgav1.TupleKey
|
||||
}
|
||||
|
||||
func TestTranslateToResourceTuple(t *testing.T) {
|
||||
tests := []translationTestCase{
|
||||
{
|
||||
testName: "dashboards:read in folders",
|
||||
subject: "user:1",
|
||||
action: "dashboards:read",
|
||||
kind: "folders",
|
||||
name: "*",
|
||||
expected: &openfgav1.TupleKey{
|
||||
User: "user:1",
|
||||
Relation: "get",
|
||||
Object: "group_resource:dashboard.grafana.app/dashboards",
|
||||
},
|
||||
},
|
||||
{
|
||||
testName: "dashboards:read for all dashboards",
|
||||
subject: "user:1",
|
||||
action: "dashboards:read",
|
||||
kind: "dashboards",
|
||||
name: "*",
|
||||
expected: &openfgav1.TupleKey{
|
||||
User: "user:1",
|
||||
Relation: "get",
|
||||
Object: "group_resource:dashboard.grafana.app/dashboards",
|
||||
},
|
||||
},
|
||||
{
|
||||
testName: "dashboards:read for general folder",
|
||||
subject: "user:1",
|
||||
action: "dashboards:read",
|
||||
kind: "folders",
|
||||
name: "general",
|
||||
expected: &openfgav1.TupleKey{
|
||||
User: "user:1",
|
||||
Relation: "resource_get",
|
||||
Object: "folder:general",
|
||||
Condition: &openfgav1.RelationshipCondition{
|
||||
Name: "subresource_filter",
|
||||
Context: &structpb.Struct{
|
||||
Fields: map[string]*structpb.Value{
|
||||
"subresources": structpb.NewListValue(&structpb.ListValue{
|
||||
Values: []*structpb.Value{structpb.NewStringValue("dashboard.grafana.app/dashboards")},
|
||||
}),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
testName: "folders:read",
|
||||
subject: "user:1",
|
||||
action: "folders:read",
|
||||
kind: "folders",
|
||||
name: "*",
|
||||
expected: &openfgav1.TupleKey{
|
||||
User: "user:1",
|
||||
Relation: "get",
|
||||
Object: "group_resource:folder.grafana.app/folders",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.testName, func(t *testing.T) {
|
||||
tuple, ok := TranslateToResourceTuple(test.subject, test.action, test.kind, test.name)
|
||||
require.True(t, ok)
|
||||
require.EqualExportedValues(t, test.expected, tuple)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -212,4 +212,16 @@ func testCheck(t *testing.T, server *Server) {
|
||||
require.NoError(t, err)
|
||||
assert.True(t, res.GetAllowed(), "user should be able to view dashboards in folder 6")
|
||||
})
|
||||
|
||||
t.Run("user:18 should be able to create folder in root folder", func(t *testing.T) {
|
||||
res, err := server.Check(newContextWithNamespace(), newReq("user:18", utils.VerbCreate, folderGroup, folderResource, "", "", ""))
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, true, res.GetAllowed())
|
||||
})
|
||||
|
||||
t.Run("user:18 should be able to create dashboard in root folder", func(t *testing.T) {
|
||||
res, err := server.Check(newContextWithNamespace(), newReq("user:18", utils.VerbCreate, dashboardGroup, dashboardResource, "", "", ""))
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, true, res.GetAllowed())
|
||||
})
|
||||
}
|
||||
|
||||
@@ -71,6 +71,8 @@ func setup(t *testing.T, srv *Server) *Server {
|
||||
common.NewTypedResourceTuple("user:15", common.RelationGet, common.TypeUser, userGroup, userResource, statusSubresource, "1"),
|
||||
common.NewTypedResourceTuple("user:16", common.RelationGet, common.TypeServiceAccount, serviceAccountGroup, serviceAccountResource, statusSubresource, "1"),
|
||||
common.NewFolderTuple("user:17", common.RelationSetView, "4"),
|
||||
common.NewFolderTuple("user:18", common.RelationCreate, "general"),
|
||||
common.NewFolderResourceTuple("user:18", common.RelationCreate, dashboardGroup, dashboardResource, "", "general"),
|
||||
}
|
||||
|
||||
return setupOpenFGADatabase(t, srv, tuples)
|
||||
|
||||
@@ -304,8 +304,15 @@ type DeleteDashboardCommand struct {
|
||||
RemovePermissions bool
|
||||
}
|
||||
|
||||
type ProvisioningConfig struct {
|
||||
Name string
|
||||
OrgID int64
|
||||
Folder string
|
||||
AllowUIUpdates bool
|
||||
}
|
||||
|
||||
type DeleteOrphanedProvisionedDashboardsCommand struct {
|
||||
ReaderNames []string
|
||||
Config []ProvisioningConfig
|
||||
}
|
||||
|
||||
type DashboardProvisioningSearchResults struct {
|
||||
@@ -405,6 +412,8 @@ type DashboardSearchProjection struct {
|
||||
FolderTitle string
|
||||
SortMeta int64
|
||||
Tags []string
|
||||
ManagedBy utils.ManagerKind
|
||||
ManagerId string
|
||||
Deleted *time.Time
|
||||
}
|
||||
|
||||
|
||||
@@ -877,24 +877,32 @@ func (dr *DashboardServiceImpl) waitForSearchQuery(ctx context.Context, query *d
|
||||
}
|
||||
|
||||
func (dr *DashboardServiceImpl) DeleteOrphanedProvisionedDashboards(ctx context.Context, cmd *dashboards.DeleteOrphanedProvisionedDashboardsCommand) error {
|
||||
// cleanup duplicate provisioned dashboards first (this will have the same name and external_id)
|
||||
// note: only works in modes 1-3
|
||||
if err := dr.DeleteDuplicateProvisionedDashboards(ctx); err != nil {
|
||||
dr.log.Error("Failed to delete duplicate provisioned dashboards", "error", err)
|
||||
}
|
||||
|
||||
// check each org for orphaned provisioned dashboards
|
||||
orgs, err := dr.orgService.Search(ctx, &org.SearchOrgsQuery{})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
orgIDs := make([]int64, 0, len(orgs))
|
||||
for _, org := range orgs {
|
||||
orgIDs = append(orgIDs, org.ID)
|
||||
}
|
||||
|
||||
if err := dr.DeleteDuplicateProvisionedDashboards(ctx, orgIDs, cmd.Config); err != nil {
|
||||
dr.log.Error("Failed to delete duplicate provisioned dashboards", "error", err)
|
||||
}
|
||||
|
||||
currentNames := make([]string, 0, len(cmd.Config))
|
||||
for _, cfg := range cmd.Config {
|
||||
currentNames = append(currentNames, cfg.Name)
|
||||
}
|
||||
|
||||
for _, org := range orgs {
|
||||
ctx, _ := identity.WithServiceIdentity(ctx, org.ID)
|
||||
// find all dashboards in the org that have a file repo set that is not in the given readers list
|
||||
foundDashs, err := dr.searchProvisionedDashboardsThroughK8s(ctx, &dashboards.FindPersistedDashboardsQuery{
|
||||
ManagedBy: utils.ManagerKindClassicFP, //nolint:staticcheck
|
||||
ManagerIdentityNotIn: cmd.ReaderNames,
|
||||
ManagerIdentityNotIn: currentNames,
|
||||
OrgId: org.ID,
|
||||
})
|
||||
if err != nil {
|
||||
@@ -921,7 +929,129 @@ func (dr *DashboardServiceImpl) DeleteOrphanedProvisionedDashboards(ctx context.
|
||||
return nil
|
||||
}
|
||||
|
||||
func (dr *DashboardServiceImpl) DeleteDuplicateProvisionedDashboards(ctx context.Context) error {
|
||||
// searchExistingProvisionedData fetches provisioned data for the purposes of
|
||||
// duplication cleanup. Returns the set of folder UIDs for folders with the
|
||||
// given title, and the set of resources contained in those folders.
|
||||
func (dr *DashboardServiceImpl) searchExistingProvisionedData(
|
||||
ctx context.Context, orgID int64, folderTitle string,
|
||||
) ([]string, []dashboards.DashboardSearchProjection, error) {
|
||||
ctx, user := identity.WithServiceIdentity(ctx, orgID)
|
||||
cmd := folder.SearchFoldersQuery{
|
||||
OrgID: orgID,
|
||||
SignedInUser: user,
|
||||
Title: folderTitle,
|
||||
TitleExactMatch: true,
|
||||
}
|
||||
|
||||
searchResults, err := dr.folderService.SearchFolders(ctx, cmd)
|
||||
if err != nil {
|
||||
return nil, nil, fmt.Errorf("checking if provisioning reset is required: %w", err)
|
||||
}
|
||||
|
||||
var matchingFolders []string //nolint:prealloc
|
||||
for _, result := range searchResults {
|
||||
f, err := dr.folderService.Get(ctx, &folder.GetFolderQuery{
|
||||
OrgID: orgID,
|
||||
UID: &result.UID,
|
||||
SignedInUser: user,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// We are only interested in folders at the top-level of the folder hierarchy.
|
||||
// Cleanup is not performed for provisioned folders that were moved to
|
||||
// a different location.
|
||||
if f.ParentUID != "" {
|
||||
continue
|
||||
}
|
||||
|
||||
matchingFolders = append(matchingFolders, f.UID)
|
||||
}
|
||||
|
||||
if len(matchingFolders) == 0 {
|
||||
// If there are no folders with the same title as the provisioned folder we
|
||||
// are looking for, there is nothing to be cleaned up.
|
||||
return nil, nil, nil
|
||||
}
|
||||
|
||||
resources, err := dr.FindDashboards(ctx, &dashboards.FindPersistedDashboardsQuery{
|
||||
OrgId: orgID,
|
||||
SignedInUser: user,
|
||||
FolderUIDs: matchingFolders,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return matchingFolders, resources, nil
|
||||
}
|
||||
|
||||
// maybeResetProvisioning will check for duplicated provisioned dashboards in the database. These duplications
|
||||
// happen when multiple provisioned dashboards of the same title are found, or multiple provisioned
|
||||
// folders are found. In this case, provisioned resources are deleted, allowing the provisioning
|
||||
// process to start from scratch after this function returns.
|
||||
func (dr *DashboardServiceImpl) maybeResetProvisioning(ctx context.Context, orgs []int64, configs []dashboards.ProvisioningConfig) {
|
||||
if skipReason := canBeAutomaticallyCleanedUp(configs); skipReason != "" {
|
||||
dr.log.Info("not eligible for automated cleanup", "reason", skipReason)
|
||||
return
|
||||
}
|
||||
|
||||
folderTitle := configs[0].Folder
|
||||
provisionedNames := map[string]bool{}
|
||||
for _, c := range configs {
|
||||
provisionedNames[c.Name] = true
|
||||
}
|
||||
|
||||
for _, orgID := range orgs {
|
||||
ctx, user := identity.WithServiceIdentity(ctx, orgID)
|
||||
provFolders, resources, err := dr.searchExistingProvisionedData(ctx, orgID, folderTitle)
|
||||
if err != nil {
|
||||
dr.log.Error("failed to search for provisioned data for cleanup", "org", orgID, "error", err)
|
||||
continue
|
||||
}
|
||||
|
||||
steps, err := cleanupSteps(provFolders, resources, provisionedNames)
|
||||
if err != nil {
|
||||
dr.log.Warn("not possible to perform automated duplicate cleanup", "org", orgID, "error", err)
|
||||
continue
|
||||
}
|
||||
|
||||
for _, step := range steps {
|
||||
var err error
|
||||
|
||||
switch step.Type {
|
||||
case searchstore.TypeDashboard:
|
||||
err = dr.deleteDashboard(ctx, 0, step.UID, orgID, false)
|
||||
case searchstore.TypeFolder:
|
||||
err = dr.folderService.Delete(ctx, &folder.DeleteFolderCommand{
|
||||
OrgID: orgID,
|
||||
SignedInUser: user,
|
||||
UID: step.UID,
|
||||
})
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
dr.log.Info("deleted duplicated provisioned resource",
|
||||
"type", step.Type, "uid", step.UID,
|
||||
)
|
||||
} else {
|
||||
dr.log.Error("failed to delete duplicated provisioned resource",
|
||||
"type", step.Type, "uid", step.UID, "error", err,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (dr *DashboardServiceImpl) DeleteDuplicateProvisionedDashboards(ctx context.Context, orgs []int64, configs []dashboards.ProvisioningConfig) error {
|
||||
// Start from scratch if duplications that cannot be fixed by the logic
|
||||
// below are found in the database.
|
||||
dr.maybeResetProvisioning(ctx, orgs, configs)
|
||||
|
||||
// cleanup duplicate provisioned dashboards (i.e., with the same name and external_id).
|
||||
// Note: only works in modes 1-3. This logic can be removed once mode5 is
|
||||
// enabled everywhere.
|
||||
duplicates, err := dr.dashboardStore.GetDuplicateProvisionedDashboards(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
@@ -1511,6 +1641,8 @@ func (dr *DashboardServiceImpl) FindDashboards(ctx context.Context, query *dashb
|
||||
FolderTitle: folderTitle,
|
||||
FolderID: folderID,
|
||||
FolderSlug: slugify.Slugify(folderTitle),
|
||||
ManagedBy: hit.ManagedBy.Kind,
|
||||
ManagerId: hit.ManagedBy.ID,
|
||||
Tags: hit.Tags,
|
||||
}
|
||||
|
||||
|
||||
@@ -779,7 +779,7 @@ func TestDeleteOrphanedProvisionedDashboards(t *testing.T) {
|
||||
}, nil).Twice()
|
||||
|
||||
err := service.DeleteOrphanedProvisionedDashboards(context.Background(), &dashboards.DeleteOrphanedProvisionedDashboardsCommand{
|
||||
ReaderNames: []string{"test"},
|
||||
Config: []dashboards.ProvisioningConfig{{Name: "test"}},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
k8sCliMock.AssertExpectations(t)
|
||||
@@ -874,7 +874,7 @@ func TestDeleteOrphanedProvisionedDashboards(t *testing.T) {
|
||||
}, nil).Once()
|
||||
|
||||
err := singleOrgService.DeleteOrphanedProvisionedDashboards(ctx, &dashboards.DeleteOrphanedProvisionedDashboardsCommand{
|
||||
ReaderNames: []string{"test"},
|
||||
Config: []dashboards.ProvisioningConfig{{Name: "test"}},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
k8sCliMock.AssertExpectations(t)
|
||||
@@ -906,7 +906,7 @@ func TestDeleteOrphanedProvisionedDashboards(t *testing.T) {
|
||||
}, nil)
|
||||
|
||||
err := singleOrgService.DeleteOrphanedProvisionedDashboards(ctx, &dashboards.DeleteOrphanedProvisionedDashboardsCommand{
|
||||
ReaderNames: []string{"test"},
|
||||
Config: []dashboards.ProvisioningConfig{{Name: "test"}},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
k8sCliMock.AssertExpectations(t)
|
||||
|
||||
107
pkg/services/dashboards/service/provisioning_cleanup.go
Normal file
107
pkg/services/dashboards/service/provisioning_cleanup.go
Normal file
@@ -0,0 +1,107 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/searchstore"
|
||||
)
|
||||
|
||||
// canBeAutomaticallyCleanedUp determines whether this instance can be automatically cleaned up
|
||||
// if duplicated provisioned resources are found. To ensure the process does not delete
|
||||
// resources it shouldn't, automatic cleanups only happen if all provisioned dashboards
|
||||
// are stored in the same folder (by title), and no dashboards allow UI updates.
|
||||
func canBeAutomaticallyCleanedUp(configs []dashboards.ProvisioningConfig) string {
|
||||
if len(configs) == 0 {
|
||||
return "no provisioned dashboards"
|
||||
}
|
||||
|
||||
folderTitle := configs[0].Folder
|
||||
if len(folderTitle) == 0 {
|
||||
return fmt.Sprintf("dashboard has no folder: %s", configs[0].Name)
|
||||
}
|
||||
|
||||
for _, cfg := range configs {
|
||||
if cfg.AllowUIUpdates {
|
||||
return "contains dashboards with allowUiUpdates"
|
||||
}
|
||||
|
||||
if cfg.Folder != folderTitle {
|
||||
return "dashboards provisioned across multiple folders"
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
type deleteProvisionedResource struct {
|
||||
Type string
|
||||
UID string
|
||||
}
|
||||
|
||||
// cleanupSteps computes the sequence of steps to be performed in order to cleanup the
|
||||
// provisioning resources and allow the process to start from scratch when duplication
|
||||
// is detected. The sequence of steps will dictate the order in which dashboards and folders
|
||||
// are to be deleted.
|
||||
func cleanupSteps(provFolders []string, resources []dashboards.DashboardSearchProjection, configDashboards map[string]bool) ([]deleteProvisionedResource, error) {
|
||||
var hasDuplicatedProvisionedDashboard bool
|
||||
var hasUserCreatedResource bool
|
||||
var uniqueNames = map[string]struct{}{}
|
||||
var deleteProvisionedDashboards []deleteProvisionedResource //nolint:prealloc
|
||||
|
||||
for _, r := range resources {
|
||||
// nolint:staticcheck
|
||||
if r.IsFolder || r.ManagedBy != utils.ManagerKindClassicFP {
|
||||
hasUserCreatedResource = true
|
||||
continue
|
||||
}
|
||||
|
||||
// Only delete dashboards if they are included in the provisioning configuration
|
||||
// for this instance.
|
||||
if !configDashboards[r.ManagerId] {
|
||||
continue
|
||||
}
|
||||
|
||||
if _, exists := uniqueNames[r.ManagerId]; exists {
|
||||
hasDuplicatedProvisionedDashboard = true
|
||||
}
|
||||
|
||||
uniqueNames[r.ManagerId] = struct{}{}
|
||||
deleteProvisionedDashboards = append(deleteProvisionedDashboards, deleteProvisionedResource{
|
||||
Type: searchstore.TypeDashboard,
|
||||
UID: r.UID,
|
||||
})
|
||||
}
|
||||
|
||||
if len(provFolders) == 0 {
|
||||
// When there are no provisioned folders, there is nothing to do.
|
||||
return nil, nil
|
||||
} else if len(provFolders) == 1 {
|
||||
// If only one folder was found, keep it and delete the provisioned dashboards if
|
||||
// duplication was found.
|
||||
if hasDuplicatedProvisionedDashboard {
|
||||
return deleteProvisionedDashboards, nil
|
||||
}
|
||||
} else {
|
||||
// If multiple folders were found *and* a user-created resource exists in
|
||||
// one of them, bail, as we wouldn't be able to delete one of the duplicated folders.
|
||||
if hasUserCreatedResource {
|
||||
return nil, errors.New("multiple provisioning folders exist with at least one user-created resource")
|
||||
}
|
||||
|
||||
// Delete provisioned dashboards first, and then the folders.
|
||||
steps := deleteProvisionedDashboards
|
||||
for _, uid := range provFolders {
|
||||
steps = append(steps, deleteProvisionedResource{
|
||||
Type: searchstore.TypeFolder,
|
||||
UID: uid,
|
||||
})
|
||||
}
|
||||
|
||||
return steps, nil
|
||||
}
|
||||
|
||||
return nil, nil
|
||||
}
|
||||
279
pkg/services/dashboards/service/provisioning_cleanup_test.go
Normal file
279
pkg/services/dashboards/service/provisioning_cleanup_test.go
Normal file
@@ -0,0 +1,279 @@
|
||||
package service
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore/searchstore"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_canBeAutomaticallyCleanedUp(t *testing.T) {
|
||||
testCases := []struct {
|
||||
name string
|
||||
configs []dashboards.ProvisioningConfig
|
||||
expectedSkip string
|
||||
}{
|
||||
{
|
||||
name: "no dashboards defined in the configuration",
|
||||
configs: []dashboards.ProvisioningConfig{},
|
||||
expectedSkip: "no provisioned dashboards",
|
||||
},
|
||||
{
|
||||
name: "first defined dashboard has no folder defined",
|
||||
configs: []dashboards.ProvisioningConfig{
|
||||
{Name: "1", Folder: ""},
|
||||
{Folder: "f1"},
|
||||
},
|
||||
expectedSkip: "dashboard has no folder: 1",
|
||||
},
|
||||
{
|
||||
name: "one of the provisioned dashboards has no folder defined",
|
||||
configs: []dashboards.ProvisioningConfig{
|
||||
{Name: "1", Folder: "f1"},
|
||||
{Name: "2", Folder: "f1"},
|
||||
{Name: "3", Folder: ""},
|
||||
{Name: "4", Folder: "f1"},
|
||||
},
|
||||
expectedSkip: "dashboards provisioned across multiple folders",
|
||||
},
|
||||
{
|
||||
name: "one of the provisioned dashboards allows UI updates",
|
||||
configs: []dashboards.ProvisioningConfig{
|
||||
{Name: "1", Folder: "f1"},
|
||||
{Name: "2", Folder: "f1", AllowUIUpdates: true},
|
||||
{Name: "3", Folder: "f1"},
|
||||
{Name: "4", Folder: "f1"},
|
||||
},
|
||||
expectedSkip: "contains dashboards with allowUiUpdates",
|
||||
},
|
||||
{
|
||||
name: "one of the provisioned dashboards is in a different folder",
|
||||
configs: []dashboards.ProvisioningConfig{
|
||||
{Name: "1", Folder: "f1"},
|
||||
{Name: "2", Folder: "f1"},
|
||||
{Name: "3", Folder: "f1"},
|
||||
{Name: "4", Folder: "different"},
|
||||
},
|
||||
expectedSkip: "dashboards provisioned across multiple folders",
|
||||
},
|
||||
{
|
||||
name: "can be skipped when all conditions are met",
|
||||
configs: []dashboards.ProvisioningConfig{
|
||||
{Name: "1", Folder: "f1"},
|
||||
{Name: "2", Folder: "f1"},
|
||||
{Name: "3", Folder: "f1"},
|
||||
{Name: "4", Folder: "f1"},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
require.Equal(t, tc.expectedSkip, canBeAutomaticallyCleanedUp(tc.configs))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func Test_cleanupSteps(t *testing.T) {
|
||||
isDashboard, isFolder := false, true
|
||||
|
||||
fromUser := func(uid, name string, isFolder bool) dashboards.DashboardSearchProjection {
|
||||
return dashboards.DashboardSearchProjection{
|
||||
UID: uid,
|
||||
ManagerId: name,
|
||||
IsFolder: isFolder,
|
||||
}
|
||||
}
|
||||
|
||||
provisioned := func(uid, name string, isFolder bool) dashboards.DashboardSearchProjection {
|
||||
dashboard := fromUser(uid, name, isFolder)
|
||||
dashboard.ManagedBy = utils.ManagerKindClassicFP //nolint:staticcheck
|
||||
return dashboard
|
||||
}
|
||||
|
||||
testCases := []struct {
|
||||
name string
|
||||
provisionedFolders []string
|
||||
provisionedResources []dashboards.DashboardSearchProjection
|
||||
configDashboards []string
|
||||
expectedSteps []deleteProvisionedResource
|
||||
expectedErr string
|
||||
}{
|
||||
{
|
||||
name: "no provisioned folders, nothing to do",
|
||||
provisionedFolders: []string{},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "multiple folders, a user-created dashboard in one of them",
|
||||
provisionedFolders: []string{"folder1", "folder2"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
fromUser("d3", "User1", isDashboard),
|
||||
provisioned("d4", "Provisioned3", isDashboard),
|
||||
},
|
||||
expectedErr: "multiple provisioning folders exist with at least one user-created resource",
|
||||
},
|
||||
{
|
||||
name: "multiple folders, a user-created folder in one of them",
|
||||
provisionedFolders: []string{"folder1", "folder2"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3", "Provisioned4"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
provisioned("d3", "Provisioned3", isDashboard),
|
||||
fromUser("f1", "UserFolder1", isFolder),
|
||||
},
|
||||
expectedErr: "multiple provisioning folders exist with at least one user-created resource",
|
||||
},
|
||||
{
|
||||
name: "single folder, some dashboards duplicated",
|
||||
provisionedFolders: []string{"folder1"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3", "Provisioned4"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
// Provisioned1 is duplicated.
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
provisioned("d3", "Provisioned1", isDashboard),
|
||||
provisioned("d4", "Provisioned3", isDashboard),
|
||||
},
|
||||
expectedSteps: []deleteProvisionedResource{
|
||||
{Type: searchstore.TypeDashboard, UID: "d1"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d2"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d3"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d4"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "single folder, duplicated dashboards, user-created dashboards are ignored",
|
||||
provisionedFolders: []string{"folder1"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3", "Provisioned4"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
// Provisioned1 is duplicated.
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
fromUser("d3", "User1", isDashboard),
|
||||
provisioned("d4", "Provisioned3", isDashboard),
|
||||
provisioned("d5", "Provisioned1", isDashboard),
|
||||
},
|
||||
// User dashboard (d3) is not deleted.
|
||||
expectedSteps: []deleteProvisionedResource{
|
||||
{Type: searchstore.TypeDashboard, UID: "d1"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d2"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d4"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d5"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "single folder, duplicated dashboards, user-created folders are ignored",
|
||||
provisionedFolders: []string{"folder1"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
// Provisioned1 is duplicated.
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
provisioned("d3", "Provisioned3", isDashboard),
|
||||
provisioned("d4", "Provisioned1", isDashboard),
|
||||
fromUser("f1", "UserFolder1", isFolder),
|
||||
},
|
||||
// User folder (f1) is not deleted.
|
||||
expectedSteps: []deleteProvisionedResource{
|
||||
{Type: searchstore.TypeDashboard, UID: "d1"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d2"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d3"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d4"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "multiple folders, only provisioned dashboards",
|
||||
provisionedFolders: []string{"folder1", "folder2"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3", "Provisioned4"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
provisioned("d3", "Provisioned3", isDashboard),
|
||||
provisioned("d4", "Provisioned4", isDashboard),
|
||||
},
|
||||
// Delete all dashboards, then all folders.
|
||||
expectedSteps: []deleteProvisionedResource{
|
||||
{Type: searchstore.TypeDashboard, UID: "d1"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d2"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d3"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d4"},
|
||||
{Type: searchstore.TypeFolder, UID: "folder1"},
|
||||
{Type: searchstore.TypeFolder, UID: "folder2"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "single folder, only deletes dashboards defined in the config file",
|
||||
provisionedFolders: []string{"folder1"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
provisioned("d3", "Provisioned1", isDashboard),
|
||||
provisioned("d4", "Provisioned4", isDashboard),
|
||||
provisioned("d5", "Provisioned4", isDashboard),
|
||||
},
|
||||
// Delete duplicated dashboards, but keep Provisioned4, since it's not in the config file.
|
||||
expectedSteps: []deleteProvisionedResource{
|
||||
{Type: searchstore.TypeDashboard, UID: "d1"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d2"},
|
||||
{Type: searchstore.TypeDashboard, UID: "d3"},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "single folder, no duplicated dashboards",
|
||||
provisionedFolders: []string{"folder1"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3", "Provisioned4"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
provisioned("d3", "Provisioned3", isDashboard),
|
||||
provisioned("d4", "Provisioned4", isDashboard),
|
||||
},
|
||||
expectedSteps: nil, // no duplicates, nothing to do
|
||||
},
|
||||
{
|
||||
name: "single folder, no duplicated dashboards, multiple user-created resources",
|
||||
provisionedFolders: []string{"folder1"},
|
||||
configDashboards: []string{"Provisioned1", "Provisioned2", "Provisioned3", "Provisioned4"},
|
||||
provisionedResources: []dashboards.DashboardSearchProjection{
|
||||
provisioned("d1", "Provisioned1", isDashboard),
|
||||
provisioned("d2", "Provisioned2", isDashboard),
|
||||
fromUser("f1", "UserFolder1", isFolder),
|
||||
provisioned("d3", "Provisioned3", isDashboard),
|
||||
fromUser("d4", "User1", isDashboard),
|
||||
provisioned("d5", "Provisioned4", isDashboard),
|
||||
fromUser("d6", "User2", isDashboard),
|
||||
fromUser("f2", "UserFolder2", isFolder),
|
||||
},
|
||||
expectedSteps: nil, // no duplicates in the provisioned set, nothing to do
|
||||
},
|
||||
}
|
||||
|
||||
for _, tc := range testCases {
|
||||
t.Run(tc.name, func(t *testing.T) {
|
||||
provisionedSet := make(map[string]bool)
|
||||
for _, name := range tc.configDashboards {
|
||||
provisionedSet[name] = true
|
||||
}
|
||||
|
||||
steps, err := cleanupSteps(tc.provisionedFolders, tc.provisionedResources, provisionedSet)
|
||||
if tc.expectedErr == "" {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, tc.expectedSteps, steps)
|
||||
} else {
|
||||
require.Error(t, err)
|
||||
require.Equal(t, tc.expectedErr, err.Error())
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -202,6 +202,11 @@ func (s *Service) searchFoldersFromApiServer(ctx context.Context, query folder.S
|
||||
if query.Title != "" {
|
||||
// allow wildcard search
|
||||
request.Query = "*" + strings.ToLower(query.Title) + "*"
|
||||
// or perform exact match if requested
|
||||
if query.TitleExactMatch {
|
||||
request.Query = query.Title
|
||||
}
|
||||
|
||||
// if using query, you need to specify the fields you want
|
||||
request.Fields = dashboardsearch.IncludeFields
|
||||
}
|
||||
|
||||
@@ -224,12 +224,13 @@ type GetFoldersQuery struct {
|
||||
}
|
||||
|
||||
type SearchFoldersQuery struct {
|
||||
OrgID int64
|
||||
UIDs []string
|
||||
IDs []int64
|
||||
Title string
|
||||
Limit int64
|
||||
SignedInUser identity.Requester `json:"-"`
|
||||
OrgID int64
|
||||
UIDs []string
|
||||
IDs []int64
|
||||
Title string
|
||||
TitleExactMatch bool
|
||||
Limit int64
|
||||
SignedInUser identity.Requester `json:"-"`
|
||||
}
|
||||
|
||||
// GetParentsQuery captures the information required by the folder service to
|
||||
|
||||
@@ -412,11 +412,16 @@ func (srv RulerSrv) RoutePostNameRulesConfig(c *contextmodel.ReqContext, ruleGro
|
||||
deletePermanently = true
|
||||
}
|
||||
|
||||
namespace, err := srv.store.GetNamespaceByUID(c.Req.Context(), namespaceUID, c.GetOrgID(), c.SignedInUser)
|
||||
f, err := srv.store.GetNamespaceByUID(c.Req.Context(), namespaceUID, c.GetOrgID(), c.SignedInUser)
|
||||
if err != nil {
|
||||
return toNamespaceErrorResponse(err)
|
||||
}
|
||||
|
||||
namespace := ngmodels.NewNamespace(f)
|
||||
if err := namespace.ValidateForRuleStorage(); err != nil {
|
||||
return ErrResp(http.StatusBadRequest, fmt.Errorf("%w: %s", ngmodels.ErrAlertRuleFailedValidation, err), "")
|
||||
}
|
||||
|
||||
if err := srv.checkGroupLimits(ruleGroupConfig); err != nil {
|
||||
return ErrResp(http.StatusBadRequest, err, "")
|
||||
}
|
||||
@@ -841,10 +846,14 @@ func (srv RulerSrv) RouteUpdateNamespaceRules(c *contextmodel.ReqContext, body a
|
||||
return ErrResp(http.StatusBadRequest, errors.New("missing request body"), "")
|
||||
}
|
||||
|
||||
namespace, err := srv.store.GetNamespaceByUID(c.Req.Context(), namespaceUID, c.GetOrgID(), c.SignedInUser)
|
||||
f, err := srv.store.GetNamespaceByUID(c.Req.Context(), namespaceUID, c.GetOrgID(), c.SignedInUser)
|
||||
if err != nil {
|
||||
return toNamespaceErrorResponse(err)
|
||||
}
|
||||
namespace := ngmodels.NewNamespace(f)
|
||||
if err := namespace.ValidateForRuleStorage(); err != nil {
|
||||
return ErrResp(http.StatusBadRequest, fmt.Errorf("%w: %s", ngmodels.ErrAlertRuleFailedValidation, err), "")
|
||||
}
|
||||
|
||||
ruleGroups, _, err := srv.searchAuthorizedAlertRules(c.Req.Context(), authorizedRuleGroupQuery{
|
||||
User: c.SignedInUser,
|
||||
|
||||
@@ -18,6 +18,7 @@ import (
|
||||
"github.com/stretchr/testify/mock"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
ac "github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol/acimpl"
|
||||
@@ -1288,4 +1289,64 @@ func TestRouteUpdateNamespaceRules(t *testing.T) {
|
||||
updatedRules := getRecordedUpdatedRules(ruleStore)
|
||||
require.Empty(t, updatedRules)
|
||||
})
|
||||
|
||||
t.Run("should reject update when folder is managed by ManagerKindRepo", func(t *testing.T) {
|
||||
ruleStore := fakes.NewRuleStore(t)
|
||||
provisioningStore := fakes.NewFakeProvisioningStore()
|
||||
|
||||
// Create a managed folder
|
||||
managedFolder := randFolder()
|
||||
managedFolder.ManagedBy = utils.ManagerKindRepo
|
||||
ruleStore.Folders[orgID] = append(ruleStore.Folders[orgID], managedFolder)
|
||||
|
||||
// Create some rules in the managed folder
|
||||
ruleGen := models.RuleGen.With(
|
||||
models.RuleGen.WithOrgID(orgID),
|
||||
models.RuleGen.WithNamespaceUID(managedFolder.UID),
|
||||
)
|
||||
rules := ruleGen.GenerateManyRef(2)
|
||||
ruleStore.PutRule(context.Background(), rules...)
|
||||
|
||||
permissions := createPermissionsForRules(rules, orgID)
|
||||
requestCtx := createRequestContextWithPerms(orgID, permissions, nil)
|
||||
|
||||
svc := createServiceWithProvenanceStore(ruleStore, provisioningStore)
|
||||
response := svc.RouteUpdateNamespaceRules(requestCtx, apimodels.UpdateNamespaceRulesRequest{
|
||||
IsPaused: util.Pointer(true),
|
||||
}, managedFolder.UID)
|
||||
|
||||
require.Equal(t, http.StatusBadRequest, response.Status())
|
||||
require.Contains(t, string(response.Body()), "cannot store rules in folder managed by Git Sync")
|
||||
|
||||
// Verify no rules were updated
|
||||
updatedRules := getRecordedUpdatedRules(ruleStore)
|
||||
require.Empty(t, updatedRules)
|
||||
})
|
||||
}
|
||||
|
||||
func TestRoutePostNameRulesConfig(t *testing.T) {
|
||||
t.Run("should reject creation when folder is managed by ManagerKindRepo", func(t *testing.T) {
|
||||
orgID := rand.Int63()
|
||||
ruleStore := fakes.NewRuleStore(t)
|
||||
|
||||
// Create a managed folder
|
||||
managedFolder := randFolder()
|
||||
managedFolder.ManagedBy = utils.ManagerKindRepo
|
||||
ruleStore.Folders[orgID] = append(ruleStore.Folders[orgID], managedFolder)
|
||||
|
||||
permissions := map[int64]map[string][]string{
|
||||
orgID: {
|
||||
dashboards.ScopeFoldersProvider.GetResourceScopeUID(managedFolder.UID): {dashboards.ActionFoldersRead},
|
||||
},
|
||||
}
|
||||
requestCtx := createRequestContextWithPerms(orgID, permissions, nil)
|
||||
|
||||
svc := createService(ruleStore, nil)
|
||||
response := svc.RoutePostNameRulesConfig(requestCtx, apimodels.PostableRuleGroupConfig{
|
||||
Name: "test-group",
|
||||
}, managedFolder.UID)
|
||||
|
||||
require.Equal(t, http.StatusBadRequest, response.Status())
|
||||
require.Contains(t, string(response.Body()), "cannot store rules in folder managed by Git Sync")
|
||||
})
|
||||
}
|
||||
|
||||
@@ -296,7 +296,7 @@ func (srv PrometheusSrv) RouteGetRuleStatuses(c *contextmodel.ReqContext) respon
|
||||
allowedNamespaces := map[string]string{}
|
||||
for namespaceUID, folder := range namespaceMap {
|
||||
// only add namespaces that the user has access to rules in
|
||||
hasAccess, err := srv.authz.HasAccessInFolder(c.Req.Context(), c.SignedInUser, ngmodels.Namespace(*folder.ToFolderReference()))
|
||||
hasAccess, err := srv.authz.HasAccessInFolder(c.Req.Context(), c.SignedInUser, ngmodels.NewNamespace(folder))
|
||||
if err != nil {
|
||||
ruleResponse.Status = "error"
|
||||
ruleResponse.Error = fmt.Sprintf("failed to get namespaces visible to the user: %s", err.Error())
|
||||
|
||||
@@ -204,7 +204,7 @@ func IsNonRetryableError(err error) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// HasErrors returns true when Results contains at least one element and all elements are errors
|
||||
// IsError returns true when Results contains at least one element and all elements are errors
|
||||
func (evalResults Results) IsError() bool {
|
||||
for _, r := range evalResults {
|
||||
if r.State != Error {
|
||||
|
||||
@@ -24,6 +24,7 @@ import (
|
||||
|
||||
alertingModels "github.com/grafana/alerting/models"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/services/folder"
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
@@ -397,6 +398,20 @@ type Namespaced interface {
|
||||
|
||||
type Namespace folder.FolderReference
|
||||
|
||||
func NewNamespace(f *folder.Folder) Namespace {
|
||||
return Namespace(*f.ToFolderReference())
|
||||
}
|
||||
|
||||
func (n Namespace) ValidateForRuleStorage() error {
|
||||
if n.UID == "" {
|
||||
return fmt.Errorf("cannot store rules in folder without UID")
|
||||
}
|
||||
if n.ManagedBy == utils.ManagerKindRepo {
|
||||
return fmt.Errorf("cannot store rules in folder managed by Git Sync")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n Namespace) GetNamespaceUID() string {
|
||||
return n.UID
|
||||
}
|
||||
|
||||
@@ -114,7 +114,7 @@ func (service *AlertRuleService) ListAlertRules(ctx context.Context, user identi
|
||||
}
|
||||
folderUIDs := make([]string, 0, len(folders))
|
||||
for _, f := range folders {
|
||||
access, err := service.authz.HasAccessInFolder(ctx, user, models.Namespace(*f.ToFolderReference()))
|
||||
access, err := service.authz.HasAccessInFolder(ctx, user, models.NewNamespace(f))
|
||||
if err != nil {
|
||||
return nil, nil, "", err
|
||||
}
|
||||
@@ -407,6 +407,9 @@ func (service *AlertRuleService) UpdateRuleGroup(ctx context.Context, user ident
|
||||
if err := models.ValidateRuleGroupInterval(intervalSeconds, service.baseIntervalSeconds); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := service.ensureNamespace(ctx, user, user.GetOrgID(), namespaceUID); err != nil {
|
||||
return err
|
||||
}
|
||||
return service.xact.InTransaction(ctx, func(ctx context.Context) error {
|
||||
query := &models.ListAlertRulesQuery{
|
||||
OrgID: user.GetOrgID(),
|
||||
@@ -471,6 +474,10 @@ func (service *AlertRuleService) ReplaceRuleGroup(ctx context.Context, user iden
|
||||
return err
|
||||
}
|
||||
|
||||
if err := service.ensureNamespace(ctx, user, user.GetOrgID(), group.FolderUID); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// If the rule group is reserved for no-group rules, we cannot have multiple rules in it.
|
||||
if models.IsNoGroupRuleGroup(group.Title) && len(group.Rules) > 1 {
|
||||
return fmt.Errorf("rule group %s is reserved for no-group rules and cannot be used for rule groups with multiple rules", group.Title)
|
||||
@@ -1025,6 +1032,7 @@ func (service *AlertRuleService) checkGroupLimits(group models.AlertRuleGroup) e
|
||||
|
||||
// ensureNamespace ensures that the rule has a valid namespace UID.
|
||||
// If the rule does not have a namespace UID or the namespace (folder) does not exist it will return an error.
|
||||
// If the folder is managed by a manager, it will also return an error.
|
||||
func (service *AlertRuleService) ensureNamespace(ctx context.Context, user identity.Requester, orgID int64, namespaceUID string) error {
|
||||
if namespaceUID == "" {
|
||||
return fmt.Errorf("%w: folderUID must be set", models.ErrAlertRuleFailedValidation)
|
||||
@@ -1037,18 +1045,23 @@ func (service *AlertRuleService) ensureNamespace(ctx context.Context, user ident
|
||||
}
|
||||
|
||||
// ensure the namespace exists
|
||||
_, err := service.folderService.Get(ctx, &folder.GetFolderQuery{
|
||||
f, err := service.folderService.Get(ctx, &folder.GetFolderQuery{
|
||||
OrgID: orgID,
|
||||
UID: &namespaceUID,
|
||||
SignedInUser: user,
|
||||
})
|
||||
if err != nil {
|
||||
if err != nil || f == nil {
|
||||
if errors.Is(err, dashboards.ErrFolderNotFound) {
|
||||
return fmt.Errorf("%w: folder does not exist", models.ErrAlertRuleFailedValidation)
|
||||
}
|
||||
return err
|
||||
}
|
||||
|
||||
// check if the folder is managed by a manager
|
||||
if err := models.NewNamespace(f).ValidateForRuleStorage(); err != nil {
|
||||
return fmt.Errorf("%w: %s", models.ErrAlertRuleFailedValidation, err)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
|
||||
@@ -16,6 +16,7 @@ import (
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apimachinery/identity"
|
||||
"github.com/grafana/grafana/pkg/apimachinery/utils"
|
||||
"github.com/grafana/grafana/pkg/bus"
|
||||
"github.com/grafana/grafana/pkg/expr"
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
@@ -867,6 +868,27 @@ func TestIntegrationAlertRuleService(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, int64(120), rule.IntervalSeconds)
|
||||
})
|
||||
|
||||
t.Run("UpdateRuleGroup should reject when folder is managed by a manager", func(t *testing.T) {
|
||||
service, _, _, ac := initService(t)
|
||||
ac.CanWriteAllRulesFunc = func(ctx context.Context, user identity.Requester) (bool, error) {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
managedFolderUID := "managed-folder-update-group"
|
||||
fs := foldertest.NewFakeService()
|
||||
fs.AddFolder(&folder.Folder{
|
||||
OrgID: orgID,
|
||||
UID: managedFolderUID,
|
||||
Title: "Managed Folder",
|
||||
ManagedBy: utils.ManagerKindRepo,
|
||||
})
|
||||
service.folderService = fs
|
||||
|
||||
err := service.UpdateRuleGroup(context.Background(), u, managedFolderUID, "some-group", 120)
|
||||
require.ErrorIs(t, err, models.ErrAlertRuleFailedValidation)
|
||||
require.ErrorContains(t, err, "cannot store rules in folder managed by Git Sync")
|
||||
})
|
||||
}
|
||||
|
||||
func TestIntegrationCreateAlertRule(t *testing.T) {
|
||||
@@ -1166,6 +1188,30 @@ func TestIntegrationCreateAlertRule(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
require.True(t, models.IsNoGroupRuleGroup(retrievedRule.RuleGroup), "Rule should be considered NoGroup rule")
|
||||
})
|
||||
|
||||
t.Run("should reject creation when folder is managed by a manager", func(t *testing.T) {
|
||||
service, _, _, ac := initService(t)
|
||||
ac.CanWriteAllRulesFunc = func(ctx context.Context, user identity.Requester) (bool, error) {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
managedFolderUID := "managed-folder"
|
||||
fs := foldertest.NewFakeService()
|
||||
fs.AddFolder(&folder.Folder{
|
||||
OrgID: orgID,
|
||||
UID: managedFolderUID,
|
||||
Title: "Managed Folder",
|
||||
ManagedBy: utils.ManagerKindRepo,
|
||||
})
|
||||
service.folderService = fs
|
||||
|
||||
rule := dummyRule("test-managed-folder", orgID)
|
||||
rule.NamespaceUID = managedFolderUID
|
||||
|
||||
_, err := service.CreateAlertRule(context.Background(), u, rule, models.ProvenanceNone)
|
||||
require.ErrorIs(t, err, models.ErrAlertRuleFailedValidation)
|
||||
require.ErrorContains(t, err, "cannot store rules in folder managed by Git Sync")
|
||||
})
|
||||
}
|
||||
|
||||
func TestUpdateAlertRule(t *testing.T) {
|
||||
@@ -1316,6 +1362,36 @@ func TestUpdateAlertRule(t *testing.T) {
|
||||
require.Equal(t, "nogroup-update-new", updated.Title)
|
||||
require.Equal(t, originalInterval, updated.IntervalSeconds)
|
||||
})
|
||||
|
||||
t.Run("should reject update when folder is managed by a manager", func(t *testing.T) {
|
||||
service, ruleStore, provenanceStore, ac := initService(t)
|
||||
ac.CanWriteAllRulesFunc = func(ctx context.Context, user identity.Requester) (bool, error) {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
managedFolderUID := "managed-folder-update"
|
||||
fs := foldertest.NewFakeService()
|
||||
fs.AddFolder(&folder.Folder{
|
||||
OrgID: orgID,
|
||||
UID: managedFolderUID,
|
||||
Title: "Managed Folder",
|
||||
ManagedBy: utils.ManagerKindRepo,
|
||||
})
|
||||
service.folderService = fs
|
||||
|
||||
// Create an existing rule
|
||||
existingRule := dummyRule("test-managed-folder-update", orgID)
|
||||
existingRule.NamespaceUID = managedFolderUID
|
||||
_, err := ruleStore.InsertAlertRules(context.Background(), models.NewUserUID(u), []models.InsertRule{{AlertRule: existingRule}})
|
||||
require.NoError(t, err)
|
||||
require.NoError(t, provenanceStore.SetProvenance(context.Background(), &existingRule, orgID, models.ProvenanceNone))
|
||||
|
||||
// Try to update the rule
|
||||
existingRule.Title = "Updated Title"
|
||||
_, err = service.UpdateAlertRule(context.Background(), u, existingRule, models.ProvenanceNone)
|
||||
require.ErrorIs(t, err, models.ErrAlertRuleFailedValidation)
|
||||
require.ErrorContains(t, err, "cannot store rules in folder managed by Git Sync")
|
||||
})
|
||||
}
|
||||
|
||||
func TestDeleteAlertRule(t *testing.T) {
|
||||
@@ -2054,6 +2130,33 @@ func TestReplaceGroup(t *testing.T) {
|
||||
require.Error(t, err)
|
||||
require.ErrorContains(t, err, "cannot move rule out of this group")
|
||||
})
|
||||
|
||||
t.Run("should reject replace when folder is managed by a manager", func(t *testing.T) {
|
||||
service, _, _, ac := initService(t)
|
||||
ac.CanWriteAllRulesFunc = func(ctx context.Context, user identity.Requester) (bool, error) {
|
||||
return true, nil
|
||||
}
|
||||
|
||||
managedFolderUID := "managed-folder-replace"
|
||||
fs := foldertest.NewFakeService()
|
||||
fs.AddFolder(&folder.Folder{
|
||||
OrgID: orgID,
|
||||
UID: managedFolderUID,
|
||||
Title: "Managed Folder",
|
||||
ManagedBy: utils.ManagerKindRepo,
|
||||
})
|
||||
service.folderService = fs
|
||||
|
||||
group := models.AlertRuleGroup{
|
||||
Title: "test-group",
|
||||
FolderUID: managedFolderUID,
|
||||
Interval: 60,
|
||||
}
|
||||
|
||||
err := service.ReplaceRuleGroup(context.Background(), u, group, models.ProvenanceNone, "")
|
||||
require.ErrorIs(t, err, models.ErrAlertRuleFailedValidation)
|
||||
require.ErrorContains(t, err, "cannot store rules in folder managed by Git Sync")
|
||||
})
|
||||
}
|
||||
|
||||
func TestDeleteRuleGroup(t *testing.T) {
|
||||
|
||||
@@ -530,7 +530,7 @@ func (h *RemoteLokiBackend) getFolderUIDsForFilter(ctx context.Context, query mo
|
||||
uids := make([]string, 0, len(folders))
|
||||
// now keep only UIDs of folder in which user can read rules.
|
||||
for _, f := range folders {
|
||||
hasAccess, err := h.ac.HasAccessInFolder(ctx, query.SignedInUser, models.Namespace(*f.ToFolderReference()))
|
||||
hasAccess, err := h.ac.HasAccessInFolder(ctx, query.SignedInUser, models.NewNamespace(f))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@@ -153,13 +153,20 @@ func (provider *Provisioner) Provision(ctx context.Context) error {
|
||||
|
||||
// CleanUpOrphanedDashboards deletes provisioned dashboards missing a linked reader.
|
||||
func (provider *Provisioner) CleanUpOrphanedDashboards(ctx context.Context) {
|
||||
currentReaders := make([]string, len(provider.fileReaders))
|
||||
configs := make([]dashboards.ProvisioningConfig, len(provider.fileReaders))
|
||||
|
||||
for index, reader := range provider.fileReaders {
|
||||
currentReaders[index] = reader.Cfg.Name
|
||||
configs[index] = dashboards.ProvisioningConfig{
|
||||
Name: reader.Cfg.Name,
|
||||
OrgID: reader.Cfg.OrgID,
|
||||
Folder: reader.Cfg.Folder,
|
||||
AllowUIUpdates: reader.Cfg.AllowUIUpdates,
|
||||
}
|
||||
}
|
||||
|
||||
if err := provider.provisioner.DeleteOrphanedProvisionedDashboards(ctx, &dashboards.DeleteOrphanedProvisionedDashboardsCommand{ReaderNames: currentReaders}); err != nil {
|
||||
if err := provider.provisioner.DeleteOrphanedProvisionedDashboards(
|
||||
ctx, &dashboards.DeleteOrphanedProvisionedDashboardsCommand{Config: configs},
|
||||
); err != nil {
|
||||
provider.log.Warn("Failed to delete orphaned provisioned dashboards", "err", err)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -261,8 +261,8 @@ func RunDashboardUIDMigrations(sess *xorm.Session, driverName string, logger log
|
||||
logger.Info("Starting batched dashboard_uid migration for annotations (newest first)", "batchSize", batchSize)
|
||||
updateSQL := `UPDATE annotation
|
||||
SET dashboard_uid = (SELECT uid FROM dashboard WHERE dashboard.id = annotation.dashboard_id)
|
||||
WHERE dashboard_uid IS NULL
|
||||
AND dashboard_id != 0
|
||||
WHERE dashboard_uid IS NULL
|
||||
AND dashboard_id != 0
|
||||
AND EXISTS (SELECT 1 FROM dashboard WHERE dashboard.id = annotation.dashboard_id)
|
||||
AND annotation.id IN (
|
||||
SELECT id FROM annotation
|
||||
@@ -285,19 +285,19 @@ func RunDashboardUIDMigrations(sess *xorm.Session, driverName string, logger log
|
||||
LIMIT $1
|
||||
)`
|
||||
case MySQL:
|
||||
updateSQL = `UPDATE annotation
|
||||
INNER JOIN dashboard ON annotation.dashboard_id = dashboard.id
|
||||
SET annotation.dashboard_uid = dashboard.uid
|
||||
WHERE annotation.dashboard_uid IS NULL
|
||||
AND annotation.dashboard_id != 0
|
||||
AND annotation.id IN (
|
||||
SELECT id FROM (
|
||||
SELECT id FROM annotation
|
||||
WHERE dashboard_uid IS NULL AND dashboard_id != 0
|
||||
ORDER BY id DESC
|
||||
LIMIT ?
|
||||
) AS batch
|
||||
)`
|
||||
updateSQL = `UPDATE annotation AS a
|
||||
JOIN dashboard AS d ON a.dashboard_id = d.id
|
||||
JOIN (
|
||||
SELECT id
|
||||
FROM annotation
|
||||
WHERE dashboard_uid IS NULL
|
||||
AND dashboard_id != 0
|
||||
ORDER BY id DESC
|
||||
LIMIT ?
|
||||
) AS batch ON batch.id = a.id
|
||||
SET a.dashboard_uid = d.uid
|
||||
WHERE a.dashboard_uid IS NULL
|
||||
AND a.dashboard_id != 0`
|
||||
}
|
||||
|
||||
updatedTotal := int64(0)
|
||||
|
||||
@@ -3696,7 +3696,7 @@
|
||||
"format": "int64"
|
||||
},
|
||||
"errors": {
|
||||
"description": "Report errors for this resource type This may not be an exhaustive list and recommend looking at the logs for more info",
|
||||
"description": "Report errors/warnings for this resource type This may not be an exhaustive list and recommend looking at the logs for more info",
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
@@ -3722,6 +3722,18 @@
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
"warning": {
|
||||
"description": "The error count",
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
},
|
||||
"warnings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"default": ""
|
||||
}
|
||||
},
|
||||
"write": {
|
||||
"type": "integer",
|
||||
"format": "int64"
|
||||
@@ -3849,6 +3861,13 @@
|
||||
"$ref": "#/components/schemas/com.github.grafana.grafana.apps.provisioning.pkg.apis.provisioning.v0alpha1.RepositoryURLs"
|
||||
}
|
||||
]
|
||||
},
|
||||
"warnings": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"type": "string",
|
||||
"default": ""
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { t } from '@grafana/i18n';
|
||||
import { config } from '@grafana/runtime';
|
||||
// Maps the ID of the nav item to a translated phrase to later pass to <Trans />
|
||||
// Because the navigation content is dynamic (defined in the backend), we can not use
|
||||
// the normal inline message definition method.
|
||||
@@ -49,9 +48,7 @@ export function getNavTitle(navId: string | undefined) {
|
||||
case 'dashboards/recently-deleted':
|
||||
return t('nav.recently-deleted.title', 'Recently deleted');
|
||||
case 'dashboards/new':
|
||||
return config.featureToggles.dashboardTemplates
|
||||
? t('nav.new-dashboard.empty-title', 'Empty dashboard')
|
||||
: t('nav.new-dashboard.title', 'New dashboard');
|
||||
return t('nav.new-dashboard.title', 'New dashboard');
|
||||
case 'dashboards/folder/new':
|
||||
return t('nav.new-folder.title', 'New folder');
|
||||
case 'dashboards/import':
|
||||
|
||||
@@ -27,6 +27,7 @@ import { BrowseFilters } from './components/BrowseFilters';
|
||||
import { BrowseView } from './components/BrowseView';
|
||||
import CreateNewButton from './components/CreateNewButton';
|
||||
import { FolderActionsButton } from './components/FolderActionsButton';
|
||||
import { RecentlyViewedDashboards } from './components/RecentlyViewedDashboards';
|
||||
import { SearchView } from './components/SearchView';
|
||||
import { getFolderPermissions } from './permissions';
|
||||
import { useHasSelection } from './state/hooks';
|
||||
@@ -178,6 +179,8 @@ const BrowseDashboardsPage = memo(({ queryParams }: { queryParams: Record<string
|
||||
>
|
||||
<Page.Contents className={styles.pageContents}>
|
||||
<ProvisionedFolderPreviewBanner queryParams={queryParams} />
|
||||
{/* only show recently viewed dashboards when in root */}
|
||||
{!folderUID && <RecentlyViewedDashboards />}
|
||||
<div>
|
||||
<FilterInput
|
||||
placeholder={getSearchPlaceholder(searchState.includePanels)}
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
import { css } from '@emotion/css';
|
||||
import { useAsync } from 'react-use';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { t, Trans } from '@grafana/i18n';
|
||||
import { evaluateBooleanFlag } from '@grafana/runtime/internal';
|
||||
import { CollapsableSection, Link, Spinner, Text, useStyles2 } from '@grafana/ui';
|
||||
|
||||
import { getRecentlyViewedDashboards } from './utils';
|
||||
|
||||
const MAX_RECENT = 5;
|
||||
|
||||
export function RecentlyViewedDashboards() {
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
const { value: recentDashboards = [], loading } = useAsync(async () => {
|
||||
if (!evaluateBooleanFlag('recentlyViewedDashboards', false)) {
|
||||
return [];
|
||||
}
|
||||
return getRecentlyViewedDashboards(MAX_RECENT);
|
||||
}, []);
|
||||
|
||||
if (!evaluateBooleanFlag('recentlyViewedDashboards', false)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<CollapsableSection
|
||||
headerDataTestId="browseDashboardsRecentlyViewedTitle"
|
||||
label={
|
||||
<Text variant="h5" element="h3">
|
||||
<Trans i18nKey="browse-dashboards.recently-viewed.title">Recently viewed</Trans>
|
||||
</Text>
|
||||
}
|
||||
isOpen={true}
|
||||
className={styles.title}
|
||||
contentClassName={styles.content}
|
||||
>
|
||||
{/* placeholder */}
|
||||
{loading && <Spinner />}
|
||||
{/* TODO: Better empty state https://github.com/grafana/grafana/issues/114804 */}
|
||||
{!loading && recentDashboards.length === 0 && (
|
||||
<Text>{t('browse-dashboards.recently-viewed.empty', 'Nothing viewed yet')}</Text>
|
||||
)}
|
||||
|
||||
{/* TODO: implement actual card content */}
|
||||
{!loading && recentDashboards.length > 0 && (
|
||||
<>
|
||||
{recentDashboards.map((dash) => (
|
||||
<div key={dash.uid}>
|
||||
<Link href={dash.url}>{dash.name}</Link>
|
||||
</div>
|
||||
))}
|
||||
</>
|
||||
)}
|
||||
</CollapsableSection>
|
||||
);
|
||||
}
|
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => {
|
||||
const accent = theme.visualization.getColorByName('purple'); // or your own hex
|
||||
|
||||
return {
|
||||
title: css({
|
||||
background: `linear-gradient(90deg, ${accent} 0%, #e478eaff 100%)`,
|
||||
WebkitTextFillColor: 'transparent',
|
||||
backgroundClip: 'text',
|
||||
color: 'transparent',
|
||||
'& button svg': {
|
||||
color: accent,
|
||||
},
|
||||
}),
|
||||
content: css({
|
||||
paddingTop: theme.spacing(0),
|
||||
}),
|
||||
};
|
||||
};
|
||||
@@ -1,6 +1,9 @@
|
||||
import { config } from '@grafana/runtime';
|
||||
import { contextSrv } from 'app/core/services/context_srv';
|
||||
import impressionSrv from 'app/core/services/impression_srv';
|
||||
import { ResourceRef } from 'app/features/provisioning/components/BulkActions/useBulkActionJob';
|
||||
import { getGrafanaSearcher } from 'app/features/search/service/searcher';
|
||||
import { DashboardQueryResult } from 'app/features/search/service/types';
|
||||
|
||||
import { DashboardTreeSelection, DashboardViewItemWithUIItems, BrowseDashboardsPermissions } from '../types';
|
||||
|
||||
@@ -60,3 +63,36 @@ export function canSelectItems(permissions: BrowseDashboardsPermissions) {
|
||||
const canSelectDashboards = canEditDashboards || canDeleteDashboards;
|
||||
return Boolean(canSelectFolders || canSelectDashboards);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns dashboard search results ordered the same way the user opened them.
|
||||
*/
|
||||
export async function getRecentlyViewedDashboards(maxItems = 5): Promise<DashboardQueryResult[]> {
|
||||
try {
|
||||
const recentlyOpened = (await impressionSrv.getDashboardOpened()).slice(0, maxItems);
|
||||
if (!recentlyOpened.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const searchResults = await getGrafanaSearcher().search({
|
||||
kind: ['dashboard'],
|
||||
limit: recentlyOpened.length,
|
||||
uid: recentlyOpened,
|
||||
});
|
||||
|
||||
const dashboards = searchResults.view.toArray();
|
||||
// Keep dashboards in the same order the user opened them.
|
||||
// When a UID is missing from the search response
|
||||
// push it to the end instead of letting indexOf return -1
|
||||
const order = (uid: string) => {
|
||||
const idx = recentlyOpened.indexOf(uid);
|
||||
return idx === -1 ? recentlyOpened.length : idx;
|
||||
};
|
||||
|
||||
dashboards.sort((a, b) => order(a.uid) - order(b.uid));
|
||||
return dashboards;
|
||||
} catch (error) {
|
||||
console.error('Failed to load recently viewed dashboards', error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import { useEffect, useRef, useState } from 'react';
|
||||
import { t } from '@grafana/i18n';
|
||||
import { config } from '@grafana/runtime';
|
||||
import { contextSrv } from 'app/core/services/context_srv';
|
||||
import impressionSrv from 'app/core/services/impression_srv';
|
||||
import { getRecentlyViewedDashboards } from 'app/features/browse-dashboards/components/utils';
|
||||
import { getGrafanaSearcher } from 'app/features/search/service/searcher';
|
||||
|
||||
import { CommandPaletteAction } from '../types';
|
||||
@@ -20,20 +20,7 @@ export async function getRecentDashboardActions(): Promise<CommandPaletteAction[
|
||||
return [];
|
||||
}
|
||||
|
||||
const recentUids = (await impressionSrv.getDashboardOpened()).slice(0, MAX_RECENT_DASHBOARDS);
|
||||
const resultsDataFrame = await getGrafanaSearcher().search({
|
||||
kind: ['dashboard'],
|
||||
limit: MAX_RECENT_DASHBOARDS,
|
||||
uid: recentUids,
|
||||
});
|
||||
|
||||
// Search results are alphabetical, so reorder them according to recently viewed
|
||||
const recentResults = resultsDataFrame.view.toArray();
|
||||
recentResults.sort((resultA, resultB) => {
|
||||
const orderA = recentUids.indexOf(resultA.uid);
|
||||
const orderB = recentUids.indexOf(resultB.uid);
|
||||
return orderA - orderB;
|
||||
});
|
||||
const recentResults = await getRecentlyViewedDashboards(MAX_RECENT_DASHBOARDS);
|
||||
|
||||
const recentDashboardActions: CommandPaletteAction[] = recentResults.map((item) => {
|
||||
const { url, name } = item; // items are backed by DataFrameView, so must hold the url in a closure
|
||||
|
||||
@@ -206,6 +206,10 @@ export class DashboardSceneChangeTracker {
|
||||
}
|
||||
|
||||
this._changesWorker!.onmessage = (e: MessageEvent<DashboardChangeInfo>) => {
|
||||
if (!this._dashboard.state.isEditing) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.updateIsDirty(!!e.data.hasChanges);
|
||||
};
|
||||
|
||||
|
||||
@@ -171,7 +171,6 @@ function DashboardControlsRenderer({ model }: SceneComponentProps<DashboardContr
|
||||
<DashboardControlActions dashboard={dashboard} />
|
||||
</div>
|
||||
)}
|
||||
{!hideLinksControls && !editPanel && <DashboardLinksControls links={links} dashboard={dashboard} />}
|
||||
</div>
|
||||
{!hideVariableControls && (
|
||||
<>
|
||||
@@ -179,6 +178,7 @@ function DashboardControlsRenderer({ model }: SceneComponentProps<DashboardContr
|
||||
<DashboardDataLayerControls dashboard={dashboard} />
|
||||
</>
|
||||
)}
|
||||
{!hideLinksControls && !editPanel && <DashboardLinksControls links={links} dashboard={dashboard} />}
|
||||
{!hideDashboardControls && hasDashboardControls && <DashboardControlsButton dashboard={dashboard} />}
|
||||
{editPanel && <PanelEditControls panelEditor={editPanel} />}
|
||||
{showDebugger && <SceneDebugger scene={model} key={'scene-debugger'} />}
|
||||
|
||||
@@ -64,7 +64,6 @@ function getStyles(theme: GrafanaTheme2) {
|
||||
alignItems: 'center',
|
||||
verticalAlign: 'middle',
|
||||
marginBottom: theme.spacing(1),
|
||||
marginRight: theme.spacing(1),
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -36,13 +36,9 @@ export function DashboardLinksControls({ links, dashboard }: Props) {
|
||||
function getStyles(theme: GrafanaTheme2) {
|
||||
return {
|
||||
linksContainer: css({
|
||||
display: 'flex',
|
||||
flexWrap: 'wrap',
|
||||
display: 'inline-flex',
|
||||
gap: theme.spacing(1),
|
||||
maxWidth: '100%',
|
||||
minWidth: 0,
|
||||
order: 1,
|
||||
flex: '1 1 0%',
|
||||
marginRight: theme.spacing(1),
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -271,7 +271,7 @@ export class DashboardScene extends SceneObjectBase<DashboardSceneState> impleme
|
||||
|
||||
public onEnterEditMode = () => {
|
||||
// Save this state
|
||||
this._initialState = sceneUtils.cloneSceneObjectState(this.state);
|
||||
this._initialState = sceneUtils.cloneSceneObjectState(this.state, { isDirty: false });
|
||||
this._initialUrlState = locationService.getLocation();
|
||||
|
||||
// Switch to edit mode
|
||||
|
||||
@@ -19,7 +19,6 @@ import { AddVariableButton } from './VariableControlsAddButton';
|
||||
|
||||
export function VariableControls({ dashboard }: { dashboard: DashboardScene }) {
|
||||
const { variables } = sceneGraph.getVariables(dashboard)!.useState();
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
return (
|
||||
<>
|
||||
@@ -28,11 +27,7 @@ export function VariableControls({ dashboard }: { dashboard: DashboardScene }) {
|
||||
.map((variable) => (
|
||||
<VariableValueSelectWrapper key={variable.state.key} variable={variable} />
|
||||
))}
|
||||
{config.featureToggles.dashboardNewLayouts ? (
|
||||
<div className={styles.addButton}>
|
||||
<AddVariableButton dashboard={dashboard} />
|
||||
</div>
|
||||
) : null}
|
||||
{config.featureToggles.dashboardNewLayouts ? <AddVariableButton dashboard={dashboard} /> : null}
|
||||
</>
|
||||
);
|
||||
}
|
||||
@@ -211,11 +206,4 @@ const getStyles = (theme: GrafanaTheme2) => ({
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
}),
|
||||
addButton: css({
|
||||
display: 'inline-flex',
|
||||
alignItems: 'center',
|
||||
verticalAlign: 'middle',
|
||||
marginBottom: theme.spacing(1),
|
||||
marginRight: theme.spacing(1),
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import { css } from '@emotion/css';
|
||||
import { PointerEventHandler, useCallback } from 'react';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { Trans } from '@grafana/i18n';
|
||||
import { Button } from '@grafana/ui';
|
||||
import { Button, useStyles2 } from '@grafana/ui';
|
||||
|
||||
import { openAddVariablePane } from '../settings/variables/VariableAddEditableElement';
|
||||
import { DashboardInteractions } from '../utils/interactions';
|
||||
@@ -9,6 +11,7 @@ import { DashboardInteractions } from '../utils/interactions';
|
||||
import { DashboardScene } from './DashboardScene';
|
||||
|
||||
export function AddVariableButton({ dashboard }: { dashboard: DashboardScene }) {
|
||||
const styles = useStyles2(getStyles);
|
||||
const { editview, editPanel, isEditing, viewPanel } = dashboard.useState();
|
||||
|
||||
const handlePointerDown: PointerEventHandler = useCallback(
|
||||
@@ -30,10 +33,22 @@ export function AddVariableButton({ dashboard }: { dashboard: DashboardScene })
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="dashboard-canvas-add-button">
|
||||
<Button icon="plus" variant="primary" fill="text" onPointerDown={handlePointerDown}>
|
||||
<Trans i18nKey="dashboard-scene.variable-controls.add-variable">Add variable</Trans>
|
||||
</Button>
|
||||
<div className={styles.addButton}>
|
||||
<div className="dashboard-canvas-add-button">
|
||||
<Button icon="plus" variant="primary" fill="text" onPointerDown={handlePointerDown}>
|
||||
<Trans i18nKey="dashboard-scene.variable-controls.add-variable">Add variable</Trans>
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
const getStyles = (theme: GrafanaTheme2) => ({
|
||||
addButton: css({
|
||||
display: 'inline-flex',
|
||||
alignItems: 'center',
|
||||
verticalAlign: 'middle',
|
||||
marginBottom: theme.spacing(1),
|
||||
marginRight: theme.spacing(1),
|
||||
}),
|
||||
});
|
||||
|
||||
@@ -262,4 +262,57 @@ describe('TabsLayoutManager', () => {
|
||||
expect(manager.getVizPanels().length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createFromLayout', () => {
|
||||
it('should convert rows with titles to tabs', () => {
|
||||
const rowsLayout = new RowsLayoutManager({
|
||||
rows: [new RowItem({ title: 'Row 1' }), new RowItem({ title: 'Row 2' })],
|
||||
});
|
||||
|
||||
const tabsManager = TabsLayoutManager.createFromLayout(rowsLayout);
|
||||
|
||||
expect(tabsManager.state.tabs).toHaveLength(2);
|
||||
expect(tabsManager.state.tabs[0].state.title).toBe('Row 1');
|
||||
expect(tabsManager.state.tabs[1].state.title).toBe('Row 2');
|
||||
});
|
||||
|
||||
it('should use default title when row has empty title', () => {
|
||||
const rowsLayout = new RowsLayoutManager({
|
||||
rows: [new RowItem({ title: '' })],
|
||||
});
|
||||
|
||||
const tabsManager = TabsLayoutManager.createFromLayout(rowsLayout);
|
||||
|
||||
expect(tabsManager.state.tabs).toHaveLength(1);
|
||||
expect(tabsManager.state.tabs[0].state.title).toBe('New tab');
|
||||
});
|
||||
|
||||
it('should generate unique titles for multiple rows with empty titles', () => {
|
||||
const rowsLayout = new RowsLayoutManager({
|
||||
rows: [new RowItem({ title: '' }), new RowItem({ title: '' }), new RowItem({ title: '' })],
|
||||
});
|
||||
|
||||
const tabsManager = TabsLayoutManager.createFromLayout(rowsLayout);
|
||||
|
||||
expect(tabsManager.state.tabs).toHaveLength(3);
|
||||
expect(tabsManager.state.tabs[0].state.title).toBe('New tab');
|
||||
expect(tabsManager.state.tabs[1].state.title).toBe('New tab 1');
|
||||
expect(tabsManager.state.tabs[2].state.title).toBe('New tab 2');
|
||||
});
|
||||
|
||||
it('should generate unique titles when mixing empty and existing titles', () => {
|
||||
const rowsLayout = new RowsLayoutManager({
|
||||
rows: [
|
||||
new RowItem({ title: 'New row' }), // existing title that matches default
|
||||
new RowItem({ title: '' }), // empty, should get unique title
|
||||
],
|
||||
});
|
||||
|
||||
const tabsManager = TabsLayoutManager.createFromLayout(rowsLayout);
|
||||
|
||||
expect(tabsManager.state.tabs).toHaveLength(2);
|
||||
expect(tabsManager.state.tabs[0].state.title).toBe('New row');
|
||||
expect(tabsManager.state.tabs[1].state.title).toBe('New tab');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -410,6 +410,10 @@ export class TabsLayoutManager extends SceneObjectBase<TabsLayoutManagerState> i
|
||||
let tabs: TabItem[] = [];
|
||||
|
||||
if (layout instanceof RowsLayoutManager) {
|
||||
const existingNames = new Set(
|
||||
layout.state.rows.map((row) => row.state.title).filter((title): title is string => !!title)
|
||||
);
|
||||
|
||||
for (const row of layout.state.rows) {
|
||||
if (row.state.repeatSourceKey) {
|
||||
continue;
|
||||
@@ -420,10 +424,14 @@ export class TabsLayoutManager extends SceneObjectBase<TabsLayoutManagerState> i
|
||||
// We need to clear the target since we don't want to point the original row anymore (if it was set)
|
||||
conditionalRendering?.setTarget(undefined);
|
||||
|
||||
const newTitle =
|
||||
row.state.title || generateUniqueTitle(t('dashboard.tabs-layout.tab.new', 'New tab'), existingNames);
|
||||
existingNames.add(newTitle);
|
||||
|
||||
tabs.push(
|
||||
new TabItem({
|
||||
layout: row.state.layout.clone(),
|
||||
title: row.state.title,
|
||||
title: newTitle,
|
||||
conditionalRendering,
|
||||
repeatByVariable: row.state.repeatByVariable,
|
||||
})
|
||||
|
||||
@@ -119,6 +119,16 @@ export function colorIdEnumToColorIdV2(colorId: FieldColorModeIdV1 | string): Fi
|
||||
return 'continuous-greens';
|
||||
case FieldColorModeIdV1.ContinuousPurples:
|
||||
return 'continuous-purples';
|
||||
case FieldColorModeIdV1.ContinuousViridis:
|
||||
return 'continuous-viridis';
|
||||
case FieldColorModeIdV1.ContinuousMagma:
|
||||
return 'continuous-magma';
|
||||
case FieldColorModeIdV1.ContinuousPlasma:
|
||||
return 'continuous-plasma';
|
||||
case FieldColorModeIdV1.ContinuousInferno:
|
||||
return 'continuous-inferno';
|
||||
case FieldColorModeIdV1.ContinuousCividis:
|
||||
return 'continuous-cividis';
|
||||
case FieldColorModeIdV1.Fixed:
|
||||
return 'fixed';
|
||||
case FieldColorModeIdV1.Shades:
|
||||
|
||||
@@ -1268,6 +1268,16 @@ function colorIdToEnumv1(colorId: FieldColorModeId): FieldColorModeIdV1 {
|
||||
return FieldColorModeIdV1.ContinuousGreens;
|
||||
case 'continuous-purples':
|
||||
return FieldColorModeIdV1.ContinuousPurples;
|
||||
case 'continuous-viridis':
|
||||
return FieldColorModeIdV1.ContinuousViridis;
|
||||
case 'continuous-magma':
|
||||
return FieldColorModeIdV1.ContinuousMagma;
|
||||
case 'continuous-plasma':
|
||||
return FieldColorModeIdV1.ContinuousPlasma;
|
||||
case 'continuous-inferno':
|
||||
return FieldColorModeIdV1.ContinuousInferno;
|
||||
case 'continuous-cividis':
|
||||
return FieldColorModeIdV1.ContinuousCividis;
|
||||
case 'fixed':
|
||||
return FieldColorModeIdV1.Fixed;
|
||||
case 'shades':
|
||||
|
||||
@@ -182,7 +182,6 @@ function getStyles(theme: GrafanaTheme2) {
|
||||
alignItems: 'center',
|
||||
verticalAlign: 'middle',
|
||||
marginBottom: theme.spacing(1),
|
||||
marginRight: theme.spacing(1),
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -789,7 +789,6 @@ const UnthemedLogs: React.FunctionComponent<Props> = (props: Props) => {
|
||||
logOptionsStorageKey={SETTING_KEY_ROOT}
|
||||
timeZone={timeZone}
|
||||
displayedFields={displayedFields}
|
||||
onPermalinkClick={onPermalinkClick}
|
||||
onClickShowField={showField}
|
||||
onClickHideField={hideField}
|
||||
/>
|
||||
|
||||
@@ -19,7 +19,7 @@ import { t } from '@grafana/i18n';
|
||||
import { reportInteraction } from '@grafana/runtime';
|
||||
import { getDragStyles, InlineField, Select, useStyles2 } from '@grafana/ui';
|
||||
import {
|
||||
getSidebarWidth,
|
||||
getFieldSelectorWidth,
|
||||
LogsTableFieldSelector,
|
||||
MIN_WIDTH,
|
||||
} from 'app/features/logs/components/fieldSelector/FieldSelector';
|
||||
@@ -279,7 +279,7 @@ export function LogsTableWrap(props: Props) {
|
||||
// The panel state is updated when the user interacts with the multi-select sidebar
|
||||
}, [currentDataFrame, getColumnsFromProps]);
|
||||
|
||||
const [sidebarWidth, setSidebarWidth] = useState(getSidebarWidth(SETTING_KEY_ROOT));
|
||||
const [sidebarWidth, setSidebarWidth] = useState(getFieldSelectorWidth(SETTING_KEY_ROOT));
|
||||
const tableWidth = props.width - sidebarWidth;
|
||||
|
||||
const styles = useStyles2(getStyles, height, sidebarWidth);
|
||||
|
||||
@@ -35,7 +35,7 @@ export const LogListFieldSelector = ({ containerElement, dataFrames, logs }: Log
|
||||
const { displayedFields, onClickShowField, onClickHideField, setDisplayedFields, logOptionsStorageKey } =
|
||||
useLogListContext();
|
||||
const [sidebarHeight, setSidebarHeight] = useState(220);
|
||||
const [sidebarWidth, setSidebarWidth] = useState(getSidebarWidth(logOptionsStorageKey));
|
||||
const [sidebarWidth, setSidebarWidth] = useState(getFieldSelectorWidth(logOptionsStorageKey));
|
||||
const dragStyles = useStyles2(getDragStyles);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
@@ -74,7 +74,7 @@ export const LogListFieldSelector = ({ containerElement, dataFrames, logs }: Log
|
||||
}, [setSidebarWidthWrapper]);
|
||||
|
||||
const expand = useCallback(() => {
|
||||
const width = getSidebarWidth(logOptionsStorageKey);
|
||||
const width = getFieldSelectorWidth(logOptionsStorageKey);
|
||||
setSidebarWidthWrapper(width < 2 * MIN_WIDTH ? DEFAULT_WIDTH : width);
|
||||
reportInteraction('logs_field_selector_expand_clicked', {
|
||||
mode: 'logs',
|
||||
@@ -205,7 +205,7 @@ export const LogsTableFieldSelector = ({
|
||||
}, [setSidebarWidthWrapper]);
|
||||
|
||||
const expand = useCallback(() => {
|
||||
const width = getSidebarWidth(SETTING_KEY_ROOT);
|
||||
const width = getFieldSelectorWidth(SETTING_KEY_ROOT);
|
||||
setSidebarWidthWrapper(width < 2 * MIN_WIDTH ? DEFAULT_WIDTH : width);
|
||||
reportInteraction('logs_field_selector_expand_clicked', {
|
||||
mode: 'table',
|
||||
@@ -436,7 +436,7 @@ function getSuggestedFields(logs: LogListModel[], displayedFields: string[], def
|
||||
return suggestedFields;
|
||||
}
|
||||
|
||||
export function getSidebarWidth(logOptionsStorageKey?: string): number {
|
||||
export function getFieldSelectorWidth(logOptionsStorageKey?: string): number {
|
||||
const width =
|
||||
(logOptionsStorageKey
|
||||
? parseInt(store.get(`${logOptionsStorageKey}.fieldSelector.width`) ?? DEFAULT_WIDTH, 10)
|
||||
@@ -445,7 +445,7 @@ export function getSidebarWidth(logOptionsStorageKey?: string): number {
|
||||
return width < MIN_WIDTH ? MIN_WIDTH : width;
|
||||
}
|
||||
|
||||
export function getSidebarState(logOptionsStorageKey?: string): boolean | undefined {
|
||||
export function getFieldSelectorState(logOptionsStorageKey?: string): boolean | undefined {
|
||||
if (!logOptionsStorageKey) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
@@ -256,7 +256,11 @@ export const InfiniteScroll = ({
|
||||
if (props.visibleStartIndex === 0) {
|
||||
noScrollRef.current = scrollElement.scrollHeight <= scrollElement.clientHeight;
|
||||
}
|
||||
if (noScrollRef.current || infiniteLoaderState === 'loading' || infiniteLoaderState === 'out-of-bounds') {
|
||||
if (noScrollRef.current) {
|
||||
setInfiniteLoaderState('idle');
|
||||
return;
|
||||
}
|
||||
if (infiniteLoaderState === 'loading' || infiniteLoaderState === 'out-of-bounds') {
|
||||
return;
|
||||
}
|
||||
const lastLogIndex = logs.length - 1;
|
||||
@@ -267,7 +271,7 @@ export const InfiniteScroll = ({
|
||||
setInfiniteLoaderState('idle');
|
||||
}
|
||||
},
|
||||
[infiniteLoaderState, logs.length, scrollElement]
|
||||
[infiniteLoaderState, logs, scrollElement]
|
||||
);
|
||||
|
||||
const getItemKey = useCallback((index: number) => (logs[index] ? logs[index].uid : index.toString()), [logs]);
|
||||
|
||||
@@ -3,7 +3,7 @@ import { createContext, ReactNode, useCallback, useContext, useEffect, useState
|
||||
|
||||
import { LogRowModel, store } from '@grafana/data';
|
||||
|
||||
import { getSidebarWidth } from '../fieldSelector/FieldSelector';
|
||||
import { getFieldSelectorWidth } from '../fieldSelector/FieldSelector';
|
||||
|
||||
import { LogLineDetailsMode } from './LogLineDetails';
|
||||
import { LogListModel } from './processing';
|
||||
@@ -56,6 +56,7 @@ export interface Props {
|
||||
logs: LogRowModel[];
|
||||
logOptionsStorageKey?: string;
|
||||
showControls: boolean;
|
||||
showFieldSelector?: boolean;
|
||||
}
|
||||
|
||||
export const LogDetailsContextProvider = ({
|
||||
@@ -68,12 +69,13 @@ export const LogDetailsContextProvider = ({
|
||||
: getDefaultDetailsMode(containerElement),
|
||||
logs,
|
||||
showControls,
|
||||
showFieldSelector,
|
||||
}: Props) => {
|
||||
const [showDetails, setShowDetails] = useState<LogListModel[]>([]);
|
||||
|
||||
const [currentLog, setCurrentLog] = useState<LogListModel | undefined>(undefined);
|
||||
const [detailsWidth, setDetailsWidthState] = useState(
|
||||
getDetailsWidth(containerElement, logOptionsStorageKey, undefined, detailsModeProp, showControls)
|
||||
getDetailsWidth(containerElement, logOptionsStorageKey, undefined, detailsModeProp, showControls, showFieldSelector)
|
||||
);
|
||||
const [detailsMode, setDetailsMode] = useState<LogLineDetailsMode>(
|
||||
detailsModeProp ?? getDefaultDetailsMode(containerElement)
|
||||
@@ -101,8 +103,10 @@ export const LogDetailsContextProvider = ({
|
||||
|
||||
// Sync log details inline and sidebar width
|
||||
useEffect(() => {
|
||||
setDetailsWidthState(getDetailsWidth(containerElement, logOptionsStorageKey, undefined, detailsMode, showControls));
|
||||
}, [containerElement, detailsMode, logOptionsStorageKey, showControls]);
|
||||
setDetailsWidthState(
|
||||
getDetailsWidth(containerElement, logOptionsStorageKey, undefined, detailsMode, showControls, showFieldSelector)
|
||||
);
|
||||
}, [containerElement, detailsMode, logOptionsStorageKey, showControls, showFieldSelector]);
|
||||
|
||||
// Sync log details width
|
||||
useEffect(() => {
|
||||
@@ -111,13 +115,20 @@ export const LogDetailsContextProvider = ({
|
||||
}
|
||||
const handleResize = debounce(() => {
|
||||
setDetailsWidthState((detailsWidth) =>
|
||||
getDetailsWidth(containerElement, logOptionsStorageKey, detailsWidth, detailsMode, showControls)
|
||||
getDetailsWidth(
|
||||
containerElement,
|
||||
logOptionsStorageKey,
|
||||
detailsWidth,
|
||||
detailsMode,
|
||||
showControls,
|
||||
showFieldSelector
|
||||
)
|
||||
);
|
||||
}, 50);
|
||||
const observer = new ResizeObserver(() => handleResize());
|
||||
observer.observe(containerElement);
|
||||
return () => observer.disconnect();
|
||||
}, [containerElement, detailsMode, logOptionsStorageKey, showControls, showDetails]);
|
||||
}, [containerElement, detailsMode, logOptionsStorageKey, showControls, showDetails, showFieldSelector]);
|
||||
|
||||
const closeDetails = useCallback(() => {
|
||||
showDetails.forEach((log) => removeDetailsScrollPosition(log));
|
||||
@@ -158,7 +169,10 @@ export const LogDetailsContextProvider = ({
|
||||
return;
|
||||
}
|
||||
|
||||
const maxWidth = containerElement.clientWidth - getSidebarWidth(logOptionsStorageKey) - LOG_LIST_MIN_WIDTH;
|
||||
const maxWidth =
|
||||
containerElement.clientWidth -
|
||||
(showFieldSelector ? getFieldSelectorWidth(logOptionsStorageKey) : 0) -
|
||||
LOG_LIST_MIN_WIDTH;
|
||||
if (width > maxWidth) {
|
||||
return;
|
||||
}
|
||||
@@ -166,7 +180,7 @@ export const LogDetailsContextProvider = ({
|
||||
store.set(`${logOptionsStorageKey}.detailsWidth`, width);
|
||||
setDetailsWidthState(width);
|
||||
},
|
||||
[containerElement, logOptionsStorageKey]
|
||||
[containerElement, logOptionsStorageKey, showFieldSelector]
|
||||
);
|
||||
|
||||
return (
|
||||
@@ -196,12 +210,14 @@ export function getDetailsWidth(
|
||||
logOptionsStorageKey?: string,
|
||||
currentWidth?: number,
|
||||
detailsMode: LogLineDetailsMode = 'sidebar',
|
||||
showControls?: boolean
|
||||
showControls?: boolean,
|
||||
showFieldSelector?: boolean
|
||||
) {
|
||||
if (!containerElement) {
|
||||
return 0;
|
||||
}
|
||||
const availableWidth = containerElement.clientWidth - getSidebarWidth(logOptionsStorageKey);
|
||||
const availableWidth =
|
||||
containerElement.clientWidth - (showFieldSelector ? getFieldSelectorWidth(logOptionsStorageKey) : 0);
|
||||
if (detailsMode === 'inline') {
|
||||
return availableWidth - getScrollbarWidth() - (showControls ? LOG_LIST_CONTROLS_WIDTH : 0);
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ import { setPluginLinksHook } from '@grafana/runtime';
|
||||
import { createTempoDatasource } from 'app/plugins/datasource/tempo/test/mocks';
|
||||
|
||||
import { LOG_LINE_BODY_FIELD_NAME } from '../LogDetailsBody';
|
||||
import { getFieldSelectorWidth } from '../fieldSelector/FieldSelector';
|
||||
import { createLogLine } from '../mocks/logRow';
|
||||
|
||||
import { emptyContextData, LogDetailsContext, LogDetailsContextData } from './LogDetailsContext';
|
||||
@@ -27,6 +28,10 @@ import { LogLineDetails, Props } from './LogLineDetails';
|
||||
import { LogListContext, LogListContextData } from './LogListContext';
|
||||
import { defaultValue } from './__mocks__/LogListContext';
|
||||
|
||||
jest.mock('../fieldSelector/FieldSelector');
|
||||
|
||||
jest.mocked(getFieldSelectorWidth).mockReturnValue(220);
|
||||
|
||||
jest.mock('@grafana/assistant', () => {
|
||||
return {
|
||||
...jest.requireActual('@grafana/assistant'),
|
||||
@@ -79,6 +84,7 @@ const setup = (
|
||||
},
|
||||
timeZone: 'browser',
|
||||
showControls: true,
|
||||
showFieldSelector: true,
|
||||
...(propOverrides || {}),
|
||||
};
|
||||
|
||||
@@ -775,4 +781,24 @@ describe('LogLineDetails', () => {
|
||||
expect(screen.getByText('value')).toBeInTheDocument();
|
||||
expect(screen.getByText('Open service overview for label')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
describe('Width regressions', () => {
|
||||
test('should consider Fields Selector width when enabled', () => {
|
||||
jest.mocked(getFieldSelectorWidth).mockClear();
|
||||
|
||||
setup({ showFieldSelector: true }, { labels: { key1: 'label1', key2: 'label2' } });
|
||||
expect(screen.getByText('Log line')).toBeInTheDocument();
|
||||
expect(screen.getByText('Fields')).toBeInTheDocument();
|
||||
expect(getFieldSelectorWidth).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('should not consider Fields Selector width when disabled', () => {
|
||||
jest.mocked(getFieldSelectorWidth).mockClear();
|
||||
|
||||
setup({ showFieldSelector: false }, { labels: { key1: 'label1', key2: 'label2' } });
|
||||
expect(screen.getByText('Log line')).toBeInTheDocument();
|
||||
expect(screen.getByText('Fields')).toBeInTheDocument();
|
||||
expect(getFieldSelectorWidth).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,7 +7,7 @@ import { t } from '@grafana/i18n';
|
||||
import { reportInteraction } from '@grafana/runtime';
|
||||
import { getDragStyles, Icon, Tab, TabsBar, useStyles2 } from '@grafana/ui';
|
||||
|
||||
import { getSidebarWidth } from '../fieldSelector/FieldSelector';
|
||||
import { getFieldSelectorWidth } from '../fieldSelector/FieldSelector';
|
||||
|
||||
import { getDetailsScrollPosition, saveDetailsScrollPosition, useLogDetailsContext } from './LogDetailsContext';
|
||||
import { LogLineDetailsComponent } from './LogLineDetailsComponent';
|
||||
@@ -22,12 +22,13 @@ export interface Props {
|
||||
timeRange: TimeRange;
|
||||
timeZone: string;
|
||||
showControls: boolean;
|
||||
showFieldSelector: boolean | undefined;
|
||||
}
|
||||
|
||||
export type LogLineDetailsMode = 'inline' | 'sidebar';
|
||||
|
||||
export const LogLineDetails = memo(
|
||||
({ containerElement, focusLogLine, logs, timeRange, timeZone, showControls }: Props) => {
|
||||
({ containerElement, focusLogLine, logs, timeRange, timeZone, showControls, showFieldSelector }: Props) => {
|
||||
const { noInteractions, logOptionsStorageKey } = useLogListContext();
|
||||
const { detailsWidth, setDetailsWidth } = useLogDetailsContext();
|
||||
const styles = useStyles2(getStyles, 'sidebar', showControls);
|
||||
@@ -48,7 +49,10 @@ export const LogLineDetails = memo(
|
||||
}
|
||||
}, [noInteractions]);
|
||||
|
||||
const maxWidth = containerElement.clientWidth - getSidebarWidth(logOptionsStorageKey) - LOG_LIST_MIN_WIDTH;
|
||||
const maxWidth =
|
||||
containerElement.clientWidth -
|
||||
(showFieldSelector ? getFieldSelectorWidth(logOptionsStorageKey) : 0) -
|
||||
LOG_LIST_MIN_WIDTH;
|
||||
|
||||
return (
|
||||
<Resizable
|
||||
|
||||
@@ -219,6 +219,7 @@ export const LogList = ({
|
||||
logs={logs}
|
||||
logOptionsStorageKey={logOptionsStorageKey}
|
||||
showControls={showControls}
|
||||
showFieldSelector={showFieldSelector}
|
||||
>
|
||||
<LogListSearchContextProvider>
|
||||
<LogListComponent
|
||||
@@ -458,6 +459,7 @@ const LogListComponent = ({
|
||||
timeRange={timeRange}
|
||||
timeZone={timeZone}
|
||||
showControls={showControls}
|
||||
showFieldSelector={showFieldSelector}
|
||||
/>
|
||||
)}
|
||||
<div className={styles.logListWrapper} ref={wrapperRef}>
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user