Compare commits
31 Commits
ash/react-
...
axelav/das
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
afdbc63250 | ||
|
|
f67bd022be | ||
|
|
ad989ae200 | ||
|
|
fe6c2cdfee | ||
|
|
84b081ce37 | ||
|
|
d4ae044801 | ||
|
|
7038ced64e | ||
|
|
f117691340 | ||
|
|
c99eb8c62e | ||
|
|
c7986976e4 | ||
|
|
e4009a42a1 | ||
|
|
06d11d739b | ||
|
|
74548dbb73 | ||
|
|
751a399b03 | ||
|
|
c9e044b2c7 | ||
|
|
92041e5a05 | ||
|
|
6ee1a6ea7f | ||
|
|
4f66b1df5a | ||
|
|
01f959be97 | ||
|
|
f81deced02 | ||
|
|
ca3bce54a8 | ||
|
|
3d3eeb4472 | ||
|
|
2b6e2c5737 | ||
|
|
306aee16a5 | ||
|
|
8319f62ef4 | ||
|
|
b8b792f78a | ||
|
|
8a0f2fa9f3 | ||
|
|
9d980a9244 | ||
|
|
e442720cdc | ||
|
|
e616d04010 | ||
|
|
8e9675ce1c |
2
.github/CODEOWNERS
vendored
2
.github/CODEOWNERS
vendored
@@ -101,6 +101,7 @@
|
||||
/apps/example/ @grafana/grafana-app-platform-squad
|
||||
/apps/logsdrilldown/ @grafana/observability-logs
|
||||
/apps/annotation/ @grafana/grafana-backend-services-squad
|
||||
/apps/dashvalidator/ @grafana/sharing-squad
|
||||
/pkg/api/ @grafana/grafana-backend-group
|
||||
/pkg/apis/ @grafana/grafana-app-platform-squad
|
||||
/pkg/apis/query @grafana/grafana-datasources-core-services
|
||||
@@ -1190,6 +1191,7 @@ embed.go @grafana/grafana-as-code
|
||||
/pkg/registry/apps/advisor @grafana/plugins-platform-backend
|
||||
/pkg/registry/apps/alerting @grafana/alerting-backend
|
||||
/pkg/registry/apps/plugins @grafana/plugins-platform-backend
|
||||
/pkg/registry/apps/dashvalidator @grafana/sharing-squad
|
||||
/pkg/codegen/ @grafana/grafana-as-code
|
||||
/pkg/codegen/generators @grafana/grafana-as-code
|
||||
/pkg/kinds/*/*_gen.go @grafana/grafana-as-code
|
||||
|
||||
@@ -107,6 +107,7 @@ COPY apps/scope apps/scope
|
||||
COPY apps/logsdrilldown apps/logsdrilldown
|
||||
COPY apps/advisor apps/advisor
|
||||
COPY apps/dashboard apps/dashboard
|
||||
COPY apps/dashvalidator apps/dashvalidator
|
||||
COPY apps/folder apps/folder
|
||||
COPY apps/iam apps/iam
|
||||
COPY apps apps
|
||||
|
||||
9
apps/dashvalidator/Makefile
Normal file
9
apps/dashvalidator/Makefile
Normal file
@@ -0,0 +1,9 @@
|
||||
include ../sdk.mk
|
||||
|
||||
.PHONY: generate # Run Grafana App SDK code generation
|
||||
generate: install-app-sdk update-app-sdk
|
||||
@$(APP_SDK_BIN) generate \
|
||||
--source=./kinds/ \
|
||||
--gogenpath=./pkg/apis \
|
||||
--grouping=group \
|
||||
--defencoding=none
|
||||
@@ -0,0 +1,228 @@
|
||||
{
|
||||
"kind": "CustomResourceDefinition",
|
||||
"apiVersion": "apiextensions.k8s.io/v1",
|
||||
"metadata": {
|
||||
"name": "dashboardcompatibilityscores.dashvalidator.ext.grafana.com"
|
||||
},
|
||||
"spec": {
|
||||
"group": "dashvalidator.ext.grafana.com",
|
||||
"versions": [
|
||||
{
|
||||
"name": "v1alpha1",
|
||||
"served": true,
|
||||
"storage": true,
|
||||
"schema": {
|
||||
"openAPIV3Schema": {
|
||||
"properties": {
|
||||
"spec": {
|
||||
"properties": {
|
||||
"dashboardJson": {
|
||||
"description": "Complete dashboard JSON object to validate.\nMust be a v1 dashboard schema (contains \"panels\" array).\nv2 dashboards (with \"elements\" structure) are not yet supported.",
|
||||
"type": "object",
|
||||
"x-kubernetes-preserve-unknown-fields": true
|
||||
},
|
||||
"datasourceMappings": {
|
||||
"description": "Array of datasources to validate against.\nThe validator will check dashboard queries against each datasource\nand provide per-datasource compatibility results.\n\nMVP: Only single datasource supported (array length = 1), Prometheus type only.\nFuture: Will support multiple datasources for dashboards with mixed queries.",
|
||||
"items": {
|
||||
"description": "DataSourceMapping specifies a datasource to validate dashboard queries against.\nMaps logical datasource references in the dashboard to actual datasource instances.",
|
||||
"properties": {
|
||||
"name": {
|
||||
"description": "Optional human-readable name for display in results.\nIf not provided, UID will be used in error messages.\nExample: \"Production Prometheus (US-West)\"",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"description": "Type of datasource plugin.\nMVP: Only \"prometheus\" supported.\nFuture: \"mysql\", \"postgres\", \"elasticsearch\", etc.",
|
||||
"type": "string"
|
||||
},
|
||||
"uid": {
|
||||
"description": "Unique identifier of the datasource instance.\nExample: \"prometheus-prod-us-west\"",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["uid", "type"],
|
||||
"type": "object"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": ["dashboardJson", "datasourceMappings"],
|
||||
"type": "object"
|
||||
},
|
||||
"status": {
|
||||
"properties": {
|
||||
"additionalFields": {
|
||||
"description": "additionalFields is reserved for future use",
|
||||
"type": "object",
|
||||
"x-kubernetes-preserve-unknown-fields": true
|
||||
},
|
||||
"compatibilityScore": {
|
||||
"description": "Overall compatibility score across all datasources (0-100).\nCalculated as: (total found metrics / total referenced metrics) * 100\n\nScore interpretation:\n- 100: Perfect compatibility, all queries will work\n- 80-99: Excellent, minor missing metrics\n- 50-79: Fair, significant missing metrics\n- 0-49: Poor, most queries will fail",
|
||||
"type": "number"
|
||||
},
|
||||
"datasourceResults": {
|
||||
"description": "Per-datasource validation results.\nArray length matches spec.datasourceMappings.\nEach element contains detailed metrics and query-level breakdown.",
|
||||
"items": {
|
||||
"description": "DataSourceResult contains validation results for a single datasource.\nProvides aggregate statistics and per-query breakdown of compatibility.",
|
||||
"properties": {
|
||||
"checkedQueries": {
|
||||
"description": "Number of queries successfully validated.\nMay be less than totalQueries if some queries couldn't be parsed.",
|
||||
"type": "integer"
|
||||
},
|
||||
"compatibilityScore": {
|
||||
"description": "Overall compatibility score for this datasource (0-100).\nCalculated as: (foundMetrics / totalMetrics) * 100\nUsed to calculate the global compatibilityScore in status.",
|
||||
"type": "number"
|
||||
},
|
||||
"foundMetrics": {
|
||||
"description": "Number of metrics that exist in the datasource schema.\nfoundMetrics \u003c= totalMetrics",
|
||||
"type": "integer"
|
||||
},
|
||||
"missingMetrics": {
|
||||
"description": "Array of metric names that were referenced but don't exist.\nUseful for debugging why a dashboard shows \"no data\".\nExample for Prometheus: [\"http_requests_total\", \"api_latency_seconds\"]",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"name": {
|
||||
"description": "Optional display name (matches DataSourceMapping.name if provided)",
|
||||
"type": "string"
|
||||
},
|
||||
"queryBreakdown": {
|
||||
"description": "Per-query breakdown showing which specific queries have issues.\nOne entry per query target (refId: \"A\", \"B\", \"C\", etc.) in each panel.\nAllows pinpointing exactly which panel/query needs fixing.",
|
||||
"items": {
|
||||
"description": "QueryBreakdown provides compatibility details for a single query within a panel.\nGranular per-query results allow users to identify exactly which queries need fixing.\n\nNote: A panel can have multiple queries (refId: \"A\", \"B\", \"C\", etc.),\nso there may be multiple QueryBreakdown entries for the same panelID.",
|
||||
"properties": {
|
||||
"compatibilityScore": {
|
||||
"description": "Compatibility percentage for this individual query (0-100).\nCalculated as: (foundMetrics / totalMetrics) * 100\n100 = query will work perfectly, 0 = query will return no data.",
|
||||
"type": "number"
|
||||
},
|
||||
"foundMetrics": {
|
||||
"description": "Number of those metrics that exist in the datasource.\nfoundMetrics \u003c= totalMetrics",
|
||||
"type": "integer"
|
||||
},
|
||||
"missingMetrics": {
|
||||
"description": "Array of missing metric names specific to this query.\nHelps identify exactly which part of a query expression will fail.\nEmpty array means query is fully compatible.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"panelID": {
|
||||
"description": "Numeric panel ID from dashboard JSON.\nUsed to correlate with dashboard structure.",
|
||||
"type": "integer"
|
||||
},
|
||||
"panelTitle": {
|
||||
"description": "Human-readable panel title for context.\nExample: \"CPU Usage\", \"Request Rate\"",
|
||||
"type": "string"
|
||||
},
|
||||
"queryRefId": {
|
||||
"description": "Query identifier within the panel.\nValues: \"A\", \"B\", \"C\", etc. (from panel.targets[].refId)\nUniquely identifies which query in a multi-query panel this refers to.",
|
||||
"type": "string"
|
||||
},
|
||||
"totalMetrics": {
|
||||
"description": "Number of unique metrics referenced in this specific query.\nFor Prometheus: metrics extracted from the PromQL expr.\nExample: rate(http_requests_total[5m]) references 1 metric.",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"panelTitle",
|
||||
"panelID",
|
||||
"queryRefId",
|
||||
"totalMetrics",
|
||||
"foundMetrics",
|
||||
"missingMetrics",
|
||||
"compatibilityScore"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"totalMetrics": {
|
||||
"description": "Total number of unique metrics/identifiers referenced across all queries.\nFor Prometheus: metric names extracted from PromQL expressions.\nFor SQL datasources: table and column names.",
|
||||
"type": "integer"
|
||||
},
|
||||
"totalQueries": {
|
||||
"description": "Total number of queries in the dashboard targeting this datasource.\nIncludes all panel targets/queries that reference this datasource.",
|
||||
"type": "integer"
|
||||
},
|
||||
"type": {
|
||||
"description": "Datasource type (matches DataSourceMapping.type)",
|
||||
"type": "string"
|
||||
},
|
||||
"uid": {
|
||||
"description": "Datasource UID that was validated (matches DataSourceMapping.uid)",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"uid",
|
||||
"type",
|
||||
"totalQueries",
|
||||
"checkedQueries",
|
||||
"totalMetrics",
|
||||
"foundMetrics",
|
||||
"missingMetrics",
|
||||
"queryBreakdown",
|
||||
"compatibilityScore"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"lastChecked": {
|
||||
"description": "ISO 8601 timestamp of when validation was last performed.\nExample: \"2024-01-15T10:30:00Z\"",
|
||||
"type": "string"
|
||||
},
|
||||
"message": {
|
||||
"description": "Human-readable summary of validation result.\nExamples: \"All queries compatible\", \"3 missing metrics found\"",
|
||||
"type": "string"
|
||||
},
|
||||
"operatorStates": {
|
||||
"additionalProperties": {
|
||||
"properties": {
|
||||
"descriptiveState": {
|
||||
"description": "descriptiveState is an optional more descriptive state field which has no requirements on format",
|
||||
"type": "string"
|
||||
},
|
||||
"details": {
|
||||
"description": "details contains any extra information that is operator-specific",
|
||||
"type": "object",
|
||||
"x-kubernetes-preserve-unknown-fields": true
|
||||
},
|
||||
"lastEvaluation": {
|
||||
"description": "lastEvaluation is the ResourceVersion last evaluated",
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"description": "state describes the state of the lastEvaluation.\nIt is limited to three possible states for machine evaluation.",
|
||||
"enum": ["success", "in_progress", "failed"],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["lastEvaluation", "state"],
|
||||
"type": "object"
|
||||
},
|
||||
"description": "operatorStates is a map of operator ID to operator state evaluations.\nAny operator which consumes this kind SHOULD add its state evaluation information to this field.",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": ["compatibilityScore", "datasourceResults"],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": ["spec"],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"subresources": {
|
||||
"status": {}
|
||||
}
|
||||
}
|
||||
],
|
||||
"names": {
|
||||
"kind": "DashboardCompatibilityScore",
|
||||
"plural": "dashboardcompatibilityscores"
|
||||
},
|
||||
"scope": "Namespaced"
|
||||
}
|
||||
}
|
||||
223
apps/dashvalidator/definitions/dashvalidator-manifest.json
Normal file
223
apps/dashvalidator/definitions/dashvalidator-manifest.json
Normal file
@@ -0,0 +1,223 @@
|
||||
{
|
||||
"apiVersion": "apps.grafana.com/v1alpha1",
|
||||
"kind": "AppManifest",
|
||||
"metadata": {
|
||||
"name": "dashvalidator"
|
||||
},
|
||||
"spec": {
|
||||
"appName": "dashvalidator",
|
||||
"group": "dashvalidator.ext.grafana.com",
|
||||
"versions": [
|
||||
{
|
||||
"name": "v1alpha1",
|
||||
"served": true,
|
||||
"kinds": [
|
||||
{
|
||||
"kind": "DashboardCompatibilityScore",
|
||||
"plural": "DashboardCompatibilityScores",
|
||||
"scope": "Namespaced",
|
||||
"schema": {
|
||||
"spec": {
|
||||
"properties": {
|
||||
"dashboardJson": {
|
||||
"description": "Complete dashboard JSON object to validate.\nMust be a v1 dashboard schema (contains \"panels\" array).\nv2 dashboards (with \"elements\" structure) are not yet supported.",
|
||||
"type": "object",
|
||||
"x-kubernetes-preserve-unknown-fields": true
|
||||
},
|
||||
"datasourceMappings": {
|
||||
"description": "Array of datasources to validate against.\nThe validator will check dashboard queries against each datasource\nand provide per-datasource compatibility results.\n\nMVP: Only single datasource supported (array length = 1), Prometheus type only.\nFuture: Will support multiple datasources for dashboards with mixed queries.",
|
||||
"items": {
|
||||
"description": "DataSourceMapping specifies a datasource to validate dashboard queries against.\nMaps logical datasource references in the dashboard to actual datasource instances.",
|
||||
"properties": {
|
||||
"name": {
|
||||
"description": "Optional human-readable name for display in results.\nIf not provided, UID will be used in error messages.\nExample: \"Production Prometheus (US-West)\"",
|
||||
"type": "string"
|
||||
},
|
||||
"type": {
|
||||
"description": "Type of datasource plugin.\nMVP: Only \"prometheus\" supported.\nFuture: \"mysql\", \"postgres\", \"elasticsearch\", etc.",
|
||||
"type": "string"
|
||||
},
|
||||
"uid": {
|
||||
"description": "Unique identifier of the datasource instance.\nExample: \"prometheus-prod-us-west\"",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["uid", "type"],
|
||||
"type": "object"
|
||||
},
|
||||
"type": "array"
|
||||
}
|
||||
},
|
||||
"required": ["dashboardJson", "datasourceMappings"],
|
||||
"type": "object"
|
||||
},
|
||||
"status": {
|
||||
"properties": {
|
||||
"additionalFields": {
|
||||
"description": "additionalFields is reserved for future use",
|
||||
"type": "object",
|
||||
"x-kubernetes-preserve-unknown-fields": true
|
||||
},
|
||||
"compatibilityScore": {
|
||||
"description": "Overall compatibility score across all datasources (0-100).\nCalculated as: (total found metrics / total referenced metrics) * 100\n\nScore interpretation:\n- 100: Perfect compatibility, all queries will work\n- 80-99: Excellent, minor missing metrics\n- 50-79: Fair, significant missing metrics\n- 0-49: Poor, most queries will fail",
|
||||
"type": "number"
|
||||
},
|
||||
"datasourceResults": {
|
||||
"description": "Per-datasource validation results.\nArray length matches spec.datasourceMappings.\nEach element contains detailed metrics and query-level breakdown.",
|
||||
"items": {
|
||||
"description": "DataSourceResult contains validation results for a single datasource.\nProvides aggregate statistics and per-query breakdown of compatibility.",
|
||||
"properties": {
|
||||
"checkedQueries": {
|
||||
"description": "Number of queries successfully validated.\nMay be less than totalQueries if some queries couldn't be parsed.",
|
||||
"type": "integer"
|
||||
},
|
||||
"compatibilityScore": {
|
||||
"description": "Overall compatibility score for this datasource (0-100).\nCalculated as: (foundMetrics / totalMetrics) * 100\nUsed to calculate the global compatibilityScore in status.",
|
||||
"type": "number"
|
||||
},
|
||||
"foundMetrics": {
|
||||
"description": "Number of metrics that exist in the datasource schema.\nfoundMetrics \u003c= totalMetrics",
|
||||
"type": "integer"
|
||||
},
|
||||
"missingMetrics": {
|
||||
"description": "Array of metric names that were referenced but don't exist.\nUseful for debugging why a dashboard shows \"no data\".\nExample for Prometheus: [\"http_requests_total\", \"api_latency_seconds\"]",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"name": {
|
||||
"description": "Optional display name (matches DataSourceMapping.name if provided)",
|
||||
"type": "string"
|
||||
},
|
||||
"queryBreakdown": {
|
||||
"description": "Per-query breakdown showing which specific queries have issues.\nOne entry per query target (refId: \"A\", \"B\", \"C\", etc.) in each panel.\nAllows pinpointing exactly which panel/query needs fixing.",
|
||||
"items": {
|
||||
"description": "QueryBreakdown provides compatibility details for a single query within a panel.\nGranular per-query results allow users to identify exactly which queries need fixing.\n\nNote: A panel can have multiple queries (refId: \"A\", \"B\", \"C\", etc.),\nso there may be multiple QueryBreakdown entries for the same panelID.",
|
||||
"properties": {
|
||||
"compatibilityScore": {
|
||||
"description": "Compatibility percentage for this individual query (0-100).\nCalculated as: (foundMetrics / totalMetrics) * 100\n100 = query will work perfectly, 0 = query will return no data.",
|
||||
"type": "number"
|
||||
},
|
||||
"foundMetrics": {
|
||||
"description": "Number of those metrics that exist in the datasource.\nfoundMetrics \u003c= totalMetrics",
|
||||
"type": "integer"
|
||||
},
|
||||
"missingMetrics": {
|
||||
"description": "Array of missing metric names specific to this query.\nHelps identify exactly which part of a query expression will fail.\nEmpty array means query is fully compatible.",
|
||||
"items": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"panelID": {
|
||||
"description": "Numeric panel ID from dashboard JSON.\nUsed to correlate with dashboard structure.",
|
||||
"type": "integer"
|
||||
},
|
||||
"panelTitle": {
|
||||
"description": "Human-readable panel title for context.\nExample: \"CPU Usage\", \"Request Rate\"",
|
||||
"type": "string"
|
||||
},
|
||||
"queryRefId": {
|
||||
"description": "Query identifier within the panel.\nValues: \"A\", \"B\", \"C\", etc. (from panel.targets[].refId)\nUniquely identifies which query in a multi-query panel this refers to.",
|
||||
"type": "string"
|
||||
},
|
||||
"totalMetrics": {
|
||||
"description": "Number of unique metrics referenced in this specific query.\nFor Prometheus: metrics extracted from the PromQL expr.\nExample: rate(http_requests_total[5m]) references 1 metric.",
|
||||
"type": "integer"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"panelTitle",
|
||||
"panelID",
|
||||
"queryRefId",
|
||||
"totalMetrics",
|
||||
"foundMetrics",
|
||||
"missingMetrics",
|
||||
"compatibilityScore"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"totalMetrics": {
|
||||
"description": "Total number of unique metrics/identifiers referenced across all queries.\nFor Prometheus: metric names extracted from PromQL expressions.\nFor SQL datasources: table and column names.",
|
||||
"type": "integer"
|
||||
},
|
||||
"totalQueries": {
|
||||
"description": "Total number of queries in the dashboard targeting this datasource.\nIncludes all panel targets/queries that reference this datasource.",
|
||||
"type": "integer"
|
||||
},
|
||||
"type": {
|
||||
"description": "Datasource type (matches DataSourceMapping.type)",
|
||||
"type": "string"
|
||||
},
|
||||
"uid": {
|
||||
"description": "Datasource UID that was validated (matches DataSourceMapping.uid)",
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"uid",
|
||||
"type",
|
||||
"totalQueries",
|
||||
"checkedQueries",
|
||||
"totalMetrics",
|
||||
"foundMetrics",
|
||||
"missingMetrics",
|
||||
"queryBreakdown",
|
||||
"compatibilityScore"
|
||||
],
|
||||
"type": "object"
|
||||
},
|
||||
"type": "array"
|
||||
},
|
||||
"lastChecked": {
|
||||
"description": "ISO 8601 timestamp of when validation was last performed.\nExample: \"2024-01-15T10:30:00Z\"",
|
||||
"type": "string"
|
||||
},
|
||||
"message": {
|
||||
"description": "Human-readable summary of validation result.\nExamples: \"All queries compatible\", \"3 missing metrics found\"",
|
||||
"type": "string"
|
||||
},
|
||||
"operatorStates": {
|
||||
"additionalProperties": {
|
||||
"properties": {
|
||||
"descriptiveState": {
|
||||
"description": "descriptiveState is an optional more descriptive state field which has no requirements on format",
|
||||
"type": "string"
|
||||
},
|
||||
"details": {
|
||||
"description": "details contains any extra information that is operator-specific",
|
||||
"type": "object",
|
||||
"x-kubernetes-preserve-unknown-fields": true
|
||||
},
|
||||
"lastEvaluation": {
|
||||
"description": "lastEvaluation is the ResourceVersion last evaluated",
|
||||
"type": "string"
|
||||
},
|
||||
"state": {
|
||||
"description": "state describes the state of the lastEvaluation.\nIt is limited to three possible states for machine evaluation.",
|
||||
"enum": ["success", "in_progress", "failed"],
|
||||
"type": "string"
|
||||
}
|
||||
},
|
||||
"required": ["lastEvaluation", "state"],
|
||||
"type": "object"
|
||||
},
|
||||
"description": "operatorStates is a map of operator ID to operator state evaluations.\nAny operator which consumes this kind SHOULD add its state evaluation information to this field.",
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"required": ["compatibilityScore", "datasourceResults"],
|
||||
"type": "object"
|
||||
}
|
||||
},
|
||||
"conversion": false
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"preferredVersion": "v1alpha1"
|
||||
}
|
||||
}
|
||||
247
apps/dashvalidator/go.mod
Normal file
247
apps/dashvalidator/go.mod
Normal file
@@ -0,0 +1,247 @@
|
||||
module github.com/grafana/grafana/apps/dashvalidator
|
||||
|
||||
go 1.25.5
|
||||
|
||||
require (
|
||||
github.com/grafana/grafana v0.0.0-00010101000000-000000000000
|
||||
github.com/grafana/grafana-app-sdk v0.48.7
|
||||
github.com/grafana/grafana-app-sdk/logging v0.48.7
|
||||
github.com/prometheus/prometheus v0.303.1
|
||||
k8s.io/apimachinery v0.34.3
|
||||
k8s.io/kube-openapi v0.0.0-20251125145642-4e65d59e963e
|
||||
)
|
||||
|
||||
require (
|
||||
filippo.io/edwards25519 v1.1.0 // indirect
|
||||
github.com/Machiel/slugify v1.0.1 // indirect
|
||||
github.com/ProtonMail/go-crypto v1.1.6 // indirect
|
||||
github.com/VividCortex/mysqlerr v0.0.0-20170204212430-6c6b55f8796f // indirect
|
||||
github.com/antlr4-go/antlr/v4 v4.13.1 // indirect
|
||||
github.com/apache/arrow-go/v18 v18.4.1 // indirect
|
||||
github.com/armon/go-metrics v0.4.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2 v1.39.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.8 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.8 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 // indirect
|
||||
github.com/aws/smithy-go v1.23.1 // indirect
|
||||
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
github.com/blang/semver v3.5.1+incompatible // indirect
|
||||
github.com/blang/semver/v4 v4.0.0 // indirect
|
||||
github.com/bluele/gcache v0.0.2 // indirect
|
||||
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf // indirect
|
||||
github.com/bwmarrin/snowflake v0.3.0 // indirect
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
|
||||
github.com/cespare/xxhash/v2 v2.3.0 // indirect
|
||||
github.com/cheekybits/genny v1.0.0 // indirect
|
||||
github.com/cloudflare/circl v1.6.1 // indirect
|
||||
github.com/coreos/go-systemd/v22 v22.6.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dennwc/varint v1.0.0 // indirect
|
||||
github.com/diegoholiveira/jsonlogic/v3 v3.7.4 // indirect
|
||||
github.com/dolthub/go-icu-regex v0.0.0-20250916051405-78a38d478790 // indirect
|
||||
github.com/dustin/go-humanize v1.0.1 // indirect
|
||||
github.com/emicklei/go-restful/v3 v3.13.0 // indirect
|
||||
github.com/fatih/color v1.18.0 // indirect
|
||||
github.com/fxamacker/cbor/v2 v2.9.0 // indirect
|
||||
github.com/getkin/kin-openapi v0.133.0 // indirect
|
||||
github.com/go-jose/go-jose/v4 v4.1.3 // indirect
|
||||
github.com/go-kit/log v0.2.1 // indirect
|
||||
github.com/go-logfmt/logfmt v0.6.1 // indirect
|
||||
github.com/go-logr/logr v1.4.3 // indirect
|
||||
github.com/go-logr/stdr v1.2.2 // indirect
|
||||
github.com/go-openapi/jsonpointer v0.22.4 // indirect
|
||||
github.com/go-openapi/jsonreference v0.21.4 // indirect
|
||||
github.com/go-openapi/swag v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/cmdutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/conv v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/fileutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/jsonname v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/jsonutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/loading v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/mangling v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/netutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/stringutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/typeutils v0.25.4 // indirect
|
||||
github.com/go-openapi/swag/yamlutils v0.25.4 // indirect
|
||||
github.com/go-sql-driver/mysql v1.9.3 // indirect
|
||||
github.com/go-stack/stack v1.8.1 // indirect
|
||||
github.com/gobwas/glob v0.2.3 // indirect
|
||||
github.com/goccy/go-json v0.10.5 // indirect
|
||||
github.com/gogo/googleapis v1.4.1 // indirect
|
||||
github.com/gogo/protobuf v1.3.2 // indirect
|
||||
github.com/golang-jwt/jwt/v5 v5.3.0 // indirect
|
||||
github.com/golang-migrate/migrate/v4 v4.7.0 // indirect
|
||||
github.com/golang/protobuf v1.5.4 // indirect
|
||||
github.com/google/btree v1.1.3 // indirect
|
||||
github.com/google/flatbuffers v25.2.10+incompatible // indirect
|
||||
github.com/google/gnostic-models v0.7.1 // indirect
|
||||
github.com/google/go-cmp v0.7.0 // indirect
|
||||
github.com/google/uuid v1.6.0 // indirect
|
||||
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
|
||||
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
|
||||
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
|
||||
github.com/grafana/dataplane/sdata v0.0.9 // indirect
|
||||
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // indirect
|
||||
github.com/grafana/grafana-aws-sdk v1.3.0 // indirect
|
||||
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 // indirect
|
||||
github.com/grafana/grafana-plugin-sdk-go v0.284.0 // indirect
|
||||
github.com/grafana/grafana/pkg/apimachinery v0.0.0 // indirect
|
||||
github.com/grafana/grafana/pkg/apiserver v0.0.0 // indirect
|
||||
github.com/grafana/grafana/pkg/semconv v0.0.0-20250804150913-990f1c69ecc2 // indirect
|
||||
github.com/grafana/otel-profiling-go v0.5.1 // indirect
|
||||
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 // indirect
|
||||
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
|
||||
github.com/grafana/sqlds/v4 v4.2.7 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 // indirect
|
||||
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // indirect
|
||||
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.3 // indirect
|
||||
github.com/hashicorp/errwrap v1.1.0 // indirect
|
||||
github.com/hashicorp/go-hclog v1.6.3 // indirect
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
|
||||
github.com/hashicorp/go-metrics v0.5.4 // indirect
|
||||
github.com/hashicorp/go-msgpack/v2 v2.1.2 // indirect
|
||||
github.com/hashicorp/go-multierror v1.1.1 // indirect
|
||||
github.com/hashicorp/go-plugin v1.7.0 // indirect
|
||||
github.com/hashicorp/go-sockaddr v1.0.7 // indirect
|
||||
github.com/hashicorp/golang-lru v1.0.2 // indirect
|
||||
github.com/hashicorp/golang-lru/v2 v2.0.7 // indirect
|
||||
github.com/hashicorp/memberlist v0.5.2 // indirect
|
||||
github.com/hashicorp/yamux v0.1.2 // indirect
|
||||
github.com/jaegertracing/jaeger-idl v0.5.0 // indirect
|
||||
github.com/jmespath-community/go-jmespath v1.1.1 // indirect
|
||||
github.com/jmoiron/sqlx v1.4.0 // indirect
|
||||
github.com/josharian/intern v1.0.0 // indirect
|
||||
github.com/jpillora/backoff v1.0.0 // indirect
|
||||
github.com/json-iterator/go v1.1.12 // indirect
|
||||
github.com/jszwedko/go-datemath v0.1.1-0.20230526204004-640a500621d6 // indirect
|
||||
github.com/klauspost/compress v1.18.0 // indirect
|
||||
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
|
||||
github.com/lib/pq v1.10.9 // indirect
|
||||
github.com/mailru/easyjson v0.9.1 // indirect
|
||||
github.com/mattetti/filebuffer v1.0.1 // indirect
|
||||
github.com/mattn/go-colorable v0.1.14 // indirect
|
||||
github.com/mattn/go-isatty v0.0.20 // indirect
|
||||
github.com/mattn/go-runewidth v0.0.16 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.32 // indirect
|
||||
github.com/mdlayher/socket v0.4.1 // indirect
|
||||
github.com/mdlayher/vsock v1.2.1 // indirect
|
||||
github.com/miekg/dns v1.1.63 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/mithrandie/csvq v1.18.1 // indirect
|
||||
github.com/mithrandie/csvq-driver v1.7.0 // indirect
|
||||
github.com/mithrandie/go-file/v2 v2.1.0 // indirect
|
||||
github.com/mithrandie/go-text v1.6.0 // indirect
|
||||
github.com/mithrandie/ternary v1.1.1 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.3-0.20250322232337-35a7c28c31ee // indirect
|
||||
github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 // indirect
|
||||
github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f // indirect
|
||||
github.com/ncruces/go-strftime v0.1.9 // indirect
|
||||
github.com/nikunjy/rules v1.5.0 // indirect
|
||||
github.com/oasdiff/yaml v0.0.0-20250309154309-f31be36b4037 // indirect
|
||||
github.com/oasdiff/yaml3 v0.0.0-20250309153720-d2182401db90 // indirect
|
||||
github.com/oklog/run v1.1.0 // indirect
|
||||
github.com/oklog/ulid v1.3.1 // indirect
|
||||
github.com/olekukonko/tablewriter v0.0.5 // indirect
|
||||
github.com/open-feature/go-sdk v1.16.0 // indirect
|
||||
github.com/open-feature/go-sdk-contrib/providers/go-feature-flag v0.2.6 // indirect
|
||||
github.com/open-feature/go-sdk-contrib/providers/ofrep v0.1.6 // indirect
|
||||
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
|
||||
github.com/perimeterx/marshmallow v1.1.5 // indirect
|
||||
github.com/pierrec/lz4/v4 v4.1.22 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
|
||||
github.com/prometheus/alertmanager v0.28.2 // indirect
|
||||
github.com/prometheus/client_golang v1.23.2 // indirect
|
||||
github.com/prometheus/client_model v0.6.2 // indirect
|
||||
github.com/prometheus/common v0.67.4 // indirect
|
||||
github.com/prometheus/exporter-toolkit v0.14.0 // indirect
|
||||
github.com/prometheus/procfs v0.19.2 // indirect
|
||||
github.com/puzpuzpuz/xsync/v2 v2.5.1 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
|
||||
github.com/rivo/uniseg v0.4.7 // indirect
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect
|
||||
github.com/spf13/pflag v1.0.10 // indirect
|
||||
github.com/stretchr/objx v0.5.2 // indirect
|
||||
github.com/stretchr/testify v1.11.1 // indirect
|
||||
github.com/thomaspoignant/go-feature-flag v1.42.0 // indirect
|
||||
github.com/tjhop/slog-gokit v0.1.5 // indirect
|
||||
github.com/woodsbury/decimal128 v1.4.0 // indirect
|
||||
github.com/x448/float16 v0.8.4 // indirect
|
||||
github.com/zeebo/xxh3 v1.0.2 // indirect
|
||||
go.opentelemetry.io/auto/sdk v1.2.1 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.64.0 // indirect
|
||||
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // indirect
|
||||
go.opentelemetry.io/contrib/propagators/jaeger v1.38.0 // indirect
|
||||
go.opentelemetry.io/contrib/samplers/jaegerremote v0.32.0 // indirect
|
||||
go.opentelemetry.io/otel v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/jaeger v1.17.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/metric v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/sdk v1.39.0 // indirect
|
||||
go.opentelemetry.io/otel/trace v1.39.0 // indirect
|
||||
go.opentelemetry.io/proto/otlp v1.9.0 // indirect
|
||||
go.uber.org/atomic v1.11.0 // indirect
|
||||
go.uber.org/mock v0.6.0 // indirect
|
||||
go.yaml.in/yaml/v2 v2.4.3 // indirect
|
||||
go.yaml.in/yaml/v3 v3.0.4 // indirect
|
||||
golang.org/x/crypto v0.46.0 // indirect
|
||||
golang.org/x/exp v0.0.0-20251209150349-8475f28825e9 // indirect
|
||||
golang.org/x/mod v0.31.0 // indirect
|
||||
golang.org/x/net v0.48.0 // indirect
|
||||
golang.org/x/oauth2 v0.34.0 // indirect
|
||||
golang.org/x/sync v0.19.0 // indirect
|
||||
golang.org/x/sys v0.39.0 // indirect
|
||||
golang.org/x/telemetry v0.0.0-20251203150158-8fff8a5912fc // indirect
|
||||
golang.org/x/term v0.38.0 // indirect
|
||||
golang.org/x/text v0.32.0 // indirect
|
||||
golang.org/x/time v0.14.0 // indirect
|
||||
golang.org/x/tools v0.40.0 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect
|
||||
gomodules.xyz/jsonpatch/v2 v2.5.0 // indirect
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20251213004720-97cd9d5aeac2 // indirect
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20251213004720-97cd9d5aeac2 // indirect
|
||||
google.golang.org/grpc v1.77.0 // indirect
|
||||
google.golang.org/protobuf v1.36.11 // indirect
|
||||
gopkg.in/inf.v0 v0.9.1 // indirect
|
||||
gopkg.in/ini.v1 v1.67.0 // indirect
|
||||
gopkg.in/yaml.v2 v2.4.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.1 // indirect
|
||||
k8s.io/api v0.34.3 // indirect
|
||||
k8s.io/apiextensions-apiserver v0.34.3 // indirect
|
||||
k8s.io/apiserver v0.34.3 // indirect
|
||||
k8s.io/client-go v0.34.3 // indirect
|
||||
k8s.io/component-base v0.34.3 // indirect
|
||||
k8s.io/klog/v2 v2.130.1 // indirect
|
||||
k8s.io/utils v0.0.0-20251002143259-bc988d571ff4 // indirect
|
||||
modernc.org/libc v1.66.10 // indirect
|
||||
modernc.org/mathutil v1.7.1 // indirect
|
||||
modernc.org/memory v1.11.0 // indirect
|
||||
modernc.org/sqlite v1.40.1 // indirect
|
||||
sigs.k8s.io/json v0.0.0-20250730193827-2d320260d730 // indirect
|
||||
sigs.k8s.io/randfill v1.0.0 // indirect
|
||||
sigs.k8s.io/structured-merge-diff/v6 v6.3.1 // indirect
|
||||
sigs.k8s.io/yaml v1.6.0 // indirect
|
||||
xorm.io/builder v0.3.13 // indirect
|
||||
)
|
||||
|
||||
// transitive dependencies that need replaced
|
||||
// TODO: stop depending on grafana core
|
||||
replace github.com/grafana/grafana => ../..
|
||||
|
||||
replace github.com/grafana/grafana/pkg/apimachinery => ../../pkg/apimachinery
|
||||
|
||||
replace github.com/grafana/grafana/pkg/apiserver => ../../pkg/apiserver
|
||||
|
||||
replace github.com/grafana/grafana/apps/dashboard => ../dashboard
|
||||
|
||||
replace github.com/grafana/grafana/apps/provisioning => ../provisioning
|
||||
|
||||
replace github.com/prometheus/alertmanager => github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604
|
||||
1019
apps/dashvalidator/go.sum
Normal file
1019
apps/dashvalidator/go.sum
Normal file
File diff suppressed because it is too large
Load Diff
2
apps/dashvalidator/kinds/cue.mod/module.cue
Normal file
2
apps/dashvalidator/kinds/cue.mod/module.cue
Normal file
@@ -0,0 +1,2 @@
|
||||
module: "github.com/grafana/grafana/apps/dashvalidator/kinds"
|
||||
language: version: "v0.8.2"
|
||||
157
apps/dashvalidator/kinds/dashboardcompatibilityscore.cue
Normal file
157
apps/dashvalidator/kinds/dashboardcompatibilityscore.cue
Normal file
@@ -0,0 +1,157 @@
|
||||
package kinds
|
||||
|
||||
// DashboardCompatibilityScore validates whether a dashboard's queries
|
||||
// are compatible with the target datasource schema.
|
||||
//
|
||||
// This resource checks if metrics, tables, or other identifiers referenced
|
||||
// in dashboard queries actually exist in the configured datasources,
|
||||
// helping users identify dashboards that will show "no data" before deployment.
|
||||
//
|
||||
// MVP: Prometheus datasource only; architecture supports future datasource types.
|
||||
dashboardcompatibilityscorev0alpha1: {
|
||||
kind: "DashboardCompatibilityScore"
|
||||
plural: "dashboardcompatibilityscores"
|
||||
scope: "Namespaced"
|
||||
schema: {
|
||||
spec: {
|
||||
// Complete dashboard JSON object to validate.
|
||||
// Must be a v1 dashboard schema (contains "panels" array).
|
||||
// v2 dashboards (with "elements" structure) are not yet supported.
|
||||
dashboardJson: {...}
|
||||
|
||||
// Array of datasources to validate against.
|
||||
// The validator will check dashboard queries against each datasource
|
||||
// and provide per-datasource compatibility results.
|
||||
//
|
||||
// MVP: Only single datasource supported (array length = 1), Prometheus type only.
|
||||
// Future: Will support multiple datasources for dashboards with mixed queries.
|
||||
datasourceMappings: [...#DataSourceMapping]
|
||||
}
|
||||
status: {
|
||||
// Overall compatibility score across all datasources (0-100).
|
||||
// Calculated as: (total found metrics / total referenced metrics) * 100
|
||||
//
|
||||
// Score interpretation:
|
||||
// - 100: Perfect compatibility, all queries will work
|
||||
// - 80-99: Excellent, minor missing metrics
|
||||
// - 50-79: Fair, significant missing metrics
|
||||
// - 0-49: Poor, most queries will fail
|
||||
compatibilityScore: float64
|
||||
|
||||
// Per-datasource validation results.
|
||||
// Array length matches spec.datasourceMappings.
|
||||
// Each element contains detailed metrics and query-level breakdown.
|
||||
datasourceResults: [...#DataSourceResult]
|
||||
|
||||
// ISO 8601 timestamp of when validation was last performed.
|
||||
// Example: "2024-01-15T10:30:00Z"
|
||||
lastChecked?: string
|
||||
|
||||
// Human-readable summary of validation result.
|
||||
// Examples: "All queries compatible", "3 missing metrics found"
|
||||
message?: string
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// DataSourceMapping specifies a datasource to validate dashboard queries against.
|
||||
// Maps logical datasource references in the dashboard to actual datasource instances.
|
||||
#DataSourceMapping: {
|
||||
// Unique identifier of the datasource instance.
|
||||
// Example: "prometheus-prod-us-west"
|
||||
uid: string
|
||||
|
||||
// Type of datasource plugin.
|
||||
// MVP: Only "prometheus" supported.
|
||||
// Future: "mysql", "postgres", "elasticsearch", etc.
|
||||
type: string
|
||||
|
||||
// Optional human-readable name for display in results.
|
||||
// If not provided, UID will be used in error messages.
|
||||
// Example: "Production Prometheus (US-West)"
|
||||
name?: string
|
||||
}
|
||||
|
||||
// DataSourceResult contains validation results for a single datasource.
|
||||
// Provides aggregate statistics and per-query breakdown of compatibility.
|
||||
#DataSourceResult: {
|
||||
// Datasource UID that was validated (matches DataSourceMapping.uid)
|
||||
uid: string
|
||||
|
||||
// Datasource type (matches DataSourceMapping.type)
|
||||
type: string
|
||||
|
||||
// Optional display name (matches DataSourceMapping.name if provided)
|
||||
name?: string
|
||||
|
||||
// Total number of queries in the dashboard targeting this datasource.
|
||||
// Includes all panel targets/queries that reference this datasource.
|
||||
totalQueries: int
|
||||
|
||||
// Number of queries successfully validated.
|
||||
// May be less than totalQueries if some queries couldn't be parsed.
|
||||
checkedQueries: int
|
||||
|
||||
// Total number of unique metrics/identifiers referenced across all queries.
|
||||
// For Prometheus: metric names extracted from PromQL expressions.
|
||||
// For SQL datasources: table and column names.
|
||||
totalMetrics: int
|
||||
|
||||
// Number of metrics that exist in the datasource schema.
|
||||
// foundMetrics <= totalMetrics
|
||||
foundMetrics: int
|
||||
|
||||
// Array of metric names that were referenced but don't exist.
|
||||
// Useful for debugging why a dashboard shows "no data".
|
||||
// Example for Prometheus: ["http_requests_total", "api_latency_seconds"]
|
||||
missingMetrics: [...string]
|
||||
|
||||
// Per-query breakdown showing which specific queries have issues.
|
||||
// One entry per query target (refId: "A", "B", "C", etc.) in each panel.
|
||||
// Allows pinpointing exactly which panel/query needs fixing.
|
||||
queryBreakdown: [...#QueryBreakdown]
|
||||
|
||||
// Overall compatibility score for this datasource (0-100).
|
||||
// Calculated as: (foundMetrics / totalMetrics) * 100
|
||||
// Used to calculate the global compatibilityScore in status.
|
||||
compatibilityScore: float64
|
||||
}
|
||||
|
||||
// QueryBreakdown provides compatibility details for a single query within a panel.
|
||||
// Granular per-query results allow users to identify exactly which queries need fixing.
|
||||
//
|
||||
// Note: A panel can have multiple queries (refId: "A", "B", "C", etc.),
|
||||
// so there may be multiple QueryBreakdown entries for the same panelID.
|
||||
#QueryBreakdown: {
|
||||
// Human-readable panel title for context.
|
||||
// Example: "CPU Usage", "Request Rate"
|
||||
panelTitle: string
|
||||
|
||||
// Numeric panel ID from dashboard JSON.
|
||||
// Used to correlate with dashboard structure.
|
||||
panelID: int
|
||||
|
||||
// Query identifier within the panel.
|
||||
// Values: "A", "B", "C", etc. (from panel.targets[].refId)
|
||||
// Uniquely identifies which query in a multi-query panel this refers to.
|
||||
queryRefId: string
|
||||
|
||||
// Number of unique metrics referenced in this specific query.
|
||||
// For Prometheus: metrics extracted from the PromQL expr.
|
||||
// Example: rate(http_requests_total[5m]) references 1 metric.
|
||||
totalMetrics: int
|
||||
|
||||
// Number of those metrics that exist in the datasource.
|
||||
// foundMetrics <= totalMetrics
|
||||
foundMetrics: int
|
||||
|
||||
// Array of missing metric names specific to this query.
|
||||
// Helps identify exactly which part of a query expression will fail.
|
||||
// Empty array means query is fully compatible.
|
||||
missingMetrics: [...string]
|
||||
|
||||
// Compatibility percentage for this individual query (0-100).
|
||||
// Calculated as: (foundMetrics / totalMetrics) * 100
|
||||
// 100 = query will work perfectly, 0 = query will return no data.
|
||||
compatibilityScore: float64
|
||||
}
|
||||
110
apps/dashvalidator/kinds/manifest.cue
Normal file
110
apps/dashvalidator/kinds/manifest.cue
Normal file
@@ -0,0 +1,110 @@
|
||||
package kinds
|
||||
|
||||
manifest: {
|
||||
// appName is the unique name of your app. It is used to reference the app from other config objects,
|
||||
// and to generate the group used by your app in the app platform API.
|
||||
appName: "dashvalidator"
|
||||
// groupOverride can be used to specify a non-appName-based API group.
|
||||
// By default, an app's API group is LOWER(REPLACE(appName, '-', '')).ext.grafana.com,
|
||||
// but there are cases where this needs to be changed.
|
||||
// Keep in mind that changing this after an app is deployed can cause problems with clients and/or kind data.
|
||||
groupOverride: "dashvalidator.grafana.app"
|
||||
|
||||
// versions is a map of versions supported by your app. Version names should follow the format "v<integer>" or
|
||||
// "v<integer>(alpha|beta)<integer>". Each version contains the kinds your app manages for that version.
|
||||
// If your app needs access to kinds managed by another app, use permissions.accessKinds to allow your app access.
|
||||
versions: {
|
||||
"v1alpha1": v1alpha1
|
||||
}
|
||||
// extraPermissions contains any additional permissions your app may require to function.
|
||||
// Your app will always have all permissions for each kind it manages (the items defined in 'kinds').
|
||||
extraPermissions: {
|
||||
// If your app needs access to additional kinds supplied by other apps, you can list them here
|
||||
accessKinds: [
|
||||
// Here is an example for your app accessing the playlist kind for reads and watch
|
||||
// {
|
||||
// group: "playlist.grafana.app"
|
||||
// resource: "playlists"
|
||||
// actions: ["get","list","watch"]
|
||||
// }
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
// v1alpha1 is the v1alpha1 version of the app's API.
|
||||
// It includes kinds which the v1alpha1 API serves, and (future) custom routes served globally from the v1alpha1 version.
|
||||
v1alpha1: {
|
||||
// kinds is the list of kinds served by this version
|
||||
kinds: [dashboardcompatibilityscorev0alpha1]
|
||||
// [OPTIONAL]
|
||||
// served indicates whether this particular version is served by the API server.
|
||||
// served should be set to false before a version is removed from the manifest entirely.
|
||||
// served defaults to true if not present.
|
||||
served: true
|
||||
// [OPTIONAL]
|
||||
// Codegen is a trait that tells the grafana-app-sdk, or other code generation tooling, how to process this kind.
|
||||
// If not present, default values within the codegen trait are used.
|
||||
// If you wish to specify codegen per-version, put this section in the version's object
|
||||
// (for example, <no value>v1alpha1) instead.
|
||||
|
||||
routes: {
|
||||
namespaced: {
|
||||
"/check": {
|
||||
"POST": {
|
||||
request: {
|
||||
body: {
|
||||
dashboardJson: {...}
|
||||
datasourceMappings: [...{
|
||||
uid: string
|
||||
type: string
|
||||
name?: string
|
||||
}]
|
||||
}
|
||||
}
|
||||
response: {
|
||||
compatibilityScore: number
|
||||
datasourceResults: [...{
|
||||
uid: string
|
||||
type: string
|
||||
name?: string
|
||||
totalQueries: int
|
||||
checkedQueries: int
|
||||
totalMetrics: int
|
||||
foundMetrics: int
|
||||
missingMetrics: [...string]
|
||||
queryBreakdown: [...{
|
||||
panelTitle: string
|
||||
panelID: int
|
||||
queryRefId: string
|
||||
totalMetrics: int
|
||||
foundMetrics: int
|
||||
missingMetrics: [...string]
|
||||
compatibilityScore: number
|
||||
}]
|
||||
compatibilityScore: number
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
cluser: {}
|
||||
}
|
||||
codegen: {
|
||||
// [OPTIONAL]
|
||||
// ts contains TypeScript code generation properties for the kind
|
||||
ts: {
|
||||
// [OPTIONAL]
|
||||
// enabled indicates whether the CLI should generate front-end TypeScript code for the kind.
|
||||
// Defaults to true if not present.
|
||||
enabled: true
|
||||
}
|
||||
// [OPTIONAL]
|
||||
// go contains go code generation properties for the kind
|
||||
go: {
|
||||
// [OPTIONAL]
|
||||
// enabled indicates whether the CLI should generate back-end go code for the kind.
|
||||
// Defaults to true if not present.
|
||||
enabled: true
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
package v1alpha1
|
||||
|
||||
import "k8s.io/apimachinery/pkg/runtime/schema"
|
||||
|
||||
const (
|
||||
// APIGroup is the API group used by all kinds in this package
|
||||
APIGroup = "dashvalidator.grafana.app"
|
||||
// APIVersion is the API version used by all kinds in this package
|
||||
APIVersion = "v1alpha1"
|
||||
)
|
||||
|
||||
var (
|
||||
// GroupVersion is a schema.GroupVersion consisting of the Group and Version constants for this package
|
||||
GroupVersion = schema.GroupVersion{
|
||||
Group: APIGroup,
|
||||
Version: APIVersion,
|
||||
}
|
||||
)
|
||||
27
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/createcheck_request_body_types_gen.go
generated
Normal file
27
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/createcheck_request_body_types_gen.go
generated
Normal file
@@ -0,0 +1,27 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
type CreateCheckRequestBody struct {
|
||||
DashboardJson map[string]any `json:"dashboardJson"`
|
||||
DatasourceMappings []CreateCheckRequestV1alpha1BodyDatasourceMappings `json:"datasourceMappings"`
|
||||
}
|
||||
|
||||
// NewCreateCheckRequestBody creates a new CreateCheckRequestBody object.
|
||||
func NewCreateCheckRequestBody() *CreateCheckRequestBody {
|
||||
return &CreateCheckRequestBody{
|
||||
DashboardJson: map[string]any{},
|
||||
DatasourceMappings: []CreateCheckRequestV1alpha1BodyDatasourceMappings{},
|
||||
}
|
||||
}
|
||||
|
||||
type CreateCheckRequestV1alpha1BodyDatasourceMappings struct {
|
||||
Uid string `json:"uid"`
|
||||
Type string `json:"type"`
|
||||
Name *string `json:"name,omitempty"`
|
||||
}
|
||||
|
||||
// NewCreateCheckRequestV1alpha1BodyDatasourceMappings creates a new CreateCheckRequestV1alpha1BodyDatasourceMappings object.
|
||||
func NewCreateCheckRequestV1alpha1BodyDatasourceMappings() *CreateCheckRequestV1alpha1BodyDatasourceMappings {
|
||||
return &CreateCheckRequestV1alpha1BodyDatasourceMappings{}
|
||||
}
|
||||
56
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/createcheck_response_body_types_gen.go
generated
Normal file
56
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/createcheck_response_body_types_gen.go
generated
Normal file
@@ -0,0 +1,56 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type CreateCheckBody struct {
|
||||
CompatibilityScore float64 `json:"compatibilityScore"`
|
||||
DatasourceResults []V1alpha1CreateCheckBodyDatasourceResults `json:"datasourceResults"`
|
||||
}
|
||||
|
||||
// NewCreateCheckBody creates a new CreateCheckBody object.
|
||||
func NewCreateCheckBody() *CreateCheckBody {
|
||||
return &CreateCheckBody{
|
||||
DatasourceResults: []V1alpha1CreateCheckBodyDatasourceResults{},
|
||||
}
|
||||
}
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type V1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown struct {
|
||||
PanelTitle string `json:"panelTitle"`
|
||||
PanelID int64 `json:"panelID"`
|
||||
QueryRefId string `json:"queryRefId"`
|
||||
TotalMetrics int64 `json:"totalMetrics"`
|
||||
FoundMetrics int64 `json:"foundMetrics"`
|
||||
MissingMetrics []string `json:"missingMetrics"`
|
||||
CompatibilityScore float64 `json:"compatibilityScore"`
|
||||
}
|
||||
|
||||
// NewV1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown creates a new V1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown object.
|
||||
func NewV1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown() *V1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown {
|
||||
return &V1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown{
|
||||
MissingMetrics: []string{},
|
||||
}
|
||||
}
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type V1alpha1CreateCheckBodyDatasourceResults struct {
|
||||
Uid string `json:"uid"`
|
||||
Type string `json:"type"`
|
||||
Name *string `json:"name,omitempty"`
|
||||
TotalQueries int64 `json:"totalQueries"`
|
||||
CheckedQueries int64 `json:"checkedQueries"`
|
||||
TotalMetrics int64 `json:"totalMetrics"`
|
||||
FoundMetrics int64 `json:"foundMetrics"`
|
||||
MissingMetrics []string `json:"missingMetrics"`
|
||||
QueryBreakdown []V1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown `json:"queryBreakdown"`
|
||||
CompatibilityScore float64 `json:"compatibilityScore"`
|
||||
}
|
||||
|
||||
// NewV1alpha1CreateCheckBodyDatasourceResults creates a new V1alpha1CreateCheckBodyDatasourceResults object.
|
||||
func NewV1alpha1CreateCheckBodyDatasourceResults() *V1alpha1CreateCheckBodyDatasourceResults {
|
||||
return &V1alpha1CreateCheckBodyDatasourceResults{
|
||||
MissingMetrics: []string{},
|
||||
QueryBreakdown: []V1alpha1CreateCheckBodyDatasourceResultsQueryBreakdown{},
|
||||
}
|
||||
}
|
||||
37
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/createcheck_response_object_types_gen.go
generated
Normal file
37
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/createcheck_response_object_types_gen.go
generated
Normal file
@@ -0,0 +1,37 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
)
|
||||
|
||||
// +k8s:openapi-gen=true
|
||||
type CreateCheck struct {
|
||||
metav1.TypeMeta `json:",inline"`
|
||||
CreateCheckBody `json:",inline"`
|
||||
}
|
||||
|
||||
func NewCreateCheck() *CreateCheck {
|
||||
return &CreateCheck{}
|
||||
}
|
||||
|
||||
func (t *CreateCheckBody) DeepCopyInto(dst *CreateCheckBody) {
|
||||
_ = resource.CopyObjectInto(dst, t)
|
||||
}
|
||||
|
||||
func (o *CreateCheck) DeepCopyObject() runtime.Object {
|
||||
dst := NewCreateCheck()
|
||||
o.DeepCopyInto(dst)
|
||||
return dst
|
||||
}
|
||||
|
||||
func (o *CreateCheck) DeepCopyInto(dst *CreateCheck) {
|
||||
dst.TypeMeta.APIVersion = o.TypeMeta.APIVersion
|
||||
dst.TypeMeta.Kind = o.TypeMeta.Kind
|
||||
o.CreateCheckBody.DeepCopyInto(&dst.CreateCheckBody)
|
||||
}
|
||||
|
||||
var _ runtime.Object = NewCreateCheck()
|
||||
99
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_client_gen.go
generated
Normal file
99
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_client_gen.go
generated
Normal file
@@ -0,0 +1,99 @@
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
)
|
||||
|
||||
type DashboardCompatibilityScoreClient struct {
|
||||
client *resource.TypedClient[*DashboardCompatibilityScore, *DashboardCompatibilityScoreList]
|
||||
}
|
||||
|
||||
func NewDashboardCompatibilityScoreClient(client resource.Client) *DashboardCompatibilityScoreClient {
|
||||
return &DashboardCompatibilityScoreClient{
|
||||
client: resource.NewTypedClient[*DashboardCompatibilityScore, *DashboardCompatibilityScoreList](client, DashboardCompatibilityScoreKind()),
|
||||
}
|
||||
}
|
||||
|
||||
func NewDashboardCompatibilityScoreClientFromGenerator(generator resource.ClientGenerator) (*DashboardCompatibilityScoreClient, error) {
|
||||
c, err := generator.ClientFor(DashboardCompatibilityScoreKind())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return NewDashboardCompatibilityScoreClient(c), nil
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) Get(ctx context.Context, identifier resource.Identifier) (*DashboardCompatibilityScore, error) {
|
||||
return c.client.Get(ctx, identifier)
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) List(ctx context.Context, namespace string, opts resource.ListOptions) (*DashboardCompatibilityScoreList, error) {
|
||||
return c.client.List(ctx, namespace, opts)
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) ListAll(ctx context.Context, namespace string, opts resource.ListOptions) (*DashboardCompatibilityScoreList, error) {
|
||||
resp, err := c.client.List(ctx, namespace, resource.ListOptions{
|
||||
ResourceVersion: opts.ResourceVersion,
|
||||
Limit: opts.Limit,
|
||||
LabelFilters: opts.LabelFilters,
|
||||
FieldSelectors: opts.FieldSelectors,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for resp.GetContinue() != "" {
|
||||
page, err := c.client.List(ctx, namespace, resource.ListOptions{
|
||||
Continue: resp.GetContinue(),
|
||||
ResourceVersion: opts.ResourceVersion,
|
||||
Limit: opts.Limit,
|
||||
LabelFilters: opts.LabelFilters,
|
||||
FieldSelectors: opts.FieldSelectors,
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
resp.SetContinue(page.GetContinue())
|
||||
resp.SetResourceVersion(page.GetResourceVersion())
|
||||
resp.SetItems(append(resp.GetItems(), page.GetItems()...))
|
||||
}
|
||||
return resp, nil
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) Create(ctx context.Context, obj *DashboardCompatibilityScore, opts resource.CreateOptions) (*DashboardCompatibilityScore, error) {
|
||||
// Make sure apiVersion and kind are set
|
||||
obj.APIVersion = GroupVersion.Identifier()
|
||||
obj.Kind = DashboardCompatibilityScoreKind().Kind()
|
||||
return c.client.Create(ctx, obj, opts)
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) Update(ctx context.Context, obj *DashboardCompatibilityScore, opts resource.UpdateOptions) (*DashboardCompatibilityScore, error) {
|
||||
return c.client.Update(ctx, obj, opts)
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) Patch(ctx context.Context, identifier resource.Identifier, req resource.PatchRequest, opts resource.PatchOptions) (*DashboardCompatibilityScore, error) {
|
||||
return c.client.Patch(ctx, identifier, req, opts)
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) UpdateStatus(ctx context.Context, identifier resource.Identifier, newStatus DashboardCompatibilityScoreStatus, opts resource.UpdateOptions) (*DashboardCompatibilityScore, error) {
|
||||
return c.client.Update(ctx, &DashboardCompatibilityScore{
|
||||
TypeMeta: metav1.TypeMeta{
|
||||
Kind: DashboardCompatibilityScoreKind().Kind(),
|
||||
APIVersion: GroupVersion.Identifier(),
|
||||
},
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
ResourceVersion: opts.ResourceVersion,
|
||||
Namespace: identifier.Namespace,
|
||||
Name: identifier.Name,
|
||||
},
|
||||
Status: newStatus,
|
||||
}, resource.UpdateOptions{
|
||||
Subresource: "status",
|
||||
ResourceVersion: opts.ResourceVersion,
|
||||
})
|
||||
}
|
||||
|
||||
func (c *DashboardCompatibilityScoreClient) Delete(ctx context.Context, identifier resource.Identifier, opts resource.DeleteOptions) error {
|
||||
return c.client.Delete(ctx, identifier, opts)
|
||||
}
|
||||
28
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_codec_gen.go
generated
Normal file
28
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_codec_gen.go
generated
Normal file
@@ -0,0 +1,28 @@
|
||||
//
|
||||
// Code generated by grafana-app-sdk. DO NOT EDIT.
|
||||
//
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
)
|
||||
|
||||
// DashboardCompatibilityScoreJSONCodec is an implementation of resource.Codec for kubernetes JSON encoding
|
||||
type DashboardCompatibilityScoreJSONCodec struct{}
|
||||
|
||||
// Read reads JSON-encoded bytes from `reader` and unmarshals them into `into`
|
||||
func (*DashboardCompatibilityScoreJSONCodec) Read(reader io.Reader, into resource.Object) error {
|
||||
return json.NewDecoder(reader).Decode(into)
|
||||
}
|
||||
|
||||
// Write writes JSON-encoded bytes into `writer` marshaled from `from`
|
||||
func (*DashboardCompatibilityScoreJSONCodec) Write(writer io.Writer, from resource.Object) error {
|
||||
return json.NewEncoder(writer).Encode(from)
|
||||
}
|
||||
|
||||
// Interface compliance checks
|
||||
var _ resource.Codec = &DashboardCompatibilityScoreJSONCodec{}
|
||||
31
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_metadata_gen.go
generated
Normal file
31
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_metadata_gen.go
generated
Normal file
@@ -0,0 +1,31 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
time "time"
|
||||
)
|
||||
|
||||
// metadata contains embedded CommonMetadata and can be extended with custom string fields
|
||||
// TODO: use CommonMetadata instead of redefining here; currently needs to be defined here
|
||||
// without external reference as using the CommonMetadata reference breaks thema codegen.
|
||||
type DashboardCompatibilityScoreMetadata struct {
|
||||
UpdateTimestamp time.Time `json:"updateTimestamp"`
|
||||
CreatedBy string `json:"createdBy"`
|
||||
Uid string `json:"uid"`
|
||||
CreationTimestamp time.Time `json:"creationTimestamp"`
|
||||
DeletionTimestamp *time.Time `json:"deletionTimestamp,omitempty"`
|
||||
Finalizers []string `json:"finalizers"`
|
||||
ResourceVersion string `json:"resourceVersion"`
|
||||
Generation int64 `json:"generation"`
|
||||
UpdatedBy string `json:"updatedBy"`
|
||||
Labels map[string]string `json:"labels"`
|
||||
}
|
||||
|
||||
// NewDashboardCompatibilityScoreMetadata creates a new DashboardCompatibilityScoreMetadata object.
|
||||
func NewDashboardCompatibilityScoreMetadata() *DashboardCompatibilityScoreMetadata {
|
||||
return &DashboardCompatibilityScoreMetadata{
|
||||
Finalizers: []string{},
|
||||
Labels: map[string]string{},
|
||||
}
|
||||
}
|
||||
326
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_object_gen.go
generated
Normal file
326
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_object_gen.go
generated
Normal file
@@ -0,0 +1,326 @@
|
||||
//
|
||||
// Code generated by grafana-app-sdk. DO NOT EDIT.
|
||||
//
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/apimachinery/pkg/types"
|
||||
"time"
|
||||
)
|
||||
|
||||
// DashboardCompatibilityScore is the top-level kind object: standard
// Kubernetes type/object metadata plus a Spec and a Status subresource.
// NOTE(review): this file is generated by grafana-app-sdk — edits here will
// be lost on regeneration.
// +k8s:openapi-gen=true
type DashboardCompatibilityScore struct {
	metav1.TypeMeta   `json:",inline" yaml:",inline"`
	metav1.ObjectMeta `json:"metadata" yaml:"metadata"`

	// Spec is the spec of the DashboardCompatibilityScore
	Spec DashboardCompatibilityScoreSpec `json:"spec" yaml:"spec"`

	// Status holds the validation results subresource.
	Status DashboardCompatibilityScoreStatus `json:"status" yaml:"status"`
}

// NewDashboardCompatibilityScore creates a new object with defaulted
// (non-nil) Spec and Status.
func NewDashboardCompatibilityScore() *DashboardCompatibilityScore {
	return &DashboardCompatibilityScore{
		Spec:   *NewDashboardCompatibilityScoreSpec(),
		Status: *NewDashboardCompatibilityScoreStatus(),
	}
}

// GetSpec returns the Spec as an untyped value (resource.Object interface).
func (o *DashboardCompatibilityScore) GetSpec() any {
	return o.Spec
}

// SetSpec sets the Spec from an untyped value; it errors (rather than
// panicking) when the value is not a DashboardCompatibilityScoreSpec.
func (o *DashboardCompatibilityScore) SetSpec(spec any) error {
	cast, ok := spec.(DashboardCompatibilityScoreSpec)
	if !ok {
		return fmt.Errorf("cannot set spec type %#v, not of type Spec", spec)
	}
	o.Spec = cast
	return nil
}

// GetSubresources returns all subresources by name; this kind has only "status".
func (o *DashboardCompatibilityScore) GetSubresources() map[string]any {
	return map[string]any{
		"status": o.Status,
	}
}

// GetSubresource returns the named subresource and whether it exists.
func (o *DashboardCompatibilityScore) GetSubresource(name string) (any, bool) {
	switch name {
	case "status":
		return o.Status, true
	default:
		return nil, false
	}
}

// SetSubresource sets the named subresource from an untyped value, erroring
// on an unknown name or a value of the wrong concrete type.
func (o *DashboardCompatibilityScore) SetSubresource(name string, value any) error {
	switch name {
	case "status":
		cast, ok := value.(DashboardCompatibilityScoreStatus)
		if !ok {
			return fmt.Errorf("cannot set status type %#v, not of type DashboardCompatibilityScoreStatus", value)
		}
		o.Status = cast
		return nil
	default:
		return fmt.Errorf("subresource '%s' does not exist", name)
	}
}
|
||||
|
||||
// GetStaticMetadata returns the object's identity (name, namespace, group,
// version, kind) as resource.StaticMetadata.
func (o *DashboardCompatibilityScore) GetStaticMetadata() resource.StaticMetadata {
	gvk := o.GroupVersionKind()
	return resource.StaticMetadata{
		Name:      o.ObjectMeta.Name,
		Namespace: o.ObjectMeta.Namespace,
		Group:     gvk.Group,
		Version:   gvk.Version,
		Kind:      gvk.Kind,
	}
}

// SetStaticMetadata overwrites the object's name, namespace, and
// GroupVersionKind from the provided static metadata.
func (o *DashboardCompatibilityScore) SetStaticMetadata(metadata resource.StaticMetadata) {
	o.Name = metadata.Name
	o.Namespace = metadata.Namespace
	o.SetGroupVersionKind(schema.GroupVersionKind{
		Group:   metadata.Group,
		Version: metadata.Version,
		Kind:    metadata.Kind,
	})
}

// GetCommonMetadata maps ObjectMeta (plus the grafana.com/* annotations)
// into the SDK's resource.CommonMetadata representation.
func (o *DashboardCompatibilityScore) GetCommonMetadata() resource.CommonMetadata {
	dt := o.DeletionTimestamp
	var deletionTimestamp *time.Time
	if dt != nil {
		deletionTimestamp = &dt.Time
	}
	// Legacy ExtraFields support: annotations, managedFields, and
	// ownerReferences are surfaced only when non-nil.
	extraFields := make(map[string]any)
	if o.Annotations != nil {
		extraFields["annotations"] = o.Annotations
	}
	if o.ManagedFields != nil {
		extraFields["managedFields"] = o.ManagedFields
	}
	if o.OwnerReferences != nil {
		extraFields["ownerReferences"] = o.OwnerReferences
	}
	return resource.CommonMetadata{
		UID:               string(o.UID),
		ResourceVersion:   o.ResourceVersion,
		Generation:        o.Generation,
		Labels:            o.Labels,
		CreationTimestamp: o.CreationTimestamp.Time,
		DeletionTimestamp: deletionTimestamp,
		Finalizers:        o.Finalizers,
		UpdateTimestamp:   o.GetUpdateTimestamp(),
		CreatedBy:         o.GetCreatedBy(),
		UpdatedBy:         o.GetUpdatedBy(),
		ExtraFields:       extraFields,
	}
}

// SetCommonMetadata applies resource.CommonMetadata back onto ObjectMeta.
// Zero UpdateTimestamp and empty CreatedBy/UpdatedBy are skipped (existing
// annotation values are left untouched rather than cleared).
func (o *DashboardCompatibilityScore) SetCommonMetadata(metadata resource.CommonMetadata) {
	o.UID = types.UID(metadata.UID)
	o.ResourceVersion = metadata.ResourceVersion
	o.Generation = metadata.Generation
	o.Labels = metadata.Labels
	o.CreationTimestamp = metav1.NewTime(metadata.CreationTimestamp)
	if metadata.DeletionTimestamp != nil {
		dt := metav1.NewTime(*metadata.DeletionTimestamp)
		o.DeletionTimestamp = &dt
	} else {
		o.DeletionTimestamp = nil
	}
	o.Finalizers = metadata.Finalizers
	if o.Annotations == nil {
		o.Annotations = make(map[string]string)
	}
	if !metadata.UpdateTimestamp.IsZero() {
		o.SetUpdateTimestamp(metadata.UpdateTimestamp)
	}
	if metadata.CreatedBy != "" {
		o.SetCreatedBy(metadata.CreatedBy)
	}
	if metadata.UpdatedBy != "" {
		o.SetUpdatedBy(metadata.UpdatedBy)
	}
	// Legacy support for setting Annotations, ManagedFields, and OwnerReferences via ExtraFields.
	// Values of an unexpected concrete type are silently ignored.
	if metadata.ExtraFields != nil {
		if annotations, ok := metadata.ExtraFields["annotations"]; ok {
			if cast, ok := annotations.(map[string]string); ok {
				o.Annotations = cast
			}
		}
		if managedFields, ok := metadata.ExtraFields["managedFields"]; ok {
			if cast, ok := managedFields.([]metav1.ManagedFieldsEntry); ok {
				o.ManagedFields = cast
			}
		}
		if ownerReferences, ok := metadata.ExtraFields["ownerReferences"]; ok {
			if cast, ok := ownerReferences.([]metav1.OwnerReference); ok {
				o.OwnerReferences = cast
			}
		}
	}
}
|
||||
|
||||
// GetCreatedBy returns the "grafana.com/createdBy" annotation ("" if unset).
// NOTE(review): like the other annotation getters below, this getter mutates
// the object by lazily allocating the Annotations map.
func (o *DashboardCompatibilityScore) GetCreatedBy() string {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	return o.ObjectMeta.Annotations["grafana.com/createdBy"]
}

// SetCreatedBy records the creator identity in the "grafana.com/createdBy" annotation.
func (o *DashboardCompatibilityScore) SetCreatedBy(createdBy string) {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	o.ObjectMeta.Annotations["grafana.com/createdBy"] = createdBy
}

// GetUpdateTimestamp parses the "grafana.com/updateTimestamp" annotation as
// RFC 3339. The parse error is deliberately ignored: a missing or malformed
// annotation yields the zero time.Time.
func (o *DashboardCompatibilityScore) GetUpdateTimestamp() time.Time {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	parsed, _ := time.Parse(time.RFC3339, o.ObjectMeta.Annotations["grafana.com/updateTimestamp"])
	return parsed
}

// SetUpdateTimestamp stores the timestamp in RFC 3339 form in the
// "grafana.com/updateTimestamp" annotation.
func (o *DashboardCompatibilityScore) SetUpdateTimestamp(updateTimestamp time.Time) {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	o.ObjectMeta.Annotations["grafana.com/updateTimestamp"] = updateTimestamp.Format(time.RFC3339)
}

// GetUpdatedBy returns the "grafana.com/updatedBy" annotation ("" if unset).
func (o *DashboardCompatibilityScore) GetUpdatedBy() string {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	return o.ObjectMeta.Annotations["grafana.com/updatedBy"]
}

// SetUpdatedBy records the updater identity in the "grafana.com/updatedBy" annotation.
func (o *DashboardCompatibilityScore) SetUpdatedBy(updatedBy string) {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	o.ObjectMeta.Annotations["grafana.com/updatedBy"] = updatedBy
}
|
||||
|
||||
// Copy returns a deep copy as a resource.Object, delegating to the SDK's
// generic copy helper.
func (o *DashboardCompatibilityScore) Copy() resource.Object {
	return resource.CopyObject(o)
}

// DeepCopyObject implements runtime.Object for Kubernetes machinery.
func (o *DashboardCompatibilityScore) DeepCopyObject() runtime.Object {
	return o.Copy()
}

// DeepCopy returns a new, fully independent copy of the object.
func (o *DashboardCompatibilityScore) DeepCopy() *DashboardCompatibilityScore {
	cpy := &DashboardCompatibilityScore{}
	o.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies this object into dst, field by field
// (TypeMeta, ObjectMeta, Spec, Status).
func (o *DashboardCompatibilityScore) DeepCopyInto(dst *DashboardCompatibilityScore) {
	dst.TypeMeta.APIVersion = o.TypeMeta.APIVersion
	dst.TypeMeta.Kind = o.TypeMeta.Kind
	o.ObjectMeta.DeepCopyInto(&dst.ObjectMeta)
	o.Spec.DeepCopyInto(&dst.Spec)
	o.Status.DeepCopyInto(&dst.Status)
}

// Interface compliance compile-time check
var _ resource.Object = &DashboardCompatibilityScore{}
|
||||
|
||||
// DashboardCompatibilityScoreList is the Kubernetes list type for
// DashboardCompatibilityScore objects.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreList struct {
	metav1.TypeMeta `json:",inline" yaml:",inline"`
	metav1.ListMeta `json:"metadata" yaml:"metadata"`
	Items           []DashboardCompatibilityScore `json:"items" yaml:"items"`
}

// DeepCopyObject implements runtime.Object for the list type.
func (o *DashboardCompatibilityScoreList) DeepCopyObject() runtime.Object {
	return o.Copy()
}

// Copy returns a deep copy of the list. NOTE(review): an item whose Copy()
// does not assert back to *DashboardCompatibilityScore is silently left as
// the zero value in the copy.
func (o *DashboardCompatibilityScoreList) Copy() resource.ListObject {
	cpy := &DashboardCompatibilityScoreList{
		TypeMeta: o.TypeMeta,
		Items:    make([]DashboardCompatibilityScore, len(o.Items)),
	}
	o.ListMeta.DeepCopyInto(&cpy.ListMeta)
	for i := 0; i < len(o.Items); i++ {
		if item, ok := o.Items[i].Copy().(*DashboardCompatibilityScore); ok {
			cpy.Items[i] = *item
		}
	}
	return cpy
}

// GetItems exposes the items as []resource.Object; elements point into
// o.Items (no copies are made).
func (o *DashboardCompatibilityScoreList) GetItems() []resource.Object {
	items := make([]resource.Object, len(o.Items))
	for i := 0; i < len(o.Items); i++ {
		items[i] = &o.Items[i]
	}
	return items
}

// SetItems replaces the list contents. NOTE(review): the unchecked type
// assertion panics if any element is not a *DashboardCompatibilityScore.
func (o *DashboardCompatibilityScoreList) SetItems(items []resource.Object) {
	o.Items = make([]DashboardCompatibilityScore, len(items))
	for i := 0; i < len(items); i++ {
		o.Items[i] = *items[i].(*DashboardCompatibilityScore)
	}
}

// DeepCopy returns a new, fully independent copy of the list.
func (o *DashboardCompatibilityScoreList) DeepCopy() *DashboardCompatibilityScoreList {
	cpy := &DashboardCompatibilityScoreList{}
	o.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies the list into dst via the SDK's generic helper.
func (o *DashboardCompatibilityScoreList) DeepCopyInto(dst *DashboardCompatibilityScoreList) {
	resource.CopyObjectInto(dst, o)
}

// Interface compliance compile-time check
var _ resource.ListObject = &DashboardCompatibilityScoreList{}
|
||||
|
||||
// Copy methods for all subresource types

// DeepCopy creates a full deep copy of Spec
func (s *DashboardCompatibilityScoreSpec) DeepCopy() *DashboardCompatibilityScoreSpec {
	cpy := &DashboardCompatibilityScoreSpec{}
	s.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies Spec into another Spec object
// via the SDK's generic (reflection/serialization based) helper.
func (s *DashboardCompatibilityScoreSpec) DeepCopyInto(dst *DashboardCompatibilityScoreSpec) {
	resource.CopyObjectInto(dst, s)
}

// DeepCopy creates a full deep copy of DashboardCompatibilityScoreStatus
func (s *DashboardCompatibilityScoreStatus) DeepCopy() *DashboardCompatibilityScoreStatus {
	cpy := &DashboardCompatibilityScoreStatus{}
	s.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies DashboardCompatibilityScoreStatus into another DashboardCompatibilityScoreStatus object
// via the SDK's generic helper.
func (s *DashboardCompatibilityScoreStatus) DeepCopyInto(dst *DashboardCompatibilityScoreStatus) {
	resource.CopyObjectInto(dst, s)
}
|
||||
34
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_schema_gen.go
generated
Normal file
34
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_schema_gen.go
generated
Normal file
@@ -0,0 +1,34 @@
|
||||
//
|
||||
// Code generated by grafana-app-sdk. DO NOT EDIT.
|
||||
//
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
)
|
||||
|
||||
// schema is unexported to prevent accidental overwrites.
// The kind is registered under group "dashvalidator.grafana.app",
// version "v1alpha1", namespaced scope, plural "dashboardcompatibilityscores".
var (
	schemaDashboardCompatibilityScore = resource.NewSimpleSchema("dashvalidator.grafana.app", "v1alpha1", NewDashboardCompatibilityScore(), &DashboardCompatibilityScoreList{}, resource.WithKind("DashboardCompatibilityScore"),
		resource.WithPlural("dashboardcompatibilityscores"), resource.WithScope(resource.NamespacedScope))
	kindDashboardCompatibilityScore = resource.Kind{
		Schema: schemaDashboardCompatibilityScore,
		Codecs: map[resource.KindEncoding]resource.Codec{
			// JSON is the only registered wire encoding for this kind.
			resource.KindEncodingJSON: &DashboardCompatibilityScoreJSONCodec{},
		},
	}
)

// Kind returns a resource.Kind for this Schema with a JSON codec
func DashboardCompatibilityScoreKind() resource.Kind {
	return kindDashboardCompatibilityScore
}

// Schema returns a resource.SimpleSchema representation of DashboardCompatibilityScore
func DashboardCompatibilityScoreSchema() *resource.SimpleSchema {
	return schemaDashboardCompatibilityScore
}

// Interface compliance checks
var _ resource.Schema = kindDashboardCompatibilityScore
|
||||
48
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_spec_gen.go
generated
Normal file
48
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_spec_gen.go
generated
Normal file
@@ -0,0 +1,48 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
// DataSourceMapping specifies a datasource to validate dashboard queries against.
// Maps logical datasource references in the dashboard to actual datasource instances.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreDataSourceMapping struct {
	// Unique identifier of the datasource instance.
	// Example: "prometheus-prod-us-west"
	Uid string `json:"uid"`
	// Type of datasource plugin.
	// MVP: Only "prometheus" supported.
	// Future: "mysql", "postgres", "elasticsearch", etc.
	Type string `json:"type"`
	// Optional human-readable name for display in results.
	// If not provided, UID will be used in error messages.
	// Example: "Production Prometheus (US-West)"
	Name *string `json:"name,omitempty"`
}

// NewDashboardCompatibilityScoreDataSourceMapping creates a new DashboardCompatibilityScoreDataSourceMapping object.
// All fields are left at their zero values.
func NewDashboardCompatibilityScoreDataSourceMapping() *DashboardCompatibilityScoreDataSourceMapping {
	return &DashboardCompatibilityScoreDataSourceMapping{}
}

// DashboardCompatibilityScoreSpec is the desired state: the dashboard to
// validate and the datasources to validate it against.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreSpec struct {
	// Complete dashboard JSON object to validate.
	// Must be a v1 dashboard schema (contains "panels" array).
	// v2 dashboards (with "elements" structure) are not yet supported.
	DashboardJson map[string]interface{} `json:"dashboardJson"`
	// Array of datasources to validate against.
	// The validator will check dashboard queries against each datasource
	// and provide per-datasource compatibility results.
	//
	// MVP: Only single datasource supported (array length = 1), Prometheus type only.
	// Future: Will support multiple datasources for dashboards with mixed queries.
	DatasourceMappings []DashboardCompatibilityScoreDataSourceMapping `json:"datasourceMappings"`
}

// NewDashboardCompatibilityScoreSpec creates a new DashboardCompatibilityScoreSpec object.
// Both collection fields are initialized empty (non-nil).
func NewDashboardCompatibilityScoreSpec() *DashboardCompatibilityScoreSpec {
	return &DashboardCompatibilityScoreSpec{
		DashboardJson:      map[string]interface{}{},
		DatasourceMappings: []DashboardCompatibilityScoreDataSourceMapping{},
	}
}
|
||||
151
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_status_gen.go
generated
Normal file
151
apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1/dashboardcompatibilityscore_status_gen.go
generated
Normal file
@@ -0,0 +1,151 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
// DataSourceResult contains validation results for a single datasource.
// Provides aggregate statistics and per-query breakdown of compatibility.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreDataSourceResult struct {
	// Datasource UID that was validated (matches DataSourceMapping.uid)
	Uid string `json:"uid"`
	// Datasource type (matches DataSourceMapping.type)
	Type string `json:"type"`
	// Optional display name (matches DataSourceMapping.name if provided)
	Name *string `json:"name,omitempty"`
	// Total number of queries in the dashboard targeting this datasource.
	// Includes all panel targets/queries that reference this datasource.
	TotalQueries int64 `json:"totalQueries"`
	// Number of queries successfully validated.
	// May be less than totalQueries if some queries couldn't be parsed.
	CheckedQueries int64 `json:"checkedQueries"`
	// Total number of unique metrics/identifiers referenced across all queries.
	// For Prometheus: metric names extracted from PromQL expressions.
	// For SQL datasources: table and column names.
	TotalMetrics int64 `json:"totalMetrics"`
	// Number of metrics that exist in the datasource schema.
	// foundMetrics <= totalMetrics
	FoundMetrics int64 `json:"foundMetrics"`
	// Array of metric names that were referenced but don't exist.
	// Useful for debugging why a dashboard shows "no data".
	// Example for Prometheus: ["http_requests_total", "api_latency_seconds"]
	MissingMetrics []string `json:"missingMetrics"`
	// Per-query breakdown showing which specific queries have issues.
	// One entry per query target (refId: "A", "B", "C", etc.) in each panel.
	// Allows pinpointing exactly which panel/query needs fixing.
	QueryBreakdown []DashboardCompatibilityScoreQueryBreakdown `json:"queryBreakdown"`
	// Overall compatibility score for this datasource (0-100).
	// Calculated as: (foundMetrics / totalMetrics) * 100
	// Used to calculate the global compatibilityScore in status.
	CompatibilityScore float64 `json:"compatibilityScore"`
}

// NewDashboardCompatibilityScoreDataSourceResult creates a new DashboardCompatibilityScoreDataSourceResult object.
// Slice fields are initialized empty (non-nil).
func NewDashboardCompatibilityScoreDataSourceResult() *DashboardCompatibilityScoreDataSourceResult {
	return &DashboardCompatibilityScoreDataSourceResult{
		MissingMetrics: []string{},
		QueryBreakdown: []DashboardCompatibilityScoreQueryBreakdown{},
	}
}

// QueryBreakdown provides compatibility details for a single query within a panel.
// Granular per-query results allow users to identify exactly which queries need fixing.
//
// Note: A panel can have multiple queries (refId: "A", "B", "C", etc.),
// so there may be multiple QueryBreakdown entries for the same panelID.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreQueryBreakdown struct {
	// Human-readable panel title for context.
	// Example: "CPU Usage", "Request Rate"
	PanelTitle string `json:"panelTitle"`
	// Numeric panel ID from dashboard JSON.
	// Used to correlate with dashboard structure.
	PanelID int64 `json:"panelID"`
	// Query identifier within the panel.
	// Values: "A", "B", "C", etc. (from panel.targets[].refId)
	// Uniquely identifies which query in a multi-query panel this refers to.
	QueryRefId string `json:"queryRefId"`
	// Number of unique metrics referenced in this specific query.
	// For Prometheus: metrics extracted from the PromQL expr.
	// Example: rate(http_requests_total[5m]) references 1 metric.
	TotalMetrics int64 `json:"totalMetrics"`
	// Number of those metrics that exist in the datasource.
	// foundMetrics <= totalMetrics
	FoundMetrics int64 `json:"foundMetrics"`
	// Array of missing metric names specific to this query.
	// Helps identify exactly which part of a query expression will fail.
	// Empty array means query is fully compatible.
	MissingMetrics []string `json:"missingMetrics"`
	// Compatibility percentage for this individual query (0-100).
	// Calculated as: (foundMetrics / totalMetrics) * 100
	// 100 = query will work perfectly, 0 = query will return no data.
	CompatibilityScore float64 `json:"compatibilityScore"`
}

// NewDashboardCompatibilityScoreQueryBreakdown creates a new DashboardCompatibilityScoreQueryBreakdown object.
// MissingMetrics is initialized empty (non-nil).
func NewDashboardCompatibilityScoreQueryBreakdown() *DashboardCompatibilityScoreQueryBreakdown {
	return &DashboardCompatibilityScoreQueryBreakdown{
		MissingMetrics: []string{},
	}
}

// DashboardCompatibilityScorestatusOperatorState records one operator's
// evaluation of this resource. (The lowercase "status" in the name is the
// generator's naming scheme for status-embedded types.)
// +k8s:openapi-gen=true
type DashboardCompatibilityScorestatusOperatorState struct {
	// lastEvaluation is the ResourceVersion last evaluated
	LastEvaluation string `json:"lastEvaluation"`
	// state describes the state of the lastEvaluation.
	// It is limited to three possible states for machine evaluation.
	State DashboardCompatibilityScoreStatusOperatorStateState `json:"state"`
	// descriptiveState is an optional more descriptive state field which has no requirements on format
	DescriptiveState *string `json:"descriptiveState,omitempty"`
	// details contains any extra information that is operator-specific
	Details map[string]interface{} `json:"details,omitempty"`
}

// NewDashboardCompatibilityScorestatusOperatorState creates a new DashboardCompatibilityScorestatusOperatorState object.
func NewDashboardCompatibilityScorestatusOperatorState() *DashboardCompatibilityScorestatusOperatorState {
	return &DashboardCompatibilityScorestatusOperatorState{}
}

// DashboardCompatibilityScoreStatus is the observed state: the computed
// compatibility score plus per-datasource results.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreStatus struct {
	// Overall compatibility score across all datasources (0-100).
	// Calculated as: (total found metrics / total referenced metrics) * 100
	//
	// Score interpretation:
	// - 100: Perfect compatibility, all queries will work
	// - 80-99: Excellent, minor missing metrics
	// - 50-79: Fair, significant missing metrics
	// - 0-49: Poor, most queries will fail
	CompatibilityScore float64 `json:"compatibilityScore"`
	// Per-datasource validation results.
	// Array length matches spec.datasourceMappings.
	// Each element contains detailed metrics and query-level breakdown.
	DatasourceResults []DashboardCompatibilityScoreDataSourceResult `json:"datasourceResults"`
	// ISO 8601 timestamp of when validation was last performed.
	// Example: "2024-01-15T10:30:00Z"
	LastChecked *string `json:"lastChecked,omitempty"`
	// operatorStates is a map of operator ID to operator state evaluations.
	// Any operator which consumes this kind SHOULD add its state evaluation information to this field.
	OperatorStates map[string]DashboardCompatibilityScorestatusOperatorState `json:"operatorStates,omitempty"`
	// Human-readable summary of validation result.
	// Examples: "All queries compatible", "3 missing metrics found"
	Message *string `json:"message,omitempty"`
	// additionalFields is reserved for future use
	AdditionalFields map[string]interface{} `json:"additionalFields,omitempty"`
}

// NewDashboardCompatibilityScoreStatus creates a new DashboardCompatibilityScoreStatus object.
// DatasourceResults is initialized empty (non-nil).
func NewDashboardCompatibilityScoreStatus() *DashboardCompatibilityScoreStatus {
	return &DashboardCompatibilityScoreStatus{
		DatasourceResults: []DashboardCompatibilityScoreDataSourceResult{},
	}
}

// DashboardCompatibilityScoreStatusOperatorStateState enumerates the three
// machine-readable operator evaluation states.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreStatusOperatorStateState string

const (
	DashboardCompatibilityScoreStatusOperatorStateStateSuccess    DashboardCompatibilityScoreStatusOperatorStateState = "success"
	DashboardCompatibilityScoreStatusOperatorStateStateInProgress DashboardCompatibilityScoreStatusOperatorStateState = "in_progress"
	DashboardCompatibilityScoreStatusOperatorStateStateFailed     DashboardCompatibilityScoreStatusOperatorStateState = "failed"
)
|
||||
File diff suppressed because one or more lines are too long
360
apps/dashvalidator/pkg/app/app.go
Normal file
360
apps/dashvalidator/pkg/app/app.go
Normal file
@@ -0,0 +1,360 @@
|
||||
package app
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/app"
|
||||
"github.com/grafana/grafana-app-sdk/logging"
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
"github.com/grafana/grafana-app-sdk/simple"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
|
||||
validatorv1alpha1 "github.com/grafana/grafana/apps/dashvalidator/pkg/apis/dashvalidator/v1alpha1"
|
||||
"github.com/grafana/grafana/apps/dashvalidator/pkg/validator"
|
||||
_ "github.com/grafana/grafana/apps/dashvalidator/pkg/validator/prometheus" // Register prometheus validator via init()
|
||||
"github.com/grafana/grafana/pkg/infra/httpclient"
|
||||
"github.com/grafana/grafana/pkg/services/datasources"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/plugincontext"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// DashValidatorConfig is the app-specific configuration passed through
// app.Config.SpecificConfig. It carries the Grafana services the /check
// route handler needs to resolve datasources and build authenticated
// HTTP clients.
type DashValidatorConfig struct {
	DatasourceSvc      datasources.DataSourceService
	PluginCtx          *plugincontext.Provider
	HTTPClientProvider httpclient.Provider
}

// checkRequest matches the CUE schema for POST /check request
type checkRequest struct {
	// Complete dashboard JSON to validate.
	DashboardJSON map[string]interface{} `json:"dashboardJson"`
	// Datasources to validate against (MVP: exactly one, enforced by the handler).
	DatasourceMappings []datasourceMapping `json:"datasourceMappings"`
}

// datasourceMapping represents a datasource to validate against
type datasourceMapping struct {
	UID  string  `json:"uid"`
	Type string  `json:"type"`
	Name *string `json:"name,omitempty"` // optional display name
}

// checkResponse matches the CUE schema for POST /check response
type checkResponse struct {
	CompatibilityScore float64            `json:"compatibilityScore"`
	DatasourceResults  []datasourceResult `json:"datasourceResults"`
}

// datasourceResult contains validation results for a single datasource
type datasourceResult struct {
	UID                string        `json:"uid"`
	Type               string        `json:"type"`
	Name               *string       `json:"name,omitempty"`
	TotalQueries       int           `json:"totalQueries"`
	CheckedQueries     int           `json:"checkedQueries"`
	TotalMetrics       int           `json:"totalMetrics"`
	FoundMetrics       int           `json:"foundMetrics"`
	MissingMetrics     []string      `json:"missingMetrics"`
	QueryBreakdown     []queryResult `json:"queryBreakdown"`
	CompatibilityScore float64       `json:"compatibilityScore"`
}

// queryResult contains validation results for a single query
type queryResult struct {
	PanelTitle         string   `json:"panelTitle"`
	PanelID            int      `json:"panelID"`
	QueryRefID         string   `json:"queryRefId"`
	TotalMetrics       int      `json:"totalMetrics"`
	FoundMetrics       int      `json:"foundMetrics"`
	MissingMetrics     []string `json:"missingMetrics"`
	CompatibilityScore float64  `json:"compatibilityScore"`
}
|
||||
|
||||
func New(cfg app.Config) (app.App, error) {
|
||||
specificConfig, ok := cfg.SpecificConfig.(*DashValidatorConfig)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid config type: expected DashValidatorConfig")
|
||||
}
|
||||
|
||||
log := logging.DefaultLogger.With("app", "dashvalidator")
|
||||
|
||||
// configure our app
|
||||
simpleConfig := simple.AppConfig{
|
||||
Name: "dashvalidator",
|
||||
KubeConfig: cfg.KubeConfig,
|
||||
|
||||
//Define our custom route
|
||||
VersionedCustomRoutes: map[string]simple.AppVersionRouteHandlers{
|
||||
"v1alpha1": {
|
||||
{
|
||||
Namespaced: true,
|
||||
Path: "check",
|
||||
Method: "POST",
|
||||
}: handleCheckRoute(log, specificConfig.DatasourceSvc, specificConfig.PluginCtx, specificConfig.HTTPClientProvider),
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
a, err := simple.NewApp(simpleConfig)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to create app: %w", err)
|
||||
}
|
||||
|
||||
return a, nil
|
||||
}
|
||||
|
||||
// custom route handler to check dashboard compatibility
|
||||
func handleCheckRoute(
|
||||
log logging.Logger,
|
||||
datasourceSvc datasources.DataSourceService,
|
||||
pluginCtx *plugincontext.Provider,
|
||||
httpClientProvider httpclient.Provider,
|
||||
) func(context.Context, app.CustomRouteResponseWriter, *app.CustomRouteRequest) error {
|
||||
return func(ctx context.Context, w app.CustomRouteResponseWriter, r *app.CustomRouteRequest) error {
|
||||
logger := log.WithContext(ctx)
|
||||
logger.Info("Received compatibility check request")
|
||||
|
||||
// Step 1: Parse request body
|
||||
body, err := io.ReadAll(r.Body)
|
||||
if err != nil {
|
||||
logger.Error("Failed to read request body", "error", err)
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": "failed to read request body",
|
||||
})
|
||||
}
|
||||
|
||||
var req checkRequest
|
||||
if err := json.Unmarshal(body, &req); err != nil {
|
||||
logger.Error("Failed to parse request JSON", "error", err)
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": "invalid JSON in request body",
|
||||
})
|
||||
}
|
||||
|
||||
// MVP: Only support single datasource validation
|
||||
if len(req.DatasourceMappings) != 1 {
|
||||
logger.Error("MVP only supports single datasource validation", "numDatasources", len(req.DatasourceMappings))
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": fmt.Sprintf("MVP only supports single datasource validation, got %d datasources", len(req.DatasourceMappings)),
|
||||
"code": "invalid_request",
|
||||
})
|
||||
}
|
||||
|
||||
// Step 2: Build validator request
|
||||
validatorReq := validator.DashboardCompatibilityRequest{
|
||||
DashboardJSON: req.DashboardJSON,
|
||||
DatasourceMappings: make([]validator.DatasourceMapping, 0, len(req.DatasourceMappings)),
|
||||
}
|
||||
|
||||
logger.Info("Processing request", "dashboardTitle", req.DashboardJSON["title"], "numMappings", len(req.DatasourceMappings))
|
||||
|
||||
// Get namespace from request (needed for datasource lookup)
|
||||
// Namespace format is typically "org-{orgID}"
|
||||
namespace := r.ResourceIdentifier.Namespace
|
||||
|
||||
// Extract orgID from namespace for logging context
|
||||
orgID := extractOrgIDFromNamespace(namespace)
|
||||
logger = logger.With("orgID", orgID, "namespace", namespace)
|
||||
|
||||
for _, dsMapping := range req.DatasourceMappings {
|
||||
dsLogger := logger.With("datasourceUID", dsMapping.UID, "datasourceType", dsMapping.Type)
|
||||
|
||||
// Convert optional name pointer to string
|
||||
name := ""
|
||||
if dsMapping.Name != nil {
|
||||
name = *dsMapping.Name
|
||||
dsLogger = dsLogger.With("datasourceName", name)
|
||||
}
|
||||
|
||||
// Fetch datasource from Grafana using app-platform method
|
||||
// Parameters: namespace, name (UID), group (datasource type)
|
||||
ds, err := datasourceSvc.GetDataSourceInNamespace(ctx, namespace, dsMapping.UID, dsMapping.Type)
|
||||
if err != nil {
|
||||
dsLogger.Error("Failed to get datasource from namespace", "error", err)
|
||||
|
||||
// Check if it's a not found error vs other errors
|
||||
errMsg := err.Error()
|
||||
statusCode := http.StatusInternalServerError
|
||||
userMsg := fmt.Sprintf("failed to retrieve datasource: %s", dsMapping.UID)
|
||||
|
||||
if strings.Contains(errMsg, "not found") || strings.Contains(errMsg, "does not exist") {
|
||||
statusCode = http.StatusNotFound
|
||||
userMsg = fmt.Sprintf("datasource not found: %s (type: %s)", dsMapping.UID, dsMapping.Type)
|
||||
dsLogger.Warn("Datasource not found in namespace")
|
||||
}
|
||||
|
||||
w.WriteHeader(statusCode)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": userMsg,
|
||||
"code": "datasource_error",
|
||||
})
|
||||
}
|
||||
|
||||
dsLogger.Info("Retrieved datasource", "url", ds.URL, "actualType", ds.Type)
|
||||
|
||||
// Validate that the datasource type matches the expected type
|
||||
if ds.Type != dsMapping.Type {
|
||||
dsLogger.Error("Datasource type mismatch",
|
||||
"expectedType", dsMapping.Type,
|
||||
"actualType", ds.Type)
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": fmt.Sprintf("datasource %s has type %s, expected %s", dsMapping.UID, ds.Type, dsMapping.Type),
|
||||
"code": "datasource_wrong_type",
|
||||
})
|
||||
}
|
||||
|
||||
// Validate that this is a supported datasource type
|
||||
// For MVP, we only support Prometheus
|
||||
if !isSupportedDatasourceType(ds.Type) {
|
||||
dsLogger.Error("Unsupported datasource type", "type", ds.Type)
|
||||
w.WriteHeader(http.StatusBadRequest)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": fmt.Sprintf("datasource type '%s' is not supported (currently only 'prometheus' is supported)", ds.Type),
|
||||
"code": "datasource_unsupported_type",
|
||||
})
|
||||
}
|
||||
|
||||
// Get authenticated HTTP transport for this datasource
|
||||
transport, err := datasourceSvc.GetHTTPTransport(ctx, ds, httpClientProvider)
|
||||
if err != nil {
|
||||
dsLogger.Error("Failed to get HTTP transport for datasource", "error", err)
|
||||
w.WriteHeader(http.StatusInternalServerError)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": fmt.Sprintf("failed to configure authentication for datasource: %s", dsMapping.UID),
|
||||
"code": "datasource_config_error",
|
||||
})
|
||||
}
|
||||
|
||||
// Create HTTP client with authenticated transport
|
||||
httpClient := &http.Client{
|
||||
Transport: transport,
|
||||
}
|
||||
|
||||
validatorReq.DatasourceMappings = append(validatorReq.DatasourceMappings, validator.DatasourceMapping{
|
||||
UID: dsMapping.UID,
|
||||
Type: dsMapping.Type,
|
||||
Name: name,
|
||||
URL: ds.URL,
|
||||
HTTPClient: httpClient, // Pass authenticated client
|
||||
})
|
||||
|
||||
dsLogger.Debug("Datasource configured successfully for validation")
|
||||
}
|
||||
|
||||
// Step 3: Validate dashboard compatibility
|
||||
result, err := validator.ValidateDashboardCompatibility(ctx, validatorReq)
|
||||
if err != nil {
|
||||
logger.Error("Validation failed", "error", err)
|
||||
|
||||
// Check if it's a structured ValidationError with a specific status code
|
||||
statusCode := http.StatusInternalServerError
|
||||
errorCode := "validation_error"
|
||||
errorMsg := fmt.Sprintf("validation failed: %v", err)
|
||||
|
||||
if validationErr := validator.GetValidationError(err); validationErr != nil {
|
||||
statusCode = validationErr.StatusCode
|
||||
errorCode = string(validationErr.Code)
|
||||
errorMsg = validationErr.Message
|
||||
|
||||
// Log additional context from the error
|
||||
for key, value := range validationErr.Details {
|
||||
logger.Error("Validation error detail", key, value)
|
||||
}
|
||||
}
|
||||
|
||||
w.WriteHeader(statusCode)
|
||||
return json.NewEncoder(w).Encode(map[string]string{
|
||||
"error": errorMsg,
|
||||
"code": errorCode,
|
||||
})
|
||||
}
|
||||
|
||||
// Step 4: Convert result to response format
|
||||
response := convertToCheckResponse(result)
|
||||
|
||||
// Step 5: Return response
|
||||
w.WriteHeader(http.StatusOK)
|
||||
return json.NewEncoder(w).Encode(response)
|
||||
}
|
||||
}
|
||||
|
||||
// convertToCheckResponse converts validator result to API response format
|
||||
func convertToCheckResponse(result *validator.DashboardCompatibilityResult) checkResponse {
|
||||
response := checkResponse{
|
||||
CompatibilityScore: result.CompatibilityScore,
|
||||
DatasourceResults: make([]datasourceResult, 0, len(result.DatasourceResults)),
|
||||
}
|
||||
|
||||
for _, dsResult := range result.DatasourceResults {
|
||||
// Convert name string to pointer
|
||||
var name *string
|
||||
if dsResult.Name != "" {
|
||||
name = &dsResult.Name
|
||||
}
|
||||
|
||||
// Convert query results
|
||||
queryBreakdown := make([]queryResult, 0, len(dsResult.QueryBreakdown))
|
||||
for _, qr := range dsResult.QueryBreakdown {
|
||||
queryBreakdown = append(queryBreakdown, queryResult{
|
||||
PanelTitle: qr.PanelTitle,
|
||||
PanelID: qr.PanelID,
|
||||
QueryRefID: qr.QueryRefID,
|
||||
TotalMetrics: qr.TotalMetrics,
|
||||
FoundMetrics: qr.FoundMetrics,
|
||||
MissingMetrics: qr.MissingMetrics,
|
||||
CompatibilityScore: qr.CompatibilityScore,
|
||||
})
|
||||
}
|
||||
|
||||
response.DatasourceResults = append(response.DatasourceResults, datasourceResult{
|
||||
UID: dsResult.UID,
|
||||
Type: dsResult.Type,
|
||||
Name: name,
|
||||
TotalQueries: dsResult.TotalQueries,
|
||||
CheckedQueries: dsResult.CheckedQueries,
|
||||
TotalMetrics: dsResult.TotalMetrics,
|
||||
FoundMetrics: dsResult.FoundMetrics,
|
||||
MissingMetrics: dsResult.MissingMetrics,
|
||||
QueryBreakdown: queryBreakdown,
|
||||
CompatibilityScore: dsResult.CompatibilityScore,
|
||||
})
|
||||
}
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
// extractOrgIDFromNamespace extracts the org ID from a namespace string of
// the form "org-{orgID}". It returns "unknown" when the namespace does not
// follow that format.
func extractOrgIDFromNamespace(namespace string) string {
	segments := strings.SplitN(namespace, "-", 3)
	if len(segments) < 2 || segments[0] != "org" {
		return "unknown"
	}
	return segments[1]
}
|
||||
|
||||
// isSupportedDatasourceType reports whether the validator supports the given
// datasource plugin type. The comparison is case-insensitive.
// For the MVP, only Prometheus is supported.
func isSupportedDatasourceType(dsType string) bool {
	switch strings.ToLower(dsType) {
	case "prometheus":
		return true
	}
	return false
}
|
||||
|
||||
// GetKinds returns the resource kinds served by this app, keyed by
// group/version, for registration with the app platform.
func GetKinds() map[schema.GroupVersion][]resource.Kind {
	gv := schema.GroupVersion{
		// NOTE(review): this group ("dashvalidator.grafana.com") differs from
		// the v1alpha1 package's APIGroup constant
		// ("dashvalidator.ext.grafana.com") — confirm which is intended,
		// since a mismatch here would register the kind under the wrong group.
		Group:   "dashvalidator.grafana.com",
		Version: "v1alpha1",
	}

	return map[schema.GroupVersion][]resource.Kind{
		gv: {validatorv1alpha1.DashboardCompatibilityScoreKind()},
	}
}
|
||||
@@ -0,0 +1,18 @@
|
||||
package v1alpha1
|
||||
|
||||
import "k8s.io/apimachinery/pkg/runtime/schema"
|
||||
|
||||
const (
	// APIGroup is the API group used by all kinds in this package
	APIGroup = "dashvalidator.ext.grafana.com"
	// APIVersion is the API version used by all kinds in this package
	APIVersion = "v1alpha1"
)

var (
	// GroupVersion is a schema.GroupVersion consisting of the Group and Version constants for this package
	GroupVersion = schema.GroupVersion{
		Group:   APIGroup,
		Version: APIVersion,
	}
)
|
||||
@@ -0,0 +1,99 @@
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
)
|
||||
|
||||
// DashboardCompatibilityScoreClient is a typed client for
// DashboardCompatibilityScore resources, wrapping the app-sdk generic client.
type DashboardCompatibilityScoreClient struct {
	// client performs the underlying API calls with typed (de)serialization
	client *resource.TypedClient[*DashboardCompatibilityScore, *DashboardCompatibilityScoreList]
}

// NewDashboardCompatibilityScoreClient wraps a generic resource.Client in a
// typed client for the DashboardCompatibilityScore kind.
func NewDashboardCompatibilityScoreClient(client resource.Client) *DashboardCompatibilityScoreClient {
	return &DashboardCompatibilityScoreClient{
		client: resource.NewTypedClient[*DashboardCompatibilityScore, *DashboardCompatibilityScoreList](client, Kind()),
	}
}

// NewDashboardCompatibilityScoreClientFromGenerator obtains a client for this
// kind from the provided generator and wraps it in a typed client.
func NewDashboardCompatibilityScoreClientFromGenerator(generator resource.ClientGenerator) (*DashboardCompatibilityScoreClient, error) {
	c, err := generator.ClientFor(Kind())
	if err != nil {
		return nil, err
	}
	return NewDashboardCompatibilityScoreClient(c), nil
}
|
||||
|
||||
// Get fetches a single DashboardCompatibilityScore identified by
// namespace/name.
func (c *DashboardCompatibilityScoreClient) Get(ctx context.Context, identifier resource.Identifier) (*DashboardCompatibilityScore, error) {
	return c.client.Get(ctx, identifier)
}

// List returns one page of DashboardCompatibilityScore resources in the given
// namespace; callers drive pagination themselves via the list's continue
// token (see ListAll for automatic pagination).
func (c *DashboardCompatibilityScoreClient) List(ctx context.Context, namespace string, opts resource.ListOptions) (*DashboardCompatibilityScoreList, error) {
	return c.client.List(ctx, namespace, opts)
}
|
||||
|
||||
// ListAll fetches every DashboardCompatibilityScore in the namespace by
// following the server's continue token until it is empty, merging all pages
// into a single list. opts.Limit bounds each page, not the total; any
// Continue value in opts is intentionally ignored since all pages are fetched.
func (c *DashboardCompatibilityScoreClient) ListAll(ctx context.Context, namespace string, opts resource.ListOptions) (*DashboardCompatibilityScoreList, error) {
	resp, err := c.client.List(ctx, namespace, resource.ListOptions{
		ResourceVersion: opts.ResourceVersion,
		Limit:           opts.Limit,
		LabelFilters:    opts.LabelFilters,
		FieldSelectors:  opts.FieldSelectors,
	})
	if err != nil {
		return nil, err
	}
	// A non-empty continue token means the server has more pages.
	for resp.GetContinue() != "" {
		page, err := c.client.List(ctx, namespace, resource.ListOptions{
			Continue:        resp.GetContinue(),
			ResourceVersion: opts.ResourceVersion,
			Limit:           opts.Limit,
			LabelFilters:    opts.LabelFilters,
			FieldSelectors:  opts.FieldSelectors,
		})
		if err != nil {
			return nil, err
		}
		// Accumulate the page into resp: advance the continue token, adopt the
		// newest resource version, and append the page's items.
		resp.SetContinue(page.GetContinue())
		resp.SetResourceVersion(page.GetResourceVersion())
		resp.SetItems(append(resp.GetItems(), page.GetItems()...))
	}
	return resp, nil
}
|
||||
|
||||
// Create persists a new DashboardCompatibilityScore. It stamps apiVersion and
// kind on the object before sending, so callers may leave TypeMeta empty.
// Note: this mutates obj's TypeMeta in place.
func (c *DashboardCompatibilityScoreClient) Create(ctx context.Context, obj *DashboardCompatibilityScore, opts resource.CreateOptions) (*DashboardCompatibilityScore, error) {
	// Make sure apiVersion and kind are set
	obj.APIVersion = GroupVersion.Identifier()
	obj.Kind = Kind().Kind()
	return c.client.Create(ctx, obj, opts)
}

// Update replaces an existing DashboardCompatibilityScore with obj.
func (c *DashboardCompatibilityScoreClient) Update(ctx context.Context, obj *DashboardCompatibilityScore, opts resource.UpdateOptions) (*DashboardCompatibilityScore, error) {
	return c.client.Update(ctx, obj, opts)
}

// Patch applies a partial update (patch request) to the identified resource.
func (c *DashboardCompatibilityScoreClient) Patch(ctx context.Context, identifier resource.Identifier, req resource.PatchRequest, opts resource.PatchOptions) (*DashboardCompatibilityScore, error) {
	return c.client.Patch(ctx, identifier, req, opts)
}
|
||||
|
||||
// UpdateStatus replaces only the status subresource of the identified
// DashboardCompatibilityScore with newStatus. It builds a minimal object
// (identity + status) and targets the "status" subresource so spec and
// metadata are left untouched.
func (c *DashboardCompatibilityScoreClient) UpdateStatus(ctx context.Context, identifier resource.Identifier, newStatus Status, opts resource.UpdateOptions) (*DashboardCompatibilityScore, error) {
	return c.client.Update(ctx, &DashboardCompatibilityScore{
		TypeMeta: metav1.TypeMeta{
			Kind:       Kind().Kind(),
			APIVersion: GroupVersion.Identifier(),
		},
		ObjectMeta: metav1.ObjectMeta{
			// ResourceVersion enables optimistic concurrency when provided.
			ResourceVersion: opts.ResourceVersion,
			Namespace:       identifier.Namespace,
			Name:            identifier.Name,
		},
		Status: newStatus,
	}, resource.UpdateOptions{
		Subresource:     "status",
		ResourceVersion: opts.ResourceVersion,
	})
}
|
||||
|
||||
// Delete removes the identified DashboardCompatibilityScore.
func (c *DashboardCompatibilityScoreClient) Delete(ctx context.Context, identifier resource.Identifier, opts resource.DeleteOptions) error {
	return c.client.Delete(ctx, identifier, opts)
}
|
||||
@@ -0,0 +1,28 @@
|
||||
//
|
||||
// Code generated by grafana-app-sdk. DO NOT EDIT.
|
||||
//
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"io"
|
||||
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
)
|
||||
|
||||
// JSONCodec is an implementation of resource.Codec for kubernetes JSON encoding
type JSONCodec struct{}

// Read reads JSON-encoded bytes from `reader` and unmarshals them into `into`
func (*JSONCodec) Read(reader io.Reader, into resource.Object) error {
	return json.NewDecoder(reader).Decode(into)
}

// Write writes JSON-encoded bytes into `writer` marshaled from `from`
// (output includes the encoder's trailing newline).
func (*JSONCodec) Write(writer io.Writer, from resource.Object) error {
	return json.NewEncoder(writer).Encode(from)
}

// Interface compliance checks
var _ resource.Codec = &JSONCodec{}
|
||||
@@ -0,0 +1,31 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
time "time"
|
||||
)
|
||||
|
||||
// metadata contains embedded CommonMetadata and can be extended with custom string fields
// TODO: use CommonMetadata instead of redefining here; currently needs to be defined here
// without external reference as using the CommonMetadata reference breaks thema codegen.
type Metadata struct {
	UpdateTimestamp   time.Time  `json:"updateTimestamp"`
	CreatedBy         string     `json:"createdBy"`
	Uid               string     `json:"uid"`
	CreationTimestamp time.Time  `json:"creationTimestamp"`
	DeletionTimestamp *time.Time `json:"deletionTimestamp,omitempty"`
	Finalizers        []string   `json:"finalizers"`
	ResourceVersion   string     `json:"resourceVersion"`
	Generation        int64      `json:"generation"`
	UpdatedBy         string     `json:"updatedBy"`
	Labels            map[string]string `json:"labels"`
}

// NewMetadata creates a new Metadata object.
// Finalizers and Labels are pre-initialized so they serialize as empty
// collections rather than JSON null.
func NewMetadata() *Metadata {
	return &Metadata{
		Finalizers: []string{},
		Labels:     map[string]string{},
	}
}
|
||||
@@ -0,0 +1,326 @@
|
||||
//
|
||||
// Code generated by grafana-app-sdk. DO NOT EDIT.
|
||||
//
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/apimachinery/pkg/runtime/schema"
|
||||
"k8s.io/apimachinery/pkg/types"
|
||||
"time"
|
||||
)
|
||||
|
||||
// DashboardCompatibilityScore is the kubernetes-style resource for a
// dashboard compatibility validation, combining standard object metadata
// with a Spec (inputs) and Status (validation results).
// +k8s:openapi-gen=true
type DashboardCompatibilityScore struct {
	metav1.TypeMeta   `json:",inline" yaml:",inline"`
	metav1.ObjectMeta `json:"metadata" yaml:"metadata"`

	// Spec is the spec of the DashboardCompatibilityScore
	Spec Spec `json:"spec" yaml:"spec"`

	// Status holds the validation results subresource
	Status Status `json:"status" yaml:"status"`
}

// NewDashboardCompatibilityScore creates a new DashboardCompatibilityScore
// with its Spec and Status initialized to their zero-value constructors.
func NewDashboardCompatibilityScore() *DashboardCompatibilityScore {
	return &DashboardCompatibilityScore{
		Spec:   *NewSpec(),
		Status: *NewStatus(),
	}
}
|
||||
|
||||
// GetSpec returns the object's Spec as an untyped value (resource.Object
// interface requirement). Note it returns a copy, not a pointer.
func (o *DashboardCompatibilityScore) GetSpec() any {
	return o.Spec
}

// SetSpec sets the object's Spec from an untyped value; it errors if the
// value is not exactly of type Spec.
func (o *DashboardCompatibilityScore) SetSpec(spec any) error {
	cast, ok := spec.(Spec)
	if !ok {
		return fmt.Errorf("cannot set spec type %#v, not of type Spec", spec)
	}
	o.Spec = cast
	return nil
}
|
||||
|
||||
// GetSubresources returns all subresources of this kind by name; only
// "status" exists for DashboardCompatibilityScore.
func (o *DashboardCompatibilityScore) GetSubresources() map[string]any {
	return map[string]any{
		"status": o.Status,
	}
}

// GetSubresource returns the named subresource (a copy) and whether it exists.
func (o *DashboardCompatibilityScore) GetSubresource(name string) (any, bool) {
	switch name {
	case "status":
		return o.Status, true
	default:
		return nil, false
	}
}

// SetSubresource sets the named subresource from an untyped value, erroring
// if the name is unknown or the value has the wrong concrete type.
func (o *DashboardCompatibilityScore) SetSubresource(name string, value any) error {
	switch name {
	case "status":
		cast, ok := value.(Status)
		if !ok {
			return fmt.Errorf("cannot set status type %#v, not of type Status", value)
		}
		o.Status = cast
		return nil
	default:
		return fmt.Errorf("subresource '%s' does not exist", name)
	}
}
|
||||
|
||||
// GetStaticMetadata returns the object's identity (name/namespace) and its
// group/version/kind as a resource.StaticMetadata.
func (o *DashboardCompatibilityScore) GetStaticMetadata() resource.StaticMetadata {
	gvk := o.GroupVersionKind()
	return resource.StaticMetadata{
		Name:      o.ObjectMeta.Name,
		Namespace: o.ObjectMeta.Namespace,
		Group:     gvk.Group,
		Version:   gvk.Version,
		Kind:      gvk.Kind,
	}
}

// SetStaticMetadata applies identity and GVK from a resource.StaticMetadata
// onto the object.
func (o *DashboardCompatibilityScore) SetStaticMetadata(metadata resource.StaticMetadata) {
	o.Name = metadata.Name
	o.Namespace = metadata.Namespace
	o.SetGroupVersionKind(schema.GroupVersionKind{
		Group:   metadata.Group,
		Version: metadata.Version,
		Kind:    metadata.Kind,
	})
}
|
||||
|
||||
// GetCommonMetadata projects the kubernetes ObjectMeta into the app-sdk's
// resource.CommonMetadata, carrying annotations/managedFields/ownerReferences
// through ExtraFields for legacy consumers.
func (o *DashboardCompatibilityScore) GetCommonMetadata() resource.CommonMetadata {
	// DeletionTimestamp is optional; unwrap metav1.Time to *time.Time.
	dt := o.DeletionTimestamp
	var deletionTimestamp *time.Time
	if dt != nil {
		deletionTimestamp = &dt.Time
	}
	// Legacy ExtraFields support
	extraFields := make(map[string]any)
	if o.Annotations != nil {
		extraFields["annotations"] = o.Annotations
	}
	if o.ManagedFields != nil {
		extraFields["managedFields"] = o.ManagedFields
	}
	if o.OwnerReferences != nil {
		extraFields["ownerReferences"] = o.OwnerReferences
	}
	return resource.CommonMetadata{
		UID:               string(o.UID),
		ResourceVersion:   o.ResourceVersion,
		Generation:        o.Generation,
		Labels:            o.Labels,
		CreationTimestamp: o.CreationTimestamp.Time,
		DeletionTimestamp: deletionTimestamp,
		Finalizers:        o.Finalizers,
		// UpdateTimestamp/CreatedBy/UpdatedBy are stored in annotations;
		// see the Get* accessors below.
		UpdateTimestamp: o.GetUpdateTimestamp(),
		CreatedBy:       o.GetCreatedBy(),
		UpdatedBy:       o.GetUpdatedBy(),
		ExtraFields:     extraFields,
	}
}
|
||||
|
||||
// SetCommonMetadata applies a resource.CommonMetadata onto the object's
// ObjectMeta, storing UpdateTimestamp/CreatedBy/UpdatedBy in annotations and
// honoring legacy ExtraFields for annotations/managedFields/ownerReferences.
func (o *DashboardCompatibilityScore) SetCommonMetadata(metadata resource.CommonMetadata) {
	o.UID = types.UID(metadata.UID)
	o.ResourceVersion = metadata.ResourceVersion
	o.Generation = metadata.Generation
	o.Labels = metadata.Labels
	o.CreationTimestamp = metav1.NewTime(metadata.CreationTimestamp)
	if metadata.DeletionTimestamp != nil {
		dt := metav1.NewTime(*metadata.DeletionTimestamp)
		o.DeletionTimestamp = &dt
	} else {
		o.DeletionTimestamp = nil
	}
	o.Finalizers = metadata.Finalizers
	// Ensure the annotations map exists before the Set* helpers write to it.
	if o.Annotations == nil {
		o.Annotations = make(map[string]string)
	}
	// Only persist non-zero/non-empty values so absent fields don't create
	// empty annotations.
	if !metadata.UpdateTimestamp.IsZero() {
		o.SetUpdateTimestamp(metadata.UpdateTimestamp)
	}
	if metadata.CreatedBy != "" {
		o.SetCreatedBy(metadata.CreatedBy)
	}
	if metadata.UpdatedBy != "" {
		o.SetUpdatedBy(metadata.UpdatedBy)
	}
	// Legacy support for setting Annotations, ManagedFields, and OwnerReferences via ExtraFields
	if metadata.ExtraFields != nil {
		// Each entry is applied only if it has the expected concrete type;
		// mismatched types are silently ignored.
		if annotations, ok := metadata.ExtraFields["annotations"]; ok {
			if cast, ok := annotations.(map[string]string); ok {
				o.Annotations = cast
			}
		}
		if managedFields, ok := metadata.ExtraFields["managedFields"]; ok {
			if cast, ok := managedFields.([]metav1.ManagedFieldsEntry); ok {
				o.ManagedFields = cast
			}
		}
		if ownerReferences, ok := metadata.ExtraFields["ownerReferences"]; ok {
			if cast, ok := ownerReferences.([]metav1.OwnerReference); ok {
				o.OwnerReferences = cast
			}
		}
	}
}
|
||||
|
||||
// GetCreatedBy returns the creator recorded in the "grafana.com/createdBy"
// annotation (empty string if unset). Note: the getter lazily initializes the
// annotations map, so it mutates the receiver.
func (o *DashboardCompatibilityScore) GetCreatedBy() string {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	return o.ObjectMeta.Annotations["grafana.com/createdBy"]
}

// SetCreatedBy records the creator in the "grafana.com/createdBy" annotation.
func (o *DashboardCompatibilityScore) SetCreatedBy(createdBy string) {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	o.ObjectMeta.Annotations["grafana.com/createdBy"] = createdBy
}

// GetUpdateTimestamp parses the "grafana.com/updateTimestamp" annotation as
// RFC 3339. The parse error is deliberately discarded, so a missing or
// malformed annotation yields the zero time.Time.
func (o *DashboardCompatibilityScore) GetUpdateTimestamp() time.Time {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	parsed, _ := time.Parse(time.RFC3339, o.ObjectMeta.Annotations["grafana.com/updateTimestamp"])
	return parsed
}

// SetUpdateTimestamp stores the timestamp in the "grafana.com/updateTimestamp"
// annotation, formatted as RFC 3339.
func (o *DashboardCompatibilityScore) SetUpdateTimestamp(updateTimestamp time.Time) {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	o.ObjectMeta.Annotations["grafana.com/updateTimestamp"] = updateTimestamp.Format(time.RFC3339)
}

// GetUpdatedBy returns the last updater recorded in the
// "grafana.com/updatedBy" annotation (empty string if unset).
func (o *DashboardCompatibilityScore) GetUpdatedBy() string {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	return o.ObjectMeta.Annotations["grafana.com/updatedBy"]
}

// SetUpdatedBy records the last updater in the "grafana.com/updatedBy"
// annotation.
func (o *DashboardCompatibilityScore) SetUpdatedBy(updatedBy string) {
	if o.ObjectMeta.Annotations == nil {
		o.ObjectMeta.Annotations = make(map[string]string)
	}

	o.ObjectMeta.Annotations["grafana.com/updatedBy"] = updatedBy
}
|
||||
|
||||
// Copy returns a deep copy of the object as a resource.Object, using the
// app-sdk's generic copy helper.
func (o *DashboardCompatibilityScore) Copy() resource.Object {
	return resource.CopyObject(o)
}

// DeepCopyObject implements runtime.Object for use with kubernetes machinery.
func (o *DashboardCompatibilityScore) DeepCopyObject() runtime.Object {
	return o.Copy()
}

// DeepCopy returns a typed deep copy of the object.
func (o *DashboardCompatibilityScore) DeepCopy() *DashboardCompatibilityScore {
	cpy := &DashboardCompatibilityScore{}
	o.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies the receiver into dst, field by field.
func (o *DashboardCompatibilityScore) DeepCopyInto(dst *DashboardCompatibilityScore) {
	dst.TypeMeta.APIVersion = o.TypeMeta.APIVersion
	dst.TypeMeta.Kind = o.TypeMeta.Kind
	o.ObjectMeta.DeepCopyInto(&dst.ObjectMeta)
	o.Spec.DeepCopyInto(&dst.Spec)
	o.Status.DeepCopyInto(&dst.Status)
}

// Interface compliance compile-time check
var _ resource.Object = &DashboardCompatibilityScore{}
|
||||
|
||||
// DashboardCompatibilityScoreList is the kubernetes-style list type for
// DashboardCompatibilityScore resources.
// +k8s:openapi-gen=true
type DashboardCompatibilityScoreList struct {
	metav1.TypeMeta `json:",inline" yaml:",inline"`
	metav1.ListMeta `json:"metadata" yaml:"metadata"`
	Items           []DashboardCompatibilityScore `json:"items" yaml:"items"`
}

// DeepCopyObject implements runtime.Object for use with kubernetes machinery.
func (o *DashboardCompatibilityScoreList) DeepCopyObject() runtime.Object {
	return o.Copy()
}

// Copy returns a deep copy of the list as a resource.ListObject, copying
// list metadata and every item.
func (o *DashboardCompatibilityScoreList) Copy() resource.ListObject {
	cpy := &DashboardCompatibilityScoreList{
		TypeMeta: o.TypeMeta,
		Items:    make([]DashboardCompatibilityScore, len(o.Items)),
	}
	o.ListMeta.DeepCopyInto(&cpy.ListMeta)
	for i := 0; i < len(o.Items); i++ {
		// Items whose copy fails the type assertion are left zero-valued;
		// Copy always returns *DashboardCompatibilityScore in practice.
		if item, ok := o.Items[i].Copy().(*DashboardCompatibilityScore); ok {
			cpy.Items[i] = *item
		}
	}
	return cpy
}

// GetItems returns the list's items as generic resource.Objects
// (pointers into the list's backing slice, not copies).
func (o *DashboardCompatibilityScoreList) GetItems() []resource.Object {
	items := make([]resource.Object, len(o.Items))
	for i := 0; i < len(o.Items); i++ {
		items[i] = &o.Items[i]
	}
	return items
}

// SetItems replaces the list's items from generic resource.Objects.
// The unchecked type assertion panics if any element is not a
// *DashboardCompatibilityScore.
func (o *DashboardCompatibilityScoreList) SetItems(items []resource.Object) {
	o.Items = make([]DashboardCompatibilityScore, len(items))
	for i := 0; i < len(items); i++ {
		o.Items[i] = *items[i].(*DashboardCompatibilityScore)
	}
}

// DeepCopy returns a typed deep copy of the list.
func (o *DashboardCompatibilityScoreList) DeepCopy() *DashboardCompatibilityScoreList {
	cpy := &DashboardCompatibilityScoreList{}
	o.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies the receiver into dst using the app-sdk's generic
// copy helper.
func (o *DashboardCompatibilityScoreList) DeepCopyInto(dst *DashboardCompatibilityScoreList) {
	resource.CopyObjectInto(dst, o)
}

// Interface compliance compile-time check
var _ resource.ListObject = &DashboardCompatibilityScoreList{}
|
||||
|
||||
// Copy methods for all subresource types

// DeepCopy creates a full deep copy of Spec
func (s *Spec) DeepCopy() *Spec {
	cpy := &Spec{}
	s.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies Spec into another Spec object
// (delegates to the app-sdk's generic copy helper).
func (s *Spec) DeepCopyInto(dst *Spec) {
	resource.CopyObjectInto(dst, s)
}

// DeepCopy creates a full deep copy of Status
func (s *Status) DeepCopy() *Status {
	cpy := &Status{}
	s.DeepCopyInto(cpy)
	return cpy
}

// DeepCopyInto deep copies Status into another Status object
// (delegates to the app-sdk's generic copy helper).
func (s *Status) DeepCopyInto(dst *Status) {
	resource.CopyObjectInto(dst, s)
}
|
||||
@@ -0,0 +1,34 @@
|
||||
//
|
||||
// Code generated by grafana-app-sdk. DO NOT EDIT.
|
||||
//
|
||||
|
||||
package v1alpha1
|
||||
|
||||
import (
|
||||
"github.com/grafana/grafana-app-sdk/resource"
|
||||
)
|
||||
|
||||
// schema is unexported to prevent accidental overwrites
var (
	// schemaDashboardCompatibilityScore describes the kind's group/version,
	// plural name, and namespaced scope for the app-sdk.
	schemaDashboardCompatibilityScore = resource.NewSimpleSchema("dashvalidator.ext.grafana.com", "v1alpha1", NewDashboardCompatibilityScore(), &DashboardCompatibilityScoreList{}, resource.WithKind("DashboardCompatibilityScore"),
		resource.WithPlural("dashboardcompatibilityscores"), resource.WithScope(resource.NamespacedScope))
	// kindDashboardCompatibilityScore pairs the schema with its JSON codec.
	kindDashboardCompatibilityScore = resource.Kind{
		Schema: schemaDashboardCompatibilityScore,
		Codecs: map[resource.KindEncoding]resource.Codec{
			resource.KindEncodingJSON: &JSONCodec{},
		},
	}
)

// Kind returns a resource.Kind for this Schema with a JSON codec
func Kind() resource.Kind {
	return kindDashboardCompatibilityScore
}

// Schema returns a resource.SimpleSchema representation of DashboardCompatibilityScore
func Schema() *resource.SimpleSchema {
	return schemaDashboardCompatibilityScore
}

// Interface compliance checks
var _ resource.Schema = kindDashboardCompatibilityScore
|
||||
@@ -0,0 +1,48 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
// DataSourceMapping specifies a datasource to validate dashboard queries against.
// Maps logical datasource references in the dashboard to actual datasource instances.
// +k8s:openapi-gen=true
type DataSourceMapping struct {
	// Unique identifier of the datasource instance.
	// Example: "prometheus-prod-us-west"
	Uid string `json:"uid"`
	// Type of datasource plugin.
	// MVP: Only "prometheus" supported.
	// Future: "mysql", "postgres", "elasticsearch", etc.
	Type string `json:"type"`
	// Optional human-readable name for display in results.
	// If not provided, UID will be used in error messages.
	// Example: "Production Prometheus (US-West)"
	Name *string `json:"name,omitempty"`
}

// NewDataSourceMapping creates a new DataSourceMapping object
// with all fields zero-valued.
func NewDataSourceMapping() *DataSourceMapping {
	return &DataSourceMapping{}
}
|
||||
|
||||
// Spec holds the validation inputs: the dashboard to validate and the
// datasources to validate it against.
// +k8s:openapi-gen=true
type Spec struct {
	// Complete dashboard JSON object to validate.
	// Must be a v1 dashboard schema (contains "panels" array).
	// v2 dashboards (with "elements" structure) are not yet supported.
	DashboardJson map[string]interface{} `json:"dashboardJson"`
	// Array of datasources to validate against.
	// The validator will check dashboard queries against each datasource
	// and provide per-datasource compatibility results.
	//
	// MVP: Only single datasource supported (array length = 1), Prometheus type only.
	// Future: Will support multiple datasources for dashboards with mixed queries.
	DatasourceMappings []DataSourceMapping `json:"datasourceMappings"`
}

// NewSpec creates a new Spec object.
// Both collections are pre-initialized so they serialize as empty rather
// than JSON null.
func NewSpec() *Spec {
	return &Spec{
		DashboardJson:      map[string]interface{}{},
		DatasourceMappings: []DataSourceMapping{},
	}
}
|
||||
@@ -0,0 +1,151 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
package v1alpha1
|
||||
|
||||
// DataSourceResult contains validation results for a single datasource.
// Provides aggregate statistics and per-query breakdown of compatibility.
// +k8s:openapi-gen=true
type DataSourceResult struct {
	// Datasource UID that was validated (matches DataSourceMapping.uid)
	Uid string `json:"uid"`
	// Datasource type (matches DataSourceMapping.type)
	Type string `json:"type"`
	// Optional display name (matches DataSourceMapping.name if provided)
	Name *string `json:"name,omitempty"`
	// Total number of queries in the dashboard targeting this datasource.
	// Includes all panel targets/queries that reference this datasource.
	TotalQueries int64 `json:"totalQueries"`
	// Number of queries successfully validated.
	// May be less than totalQueries if some queries couldn't be parsed.
	CheckedQueries int64 `json:"checkedQueries"`
	// Total number of unique metrics/identifiers referenced across all queries.
	// For Prometheus: metric names extracted from PromQL expressions.
	// For SQL datasources: table and column names.
	TotalMetrics int64 `json:"totalMetrics"`
	// Number of metrics that exist in the datasource schema.
	// foundMetrics <= totalMetrics
	FoundMetrics int64 `json:"foundMetrics"`
	// Array of metric names that were referenced but don't exist.
	// Useful for debugging why a dashboard shows "no data".
	// Example for Prometheus: ["http_requests_total", "api_latency_seconds"]
	MissingMetrics []string `json:"missingMetrics"`
	// Per-query breakdown showing which specific queries have issues.
	// One entry per query target (refId: "A", "B", "C", etc.) in each panel.
	// Allows pinpointing exactly which panel/query needs fixing.
	QueryBreakdown []QueryBreakdown `json:"queryBreakdown"`
	// Overall compatibility score for this datasource (0-100).
	// Calculated as: (foundMetrics / totalMetrics) * 100
	// Used to calculate the global compatibilityScore in status.
	CompatibilityScore float64 `json:"compatibilityScore"`
}

// NewDataSourceResult creates a new DataSourceResult object.
// Slice fields are pre-initialized so they serialize as [] rather than null.
func NewDataSourceResult() *DataSourceResult {
	return &DataSourceResult{
		MissingMetrics: []string{},
		QueryBreakdown: []QueryBreakdown{},
	}
}
|
||||
|
||||
// QueryBreakdown provides compatibility details for a single query within a panel.
// Granular per-query results allow users to identify exactly which queries need fixing.
//
// Note: A panel can have multiple queries (refId: "A", "B", "C", etc.),
// so there may be multiple QueryBreakdown entries for the same panelID.
// +k8s:openapi-gen=true
type QueryBreakdown struct {
	// Human-readable panel title for context.
	// Example: "CPU Usage", "Request Rate"
	PanelTitle string `json:"panelTitle"`
	// Numeric panel ID from dashboard JSON.
	// Used to correlate with dashboard structure.
	PanelID int64 `json:"panelID"`
	// Query identifier within the panel.
	// Values: "A", "B", "C", etc. (from panel.targets[].refId)
	// Uniquely identifies which query in a multi-query panel this refers to.
	QueryRefId string `json:"queryRefId"`
	// Number of unique metrics referenced in this specific query.
	// For Prometheus: metrics extracted from the PromQL expr.
	// Example: rate(http_requests_total[5m]) references 1 metric.
	TotalMetrics int64 `json:"totalMetrics"`
	// Number of those metrics that exist in the datasource.
	// foundMetrics <= totalMetrics
	FoundMetrics int64 `json:"foundMetrics"`
	// Array of missing metric names specific to this query.
	// Helps identify exactly which part of a query expression will fail.
	// Empty array means query is fully compatible.
	MissingMetrics []string `json:"missingMetrics"`
	// Compatibility percentage for this individual query (0-100).
	// Calculated as: (foundMetrics / totalMetrics) * 100
	// 100 = query will work perfectly, 0 = query will return no data.
	CompatibilityScore float64 `json:"compatibilityScore"`
}

// NewQueryBreakdown creates a new QueryBreakdown object.
// MissingMetrics is pre-initialized so it serializes as [] rather than null.
func NewQueryBreakdown() *QueryBreakdown {
	return &QueryBreakdown{
		MissingMetrics: []string{},
	}
}
|
||||
|
||||
// StatusOperatorState records a single operator's evaluation of this resource.
// +k8s:openapi-gen=true
type StatusOperatorState struct {
	// lastEvaluation is the ResourceVersion last evaluated
	LastEvaluation string `json:"lastEvaluation"`
	// state describes the state of the lastEvaluation.
	// It is limited to three possible states for machine evaluation.
	State StatusOperatorStateState `json:"state"`
	// descriptiveState is an optional more descriptive state field which has no requirements on format
	DescriptiveState *string `json:"descriptiveState,omitempty"`
	// details contains any extra information that is operator-specific
	Details map[string]interface{} `json:"details,omitempty"`
}

// NewStatusOperatorState creates a new StatusOperatorState object
// with all fields zero-valued.
func NewStatusOperatorState() *StatusOperatorState {
	return &StatusOperatorState{}
}
|
||||
|
||||
// Status is the status subresource holding the outcome of the most recent
// dashboard-compatibility validation run.
//
// NOTE(review): the score here is documented as 0-100, while
// DashboardCompatibilityResult.CompatibilityScore is documented as 0.0-1.0 —
// confirm a scale conversion happens wherever one is mapped into the other.
//
// +k8s:openapi-gen=true
type Status struct {
	// Overall compatibility score across all datasources (0-100).
	// Calculated as: (total found metrics / total referenced metrics) * 100
	//
	// Score interpretation:
	// - 100: Perfect compatibility, all queries will work
	// - 80-99: Excellent, minor missing metrics
	// - 50-79: Fair, significant missing metrics
	// - 0-49: Poor, most queries will fail
	CompatibilityScore float64 `json:"compatibilityScore"`
	// Per-datasource validation results.
	// Array length matches spec.datasourceMappings.
	// Each element contains detailed metrics and query-level breakdown.
	DatasourceResults []DataSourceResult `json:"datasourceResults"`
	// ISO 8601 timestamp of when validation was last performed.
	// Example: "2024-01-15T10:30:00Z"
	LastChecked *string `json:"lastChecked,omitempty"`
	// operatorStates is a map of operator ID to operator state evaluations.
	// Any operator which consumes this kind SHOULD add its state evaluation information to this field.
	OperatorStates map[string]StatusOperatorState `json:"operatorStates,omitempty"`
	// Human-readable summary of validation result.
	// Examples: "All queries compatible", "3 missing metrics found"
	Message *string `json:"message,omitempty"`
	// additionalFields is reserved for future use
	AdditionalFields map[string]interface{} `json:"additionalFields,omitempty"`
}
|
||||
|
||||
// NewStatus creates a new Status object.
|
||||
func NewStatus() *Status {
|
||||
return &Status{
|
||||
DatasourceResults: []DataSourceResult{},
|
||||
}
|
||||
}
|
||||
|
||||
// StatusOperatorStateState enumerates the machine-readable outcomes an
// operator may report in StatusOperatorState.State.
//
// +k8s:openapi-gen=true
type StatusOperatorStateState string

// Allowed StatusOperatorStateState values.
const (
	StatusOperatorStateStateSuccess    StatusOperatorStateState = "success"
	StatusOperatorStateStateInProgress StatusOperatorStateState = "in_progress"
	StatusOperatorStateStateFailed     StatusOperatorStateState = "failed"
)
|
||||
File diff suppressed because one or more lines are too long
568
apps/dashvalidator/pkg/validator/dashboard.go
Normal file
568
apps/dashvalidator/pkg/validator/dashboard.go
Normal file
@@ -0,0 +1,568 @@
|
||||
package validator
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// DashboardCompatibilityRequest contains the dashboard and datasource mappings to validate.
type DashboardCompatibilityRequest struct {
	DashboardJSON      map[string]interface{} // Dashboard JSON structure (v1 schema: top-level "panels" array)
	DatasourceMappings []DatasourceMapping    // Datasources to validate against (MVP: exactly one entry, enforced by ValidateDashboardCompatibility)
}
|
||||
|
||||
// DatasourceMapping maps a datasource UID to its type and optionally name/URL.
type DatasourceMapping struct {
	UID        string       // Datasource UID
	Type       string       // Datasource type (prometheus, mysql, etc.); selects the validator via GetValidator
	Name       string       // Optional: Datasource name
	URL        string       // Datasource URL
	HTTPClient *http.Client // Authenticated HTTP client used to reach the datasource
}
|
||||
|
||||
// DashboardCompatibilityResult contains the validation results for a dashboard.
//
// NOTE(review): CompatibilityScore is documented here as 0.0-1.0, but the
// generated Status type documents a 0-100 scale — confirm which scale the
// per-datasource validators actually return and where conversion happens.
type DashboardCompatibilityResult struct {
	CompatibilityScore float64                      // Overall compatibility (0.0 - 1.0); mean over validated datasources
	DatasourceResults  []DatasourceValidationResult // Per-datasource results
}
|
||||
|
||||
// DatasourceValidationResult contains validation results for one datasource.
// It is a flattened copy of the validator's ValidateQueries output, tagged
// with the datasource identity it was produced for.
type DatasourceValidationResult struct {
	UID                string        // Datasource UID this result belongs to
	Type               string        // Datasource type (e.g. prometheus)
	Name               string        // Datasource display name
	TotalQueries       int           // Queries extracted from the dashboard for this datasource
	CheckedQueries     int           // Queries the validator actually evaluated
	TotalMetrics       int           // Metrics referenced across all checked queries
	FoundMetrics       int           // Referenced metrics that exist in the datasource
	MissingMetrics     []string      // Referenced metrics absent from the datasource
	QueryBreakdown     []QueryResult // Per-query detail
	CompatibilityScore float64       // Score as returned by the validator — scale not established here; see DashboardCompatibilityResult note
}
|
||||
|
||||
// ValidateDashboardCompatibility is the main entry point for validating dashboard compatibility.
// It extracts queries from the dashboard, validates them against each datasource, and returns aggregated results.
//
// MVP restrictions: req.DatasourceMappings must contain exactly one entry,
// and template-variable datasource references are resolved to that single
// UID. Datasources with no queries, or with an unsupported type, are skipped
// silently; a validator failure (connection/auth/etc.) aborts the whole run
// with an error.
//
// NOTE(review): the fmt.Printf "[DEBUG]" statements below write directly to
// stdout and look like leftover development tracing — consider removing them
// or routing through a proper logger.
func ValidateDashboardCompatibility(ctx context.Context, req DashboardCompatibilityRequest) (*DashboardCompatibilityResult, error) {
	// MVP: Only support single datasource validation
	if len(req.DatasourceMappings) != 1 {
		return nil, fmt.Errorf("MVP only supports single datasource validation, got %d datasources", len(req.DatasourceMappings))
	}

	singleDatasource := req.DatasourceMappings[0]

	result := &DashboardCompatibilityResult{
		DatasourceResults: make([]DatasourceValidationResult, 0, len(req.DatasourceMappings)),
	}

	// Step 1: Extract queries from dashboard JSON
	queries, err := extractQueriesFromDashboard(req.DashboardJSON)
	if err != nil {
		return nil, fmt.Errorf("failed to extract queries from dashboard: %w", err)
	}

	fmt.Printf("[DEBUG] Extracted %d queries from dashboard\n", len(queries))
	for i, q := range queries {
		fmt.Printf("[DEBUG] Query %d: DS=%s, RefID=%s, Query=%s\n", i, q.DatasourceUID, q.RefID, q.QueryText)
	}

	// Step 2: Group queries by datasource UID (with variable resolution for MVP)
	queriesByDatasource := groupQueriesByDatasource(queries, singleDatasource.UID, req.DashboardJSON)

	fmt.Printf("[DEBUG] Grouped queries by %d datasources\n", len(queriesByDatasource))
	for dsUID, dsQueries := range queriesByDatasource {
		fmt.Printf("[DEBUG] Datasource %s has %d queries\n", dsUID, len(dsQueries))
	}

	// Step 3: Validate each datasource
	// (the loop is written for N datasources even though the MVP guard above
	// limits it to one iteration)
	var totalCompatibility float64
	validatedCount := 0

	for _, dsMapping := range req.DatasourceMappings {
		fmt.Printf("[DEBUG] Processing datasource mapping: UID=%s, Type=%s, URL=%s\n", dsMapping.UID, dsMapping.Type, dsMapping.URL)

		// Get queries for this datasource
		dsQueries, ok := queriesByDatasource[dsMapping.UID]
		if !ok || len(dsQueries) == 0 {
			// No queries for this datasource, skip
			fmt.Printf("[DEBUG] No queries found for datasource %s, skipping\n", dsMapping.UID)
			continue
		}

		fmt.Printf("[DEBUG] Found %d queries for datasource %s\n", len(dsQueries), dsMapping.UID)

		// Get validator for this datasource type
		v, err := GetValidator(dsMapping.Type)
		if err != nil {
			// Unsupported datasource type, skip but log
			fmt.Printf("[DEBUG] Failed to get validator for type %s: %v\n", dsMapping.Type, err)
			continue
		}

		fmt.Printf("[DEBUG] Got validator for type %s, starting validation\n", dsMapping.Type)

		// Build Datasource struct
		ds := Datasource{
			UID:        dsMapping.UID,
			Type:       dsMapping.Type,
			Name:       dsMapping.Name,
			URL:        dsMapping.URL,
			HTTPClient: dsMapping.HTTPClient,
		}

		// Validate queries
		validationResult, err := v.ValidateQueries(ctx, dsQueries, ds)
		if err != nil {
			// Validation failed for this datasource - return error to caller
			// This could be a connection error, auth error, or other critical failure
			return nil, fmt.Errorf("validation failed for datasource %s: %w", dsMapping.UID, err)
		}

		// Convert to DatasourceValidationResult
		dsResult := DatasourceValidationResult{
			UID:                dsMapping.UID,
			Type:               dsMapping.Type,
			Name:               dsMapping.Name,
			TotalQueries:       validationResult.TotalQueries,
			CheckedQueries:     validationResult.CheckedQueries,
			TotalMetrics:       validationResult.TotalMetrics,
			FoundMetrics:       validationResult.FoundMetrics,
			MissingMetrics:     validationResult.MissingMetrics,
			QueryBreakdown:     validationResult.QueryBreakdown,
			CompatibilityScore: validationResult.CompatibilityScore,
		}

		result.DatasourceResults = append(result.DatasourceResults, dsResult)
		totalCompatibility += validationResult.CompatibilityScore
		validatedCount++
	}

	// Step 4: Calculate overall compatibility score
	// (unweighted mean over the datasources that were actually validated)
	if validatedCount > 0 {
		result.CompatibilityScore = totalCompatibility / float64(validatedCount)
	} else {
		result.CompatibilityScore = 1.0 // No datasources = perfect compatibility
	}

	return result, nil
}
|
||||
|
||||
// extractQueriesFromDashboard parses the dashboard JSON and extracts all queries
|
||||
// Supports both v1 (legacy) and v2 (new) dashboard formats
|
||||
func extractQueriesFromDashboard(dashboardJSON map[string]interface{}) ([]DashboardQuery, error) {
|
||||
var queries []DashboardQuery
|
||||
|
||||
// Debug: Print what keys we have
|
||||
fmt.Printf("[DEBUG] Dashboard JSON keys: ")
|
||||
for key := range dashboardJSON {
|
||||
fmt.Printf("%s, ", key)
|
||||
}
|
||||
fmt.Printf("\n")
|
||||
|
||||
// Detect dashboard version (v1 uses "panels", v2 uses different structure)
|
||||
// For MVP, we only support v1 (legacy format with panels array)
|
||||
if !isV1Dashboard(dashboardJSON) {
|
||||
fmt.Printf("[DEBUG] isV1Dashboard returned false, 'panels' key exists: %v\n", dashboardJSON["panels"] != nil)
|
||||
return nil, fmt.Errorf("unsupported dashboard format: only v1 dashboards are supported in MVP")
|
||||
}
|
||||
|
||||
// Extract panels array
|
||||
panels, ok := dashboardJSON["panels"].([]interface{})
|
||||
if !ok {
|
||||
// No panels in dashboard, return empty array
|
||||
return queries, nil
|
||||
}
|
||||
|
||||
// Iterate through all panels
|
||||
for _, panelInterface := range panels {
|
||||
panel, ok := panelInterface.(map[string]interface{})
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract queries from this panel
|
||||
panelQueries := extractQueriesFromPanel(panel)
|
||||
queries = append(queries, panelQueries...)
|
||||
|
||||
// Handle nested panels in collapsed rows
|
||||
nestedPanels, hasNested := panel["panels"].([]interface{})
|
||||
if hasNested {
|
||||
for _, nestedPanelInterface := range nestedPanels {
|
||||
nestedPanel, ok := nestedPanelInterface.(map[string]interface{})
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
nestedQueries := extractQueriesFromPanel(nestedPanel)
|
||||
queries = append(queries, nestedQueries...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return queries, nil
|
||||
}
|
||||
|
||||
// isV1Dashboard reports whether a dashboard uses the v1 (legacy) schema.
// v1 dashboards carry a top-level "panels" array; v2 dashboards carry an
// "elements" map and a "layout" structure, so either of those marks the
// dashboard as v2 regardless of what else is present.
//
// Mirrors Grafana's official conversion logic, which distinguishes formats
// with type-safe assertions.
// Reference: apps/dashboard/pkg/migration/conversion/v1beta1_to_v2alpha1.go:450
func isV1Dashboard(dashboard map[string]interface{}) bool {
	// Positive v2 identification first.
	_, elementsIsMap := dashboard["elements"].(map[string]interface{})
	_, layoutPresent := dashboard["layout"]
	if elementsIsMap || layoutPresent {
		return false
	}

	// v1 requires "panels" to actually be an array — {"panels": "x"} fails.
	_, panelsIsArray := dashboard["panels"].([]interface{})
	return panelsIsArray
}
|
||||
|
||||
// extractQueriesFromPanel extracts all queries/targets from a single panel
|
||||
func extractQueriesFromPanel(panel map[string]interface{}) []DashboardQuery {
|
||||
var queries []DashboardQuery
|
||||
|
||||
// Get panel info for context
|
||||
panelTitle := getStringValue(panel, "title", "Untitled Panel")
|
||||
panelID := getIntValue(panel, "id", 0)
|
||||
|
||||
// Extract targets array (queries)
|
||||
targets, hasTargets := panel["targets"].([]interface{})
|
||||
if !hasTargets {
|
||||
return queries
|
||||
}
|
||||
|
||||
// Iterate through each target/query
|
||||
for _, targetInterface := range targets {
|
||||
target, ok := targetInterface.(map[string]interface{})
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract datasource UID
|
||||
datasourceUID := extractDatasourceUID(target, panel)
|
||||
if datasourceUID == "" {
|
||||
// Skip queries without datasource
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract query text (different fields for different datasources)
|
||||
queryText := extractQueryText(target)
|
||||
if queryText == "" {
|
||||
// Skip empty queries
|
||||
continue
|
||||
}
|
||||
|
||||
// Extract refId (A, B, C, etc.)
|
||||
refID := getStringValue(target, "refId", "")
|
||||
|
||||
// Build DashboardQuery
|
||||
query := DashboardQuery{
|
||||
DatasourceUID: datasourceUID,
|
||||
RefID: refID,
|
||||
QueryText: queryText,
|
||||
PanelTitle: panelTitle,
|
||||
PanelID: panelID,
|
||||
}
|
||||
|
||||
queries = append(queries, query)
|
||||
}
|
||||
|
||||
return queries
|
||||
}
|
||||
|
||||
// extractDatasourceUID gets the datasource UID from a target, falling back to panel datasource
|
||||
func extractDatasourceUID(target map[string]interface{}, panel map[string]interface{}) string {
|
||||
// Try target-level datasource first
|
||||
if ds, ok := target["datasource"]; ok {
|
||||
if uid := getDatasourceUIDFromValue(ds); uid != "" {
|
||||
return uid
|
||||
}
|
||||
}
|
||||
|
||||
// Fall back to panel-level datasource
|
||||
if ds, ok := panel["datasource"]; ok {
|
||||
if uid := getDatasourceUIDFromValue(ds); uid != "" {
|
||||
return uid
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
// getDatasourceUIDFromValue extracts UID from datasource value (can be string or object)
|
||||
func getDatasourceUIDFromValue(ds interface{}) string {
|
||||
switch v := ds.(type) {
|
||||
case string:
|
||||
// Direct UID string
|
||||
return v
|
||||
case map[string]interface{}:
|
||||
// Structured datasource reference { uid: "...", type: "..." }
|
||||
return getStringValue(v, "uid", "")
|
||||
default:
|
||||
return ""
|
||||
}
|
||||
}
|
||||
|
||||
// isVariableReference reports whether uid is a template-variable reference:
// ${varname} (optionally ${var.field} / ${var:format}), $varname, or
// [[varname]] (optionally [[var:format]]).
//
// Follows Grafana's frontend regex:
//	/\$(\w+)|\[\[(\w+?)(?::(\w+))?\]\]|\${(\w+)(?:\.([^:^\}]+))?(?::([^\}]+))?}/g
// where \w = [A-Za-z0-9_] — alphanumeric plus underscore, NO dashes.
func isVariableReference(uid string) bool {
	if uid == "" {
		return false
	}

	// \w test for a single byte (ASCII only; any multi-byte rune fails,
	// matching the original rune-by-rune rejection).
	isWord := func(c byte) bool {
		return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
			(c >= '0' && c <= '9') || c == '_'
	}

	// leadingWord reports whether s consists of at least one \w character,
	// optionally followed (after position 0) by a delimiter from delims and
	// arbitrary trailing text. Any non-word, non-delimiter character makes
	// the reference invalid; a delimiter at position 0 does too.
	leadingWord := func(s string, delims string) bool {
		if s == "" {
			return false
		}
		for i := 0; i < len(s); i++ {
			if strings.IndexByte(delims, s[i]) >= 0 {
				return i > 0
			}
			if !isWord(s[i]) {
				return false
			}
		}
		return true
	}

	// ${var}, ${var.field}, ${var:format}
	if len(uid) > 3 && uid[0] == '$' && uid[1] == '{' && uid[len(uid)-1] == '}' {
		return leadingWord(uid[2:len(uid)-1], ".:")
	}

	// $var — every character after '$' must be \w (digits allowed, dashes not).
	if uid[0] == '$' && len(uid) > 1 {
		return leadingWord(uid[1:], "")
	}

	// [[var]] or [[var:format]]
	if len(uid) > 4 && uid[0] == '[' && uid[1] == '[' &&
		uid[len(uid)-2] == ']' && uid[len(uid)-1] == ']' {
		return leadingWord(uid[2:len(uid)-2], ":")
	}

	return false
}
|
||||
|
||||
// extractVariableName extracts the variable name from a variable reference
|
||||
// Returns only the name part, excluding fieldPath (after .) and format (after :)
|
||||
// Examples: ${var.field} -> "var", [[var:text]] -> "var", $datasource -> "datasource"
|
||||
func extractVariableName(varRef string) string {
|
||||
if !isVariableReference(varRef) {
|
||||
return ""
|
||||
}
|
||||
|
||||
// Handle ${varname} pattern - may include .fieldPath or :format
|
||||
if len(varRef) > 3 && varRef[0] == '$' && varRef[1] == '{' && varRef[len(varRef)-1] == '}' {
|
||||
content := varRef[2 : len(varRef)-1]
|
||||
// Extract only up to . or :
|
||||
for i, ch := range content {
|
||||
if ch == '.' || ch == ':' {
|
||||
return content[:i]
|
||||
}
|
||||
}
|
||||
return content
|
||||
}
|
||||
|
||||
// Handle $varname pattern - no modifiers possible
|
||||
if varRef[0] == '$' && len(varRef) > 1 {
|
||||
return varRef[1:]
|
||||
}
|
||||
|
||||
// Handle [[varname]] pattern - may include :format
|
||||
if len(varRef) > 4 && varRef[0] == '[' && varRef[1] == '[' {
|
||||
content := varRef[2 : len(varRef)-2]
|
||||
// Extract only up to :
|
||||
for i, ch := range content {
|
||||
if ch == ':' {
|
||||
return content[:i]
|
||||
}
|
||||
}
|
||||
return content
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
// isPrometheusVariable checks if a variable reference points to a Prometheus datasource
// by looking the variable up in the dashboard's "__inputs" array.
//
// NOTE(review): this is a heuristic. Two behaviors to confirm:
//   - when the dashboard has no __inputs at all it optimistically returns
//     true (MVP assumes the single datasource is Prometheus);
//   - the substring match (Contains below) can over-match — e.g. an input
//     named "DS" would match any variable whose name contains "ds".
func isPrometheusVariable(varRef string, dashboardJSON map[string]interface{}) bool {
	// Only variable references are eligible.
	if !isVariableReference(varRef) {
		return false
	}

	varName := extractVariableName(varRef)
	if varName == "" {
		return false
	}

	// Look for __inputs array in dashboard
	inputs, hasInputs := dashboardJSON["__inputs"].([]interface{})
	if !hasInputs {
		// No __inputs, assume it might be Prometheus (MVP: single datasource)
		// This is a fallback for dashboards without explicit __inputs
		return true
	}

	// Search for this variable in __inputs
	for _, inputInterface := range inputs {
		input, ok := inputInterface.(map[string]interface{})
		if !ok {
			continue
		}

		// Check if this input matches our variable name
		inputName := getStringValue(input, "name", "")
		inputType := getStringValue(input, "type", "")
		inputPluginID := getStringValue(input, "pluginId", "")

		// Match by name: exact, case-insensitive, or — loosest — the input
		// name appearing as a substring of the variable name.
		if inputName != "" && varName != "" {
			if inputName == varName ||
				strings.EqualFold(inputName, varName) ||
				strings.Contains(strings.ToLower(varName), strings.ToLower(inputName)) {
				// Check if it's a datasource input with prometheus plugin
				if inputType == "datasource" && inputPluginID == "prometheus" {
					return true
				}
			}
		}
	}

	// Not found or not Prometheus
	return false
}
|
||||
|
||||
// resolveDatasourceUID resolves a datasource UID, handling variable references (MVP: single datasource).
// For MVP, all Prometheus variables resolve to the single datasource UID.
//
// NOTE(review): the [DEBUG] prints below go straight to stdout — likely
// leftover tracing. Also, a non-Prometheus variable is returned unchanged;
// groupQueriesByDatasource then keys its queries under the raw variable
// string (it is only effectively ignored later, because no datasource
// mapping carries that UID).
func resolveDatasourceUID(uid string, singleDatasourceUID string, dashboardJSON map[string]interface{}) string {
	// Concrete (non-variable) UIDs pass through untouched.
	if !isVariableReference(uid) {
		return uid
	}

	// Prometheus-flavoured variables collapse onto the single configured
	// datasource; the MVP performs no real template-variable resolution.
	if isPrometheusVariable(uid, dashboardJSON) {
		fmt.Printf("[DEBUG] Resolved Prometheus variable %s to %s\n", uid, singleDatasourceUID)
		return singleDatasourceUID
	}

	// Non-Prometheus variable: returned as-is (see NOTE above).
	fmt.Printf("[DEBUG] Variable %s is not a Prometheus variable, skipping\n", uid)
	return uid
}
|
||||
|
||||
// extractQueryText extracts the query text from a target
|
||||
// Different datasources use different field names (expr, query, rawSql, etc.)
|
||||
func extractQueryText(target map[string]interface{}) string {
|
||||
// Try common query field names
|
||||
queryFields := []string{"expr", "query", "rawSql", "rawQuery", "target", "measurement"}
|
||||
|
||||
for _, field := range queryFields {
|
||||
if queryText := getStringValue(target, field, ""); queryText != "" {
|
||||
return queryText
|
||||
}
|
||||
}
|
||||
|
||||
return ""
|
||||
}
|
||||
|
||||
// getStringValue returns m[key] when it exists and is a string (including
// the empty string); otherwise it returns defaultValue.
func getStringValue(m map[string]interface{}, key string, defaultValue string) string {
	s, isString := m[key].(string)
	if !isString {
		return defaultValue
	}
	return s
}
|
||||
|
||||
// getIntValue returns m[key] coerced to int when it is an int, float64
// (JSON numbers decode as float64), or int64; otherwise defaultValue.
func getIntValue(m map[string]interface{}, key string, defaultValue int) int {
	raw, present := m[key]
	if !present {
		return defaultValue
	}
	switch n := raw.(type) {
	case int:
		return n
	case float64:
		return int(n) // truncates toward zero, matching a plain int conversion
	case int64:
		return int(n)
	}
	return defaultValue
}
|
||||
|
||||
// DashboardQuery represents a query extracted from a dashboard panel,
// together with enough panel context to report results back usefully.
type DashboardQuery struct {
	DatasourceUID string // Which datasource this query belongs to (may still be a ${var} reference until resolved)
	RefID         string // Query reference ID within the panel ("A", "B", ...)
	QueryText     string // The actual query expression
	PanelTitle    string // Panel title, for human-readable context
	PanelID       int    // Numeric panel ID from the dashboard JSON
}
|
||||
|
||||
// groupQueriesByDatasource groups dashboard queries by their datasource UID
|
||||
// For MVP: resolves Prometheus template variables to the single datasource UID
|
||||
func groupQueriesByDatasource(queries []DashboardQuery, singleDatasourceUID string, dashboardJSON map[string]interface{}) map[string][]Query {
|
||||
grouped := make(map[string][]Query)
|
||||
|
||||
for _, dq := range queries {
|
||||
q := Query{
|
||||
RefID: dq.RefID,
|
||||
QueryText: dq.QueryText,
|
||||
PanelTitle: dq.PanelTitle,
|
||||
PanelID: dq.PanelID,
|
||||
}
|
||||
|
||||
// Resolve datasource UID (handles both concrete UIDs and variables)
|
||||
resolvedUID := resolveDatasourceUID(dq.DatasourceUID, singleDatasourceUID, dashboardJSON)
|
||||
|
||||
// Only add to grouping if we got a valid resolved UID
|
||||
if resolvedUID != "" {
|
||||
grouped[resolvedUID] = append(grouped[resolvedUID], q)
|
||||
}
|
||||
}
|
||||
|
||||
return grouped
|
||||
}
|
||||
604
apps/dashvalidator/pkg/validator/dashboard_extraction_test.go
Normal file
604
apps/dashvalidator/pkg/validator/dashboard_extraction_test.go
Normal file
@@ -0,0 +1,604 @@
|
||||
package validator
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// Note: extractQueryText() uses a hardcoded field priority list because
|
||||
// Grafana doesn't expose datasource query schemas at runtime.
|
||||
// When Grafana adds new datasource types, update the list in dashboard.go
|
||||
// and add corresponding test cases here.
|
||||
|
||||
// =============================================================================
|
||||
// Category 1: extractQueryText Tests
|
||||
// Tests verify the hardcoded field priority list works correctly.
|
||||
// =============================================================================
|
||||
|
||||
func TestExtractQueryText(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
target map[string]interface{}
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "prometheus_expr_field",
|
||||
target: map[string]interface{}{
|
||||
"expr": "up",
|
||||
},
|
||||
expected: "up",
|
||||
},
|
||||
{
|
||||
name: "mysql_rawSql_field",
|
||||
target: map[string]interface{}{
|
||||
"rawSql": "SELECT * FROM users LIMIT 100",
|
||||
},
|
||||
expected: "SELECT * FROM users LIMIT 100",
|
||||
},
|
||||
{
|
||||
name: "generic_query_field",
|
||||
target: map[string]interface{}{
|
||||
"query": "show measurements",
|
||||
},
|
||||
expected: "show measurements",
|
||||
},
|
||||
{
|
||||
name: "field_priority_order",
|
||||
target: map[string]interface{}{
|
||||
"expr": "rate(cpu[5m])", // First priority
|
||||
"query": "show metrics", // Second priority
|
||||
},
|
||||
expected: "rate(cpu[5m])", // Should return expr, not query
|
||||
},
|
||||
{
|
||||
name: "missing_query_fields",
|
||||
target: map[string]interface{}{"refId": "A", "hide": false},
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "empty_string_value",
|
||||
target: map[string]interface{}{
|
||||
"expr": "",
|
||||
},
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := extractQueryText(tt.target)
|
||||
require.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Category 2: getDatasourceUIDFromValue Tests (4 tests)
|
||||
// =============================================================================
|
||||
|
||||
func TestGetDatasourceUIDFromValue(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
value interface{}
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "string_datasource_uid",
|
||||
value: "prom-123",
|
||||
expected: "prom-123",
|
||||
},
|
||||
{
|
||||
name: "object_datasource_with_uid",
|
||||
value: map[string]interface{}{
|
||||
"uid": "prom-123",
|
||||
"type": "prometheus",
|
||||
},
|
||||
expected: "prom-123",
|
||||
},
|
||||
{
|
||||
name: "variable_reference_passed_through",
|
||||
value: "${DS_PROMETHEUS}",
|
||||
expected: "${DS_PROMETHEUS}",
|
||||
},
|
||||
{
|
||||
name: "nil_value",
|
||||
value: nil,
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := getDatasourceUIDFromValue(tt.value)
|
||||
require.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Category 3: extractDatasourceUID Tests (5 tests)
|
||||
// =============================================================================
|
||||
|
||||
func TestExtractDatasourceUID(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
target map[string]interface{}
|
||||
panel map[string]interface{}
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "target_level_datasource_string",
|
||||
target: map[string]interface{}{
|
||||
"datasource": "target-ds-123",
|
||||
},
|
||||
panel: map[string]interface{}{},
|
||||
expected: "target-ds-123",
|
||||
},
|
||||
{
|
||||
name: "target_level_datasource_object",
|
||||
target: map[string]interface{}{
|
||||
"datasource": map[string]interface{}{
|
||||
"uid": "target-ds-456",
|
||||
"type": "prometheus",
|
||||
},
|
||||
},
|
||||
panel: map[string]interface{}{},
|
||||
expected: "target-ds-456",
|
||||
},
|
||||
{
|
||||
name: "panel_level_fallback",
|
||||
target: map[string]interface{}{},
|
||||
panel: map[string]interface{}{
|
||||
"datasource": "panel-ds-789",
|
||||
},
|
||||
expected: "panel-ds-789",
|
||||
},
|
||||
{
|
||||
name: "target_level_takes_precedence",
|
||||
target: map[string]interface{}{
|
||||
"datasource": "target-ds",
|
||||
},
|
||||
panel: map[string]interface{}{
|
||||
"datasource": "panel-ds",
|
||||
},
|
||||
expected: "target-ds",
|
||||
},
|
||||
{
|
||||
name: "both_missing_returns_empty",
|
||||
target: map[string]interface{}{},
|
||||
panel: map[string]interface{}{},
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := extractDatasourceUID(tt.target, tt.panel)
|
||||
require.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Category 4: extractQueriesFromPanel Tests (8 tests)
|
||||
// =============================================================================
|
||||
|
||||
// TestExtractQueriesFromPanel covers the panel-level extraction path:
// single and multiple targets, panels without targets, targets skipped for
// missing datasource or query text, panel metadata propagation, and the
// JSON-decoding case where panel IDs arrive as float64.
func TestExtractQueriesFromPanel(t *testing.T) {
	tests := []struct {
		name     string
		panel    map[string]interface{}
		expected []DashboardQuery
	}{
		{
			name: "panel_with_single_target",
			panel: map[string]interface{}{
				"id":    42,
				"title": "CPU Usage",
				"targets": []interface{}{
					map[string]interface{}{
						"refId":      "A",
						"expr":       "rate(cpu[5m])",
						"datasource": "prom-main",
					},
				},
			},
			expected: []DashboardQuery{
				{
					DatasourceUID: "prom-main",
					RefID:         "A",
					QueryText:     "rate(cpu[5m])",
					PanelTitle:    "CPU Usage",
					PanelID:       42,
				},
			},
		},
		{
			name: "panel_with_multiple_targets",
			panel: map[string]interface{}{
				"id":    10,
				"title": "Metrics",
				"targets": []interface{}{
					map[string]interface{}{
						"refId":      "A",
						"expr":       "up",
						"datasource": "prom-1",
					},
					map[string]interface{}{
						"refId":      "B",
						"expr":       "down",
						"datasource": "prom-1",
					},
				},
			},
			expected: []DashboardQuery{
				{
					DatasourceUID: "prom-1",
					RefID:         "A",
					QueryText:     "up",
					PanelTitle:    "Metrics",
					PanelID:       10,
				},
				{
					DatasourceUID: "prom-1",
					RefID:         "B",
					QueryText:     "down",
					PanelTitle:    "Metrics",
					PanelID:       10,
				},
			},
		},
		{
			// e.g. a text panel — no "targets" key at all
			name: "panel_with_no_targets_field",
			panel: map[string]interface{}{
				"id":    1,
				"title": "Text Panel",
			},
			expected: []DashboardQuery{},
		},
		{
			name: "panel_with_empty_targets_array",
			panel: map[string]interface{}{
				"id":      2,
				"title":   "Empty",
				"targets": []interface{}{},
			},
			expected: []DashboardQuery{},
		},
		{
			name: "target_missing_datasource_skipped",
			panel: map[string]interface{}{
				"id":    3,
				"title": "Incomplete",
				"targets": []interface{}{
					map[string]interface{}{
						"refId": "A",
						"expr":  "up",
						// No datasource field
					},
				},
			},
			expected: []DashboardQuery{}, // Empty because no datasource
		},
		{
			name: "target_missing_query_text_skipped",
			panel: map[string]interface{}{
				"id":    4,
				"title": "No Query",
				"targets": []interface{}{
					map[string]interface{}{
						"refId":      "A",
						"datasource": "prom-1",
						// No expr/query field
					},
				},
			},
			expected: []DashboardQuery{}, // Empty because no query text
		},
		{
			name: "panel_metadata_extraction",
			panel: map[string]interface{}{
				"id":    999,
				"title": "Custom Title",
				"targets": []interface{}{
					map[string]interface{}{
						"refId":      "Z",
						"expr":       "test_metric",
						"datasource": "ds-abc",
					},
				},
			},
			expected: []DashboardQuery{
				{
					DatasourceUID: "ds-abc",
					RefID:         "Z",
					QueryText:     "test_metric",
					PanelTitle:    "Custom Title",
					PanelID:       999,
				},
			},
		},
		{
			name: "panel_id_as_float64",
			panel: map[string]interface{}{
				"id":    float64(123), // JSON numbers parse as float64
				"title": "Float ID Panel",
				"targets": []interface{}{
					map[string]interface{}{
						"refId":      "A",
						"expr":       "metric",
						"datasource": "ds-1",
					},
				},
			},
			expected: []DashboardQuery{
				{
					DatasourceUID: "ds-1",
					RefID:         "A",
					QueryText:     "metric",
					PanelTitle:    "Float ID Panel",
					PanelID:       123,
				},
			},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := extractQueriesFromPanel(tt.panel)
			if len(tt.expected) == 0 {
				// The function returns a nil slice when nothing is extracted;
				// require.Empty accepts both nil and an empty slice, where
				// require.Equal against []DashboardQuery{} would not.
				require.Empty(t, result)
			} else {
				require.Equal(t, tt.expected, result)
			}
		})
	}
}
|
||||
|
||||
// =============================================================================
|
||||
// Category 5: Helper Functions Tests
|
||||
// =============================================================================
|
||||
|
||||
func TestGetStringValue(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
m map[string]interface{}
|
||||
key string
|
||||
defaultValue string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "returns_value_if_exists",
|
||||
m: map[string]interface{}{"name": "test"},
|
||||
key: "name",
|
||||
defaultValue: "default",
|
||||
expected: "test",
|
||||
},
|
||||
{
|
||||
name: "returns_default_if_missing",
|
||||
m: map[string]interface{}{"other": "value"},
|
||||
key: "name",
|
||||
defaultValue: "default",
|
||||
expected: "default",
|
||||
},
|
||||
{
|
||||
name: "handles_non_string_type",
|
||||
m: map[string]interface{}{"name": 123},
|
||||
key: "name",
|
||||
defaultValue: "default",
|
||||
expected: "default",
|
||||
},
|
||||
{
|
||||
name: "empty_map_returns_default",
|
||||
m: map[string]interface{}{},
|
||||
key: "name",
|
||||
defaultValue: "default",
|
||||
expected: "default",
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := getStringValue(tt.m, tt.key, tt.defaultValue)
|
||||
require.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestGetIntValue(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
m map[string]interface{}
|
||||
key string
|
||||
defaultValue int
|
||||
expected int
|
||||
}{
|
||||
{
|
||||
name: "returns_int_value",
|
||||
m: map[string]interface{}{"count": 42},
|
||||
key: "count",
|
||||
defaultValue: 0,
|
||||
expected: 42,
|
||||
},
|
||||
{
|
||||
name: "handles_float64_conversion",
|
||||
m: map[string]interface{}{"count": float64(123)},
|
||||
key: "count",
|
||||
defaultValue: 0,
|
||||
expected: 123,
|
||||
},
|
||||
{
|
||||
name: "handles_int64_conversion",
|
||||
m: map[string]interface{}{"count": int64(456)},
|
||||
key: "count",
|
||||
defaultValue: 0,
|
||||
expected: 456,
|
||||
},
|
||||
{
|
||||
name: "returns_default_for_missing",
|
||||
m: map[string]interface{}{},
|
||||
key: "count",
|
||||
defaultValue: 99,
|
||||
expected: 99,
|
||||
},
|
||||
{
|
||||
name: "returns_default_for_invalid_type",
|
||||
m: map[string]interface{}{"count": "not a number"},
|
||||
key: "count",
|
||||
defaultValue: 99,
|
||||
expected: 99,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := getIntValue(tt.m, tt.key, tt.defaultValue)
|
||||
require.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// Category 6: Integration Tests
|
||||
// Real-world dashboard panel structures
|
||||
// =============================================================================
|
||||
|
||||
// TestRealisticPrometheusPanel extracts queries from a panel shaped like a
// real Grafana Prometheus timeseries panel (nested datasource objects,
// gridPos, legendFormat) and checks every extracted field.
func TestRealisticPrometheusPanel(t *testing.T) {
	// Realistic Prometheus panel from actual Grafana dashboard
	panel := map[string]interface{}{
		"datasource": map[string]interface{}{
			"type": "prometheus",
			"uid":  "prometheus-main",
		},
		"gridPos": map[string]interface{}{
			"h": 8,
			"w": 12,
			"x": 0,
			"y": 0,
		},
		"id":    28,
		"title": "Request Rate",
		"type":  "timeseries",
		"targets": []interface{}{
			map[string]interface{}{
				"datasource": map[string]interface{}{
					"type": "prometheus",
					"uid":  "prometheus-main",
				},
				"expr":         "rate(http_requests_total{job=\"api\"}[5m])",
				"refId":        "A",
				"legendFormat": "{{method}} {{status}}",
				"interval":     "",
			},
			map[string]interface{}{
				"datasource": map[string]interface{}{
					"type": "prometheus",
					"uid":  "prometheus-main",
				},
				"expr":         "rate(http_requests_total{job=\"worker\"}[5m])",
				"refId":        "B",
				"legendFormat": "{{method}}",
			},
		},
	}

	result := extractQueriesFromPanel(panel)

	// One DashboardQuery per target, in target order, with the panel's
	// title/id attached to each.
	require.Len(t, result, 2)
	require.Equal(t, "prometheus-main", result[0].DatasourceUID)
	require.Equal(t, "A", result[0].RefID)
	require.Equal(t, "rate(http_requests_total{job=\"api\"}[5m])", result[0].QueryText)
	require.Equal(t, "Request Rate", result[0].PanelTitle)
	require.Equal(t, 28, result[0].PanelID)

	require.Equal(t, "prometheus-main", result[1].DatasourceUID)
	require.Equal(t, "B", result[1].RefID)
	require.Equal(t, "rate(http_requests_total{job=\"worker\"}[5m])", result[1].QueryText)
}
|
||||
|
||||
// TestRealisticMySQLPanel extracts a single SQL query from a table panel
// whose target carries the query in "rawSql" rather than "expr".
func TestRealisticMySQLPanel(t *testing.T) {
	// Realistic MySQL panel structure
	panel := map[string]interface{}{
		"id":    10,
		"title": "Recent Users",
		"type":  "table",
		"datasource": map[string]interface{}{
			"type": "mysql",
			"uid":  "mysql-prod",
		},
		"targets": []interface{}{
			map[string]interface{}{
				"datasource": map[string]interface{}{
					"type": "mysql",
					"uid":  "mysql-prod",
				},
				"refId":  "A",
				"rawSql": "SELECT id, username, email FROM users WHERE created_at > NOW() - INTERVAL 1 DAY ORDER BY created_at DESC LIMIT 100",
				"format": "table",
			},
		},
	}

	result := extractQueriesFromPanel(panel)

	require.Len(t, result, 1)
	require.Equal(t, "mysql-prod", result[0].DatasourceUID)
	require.Equal(t, "A", result[0].RefID)
	require.Contains(t, result[0].QueryText, "SELECT id, username, email FROM users")
	require.Equal(t, "Recent Users", result[0].PanelTitle)
	require.Equal(t, 10, result[0].PanelID)
}
|
||||
|
||||
// TestMixedDatasourcesPanel extracts queries from a panel whose targets use
// different datasources, including one target that falls back to the
// panel-level datasource because it declares none of its own.
func TestMixedDatasourcesPanel(t *testing.T) {
	// Panel with targets using different datasource types
	panel := map[string]interface{}{
		"id":    50,
		"title": "Mixed Data",
		"datasource": map[string]interface{}{
			"type": "prometheus",
			"uid":  "default-prom",
		},
		"targets": []interface{}{
			map[string]interface{}{
				"datasource": map[string]interface{}{
					"type": "prometheus",
					"uid":  "prom-1",
				},
				"refId": "A",
				"expr":  "up",
			},
			map[string]interface{}{
				"datasource": map[string]interface{}{
					"type": "elasticsearch",
					"uid":  "elastic-1",
				},
				"refId": "B",
				"query": "status:200",
			},
			map[string]interface{}{
				// Uses panel-level datasource (fallback)
				"refId": "C",
				"expr":  "down",
			},
		},
	}

	result := extractQueriesFromPanel(panel)

	require.Len(t, result, 3)

	// Prometheus query
	require.Equal(t, "prom-1", result[0].DatasourceUID)
	require.Equal(t, "A", result[0].RefID)
	require.Equal(t, "up", result[0].QueryText)

	// Elasticsearch query
	require.Equal(t, "elastic-1", result[1].DatasourceUID)
	require.Equal(t, "B", result[1].RefID)
	require.Equal(t, "status:200", result[1].QueryText)

	// Query with panel-level datasource fallback
	require.Equal(t, "default-prom", result[2].DatasourceUID)
	require.Equal(t, "C", result[2].RefID)
	require.Equal(t, "down", result[2].QueryText)
}
|
||||
197
apps/dashvalidator/pkg/validator/dashboard_test.go
Normal file
197
apps/dashvalidator/pkg/validator/dashboard_test.go
Normal file
@@ -0,0 +1,197 @@
|
||||
package validator
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestIsV1Dashboard exercises v1/v2 schema detection. Per these cases a
// dashboard counts as v1 only when it has a "panels" key holding an array
// (even an empty one); v2-style dashboards ("elements"/"layout"), empty maps,
// a non-array "panels" value, and unrelated fields are all rejected.
func TestIsV1Dashboard(t *testing.T) {
	tests := []struct {
		name      string
		dashboard map[string]interface{}
		expected  bool
	}{
		{
			name: "v1 dashboard with panels array",
			dashboard: map[string]interface{}{
				"panels": []interface{}{
					map[string]interface{}{
						"id":    1,
						"title": "Panel 1",
						"type":  "timeseries",
					},
				},
			},
			expected: true,
		},
		{
			name: "v1 dashboard with empty panels",
			dashboard: map[string]interface{}{
				"panels": []interface{}{},
			},
			expected: true,
		},
		{
			name: "v2 dashboard with elements map",
			dashboard: map[string]interface{}{
				"elements": map[string]interface{}{
					"panel-1": map[string]interface{}{
						"kind": "Panel",
						"spec": map[string]interface{}{
							"id":    1,
							"title": "Panel 1",
						},
					},
				},
			},
			expected: false,
		},
		{
			name: "v2 dashboard with layout",
			dashboard: map[string]interface{}{
				"layout": map[string]interface{}{
					"kind": "GridLayout",
					"spec": map[string]interface{}{
						"items": []interface{}{},
					},
				},
			},
			expected: false,
		},
		{
			name: "v2 dashboard with both elements and layout",
			dashboard: map[string]interface{}{
				"elements": map[string]interface{}{
					"panel-1": map[string]interface{}{
						"kind": "Panel",
					},
				},
				"layout": map[string]interface{}{
					"kind": "GridLayout",
				},
			},
			expected: false,
		},
		{
			name:      "empty dashboard",
			dashboard: map[string]interface{}{},
			expected:  false,
		},
		{
			name: "dashboard with wrong panels type (string instead of array)",
			dashboard: map[string]interface{}{
				"panels": "this-should-be-array-not-string",
			},
			expected: false,
		},
		{
			name: "dashboard with other fields only",
			dashboard: map[string]interface{}{
				"title": "Test Dashboard",
				"uid":   "test-uid",
				"tags":  []string{"monitoring"},
			},
			expected: false,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := isV1Dashboard(tt.dashboard)
			require.Equal(t, tt.expected, result, "isV1Dashboard() returned unexpected result")
		})
	}
}
|
||||
|
||||
// TestExtractQueriesFromDashboard_VersionValidation checks that query
// extraction succeeds for v1 (panels-based) dashboards and returns an
// "unsupported dashboard format" error for v2 or otherwise unrecognized
// dashboard shapes.
func TestExtractQueriesFromDashboard_VersionValidation(t *testing.T) {
	tests := []struct {
		name          string
		dashboard     map[string]interface{}
		expectError   bool
		errorContains string
	}{
		{
			name: "valid v1 dashboard extracts queries successfully",
			dashboard: map[string]interface{}{
				"panels": []interface{}{
					map[string]interface{}{
						"id":    1,
						"title": "CPU Usage",
						"type":  "timeseries",
						"gridPos": map[string]interface{}{
							"h": 8,
							"w": 12,
							"x": 0,
							"y": 0,
						},
						"targets": []interface{}{
							map[string]interface{}{
								"datasource": map[string]interface{}{
									"type": "prometheus",
									"uid":  "test-prometheus",
								},
								"expr":  "rate(cpu_usage_total[5m])",
								"refId": "A",
							},
						},
					},
				},
			},
			expectError: false,
		},
		{
			name: "v2 dashboard returns unsupported format error",
			dashboard: map[string]interface{}{
				"elements": map[string]interface{}{
					"panel-1": map[string]interface{}{
						"kind": "Panel",
						"spec": map[string]interface{}{
							"id":    1,
							"title": "Panel 1",
							"data": map[string]interface{}{
								"kind": "QueryGroup",
							},
							"vizConfig": map[string]interface{}{
								"kind":     "TimeSeriesVisualConfig",
								"pluginId": "timeseries",
							},
						},
					},
				},
				"layout": map[string]interface{}{
					"kind": "GridLayout",
					"spec": map[string]interface{}{
						"items": []interface{}{},
					},
				},
			},
			expectError:   true,
			errorContains: "unsupported dashboard format",
		},
		{
			name: "invalid dashboard (no panels or elements) returns error",
			dashboard: map[string]interface{}{
				"title":       "Invalid Dashboard",
				"description": "This dashboard has no panels or elements",
				"tags":        []string{"test"},
			},
			expectError:   true,
			errorContains: "unsupported dashboard format",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			queries, err := extractQueriesFromDashboard(tt.dashboard)

			if tt.expectError {
				require.Error(t, err, "Expected error but got none")
				require.Contains(t, err.Error(), tt.errorContains, "Error message doesn't contain expected substring")
			} else {
				require.NoError(t, err, "Expected no error but got: %v", err)
				require.NotNil(t, queries, "Queries should not be nil for valid dashboard")
			}
		})
	}
}
|
||||
173
apps/dashvalidator/pkg/validator/errors.go
Normal file
173
apps/dashvalidator/pkg/validator/errors.go
Normal file
@@ -0,0 +1,173 @@
|
||||
package validator
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// ErrorCode is a machine-readable identifier for the class of error that
// occurred. Values are snake_case strings.
type ErrorCode string

const (
	// Datasource-related errors (resolving or reaching the datasource)
	ErrCodeDatasourceNotFound    ErrorCode = "datasource_not_found"
	ErrCodeDatasourceWrongType   ErrorCode = "datasource_wrong_type"
	ErrCodeDatasourceUnreachable ErrorCode = "datasource_unreachable"
	ErrCodeDatasourceAuth        ErrorCode = "datasource_auth_failed"
	ErrCodeDatasourceConfig      ErrorCode = "datasource_config_error"

	// API-related errors (responses from the upstream datasource API)
	ErrCodeAPIUnavailable     ErrorCode = "api_unavailable"
	ErrCodeAPIInvalidResponse ErrorCode = "api_invalid_response"
	ErrCodeAPIRateLimit       ErrorCode = "api_rate_limit"
	ErrCodeAPITimeout         ErrorCode = "api_timeout"

	// Validation errors (problems with the dashboard or its queries)
	ErrCodeInvalidDashboard       ErrorCode = "invalid_dashboard"
	ErrCodeUnsupportedDashVersion ErrorCode = "unsupported_dashboard_version"
	ErrCodeInvalidQuery           ErrorCode = "invalid_query"

	// Internal errors
	ErrCodeInternal ErrorCode = "internal_error"
)
|
||||
|
||||
// ValidationError represents a structured error with context: a machine code,
// a human-readable message, arbitrary detail key/values, the HTTP status the
// caller should surface, and an optional wrapped cause.
type ValidationError struct {
	Code       ErrorCode              // machine-readable error class
	Message    string                 // human-readable description
	Details    map[string]interface{} // contextual key/value pairs, populated via WithDetail
	StatusCode int                    // HTTP status code to return for this error
	Cause      error                  // underlying error, if any; exposed via Unwrap
}
|
||||
|
||||
// Error implements the error interface
|
||||
func (e *ValidationError) Error() string {
|
||||
if e.Cause != nil {
|
||||
return fmt.Sprintf("%s: %s (caused by: %v)", e.Code, e.Message, e.Cause)
|
||||
}
|
||||
return fmt.Sprintf("%s: %s", e.Code, e.Message)
|
||||
}
|
||||
|
||||
// Unwrap implements error unwrapping, exposing Cause to errors.Is/errors.As.
func (e *ValidationError) Unwrap() error {
	return e.Cause
}
|
||||
|
||||
// NewValidationError creates a new ValidationError
|
||||
func NewValidationError(code ErrorCode, message string, statusCode int) *ValidationError {
|
||||
return &ValidationError{
|
||||
Code: code,
|
||||
Message: message,
|
||||
StatusCode: statusCode,
|
||||
Details: make(map[string]interface{}),
|
||||
}
|
||||
}
|
||||
|
||||
// WithCause attaches the underlying error and returns the receiver for chaining.
func (e *ValidationError) WithCause(err error) *ValidationError {
	e.Cause = err
	return e
}
|
||||
|
||||
// WithDetail adds contextual information
|
||||
func (e *ValidationError) WithDetail(key string, value interface{}) *ValidationError {
|
||||
e.Details[key] = value
|
||||
return e
|
||||
}
|
||||
|
||||
// Common error constructors
|
||||
|
||||
// NewDatasourceNotFoundError creates an error for datasource not found
|
||||
func NewDatasourceNotFoundError(uid string, namespace string) *ValidationError {
|
||||
return NewValidationError(
|
||||
ErrCodeDatasourceNotFound,
|
||||
fmt.Sprintf("datasource not found: %s", uid),
|
||||
http.StatusNotFound,
|
||||
).WithDetail("datasourceUID", uid).WithDetail("namespace", namespace)
|
||||
}
|
||||
|
||||
// NewDatasourceWrongTypeError creates an error for wrong datasource type
|
||||
func NewDatasourceWrongTypeError(uid string, expectedType string, actualType string) *ValidationError {
|
||||
return NewValidationError(
|
||||
ErrCodeDatasourceWrongType,
|
||||
fmt.Sprintf("datasource %s has wrong type: expected %s, got %s", uid, expectedType, actualType),
|
||||
http.StatusBadRequest,
|
||||
).WithDetail("datasourceUID", uid).
|
||||
WithDetail("expectedType", expectedType).
|
||||
WithDetail("actualType", actualType)
|
||||
}
|
||||
|
||||
// NewDatasourceUnreachableError creates an error for unreachable datasource
|
||||
func NewDatasourceUnreachableError(uid string, url string, cause error) *ValidationError {
|
||||
return NewValidationError(
|
||||
ErrCodeDatasourceUnreachable,
|
||||
fmt.Sprintf("datasource %s at %s is unreachable", uid, url),
|
||||
http.StatusServiceUnavailable,
|
||||
).WithDetail("datasourceUID", uid).
|
||||
WithDetail("url", url).
|
||||
WithCause(cause)
|
||||
}
|
||||
|
||||
// NewAPIUnavailableError creates an error for unavailable API
|
||||
func NewAPIUnavailableError(statusCode int, responseBody string, cause error) *ValidationError {
|
||||
return NewValidationError(
|
||||
ErrCodeAPIUnavailable,
|
||||
fmt.Sprintf("Prometheus API returned status %d", statusCode),
|
||||
http.StatusBadGateway,
|
||||
).WithDetail("upstreamStatus", statusCode).
|
||||
WithDetail("responseBody", responseBody).
|
||||
WithCause(cause)
|
||||
}
|
||||
|
||||
// NewAPIInvalidResponseError creates an error for invalid API response
|
||||
func NewAPIInvalidResponseError(message string, cause error) *ValidationError {
|
||||
return NewValidationError(
|
||||
ErrCodeAPIInvalidResponse,
|
||||
fmt.Sprintf("Prometheus API returned invalid response: %s", message),
|
||||
http.StatusBadGateway,
|
||||
).WithCause(cause)
|
||||
}
|
||||
|
||||
// NewAPITimeoutError creates an error for API timeout
|
||||
func NewAPITimeoutError(url string, cause error) *ValidationError {
|
||||
return NewValidationError(
|
||||
ErrCodeAPITimeout,
|
||||
fmt.Sprintf("request to %s timed out", url),
|
||||
http.StatusGatewayTimeout,
|
||||
).WithDetail("url", url).
|
||||
WithCause(cause)
|
||||
}
|
||||
|
||||
// NewDatasourceAuthError creates an error for authentication failures
|
||||
func NewDatasourceAuthError(uid string, statusCode int) *ValidationError {
|
||||
return NewValidationError(
|
||||
ErrCodeDatasourceAuth,
|
||||
fmt.Sprintf("authentication failed for datasource %s (status %d)", uid, statusCode),
|
||||
http.StatusUnauthorized,
|
||||
).WithDetail("datasourceUID", uid).
|
||||
WithDetail("upstreamStatus", statusCode)
|
||||
}
|
||||
|
||||
// IsValidationError checks if an error is a ValidationError
|
||||
func IsValidationError(err error) bool {
|
||||
var validationErr *ValidationError
|
||||
return errors.As(err, &validationErr)
|
||||
}
|
||||
|
||||
// GetValidationError extracts a ValidationError from an error chain
|
||||
func GetValidationError(err error) *ValidationError {
|
||||
var validationErr *ValidationError
|
||||
if errors.As(err, &validationErr) {
|
||||
return validationErr
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetHTTPStatusCode returns the appropriate HTTP status code for an error
|
||||
func GetHTTPStatusCode(err error) int {
|
||||
if validationErr := GetValidationError(err); validationErr != nil {
|
||||
return validationErr.StatusCode
|
||||
}
|
||||
return http.StatusInternalServerError
|
||||
}
|
||||
131
apps/dashvalidator/pkg/validator/errors_test.go
Normal file
131
apps/dashvalidator/pkg/validator/errors_test.go
Normal file
@@ -0,0 +1,131 @@
|
||||
package validator
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"net/http"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestNewDatasourceNotFoundError checks code, HTTP status, and detail fields.
func TestNewDatasourceNotFoundError(t *testing.T) {
	err := NewDatasourceNotFoundError("test-uid", "org-1")

	require.Equal(t, ErrCodeDatasourceNotFound, err.Code)
	require.Equal(t, http.StatusNotFound, err.StatusCode)
	require.Equal(t, "test-uid", err.Details["datasourceUID"])
	require.Equal(t, "org-1", err.Details["namespace"])
}
|
||||
|
||||
// TestNewDatasourceWrongTypeError checks code, HTTP status, and type details.
func TestNewDatasourceWrongTypeError(t *testing.T) {
	err := NewDatasourceWrongTypeError("test-uid", "prometheus", "influxdb")

	require.Equal(t, ErrCodeDatasourceWrongType, err.Code)
	require.Equal(t, http.StatusBadRequest, err.StatusCode)
	require.Equal(t, "prometheus", err.Details["expectedType"])
	require.Equal(t, "influxdb", err.Details["actualType"])
}
|
||||
|
||||
// TestNewDatasourceUnreachableError checks code, HTTP status, wrapped cause,
// and the URL detail.
func TestNewDatasourceUnreachableError(t *testing.T) {
	cause := errors.New("connection refused")
	err := NewDatasourceUnreachableError("test-uid", "http://localhost:9090", cause)

	require.Equal(t, ErrCodeDatasourceUnreachable, err.Code)
	require.Equal(t, http.StatusServiceUnavailable, err.StatusCode)
	require.Equal(t, cause, err.Cause)
	require.Equal(t, "http://localhost:9090", err.Details["url"])
}
|
||||
|
||||
// TestNewAPIUnavailableError checks that the upstream status is recorded in
// the details while the returned StatusCode maps to 502.
func TestNewAPIUnavailableError(t *testing.T) {
	err := NewAPIUnavailableError(503, "service unavailable", nil)

	require.Equal(t, ErrCodeAPIUnavailable, err.Code)
	require.Equal(t, http.StatusBadGateway, err.StatusCode)
	require.Equal(t, 503, err.Details["upstreamStatus"])
}
|
||||
|
||||
// TestNewAPIInvalidResponseError checks code, HTTP status, and wrapped cause.
func TestNewAPIInvalidResponseError(t *testing.T) {
	cause := errors.New("invalid JSON")
	err := NewAPIInvalidResponseError("missing data field", cause)

	require.Equal(t, ErrCodeAPIInvalidResponse, err.Code)
	require.Equal(t, http.StatusBadGateway, err.StatusCode)
	require.Equal(t, cause, err.Cause)
}
|
||||
|
||||
// TestNewAPITimeoutError checks code, HTTP status, and wrapped cause.
func TestNewAPITimeoutError(t *testing.T) {
	cause := errors.New("context deadline exceeded")
	err := NewAPITimeoutError("http://localhost:9090/api/v1/query", cause)

	require.Equal(t, ErrCodeAPITimeout, err.Code)
	require.Equal(t, http.StatusGatewayTimeout, err.StatusCode)
	require.Equal(t, cause, err.Cause)
}
|
||||
|
||||
// TestNewDatasourceAuthError checks code, HTTP status, and the recorded
// upstream status detail.
func TestNewDatasourceAuthError(t *testing.T) {
	err := NewDatasourceAuthError("test-uid", 401)

	require.Equal(t, ErrCodeDatasourceAuth, err.Code)
	require.Equal(t, http.StatusUnauthorized, err.StatusCode)
	require.Equal(t, 401, err.Details["upstreamStatus"])
}
|
||||
|
||||
// TestValidationErrorChaining verifies that WithCause/WithDetail chain
// fluently and accumulate state on the same error value.
func TestValidationErrorChaining(t *testing.T) {
	cause := errors.New("network error")
	err := NewValidationError(ErrCodeInternal, "test error", http.StatusInternalServerError).
		WithCause(cause).
		WithDetail("key1", "value1").
		WithDetail("key2", 123)

	require.Equal(t, cause, err.Cause)
	require.Equal(t, "value1", err.Details["key1"])
	require.Equal(t, 123, err.Details["key2"])
}
|
||||
|
||||
// TestIsValidationError verifies detection of *ValidationError vs plain errors.
func TestIsValidationError(t *testing.T) {
	validationErr := NewDatasourceNotFoundError("test-uid", "org-1")
	regularErr := errors.New("regular error")

	require.True(t, IsValidationError(validationErr), "expected IsValidationError to return true for ValidationError")
	require.False(t, IsValidationError(regularErr), "expected IsValidationError to return false for regular error")
}
|
||||
|
||||
// TestGetValidationError checks extraction of a *ValidationError from an
// error value, and that plain errors yield nil.
func TestGetValidationError(t *testing.T) {
	validationErr := NewDatasourceNotFoundError("test-uid", "org-1")
	regularErr := errors.New("regular error")

	retrieved := GetValidationError(validationErr)
	require.NotNil(t, retrieved, "expected GetValidationError to return the ValidationError")
	require.Equal(t, ErrCodeDatasourceNotFound, retrieved.Code)

	retrieved = GetValidationError(regularErr)
	require.Nil(t, retrieved, "expected GetValidationError to return nil for regular error")
}
|
||||
|
||||
// TestGetHTTPStatusCode checks the ValidationError status is surfaced and a
// plain error falls back to 500.
func TestGetHTTPStatusCode(t *testing.T) {
	validationErr := NewDatasourceNotFoundError("test-uid", "org-1")
	regularErr := errors.New("regular error")

	require.Equal(t, http.StatusNotFound, GetHTTPStatusCode(validationErr))
	require.Equal(t, http.StatusInternalServerError, GetHTTPStatusCode(regularErr), "expected default status code for regular error")
}
|
||||
|
||||
// TestErrorUnwrap verifies errors.Unwrap reaches the wrapped cause.
func TestErrorUnwrap(t *testing.T) {
	cause := errors.New("underlying error")
	err := NewDatasourceUnreachableError("test-uid", "http://localhost:9090", cause)

	require.Equal(t, cause, errors.Unwrap(err), "expected Unwrap to return the cause")
}
|
||||
|
||||
// TestErrorErrorMethod verifies Error() produces a non-empty message and
// includes the cause text when one is attached.
func TestErrorErrorMethod(t *testing.T) {
	// Test without cause
	err1 := NewDatasourceNotFoundError("test-uid", "org-1")
	require.NotEmpty(t, err1.Error(), "expected non-empty error message")

	// Test with cause
	cause := errors.New("underlying error")
	err2 := NewDatasourceUnreachableError("test-uid", "http://localhost:9090", cause)
	errMsg2 := err2.Error()
	require.NotEmpty(t, errMsg2, "expected non-empty error message")
	require.Contains(t, errMsg2, "underlying error", "error message should include cause")
}
|
||||
142
apps/dashvalidator/pkg/validator/prometheus/fetcher.go
Normal file
142
apps/dashvalidator/pkg/validator/prometheus/fetcher.go
Normal file
@@ -0,0 +1,142 @@
|
||||
package prometheus
|
||||
|
||||
import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net"
	"net/http"
	"net/url"
	"path"
	"strings"

	"github.com/grafana/grafana/apps/dashvalidator/pkg/validator"
)
|
||||
|
||||
// Fetcher fetches available metrics from a Prometheus datasource.
// It holds no state; the datasource URL and an (already authenticated)
// HTTP client are supplied per call.
type Fetcher struct{}

// NewFetcher creates a new Prometheus metrics fetcher.
func NewFetcher() *Fetcher {
	return &Fetcher{}
}
|
||||
|
||||
// prometheusResponse represents the Prometheus API response structure:
// Status is checked against "success", Data carries the label values
// (metric names), and Error carries the message on failure.
type prometheusResponse struct {
	Status string   `json:"status"`
	Data   []string `json:"data"`
	Error  string   `json:"error,omitempty"`
}
|
||||
|
||||
// FetchMetrics queries Prometheus to get all available metric names
|
||||
// It uses the /api/v1/label/__name__/values endpoint
|
||||
// The provided HTTP client should have proper authentication configured
|
||||
func (f *Fetcher) FetchMetrics(ctx context.Context, datasourceURL string, client *http.Client) ([]string, error) {
|
||||
// Build the API URL
|
||||
baseURL, err := url.Parse(datasourceURL)
|
||||
if err != nil {
|
||||
return nil, validator.NewValidationError(
|
||||
validator.ErrCodeDatasourceConfig,
|
||||
"invalid datasource URL",
|
||||
http.StatusBadRequest,
|
||||
).WithCause(err).WithDetail("url", datasourceURL)
|
||||
}
|
||||
|
||||
// Append Prometheus API endpoint to base URL path using path.Join
|
||||
// This correctly handles datasources with existing paths (e.g., /api/prom)
|
||||
endpoint := "api/v1/label/__name__/values"
|
||||
baseURL.Path = path.Join(baseURL.Path, endpoint)
|
||||
|
||||
// Create the request
|
||||
req, err := http.NewRequestWithContext(ctx, http.MethodGet, baseURL.String(), nil)
|
||||
if err != nil {
|
||||
return nil, validator.NewValidationError(
|
||||
validator.ErrCodeInternal,
|
||||
"failed to create HTTP request",
|
||||
http.StatusInternalServerError,
|
||||
).WithCause(err)
|
||||
}
|
||||
|
||||
// Execute the request using the provided authenticated client
|
||||
resp, err := client.Do(req)
|
||||
if err != nil {
|
||||
// Check if it's a timeout error
|
||||
if errors.Is(err, context.DeadlineExceeded) || strings.Contains(err.Error(), "timeout") {
|
||||
return nil, validator.NewAPITimeoutError(baseURL.String(), err)
|
||||
}
|
||||
// Network or connection error - datasource is unreachable
|
||||
return nil, validator.NewDatasourceUnreachableError("", datasourceURL, err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
// Read response body for error reporting
|
||||
body, readErr := io.ReadAll(resp.Body)
|
||||
if readErr != nil {
|
||||
body = []byte("<unable to read response body>")
|
||||
}
|
||||
|
||||
// Check HTTP status code
|
||||
switch resp.StatusCode {
|
||||
case http.StatusOK:
|
||||
// Success - continue to parse response
|
||||
case http.StatusUnauthorized, http.StatusForbidden:
|
||||
// Authentication or authorization failure
|
||||
return nil, validator.NewDatasourceAuthError("", resp.StatusCode).
|
||||
WithDetail("url", baseURL.String()).
|
||||
WithDetail("responseBody", string(body))
|
||||
case http.StatusNotFound:
|
||||
// Endpoint not found - might not be a valid Prometheus instance
|
||||
return nil, validator.NewAPIUnavailableError(
|
||||
resp.StatusCode,
|
||||
string(body),
|
||||
fmt.Errorf("endpoint not found - this may not be a valid Prometheus datasource"),
|
||||
).WithDetail("url", baseURL.String())
|
||||
case http.StatusTooManyRequests:
|
||||
// Rate limiting
|
||||
return nil, validator.NewValidationError(
|
||||
validator.ErrCodeAPIRateLimit,
|
||||
"Prometheus API rate limit exceeded",
|
||||
http.StatusTooManyRequests,
|
||||
).WithDetail("url", baseURL.String()).WithDetail("responseBody", string(body))
|
||||
case http.StatusServiceUnavailable, http.StatusBadGateway, http.StatusGatewayTimeout:
|
||||
// Upstream service is down or unavailable
|
||||
return nil, validator.NewAPIUnavailableError(resp.StatusCode, string(body), nil).
|
||||
WithDetail("url", baseURL.String())
|
||||
default:
|
||||
// Other error status codes
|
||||
return nil, validator.NewAPIUnavailableError(resp.StatusCode, string(body), nil).
|
||||
WithDetail("url", baseURL.String())
|
||||
}
|
||||
|
||||
// Parse the response JSON
|
||||
var promResp prometheusResponse
|
||||
if err := json.Unmarshal(body, &promResp); err != nil {
|
||||
return nil, validator.NewAPIInvalidResponseError(
|
||||
"response is not valid JSON",
|
||||
err,
|
||||
).WithDetail("url", baseURL.String()).WithDetail("responseBody", string(body))
|
||||
}
|
||||
|
||||
// Check Prometheus API status field
|
||||
if promResp.Status != "success" {
|
||||
errorMsg := promResp.Error
|
||||
if errorMsg == "" {
|
||||
errorMsg = "unknown error"
|
||||
}
|
||||
return nil, validator.NewAPIInvalidResponseError(
|
||||
fmt.Sprintf("Prometheus API returned error status: %s", errorMsg),
|
||||
nil,
|
||||
).WithDetail("url", baseURL.String()).WithDetail("prometheusError", errorMsg)
|
||||
}
|
||||
|
||||
// Validate that we got data
|
||||
if promResp.Data == nil {
|
||||
return nil, validator.NewAPIInvalidResponseError(
|
||||
"response missing 'data' field",
|
||||
nil,
|
||||
).WithDetail("url", baseURL.String()).WithDetail("responseBody", string(body))
|
||||
}
|
||||
|
||||
return promResp.Data, nil
|
||||
}
|
||||
49
apps/dashvalidator/pkg/validator/prometheus/parser.go
Normal file
49
apps/dashvalidator/pkg/validator/prometheus/parser.go
Normal file
@@ -0,0 +1,49 @@
|
||||
package prometheus
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/prometheus/prometheus/promql/parser"
|
||||
)
|
||||
|
||||
// Parser extracts the metric names referenced by PromQL expressions.
// It is stateless, so a single instance may be shared freely.
type Parser struct{}

// NewParser constructs a ready-to-use PromQL parser.
func NewParser() *Parser {
	return new(Parser)
}
|
||||
|
||||
// ExtractMetrics parses a PromQL query and extracts all metric names
|
||||
// For example: "rate(http_requests_total[5m])" returns ["http_requests_total"]
|
||||
func (p *Parser) ExtractMetrics(query string) ([]string, error) {
|
||||
// Parse the PromQL expression
|
||||
expr, err := parser.ParseExpr(query)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("failed to parse PromQL query: %w", err)
|
||||
}
|
||||
|
||||
// Extract metric names by walking the AST
|
||||
metrics := make(map[string]bool) // Use map to deduplicate
|
||||
parser.Inspect(expr, func(node parser.Node, _ []parser.Node) error {
|
||||
// VectorSelector represents a metric selector like "up" or "up{job="foo"}"
|
||||
if vs, ok := node.(*parser.VectorSelector); ok {
|
||||
metrics[vs.Name] = true
|
||||
}
|
||||
// MatrixSelector represents range queries like "up[5m]"
|
||||
if ms, ok := node.(*parser.MatrixSelector); ok {
|
||||
if vs, ok := ms.VectorSelector.(*parser.VectorSelector); ok {
|
||||
metrics[vs.Name] = true
|
||||
}
|
||||
}
|
||||
return nil
|
||||
})
|
||||
|
||||
// Convert map to slice
|
||||
result := make([]string, 0, len(metrics))
|
||||
for metric := range metrics {
|
||||
result = append(result, metric)
|
||||
}
|
||||
|
||||
return result, nil
|
||||
}
|
||||
137
apps/dashvalidator/pkg/validator/prometheus/parser_test.go
Normal file
137
apps/dashvalidator/pkg/validator/prometheus/parser_test.go
Normal file
@@ -0,0 +1,137 @@
|
||||
package prometheus
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestExtractMetrics is a table-driven test for Parser.ExtractMetrics,
// organized into categories: basic extraction, function composition,
// binary operations, deduplication, edge cases, real dashboard patterns,
// and error handling. Results are compared order-independently because
// ExtractMetrics deduplicates through a map.
func TestExtractMetrics(t *testing.T) {
	parser := NewParser()

	tests := []struct {
		name          string
		query         string
		expected      []string
		expectError   bool
		errorContains string // substring required in the error, when expectError is set
	}{
		// Category 1: Basic Extraction (3 tests - covers AST node types)
		{
			name:     "simple metric",
			query:    "up",
			expected: []string{"up"},
		},
		{
			name:     "metric with labels",
			query:    `up{job="api"}`,
			expected: []string{"up"},
		},
		{
			name:     "range selector",
			query:    "up[5m]",
			expected: []string{"up"},
		},

		// Category 2: Function Composition (2 tests - nested complexity)
		{
			name:     "single function",
			query:    "rate(http_requests_total[5m])",
			expected: []string{"http_requests_total"},
		},
		{
			name:     "nested functions",
			query:    "sum(rate(requests[5m]))",
			expected: []string{"requests"},
		},

		// Category 3: Binary Operations (2 tests - multiple metrics)
		{
			name:     "two metrics",
			query:    "metric_a + metric_b",
			expected: []string{"metric_a", "metric_b"},
		},
		{
			name:     "three metrics nested",
			query:    "(a + b) / c",
			expected: []string{"a", "b", "c"},
		},

		// Category 4: Deduplication (1 test - critical behavior)
		{
			name:     "duplicate metric",
			query:    "up + up",
			expected: []string{"up"},
		},

		// Category 5: Edge Cases (2 tests - boundary behaviors)
		{
			name:     "no metrics (literals only)",
			query:    "1 + 1",
			expected: []string{},
		},
		{
			name:     "built-in function without metric",
			query:    "time()",
			expected: []string{},
		},
		{
			name:     "comparison operator",
			query:    "a > 5",
			expected: []string{"a"},
		},

		// Category 6: Real Dashboard Patterns (3 tests - production queries)
		{
			name:     "binary op with function and labels",
			query:    `(time() - process_start_time_seconds{job="prometheus", instance=~"$node"})`,
			expected: []string{"process_start_time_seconds"},
		},
		{
			name:     "rate with regex label matcher",
			query:    `rate(prometheus_local_storage_ingested_samples_total{instance=~"$node"}[5m])`,
			expected: []string{"prometheus_local_storage_ingested_samples_total"},
		},
		{
			name:     "metric with negation and multiple labels",
			query:    `prometheus_target_interval_length_seconds{quantile!="0.01", quantile!="0.05", instance=~"$node"}`,
			expected: []string{"prometheus_target_interval_length_seconds"},
		},

		// Category 7: Error Handling (2 tests - validation)
		{
			name:          "empty string",
			query:         "",
			expectError:   true,
			errorContains: "parse",
		},
		{
			name:          "malformed expression",
			query:         "{{invalid}}",
			expectError:   true,
			errorContains: "parse",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result, err := parser.ExtractMetrics(tt.query)

			// Check error expectation
			if tt.expectError {
				require.Error(t, err, "Expected error for query: %q", tt.query)
				if tt.errorContains != "" {
					require.ErrorContains(t, err, tt.errorContains,
						"Error should contain %q for query: %q", tt.errorContains, tt.query)
				}
				return
			}

			require.NoError(t, err, "Unexpected error for query: %q", tt.query)

			// Check result matches expected (order-independent for multiple metrics)
			require.ElementsMatch(t, tt.expected, result,
				"ExtractMetrics(%q) returned unexpected metrics", tt.query)
		})
	}
}
|
||||
149
apps/dashvalidator/pkg/validator/prometheus/validator.go
Normal file
149
apps/dashvalidator/pkg/validator/prometheus/validator.go
Normal file
@@ -0,0 +1,149 @@
|
||||
package prometheus
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"github.com/grafana/grafana/apps/dashvalidator/pkg/validator"
|
||||
)
|
||||
|
||||
// Register Prometheus validator on package import
|
||||
func init() {
|
||||
validator.RegisterValidator("prometheus", func() validator.DatasourceValidator {
|
||||
return NewValidator()
|
||||
})
|
||||
}
|
||||
|
||||
// Validator implements validator.DatasourceValidator for Prometheus datasources
|
||||
type Validator struct {
|
||||
parser *Parser
|
||||
fetcher *Fetcher
|
||||
}
|
||||
|
||||
// NewValidator creates a new Prometheus validator
|
||||
func NewValidator() validator.DatasourceValidator {
|
||||
return &Validator{
|
||||
parser: NewParser(),
|
||||
fetcher: NewFetcher(),
|
||||
}
|
||||
}
|
||||
|
||||
// ValidateQueries validates Prometheus queries against the datasource
|
||||
func (v *Validator) ValidateQueries(ctx context.Context, queries []validator.Query, datasource validator.Datasource) (*validator.ValidationResult, error) {
|
||||
fmt.Printf("[DEBUG PROM] Starting validation for %d queries against datasource %s\n", len(queries), datasource.URL)
|
||||
|
||||
result := &validator.ValidationResult{
|
||||
TotalQueries: len(queries),
|
||||
QueryBreakdown: make([]validator.QueryResult, 0, len(queries)),
|
||||
}
|
||||
|
||||
// Step 1: Parse all queries to extract metrics
|
||||
allMetrics := make(map[string]bool) // Use map to deduplicate
|
||||
queryMetrics := make(map[int][]string)
|
||||
|
||||
for i, query := range queries {
|
||||
fmt.Printf("[DEBUG PROM] Parsing query %d: %s\n", i, query.QueryText)
|
||||
metrics, err := v.parser.ExtractMetrics(query.QueryText)
|
||||
if err != nil {
|
||||
// If we can't parse the query, we still continue with others
|
||||
// but we don't count this query as "checked"
|
||||
fmt.Printf("[DEBUG PROM] Failed to parse query %d: %v\n", i, err)
|
||||
continue
|
||||
}
|
||||
fmt.Printf("[DEBUG PROM] Extracted %d metrics from query %d: %v\n", len(metrics), i, metrics)
|
||||
result.CheckedQueries++
|
||||
queryMetrics[i] = metrics
|
||||
|
||||
// Add to global metrics set
|
||||
for _, metric := range metrics {
|
||||
allMetrics[metric] = true
|
||||
}
|
||||
}
|
||||
|
||||
// Convert map to slice for fetcher
|
||||
metricsToCheck := make([]string, 0, len(allMetrics))
|
||||
for metric := range allMetrics {
|
||||
metricsToCheck = append(metricsToCheck, metric)
|
||||
}
|
||||
result.TotalMetrics = len(metricsToCheck)
|
||||
|
||||
fmt.Printf("[DEBUG PROM] Total metrics to check: %d - %v\n", len(metricsToCheck), metricsToCheck)
|
||||
|
||||
// Step 2: Fetch available metrics from Prometheus
|
||||
fmt.Printf("[DEBUG PROM] Fetching available metrics from %s\n", datasource.URL)
|
||||
availableMetrics, err := v.fetcher.FetchMetrics(ctx, datasource.URL, datasource.HTTPClient)
|
||||
if err != nil {
|
||||
fmt.Printf("[DEBUG PROM] Failed to fetch metrics: %v\n", err)
|
||||
return nil, fmt.Errorf("failed to fetch metrics from Prometheus: %w", err)
|
||||
}
|
||||
fmt.Printf("[DEBUG PROM] Fetched %d available metrics from Prometheus\n", len(availableMetrics))
|
||||
|
||||
// Build a set for O(1) lookup
|
||||
availableSet := make(map[string]bool)
|
||||
for _, metric := range availableMetrics {
|
||||
availableSet[metric] = true
|
||||
}
|
||||
|
||||
// Step 3: Calculate compatibility
|
||||
missingMetricsMap := make(map[string]bool)
|
||||
for _, metric := range metricsToCheck {
|
||||
if !availableSet[metric] {
|
||||
missingMetricsMap[metric] = true
|
||||
}
|
||||
}
|
||||
result.FoundMetrics = result.TotalMetrics - len(missingMetricsMap)
|
||||
|
||||
// Convert missing metrics map to slice
|
||||
result.MissingMetrics = make([]string, 0, len(missingMetricsMap))
|
||||
for metric := range missingMetricsMap {
|
||||
result.MissingMetrics = append(result.MissingMetrics, metric)
|
||||
}
|
||||
|
||||
// Step 4: Build per-query breakdown
|
||||
for i, query := range queries {
|
||||
metrics, ok := queryMetrics[i]
|
||||
if !ok {
|
||||
// Query wasn't parsed successfully, skip
|
||||
continue
|
||||
}
|
||||
|
||||
queryResult := validator.QueryResult{
|
||||
PanelTitle: query.PanelTitle,
|
||||
PanelID: query.PanelID,
|
||||
QueryRefID: query.RefID,
|
||||
TotalMetrics: len(metrics),
|
||||
}
|
||||
|
||||
// Check which metrics from this query are missing
|
||||
queryMissing := make([]string, 0)
|
||||
for _, metric := range metrics {
|
||||
if missingMetricsMap[metric] {
|
||||
queryMissing = append(queryMissing, metric)
|
||||
}
|
||||
}
|
||||
|
||||
queryResult.MissingMetrics = queryMissing
|
||||
queryResult.FoundMetrics = queryResult.TotalMetrics - len(queryMissing)
|
||||
|
||||
// Calculate query-level compatibility score
|
||||
if queryResult.TotalMetrics > 0 {
|
||||
queryResult.CompatibilityScore = float64(queryResult.FoundMetrics) / float64(queryResult.TotalMetrics)
|
||||
} else {
|
||||
queryResult.CompatibilityScore = 1.0 // No metrics = perfect compatibility
|
||||
}
|
||||
|
||||
result.QueryBreakdown = append(result.QueryBreakdown, queryResult)
|
||||
}
|
||||
|
||||
// Step 5: Calculate overall compatibility score
|
||||
if result.TotalMetrics > 0 {
|
||||
result.CompatibilityScore = float64(result.FoundMetrics) / float64(result.TotalMetrics)
|
||||
} else {
|
||||
result.CompatibilityScore = 1.0 // No metrics = perfect compatibility
|
||||
}
|
||||
|
||||
fmt.Printf("[DEBUG PROM] Validation complete! Score: %.2f, Found: %d/%d metrics\n",
|
||||
result.CompatibilityScore, result.FoundMetrics, result.TotalMetrics)
|
||||
|
||||
return result, nil
|
||||
}
|
||||
74
apps/dashvalidator/pkg/validator/types.go
Normal file
74
apps/dashvalidator/pkg/validator/types.go
Normal file
@@ -0,0 +1,74 @@
|
||||
package validator
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// DatasourceValidator validates dashboard queries against a datasource.
// Implementations exist per datasource type (Prometheus, MySQL, etc.) and
// register themselves via RegisterValidator from their package init().
type DatasourceValidator interface {
	// ValidateQueries checks if queries are compatible with the datasource.
	// It returns an aggregate ValidationResult, or an error when the
	// datasource itself could not be consulted.
	ValidateQueries(ctx context.Context, queries []Query, datasource Datasource) (*ValidationResult, error)
}
|
||||
|
||||
// Query represents a single dashboard query to validate.
type Query struct {
	RefID      string // Query reference ID within its panel (A, B, C, etc.)
	QueryText  string // The actual query text (PromQL, SQL, etc.)
	PanelTitle string // Panel title for user-friendly reporting
	PanelID    int    // Numeric panel ID for correlating with the dashboard JSON
}
|
||||
|
||||
// Datasource contains connection information for a datasource.
type Datasource struct {
	UID  string // Datasource UID from the dashboard
	Type string // Datasource type (prometheus, mysql, etc.)
	Name string // Datasource name for reporting
	URL  string // Datasource base URL for API calls
	// HTTPClient is the client used for requests; callers are expected to
	// supply one that already carries authentication.
	HTTPClient *http.Client
}
|
||||
|
||||
// ValidationResult contains aggregate validation results for a datasource.
type ValidationResult struct {
	TotalQueries   int           // Total number of queries found
	CheckedQueries int           // Number of queries successfully parsed and checked
	TotalMetrics   int           // Total unique metrics/entities referenced
	FoundMetrics   int           // Metrics found in the datasource (<= TotalMetrics)
	MissingMetrics []string      // Referenced metrics absent from the datasource
	QueryBreakdown []QueryResult // Per-query results (one per checked query)
	// CompatibilityScore is FoundMetrics / TotalMetrics as a fraction in
	// the range 0.0 - 1.0 (1.0 when no metrics are referenced).
	CompatibilityScore float64
}
|
||||
|
||||
// QueryResult contains validation results for a single query.
type QueryResult struct {
	PanelTitle     string   // Panel title
	PanelID        int      // Panel ID
	QueryRefID     string   // Query reference ID within the panel
	TotalMetrics   int      // Unique metrics referenced by this query
	FoundMetrics   int      // Metrics found in the datasource (<= TotalMetrics)
	MissingMetrics []string // Missing metrics for this query
	// CompatibilityScore is FoundMetrics / TotalMetrics as a fraction in
	// the range 0.0 - 1.0 (1.0 when no metrics are referenced).
	CompatibilityScore float64
}
|
||||
|
||||
// validatorRegistry holds registered validator constructors
|
||||
// Validators register themselves using RegisterValidator in their init() functions
|
||||
var validatorRegistry = make(map[string]func() DatasourceValidator)
|
||||
|
||||
// RegisterValidator registers a validator constructor for a datasource type
|
||||
// This is called by validator implementations in their init() functions
|
||||
// Example: validator.RegisterValidator("prometheus", func() validator.DatasourceValidator { return NewValidator() })
|
||||
func RegisterValidator(dsType string, constructor func() DatasourceValidator) {
|
||||
validatorRegistry[dsType] = constructor
|
||||
}
|
||||
|
||||
// GetValidator returns a validator for the given datasource type
|
||||
// Returns an error if the datasource type is not supported
|
||||
func GetValidator(dsType string) (DatasourceValidator, error) {
|
||||
constructor, ok := validatorRegistry[dsType]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("unsupported datasource type: %s", dsType)
|
||||
}
|
||||
return constructor(), nil
|
||||
}
|
||||
164
apps/dashvalidator/pkg/validator/variables_test.go
Normal file
164
apps/dashvalidator/pkg/validator/variables_test.go
Normal file
@@ -0,0 +1,164 @@
|
||||
package validator
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
// TestIsVariableReference checks recognition of Grafana datasource variable
// syntax: $name, ${name}, and [[name]] (with optional fieldPath/format
// suffixes), matching Grafana's \w+ name rule — digits are accepted,
// dashes are not.
func TestIsVariableReference(t *testing.T) {
	tests := []struct {
		name     string
		input    string
		expected bool
	}{
		{"dollar brace", "${prometheus}", true},
		{"dollar simple", "$datasource", true},
		{"double bracket", "[[prometheus]]", true},
		{"concrete uid", "abcd1234", false},
		{"empty string", "", false},
		{"dollar only", "$", false},
		{"empty braces", "${}", false},
		{"number start", "$123", true},            // Changed: Grafana ACCEPTS digits (per \w+ regex)
		{"all digits", "$999", true},              // New: All digits are valid per \w+
		{"special chars dash", "$ds-name", false}, // Changed: Grafana REJECTS dashes (not in \w)
		{"underscore", "$DS_PROMETHEUS", true},
		{"complex variable", "${DS_PROMETHEUS}", true},
		{"simple letter", "$p", true},
		{"with fieldpath", "${var.field}", true},   // New: Test fieldPath syntax
		{"with format", "[[var:text]]", true},      // New: Test format syntax
		{"brace with format", "${var:json}", true}, // New: Test brace format syntax
		{"digit in brackets", "[[123]]", true},     // New: Digits allowed in all patterns
		{"empty brackets", "[[]]", false},          // New: Empty brackets rejected
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := isVariableReference(tt.input)
			require.Equal(t, tt.expected, result, "isVariableReference(%q) returned unexpected result", tt.input)
		})
	}
}
|
||||
|
||||
// TestExtractVariableName checks that the bare variable name is extracted
// from each supported syntax, stripping fieldPath (".field") and format
// (":json"/":text") suffixes; non-variable input yields an empty string.
func TestExtractVariableName(t *testing.T) {
	tests := []struct {
		name     string
		input    string
		expected string
	}{
		{"dollar brace", "${prometheus}", "prometheus"},
		{"dollar simple", "$datasource", "datasource"},
		{"double bracket", "[[prometheus]]", "prometheus"},
		{"not variable", "concrete-uid", ""},
		{"empty", "", ""},
		{"complex name", "${DS_PROMETHEUS}", "DS_PROMETHEUS"},
		{"with underscore", "$DS_NAME", "DS_NAME"},
		{"digit variable", "$123", "123"},                    // New: Digits are valid
		{"with fieldpath", "${var.field}", "var"},            // Changed: Extract only name, not fieldPath
		{"with format brace", "${var:json}", "var"},          // Changed: Extract only name, not format
		{"with format bracket", "[[var:text]]", "var"},       // Changed: Extract only name, not format
		{"fieldpath and format", "${var.field:json}", "var"}, // New: Extract only name from complex syntax
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := extractVariableName(tt.input)
			require.Equal(t, tt.expected, result, "extractVariableName(%q) returned unexpected result", tt.input)
		})
	}
}
|
||||
|
||||
// TestIsPrometheusVariable checks detection of whether a datasource variable
// refers to a Prometheus datasource, based on the dashboard's __inputs
// declarations. When __inputs is absent, the implementation falls back to
// treating the variable as Prometheus (MVP behavior).
func TestIsPrometheusVariable(t *testing.T) {
	// Dashboard with Prometheus __inputs
	dashboardWithPrometheus := map[string]interface{}{
		"__inputs": []interface{}{
			map[string]interface{}{
				"name":     "DS_PROMETHEUS",
				"type":     "datasource",
				"pluginId": "prometheus",
			},
		},
	}

	// Dashboard with MySQL __inputs
	dashboardWithMySQL := map[string]interface{}{
		"__inputs": []interface{}{
			map[string]interface{}{
				"name":     "DS_MYSQL",
				"type":     "datasource",
				"pluginId": "mysql",
			},
		},
	}

	// Dashboard without __inputs
	dashboardWithoutInputs := map[string]interface{}{
		"title": "Test Dashboard",
	}

	tests := []struct {
		name      string
		varRef    string
		dashboard map[string]interface{}
		expected  bool
	}{
		{"prometheus variable with inputs", "${DS_PROMETHEUS}", dashboardWithPrometheus, true},
		{"prometheus simple var", "$DS_PROMETHEUS", dashboardWithPrometheus, true},
		{"mysql variable", "${DS_MYSQL}", dashboardWithMySQL, false},
		{"not variable", "concrete-uid", dashboardWithPrometheus, false},
		{"variable without inputs", "${prometheus}", dashboardWithoutInputs, true}, // Fallback to true for MVP
		{"wrong variable name", "${OTHER}", dashboardWithPrometheus, false},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := isPrometheusVariable(tt.varRef, tt.dashboard)
			require.Equal(t, tt.expected, result, "isPrometheusVariable(%q, dashboard) returned unexpected result", tt.varRef)
		})
	}
}
|
||||
|
||||
// TestResolveDatasourceUID checks resolution of datasource references:
// concrete UIDs pass through unchanged, Prometheus variables resolve to the
// single configured datasource UID, and non-Prometheus variables are
// returned as-is.
func TestResolveDatasourceUID(t *testing.T) {
	singleUID := "prom-uid-123"

	dashboardWithPrometheus := map[string]interface{}{
		"__inputs": []interface{}{
			map[string]interface{}{
				"name":     "DS_PROMETHEUS",
				"type":     "datasource",
				"pluginId": "prometheus",
			},
		},
	}

	dashboardWithMySQL := map[string]interface{}{
		"__inputs": []interface{}{
			map[string]interface{}{
				"name":     "DS_MYSQL",
				"type":     "datasource",
				"pluginId": "mysql",
			},
		},
	}

	tests := []struct {
		name        string
		uid         string
		dashboard   map[string]interface{}
		expectedUID string
		description string
	}{
		{"concrete uid", "concrete-123", dashboardWithPrometheus, "concrete-123", "should return concrete UID as-is"},
		{"prometheus variable", "${DS_PROMETHEUS}", dashboardWithPrometheus, singleUID, "should resolve to single datasource UID"},
		{"prometheus simple var", "$DS_PROMETHEUS", dashboardWithPrometheus, singleUID, "should resolve simple $ syntax"},
		{"mysql variable", "${DS_MYSQL}", dashboardWithMySQL, "${DS_MYSQL}", "should return non-Prometheus variable as-is"},
		{"empty uid", "", dashboardWithPrometheus, "", "should return empty string as-is"},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := resolveDatasourceUID(tt.uid, singleUID, tt.dashboard)
			require.Equal(t, tt.expectedUID, result, "resolveDatasourceUID(%q, %q, dashboard): %s", tt.uid, singleUID, tt.description)
		})
	}
}
|
||||
@@ -0,0 +1,49 @@
|
||||
/*
|
||||
* This file was generated by grafana-app-sdk. DO NOT EDIT.
|
||||
*/
|
||||
import { Spec } from './types.spec.gen';
|
||||
import { Status } from './types.status.gen';
|
||||
|
||||
export interface Metadata {
|
||||
name: string;
|
||||
namespace: string;
|
||||
generateName?: string;
|
||||
selfLink?: string;
|
||||
uid?: string;
|
||||
resourceVersion?: string;
|
||||
generation?: number;
|
||||
creationTimestamp?: string;
|
||||
deletionTimestamp?: string;
|
||||
deletionGracePeriodSeconds?: number;
|
||||
labels?: Record<string, string>;
|
||||
annotations?: Record<string, string>;
|
||||
ownerReferences?: OwnerReference[];
|
||||
finalizers?: string[];
|
||||
managedFields?: ManagedFieldsEntry[];
|
||||
}
|
||||
|
||||
export interface OwnerReference {
|
||||
apiVersion: string;
|
||||
kind: string;
|
||||
name: string;
|
||||
uid: string;
|
||||
controller?: boolean;
|
||||
blockOwnerDeletion?: boolean;
|
||||
}
|
||||
|
||||
export interface ManagedFieldsEntry {
|
||||
manager?: string;
|
||||
operation?: string;
|
||||
apiVersion?: string;
|
||||
time?: string;
|
||||
fieldsType?: string;
|
||||
subresource?: string;
|
||||
}
|
||||
|
||||
export interface DashboardCompatibilityScore {
|
||||
kind: string;
|
||||
apiVersion: string;
|
||||
metadata: Metadata;
|
||||
spec: Spec;
|
||||
status: Status;
|
||||
}
|
||||
30
apps/dashvalidator/plugin/src/generated/dashboardcompatibilityscore/v1alpha1/types.metadata.gen.ts
generated
Normal file
30
apps/dashvalidator/plugin/src/generated/dashboardcompatibilityscore/v1alpha1/types.metadata.gen.ts
generated
Normal file
@@ -0,0 +1,30 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
// metadata contains embedded CommonMetadata and can be extended with custom string fields
|
||||
// TODO: use CommonMetadata instead of redefining here; currently needs to be defined here
|
||||
// without external reference as using the CommonMetadata reference breaks thema codegen.
|
||||
export interface Metadata {
|
||||
updateTimestamp: string;
|
||||
createdBy: string;
|
||||
uid: string;
|
||||
creationTimestamp: string;
|
||||
deletionTimestamp?: string;
|
||||
finalizers: string[];
|
||||
resourceVersion: string;
|
||||
generation: number;
|
||||
updatedBy: string;
|
||||
labels: Record<string, string>;
|
||||
}
|
||||
|
||||
export const defaultMetadata = (): Metadata => ({
|
||||
updateTimestamp: "",
|
||||
createdBy: "",
|
||||
uid: "",
|
||||
creationTimestamp: "",
|
||||
finalizers: [],
|
||||
resourceVersion: "",
|
||||
generation: 0,
|
||||
updatedBy: "",
|
||||
labels: {},
|
||||
});
|
||||
|
||||
42
apps/dashvalidator/plugin/src/generated/dashboardcompatibilityscore/v1alpha1/types.spec.gen.ts
generated
Normal file
42
apps/dashvalidator/plugin/src/generated/dashboardcompatibilityscore/v1alpha1/types.spec.gen.ts
generated
Normal file
@@ -0,0 +1,42 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
// DataSourceMapping specifies a datasource to validate dashboard queries against.
|
||||
// Maps logical datasource references in the dashboard to actual datasource instances.
|
||||
export interface DataSourceMapping {
|
||||
// Unique identifier of the datasource instance.
|
||||
// Example: "prometheus-prod-us-west"
|
||||
uid: string;
|
||||
// Type of datasource plugin.
|
||||
// MVP: Only "prometheus" supported.
|
||||
// Future: "mysql", "postgres", "elasticsearch", etc.
|
||||
type: string;
|
||||
// Optional human-readable name for display in results.
|
||||
// If not provided, UID will be used in error messages.
|
||||
// Example: "Production Prometheus (US-West)"
|
||||
name?: string;
|
||||
}
|
||||
|
||||
export const defaultDataSourceMapping = (): DataSourceMapping => ({
|
||||
uid: "",
|
||||
type: "",
|
||||
});
|
||||
|
||||
export interface Spec {
|
||||
// Complete dashboard JSON object to validate.
|
||||
// Must be a v1 dashboard schema (contains "panels" array).
|
||||
// v2 dashboards (with "elements" structure) are not yet supported.
|
||||
dashboardJson: Record<string, any>;
|
||||
// Array of datasources to validate against.
|
||||
// The validator will check dashboard queries against each datasource
|
||||
// and provide per-datasource compatibility results.
|
||||
//
|
||||
// MVP: Only single datasource supported (array length = 1), Prometheus type only.
|
||||
// Future: Will support multiple datasources for dashboards with mixed queries.
|
||||
datasourceMappings: DataSourceMapping[];
|
||||
}
|
||||
|
||||
export const defaultSpec = (): Spec => ({
|
||||
dashboardJson: {},
|
||||
datasourceMappings: [],
|
||||
});
|
||||
|
||||
142
apps/dashvalidator/plugin/src/generated/dashboardcompatibilityscore/v1alpha1/types.status.gen.ts
generated
Normal file
142
apps/dashvalidator/plugin/src/generated/dashboardcompatibilityscore/v1alpha1/types.status.gen.ts
generated
Normal file
@@ -0,0 +1,142 @@
|
||||
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
|
||||
|
||||
// DataSourceResult contains validation results for a single datasource.
|
||||
// Provides aggregate statistics and per-query breakdown of compatibility.
|
||||
export interface DataSourceResult {
|
||||
// Datasource UID that was validated (matches DataSourceMapping.uid)
|
||||
uid: string;
|
||||
// Datasource type (matches DataSourceMapping.type)
|
||||
type: string;
|
||||
// Optional display name (matches DataSourceMapping.name if provided)
|
||||
name?: string;
|
||||
// Total number of queries in the dashboard targeting this datasource.
|
||||
// Includes all panel targets/queries that reference this datasource.
|
||||
totalQueries: number;
|
||||
// Number of queries successfully validated.
|
||||
// May be less than totalQueries if some queries couldn't be parsed.
|
||||
checkedQueries: number;
|
||||
// Total number of unique metrics/identifiers referenced across all queries.
|
||||
// For Prometheus: metric names extracted from PromQL expressions.
|
||||
// For SQL datasources: table and column names.
|
||||
totalMetrics: number;
|
||||
// Number of metrics that exist in the datasource schema.
|
||||
// foundMetrics <= totalMetrics
|
||||
foundMetrics: number;
|
||||
// Array of metric names that were referenced but don't exist.
|
||||
// Useful for debugging why a dashboard shows "no data".
|
||||
// Example for Prometheus: ["http_requests_total", "api_latency_seconds"]
|
||||
missingMetrics: string[];
|
||||
// Per-query breakdown showing which specific queries have issues.
|
||||
// One entry per query target (refId: "A", "B", "C", etc.) in each panel.
|
||||
// Allows pinpointing exactly which panel/query needs fixing.
|
||||
queryBreakdown: QueryBreakdown[];
|
||||
// Overall compatibility score for this datasource (0-100).
|
||||
// Calculated as: (foundMetrics / totalMetrics) * 100
|
||||
// Used to calculate the global compatibilityScore in status.
|
||||
compatibilityScore: number;
|
||||
}
|
||||
|
||||
export const defaultDataSourceResult = (): DataSourceResult => ({
|
||||
uid: "",
|
||||
type: "",
|
||||
totalQueries: 0,
|
||||
checkedQueries: 0,
|
||||
totalMetrics: 0,
|
||||
foundMetrics: 0,
|
||||
missingMetrics: [],
|
||||
queryBreakdown: [],
|
||||
compatibilityScore: 0,
|
||||
});
|
||||
|
||||
// QueryBreakdown provides compatibility details for a single query within a panel.
|
||||
// Granular per-query results allow users to identify exactly which queries need fixing.
|
||||
//
|
||||
// Note: A panel can have multiple queries (refId: "A", "B", "C", etc.),
|
||||
// so there may be multiple QueryBreakdown entries for the same panelID.
|
||||
export interface QueryBreakdown {
|
||||
// Human-readable panel title for context.
|
||||
// Example: "CPU Usage", "Request Rate"
|
||||
panelTitle: string;
|
||||
// Numeric panel ID from dashboard JSON.
|
||||
// Used to correlate with dashboard structure.
|
||||
panelID: number;
|
||||
// Query identifier within the panel.
|
||||
// Values: "A", "B", "C", etc. (from panel.targets[].refId)
|
||||
// Uniquely identifies which query in a multi-query panel this refers to.
|
||||
queryRefId: string;
|
||||
// Number of unique metrics referenced in this specific query.
|
||||
// For Prometheus: metrics extracted from the PromQL expr.
|
||||
// Example: rate(http_requests_total[5m]) references 1 metric.
|
||||
totalMetrics: number;
|
||||
// Number of those metrics that exist in the datasource.
|
||||
// foundMetrics <= totalMetrics
|
||||
foundMetrics: number;
|
||||
// Array of missing metric names specific to this query.
|
||||
// Helps identify exactly which part of a query expression will fail.
|
||||
// Empty array means query is fully compatible.
|
||||
missingMetrics: string[];
|
||||
// Compatibility percentage for this individual query (0-100).
|
||||
// Calculated as: (foundMetrics / totalMetrics) * 100
|
||||
// 100 = query will work perfectly, 0 = query will return no data.
|
||||
compatibilityScore: number;
|
||||
}
|
||||
|
||||
export const defaultQueryBreakdown = (): QueryBreakdown => ({
|
||||
panelTitle: "",
|
||||
panelID: 0,
|
||||
queryRefId: "",
|
||||
totalMetrics: 0,
|
||||
foundMetrics: 0,
|
||||
missingMetrics: [],
|
||||
compatibilityScore: 0,
|
||||
});
|
||||
|
||||
export interface OperatorState {
|
||||
// lastEvaluation is the ResourceVersion last evaluated
|
||||
lastEvaluation: string;
|
||||
// state describes the state of the lastEvaluation.
|
||||
// It is limited to three possible states for machine evaluation.
|
||||
state: "success" | "in_progress" | "failed";
|
||||
// descriptiveState is an optional more descriptive state field which has no requirements on format
|
||||
descriptiveState?: string;
|
||||
// details contains any extra information that is operator-specific
|
||||
details?: Record<string, any>;
|
||||
}
|
||||
|
||||
export const defaultOperatorState = (): OperatorState => ({
|
||||
lastEvaluation: "",
|
||||
state: "success",
|
||||
});
|
||||
|
||||
/**
 * Status reports the outcome of dashboard-compatibility validation,
 * aggregated across all mapped datasources, plus standard operator
 * bookkeeping (operatorStates / additionalFields).
 */
export interface Status {
  // Overall compatibility score across all datasources (0-100).
  // Calculated as: (total found metrics / total referenced metrics) * 100
  //
  // Score interpretation:
  // - 100: Perfect compatibility, all queries will work
  // - 80-99: Excellent, minor missing metrics
  // - 50-79: Fair, significant missing metrics
  // - 0-49: Poor, most queries will fail
  compatibilityScore: number;
  // Per-datasource validation results.
  // Array length matches spec.datasourceMappings.
  // Each element contains detailed metrics and query-level breakdown.
  datasourceResults: DataSourceResult[];
  // ISO 8601 timestamp of when validation was last performed.
  // Example: "2024-01-15T10:30:00Z"
  lastChecked?: string;
  // operatorStates is a map of operator ID to operator state evaluations.
  // Any operator which consumes this kind SHOULD add its state evaluation information to this field.
  operatorStates?: Record<string, OperatorState>;
  // Human-readable summary of validation result.
  // Examples: "All queries compatible", "3 missing metrics found"
  message?: string;
  // additionalFields is reserved for future use
  additionalFields?: Record<string, any>;
}
|
||||
|
||||
export const defaultStatus = (): Status => ({
|
||||
compatibilityScore: 0,
|
||||
datasourceResults: [],
|
||||
});
|
||||
|
||||
1
go.work
1
go.work
@@ -14,6 +14,7 @@ use (
|
||||
./apps/collections
|
||||
./apps/correlations
|
||||
./apps/dashboard
|
||||
./apps/dashvalidator
|
||||
./apps/example
|
||||
./apps/folder
|
||||
./apps/iam
|
||||
|
||||
@@ -18,6 +18,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/registry/apps/alerting/rules"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/annotation"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/correlations"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/dashvalidator"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/example"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/logsdrilldown"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/playlist"
|
||||
@@ -46,12 +47,14 @@ func ProvideAppInstallers(
|
||||
advisorAppInstaller *advisor.AdvisorAppInstaller,
|
||||
alertingHistorianAppInstaller *historian.AlertingHistorianAppInstaller,
|
||||
quotasAppInstaller *quotas.QuotasAppInstaller,
|
||||
dashvalidatorAppInstaller *dashvalidator.DashValidatorAppInstaller,
|
||||
) []appsdkapiserver.AppInstaller {
|
||||
featureClient := openfeature.NewDefaultClient()
|
||||
installers := []appsdkapiserver.AppInstaller{
|
||||
playlistAppInstaller,
|
||||
pluginsApplInstaller,
|
||||
exampleAppInstaller,
|
||||
dashvalidatorAppInstaller,
|
||||
}
|
||||
if featureClient.Boolean(context.Background(), featuremgmt.FlagKubernetesUnifiedStorageQuotas, false, openfeature.TransactionContext(context.Background())) {
|
||||
installers = append(installers, quotasAppInstaller)
|
||||
|
||||
@@ -12,6 +12,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/registry/apps/alerting/rules"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/annotation"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/correlations"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/dashvalidator"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/example"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/playlist"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/plugins"
|
||||
@@ -29,6 +30,7 @@ func TestProvideAppInstallers_Table(t *testing.T) {
|
||||
advisorAppInstaller := &advisor.AdvisorAppInstaller{}
|
||||
historianAppInstaller := &historian.AlertingHistorianAppInstaller{}
|
||||
quotasAppInstaller := &quotas.QuotasAppInstaller{}
|
||||
dashvalidatorAppInstaller := &dashvalidator.DashValidatorAppInstaller{}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
@@ -45,7 +47,7 @@ func TestProvideAppInstallers_Table(t *testing.T) {
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
features := featuremgmt.WithFeatures(tt.flags...)
|
||||
got := ProvideAppInstallers(features, playlistInstaller, pluginsInstaller, nil, tt.rulesInst, correlationsAppInstaller, notificationsAppInstaller, nil, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, historianAppInstaller, quotasAppInstaller)
|
||||
got := ProvideAppInstallers(features, playlistInstaller, pluginsInstaller, nil, tt.rulesInst, correlationsAppInstaller, notificationsAppInstaller, nil, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, historianAppInstaller, quotasAppInstaller, dashvalidatorAppInstaller)
|
||||
if tt.expectRulesApp {
|
||||
require.Contains(t, got, tt.rulesInst)
|
||||
} else {
|
||||
|
||||
70
pkg/registry/apps/dashvalidator/register.go
Normal file
70
pkg/registry/apps/dashvalidator/register.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package dashvalidator
|
||||
|
||||
import (
|
||||
"github.com/grafana/grafana-app-sdk/app"
|
||||
appsdkapiserver "github.com/grafana/grafana-app-sdk/k8s/apiserver"
|
||||
"github.com/grafana/grafana-app-sdk/simple"
|
||||
"k8s.io/apiserver/pkg/authorization/authorizer"
|
||||
restclient "k8s.io/client-go/rest"
|
||||
|
||||
validatorapis "github.com/grafana/grafana/apps/dashvalidator/pkg/apis/manifestdata"
|
||||
validatorapp "github.com/grafana/grafana/apps/dashvalidator/pkg/app"
|
||||
"github.com/grafana/grafana/pkg/infra/httpclient"
|
||||
roleauthorizer "github.com/grafana/grafana/pkg/services/apiserver/auth/authorizer"
|
||||
"github.com/grafana/grafana/pkg/services/datasources"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/plugincontext"
|
||||
)
|
||||
|
||||
// Compile-time assertion that *DashValidatorAppInstaller satisfies the
// app-sdk AppInstaller interface.
var _ appsdkapiserver.AppInstaller = (*DashValidatorAppInstaller)(nil)

// DashValidatorAppInstaller embeds the default app-sdk installer built in
// RegisterAppInstaller, overriding GetAuthorizer to supply Grafana's
// role-based authorizer.
type DashValidatorAppInstaller struct {
	appsdkapiserver.AppInstaller
}
|
||||
|
||||
// RegisterAppInstaller is called by Wire to create the app installer.
//
// It bundles the datasource service, plugin-context provider, and HTTP
// client provider into the app's specific config, builds an app provider
// from the locally embedded manifest, and wraps the resulting default
// app-sdk installer in a DashValidatorAppInstaller. Returns an error only
// if constructing the default installer fails.
func RegisterAppInstaller(
	datasourceSvc datasources.DataSourceService,
	pluginCtx *plugincontext.Provider,
	httpClientProvider httpclient.Provider,
) (*DashValidatorAppInstaller, error) {
	// Create specific config for the app; this is handed both to the app
	// provider and to the app.Config below.
	specificConfig := &validatorapp.DashValidatorConfig{
		DatasourceSvc:      datasourceSvc,
		PluginCtx:          pluginCtx,
		HTTPClientProvider: httpClientProvider,
	}

	// Create the app provider from the local manifest and the app's
	// constructor (validatorapp.New).
	provider := simple.NewAppProvider(
		validatorapis.LocalManifest(),
		specificConfig,
		validatorapp.New,
	)

	// Create app config
	appConfig := app.Config{
		KubeConfig:     restclient.Config{}, // Will be overridden by installer
		ManifestData:   *validatorapis.LocalManifest().ManifestData,
		SpecificConfig: specificConfig,
	}

	// Create the default installer, associating manifest kinds with their
	// generated Go types.
	defaultInstaller, err := appsdkapiserver.NewDefaultAppInstaller(
		provider,
		appConfig,
		validatorapis.NewGoTypeAssociator(),
	)
	if err != nil {
		return nil, err
	}

	return &DashValidatorAppInstaller{
		AppInstaller: defaultInstaller,
	}, nil
}
|
||||
|
||||
// GetAuthorizer provides the authorization for the app.
// Overrides the embedded default installer's authorizer with Grafana's
// role-based authorizer.
func (a *DashValidatorAppInstaller) GetAuthorizer() authorizer.Authorizer {
	// NOTE(review): the nolint directive suppresses a staticcheck finding
	// on this call — presumably a deprecation on NewRoleAuthorizer; confirm.
	//nolint:staticcheck
	return roleauthorizer.NewRoleAuthorizer()
}
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/registry/apps/alerting/rules"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/annotation"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/correlations"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/dashvalidator"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/example"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/logsdrilldown"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/playlist"
|
||||
@@ -30,4 +31,5 @@ var WireSet = wire.NewSet(
|
||||
annotation.RegisterAppInstaller,
|
||||
quotas.RegisterAppInstaller,
|
||||
example.RegisterAppInstaller,
|
||||
dashvalidator.RegisterAppInstaller,
|
||||
)
|
||||
|
||||
13
pkg/server/wire_gen.go
generated
13
pkg/server/wire_gen.go
generated
@@ -84,6 +84,7 @@ import (
|
||||
"github.com/grafana/grafana/pkg/registry/apps/alerting/rules"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/annotation"
|
||||
correlations2 "github.com/grafana/grafana/pkg/registry/apps/correlations"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/dashvalidator"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/example"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/logsdrilldown"
|
||||
"github.com/grafana/grafana/pkg/registry/apps/playlist"
|
||||
@@ -831,7 +832,11 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
v2 := appregistry.ProvideAppInstallers(featureToggles, playlistAppInstaller, appInstaller, shortURLAppInstaller, alertingRulesAppInstaller, correlationsAppInstaller, alertingNotificationsAppInstaller, logsDrilldownAppInstaller, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, alertingHistorianAppInstaller, quotasAppInstaller)
|
||||
dashValidatorAppInstaller, err := dashvalidator.RegisterAppInstaller(service15, plugincontextProvider, httpclientProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
v2 := appregistry.ProvideAppInstallers(featureToggles, playlistAppInstaller, appInstaller, shortURLAppInstaller, alertingRulesAppInstaller, correlationsAppInstaller, alertingNotificationsAppInstaller, logsDrilldownAppInstaller, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, alertingHistorianAppInstaller, quotasAppInstaller, dashValidatorAppInstaller)
|
||||
builderMetrics := builder.ProvideBuilderMetrics(registerer)
|
||||
backend := auditing.ProvideNoopBackend()
|
||||
policyRuleProvider := auditing.ProvideNoopPolicyRuleProvider()
|
||||
@@ -1499,7 +1504,11 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
v2 := appregistry.ProvideAppInstallers(featureToggles, playlistAppInstaller, appInstaller, shortURLAppInstaller, alertingRulesAppInstaller, correlationsAppInstaller, alertingNotificationsAppInstaller, logsDrilldownAppInstaller, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, alertingHistorianAppInstaller, quotasAppInstaller)
|
||||
dashValidatorAppInstaller, err := dashvalidator.RegisterAppInstaller(service15, plugincontextProvider, httpclientProvider)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
v2 := appregistry.ProvideAppInstallers(featureToggles, playlistAppInstaller, appInstaller, shortURLAppInstaller, alertingRulesAppInstaller, correlationsAppInstaller, alertingNotificationsAppInstaller, logsDrilldownAppInstaller, annotationAppInstaller, exampleAppInstaller, advisorAppInstaller, alertingHistorianAppInstaller, quotasAppInstaller, dashValidatorAppInstaller)
|
||||
builderMetrics := builder.ProvideBuilderMetrics(registerer)
|
||||
backend := auditing.ProvideNoopBackend()
|
||||
policyRuleProvider := auditing.ProvideNoopPolicyRuleProvider()
|
||||
|
||||
@@ -2,10 +2,12 @@ import { screen, waitFor } from '@testing-library/react';
|
||||
import React from 'react';
|
||||
import { render } from 'test/test-utils';
|
||||
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
|
||||
import { CommunityDashboardSection } from './CommunityDashboardSection';
|
||||
import { fetchCommunityDashboards } from './api/dashboardLibraryApi';
|
||||
import { GnetDashboard } from './types';
|
||||
import { onUseCommunityDashboard } from './utils/communityDashboardHelpers';
|
||||
import { onUseCommunityDashboard, interpolateDashboardForCompatibilityCheck } from './utils/communityDashboardHelpers';
|
||||
|
||||
jest.mock('./api/dashboardLibraryApi', () => ({
|
||||
fetchCommunityDashboards: jest.fn(),
|
||||
@@ -14,21 +16,32 @@ jest.mock('./api/dashboardLibraryApi', () => ({
|
||||
jest.mock('./utils/communityDashboardHelpers', () => ({
|
||||
...jest.requireActual('./utils/communityDashboardHelpers'),
|
||||
onUseCommunityDashboard: jest.fn(),
|
||||
interpolateDashboardForCompatibilityCheck: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('./CompatibilityModal', () => ({
|
||||
CompatibilityModal: jest.fn(() => <div>Compatibility Modal</div>),
|
||||
}));
|
||||
|
||||
// Track the datasource type for mocking
|
||||
let mockDatasourceType = 'prometheus';
|
||||
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...jest.requireActual('@grafana/runtime'),
|
||||
getDataSourceSrv: () => ({
|
||||
getInstanceSettings: jest.fn((uid: string) => ({
|
||||
uid,
|
||||
name: `DataSource ${uid}`,
|
||||
type: 'test',
|
||||
type: mockDatasourceType,
|
||||
})),
|
||||
}),
|
||||
}));
|
||||
|
||||
const mockFetchCommunityDashboards = fetchCommunityDashboards as jest.MockedFunction<typeof fetchCommunityDashboards>;
|
||||
const mockOnUseCommunityDashboard = onUseCommunityDashboard as jest.MockedFunction<typeof onUseCommunityDashboard>;
|
||||
const mockInterpolateDashboard = interpolateDashboardForCompatibilityCheck as jest.MockedFunction<
|
||||
typeof interpolateDashboardForCompatibilityCheck
|
||||
>;
|
||||
|
||||
const createMockGnetDashboard = (overrides: Partial<GnetDashboard> = {}): GnetDashboard => ({
|
||||
id: 1,
|
||||
@@ -42,13 +55,14 @@ const createMockGnetDashboard = (overrides: Partial<GnetDashboard> = {}): GnetDa
|
||||
|
||||
const setup = async (
|
||||
props: Partial<React.ComponentProps<typeof CommunityDashboardSection>> = {},
|
||||
successScenario = true
|
||||
successScenario = true,
|
||||
datasourceUid = 'test-datasource-uid'
|
||||
) => {
|
||||
const renderResult = render(
|
||||
<CommunityDashboardSection onShowMapping={jest.fn()} datasourceType="test" {...props} />,
|
||||
<CommunityDashboardSection onShowMapping={jest.fn()} datasourceType={mockDatasourceType} {...props} />,
|
||||
{
|
||||
historyOptions: {
|
||||
initialEntries: ['/test?dashboardLibraryDatasourceUid=test-datasource-uid'],
|
||||
initialEntries: [`/test?dashboardLibraryDatasourceUid=${datasourceUid}`],
|
||||
},
|
||||
}
|
||||
);
|
||||
@@ -65,6 +79,7 @@ const setup = async (
|
||||
describe('CommunityDashboardSection', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
mockDatasourceType = 'prometheus';
|
||||
});
|
||||
|
||||
it('should render', async () => {
|
||||
@@ -122,4 +137,115 @@ describe('CommunityDashboardSection', () => {
|
||||
expect(consoleErrorSpy).toHaveBeenCalledWith('Error loading community dashboards', expect.any(Error));
|
||||
consoleErrorSpy.mockRestore();
|
||||
});
|
||||
|
||||
describe('Compatibility Check Feature', () => {
|
||||
it('should show "Check Compatibility" button when datasource type is prometheus', async () => {
|
||||
mockDatasourceType = 'prometheus';
|
||||
mockFetchCommunityDashboards.mockResolvedValue({
|
||||
page: 1,
|
||||
pages: 5,
|
||||
items: [createMockGnetDashboard()],
|
||||
});
|
||||
|
||||
await setup();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('button', { name: 'Check compatibility' })).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should hide "Check Compatibility" button when datasource type is not prometheus', async () => {
|
||||
mockDatasourceType = 'influxdb';
|
||||
mockFetchCommunityDashboards.mockResolvedValue({
|
||||
page: 1,
|
||||
pages: 5,
|
||||
items: [createMockGnetDashboard()],
|
||||
});
|
||||
|
||||
await setup();
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Test Dashboard')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
expect(screen.queryByRole('button', { name: 'Check compatibility' })).not.toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should hide "Check Compatibility" button when no datasourceUid in URL', async () => {
|
||||
mockDatasourceType = 'prometheus';
|
||||
mockFetchCommunityDashboards.mockResolvedValue({
|
||||
page: 1,
|
||||
pages: 5,
|
||||
items: [createMockGnetDashboard()],
|
||||
});
|
||||
|
||||
// Render without datasourceUid in URL
|
||||
render(<CommunityDashboardSection onShowMapping={jest.fn()} datasourceType="prometheus" />, {
|
||||
historyOptions: {
|
||||
initialEntries: ['/test'],
|
||||
},
|
||||
});
|
||||
|
||||
// Wait for component to finish initial rendering
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByRole('button', { name: 'Check compatibility' })).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should call interpolation function and open modal when "Check Compatibility" is clicked', async () => {
|
||||
mockDatasourceType = 'prometheus';
|
||||
mockFetchCommunityDashboards.mockResolvedValue({
|
||||
page: 1,
|
||||
pages: 5,
|
||||
items: [createMockGnetDashboard()],
|
||||
});
|
||||
|
||||
const mockInterpolatedDashboard: DashboardJson = { title: 'Interpolated Dashboard' } as DashboardJson;
|
||||
mockInterpolateDashboard.mockResolvedValue(mockInterpolatedDashboard);
|
||||
|
||||
const { user } = await setup();
|
||||
|
||||
const checkCompatibilityButton = screen.getByRole('button', { name: 'Check compatibility' });
|
||||
await user.click(checkCompatibilityButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(mockInterpolateDashboard).toHaveBeenCalledWith(1, 'test-datasource-uid');
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Compatibility Modal')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should display error alert when interpolation fails', async () => {
|
||||
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
|
||||
mockDatasourceType = 'prometheus';
|
||||
mockFetchCommunityDashboards.mockResolvedValue({
|
||||
page: 1,
|
||||
pages: 5,
|
||||
items: [createMockGnetDashboard()],
|
||||
});
|
||||
|
||||
mockInterpolateDashboard.mockRejectedValue(
|
||||
new Error('Unable to automatically map all datasource inputs for this dashboard')
|
||||
);
|
||||
|
||||
const { user } = await setup();
|
||||
|
||||
const checkCompatibilityButton = screen.getByRole('button', { name: 'Check compatibility' });
|
||||
await user.click(checkCompatibilityButton);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Error loading dashboard')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
await waitFor(() => {
|
||||
expect(
|
||||
screen.getByText('Unable to automatically map all datasource inputs for this dashboard')
|
||||
).toBeInTheDocument();
|
||||
});
|
||||
|
||||
consoleErrorSpy.mockRestore();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -7,7 +7,10 @@ import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { Trans, t } from '@grafana/i18n';
|
||||
import { getDataSourceSrv } from '@grafana/runtime';
|
||||
import { Button, useStyles2, Stack, Grid, EmptyState, Alert, FilterInput, Box } from '@grafana/ui';
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
import { PluginDashboard } from 'app/types/plugins';
|
||||
|
||||
import { CompatibilityModal } from './CompatibilityModal';
|
||||
import { DashboardCard } from './DashboardCard';
|
||||
import { MappingContext } from './SuggestedDashboardsModal';
|
||||
import { fetchCommunityDashboards } from './api/dashboardLibraryApi';
|
||||
@@ -18,12 +21,13 @@ import {
|
||||
EVENT_LOCATIONS,
|
||||
SOURCE_ENTRY_POINTS,
|
||||
} from './interactions';
|
||||
import { GnetDashboard } from './types';
|
||||
import { GnetDashboard, isGnetDashboard } from './types';
|
||||
import {
|
||||
getThumbnailUrl,
|
||||
getLogoUrl,
|
||||
buildDashboardDetails,
|
||||
onUseCommunityDashboard,
|
||||
interpolateDashboardForCompatibilityCheck,
|
||||
COMMUNITY_PAGE_SIZE_QUERY,
|
||||
COMMUNITY_RESULT_SIZE,
|
||||
} from './utils/communityDashboardHelpers';
|
||||
@@ -44,6 +48,8 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
|
||||
const datasourceUid = searchParams.get('dashboardLibraryDatasourceUid');
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const hasTrackedLoaded = useRef(false);
|
||||
const [selectedDashboardJson, setSelectedDashboardJson] = useState<DashboardJson | null>(null);
|
||||
const [isCompatibilityModalOpen, setIsCompatibilityModalOpen] = useState(false);
|
||||
|
||||
const [debouncedSearchQuery, setDebouncedSearchQuery] = useState('');
|
||||
useDebounce(
|
||||
@@ -151,6 +157,31 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
|
||||
[response, datasourceUid, debouncedSearchQuery, onShowMapping]
|
||||
);
|
||||
|
||||
const [{ error: fetchError }, handleCheckCompatibility] = useAsyncFn(
|
||||
async (dashboard: PluginDashboard | GnetDashboard): Promise<void> => {
|
||||
// Type guard: Only GnetDashboards (community dashboards) are supported
|
||||
if (!isGnetDashboard(dashboard)) {
|
||||
console.warn('Compatibility check is only supported for community dashboards (GnetDashboard)');
|
||||
return;
|
||||
}
|
||||
|
||||
if (!datasourceUid || !response?.datasourceType) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const interpolatedDashboard = await interpolateDashboardForCompatibilityCheck(dashboard.id, datasourceUid);
|
||||
|
||||
setSelectedDashboardJson(interpolatedDashboard);
|
||||
setIsCompatibilityModalOpen(true);
|
||||
} catch (err) {
|
||||
console.error('Error preparing dashboard for compatibility check:', err);
|
||||
throw err;
|
||||
}
|
||||
},
|
||||
[datasourceUid, response]
|
||||
);
|
||||
|
||||
return (
|
||||
<Stack direction="column" gap={2} height="100%">
|
||||
{isPreviewDashboardError && (
|
||||
@@ -163,6 +194,20 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
|
||||
</Alert>
|
||||
</div>
|
||||
)}
|
||||
{fetchError && (
|
||||
<div>
|
||||
<Alert
|
||||
title={t('dashboard-library.compatibility-check-error-title', 'Error loading dashboard')}
|
||||
severity="error"
|
||||
>
|
||||
{fetchError.message ||
|
||||
t(
|
||||
'dashboard-library.compatibility-check-error-description',
|
||||
'Failed to load dashboard for compatibility check. Please try again.'
|
||||
)}
|
||||
</Alert>
|
||||
</div>
|
||||
)}
|
||||
<FilterInput
|
||||
placeholder={
|
||||
datasourceType
|
||||
@@ -263,6 +308,8 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
|
||||
isLogo={isLogo}
|
||||
details={details}
|
||||
kind="suggested_dashboard"
|
||||
showCompatibilityButton={!!datasourceUid && response?.datasourceType === 'prometheus'}
|
||||
onCheckCompatibility={handleCheckCompatibility}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
@@ -278,6 +325,19 @@ export const CommunityDashboardSection = ({ onShowMapping, datasourceType }: Pro
|
||||
</Stack>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Compatibility Modal - conditionally rendered */}
|
||||
{isCompatibilityModalOpen && selectedDashboardJson && datasourceUid && (
|
||||
<CompatibilityModal
|
||||
isOpen={isCompatibilityModalOpen}
|
||||
onDismiss={() => {
|
||||
setIsCompatibilityModalOpen(false);
|
||||
setSelectedDashboardJson(null);
|
||||
}}
|
||||
dashboardJson={selectedDashboardJson}
|
||||
datasourceUid={datasourceUid}
|
||||
/>
|
||||
)}
|
||||
</Stack>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -0,0 +1,330 @@
|
||||
import { render, screen, waitFor } from '@testing-library/react';
|
||||
import userEvent from '@testing-library/user-event';
|
||||
import { ComponentProps } from 'react';
|
||||
|
||||
import { getDataSourceSrv } from '@grafana/runtime';
|
||||
import { DataQuery } from '@grafana/schema';
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
|
||||
import { CompatibilityModal } from './CompatibilityModal';
|
||||
import { checkDashboardCompatibility, CompatibilityCheckResult } from './api/compatibilityApi';
|
||||
|
||||
// Mock dependencies
|
||||
jest.mock('@grafana/runtime', () => ({
|
||||
...jest.requireActual('@grafana/runtime'),
|
||||
getDataSourceSrv: jest.fn(),
|
||||
}));
|
||||
|
||||
jest.mock('./api/compatibilityApi', () => ({
|
||||
checkDashboardCompatibility: jest.fn(),
|
||||
}));
|
||||
|
||||
const mockGetDataSourceSrv = getDataSourceSrv as jest.MockedFunction<typeof getDataSourceSrv>;
|
||||
const mockCheckDashboardCompatibility = checkDashboardCompatibility as jest.MockedFunction<
|
||||
typeof checkDashboardCompatibility
|
||||
>;
|
||||
|
||||
// Suppress console.error for expected errors in tests
|
||||
const originalError = console.error;
|
||||
beforeAll(() => {
|
||||
console.error = jest.fn();
|
||||
});
|
||||
afterAll(() => {
|
||||
console.error = originalError;
|
||||
});
|
||||
|
||||
// Prometheus-specific query type (extends DataQuery)
|
||||
interface PrometheusQuery extends DataQuery {
|
||||
expr: string;
|
||||
}
|
||||
|
||||
// Test fixtures
|
||||
const createMockDashboard = (overrides: Partial<DashboardJson> = {}): DashboardJson => {
|
||||
// Create a minimal dashboard for testing purposes
|
||||
// Panels array is intentionally minimal - only includes fields needed for compatibility check
|
||||
const dashboard: DashboardJson = {
|
||||
title: 'Test Dashboard',
|
||||
uid: 'test-uid',
|
||||
schemaVersion: 39,
|
||||
version: 1,
|
||||
panels: [
|
||||
{
|
||||
id: 1,
|
||||
type: 'graph',
|
||||
title: 'CPU Usage',
|
||||
datasource: {
|
||||
type: 'prometheus',
|
||||
uid: 'prometheus-uid-123',
|
||||
},
|
||||
targets: [
|
||||
{
|
||||
refId: 'A',
|
||||
expr: 'rate(cpu_usage_total[5m])',
|
||||
} as PrometheusQuery,
|
||||
],
|
||||
},
|
||||
] as unknown as DashboardJson['panels'],
|
||||
...overrides,
|
||||
};
|
||||
return dashboard;
|
||||
};
|
||||
|
||||
const createMockCompatibilityResult = (score = 100): CompatibilityCheckResult => ({
|
||||
compatibilityScore: score,
|
||||
datasourceResults: [
|
||||
{
|
||||
uid: 'prometheus-uid',
|
||||
type: 'prometheus',
|
||||
name: 'Test Prometheus',
|
||||
totalQueries: 5,
|
||||
checkedQueries: 5,
|
||||
totalMetrics: 10,
|
||||
foundMetrics: score === 100 ? 10 : Math.floor(10 * (score / 100)),
|
||||
missingMetrics: score === 100 ? [] : ['missing_metric_1', 'missing_metric_2'],
|
||||
compatibilityScore: score,
|
||||
queryBreakdown: [],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const defaultProps: ComponentProps<typeof CompatibilityModal> = {
|
||||
isOpen: true,
|
||||
onDismiss: jest.fn(),
|
||||
dashboardJson: createMockDashboard(),
|
||||
datasourceUid: 'prometheus-uid',
|
||||
};
|
||||
|
||||
describe('CompatibilityModal', () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks();
|
||||
|
||||
// Default mock: datasource found
|
||||
mockGetDataSourceSrv.mockReturnValue({
|
||||
getInstanceSettings: jest.fn().mockReturnValue({
|
||||
uid: 'prometheus-uid',
|
||||
type: 'prometheus',
|
||||
name: 'Test Prometheus',
|
||||
}),
|
||||
} as unknown as ReturnType<typeof getDataSourceSrv>);
|
||||
|
||||
// Default mock: successful API call
|
||||
mockCheckDashboardCompatibility.mockResolvedValue(createMockCompatibilityResult(100));
|
||||
});
|
||||
|
||||
describe('Modal visibility', () => {
|
||||
it('should render modal when isOpen is true', async () => {
|
||||
render(<CompatibilityModal {...defaultProps} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Dashboard Compatibility Check for Test Dashboard')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should not render modal content when isOpen is false', async () => {
|
||||
render(<CompatibilityModal {...defaultProps} isOpen={false} />);
|
||||
|
||||
// Wait for any async updates to settle
|
||||
await waitFor(() => {
|
||||
expect(screen.queryByText('Dashboard Compatibility Check for Test Dashboard')).not.toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should include dashboard title in modal title', async () => {
|
||||
const dashboardWithCustomTitle = createMockDashboard({ title: 'My Custom Dashboard' });
|
||||
render(<CompatibilityModal {...defaultProps} dashboardJson={dashboardWithCustomTitle} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Dashboard Compatibility Check for My Custom Dashboard')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Loading state', () => {
|
||||
it('should show loading spinner and message while checking compatibility', async () => {
|
||||
// Make API call pending
|
||||
mockCheckDashboardCompatibility.mockImplementation(
|
||||
() => new Promise((resolve) => setTimeout(() => resolve(createMockCompatibilityResult(100)), 1000))
|
||||
);
|
||||
|
||||
render(<CompatibilityModal {...defaultProps} />);
|
||||
|
||||
// Should show loading state immediately
|
||||
expect(screen.getByText('Checking compatibility...')).toBeInTheDocument();
|
||||
expect(screen.getByTestId('Spinner')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error state', () => {
|
||||
it('should show error alert when dashboard is v2 schema', async () => {
|
||||
// Create a v2 dashboard (has 'elements' property instead of 'panels')
|
||||
const v2Dashboard = { elements: {}, schemaVersion: 40 } as unknown as DashboardJson;
|
||||
|
||||
render(<CompatibilityModal {...defaultProps} dashboardJson={v2Dashboard} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Error checking compatibility')).toBeInTheDocument();
|
||||
expect(screen.getByText('Failed to check dashboard compatibility. Please try again.')).toBeInTheDocument();
|
||||
});
|
||||
|
||||
// API should not be called for v2 dashboards
|
||||
expect(mockCheckDashboardCompatibility).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should show error alert when datasource is not found', async () => {
|
||||
mockGetDataSourceSrv.mockReturnValue({
|
||||
getInstanceSettings: jest.fn().mockReturnValue(null),
|
||||
} as unknown as ReturnType<typeof getDataSourceSrv>);
|
||||
|
||||
render(<CompatibilityModal {...defaultProps} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Error checking compatibility')).toBeInTheDocument();
|
||||
expect(screen.getByText('Failed to check dashboard compatibility. Please try again.')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should show error alert when API call fails', async () => {
|
||||
mockCheckDashboardCompatibility.mockRejectedValue(new Error('API Error'));
|
||||
|
||||
render(<CompatibilityModal {...defaultProps} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('Error checking compatibility')).toBeInTheDocument();
|
||||
expect(screen.getByText('Failed to check dashboard compatibility. Please try again.')).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
it('should show retry button in error state', async () => {
|
||||
mockCheckDashboardCompatibility.mockRejectedValue(new Error('API Error'));
|
||||
|
||||
render(<CompatibilityModal {...defaultProps} />);
|
||||
|
||||
await waitFor(() => {
|
||||
expect(screen.getByRole('button', { name: /retry/i })).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
// End-to-end retry flow: a failed first call shows the error state, clicking
// Retry re-runs the check, and a successful second call renders the score.
it('should retry API call when retry button is clicked', async () => {
  const user = userEvent.setup();

  // First call fails
  mockCheckDashboardCompatibility.mockRejectedValueOnce(new Error('API Error'));
  // Second call succeeds
  mockCheckDashboardCompatibility.mockResolvedValueOnce(createMockCompatibilityResult(100));

  render(<CompatibilityModal {...defaultProps} />);

  // Wait for error state
  await waitFor(() => {
    expect(screen.getByText('Error checking compatibility')).toBeInTheDocument();
  });

  // Click retry button
  const retryButton = screen.getByRole('button', { name: /retry/i });
  await user.click(retryButton);

  // Should eventually show success state (loading may be too fast to catch)
  await waitFor(() => {
    expect(screen.getByText('Compatibility Score')).toBeInTheDocument();
    expect(screen.getByText('100%')).toBeInTheDocument();
  });

  // API should have been called twice
  expect(mockCheckDashboardCompatibility).toHaveBeenCalledTimes(2);
});
|
||||
});
|
||||
|
||||
describe('Success state', () => {
  // Shared driver: resolve the compatibility mock with the given score, render
  // the modal, and assert the score heading plus the formatted percentage appear.
  const renderAndExpectScore = async (score: number, label: string) => {
    mockCheckDashboardCompatibility.mockResolvedValue(createMockCompatibilityResult(score));

    render(<CompatibilityModal {...defaultProps} />);

    await waitFor(() => {
      expect(screen.getByText('Compatibility Score')).toBeInTheDocument();
      expect(screen.getByText(label)).toBeInTheDocument();
    });
  };

  it('should show compatibility score when check succeeds', async () => {
    await renderAndExpectScore(100, '100%');
  });

  it('should display partial compatibility score', async () => {
    await renderAndExpectScore(75, '75%');
  });

  it('should display low compatibility score', async () => {
    await renderAndExpectScore(25, '25%');
  });
});
|
||||
|
||||
// Covers when and how the modal invokes the compatibility API: payload shape,
// suppression while closed, and triggering on the closed -> open transition.
describe('API call behavior', () => {
  it('should call checkDashboardCompatibility with correct parameters', async () => {
    const dashboardJson = createMockDashboard();
    render(<CompatibilityModal {...defaultProps} dashboardJson={dashboardJson} />);

    // Expected mapping comes from the datasource srv mock configured in setup
    // (uid/type/name of the resolved instance settings).
    await waitFor(() => {
      expect(mockCheckDashboardCompatibility).toHaveBeenCalledWith(dashboardJson, [
        {
          uid: 'prometheus-uid',
          type: 'prometheus',
          name: 'Test Prometheus',
        },
      ]);
    });
  });

  it('should not call API when modal is closed', async () => {
    render(<CompatibilityModal {...defaultProps} isOpen={false} />);

    // Wait for any async updates to settle
    await waitFor(() => {
      expect(mockCheckDashboardCompatibility).not.toHaveBeenCalled();
    });
  });

  it('should trigger API call when modal opens', async () => {
    const { rerender } = render(<CompatibilityModal {...defaultProps} isOpen={false} />);

    // API should not be called yet
    expect(mockCheckDashboardCompatibility).not.toHaveBeenCalled();

    // Open modal
    rerender(<CompatibilityModal {...defaultProps} isOpen={true} />);

    // API should now be called
    await waitFor(() => {
      expect(mockCheckDashboardCompatibility).toHaveBeenCalledTimes(1);
    });
  });
});
|
||||
|
||||
// Basic modal chrome behavior: dismissing via the header close control.
describe('Modal interactions', () => {
  it('should call onDismiss when modal is closed', async () => {
    const onDismiss = jest.fn();
    render(<CompatibilityModal {...defaultProps} onDismiss={onDismiss} />);

    // Wait for modal to render
    await waitFor(() => {
      expect(screen.getByText('Dashboard Compatibility Check for Test Dashboard')).toBeInTheDocument();
    });

    // Find and click close button (X button in modal header)
    const closeButton = screen.getByLabelText('Close');
    await userEvent.click(closeButton);

    expect(onDismiss).toHaveBeenCalledTimes(1);
  });
});
|
||||
});
|
||||
@@ -0,0 +1,200 @@
|
||||
import { css } from '@emotion/css';
|
||||
import { useAsyncRetry } from 'react-use';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { Trans, t } from '@grafana/i18n';
|
||||
import { getDataSourceSrv } from '@grafana/runtime';
|
||||
import { Modal, useStyles2, Stack, Alert, Button, Spinner, Text } from '@grafana/ui';
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
|
||||
import { checkDashboardCompatibility } from './api/compatibilityApi';
|
||||
|
||||
/**
 * Props for {@link CompatibilityModal}.
 */
interface CompatibilityModalProps {
  /** Controls modal visibility */
  isOpen: boolean;
  /** Handler called when modal is dismissed */
  onDismiss: () => void;
  /** Dashboard JSON to check (v1 or v2 schema; v2 is currently rejected at runtime) */
  dashboardJson: DashboardJson;
  /** UID of the datasource to check compatibility against (resolved via getDataSourceSrv) */
  datasourceUid: string;
}
|
||||
|
||||
/**
 * Modal component that checks dashboard compatibility with a datasource.
 *
 * This modal is self-contained and handles its own data fetching. When opened,
 * it automatically triggers a compatibility check by calling the dashboard validator
 * backend API. It displays loading, error, or success states accordingly.
 *
 * This component is generic and works with any dashboard source:
 * - Community dashboards (GnetDashboard)
 * - Plugin-provided dashboards (PluginDashboard)
 * - User-created dashboards
 *
 * Features #12-15 will add detailed result displays (color-coded scores, missing
 * metrics lists, and query breakdowns).
 */
export const CompatibilityModal = ({ isOpen, onDismiss, dashboardJson, datasourceUid }: CompatibilityModalProps) => {
  const styles = useStyles2(getStyles);

  // Fetch compatibility results when modal opens. useAsyncRetry re-runs the
  // async fn whenever a dependency changes and exposes `retry` for the error UI.
  const {
    value: result,
    loading,
    error,
    retry,
  } = useAsyncRetry(async () => {
    // Don't trigger API call if modal is closed
    if (!isOpen) {
      return null;
    }

    // Validate dashboard is v1 schema (reject v2 for MVP)
    // isDashboardV2Spec checks for 'elements' property at runtime
    if ('elements' in dashboardJson) {
      throw new Error(
        t(
          'compatibility-modal.v2-not-supported',
          'Dashboard v2 schema is not yet supported. Compatibility checking is currently only available for v1 dashboards. Support for v2 dashboards is coming soon.'
        )
      );
    }

    // Fetch datasource details to build mapping
    const ds = getDataSourceSrv().getInstanceSettings(datasourceUid);
    if (!ds) {
      throw new Error(
        t('compatibility-modal.datasource-not-found', 'Datasource not found with UID: {{uid}}', {
          uid: datasourceUid,
        })
      );
    }

    // Call compatibility check API with validated dashboard
    return await checkDashboardCompatibility(dashboardJson, [
      {
        uid: ds.uid,
        type: ds.type,
        name: ds.name,
      },
    ]);
  }, [isOpen, dashboardJson, datasourceUid]);

  return (
    <Modal
      title={t('compatibility-modal.title', 'Dashboard Compatibility Check for {{dashboardName}}', {
        dashboardName: dashboardJson.title || 'Dashboard',
      })}
      isOpen={isOpen}
      onDismiss={onDismiss}
      className={styles.modal}
      contentClassName={styles.modalContent}
    >
      <div className={styles.contentContainer}>
        {/* Loading State */}
        {loading && (
          <Stack direction="column" alignItems="center" gap={2}>
            <Spinner size="xl" />
            <Text>
              <Trans i18nKey="compatibility-modal.checking">Checking compatibility...</Trans>
            </Text>
          </Stack>
        )}

        {/* Error State */}
        {!loading && error && (
          <Stack direction="column" alignItems="center" gap={2}>
            <Alert title={t('compatibility-modal.error-title', 'Error checking compatibility')} severity="error">
              <Trans i18nKey="compatibility-modal.error-description">
                Failed to check dashboard compatibility. Please try again.
              </Trans>
            </Alert>
            <Button variant="secondary" onClick={retry}>
              <Trans i18nKey="compatibility-modal.retry">Retry</Trans>
            </Button>
          </Stack>
        )}

        {/* Success State - Placeholder for Features #12-15 */}
        {!loading && !error && result && (
          <Stack direction="column" gap={3}>
            <div>
              <Text element="h3">
                <Trans i18nKey="compatibility-modal.score-title">Compatibility Score</Trans>
              </Text>
              {/* NOTE(review): compatibilityApi.ts documents compatibilityScore as a
                  0-100 value, which would make the `* 100` below off by a factor of
                  100. Confirm the scale the backend actually returns (and what
                  createMockCompatibilityResult produces) before shipping. */}
              <Text element="p">
                <pre>{result.compatibilityScore * 100}%</pre>
              </Text>

              {/* Temporary raw-result dump until Features #12-15 land. */}
              <Text element="p">
                <pre className={styles.jsonPreview}>{JSON.stringify(result, null, 2)}</pre>
              </Text>
            </div>

            {/* Feature #12: CompatibilityScoreDisplay with color coding */}
            {/* - Large score display with color coding (green >=80%, yellow 50-79%, red <50%) */}
            {/* - Icon based on score range (check-circle, warning, exclamation-circle) */}
            {/* - Descriptive text: 'Highly Compatible', 'Partially Compatible', 'Low Compatibility' */}

            {/* Feature #13: DatasourceResultSection component */}
            {/* - Display datasource name and type */}
            {/* - Show total queries vs checked queries count */}
            {/* - Show total metrics vs found metrics count */}
            {/* - Display number of missing metrics */}

            {/* Feature #14: MissingMetricsList component */}
            {/* - Collapsible/expandable section with missing metrics */}
            {/* - Show count of missing metrics in header */}
            {/* - Display bullet list of missing metric names when expanded */}
            {/* - Add copy-to-clipboard button for metric names */}
            {/* - Show 'All metrics found!' message when missingMetrics array is empty */}

            {/* Feature #15: QueryBreakdownTable component */}
            {/* - Collapsible section with 'Show panel breakdown' toggle */}
            {/* - Table with columns: Panel Title, Panel ID, Query Ref, Metrics Found/Total, Compatibility % */}
            {/* - Color-code compatibility percentage in table cells */}
            {/* - Add sorting capability by compatibility score */}
            {/* - Show expandable row details with missing metrics list per query */}
          </Stack>
        )}
      </div>
    </Modal>
  );
};
|
||||
|
||||
function getStyles(theme: GrafanaTheme2) {
|
||||
return {
|
||||
modal: css({
|
||||
width: '90%',
|
||||
maxWidth: '1200px',
|
||||
height: '80vh',
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
}),
|
||||
modalContent: css({
|
||||
display: 'flex',
|
||||
flexDirection: 'column',
|
||||
overflow: 'hidden',
|
||||
padding: theme.spacing(3),
|
||||
height: '100%',
|
||||
}),
|
||||
contentContainer: css({
|
||||
flex: 1,
|
||||
overflow: 'auto',
|
||||
display: 'flex',
|
||||
alignItems: 'center',
|
||||
justifyContent: 'center',
|
||||
}),
|
||||
jsonPreview: css({
|
||||
maxHeight: '400px',
|
||||
overflow: 'auto',
|
||||
padding: theme.spacing(2),
|
||||
backgroundColor: theme.colors.background.secondary,
|
||||
borderRadius: theme.shape.radius.default,
|
||||
fontSize: theme.typography.bodySmall.fontSize,
|
||||
whiteSpace: 'pre-wrap',
|
||||
wordBreak: 'break-word',
|
||||
}),
|
||||
};
|
||||
}
|
||||
@@ -313,4 +313,153 @@ describe('DashboardCard', () => {
|
||||
expect(screen.getByRole('heading', { name: 'Community Dashboard' })).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
|
||||
// DashboardCard's optional "Check compatibility" action: visibility gating
// (flag + callback both required), click wiring, event isolation from the card's
// own onClick, tooltip, dashboard-type handling, and action ordering.
describe('Compatibility button', () => {
  it('should show compatibility button when showCompatibilityButton={true} and onCheckCompatibility is provided', () => {
    const mockOnCheckCompatibility = jest.fn();
    render(
      <DashboardCard
        title="Test Dashboard"
        dashboard={createMockPluginDashboard()}
        onClick={mockOnClick}
        showCompatibilityButton={true}
        onCheckCompatibility={mockOnCheckCompatibility}
        kind="suggested_dashboard"
      />
    );

    expect(screen.getByRole('button', { name: 'Check compatibility' })).toBeInTheDocument();
  });

  it('should not show compatibility button when showCompatibilityButton={false}', () => {
    const mockOnCheckCompatibility = jest.fn();
    render(
      <DashboardCard
        title="Test Dashboard"
        dashboard={createMockPluginDashboard()}
        onClick={mockOnClick}
        showCompatibilityButton={false}
        onCheckCompatibility={mockOnCheckCompatibility}
        kind="suggested_dashboard"
      />
    );

    expect(screen.queryByRole('button', { name: 'Check compatibility' })).not.toBeInTheDocument();
  });

  it('should not show compatibility button when onCheckCompatibility is not provided', () => {
    render(
      <DashboardCard
        title="Test Dashboard"
        dashboard={createMockPluginDashboard()}
        onClick={mockOnClick}
        showCompatibilityButton={true}
        kind="suggested_dashboard"
      />
    );

    expect(screen.queryByRole('button', { name: 'Check compatibility' })).not.toBeInTheDocument();
  });

  it('should call onCheckCompatibility with dashboard object when button is clicked', async () => {
    const mockOnCheckCompatibility = jest.fn();
    const mockDashboard = createMockPluginDashboard();
    const { user } = render(
      <DashboardCard
        title="Test Dashboard"
        dashboard={mockDashboard}
        onClick={mockOnClick}
        showCompatibilityButton={true}
        onCheckCompatibility={mockOnCheckCompatibility}
        kind="suggested_dashboard"
      />
    );

    await user.click(screen.getByRole('button', { name: 'Check compatibility' }));

    expect(mockOnCheckCompatibility).toHaveBeenCalledTimes(1);
    expect(mockOnCheckCompatibility).toHaveBeenCalledWith(mockDashboard);
  });

  it('should prevent event propagation when compatibility button is clicked', async () => {
    const mockOnCheckCompatibility = jest.fn();
    const mockParentClick = jest.fn();
    // Wrap the card in a clickable parent to prove stopPropagation is called.
    const { user } = render(
      <div onClick={mockParentClick}>
        <DashboardCard
          title="Test Dashboard"
          dashboard={createMockPluginDashboard()}
          onClick={mockOnClick}
          showCompatibilityButton={true}
          onCheckCompatibility={mockOnCheckCompatibility}
          kind="suggested_dashboard"
        />
      </div>
    );

    await user.click(screen.getByRole('button', { name: 'Check compatibility' }));

    expect(mockParentClick).not.toHaveBeenCalled();
    expect(mockOnCheckCompatibility).toHaveBeenCalledTimes(1);
  });

  it('should display compatibility tooltip on hover', async () => {
    const mockOnCheckCompatibility = jest.fn();
    const { user } = render(
      <DashboardCard
        title="Test Dashboard"
        dashboard={createMockPluginDashboard()}
        onClick={mockOnClick}
        showCompatibilityButton={true}
        onCheckCompatibility={mockOnCheckCompatibility}
        kind="suggested_dashboard"
      />
    );

    const button = screen.getByRole('button', { name: 'Check compatibility' });
    await user.hover(button);

    // findByText waits for the tooltip portal to appear.
    expect(await screen.findByText('Check dashboard compatibility with your datasource')).toBeInTheDocument();
  });

  it('should render and work with GnetDashboard type', async () => {
    const mockOnCheckCompatibility = jest.fn();
    const mockDashboard = createMockGnetDashboard({ name: 'Community Dashboard' });
    const { user } = render(
      <DashboardCard
        title="Community Dashboard"
        dashboard={mockDashboard}
        onClick={mockOnClick}
        showCompatibilityButton={true}
        onCheckCompatibility={mockOnCheckCompatibility}
        kind="suggested_dashboard"
      />
    );

    await user.click(screen.getByRole('button', { name: 'Check compatibility' }));

    expect(mockOnCheckCompatibility).toHaveBeenCalledWith(mockDashboard);
  });

  it('should render buttons in correct order: primary, details, compatibility', () => {
    const mockOnCheckCompatibility = jest.fn();
    const details = createMockDetails();
    render(
      <DashboardCard
        title="Test Dashboard"
        dashboard={createMockPluginDashboard()}
        onClick={mockOnClick}
        details={details}
        showCompatibilityButton={true}
        onCheckCompatibility={mockOnCheckCompatibility}
        kind="suggested_dashboard"
      />
    );

    // Icon-only buttons are identified by aria-label rather than text content.
    const buttons = screen.getAllByRole('button');
    expect(buttons[0]).toHaveTextContent('Use dashboard');
    expect(buttons[1]).toHaveAttribute('aria-label', 'Details');
    expect(buttons[2]).toHaveAttribute('aria-label', 'Check compatibility');
  });
});
|
||||
});
|
||||
|
||||
@@ -7,7 +7,7 @@ import { Badge, Box, Button, Card, IconButton, Text, TextLink, Tooltip, useStyle
|
||||
import { attachSkeleton, SkeletonComponent } from '@grafana/ui/unstable';
|
||||
import { PluginDashboard } from 'app/types/plugins';
|
||||
|
||||
import { GnetDashboard } from './types';
|
||||
import { GnetDashboard, isGnetDashboard } from './types';
|
||||
|
||||
interface Details {
|
||||
id: string;
|
||||
@@ -28,6 +28,8 @@ interface Props {
|
||||
showDatasourceProvidedBadge?: boolean;
|
||||
dimThumbnail?: boolean; // Apply 50% opacity to thumbnail when badge is shown
|
||||
kind: 'template_dashboard' | 'suggested_dashboard';
|
||||
onCheckCompatibility?: (dashboard: PluginDashboard | GnetDashboard) => void | Promise<void>;
|
||||
showCompatibilityButton?: boolean;
|
||||
}
|
||||
|
||||
function DashboardCardComponent({
|
||||
@@ -40,6 +42,8 @@ function DashboardCardComponent({
|
||||
showDatasourceProvidedBadge,
|
||||
dimThumbnail,
|
||||
kind,
|
||||
onCheckCompatibility,
|
||||
showCompatibilityButton,
|
||||
}: Props) {
|
||||
const styles = useStyles2(getStyles);
|
||||
|
||||
@@ -99,6 +103,30 @@ function DashboardCardComponent({
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
{showCompatibilityButton && onCheckCompatibility && (
|
||||
<Tooltip
|
||||
content={t(
|
||||
'dashboard-library.card.check-compatibility-tooltip',
|
||||
'Check dashboard compatibility with your datasource'
|
||||
)}
|
||||
placement="top"
|
||||
>
|
||||
<Button
|
||||
variant="secondary"
|
||||
icon="check-circle"
|
||||
onClick={(e) => {
|
||||
e.stopPropagation();
|
||||
// Only call compatibility check for GnetDashboards (community dashboards)
|
||||
if (isGnetDashboard(dashboard)) {
|
||||
onCheckCompatibility(dashboard);
|
||||
} else {
|
||||
console.warn('Compatibility check is only supported for community dashboards (GnetDashboard)');
|
||||
}
|
||||
}}
|
||||
aria-label={t('dashboard-library.card.check-compatibility-button', 'Check compatibility')}
|
||||
/>
|
||||
</Tooltip>
|
||||
)}
|
||||
</Card.Actions>
|
||||
</Card>
|
||||
);
|
||||
|
||||
@@ -0,0 +1,362 @@
|
||||
import { getAPINamespace } from '@grafana/api-clients';
|
||||
import { BackendSrv, getBackendSrv } from '@grafana/runtime';
|
||||
import { DataQuery } from '@grafana/schema';
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
|
||||
import { checkDashboardCompatibility, CompatibilityCheckResult, DatasourceMapping } from './compatibilityApi';
|
||||
|
||||
// Mock dependencies: replace the runtime backend service and the API-namespace
// helper so tests control the HTTP layer and URL construction.
jest.mock('@grafana/runtime', () => ({
  getBackendSrv: jest.fn(),
}));

jest.mock('@grafana/api-clients', () => ({
  getAPINamespace: jest.fn(),
}));

// Typed handles to the auto-mocked module functions, configured per-test in beforeEach.
const mockGetBackendSrv = getBackendSrv as jest.MockedFunction<typeof getBackendSrv>;
const mockGetAPINamespace = getAPINamespace as jest.MockedFunction<typeof getAPINamespace>;
|
||||
|
||||
// Helper to create mock BackendSrv
|
||||
const createMockBackendSrv = (overrides: Partial<BackendSrv> = {}): BackendSrv =>
|
||||
({
|
||||
post: jest.fn(),
|
||||
...overrides,
|
||||
}) as unknown as BackendSrv;
|
||||
|
||||
// Prometheus-specific query type (extends DataQuery).
// Used so fixture targets can carry a PromQL `expr` without pulling in the
// real Prometheus plugin types.
interface PrometheusQuery extends DataQuery {
  // Raw PromQL expression string.
  expr: string;
}
|
||||
|
||||
// Test fixtures
// Builds a two-panel v1 dashboard (schemaVersion 39) whose panels both target
// the same Prometheus datasource ('prometheus-uid-123') with one query each.
// `overrides` is spread last, so any top-level field can be replaced per test.
const createMockDashboard = (overrides: Partial<DashboardJson> = {}): DashboardJson => {
  // Create a minimal dashboard for testing purposes
  // Panels array is intentionally minimal - only includes fields needed for compatibility check
  const dashboard: DashboardJson = {
    title: 'Test Dashboard',
    uid: 'test-uid',
    schemaVersion: 39,
    version: 1,
    panels: [
      {
        id: 1,
        type: 'graph',
        title: 'CPU Usage',
        datasource: {
          type: 'prometheus',
          uid: 'prometheus-uid-123',
        },
        targets: [
          {
            refId: 'A',
            expr: 'rate(cpu_usage_total[5m])',
          } as PrometheusQuery,
        ],
      },
      {
        id: 2,
        type: 'graph',
        title: 'Memory Usage',
        datasource: {
          type: 'prometheus',
          uid: 'prometheus-uid-123',
        },
        targets: [
          {
            refId: 'A',
            expr: 'memory_usage_bytes',
          } as PrometheusQuery,
        ],
      },
      // NOTE(review): the `as unknown as` cast bypasses panel type checking
      // entirely; if DashboardJson['panels'] tightens, this fixture will not
      // flag the drift — consider building panels with the real type.
    ] as unknown as DashboardJson['panels'],
    ...overrides,
  };
  return dashboard;
};
|
||||
|
||||
const createMockDatasourceMappings = (): DatasourceMapping[] => [
|
||||
{
|
||||
uid: 'prometheus-uid-123',
|
||||
type: 'prometheus',
|
||||
name: 'Production Prometheus',
|
||||
},
|
||||
];
|
||||
|
||||
// Unit tests for the compatibilityApi client. The backend is fully mocked via
// getBackendSrv().post; getAPINamespace controls the namespace segment of the
// request URL. Errors are expected to be logged and re-thrown unchanged.
describe('compatibilityApi', () => {
  let mockPost: jest.MockedFunction<BackendSrv['post']>;
  let consoleErrorSpy: jest.SpyInstance;

  beforeEach(() => {
    mockPost = jest.fn();
    mockGetBackendSrv.mockReturnValue(
      createMockBackendSrv({
        post: mockPost,
      })
    );
    // Mock getAPINamespace to return 'default' (typical dev environment)
    mockGetAPINamespace.mockReturnValue('default');
    // Mock console.error to prevent test failures
    consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
  });

  afterEach(() => {
    jest.clearAllMocks();
    consoleErrorSpy.mockRestore();
  });

  describe('checkDashboardCompatibility', () => {
    // Happy path: fully compatible dashboard; also pins the exact URL, payload
    // shape, and the showErrorAlert:false request option.
    it('should successfully check compatibility with high score (100%)', async () => {
      const mockResponse: CompatibilityCheckResult = {
        compatibilityScore: 100,
        datasourceResults: [
          {
            uid: 'prometheus-uid-123',
            type: 'prometheus',
            name: 'Production Prometheus',
            totalQueries: 2,
            checkedQueries: 2,
            totalMetrics: 2,
            foundMetrics: 2,
            missingMetrics: [],
            compatibilityScore: 100,
            queryBreakdown: [
              {
                panelTitle: 'CPU Usage',
                panelID: 1,
                queryRefId: 'A',
                totalMetrics: 1,
                foundMetrics: 1,
                missingMetrics: [],
                compatibilityScore: 100,
              },
              {
                panelTitle: 'Memory Usage',
                panelID: 2,
                queryRefId: 'A',
                totalMetrics: 1,
                foundMetrics: 1,
                missingMetrics: [],
                compatibilityScore: 100,
              },
            ],
          },
        ],
      };

      mockPost.mockResolvedValue(mockResponse);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      const result = await checkDashboardCompatibility(dashboard, mappings);

      expect(result).toEqual(mockResponse);
      expect(mockPost).toHaveBeenCalledWith(
        '/apis/dashvalidator.grafana.app/v1alpha1/namespaces/default/check',
        {
          dashboardJson: dashboard,
          datasourceMappings: mappings,
        },
        {
          showErrorAlert: false,
        }
      );
    });

    // Partially compatible result: one of two metrics missing.
    it('should successfully check compatibility with partial score (50%)', async () => {
      const mockResponse: CompatibilityCheckResult = {
        compatibilityScore: 50,
        datasourceResults: [
          {
            uid: 'prometheus-uid-123',
            type: 'prometheus',
            name: 'Production Prometheus',
            totalQueries: 2,
            checkedQueries: 2,
            totalMetrics: 2,
            foundMetrics: 1,
            missingMetrics: ['http_request_duration_seconds'],
            compatibilityScore: 50,
            queryBreakdown: [
              {
                panelTitle: 'CPU Usage',
                panelID: 1,
                queryRefId: 'A',
                totalMetrics: 1,
                foundMetrics: 1,
                missingMetrics: [],
                compatibilityScore: 100,
              },
              {
                panelTitle: 'Memory Usage',
                panelID: 2,
                queryRefId: 'A',
                totalMetrics: 1,
                foundMetrics: 0,
                missingMetrics: ['http_request_duration_seconds'],
                compatibilityScore: 0,
              },
            ],
          },
        ],
      };

      mockPost.mockResolvedValue(mockResponse);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      const result = await checkDashboardCompatibility(dashboard, mappings);

      expect(result).toEqual(mockResponse);
      expect(result.compatibilityScore).toBe(50);
      expect(result.datasourceResults[0].missingMetrics).toContain('http_request_duration_seconds');
    });

    it('should handle HTTP 404 error (datasource not found)', async () => {
      const error404 = {
        status: 404,
        data: {
          message: 'Datasource not found',
          code: 'datasource_not_found',
        },
      };

      mockPost.mockRejectedValue(error404);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      // Should re-throw original error from getBackendSrv
      await expect(checkDashboardCompatibility(dashboard, mappings)).rejects.toEqual(error404);

      // Verify error was logged
      expect(consoleErrorSpy).toHaveBeenCalledWith('Dashboard compatibility check failed:', error404);
    });

    it('should handle HTTP 401 error (authentication failure)', async () => {
      const error401 = {
        status: 401,
        data: {
          message: 'Authentication failed for datasource',
          code: 'datasource_auth_failed',
        },
      };

      mockPost.mockRejectedValue(error401);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      await expect(checkDashboardCompatibility(dashboard, mappings)).rejects.toEqual(error401);
    });

    it('should handle HTTP 503 error (datasource unreachable)', async () => {
      const error503 = {
        status: 503,
        data: {
          message: 'Datasource is unreachable',
          code: 'datasource_unreachable',
        },
      };

      mockPost.mockRejectedValue(error503);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      await expect(checkDashboardCompatibility(dashboard, mappings)).rejects.toEqual(error503);
    });

    it('should handle HTTP 502 error (invalid Prometheus API response)', async () => {
      const error502 = {
        status: 502,
        data: {
          message: 'Invalid response from Prometheus API',
          code: 'api_invalid_response',
        },
      };

      mockPost.mockRejectedValue(error502);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      await expect(checkDashboardCompatibility(dashboard, mappings)).rejects.toEqual(error502);
    });

    // Rejections with no status/data structure must also pass through unchanged.
    it('should handle network error without structured error data', async () => {
      const networkError = {
        message: 'Network request failed',
      };

      mockPost.mockRejectedValue(networkError);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      await expect(checkDashboardCompatibility(dashboard, mappings)).rejects.toEqual(networkError);
    });

    // The namespace segment of the URL must come from getAPINamespace(), not a constant.
    it('should use namespace from getAPINamespace()', async () => {
      const mockResponse: CompatibilityCheckResult = {
        compatibilityScore: 100,
        datasourceResults: [],
      };

      mockPost.mockResolvedValue(mockResponse);

      // Change namespace returned by getAPINamespace
      mockGetAPINamespace.mockReturnValue('custom-namespace');

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      await checkDashboardCompatibility(dashboard, mappings);

      expect(mockPost).toHaveBeenCalledWith(
        '/apis/dashvalidator.grafana.app/v1alpha1/namespaces/custom-namespace/check',
        expect.any(Object),
        expect.any(Object)
      );

      // Reset namespace for other tests
      mockGetAPINamespace.mockReturnValue('default');
    });

    it('should handle generic error without proper structure', async () => {
      const genericError = 'Something went wrong';

      mockPost.mockRejectedValue(genericError);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      await expect(checkDashboardCompatibility(dashboard, mappings)).rejects.toEqual(genericError);
    });

    // The caller (modal) renders its own error UI, so the global alert is suppressed.
    it('should disable automatic error alerts', async () => {
      const mockResponse: CompatibilityCheckResult = {
        compatibilityScore: 100,
        datasourceResults: [],
      };

      mockPost.mockResolvedValue(mockResponse);

      const dashboard = createMockDashboard();
      const mappings = createMockDatasourceMappings();

      await checkDashboardCompatibility(dashboard, mappings);

      // Verify that showErrorAlert is explicitly set to false
      expect(mockPost).toHaveBeenCalledWith(
        expect.any(String),
        expect.any(Object),
        expect.objectContaining({
          showErrorAlert: false,
        })
      );
    });
  });
});
|
||||
@@ -0,0 +1,144 @@
|
||||
import { getAPINamespace } from '@grafana/api-clients';
|
||||
import { getBackendSrv } from '@grafana/runtime';
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
|
||||
/**
 * Represents a datasource mapping for compatibility checking.
 * Maps datasource references found in the dashboard JSON to actual datasource
 * instances the backend should query.
 */
export interface DatasourceMapping {
  /** Unique identifier of the datasource instance */
  uid: string;
  /** Type of datasource (e.g., 'prometheus', 'loki') */
  type: string;
  /** Optional human-readable name, used for display in results */
  name?: string;
}
|
||||
|
||||
/**
 * Request body for the dashboard compatibility check API call
 * (POST .../dashvalidator.grafana.app/.../check).
 */
export interface CheckCompatibilityRequest {
  /** Complete dashboard JSON object (supports both v1 and v2 schemas) */
  dashboardJson: DashboardJson;
  /** Array of datasource mappings to check compatibility against */
  datasourceMappings: DatasourceMapping[];
}
|
||||
|
||||
/**
 * Breakdown of compatibility metrics for a single query within a panel.
 * One entry exists per (panel, query refId) pair that the backend checked.
 */
export interface QueryBreakdown {
  /** Title of the panel containing this query */
  panelTitle: string;
  /** Numeric ID of the panel */
  panelID: number;
  /** Query reference ID (e.g., 'A', 'B', 'C') */
  queryRefId: string;
  /** Total number of metrics extracted from this query */
  totalMetrics: number;
  /** Number of those metrics found in the datasource */
  foundMetrics: number;
  /** Names of metrics that were not found */
  missingMetrics: string[];
  /** Compatibility score for this query as a percentage (0-100) */
  compatibilityScore: number;
}
|
||||
|
||||
/**
 * Compatibility check result for a single datasource, aggregating all queries
 * in the dashboard that reference it.
 */
export interface DatasourceResult {
  /** Unique identifier of the datasource */
  uid: string;
  /** Type of datasource */
  type: string;
  /** Optional human-readable name */
  name?: string;
  /** Total number of queries in the dashboard */
  totalQueries: number;
  /** Number of queries that were actually checked */
  checkedQueries: number;
  /** Total number of unique metrics extracted from all queries */
  totalMetrics: number;
  /** Number of metrics found in the datasource */
  foundMetrics: number;
  /** All missing metric names across all queries (deduplicated per backend contract) */
  missingMetrics: string[];
  /** Overall compatibility score for this datasource as a percentage (0-100) */
  compatibilityScore: number;
  /** Detailed per-query breakdown of compatibility */
  queryBreakdown: QueryBreakdown[];
}
|
||||
|
||||
/**
 * Overall compatibility check result, as returned by the backend's /check endpoint.
 */
export interface CompatibilityCheckResult {
  /** Overall compatibility score across all datasources (0-100) */
  compatibilityScore: number;
  /** Results for each datasource checked */
  datasourceResults: DatasourceResult[];
}
|
||||
|
||||
/**
|
||||
* Checks dashboard compatibility with specified datasources.
|
||||
*
|
||||
* This function sends the dashboard JSON and datasource mappings to the backend
|
||||
* validation service, which extracts metrics from dashboard queries and checks
|
||||
* if those metrics exist in the target datasource(s).
|
||||
*
|
||||
* Note: The backend currently only supports v1 dashboards (with panels array).
|
||||
* V2 dashboards (with elements) will be rejected by the backend with an appropriate error.
|
||||
*
|
||||
* @param dashboardJson Complete dashboard JSON object (v1 or v2 schema)
|
||||
* @param datasourceMappings Array of datasource mappings to validate against
|
||||
* @returns Promise resolving to compatibility check results
|
||||
* @throws Error if the API call fails or dashboard schema is unsupported
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const result = await checkDashboardCompatibility(
|
||||
* { panels: [...], title: "My Dashboard" },
|
||||
* [{ uid: "prometheus-uid", type: "prometheus" }]
|
||||
* );
|
||||
*
|
||||
* console.log(`Compatibility: ${result.compatibilityScore}%`);
|
||||
* console.log(`Missing metrics: ${result.datasourceResults[0].missingMetrics}`);
|
||||
* ```
|
||||
*/
|
||||
export async function checkDashboardCompatibility(
|
||||
dashboardJson: DashboardJson,
|
||||
datasourceMappings: DatasourceMapping[]
|
||||
): Promise<CompatibilityCheckResult> {
|
||||
// Get namespace from global config (typically 'default' in development)
|
||||
// This follows Kubernetes API convention for Grafana app plugins
|
||||
const namespace = getAPINamespace();
|
||||
|
||||
// Build request body matching backend schema
|
||||
const requestBody: CheckCompatibilityRequest = {
|
||||
dashboardJson,
|
||||
datasourceMappings,
|
||||
};
|
||||
|
||||
try {
|
||||
// Make POST request to the dashboard validator app's /check endpoint
|
||||
// Following Kubernetes API path convention: /apis/{group}/{version}/namespaces/{namespace}/{resource}
|
||||
const response = await getBackendSrv().post<CompatibilityCheckResult>(
|
||||
`/apis/dashvalidator.grafana.app/v1alpha1/namespaces/${namespace}/check`,
|
||||
requestBody,
|
||||
{
|
||||
// Disable automatic error alerts - we'll handle errors in the UI
|
||||
showErrorAlert: false,
|
||||
}
|
||||
);
|
||||
|
||||
return response;
|
||||
} catch (error) {
|
||||
// Log error for debugging
|
||||
console.error('Dashboard compatibility check failed:', error);
|
||||
|
||||
// Re-throw original error for caller to handle
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,5 @@
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
import { PluginDashboard } from 'app/types/plugins';
|
||||
|
||||
export interface Link {
|
||||
rel: string;
|
||||
@@ -47,3 +48,11 @@ export interface GnetDashboardsResponse {
|
||||
pages: number;
|
||||
items: GnetDashboard[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Type guard to check if a dashboard is a GnetDashboard (community dashboard).
|
||||
* PluginDashboard has fields like importedRevision, importedUri, path that GnetDashboard doesn't have.
|
||||
*/
|
||||
export function isGnetDashboard(dashboard: PluginDashboard | GnetDashboard): dashboard is GnetDashboard {
|
||||
return !('importedRevision' in dashboard || 'importedUri' in dashboard || 'path' in dashboard);
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { locationService } from '@grafana/runtime';
|
||||
import { BackendSrv, getBackendSrv, locationService } from '@grafana/runtime';
|
||||
import { InputType, DataSourceInput, DashboardInput } from 'app/features/manage-dashboards/state/reducers';
|
||||
import { DashboardJson } from 'app/features/manage-dashboards/types';
|
||||
|
||||
@@ -14,6 +14,7 @@ import {
|
||||
getLogoUrl,
|
||||
navigateToTemplate,
|
||||
onUseCommunityDashboard,
|
||||
interpolateDashboardForCompatibilityCheck,
|
||||
} from './communityDashboardHelpers';
|
||||
|
||||
jest.mock('../api/dashboardLibraryApi', () => ({
|
||||
@@ -34,12 +35,34 @@ jest.mock('../interactions', () => ({
|
||||
},
|
||||
}));
|
||||
|
||||
// Partially mock @grafana/runtime: keep the module's real exports, but stub
// getBackendSrv (replaced per-test, e.g. via createMockBackendSrv) and
// locationService.push (so navigation can be asserted without a router).
// jest.mock calls are hoisted above imports by the jest transformer.
jest.mock('@grafana/runtime', () => ({
  ...jest.requireActual('@grafana/runtime'),
  getBackendSrv: jest.fn(),
  locationService: {
    push: jest.fn(),
  },
}));
|
||||
|
||||
// Mock function references
// Typed aliases for the module-level mocks so tests get type checking on
// mockResolvedValue / mockReturnValue calls.
const mockFetchCommunityDashboard = fetchCommunityDashboard as jest.MockedFunction<typeof fetchCommunityDashboard>;
const mockTryAutoMapDatasources = tryAutoMapDatasources as jest.MockedFunction<typeof tryAutoMapDatasources>;
const mockParseConstantInputs = parseConstantInputs as jest.MockedFunction<typeof parseConstantInputs>;
const mockGetBackendSrv = getBackendSrv as jest.MockedFunction<typeof getBackendSrv>;
|
||||
|
||||
// Helper functions for creating mock objects
|
||||
const createMockBackendSrv = (overrides: Partial<BackendSrv> = {}): BackendSrv =>
|
||||
({
|
||||
post: jest.fn(),
|
||||
get: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
patch: jest.fn(),
|
||||
put: jest.fn(),
|
||||
request: jest.fn(),
|
||||
datasourceRequest: jest.fn(),
|
||||
resolveCancelerIfExists: jest.fn(),
|
||||
...overrides,
|
||||
}) as BackendSrv;
|
||||
|
||||
const createMockGnetDashboard = (overrides: Partial<GnetDashboard> = {}): GnetDashboard => ({
|
||||
id: 123,
|
||||
name: 'Test Dashboard',
|
||||
@@ -610,4 +633,125 @@ describe('communityDashboardHelpers', () => {
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
// Covers the fetch -> auto-map -> interpolate pipeline: success, mapping
// failure, API failure, and the no-__inputs edge case.
describe('interpolateDashboardForCompatibilityCheck', () => {
  let mockPost: jest.Mock;

  beforeEach(() => {
    // Fresh post spy per test, wired into the backend service mock.
    mockPost = jest.fn();
    mockGetBackendSrv.mockReturnValue(createMockBackendSrv({ post: mockPost }));
  });

  afterEach(() => {
    jest.clearAllMocks();
  });

  it('should successfully interpolate dashboard when auto-mapping succeeds', async () => {
    const dashboardJson = createMockDashboardJson({
      __inputs: [
        {
          name: 'DS_PROMETHEUS',
          type: InputType.DataSource,
          label: 'Prometheus',
          value: '',
          description: '',
          pluginId: 'prometheus',
          info: '',
        } as DataSourceInput & { description: string },
      ],
    });

    const interpolatedDashboard = createMockDashboardJson({ title: 'Interpolated Dashboard' });

    mockFetchCommunityDashboard.mockResolvedValue({ json: dashboardJson });
    mockTryAutoMapDatasources.mockReturnValue({
      allMapped: true,
      mappings: [{ name: 'DS_PROMETHEUS', type: 'datasource', value: 'prom-uid', pluginId: 'prometheus' }],
      unmappedDsInputs: [],
    });
    mockPost.mockResolvedValue(interpolatedDashboard);

    const result = await interpolateDashboardForCompatibilityCheck(123, 'prom-uid');

    expect(result).toEqual(interpolatedDashboard);
    expect(mockFetchCommunityDashboard).toHaveBeenCalledWith(123);
    expect(mockTryAutoMapDatasources).toHaveBeenCalled();
    // The interpolation endpoint must receive the fetched dashboard plus the
    // auto-mapped datasource inputs.
    expect(mockPost).toHaveBeenCalledWith('/api/dashboards/interpolate', {
      dashboard: dashboardJson,
      overwrite: true,
      inputs: [{ name: 'DS_PROMETHEUS', type: 'datasource', value: 'prom-uid', pluginId: 'prometheus' }],
    });
  });

  it('should throw error when auto-mapping fails', async () => {
    const dashboardJson = createMockDashboardJson({
      __inputs: [
        {
          name: 'DS_PROMETHEUS',
          type: InputType.DataSource,
          label: 'Prometheus',
          value: '',
          description: '',
          pluginId: 'prometheus',
          info: '',
        } as DataSourceInput & { description: string },
      ],
    });

    mockFetchCommunityDashboard.mockResolvedValue({ json: dashboardJson });
    // Simulate an unmapped datasource input: the helper must refuse to
    // interpolate an incompletely mapped dashboard.
    mockTryAutoMapDatasources.mockReturnValue({
      allMapped: false,
      mappings: [],
      unmappedDsInputs: [
        {
          name: 'DS_PROMETHEUS',
          pluginId: 'prometheus',
          type: InputType.DataSource,
          value: '',
          label: 'Prometheus',
          description: '',
          info: '',
        },
      ],
    });

    await expect(interpolateDashboardForCompatibilityCheck(123, 'prom-uid')).rejects.toThrow(
      'Unable to automatically map all datasource inputs for this dashboard'
    );

    // On mapping failure the interpolation endpoint must never be hit.
    expect(mockPost).not.toHaveBeenCalled();
  });

  it('should throw error when interpolation API fails', async () => {
    const dashboardJson = createMockDashboardJson();

    mockFetchCommunityDashboard.mockResolvedValue({ json: dashboardJson });
    mockTryAutoMapDatasources.mockReturnValue({
      allMapped: true,
      mappings: [],
      unmappedDsInputs: [],
    });
    // Backend rejection should propagate unchanged to the caller.
    mockPost.mockRejectedValue(new Error('API failed'));

    await expect(interpolateDashboardForCompatibilityCheck(123, 'prom-uid')).rejects.toThrow('API failed');
  });

  it('should handle dashboard with no __inputs', async () => {
    const dashboardJson = createMockDashboardJson({ __inputs: undefined });
    const interpolatedDashboard = createMockDashboardJson({ title: 'Interpolated Dashboard' });

    mockFetchCommunityDashboard.mockResolvedValue({ json: dashboardJson });
    mockTryAutoMapDatasources.mockReturnValue({
      allMapped: true,
      mappings: [],
      unmappedDsInputs: [],
    });
    mockPost.mockResolvedValue(interpolatedDashboard);

    const result = await interpolateDashboardForCompatibilityCheck(123, 'prom-uid');

    expect(result).toEqual(interpolatedDashboard);
    // Missing __inputs is normalized to an empty datasource-input list.
    expect(mockTryAutoMapDatasources).toHaveBeenCalledWith([], 'prom-uid');
  });
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { PanelModel } from '@grafana/data';
|
||||
import { t } from '@grafana/i18n';
|
||||
import { locationService } from '@grafana/runtime';
|
||||
import { getBackendSrv, locationService } from '@grafana/runtime';
|
||||
import { createErrorNotification } from 'app/core/copy/appNotification';
|
||||
import { notifyApp } from 'app/core/reducers/appNotification';
|
||||
import { DataSourceInput } from 'app/features/manage-dashboards/state/reducers';
|
||||
@@ -311,3 +311,52 @@ export async function onUseCommunityDashboard({
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Interpolate a community dashboard for compatibility checking.
|
||||
*
|
||||
* This function fetches the dashboard from Grafana.com, auto-maps datasource inputs,
|
||||
* and returns the interpolated dashboard with template variables resolved.
|
||||
*
|
||||
* @throws Error if auto-mapping fails - compatibility check requires all datasource inputs to be resolved
|
||||
* @param dashboardId - The Grafana.com dashboard ID
|
||||
* @param datasourceUid - The UID of the datasource to map to
|
||||
* @returns Promise<DashboardJson> - The interpolated dashboard with resolved template variables
|
||||
*/
|
||||
export async function interpolateDashboardForCompatibilityCheck(
|
||||
dashboardId: number,
|
||||
datasourceUid: string
|
||||
): Promise<DashboardJson> {
|
||||
// 1. Fetch full dashboard JSON from Grafana.com
|
||||
const gnetResponse = await fetchCommunityDashboard(dashboardId);
|
||||
const dashboardJson = gnetResponse.json;
|
||||
|
||||
// 2. Extract datasource inputs from dashboard's __inputs array
|
||||
const dsInputs: DataSourceInput[] = dashboardJson.__inputs?.filter(isDataSourceInput) || [];
|
||||
|
||||
// 3. Auto-map datasources using existing utility
|
||||
const mappingResult = tryAutoMapDatasources(dsInputs, datasourceUid);
|
||||
|
||||
// 4. Check if auto-mapping was successful
|
||||
// Compatibility check requires all datasource variables to be resolved
|
||||
if (!mappingResult.allMapped) {
|
||||
throw new Error(
|
||||
t(
|
||||
'dashboard-library.compatibility-auto-map-failed',
|
||||
'Unable to automatically map all datasource inputs for this dashboard. Compatibility check requires all datasource variables to be resolved.'
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// 5. Prepare inputs array for interpolation API
|
||||
const inputs: InputMapping[] = mappingResult.mappings;
|
||||
|
||||
// 6. Call interpolation endpoint to replace template variables
|
||||
const interpolatedDashboard = await getBackendSrv().post<DashboardJson>('/api/dashboards/interpolate', {
|
||||
dashboard: dashboardJson,
|
||||
overwrite: true,
|
||||
inputs: inputs,
|
||||
});
|
||||
|
||||
return interpolatedDashboard;
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user