Compare commits
27 Commits
provisioni
...
remove-sea
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
56ce565f2e | ||
|
|
32fab0dd13 | ||
|
|
505a95b53e | ||
|
|
82b4ce0ece | ||
|
|
52698cf0da | ||
|
|
d291dfb35b | ||
|
|
9c6feb8de5 | ||
|
|
e7625186af | ||
|
|
574a748e37 | ||
|
|
f883c0ccdb | ||
|
|
75b2c905cd | ||
|
|
2010db5a4b | ||
|
|
45fc95cfc9 | ||
|
|
9c3cdd4814 | ||
|
|
2dad8b7b5b | ||
|
|
9a831ab4e1 | ||
|
|
759035a465 | ||
|
|
6e155523a3 | ||
|
|
5c0ee2d746 | ||
|
|
0c6b97bee2 | ||
|
|
4c79775b57 | ||
|
|
e088c9aac9 | ||
|
|
7182511bcf | ||
|
|
3023a72175 | ||
|
|
30ad61e0e9 | ||
|
|
0b58cd3900 | ||
|
|
4ba2fe6cce |
1
.github/CODEOWNERS
vendored
1
.github/CODEOWNERS
vendored
@@ -384,7 +384,6 @@
|
||||
|
||||
# Grafana app platform
|
||||
/pkg/services/live/ @grafana/grafana-app-platform-squad
|
||||
/pkg/services/searchV2/ @grafana/grafana-app-platform-squad
|
||||
/pkg/services/store/ @grafana/grafana-app-platform-squad
|
||||
/pkg/infra/filestorage/ @grafana/grafana-app-platform-squad
|
||||
/pkg/modules/ @grafana/grafana-app-platform-squad
|
||||
|
||||
@@ -180,12 +180,15 @@ func countAnnotationsV0V1(spec map[string]interface{}) int {
|
||||
return 0
|
||||
}
|
||||
|
||||
annotationList, ok := annotations["list"].([]interface{})
|
||||
if !ok {
|
||||
return 0
|
||||
// Handle both []interface{} (from JSON unmarshaling) and []map[string]interface{} (from programmatic creation)
|
||||
if annotationList, ok := annotations["list"].([]interface{}); ok {
|
||||
return len(annotationList)
|
||||
}
|
||||
if annotationList, ok := annotations["list"].([]map[string]interface{}); ok {
|
||||
return len(annotationList)
|
||||
}
|
||||
|
||||
return len(annotationList)
|
||||
return 0
|
||||
}
|
||||
|
||||
// countLinksV0V1 counts dashboard links in v0alpha1 or v1beta1 dashboard spec
|
||||
@@ -194,12 +197,15 @@ func countLinksV0V1(spec map[string]interface{}) int {
|
||||
return 0
|
||||
}
|
||||
|
||||
links, ok := spec["links"].([]interface{})
|
||||
if !ok {
|
||||
return 0
|
||||
// Handle both []interface{} (from JSON unmarshaling) and []map[string]interface{} (from programmatic creation)
|
||||
if links, ok := spec["links"].([]interface{}); ok {
|
||||
return len(links)
|
||||
}
|
||||
if links, ok := spec["links"].([]map[string]interface{}); ok {
|
||||
return len(links)
|
||||
}
|
||||
|
||||
return len(links)
|
||||
return 0
|
||||
}
|
||||
|
||||
// countVariablesV0V1 counts template variables in v0alpha1 or v1beta1 dashboard spec
|
||||
@@ -213,12 +219,15 @@ func countVariablesV0V1(spec map[string]interface{}) int {
|
||||
return 0
|
||||
}
|
||||
|
||||
variableList, ok := templating["list"].([]interface{})
|
||||
if !ok {
|
||||
return 0
|
||||
// Handle both []interface{} (from JSON unmarshaling) and []map[string]interface{} (from programmatic creation)
|
||||
if variableList, ok := templating["list"].([]interface{}); ok {
|
||||
return len(variableList)
|
||||
}
|
||||
if variableList, ok := templating["list"].([]map[string]interface{}); ok {
|
||||
return len(variableList)
|
||||
}
|
||||
|
||||
return len(variableList)
|
||||
return 0
|
||||
}
|
||||
|
||||
// collectStatsV0V1 collects statistics from v0alpha1 or v1beta1 dashboard
|
||||
|
||||
142
apps/dashboard/pkg/migration/conversion/testdata/input/v1beta1.bom-in-links.json
vendored
Normal file
142
apps/dashboard/pkg/migration/conversion/testdata/input/v1beta1.bom-in-links.json
vendored
Normal file
@@ -0,0 +1,142 @@
|
||||
{
|
||||
"kind": "Dashboard",
|
||||
"apiVersion": "dashboard.grafana.app/v1beta1",
|
||||
"metadata": {
|
||||
"name": "bom-in-links-test",
|
||||
"namespace": "org-1",
|
||||
"labels": {
|
||||
"test": "bom-stripping"
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"title": "BOM Stripping Test Dashboard",
|
||||
"description": "Testing that BOM characters are stripped from URLs during conversion",
|
||||
"schemaVersion": 42,
|
||||
"tags": ["test", "bom"],
|
||||
"editable": true,
|
||||
"links": [
|
||||
{
|
||||
"title": "Dashboard link with BOM",
|
||||
"type": "link",
|
||||
"url": "http://example.com?var=${datasource}&other=value",
|
||||
"targetBlank": true,
|
||||
"icon": "external link"
|
||||
}
|
||||
],
|
||||
"panels": [
|
||||
{
|
||||
"id": 1,
|
||||
"type": "table",
|
||||
"title": "Panel with BOM in field config override links",
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{"color": "green"},
|
||||
{"color": "red", "value": 80}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "server"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "links",
|
||||
"value": [
|
||||
{
|
||||
"title": "Override link with BOM",
|
||||
"url": "http://localhost:3000/d/test?var-datacenter=${__data.fields[datacenter]}&var-server=${__value.raw}"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"links": [
|
||||
{
|
||||
"title": "Panel data link with BOM",
|
||||
"url": "http://example.com/${__data.fields.cluster}&var=value",
|
||||
"targetBlank": true
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "test-ds"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"type": "timeseries",
|
||||
"title": "Panel with BOM in options dataLinks",
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 0
|
||||
},
|
||||
"options": {
|
||||
"legend": {
|
||||
"showLegend": true,
|
||||
"displayMode": "list",
|
||||
"placement": "bottom"
|
||||
},
|
||||
"dataLinks": [
|
||||
{
|
||||
"title": "Options data link with BOM",
|
||||
"url": "http://example.com?series=${__series.name}&time=${__value.time}",
|
||||
"targetBlank": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"links": [
|
||||
{
|
||||
"title": "Field config default link with BOM",
|
||||
"url": "http://example.com?field=${__field.name}&value=${__value.raw}",
|
||||
"targetBlank": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"refId": "A",
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "test-ds"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"time": {
|
||||
"from": "now-6h",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"refresh_intervals": ["5s", "10s", "30s", "1m", "5m"]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,7 +120,7 @@
|
||||
"value": [
|
||||
{
|
||||
"title": "filter",
|
||||
"url": "http://localhost:3000/d/-Y-tnEDWk/templating-nested-template-variables?var-datacenter=${__data.fields[datacenter]}\u0026var-server=${__value.raw}"
|
||||
"url": "http://localhost:3000/d/-Y-tnEDWk/templating-nested-template-variables?var-datacenter=${__data.fields[datacenter]}\u0026var-server=${__value.raw}"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -124,7 +124,7 @@
|
||||
"value": [
|
||||
{
|
||||
"title": "filter",
|
||||
"url": "http://localhost:3000/d/-Y-tnEDWk/templating-nested-template-variables?var-datacenter=${__data.fields[datacenter]}\u0026var-server=${__value.raw}"
|
||||
"url": "http://localhost:3000/d/-Y-tnEDWk/templating-nested-template-variables?var-datacenter=${__data.fields[datacenter]}\u0026var-server=${__value.raw}"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -2051,4 +2051,4 @@
|
||||
"storedVersion": "v0alpha1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2691,4 +2691,4 @@
|
||||
"storedVersion": "v0alpha1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2764,4 +2764,4 @@
|
||||
"storedVersion": "v0alpha1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1173,4 +1173,4 @@
|
||||
"storedVersion": "v0alpha1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1618,4 +1618,4 @@
|
||||
"storedVersion": "v0alpha1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1670,4 +1670,4 @@
|
||||
"storedVersion": "v0alpha1"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
161
apps/dashboard/pkg/migration/conversion/testdata/output/v1beta1.bom-in-links.v0alpha1.json
vendored
Normal file
161
apps/dashboard/pkg/migration/conversion/testdata/output/v1beta1.bom-in-links.v0alpha1.json
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
{
|
||||
"kind": "Dashboard",
|
||||
"apiVersion": "dashboard.grafana.app/v0alpha1",
|
||||
"metadata": {
|
||||
"name": "bom-in-links-test",
|
||||
"namespace": "org-1",
|
||||
"labels": {
|
||||
"test": "bom-stripping"
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"description": "Testing that BOM characters are stripped from URLs during conversion",
|
||||
"editable": true,
|
||||
"links": [
|
||||
{
|
||||
"icon": "external link",
|
||||
"targetBlank": true,
|
||||
"title": "Dashboard link with BOM",
|
||||
"type": "link",
|
||||
"url": "http://example.com?var=${datasource}\u0026other=value"
|
||||
}
|
||||
],
|
||||
"panels": [
|
||||
{
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green"
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "server"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "links",
|
||||
"value": [
|
||||
{
|
||||
"title": "Override link with BOM",
|
||||
"url": "http://localhost:3000/d/test?var-datacenter=${__data.fields[datacenter]}\u0026var-server=${__value.raw}"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"id": 1,
|
||||
"links": [
|
||||
{
|
||||
"targetBlank": true,
|
||||
"title": "Panel data link with BOM",
|
||||
"url": "http://example.com/${__data.fields.cluster}\u0026var=value"
|
||||
}
|
||||
],
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "test-ds"
|
||||
},
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Panel with BOM in field config override links",
|
||||
"type": "table"
|
||||
},
|
||||
{
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"links": [
|
||||
{
|
||||
"targetBlank": false,
|
||||
"title": "Field config default link with BOM",
|
||||
"url": "http://example.com?field=${__field.name}\u0026value=${__value.raw}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 0
|
||||
},
|
||||
"id": 2,
|
||||
"options": {
|
||||
"dataLinks": [
|
||||
{
|
||||
"targetBlank": true,
|
||||
"title": "Options data link with BOM",
|
||||
"url": "http://example.com?series=${__series.name}\u0026time=${__value.time}"
|
||||
}
|
||||
],
|
||||
"legend": {
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
}
|
||||
},
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "test-ds"
|
||||
},
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Panel with BOM in options dataLinks",
|
||||
"type": "timeseries"
|
||||
}
|
||||
],
|
||||
"schemaVersion": 42,
|
||||
"tags": [
|
||||
"test",
|
||||
"bom"
|
||||
],
|
||||
"time": {
|
||||
"from": "now-6h",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"refresh_intervals": [
|
||||
"5s",
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m"
|
||||
]
|
||||
},
|
||||
"title": "BOM Stripping Test Dashboard"
|
||||
},
|
||||
"status": {
|
||||
"conversion": {
|
||||
"failed": false,
|
||||
"storedVersion": "v1beta1"
|
||||
}
|
||||
}
|
||||
}
|
||||
242
apps/dashboard/pkg/migration/conversion/testdata/output/v1beta1.bom-in-links.v2alpha1.json
vendored
Normal file
242
apps/dashboard/pkg/migration/conversion/testdata/output/v1beta1.bom-in-links.v2alpha1.json
vendored
Normal file
@@ -0,0 +1,242 @@
|
||||
{
|
||||
"kind": "Dashboard",
|
||||
"apiVersion": "dashboard.grafana.app/v2alpha1",
|
||||
"metadata": {
|
||||
"name": "bom-in-links-test",
|
||||
"namespace": "org-1",
|
||||
"labels": {
|
||||
"test": "bom-stripping"
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"annotations": [],
|
||||
"cursorSync": "Off",
|
||||
"description": "Testing that BOM characters are stripped from URLs during conversion",
|
||||
"editable": true,
|
||||
"elements": {
|
||||
"panel-1": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"id": 1,
|
||||
"title": "Panel with BOM in field config override links",
|
||||
"description": "",
|
||||
"links": [
|
||||
{
|
||||
"title": "Panel data link with BOM",
|
||||
"url": "http://example.com/${__data.fields.cluster}\u0026var=value",
|
||||
"targetBlank": true
|
||||
}
|
||||
],
|
||||
"data": {
|
||||
"kind": "QueryGroup",
|
||||
"spec": {
|
||||
"queries": [
|
||||
{
|
||||
"kind": "PanelQuery",
|
||||
"spec": {
|
||||
"query": {
|
||||
"kind": "prometheus",
|
||||
"spec": {}
|
||||
},
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "test-ds"
|
||||
},
|
||||
"refId": "A",
|
||||
"hidden": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"transformations": [],
|
||||
"queryOptions": {}
|
||||
}
|
||||
},
|
||||
"vizConfig": {
|
||||
"kind": "table",
|
||||
"spec": {
|
||||
"pluginVersion": "",
|
||||
"options": {},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"value": null,
|
||||
"color": "green"
|
||||
},
|
||||
{
|
||||
"value": 80,
|
||||
"color": "red"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "server"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "links",
|
||||
"value": [
|
||||
{
|
||||
"title": "Override link with BOM",
|
||||
"url": "http://localhost:3000/d/test?var-datacenter=${__data.fields[datacenter]}\u0026var-server=${__value.raw}"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"panel-2": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"id": 2,
|
||||
"title": "Panel with BOM in options dataLinks",
|
||||
"description": "",
|
||||
"links": [],
|
||||
"data": {
|
||||
"kind": "QueryGroup",
|
||||
"spec": {
|
||||
"queries": [
|
||||
{
|
||||
"kind": "PanelQuery",
|
||||
"spec": {
|
||||
"query": {
|
||||
"kind": "prometheus",
|
||||
"spec": {}
|
||||
},
|
||||
"datasource": {
|
||||
"type": "prometheus",
|
||||
"uid": "test-ds"
|
||||
},
|
||||
"refId": "A",
|
||||
"hidden": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"transformations": [],
|
||||
"queryOptions": {}
|
||||
}
|
||||
},
|
||||
"vizConfig": {
|
||||
"kind": "timeseries",
|
||||
"spec": {
|
||||
"pluginVersion": "",
|
||||
"options": {
|
||||
"dataLinks": [
|
||||
{
|
||||
"targetBlank": true,
|
||||
"title": "Options data link with BOM",
|
||||
"url": "http://example.com?series=${__series.name}\u0026time=${__value.time}"
|
||||
}
|
||||
],
|
||||
"legend": {
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
}
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"links": [
|
||||
{
|
||||
"targetBlank": false,
|
||||
"title": "Field config default link with BOM",
|
||||
"url": "http://example.com?field=${__field.name}\u0026value=${__value.raw}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"overrides": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"layout": {
|
||||
"kind": "GridLayout",
|
||||
"spec": {
|
||||
"items": [
|
||||
{
|
||||
"kind": "GridLayoutItem",
|
||||
"spec": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"width": 12,
|
||||
"height": 8,
|
||||
"element": {
|
||||
"kind": "ElementReference",
|
||||
"name": "panel-1"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "GridLayoutItem",
|
||||
"spec": {
|
||||
"x": 12,
|
||||
"y": 0,
|
||||
"width": 12,
|
||||
"height": 8,
|
||||
"element": {
|
||||
"kind": "ElementReference",
|
||||
"name": "panel-2"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"links": [
|
||||
{
|
||||
"title": "Dashboard link with BOM",
|
||||
"type": "link",
|
||||
"icon": "external link",
|
||||
"tooltip": "",
|
||||
"url": "http://example.com?var=${datasource}\u0026other=value",
|
||||
"tags": [],
|
||||
"asDropdown": false,
|
||||
"targetBlank": true,
|
||||
"includeVars": false,
|
||||
"keepTime": false
|
||||
}
|
||||
],
|
||||
"liveNow": false,
|
||||
"preload": false,
|
||||
"tags": [
|
||||
"test",
|
||||
"bom"
|
||||
],
|
||||
"timeSettings": {
|
||||
"timezone": "browser",
|
||||
"from": "now-6h",
|
||||
"to": "now",
|
||||
"autoRefresh": "",
|
||||
"autoRefreshIntervals": [
|
||||
"5s",
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m"
|
||||
],
|
||||
"hideTimepicker": false,
|
||||
"fiscalYearStartMonth": 0
|
||||
},
|
||||
"title": "BOM Stripping Test Dashboard",
|
||||
"variables": []
|
||||
},
|
||||
"status": {
|
||||
"conversion": {
|
||||
"failed": false,
|
||||
"storedVersion": "v1beta1"
|
||||
}
|
||||
}
|
||||
}
|
||||
246
apps/dashboard/pkg/migration/conversion/testdata/output/v1beta1.bom-in-links.v2beta1.json
vendored
Normal file
246
apps/dashboard/pkg/migration/conversion/testdata/output/v1beta1.bom-in-links.v2beta1.json
vendored
Normal file
@@ -0,0 +1,246 @@
|
||||
{
|
||||
"kind": "Dashboard",
|
||||
"apiVersion": "dashboard.grafana.app/v2beta1",
|
||||
"metadata": {
|
||||
"name": "bom-in-links-test",
|
||||
"namespace": "org-1",
|
||||
"labels": {
|
||||
"test": "bom-stripping"
|
||||
}
|
||||
},
|
||||
"spec": {
|
||||
"annotations": [],
|
||||
"cursorSync": "Off",
|
||||
"description": "Testing that BOM characters are stripped from URLs during conversion",
|
||||
"editable": true,
|
||||
"elements": {
|
||||
"panel-1": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"id": 1,
|
||||
"title": "Panel with BOM in field config override links",
|
||||
"description": "",
|
||||
"links": [
|
||||
{
|
||||
"title": "Panel data link with BOM",
|
||||
"url": "http://example.com/${__data.fields.cluster}\u0026var=value",
|
||||
"targetBlank": true
|
||||
}
|
||||
],
|
||||
"data": {
|
||||
"kind": "QueryGroup",
|
||||
"spec": {
|
||||
"queries": [
|
||||
{
|
||||
"kind": "PanelQuery",
|
||||
"spec": {
|
||||
"query": {
|
||||
"kind": "DataQuery",
|
||||
"group": "prometheus",
|
||||
"version": "v0",
|
||||
"datasource": {
|
||||
"name": "test-ds"
|
||||
},
|
||||
"spec": {}
|
||||
},
|
||||
"refId": "A",
|
||||
"hidden": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"transformations": [],
|
||||
"queryOptions": {}
|
||||
}
|
||||
},
|
||||
"vizConfig": {
|
||||
"kind": "VizConfig",
|
||||
"group": "table",
|
||||
"version": "",
|
||||
"spec": {
|
||||
"options": {},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"value": null,
|
||||
"color": "green"
|
||||
},
|
||||
{
|
||||
"value": 80,
|
||||
"color": "red"
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "server"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "links",
|
||||
"value": [
|
||||
{
|
||||
"title": "Override link with BOM",
|
||||
"url": "http://localhost:3000/d/test?var-datacenter=${__data.fields[datacenter]}\u0026var-server=${__value.raw}"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"panel-2": {
|
||||
"kind": "Panel",
|
||||
"spec": {
|
||||
"id": 2,
|
||||
"title": "Panel with BOM in options dataLinks",
|
||||
"description": "",
|
||||
"links": [],
|
||||
"data": {
|
||||
"kind": "QueryGroup",
|
||||
"spec": {
|
||||
"queries": [
|
||||
{
|
||||
"kind": "PanelQuery",
|
||||
"spec": {
|
||||
"query": {
|
||||
"kind": "DataQuery",
|
||||
"group": "prometheus",
|
||||
"version": "v0",
|
||||
"datasource": {
|
||||
"name": "test-ds"
|
||||
},
|
||||
"spec": {}
|
||||
},
|
||||
"refId": "A",
|
||||
"hidden": false
|
||||
}
|
||||
}
|
||||
],
|
||||
"transformations": [],
|
||||
"queryOptions": {}
|
||||
}
|
||||
},
|
||||
"vizConfig": {
|
||||
"kind": "VizConfig",
|
||||
"group": "timeseries",
|
||||
"version": "",
|
||||
"spec": {
|
||||
"options": {
|
||||
"dataLinks": [
|
||||
{
|
||||
"targetBlank": true,
|
||||
"title": "Options data link with BOM",
|
||||
"url": "http://example.com?series=${__series.name}\u0026time=${__value.time}"
|
||||
}
|
||||
],
|
||||
"legend": {
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
}
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"links": [
|
||||
{
|
||||
"targetBlank": false,
|
||||
"title": "Field config default link with BOM",
|
||||
"url": "http://example.com?field=${__field.name}\u0026value=${__value.raw}"
|
||||
}
|
||||
]
|
||||
},
|
||||
"overrides": []
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"layout": {
|
||||
"kind": "GridLayout",
|
||||
"spec": {
|
||||
"items": [
|
||||
{
|
||||
"kind": "GridLayoutItem",
|
||||
"spec": {
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"width": 12,
|
||||
"height": 8,
|
||||
"element": {
|
||||
"kind": "ElementReference",
|
||||
"name": "panel-1"
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
"kind": "GridLayoutItem",
|
||||
"spec": {
|
||||
"x": 12,
|
||||
"y": 0,
|
||||
"width": 12,
|
||||
"height": 8,
|
||||
"element": {
|
||||
"kind": "ElementReference",
|
||||
"name": "panel-2"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"links": [
|
||||
{
|
||||
"title": "Dashboard link with BOM",
|
||||
"type": "link",
|
||||
"icon": "external link",
|
||||
"tooltip": "",
|
||||
"url": "http://example.com?var=${datasource}\u0026other=value",
|
||||
"tags": [],
|
||||
"asDropdown": false,
|
||||
"targetBlank": true,
|
||||
"includeVars": false,
|
||||
"keepTime": false
|
||||
}
|
||||
],
|
||||
"liveNow": false,
|
||||
"preload": false,
|
||||
"tags": [
|
||||
"test",
|
||||
"bom"
|
||||
],
|
||||
"timeSettings": {
|
||||
"timezone": "browser",
|
||||
"from": "now-6h",
|
||||
"to": "now",
|
||||
"autoRefresh": "",
|
||||
"autoRefreshIntervals": [
|
||||
"5s",
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m"
|
||||
],
|
||||
"hideTimepicker": false,
|
||||
"fiscalYearStartMonth": 0
|
||||
},
|
||||
"title": "BOM Stripping Test Dashboard",
|
||||
"variables": []
|
||||
},
|
||||
"status": {
|
||||
"conversion": {
|
||||
"failed": false,
|
||||
"storedVersion": "v1beta1"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -229,6 +229,36 @@ func getBoolField(m map[string]interface{}, key string, defaultValue bool) bool
|
||||
return defaultValue
|
||||
}
|
||||
|
||||
// stripBOM removes Byte Order Mark (BOM) characters from a string.
|
||||
// BOMs (U+FEFF) can be introduced through copy/paste from certain editors
|
||||
// and cause CUE validation errors ("illegal byte order mark").
|
||||
func stripBOM(s string) string {
|
||||
return strings.ReplaceAll(s, "\ufeff", "")
|
||||
}
|
||||
|
||||
// stripBOMFromInterface recursively strips BOM characters from all strings
|
||||
// in an interface{} value (map, slice, or string).
|
||||
func stripBOMFromInterface(v interface{}) interface{} {
|
||||
switch val := v.(type) {
|
||||
case string:
|
||||
return stripBOM(val)
|
||||
case map[string]interface{}:
|
||||
result := make(map[string]interface{}, len(val))
|
||||
for k, v := range val {
|
||||
result[k] = stripBOMFromInterface(v)
|
||||
}
|
||||
return result
|
||||
case []interface{}:
|
||||
result := make([]interface{}, len(val))
|
||||
for i, item := range val {
|
||||
result[i] = stripBOMFromInterface(item)
|
||||
}
|
||||
return result
|
||||
default:
|
||||
return v
|
||||
}
|
||||
}
|
||||
|
||||
func getUnionField[T ~string](m map[string]interface{}, key string) *T {
|
||||
if val, ok := m[key]; ok {
|
||||
if str, ok := val.(string); ok && str != "" {
|
||||
@@ -393,7 +423,8 @@ func transformLinks(dashboard map[string]interface{}) []dashv2alpha1.DashboardDa
|
||||
// Optional field - only set if present
|
||||
if url, exists := linkMap["url"]; exists {
|
||||
if urlStr, ok := url.(string); ok {
|
||||
dashLink.Url = &urlStr
|
||||
cleanUrl := stripBOM(urlStr)
|
||||
dashLink.Url = &cleanUrl
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2239,7 +2270,7 @@ func transformDataLinks(panelMap map[string]interface{}) []dashv2alpha1.Dashboar
|
||||
if linkMap, ok := link.(map[string]interface{}); ok {
|
||||
dataLink := dashv2alpha1.DashboardDataLink{
|
||||
Title: schemaversion.GetStringValue(linkMap, "title"),
|
||||
Url: schemaversion.GetStringValue(linkMap, "url"),
|
||||
Url: stripBOM(schemaversion.GetStringValue(linkMap, "url")),
|
||||
}
|
||||
if _, exists := linkMap["targetBlank"]; exists {
|
||||
targetBlank := getBoolField(linkMap, "targetBlank", false)
|
||||
@@ -2331,6 +2362,12 @@ func buildVizConfig(panelMap map[string]interface{}) dashv2alpha1.DashboardVizCo
|
||||
}
|
||||
}
|
||||
|
||||
// Strip BOMs from options (may contain dataLinks with URLs that have BOMs)
|
||||
cleanedOptions := stripBOMFromInterface(options)
|
||||
if cleanedMap, ok := cleanedOptions.(map[string]interface{}); ok {
|
||||
options = cleanedMap
|
||||
}
|
||||
|
||||
// Build field config by mapping each field individually
|
||||
fieldConfigSource := extractFieldConfigSource(fieldConfig)
|
||||
|
||||
@@ -2474,9 +2511,14 @@ func extractFieldConfigDefaults(defaults map[string]interface{}) dashv2alpha1.Da
|
||||
hasDefaults = true
|
||||
}
|
||||
|
||||
// Extract array field
|
||||
// Extract array field - strip BOMs from link URLs
|
||||
if linksArray, ok := extractArrayField(defaults, "links"); ok {
|
||||
fieldConfigDefaults.Links = linksArray
|
||||
cleanedLinks := stripBOMFromInterface(linksArray)
|
||||
if cleanedArray, ok := cleanedLinks.([]interface{}); ok {
|
||||
fieldConfigDefaults.Links = cleanedArray
|
||||
} else {
|
||||
fieldConfigDefaults.Links = linksArray
|
||||
}
|
||||
hasDefaults = true
|
||||
}
|
||||
|
||||
@@ -2762,9 +2804,11 @@ func extractFieldConfigOverrides(fieldConfig map[string]interface{}) []dashv2alp
|
||||
fieldOverride.Properties = make([]dashv2alpha1.DashboardDynamicConfigValue, 0, len(propertiesArray))
|
||||
for _, property := range propertiesArray {
|
||||
if propertyMap, ok := property.(map[string]interface{}); ok {
|
||||
// Strip BOMs from property values (may contain links with URLs)
|
||||
cleanedValue := stripBOMFromInterface(propertyMap["value"])
|
||||
fieldOverride.Properties = append(fieldOverride.Properties, dashv2alpha1.DashboardDynamicConfigValue{
|
||||
Id: schemaversion.GetStringValue(propertyMap, "id"),
|
||||
Value: propertyMap["value"],
|
||||
Value: cleanedValue,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -186,8 +186,6 @@ github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8
|
||||
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
|
||||
github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw=
|
||||
github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
|
||||
github.com/RoaringBitmap/roaring v1.9.3 h1:t4EbC5qQwnisr5PrP9nt0IRhRTb9gMUgQF4t4S2OByM=
|
||||
github.com/RoaringBitmap/roaring v1.9.3/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90=
|
||||
github.com/RoaringBitmap/roaring/v2 v2.4.5 h1:uGrrMreGjvAtTBobc0g5IrW1D5ldxDQYe2JW2gggRdg=
|
||||
github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0=
|
||||
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
|
||||
@@ -286,8 +284,6 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
|
||||
github.com/aws/smithy-go v1.23.1 h1:sLvcH6dfAFwGkHLZ7dGiYF7aK6mg4CgKA/iDKjLDt9M=
|
||||
github.com/aws/smithy-go v1.23.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 h1:60m4tnanN1ctzIu4V3bfCNJ39BiOPSm1gHFlFjTkRE0=
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27/go.mod h1:k08r+Yj1PRAmuayFiRK6MYuR5Ve4IuZtTfxErMIh0+c=
|
||||
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
|
||||
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
|
||||
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df h1:GSoSVRLoBaFpOOds6QyY1L8AX7uoY+Ln3BHc22W40X0=
|
||||
@@ -349,14 +345,6 @@ github.com/blevesearch/zapx/v16 v16.2.2 h1:MifKJVRTEhMTgSlle2bDRTb39BGc9jXFRLPZc
|
||||
github.com/blevesearch/zapx/v16 v16.2.2/go.mod h1:B9Pk4G1CqtErgQV9DyCSA9Lb7WZe4olYfGw7fVDZ4sk=
|
||||
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
|
||||
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
|
||||
github.com/blugelabs/bluge v0.2.2 h1:gat8CqE6P6tOgeX30XGLOVNTC26cpM2RWVcreXWtYcM=
|
||||
github.com/blugelabs/bluge v0.2.2/go.mod h1:am1LU9jS8dZgWkRzkGLQN3757EgMs3upWrU2fdN9foE=
|
||||
github.com/blugelabs/bluge_segment_api v0.2.0 h1:cCX1Y2y8v0LZ7+EEJ6gH7dW6TtVTW4RhG0vp3R+N2Lo=
|
||||
github.com/blugelabs/bluge_segment_api v0.2.0/go.mod h1:95XA+ZXfRj/IXADm7gZ+iTcWOJPg5jQTY1EReIzl3LA=
|
||||
github.com/blugelabs/ice v1.0.0 h1:um7wf9e6jbkTVCrOyQq3tKK43fBMOvLUYxbj3Qtc4eo=
|
||||
github.com/blugelabs/ice v1.0.0/go.mod h1:gNfFPk5zM+yxJROhthxhVQYjpBO9amuxWXJQ2Lo+IbQ=
|
||||
github.com/blugelabs/ice/v2 v2.0.1 h1:mzHbntLjk2v7eDRgoXCgzOsPKN1Tenu9Svo6l9cTLS4=
|
||||
github.com/blugelabs/ice/v2 v2.0.1/go.mod h1:QxAWSPNwZwsIqS25c3lbIPFQrVvT1sphf5x5DfMLH5M=
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
|
||||
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf h1:TqhNAT4zKbTdLa62d2HDBFdvgSbIGB3eJE8HqhgiL9I=
|
||||
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf/go.mod h1:r5xuitiExdLAJ09PR7vBVENGvp4ZuTBeWTGtxuX3K+c=
|
||||
@@ -372,8 +360,6 @@ github.com/bwmarrin/snowflake v0.3.0 h1:xm67bEhkKh6ij1790JB83OujPR5CzNe8QuQqAgIS
|
||||
github.com/bwmarrin/snowflake v0.3.0/go.mod h1:NdZxfVWX+oR6y2K0o6qAYv6gIOP9rjG0/E9WsDpxqwE=
|
||||
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 h1:6lhrsTEnloDPXyeZBvSYvQf8u86jbKehZPVDDlkgDl4=
|
||||
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500/go.mod h1:S/7n9copUssQ56c7aAgHqftWO4LTf4xY6CGWt8Bc+3M=
|
||||
github.com/caio/go-tdigest v3.1.0+incompatible h1:uoVMJ3Q5lXmVLCCqaMGHLBWnbGoN6Lpu7OAUPR60cds=
|
||||
github.com/caio/go-tdigest v3.1.0+incompatible/go.mod h1:sHQM/ubZStBUmF1WbB8FAm8q9GjDajLC5T7ydxE3JHI=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
|
||||
@@ -461,8 +447,6 @@ github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINA
|
||||
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
|
||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
|
||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
|
||||
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8=
|
||||
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/dhui/dktest v0.3.0/go.mod h1:cyzIUfGsBEbZ6BT7tnXqAShHSXCZhSNmFl70sZ7c1yc=
|
||||
|
||||
20
apps/plugins/README.md
Normal file
20
apps/plugins/README.md
Normal file
@@ -0,0 +1,20 @@
|
||||
# Plugins App
|
||||
|
||||
API documentation is available at http://localhost:3000/swagger?api=plugins.grafana.app-v0alpha1
|
||||
|
||||
## Codegen
|
||||
|
||||
- Go: `make generate`
|
||||
- Frontend: Follow instructions in this [README](../..//packages/grafana-api-clients/README.md)
|
||||
|
||||
## Plugin sync
|
||||
|
||||
The plugin sync pushes the plugins loaded from disk to the plugins API.
|
||||
|
||||
To enable, add these feature toggles in your `custom.ini`:
|
||||
|
||||
```ini
|
||||
[feature_toggles]
|
||||
pluginInstallAPISync = true
|
||||
pluginStoreServiceLoading = true
|
||||
```
|
||||
@@ -98,7 +98,7 @@ You can share dashboards in the following ways:
|
||||
- [As a report](#schedule-a-report)
|
||||
- [As a snapshot](#share-a-snapshot)
|
||||
- [As a PDF export](#export-a-dashboard-as-pdf)
|
||||
- [As a JSON file export](#export-a-dashboard-as-json)
|
||||
- [As a JSON file export](#export-a-dashboard-as-code)
|
||||
- [As an image export](#export-a-dashboard-as-an-image)
|
||||
|
||||
When you share a dashboard externally as a link or by email, those dashboards are included in a list of your shared dashboards. To view the list and manage these dashboards, navigate to **Dashboards > Shared dashboards**.
|
||||
|
||||
@@ -10,7 +10,7 @@ const NUM_NESTED_DASHBOARDS = 60;
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/TestDashboard.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ test.use({
|
||||
scenes: true,
|
||||
sharingDashboardImage: true, // Enable the export image feature
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/DataLinkWithoutSlugTest.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/DashboardLiveTest.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardScene: false, // this test is for the old sharing modal only used when scenes is turned off
|
||||
},
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardScene: false, // this test is for the old sharing modal only used when scenes is turned off
|
||||
},
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ test.use({
|
||||
featureToggles: {
|
||||
scenes: true,
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ test.use({
|
||||
featureToggles: {
|
||||
scenes: true,
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ test.use({
|
||||
featureToggles: {
|
||||
scenes: true,
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ test.use({
|
||||
timezoneId: 'Pacific/Easter',
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ const TIMEZONE_DASHBOARD_UID = 'd41dbaa2-a39e-4536-ab2b-caca52f1a9c8';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ test.use({
|
||||
},
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'edediimbjhdz4b/a-tall-dashboard';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/TestDashboard.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = '-Y-tnEDWk/templating-nested-template-variables';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Test variable output';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -53,7 +53,7 @@ async function assertPreviewValues(
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Test variable output';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ async function assertPreviewValues(
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Templating - Nested Template Variables';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Test variable output';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'WVpf2jp7z/repeating-a-panel-horizontally';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'OY8Ghjt7k/repeating-a-panel-vertically';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'dtpl2Ctnk/repeating-an-empty-row';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = '-Y-tnEDWk/templating-nested-template-variables';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const DASHBOARD_UID = 'ZqZnVvFZz';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardScene: false, // this test is for the old sharing modal only used when scenes is turned off
|
||||
},
|
||||
});
|
||||
|
||||
@@ -5,7 +5,7 @@ const DASHBOARD_UID = 'yBCC3aKGk';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ const PAGE_UNDER_TEST = 'AejrN1AMz';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -2,18 +2,16 @@ import { Locator } from '@playwright/test';
|
||||
|
||||
import { test, expect } from '@grafana/plugin-e2e';
|
||||
|
||||
import { setVisualization } from './vizpicker-utils';
|
||||
|
||||
test.use({
|
||||
featureToggles: {
|
||||
canvasPanelPanZoom: true,
|
||||
},
|
||||
});
|
||||
test.describe('Canvas Panel - Scene Tests', () => {
|
||||
test.beforeEach(async ({ page, gotoDashboardPage, selectors }) => {
|
||||
test.beforeEach(async ({ page, gotoDashboardPage }) => {
|
||||
const dashboardPage = await gotoDashboardPage({});
|
||||
const panelEditPage = await dashboardPage.addPanel();
|
||||
await setVisualization(panelEditPage, 'Canvas', selectors);
|
||||
await panelEditPage.setVisualization('Canvas');
|
||||
|
||||
// Wait for canvas panel to load
|
||||
await page.waitForSelector('[data-testid="canvas-scene-pan-zoom"]', { timeout: 10000 });
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
import { expect, E2ESelectorGroups, PanelEditPage } from '@grafana/plugin-e2e';
|
||||
|
||||
// this replaces the panelEditPage.setVisualization method used previously in tests, since it
|
||||
// does not know how to use the updated 12.4 viz picker UI to set the visualization
|
||||
export const setVisualization = async (panelEditPage: PanelEditPage, vizName: string, selectors: E2ESelectorGroups) => {
|
||||
const vizPicker = panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker);
|
||||
await expect(vizPicker, '"Change" button should be visible').toBeVisible();
|
||||
await vizPicker.click();
|
||||
|
||||
const allVizTabBtn = panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('All visualizations'));
|
||||
await expect(allVizTabBtn, '"All visualiations" button should be visible').toBeVisible();
|
||||
await allVizTabBtn.click();
|
||||
|
||||
const vizItem = panelEditPage.getByGrafanaSelector(selectors.components.PluginVisualization.item(vizName));
|
||||
await expect(vizItem, `"${vizName}" item should be visible`).toBeVisible();
|
||||
await vizItem.scrollIntoViewIfNeeded();
|
||||
await vizItem.click();
|
||||
|
||||
await expect(vizPicker, '"Change" button should be visible again').toBeVisible();
|
||||
await expect(
|
||||
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
|
||||
'Panel header should have the new viz type name'
|
||||
).toHaveText(vizName);
|
||||
};
|
||||
@@ -1,6 +1,5 @@
|
||||
import { expect, test } from '@grafana/plugin-e2e';
|
||||
|
||||
import { setVisualization } from '../../../panels-suite/vizpicker-utils';
|
||||
import { formatExpectError } from '../errors';
|
||||
import { successfulDataQuery } from '../mocks/queries';
|
||||
|
||||
@@ -25,10 +24,10 @@ test.describe(
|
||||
).toContainText(['Field', 'Max', 'Mean', 'Last']);
|
||||
});
|
||||
|
||||
test('table panel data assertions', async ({ panelEditPage, selectors }) => {
|
||||
test('table panel data assertions', async ({ panelEditPage }) => {
|
||||
await panelEditPage.mockQueryDataResponse(successfulDataQuery, 200);
|
||||
await panelEditPage.datasource.set('gdev-testdata');
|
||||
await setVisualization(panelEditPage, 'Table', selectors);
|
||||
await panelEditPage.setVisualization('Table');
|
||||
await panelEditPage.refreshPanel();
|
||||
await expect(
|
||||
panelEditPage.panel.locator,
|
||||
@@ -44,10 +43,10 @@ test.describe(
|
||||
).toContainText(['val1', 'val2', 'val3', 'val4']);
|
||||
});
|
||||
|
||||
test('timeseries panel - table view assertions', async ({ panelEditPage, selectors }) => {
|
||||
test('timeseries panel - table view assertions', async ({ panelEditPage }) => {
|
||||
await panelEditPage.mockQueryDataResponse(successfulDataQuery, 200);
|
||||
await panelEditPage.datasource.set('gdev-testdata');
|
||||
await setVisualization(panelEditPage, 'Time series', selectors);
|
||||
await panelEditPage.setVisualization('Time series');
|
||||
await panelEditPage.refreshPanel();
|
||||
await panelEditPage.toggleTableView();
|
||||
await expect(
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { expect, test } from '@grafana/plugin-e2e';
|
||||
|
||||
import { setVisualization } from '../../../panels-suite/vizpicker-utils';
|
||||
import { formatExpectError } from '../errors';
|
||||
import { successfulDataQuery } from '../mocks/queries';
|
||||
import { scenarios } from '../mocks/resources';
|
||||
@@ -54,10 +53,10 @@ test.describe(
|
||||
).toHaveText(scenarios.map((s) => s.name));
|
||||
});
|
||||
|
||||
test('mocked query data response', async ({ panelEditPage, page, selectors }) => {
|
||||
test('mocked query data response', async ({ panelEditPage, page }) => {
|
||||
await panelEditPage.mockQueryDataResponse(successfulDataQuery, 200);
|
||||
await panelEditPage.datasource.set('gdev-testdata');
|
||||
await setVisualization(panelEditPage, TABLE_VIZ_NAME, selectors);
|
||||
await panelEditPage.setVisualization(TABLE_VIZ_NAME);
|
||||
await panelEditPage.refreshPanel();
|
||||
await expect(
|
||||
panelEditPage.panel.getErrorIcon(),
|
||||
@@ -76,7 +75,7 @@ test.describe(
|
||||
selectors,
|
||||
page,
|
||||
}) => {
|
||||
await setVisualization(panelEditPage, TABLE_VIZ_NAME, selectors);
|
||||
await panelEditPage.setVisualization(TABLE_VIZ_NAME);
|
||||
await expect(
|
||||
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
|
||||
formatExpectError('Expected panel visualization to be set to table')
|
||||
@@ -93,8 +92,8 @@ test.describe(
|
||||
).toBeVisible();
|
||||
});
|
||||
|
||||
test('Select time zone in timezone picker', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('Select time zone in timezone picker', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = await panelEditPage.getCustomOptions('Axis');
|
||||
const timeZonePicker = axisOptions.getSelect('Time zone');
|
||||
|
||||
@@ -102,8 +101,8 @@ test.describe(
|
||||
await expect(timeZonePicker).toHaveSelected('Europe/Stockholm');
|
||||
});
|
||||
|
||||
test('select unit in unit picker', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('select unit in unit picker', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const standardOptions = panelEditPage.getStandardOptions();
|
||||
const unitPicker = standardOptions.getUnitPicker('Unit');
|
||||
|
||||
@@ -112,8 +111,8 @@ test.describe(
|
||||
await expect(unitPicker).toHaveSelected('Pixels');
|
||||
});
|
||||
|
||||
test('enter value in number input', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('enter value in number input', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const lineWith = axisOptions.getNumberInput('Soft min');
|
||||
|
||||
@@ -122,8 +121,8 @@ test.describe(
|
||||
await expect(lineWith).toHaveValue('10');
|
||||
});
|
||||
|
||||
test('enter value in slider', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('enter value in slider', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const graphOptions = panelEditPage.getCustomOptions('Graph styles');
|
||||
const lineWidth = graphOptions.getSliderInput('Line width');
|
||||
|
||||
@@ -132,8 +131,8 @@ test.describe(
|
||||
await expect(lineWidth).toHaveValue('10');
|
||||
});
|
||||
|
||||
test('select value in single value select', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('select value in single value select', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const standardOptions = panelEditPage.getStandardOptions();
|
||||
const colorSchemeSelect = standardOptions.getSelect('Color scheme');
|
||||
|
||||
@@ -141,8 +140,8 @@ test.describe(
|
||||
await expect(colorSchemeSelect).toHaveSelected('Classic palette');
|
||||
});
|
||||
|
||||
test('clear input', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('clear input', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const panelOptions = panelEditPage.getPanelOptions();
|
||||
const title = panelOptions.getTextInput('Title');
|
||||
|
||||
@@ -151,8 +150,8 @@ test.describe(
|
||||
await expect(title).toHaveValue('');
|
||||
});
|
||||
|
||||
test('enter value in input', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('enter value in input', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const panelOptions = panelEditPage.getPanelOptions();
|
||||
const description = panelOptions.getTextInput('Description');
|
||||
|
||||
@@ -161,8 +160,8 @@ test.describe(
|
||||
await expect(description).toHaveValue('This is a panel');
|
||||
});
|
||||
|
||||
test('unchecking switch', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('unchecking switch', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const showBorder = axisOptions.getSwitch('Show border');
|
||||
|
||||
@@ -174,8 +173,8 @@ test.describe(
|
||||
await expect(showBorder).toBeChecked({ checked: false });
|
||||
});
|
||||
|
||||
test('checking switch', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('checking switch', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const showBorder = axisOptions.getSwitch('Show border');
|
||||
|
||||
@@ -184,8 +183,8 @@ test.describe(
|
||||
await expect(showBorder).toBeChecked();
|
||||
});
|
||||
|
||||
test('re-selecting value in radio button group', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('re-selecting value in radio button group', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const placement = axisOptions.getRadioGroup('Placement');
|
||||
|
||||
@@ -196,8 +195,8 @@ test.describe(
|
||||
await expect(placement).toHaveChecked('Auto');
|
||||
});
|
||||
|
||||
test('selecting value in radio button group', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('selecting value in radio button group', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const placement = axisOptions.getRadioGroup('Placement');
|
||||
|
||||
|
||||
8
go.mod
8
go.mod
@@ -44,8 +44,6 @@ require (
|
||||
github.com/blang/semver/v4 v4.0.0 // indirect; @grafana/grafana-developer-enablement-squad
|
||||
github.com/blevesearch/bleve/v2 v2.5.0 // @grafana/grafana-search-and-storage
|
||||
github.com/blevesearch/bleve_index_api v1.2.7 // @grafana/grafana-search-and-storage
|
||||
github.com/blugelabs/bluge v0.2.2 // @grafana/grafana-backend-group
|
||||
github.com/blugelabs/bluge_segment_api v0.2.0 // @grafana/grafana-backend-group
|
||||
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf // @grafana/grafana-backend-group
|
||||
github.com/bwmarrin/snowflake v0.3.0 // @grafana/grafana-app-platform-squad
|
||||
github.com/centrifugal/centrifuge v0.38.0 // @grafana/grafana-app-platform-squad
|
||||
@@ -324,7 +322,6 @@ require (
|
||||
github.com/Masterminds/squirrel v1.5.4 // indirect
|
||||
github.com/Microsoft/go-winio v0.6.2 // indirect
|
||||
github.com/NYTimes/gziphandler v1.1.1 // indirect
|
||||
github.com/RoaringBitmap/roaring v1.9.3 // indirect
|
||||
github.com/RoaringBitmap/roaring/v2 v2.4.5 // indirect
|
||||
github.com/Yiling-J/theine-go v0.6.2 // indirect
|
||||
github.com/agext/levenshtein v1.2.1 // indirect
|
||||
@@ -355,7 +352,6 @@ require (
|
||||
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 // indirect
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 // indirect
|
||||
github.com/bahlo/generic-list-go v0.2.0 // indirect
|
||||
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df // indirect
|
||||
github.com/beorn7/perks v1.0.1 // indirect
|
||||
@@ -378,12 +374,9 @@ require (
|
||||
github.com/blevesearch/zapx/v15 v15.4.1 // indirect
|
||||
github.com/blevesearch/zapx/v16 v16.2.2 // indirect
|
||||
github.com/bluele/gcache v0.0.2 // indirect
|
||||
github.com/blugelabs/ice v1.0.0 // indirect
|
||||
github.com/blugelabs/ice/v2 v2.0.1 // indirect
|
||||
github.com/bufbuild/protocompile v0.14.1 // indirect
|
||||
github.com/buger/jsonparser v1.1.1 // indirect
|
||||
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 // indirect
|
||||
github.com/caio/go-tdigest v3.1.0+incompatible // indirect
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 // @grafana/alerting-backend
|
||||
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
|
||||
github.com/centrifugal/protocol v0.17.0 // indirect
|
||||
@@ -402,7 +395,6 @@ require (
|
||||
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
|
||||
github.com/dennwc/varint v1.0.0 // indirect
|
||||
github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect
|
||||
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc // indirect
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
|
||||
github.com/diegoholiveira/jsonlogic/v3 v3.7.4 // indirect
|
||||
github.com/distribution/reference v0.6.0 // indirect
|
||||
|
||||
50
go.sum
50
go.sum
@@ -768,11 +768,6 @@ github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbt
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
|
||||
github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk=
|
||||
github.com/RoaringBitmap/gocroaring v0.4.0/go.mod h1:NieMwz7ZqwU2DD73/vvYwv7r4eWBKuPVSXZIpsaMwCI=
|
||||
github.com/RoaringBitmap/roaring v0.9.1/go.mod h1:h1B7iIUOmnAeb5ytYMvnHJwxMc6LUrwBnzXWRuqTQUc=
|
||||
github.com/RoaringBitmap/roaring v0.9.4/go.mod h1:icnadbWcNyfEHlYdr+tDlOTih1Bf/h+rzPpv4sbomAA=
|
||||
github.com/RoaringBitmap/roaring v1.9.3 h1:t4EbC5qQwnisr5PrP9nt0IRhRTb9gMUgQF4t4S2OByM=
|
||||
github.com/RoaringBitmap/roaring v1.9.3/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90=
|
||||
github.com/RoaringBitmap/roaring/v2 v2.4.5 h1:uGrrMreGjvAtTBobc0g5IrW1D5ldxDQYe2JW2gggRdg=
|
||||
github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0=
|
||||
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
|
||||
@@ -825,7 +820,6 @@ github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
|
||||
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
|
||||
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
|
||||
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
|
||||
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
|
||||
github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg=
|
||||
github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
|
||||
@@ -898,9 +892,6 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B
|
||||
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
|
||||
github.com/aws/smithy-go v1.23.1 h1:sLvcH6dfAFwGkHLZ7dGiYF7aK6mg4CgKA/iDKjLDt9M=
|
||||
github.com/aws/smithy-go v1.23.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20191112132149-a4c4c47bc57f/go.mod h1:2stgcRjl6QmW+gU2h5E7BQXg4HU0gzxKWDuT5HviN9s=
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 h1:60m4tnanN1ctzIu4V3bfCNJ39BiOPSm1gHFlFjTkRE0=
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27/go.mod h1:k08r+Yj1PRAmuayFiRK6MYuR5Ve4IuZtTfxErMIh0+c=
|
||||
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
|
||||
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
|
||||
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df h1:GSoSVRLoBaFpOOds6QyY1L8AX7uoY+Ln3BHc22W40X0=
|
||||
@@ -919,7 +910,6 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
|
||||
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
|
||||
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
|
||||
github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
|
||||
github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
|
||||
github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
|
||||
github.com/bits-and-blooms/bitset v1.22.0 h1:Tquv9S8+SGaS3EhyA+up3FXzmkhxPGjQQCkcs2uw7w4=
|
||||
github.com/bits-and-blooms/bitset v1.22.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
|
||||
@@ -939,21 +929,16 @@ github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+
|
||||
github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
|
||||
github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y=
|
||||
github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
|
||||
github.com/blevesearch/mmap-go v1.0.2/go.mod h1:ol2qBqYaOUsGdm7aRMRrYGgPvnwLe6Y+7LMvAB5IbSA=
|
||||
github.com/blevesearch/mmap-go v1.0.3/go.mod h1:pYvKl/grLQrBxuaRYgoTssa4rVujYYeenDp++2E+yvs=
|
||||
github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
|
||||
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.3.9 h1:X6nJXnNHl7nasXW+U6y2Ns2Aw8F9STszkYkyBfQ+p0o=
|
||||
github.com/blevesearch/scorch_segment_api/v2 v2.3.9/go.mod h1:IrzspZlVjhf4X29oJiEhBxEteTqOY9RlYlk1lCmYHr4=
|
||||
github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ=
|
||||
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
|
||||
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
|
||||
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
|
||||
github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
|
||||
github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A=
|
||||
github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ=
|
||||
github.com/blevesearch/vellum v1.0.5/go.mod h1:atE0EH3fvk43zzS7t1YNdNC7DbmcC3uz+eMD5xZ2OyQ=
|
||||
github.com/blevesearch/vellum v1.0.7/go.mod h1:doBZpmRhwTsASB4QdUZANlJvqVAUdUyX0ZK7QJCTeBE=
|
||||
github.com/blevesearch/vellum v1.1.0 h1:CinkGyIsgVlYf8Y2LUQHvdelgXr6PYuvoDIajq6yR9w=
|
||||
github.com/blevesearch/vellum v1.1.0/go.mod h1:QgwWryE8ThtNPxtgWJof5ndPfx0/YMBh+W2weHKPw8Y=
|
||||
github.com/blevesearch/zapx/v11 v11.4.1 h1:qFCPlFbsEdwbbckJkysptSQOsHn4s6ZOHL5GMAIAVHA=
|
||||
@@ -970,14 +955,6 @@ github.com/blevesearch/zapx/v16 v16.2.2 h1:MifKJVRTEhMTgSlle2bDRTb39BGc9jXFRLPZc
|
||||
github.com/blevesearch/zapx/v16 v16.2.2/go.mod h1:B9Pk4G1CqtErgQV9DyCSA9Lb7WZe4olYfGw7fVDZ4sk=
|
||||
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
|
||||
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
|
||||
github.com/blugelabs/bluge v0.2.2 h1:gat8CqE6P6tOgeX30XGLOVNTC26cpM2RWVcreXWtYcM=
|
||||
github.com/blugelabs/bluge v0.2.2/go.mod h1:am1LU9jS8dZgWkRzkGLQN3757EgMs3upWrU2fdN9foE=
|
||||
github.com/blugelabs/bluge_segment_api v0.2.0 h1:cCX1Y2y8v0LZ7+EEJ6gH7dW6TtVTW4RhG0vp3R+N2Lo=
|
||||
github.com/blugelabs/bluge_segment_api v0.2.0/go.mod h1:95XA+ZXfRj/IXADm7gZ+iTcWOJPg5jQTY1EReIzl3LA=
|
||||
github.com/blugelabs/ice v1.0.0 h1:um7wf9e6jbkTVCrOyQq3tKK43fBMOvLUYxbj3Qtc4eo=
|
||||
github.com/blugelabs/ice v1.0.0/go.mod h1:gNfFPk5zM+yxJROhthxhVQYjpBO9amuxWXJQ2Lo+IbQ=
|
||||
github.com/blugelabs/ice/v2 v2.0.1 h1:mzHbntLjk2v7eDRgoXCgzOsPKN1Tenu9Svo6l9cTLS4=
|
||||
github.com/blugelabs/ice/v2 v2.0.1/go.mod h1:QxAWSPNwZwsIqS25c3lbIPFQrVvT1sphf5x5DfMLH5M=
|
||||
github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w=
|
||||
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
|
||||
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
|
||||
@@ -996,8 +973,6 @@ github.com/bwmarrin/snowflake v0.3.0 h1:xm67bEhkKh6ij1790JB83OujPR5CzNe8QuQqAgIS
|
||||
github.com/bwmarrin/snowflake v0.3.0/go.mod h1:NdZxfVWX+oR6y2K0o6qAYv6gIOP9rjG0/E9WsDpxqwE=
|
||||
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 h1:6lhrsTEnloDPXyeZBvSYvQf8u86jbKehZPVDDlkgDl4=
|
||||
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500/go.mod h1:S/7n9copUssQ56c7aAgHqftWO4LTf4xY6CGWt8Bc+3M=
|
||||
github.com/caio/go-tdigest v3.1.0+incompatible h1:uoVMJ3Q5lXmVLCCqaMGHLBWnbGoN6Lpu7OAUPR60cds=
|
||||
github.com/caio/go-tdigest v3.1.0+incompatible/go.mod h1:sHQM/ubZStBUmF1WbB8FAm8q9GjDajLC5T7ydxE3JHI=
|
||||
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
|
||||
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
|
||||
@@ -1057,9 +1032,6 @@ github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
|
||||
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
|
||||
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
|
||||
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
|
||||
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
|
||||
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||
github.com/coreos/go-semver v0.3.1 h1:yi21YpKnrx1gt5R+la8n5WgS0kCrsPp33dmEyHReZr4=
|
||||
github.com/coreos/go-semver v0.3.1/go.mod h1:irMmmIw/7yzSRPWryHsK7EYSg09caPQL03VsM8rvUec=
|
||||
@@ -1069,7 +1041,6 @@ github.com/coreos/go-systemd/v22 v22.6.0 h1:aGVa/v8B7hpb0TKl0MWoAavPDmHvobFe5R5z
|
||||
github.com/coreos/go-systemd/v22 v22.6.0/go.mod h1:iG+pp635Fo7ZmV/j14KUcmEyWF+0X7Lua8rrTWzYgWU=
|
||||
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
|
||||
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
|
||||
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo=
|
||||
@@ -1101,8 +1072,6 @@ github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINA
|
||||
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
|
||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
|
||||
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
|
||||
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8=
|
||||
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
|
||||
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
|
||||
github.com/dgryski/go-sip13 v0.0.0-20190329191031-25c5027a8c7b/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
|
||||
@@ -1817,10 +1786,8 @@ github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
|
||||
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
|
||||
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
|
||||
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
|
||||
github.com/influxdata/influxdb v1.7.6/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY=
|
||||
github.com/influxdata/influxdb v1.7.7/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY=
|
||||
github.com/influxdata/influxdb-client-go/v2 v2.13.0 h1:ioBbLmR5NMbAjP4UVA5r9b5xGjpABD7j65pI8kFphDM=
|
||||
github.com/influxdata/influxdb-client-go/v2 v2.13.0/go.mod h1:k+spCbt9hcvqvUiz0sr5D8LolXHqAAOfPw9v/RIRHl4=
|
||||
@@ -1921,7 +1888,6 @@ github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCy
|
||||
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
|
||||
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
|
||||
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
|
||||
github.com/klauspost/compress v1.15.2/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
|
||||
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
|
||||
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
|
||||
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
|
||||
@@ -1954,8 +1920,6 @@ github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq
|
||||
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
|
||||
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw=
|
||||
github.com/leesper/go_rng v0.0.0-20190531154944-a612b043e353 h1:X/79QL0b4YJVO5+OsPH9rF2u428CIrGL/jLmPsoOQQ4=
|
||||
github.com/leesper/go_rng v0.0.0-20190531154944-a612b043e353/go.mod h1:N0SVk0uhy+E1PZ3C9ctsPRlvOPAFPkCNlcPBDkt0N3U=
|
||||
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
|
||||
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc h1:RKf14vYWi2ttpEmkA4aQ3j4u9dStX2t4M8UM6qqNsG8=
|
||||
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc/go.mod h1:kopuH9ugFRkIXf3YoqHKyrJ9YfUFsckUU9S7B+XP+is=
|
||||
@@ -1978,7 +1942,6 @@ github.com/madflojo/testcerts v1.4.0 h1:I09gN0C1ly9IgeVNcAqKk8RAKIJTe3QnFrrPBDyv
|
||||
github.com/madflojo/testcerts v1.4.0/go.mod h1:MW8sh39gLnkKh4K0Nc55AyHEDl9l/FBLDUsQhpmkuo0=
|
||||
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
|
||||
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
|
||||
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||
github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
|
||||
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
|
||||
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
|
||||
@@ -2220,7 +2183,6 @@ github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaR
|
||||
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
|
||||
github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o=
|
||||
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
|
||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
|
||||
github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
|
||||
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
|
||||
@@ -2375,7 +2337,6 @@ github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
|
||||
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
|
||||
github.com/russellhaering/goxmldsig v1.4.0 h1:8UcDh/xGyQiyrW+Fq5t8f+l2DLB1+zlhYzkPUJ7Qhys=
|
||||
github.com/russellhaering/goxmldsig v1.4.0/go.mod h1:gM4MDENBQf7M+V824SGfyIUVFWydB7n0KkEubVJl+Tw=
|
||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
|
||||
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
@@ -2441,7 +2402,6 @@ github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cw
|
||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
|
||||
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
|
||||
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
|
||||
github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
|
||||
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
|
||||
@@ -2449,15 +2409,12 @@ github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfA
|
||||
github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y=
|
||||
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
|
||||
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
|
||||
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
|
||||
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
|
||||
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
|
||||
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
|
||||
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
|
||||
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
|
||||
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
|
||||
github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||
@@ -2465,7 +2422,6 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An
|
||||
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
|
||||
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
|
||||
github.com/spf13/viper v1.13.0/go.mod h1:Icm2xNL3/8uyh/wFuB1jI7TiTNKp8632Nwegu+zgdYw=
|
||||
github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
|
||||
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
|
||||
@@ -2530,7 +2486,6 @@ github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaO
|
||||
github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
|
||||
github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg=
|
||||
github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
|
||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
||||
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
|
||||
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
|
||||
github.com/unknwon/bra v0.0.0-20200517080246-1e3013ecaff8 h1:aVGB3YnaS/JNfOW3tiHIlmNmTDg618va+eT0mVomgyI=
|
||||
@@ -2571,7 +2526,6 @@ github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 h1:S2dVYn90KE98chq
|
||||
github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
|
||||
github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ=
|
||||
github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0=
|
||||
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
|
||||
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
|
||||
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
|
||||
@@ -2758,7 +2712,6 @@ gocloud.dev/secrets/hashivault v0.43.0 h1:A966rEMpCRUE9209/+k+A2HP2v2qDnrxGpQn+n
|
||||
gocloud.dev/secrets/hashivault v0.43.0/go.mod h1:KdWKL+TXDi0cXgEd/MTeaidKlotvyJtnTDi71B3rR9U=
|
||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
@@ -3023,8 +2976,6 @@ golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5h
|
||||
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190102155601-82a175fd1598/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
@@ -3297,7 +3248,6 @@ golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6f
|
||||
gomodules.xyz/jsonpatch/v2 v2.5.0 h1:JELs8RLM12qJGXU4u/TO3V25KW8GreMKl9pdkk14RM0=
|
||||
gomodules.xyz/jsonpatch/v2 v2.5.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY=
|
||||
gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
|
||||
gonum.org/v1/gonum v0.7.0/go.mod h1:L02bwd0sqlsvRv41G7wGWFCsVNZFv/k1xzGIxeANHGM=
|
||||
gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
|
||||
gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0=
|
||||
gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA=
|
||||
|
||||
@@ -493,6 +493,8 @@ github.com/aws/smithy-go v1.22.5/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp
|
||||
github.com/aws/smithy-go v1.23.0/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=
|
||||
github.com/awslabs/aws-lambda-go-api-proxy v0.16.2 h1:CJyGEyO1CIwOnXTU40urf0mchf6t3voxpvUDikOU9LY=
|
||||
github.com/awslabs/aws-lambda-go-api-proxy v0.16.2/go.mod h1:vxxjwBHe/KbgFeNlAP/Tvp4SsVRL3WQamcWRxqVh0z0=
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 h1:60m4tnanN1ctzIu4V3bfCNJ39BiOPSm1gHFlFjTkRE0=
|
||||
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27/go.mod h1:k08r+Yj1PRAmuayFiRK6MYuR5Ve4IuZtTfxErMIh0+c=
|
||||
github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8=
|
||||
github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA=
|
||||
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
|
||||
@@ -533,12 +535,12 @@ github.com/campoy/embedmd v1.0.0 h1:V4kI2qTJJLf4J29RzI/MAt2c3Bl4dQSYPuflzwFH2hY=
|
||||
github.com/campoy/embedmd v1.0.0/go.mod h1:oxyr9RCiSXg0M3VJ3ks0UGfp98BpSSGr0kpiX3MzVl8=
|
||||
github.com/cenkalti/backoff/v5 v5.0.2/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
|
||||
github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g=
|
||||
github.com/centrifugal/centrifuge v0.37.2/go.mod h1:aj4iRJGhzi3SlL8iUtVezxway1Xf8g+hmNQkLLO7sS8=
|
||||
github.com/centrifugal/protocol v0.16.2/go.mod h1:Q7OpS/8HMXDnL7f9DpNx24IhG96MP88WPpVTTCdrokI=
|
||||
github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ=
|
||||
github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao=
|
||||
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ=
|
||||
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
|
||||
github.com/centrifugal/centrifuge v0.37.2/go.mod h1:aj4iRJGhzi3SlL8iUtVezxway1Xf8g+hmNQkLLO7sS8=
|
||||
github.com/centrifugal/protocol v0.16.2/go.mod h1:Q7OpS/8HMXDnL7f9DpNx24IhG96MP88WPpVTTCdrokI=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
|
||||
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
|
||||
github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
|
||||
@@ -657,6 +659,8 @@ github.com/denisenkom/go-mssqldb v0.10.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27
|
||||
github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=
|
||||
github.com/dgryski/go-ddmin v0.0.0-20210904190556-96a6d69f1034 h1:BuCyszxPxUjBrYW2HNVrimC0rBUs2U27jCJGVh0IKTM=
|
||||
github.com/dgryski/go-ddmin v0.0.0-20210904190556-96a6d69f1034/go.mod h1:zz4KxBkcXUWKjIcrc+uphJ1gPh/t18ymGm3PmQ+VGTk=
|
||||
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8=
|
||||
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
|
||||
github.com/dgryski/go-sip13 v0.0.0-20190329191031-25c5027a8c7b h1:Yqiad0+sloMPdd/0Fg22actpFx0dekpzt1xJmVNVkU0=
|
||||
github.com/dhui/dktest v0.3.0 h1:kwX5a7EkLcjo7VpsPQSYJcKGbXBXdjI9FGjuUj1jn6I=
|
||||
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
|
||||
|
||||
@@ -285,6 +285,10 @@ const injectedRtkApi = api
|
||||
query: (queryArg) => ({ url: `/snapshots/delete/${queryArg.deleteKey}`, method: 'DELETE' }),
|
||||
invalidatesTags: ['Snapshot'],
|
||||
}),
|
||||
getSnapshotSettings: build.query<GetSnapshotSettingsApiResponse, GetSnapshotSettingsApiArg>({
|
||||
query: () => ({ url: `/snapshots/settings` }),
|
||||
providesTags: ['Snapshot'],
|
||||
}),
|
||||
getSnapshot: build.query<GetSnapshotApiResponse, GetSnapshotApiArg>({
|
||||
query: (queryArg) => ({
|
||||
url: `/snapshots/${queryArg.name}`,
|
||||
@@ -742,6 +746,8 @@ export type DeleteWithKeyApiArg = {
|
||||
/** unique key returned in create */
|
||||
deleteKey: string;
|
||||
};
|
||||
export type GetSnapshotSettingsApiResponse = /** status 200 undefined */ any;
|
||||
export type GetSnapshotSettingsApiArg = void;
|
||||
export type GetSnapshotApiResponse = /** status 200 OK */ Snapshot;
|
||||
export type GetSnapshotApiArg = {
|
||||
/** name of the Snapshot */
|
||||
@@ -1273,6 +1279,8 @@ export const {
|
||||
useLazyListSnapshotQuery,
|
||||
useCreateSnapshotMutation,
|
||||
useDeleteWithKeyMutation,
|
||||
useGetSnapshotSettingsQuery,
|
||||
useLazyGetSnapshotSettingsQuery,
|
||||
useGetSnapshotQuery,
|
||||
useLazyGetSnapshotQuery,
|
||||
useDeleteSnapshotMutation,
|
||||
|
||||
@@ -356,10 +356,6 @@ export interface FeatureToggles {
|
||||
*/
|
||||
dashboardNewLayouts?: boolean;
|
||||
/**
|
||||
* Use the v2 kubernetes API in the frontend for dashboards
|
||||
*/
|
||||
kubernetesDashboardsV2?: boolean;
|
||||
/**
|
||||
* Enables undo/redo in dynamic dashboards
|
||||
*/
|
||||
dashboardUndoRedo?: boolean;
|
||||
@@ -421,6 +417,10 @@ export interface FeatureToggles {
|
||||
*/
|
||||
jitterAlertRulesWithinGroups?: boolean;
|
||||
/**
|
||||
* Enable audit logging with Kubernetes under app platform
|
||||
*/
|
||||
auditLoggingAppPlatform?: boolean;
|
||||
/**
|
||||
* Enable the secrets management API and services under app platform
|
||||
*/
|
||||
secretsManagementAppPlatform?: boolean;
|
||||
|
||||
@@ -48,7 +48,7 @@ describe('MetricsModal', () => {
|
||||
operations: [],
|
||||
};
|
||||
|
||||
setup(query, ['with-labels'], true);
|
||||
setup(query, ['with-labels']);
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('with-labels')).toBeInTheDocument();
|
||||
});
|
||||
@@ -220,6 +220,10 @@ function createDatasource(withLabels?: boolean) {
|
||||
// display different results if their labels are selected in the PromVisualQuery
|
||||
if (withLabels) {
|
||||
languageProvider.queryMetricsMetadata = jest.fn().mockResolvedValue({
|
||||
ALERTS: {
|
||||
type: 'gauge',
|
||||
help: 'alerts help text',
|
||||
},
|
||||
'with-labels': {
|
||||
type: 'with-labels-type',
|
||||
help: 'with-labels-help',
|
||||
@@ -297,7 +301,7 @@ function createProps(query: PromVisualQuery, datasource: PrometheusDatasource, m
|
||||
};
|
||||
}
|
||||
|
||||
function setup(query: PromVisualQuery, metrics: string[], withlabels?: boolean) {
|
||||
function setup(query: PromVisualQuery, metrics: string[]) {
|
||||
const withLabels: boolean = query.labels.length > 0;
|
||||
const datasource = createDatasource(withLabels);
|
||||
const props = createProps(query, datasource, metrics);
|
||||
|
||||
@@ -138,7 +138,7 @@ const MetricsModalContent = (props: MetricsModalProps) => {
|
||||
|
||||
export const MetricsModal = (props: MetricsModalProps) => {
|
||||
return (
|
||||
<MetricsModalContextProvider languageProvider={props.datasource.languageProvider}>
|
||||
<MetricsModalContextProvider languageProvider={props.datasource.languageProvider} timeRange={props.timeRange}>
|
||||
<MetricsModalContent {...props} />
|
||||
</MetricsModalContextProvider>
|
||||
);
|
||||
|
||||
@@ -4,6 +4,7 @@ import { ReactNode } from 'react';
|
||||
import { TimeRange } from '@grafana/data';
|
||||
|
||||
import { PrometheusLanguageProviderInterface } from '../../../language_provider';
|
||||
import { getMockTimeRange } from '../../../test/mocks/datasource';
|
||||
|
||||
import { DEFAULT_RESULTS_PER_PAGE, MetricsModalContextProvider, useMetricsModal } from './MetricsModalContext';
|
||||
import { generateMetricData } from './helpers';
|
||||
@@ -25,7 +26,9 @@ const mockLanguageProvider: PrometheusLanguageProviderInterface = {
|
||||
// Helper to create wrapper component
|
||||
const createWrapper = (languageProvider = mockLanguageProvider) => {
|
||||
return ({ children }: { children: ReactNode }) => (
|
||||
<MetricsModalContextProvider languageProvider={languageProvider}>{children}</MetricsModalContextProvider>
|
||||
<MetricsModalContextProvider languageProvider={languageProvider} timeRange={getMockTimeRange()}>
|
||||
{children}
|
||||
</MetricsModalContextProvider>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -167,6 +170,7 @@ describe('MetricsModalContext', () => {
|
||||
|
||||
it('should handle empty metadata response', async () => {
|
||||
(mockLanguageProvider.queryMetricsMetadata as jest.Mock).mockResolvedValue({});
|
||||
(mockLanguageProvider.queryLabelValues as jest.Mock).mockResolvedValue(['metric1', 'metric2']);
|
||||
|
||||
const { result } = renderHook(() => useMetricsModal(), {
|
||||
wrapper: createWrapper(),
|
||||
@@ -176,7 +180,18 @@ describe('MetricsModalContext', () => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.filteredMetricsData).toEqual([]);
|
||||
expect(result.current.filteredMetricsData).toEqual([
|
||||
{
|
||||
value: 'metric1',
|
||||
type: 'counter',
|
||||
description: 'Test metric',
|
||||
},
|
||||
{
|
||||
value: 'metric2',
|
||||
type: 'counter',
|
||||
description: 'Test metric',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle metadata fetch error', async () => {
|
||||
@@ -239,6 +254,7 @@ describe('MetricsModalContext', () => {
|
||||
}));
|
||||
|
||||
(mockLanguageProvider.queryMetricsMetadata as jest.Mock).mockResolvedValue({
|
||||
ALERTS: { type: 'gauge', help: 'Test alerts help' },
|
||||
test_metric: { type: 'counter', help: 'Test metric' },
|
||||
});
|
||||
|
||||
@@ -250,7 +266,7 @@ describe('MetricsModalContext', () => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.filteredMetricsData).toHaveLength(1);
|
||||
expect(result.current.filteredMetricsData).toHaveLength(2);
|
||||
expect(result.current.selectedTypes).toEqual([]);
|
||||
});
|
||||
|
||||
@@ -318,7 +334,7 @@ describe('MetricsModalContext', () => {
|
||||
};
|
||||
|
||||
const { getByTestId } = render(
|
||||
<MetricsModalContextProvider languageProvider={mockLanguageProvider}>
|
||||
<MetricsModalContextProvider languageProvider={mockLanguageProvider} timeRange={getMockTimeRange()}>
|
||||
<TestComponent />
|
||||
</MetricsModalContextProvider>
|
||||
);
|
||||
|
||||
@@ -52,11 +52,13 @@ const MetricsModalContext = createContext<MetricsModalContextValue | undefined>(
|
||||
|
||||
type MetricsModalContextProviderProps = {
|
||||
languageProvider: PrometheusLanguageProviderInterface;
|
||||
timeRange: TimeRange;
|
||||
};
|
||||
|
||||
export const MetricsModalContextProvider: FC<PropsWithChildren<MetricsModalContextProviderProps>> = ({
|
||||
children,
|
||||
languageProvider,
|
||||
timeRange,
|
||||
}) => {
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [metricsData, setMetricsData] = useState<MetricsData>([]);
|
||||
@@ -111,8 +113,16 @@ export const MetricsModalContextProvider: FC<PropsWithChildren<MetricsModalConte
|
||||
setIsLoading(true);
|
||||
const metadata = await languageProvider.queryMetricsMetadata(PROMETHEUS_QUERY_BUILDER_MAX_RESULTS);
|
||||
|
||||
if (Object.keys(metadata).length === 0) {
|
||||
setMetricsData([]);
|
||||
// We receive ALERTS metadata in any case
|
||||
if (Object.keys(metadata).length <= 1) {
|
||||
const fetchedMetrics = await languageProvider.queryLabelValues(
|
||||
timeRange,
|
||||
METRIC_LABEL,
|
||||
undefined,
|
||||
PROMETHEUS_QUERY_BUILDER_MAX_RESULTS
|
||||
);
|
||||
const processedData = fetchedMetrics.map((m) => generateMetricData(m, languageProvider));
|
||||
setMetricsData(processedData);
|
||||
} else {
|
||||
const processedData = Object.keys(metadata).map((m) => generateMetricData(m, languageProvider));
|
||||
setMetricsData(processedData);
|
||||
@@ -122,7 +132,7 @@ export const MetricsModalContextProvider: FC<PropsWithChildren<MetricsModalConte
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [languageProvider]);
|
||||
}, [languageProvider, timeRange]);
|
||||
|
||||
const debouncedBackendSearch = useMemo(
|
||||
() =>
|
||||
|
||||
@@ -17,8 +17,9 @@ export interface SparklineProps extends Themeable2 {
|
||||
showHighlights?: boolean;
|
||||
}
|
||||
|
||||
export const SparklineFn: React.FC<SparklineProps> = memo((props) => {
|
||||
export const Sparkline: React.FC<SparklineProps> = memo((props) => {
|
||||
const { sparkline, config: fieldConfig, theme, width, height, showHighlights } = props;
|
||||
|
||||
const { frame: alignedDataFrame, warning } = prepareSeries(sparkline, theme, fieldConfig, showHighlights);
|
||||
if (warning) {
|
||||
return null;
|
||||
@@ -30,14 +31,4 @@ export const SparklineFn: React.FC<SparklineProps> = memo((props) => {
|
||||
return <UPlotChart data={data} config={configBuilder} width={width} height={height} />;
|
||||
});
|
||||
|
||||
SparklineFn.displayName = 'Sparkline';
|
||||
|
||||
// we converted to function component above, but some apps extend Sparkline, so we need
|
||||
// to keep exporting a class component until those apps are all rolled out.
|
||||
// see https://github.com/grafana/app-observability-plugin/pull/2079
|
||||
// eslint-disable-next-line react-prefer-function-component/react-prefer-function-component
|
||||
export class Sparkline extends React.PureComponent<SparklineProps> {
|
||||
render() {
|
||||
return <SparklineFn {...this.props} />;
|
||||
}
|
||||
}
|
||||
Sparkline.displayName = 'Sparkline';
|
||||
|
||||
@@ -327,11 +327,6 @@ func (hs *HTTPServer) registerRoutes() {
|
||||
apiRoute.Group("/storage", hs.StorageService.RegisterHTTPRoutes)
|
||||
}
|
||||
|
||||
//nolint:staticcheck // not yet migrated to OpenFeature
|
||||
if hs.Features.IsEnabledGlobally(featuremgmt.FlagPanelTitleSearch) {
|
||||
apiRoute.Group("/search-v2", hs.SearchV2HTTPService.RegisterHTTPRoutes)
|
||||
}
|
||||
|
||||
// current org
|
||||
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
|
||||
userIDScope := ac.Scope("users", "id", ac.Parameter(":userId"))
|
||||
|
||||
@@ -25,7 +25,6 @@ import (
|
||||
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
"github.com/prometheus/client_golang/prometheus/promhttp"
|
||||
|
||||
"github.com/youmark/pkcs8"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/avatar"
|
||||
@@ -95,7 +94,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/services/rendering"
|
||||
"github.com/grafana/grafana/pkg/services/search"
|
||||
"github.com/grafana/grafana/pkg/services/searchV2"
|
||||
"github.com/grafana/grafana/pkg/services/searchusers"
|
||||
"github.com/grafana/grafana/pkg/services/secrets"
|
||||
secretsKV "github.com/grafana/grafana/pkg/services/secrets/kvstore"
|
||||
@@ -160,7 +158,6 @@ type HTTPServer struct {
|
||||
Live *live.GrafanaLive
|
||||
LivePushGateway *pushhttp.Gateway
|
||||
StorageService store.StorageService
|
||||
SearchV2HTTPService searchV2.SearchHTTPService
|
||||
ContextHandler *contexthandler.ContextHandler
|
||||
LoggerMiddleware loggermw.Logger
|
||||
SQLStore db.DB
|
||||
@@ -271,7 +268,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
|
||||
publicDashboardsApi *publicdashboardsApi.Api, userService user.Service, tempUserService tempUser.Service,
|
||||
loginAttemptService loginAttempt.Service, orgService org.Service, orgDeletionService org.DeletionService, teamService team.Service,
|
||||
accesscontrolService accesscontrol.Service, navTreeService navtree.Service,
|
||||
annotationRepo annotations.Repository, tagService tag.Service, searchv2HTTPService searchV2.SearchHTTPService, oauthTokenService oauthtoken.OAuthTokenService,
|
||||
annotationRepo annotations.Repository, tagService tag.Service, oauthTokenService oauthtoken.OAuthTokenService,
|
||||
statsService stats.Service, authnService authn.Service, pluginsCDNService *pluginscdn.Service, promGatherer prometheus.Gatherer,
|
||||
starApi *starApi.API, promRegister prometheus.Registerer, clientConfigProvider grafanaapiserver.DirectRestConfigProvider, anonService anonymous.Service,
|
||||
userVerifier user.Verifier, pluginPreinstall pluginchecker.Preinstall,
|
||||
@@ -313,7 +310,6 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
|
||||
ProvisioningService: provisioningService,
|
||||
AccessControl: accessControl,
|
||||
DataProxy: dataSourceProxy,
|
||||
SearchV2HTTPService: searchv2HTTPService,
|
||||
SearchService: searchService,
|
||||
Live: live,
|
||||
LivePushGateway: livePushGateway,
|
||||
|
||||
88
pkg/apiserver/auditing/event.go
Normal file
88
pkg/apiserver/auditing/event.go
Normal file
@@ -0,0 +1,88 @@
|
||||
package auditing
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
type Event struct {
|
||||
// The namespace the action was performed in.
|
||||
Namespace string `json:"namespace"`
|
||||
|
||||
// When it happened.
|
||||
ObservedAt time.Time `json:"-"` // see MarshalJSON for why this is omitted
|
||||
|
||||
// Who/what performed the action.
|
||||
SubjectName string `json:"subjectName"`
|
||||
SubjectUID string `json:"subjectUID"`
|
||||
|
||||
// What was performed.
|
||||
Verb string `json:"verb"`
|
||||
|
||||
// The object the action was performed on. For verbs like "list" this will be empty.
|
||||
Object string `json:"object,omitempty"`
|
||||
|
||||
// API information.
|
||||
APIGroup string `json:"apiGroup,omitempty"`
|
||||
APIVersion string `json:"apiVersion,omitempty"`
|
||||
Kind string `json:"kind,omitempty"`
|
||||
|
||||
// Outcome of the action.
|
||||
Outcome EventOutcome `json:"outcome"`
|
||||
|
||||
// Extra fields to add more context to the event.
|
||||
Extra map[string]string `json:"extra,omitempty"`
|
||||
}
|
||||
|
||||
func (e Event) Time() time.Time {
|
||||
return e.ObservedAt
|
||||
}
|
||||
|
||||
func (e Event) MarshalJSON() ([]byte, error) {
|
||||
type Alias Event
|
||||
return json.Marshal(&struct {
|
||||
FormattedTimestamp string `json:"observedAt"`
|
||||
Alias
|
||||
}{
|
||||
FormattedTimestamp: e.ObservedAt.UTC().Format(time.RFC3339Nano),
|
||||
Alias: (Alias)(e),
|
||||
})
|
||||
}
|
||||
|
||||
func (e Event) KVPairs() []any {
|
||||
args := []any{
|
||||
"audit", true,
|
||||
"namespace", e.Namespace,
|
||||
"observedAt", e.ObservedAt.UTC().Format(time.RFC3339Nano),
|
||||
"subjectName", e.SubjectName,
|
||||
"subjectUID", e.SubjectUID,
|
||||
"verb", e.Verb,
|
||||
"object", e.Object,
|
||||
"apiGroup", e.APIGroup,
|
||||
"apiVersion", e.APIVersion,
|
||||
"kind", e.Kind,
|
||||
"outcome", e.Outcome,
|
||||
}
|
||||
|
||||
if len(e.Extra) > 0 {
|
||||
extraArgs := make([]any, 0, len(e.Extra)*2)
|
||||
|
||||
for k, v := range e.Extra {
|
||||
extraArgs = append(extraArgs, "extra_"+k, v)
|
||||
}
|
||||
|
||||
args = append(args, extraArgs...)
|
||||
}
|
||||
|
||||
return args
|
||||
}
|
||||
|
||||
type EventOutcome string
|
||||
|
||||
const (
|
||||
EventOutcomeUnknown EventOutcome = "unknown"
|
||||
EventOutcomeSuccess EventOutcome = "success"
|
||||
EventOutcomeFailureUnauthorized EventOutcome = "failure_unauthorized"
|
||||
EventOutcomeFailureNotFound EventOutcome = "failure_not_found"
|
||||
EventOutcomeFailureGeneric EventOutcome = "failure_generic"
|
||||
)
|
||||
64
pkg/apiserver/auditing/event_test.go
Normal file
64
pkg/apiserver/auditing/event_test.go
Normal file
@@ -0,0 +1,64 @@
|
||||
package auditing_test
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/grafana/grafana/pkg/apiserver/auditing"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestEvent_MarshalJSON(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("marshals the event", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
now := time.Now()
|
||||
|
||||
event := auditing.Event{
|
||||
ObservedAt: now,
|
||||
Extra: map[string]string{"k1": "v1", "k2": "v2"},
|
||||
}
|
||||
|
||||
data, err := json.Marshal(event)
|
||||
require.NoError(t, err)
|
||||
|
||||
var result map[string]any
|
||||
require.NoError(t, json.Unmarshal(data, &result))
|
||||
|
||||
require.Equal(t, event.Time().UTC().Format(time.RFC3339Nano), result["observedAt"])
|
||||
require.NotNil(t, result["extra"])
|
||||
require.Len(t, result["extra"], 2)
|
||||
})
|
||||
}
|
||||
|
||||
func TestEvent_KVPairs(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
t.Run("records extra fields", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
extraFields := 2
|
||||
extra := make(map[string]string, 0)
|
||||
for i := 0; i < extraFields; i++ {
|
||||
extra[strconv.Itoa(i)] = "value"
|
||||
}
|
||||
|
||||
event := auditing.Event{Extra: extra}
|
||||
|
||||
kvPairs := event.KVPairs()
|
||||
|
||||
extraCount := 0
|
||||
for i := 0; i < len(kvPairs); i += 2 {
|
||||
if strings.HasPrefix(kvPairs[i].(string), "extra_") {
|
||||
extraCount++
|
||||
}
|
||||
}
|
||||
|
||||
require.Equal(t, extraCount, extraFields)
|
||||
})
|
||||
}
|
||||
55
pkg/apiserver/auditing/logger.go
Normal file
55
pkg/apiserver/auditing/logger.go
Normal file
@@ -0,0 +1,55 @@
|
||||
package auditing
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Sinkable is a log entry abstraction that can be sent to an audit log sink through the different implementing methods.
|
||||
type Sinkable interface {
|
||||
json.Marshaler
|
||||
KVPairs() []any
|
||||
Time() time.Time
|
||||
}
|
||||
|
||||
// Logger specifies the contract for a specific audit logger.
|
||||
type Logger interface {
|
||||
Log(entry Sinkable) error
|
||||
Close() error
|
||||
Type() string
|
||||
}
|
||||
|
||||
// Implementation inspired by https://github.com/grafana/grafana-app-sdk/blob/main/logging/logger.go
|
||||
type loggerContextKey struct{}
|
||||
|
||||
var (
|
||||
// DefaultLogger is the default Logger if one hasn't been provided in the context.
|
||||
// You may use this to add arbitrary audit logging outside of an API request lifecycle.
|
||||
DefaultLogger Logger = &NoopLogger{}
|
||||
|
||||
contextKey = loggerContextKey{}
|
||||
)
|
||||
|
||||
// FromContext returns the Logger set in the context with Context(), or the DefaultLogger if no Logger is set in the context.
|
||||
// If DefaultLogger is nil, it returns a *NoopLogger so that the return is always valid to call methods on without nil-checking.
|
||||
// You may use this to add arbitrary audit logging outside of an API request lifecycle.
|
||||
func FromContext(ctx context.Context) Logger {
|
||||
if l := ctx.Value(contextKey); l != nil {
|
||||
if logger, ok := l.(Logger); ok {
|
||||
return logger
|
||||
}
|
||||
}
|
||||
|
||||
if DefaultLogger != nil {
|
||||
return DefaultLogger
|
||||
}
|
||||
|
||||
return &NoopLogger{}
|
||||
}
|
||||
|
||||
// Context returns a new context built from the provided context with the provided logger in it.
|
||||
// The Logger added with Context() can be retrieved with FromContext()
|
||||
func Context(ctx context.Context, logger Logger) context.Context {
|
||||
return context.WithValue(ctx, contextKey, logger)
|
||||
}
|
||||
@@ -11,9 +11,9 @@ type NoopBackend struct{}
|
||||
|
||||
func ProvideNoopBackend() audit.Backend { return &NoopBackend{} }
|
||||
|
||||
func (b *NoopBackend) ProcessEvents(k8sEvents ...*auditinternal.Event) bool { return false }
|
||||
func (NoopBackend) ProcessEvents(...*auditinternal.Event) bool { return false }
|
||||
|
||||
func (NoopBackend) Run(stopCh <-chan struct{}) error { return nil }
|
||||
func (NoopBackend) Run(<-chan struct{}) error { return nil }
|
||||
|
||||
func (NoopBackend) Shutdown() {}
|
||||
|
||||
@@ -34,3 +34,14 @@ type NoopPolicyRuleEvaluator struct{}
|
||||
func (NoopPolicyRuleEvaluator) EvaluatePolicyRule(authorizer.Attributes) audit.RequestAuditConfig {
|
||||
return audit.RequestAuditConfig{Level: auditinternal.LevelNone}
|
||||
}
|
||||
|
||||
// NoopLogger is a no-op implementation of Logger
|
||||
type NoopLogger struct{}
|
||||
|
||||
func ProvideNoopLogger() Logger { return &NoopLogger{} }
|
||||
|
||||
func (NoopLogger) Type() string { return "noop" }
|
||||
|
||||
func (NoopLogger) Log(Sinkable) error { return nil }
|
||||
|
||||
func (NoopLogger) Close() error { return nil }
|
||||
|
||||
@@ -46,14 +46,23 @@ func (defaultGrafanaPolicyRuleEvaluator) EvaluatePolicyRule(attrs authorizer.Att
|
||||
}
|
||||
}
|
||||
|
||||
// Logging the response object allows us to get the resource name for create requests.
|
||||
level := auditinternal.LevelMetadata
|
||||
if attrs.GetVerb() == utils.VerbCreate {
|
||||
level = auditinternal.LevelRequestResponse
|
||||
}
|
||||
|
||||
return audit.RequestAuditConfig{
|
||||
Level: auditinternal.LevelMetadata,
|
||||
Level: level,
|
||||
|
||||
// Only log on StageResponseComplete, to avoid noisy logs.
|
||||
OmitStages: []auditinternal.Stage{
|
||||
// Only log on StageResponseComplete
|
||||
auditinternal.StageRequestReceived,
|
||||
auditinternal.StageResponseStarted,
|
||||
auditinternal.StagePanic,
|
||||
},
|
||||
OmitManagedFields: false, // Setting it to true causes extra copying/unmarshalling.
|
||||
|
||||
// Setting it to true causes extra copying/unmarshalling.
|
||||
OmitManagedFields: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,7 +55,7 @@ func TestDefaultGrafanaPolicyRuleEvaluator(t *testing.T) {
|
||||
require.Equal(t, auditinternal.LevelNone, config.Level)
|
||||
})
|
||||
|
||||
t.Run("return audit level metadata for other resource requests", func(t *testing.T) {
|
||||
t.Run("return audit level request+response for create requests", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
attrs := authorizer.AttributesRecord{
|
||||
@@ -67,6 +67,22 @@ func TestDefaultGrafanaPolicyRuleEvaluator(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
config := evaluator.EvaluatePolicyRule(attrs)
|
||||
require.Equal(t, auditinternal.LevelRequestResponse, config.Level)
|
||||
})
|
||||
|
||||
t.Run("return audit level metadata for other resource requests", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
attrs := authorizer.AttributesRecord{
|
||||
ResourceRequest: true,
|
||||
Verb: utils.VerbGet,
|
||||
User: &user.DefaultInfo{
|
||||
Name: "test-user",
|
||||
Groups: []string{"test-group"},
|
||||
},
|
||||
}
|
||||
|
||||
config := evaluator.EvaluatePolicyRule(attrs)
|
||||
require.Equal(t, auditinternal.LevelMetadata, config.Level)
|
||||
})
|
||||
|
||||
@@ -12,8 +12,6 @@ import (
|
||||
_ "github.com/Azure/go-autorest/autorest"
|
||||
_ "github.com/Azure/go-autorest/autorest/adal"
|
||||
_ "github.com/beevik/etree"
|
||||
_ "github.com/blugelabs/bluge"
|
||||
_ "github.com/blugelabs/bluge_segment_api"
|
||||
_ "github.com/crewjam/saml"
|
||||
_ "github.com/docker/go-connections/nat"
|
||||
_ "github.com/go-jose/go-jose/v4"
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/configprovider"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
apierrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
@@ -62,7 +63,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/services/search/sort"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/legacysql"
|
||||
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/apistore"
|
||||
@@ -128,7 +128,6 @@ type DashboardsAPIBuilder struct {
|
||||
}
|
||||
|
||||
func RegisterAPIService(
|
||||
cfg *setting.Cfg,
|
||||
features featuremgmt.FeatureToggles,
|
||||
apiregistration builder.APIRegistrar,
|
||||
dashboardService dashboards.DashboardService,
|
||||
@@ -154,7 +153,14 @@ func RegisterAPIService(
|
||||
publicDashboardService publicdashboards.Service,
|
||||
snapshotService dashboardsnapshots.Service,
|
||||
dashboardActivityChannel live.DashboardActivityChannel,
|
||||
configProvider configprovider.ConfigProvider,
|
||||
) *DashboardsAPIBuilder {
|
||||
cfg, err := configProvider.Get(context.Background())
|
||||
if err != nil {
|
||||
logging.DefaultLogger.Error("failed to load settings configuration instance", "stackId", cfg.StackID, "err", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
dbp := legacysql.NewDatabaseProvider(sql)
|
||||
namespacer := request.GetNamespaceMapper(cfg)
|
||||
legacyDashboardSearcher := legacysearcher.NewDashboardSearchClient(dashStore, sorter)
|
||||
@@ -237,7 +243,7 @@ func NewAPIService(ac authlib.AccessClient, features featuremgmt.FeatureToggles,
|
||||
}
|
||||
|
||||
func (b *DashboardsAPIBuilder) GetGroupVersions() []schema.GroupVersion {
|
||||
if featuremgmt.AnyEnabled(b.features, featuremgmt.FlagDashboardNewLayouts, featuremgmt.FlagKubernetesDashboardsV2) {
|
||||
if featuremgmt.AnyEnabled(b.features, featuremgmt.FlagDashboardNewLayouts) {
|
||||
// If dashboards v2 is enabled, we want to use v2beta1 as the default API version.
|
||||
return []schema.GroupVersion{
|
||||
dashv2beta1.DashboardResourceInfo.GroupVersion(),
|
||||
@@ -747,7 +753,6 @@ func (b *DashboardsAPIBuilder) storageForVersion(
|
||||
ResourceInfo: *snapshots,
|
||||
Service: b.snapshotService,
|
||||
Namespacer: b.namespacer,
|
||||
Options: b.snapshotOptions,
|
||||
}
|
||||
storage[snapshots.StoragePath()] = snapshotLegacyStore
|
||||
storage[snapshots.StoragePath("dashboard")], err = snapshot.NewDashboardREST(dashboards, b.snapshotService)
|
||||
|
||||
@@ -29,6 +29,8 @@ func GetRoutes(service dashboardsnapshots.Service, options dashv0.SnapshotSharin
|
||||
createCmd := defs["github.com/grafana/grafana/apps/dashboard/pkg/apissnapshot/v0alpha1.DashboardCreateCommand"].Schema
|
||||
createExample := `{"dashboard":{"annotations":{"list":[{"name":"Annotations & Alerts","enable":true,"iconColor":"rgba(0, 211, 255, 1)","snapshotData":[],"type":"dashboard","builtIn":1,"hide":true}]},"editable":true,"fiscalYearStartMonth":0,"graphTooltip":0,"id":203,"links":[],"liveNow":false,"panels":[{"datasource":null,"fieldConfig":{"defaults":{"color":{"mode":"palette-classic"},"custom":{"axisBorderShow":false,"axisCenteredZero":false,"axisColorMode":"text","axisLabel":"","axisPlacement":"auto","barAlignment":0,"drawStyle":"line","fillOpacity":43,"gradientMode":"opacity","hideFrom":{"legend":false,"tooltip":false,"viz":false},"insertNulls":false,"lineInterpolation":"smooth","lineWidth":1,"pointSize":5,"scaleDistribution":{"type":"linear"},"showPoints":"auto","spanNulls":false,"stacking":{"group":"A","mode":"none"},"thresholdsStyle":{"mode":"off"}},"mappings":[],"thresholds":{"mode":"absolute","steps":[{"color":"green","value":null},{"color":"red","value":80}]},"unitScale":true},"overrides":[]},"gridPos":{"h":8,"w":12,"x":0,"y":0},"id":1,"options":{"legend":{"calcs":[],"displayMode":"list","placement":"bottom","showLegend":true},"tooltip":{"mode":"single","sort":"none"}},"pluginVersion":"10.4.0-pre","snapshotData":[{"fields":[{"config":{"color":{"mode":"palette-classic"},"custom":{"axisBorderShow":false,"axisCenteredZero":false,"axisColorMode":"text","axisPlacement":"auto","barAlignment":0,"drawStyle":"line","fillOpacity":43,"gradientMode":"opacity","hideFrom":{"legend":false,"tooltip":false,"viz":false},"lineInterpolation":"smooth","lineWidth":1,"pointSize":5,"showPoints":"auto","thresholdsStyle":{"mode":"off"}},"thresholds":{"mode":"absolute","steps":[{"color":"green","value":null},{"color":"red","value":80}]},"unitScale":true},"name":"time","type":"time","values":[1706030536378,1706034856378,1706039176378,1706043496378,1706047816378,1706052136378]},{"config":{"color":{"mode":"palette-classic"},"custom":{"axisBorderShow":false,"axisCenteredZero":false,"axisColor
Mode":"text","axisLabel":"","axisPlacement":"auto","barAlignment":0,"drawStyle":"line","fillOpacity":43,"gradientMode":"opacity","hideFrom":{"legend":false,"tooltip":false,"viz":false},"insertNulls":false,"lineInterpolation":"smooth","lineWidth":1,"pointSize":5,"scaleDistribution":{"type":"linear"},"showPoints":"auto","spanNulls":false,"stacking":{"group":"A","mode":"none"},"thresholdsStyle":{"mode":"off"}},"mappings":[],"thresholds":{"mode":"absolute","steps":[{"color":"green","value":null},{"color":"red","value":80}]},"unitScale":true},"name":"A-series","type":"number","values":[1,20,90,30,50,0]}],"refId":"A"}],"targets":[],"title":"Simple example","type":"timeseries","links":[]}],"refresh":"","schemaVersion":39,"snapshot":{"timestamp":"2024-01-23T23:22:16.377Z"},"tags":[],"templating":{"list":[]},"time":{"from":"2024-01-23T17:22:20.380Z","to":"2024-01-23T23:22:20.380Z","raw":{"from":"now-6h","to":"now"}},"timepicker":{},"timezone":"","title":"simple and small","uid":"b22ec8db-399b-403b-b6c7-b0fb30ccb2a5","version":1,"weekStart":""},"name":"simple and small","expires":86400}`
|
||||
createRsp := defs["github.com/grafana/grafana/apps/dashboard/pkg/apissnapshot/v0alpha1.DashboardCreateResponse"].Schema
|
||||
getSettingsRsp := defs["github.com/grafana/grafana/apps/dashboard/pkg/apissnapshot/v0alpha1.SnapshotSharingOptions"].Schema
|
||||
getSettingsRspExample := `{"snapshotsEnabled":true,"externalSnapshotURL":"https://externalurl.com","externalSnapshotName":"external","externalEnabled":true}`
|
||||
|
||||
return &builder.APIRoutes{
|
||||
Namespace: []builder.APIRouteHandler{
|
||||
@@ -167,5 +169,84 @@ func GetRoutes(service dashboardsnapshots.Service, options dashv0.SnapshotSharin
|
||||
})
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: prefix + "/settings",
|
||||
Spec: &spec3.PathProps{
|
||||
Get: &spec3.Operation{
|
||||
VendorExtensible: spec.VendorExtensible{
|
||||
Extensions: map[string]any{
|
||||
"x-grafana-action": "get",
|
||||
"x-kubernetes-group-version-kind": metav1.GroupVersionKind{
|
||||
Group: dashv0.GROUP,
|
||||
Version: dashv0.VERSION,
|
||||
Kind: "SnapshotSharingOptions",
|
||||
},
|
||||
},
|
||||
},
|
||||
OperationProps: spec3.OperationProps{
|
||||
Tags: tags,
|
||||
OperationId: "getSnapshotSettings",
|
||||
Description: "Get Snapshot sharing settings",
|
||||
Parameters: []*spec3.Parameter{
|
||||
{
|
||||
ParameterProps: spec3.ParameterProps{
|
||||
Name: "namespace",
|
||||
In: "path",
|
||||
Required: true,
|
||||
Example: "default",
|
||||
Description: "workspace",
|
||||
Schema: spec.StringProperty(),
|
||||
},
|
||||
},
|
||||
},
|
||||
Responses: &spec3.Responses{
|
||||
ResponsesProps: spec3.ResponsesProps{
|
||||
StatusCodeResponses: map[int]*spec3.Response{
|
||||
200: {
|
||||
ResponseProps: spec3.ResponseProps{
|
||||
Content: map[string]*spec3.MediaType{
|
||||
"application/json": {
|
||||
MediaTypeProps: spec3.MediaTypeProps{
|
||||
Schema: &getSettingsRsp,
|
||||
Example: getSettingsRspExample,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
user, err := identity.GetRequester(r.Context())
|
||||
if err != nil {
|
||||
errhttp.Write(r.Context(), err, w)
|
||||
return
|
||||
}
|
||||
wrap := &contextmodel.ReqContext{
|
||||
Context: &web.Context{
|
||||
Req: r,
|
||||
Resp: web.NewResponseWriter(r.Method, w),
|
||||
},
|
||||
}
|
||||
|
||||
vars := mux.Vars(r)
|
||||
info, err := authlib.ParseNamespace(vars["namespace"])
|
||||
if err != nil {
|
||||
wrap.JsonApiErr(http.StatusBadRequest, "expected namespace", nil)
|
||||
return
|
||||
}
|
||||
if info.OrgID != user.GetOrgID() {
|
||||
wrap.JsonApiErr(http.StatusBadRequest,
|
||||
fmt.Sprintf("user orgId does not match namespace (%d != %d)", info.OrgID, user.GetOrgID()), nil)
|
||||
return
|
||||
}
|
||||
|
||||
wrap.JSON(http.StatusOK, options)
|
||||
},
|
||||
},
|
||||
}}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ package snapshot
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"k8s.io/apimachinery/pkg/apis/meta/internalversion"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
@@ -29,7 +28,6 @@ type SnapshotLegacyStore struct {
|
||||
ResourceInfo utils.ResourceInfo
|
||||
Service dashboardsnapshots.Service
|
||||
Namespacer request.NamespaceMapper
|
||||
Options dashV0.SnapshotSharingOptions
|
||||
}
|
||||
|
||||
func (s *SnapshotLegacyStore) New() runtime.Object {
|
||||
@@ -117,15 +115,6 @@ func (s *SnapshotLegacyStore) List(ctx context.Context, options *internalversion
|
||||
}
|
||||
|
||||
func (s *SnapshotLegacyStore) Get(ctx context.Context, name string, options *metav1.GetOptions) (runtime.Object, error) {
|
||||
info, err := request.NamespaceInfoFrom(ctx, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = s.checkEnabled(info.Value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
query := dashboardsnapshots.GetDashboardSnapshotQuery{
|
||||
Key: name,
|
||||
}
|
||||
@@ -140,10 +129,3 @@ func (s *SnapshotLegacyStore) Get(ctx context.Context, name string, options *met
|
||||
}
|
||||
return nil, s.ResourceInfo.NewNotFound(name)
|
||||
}
|
||||
|
||||
func (s *SnapshotLegacyStore) checkEnabled(ns string) error {
|
||||
if !s.Options.SnapshotsEnabled {
|
||||
return fmt.Errorf("snapshots not enabled")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -38,7 +38,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/provisioning"
|
||||
publicdashboardsmetric "github.com/grafana/grafana/pkg/services/publicdashboards/metric"
|
||||
"github.com/grafana/grafana/pkg/services/rendering"
|
||||
"github.com/grafana/grafana/pkg/services/searchV2"
|
||||
secretsMigrations "github.com/grafana/grafana/pkg/services/secrets/kvstore/migrations"
|
||||
secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
|
||||
"github.com/grafana/grafana/pkg/services/serviceaccounts"
|
||||
@@ -58,7 +57,7 @@ func ProvideBackgroundServiceRegistry(
|
||||
provisioning *provisioning.ProvisioningServiceImpl, usageStats *uss.UsageStats,
|
||||
statsCollector *statscollector.Service, grafanaUpdateChecker *updatemanager.GrafanaService,
|
||||
pluginsUpdateChecker *updatemanager.PluginsService, metrics *metrics.InternalMetricsService,
|
||||
secretsService *secretsManager.SecretsService, remoteCache *remotecache.RemoteCache, StorageService store.StorageService, searchService searchV2.SearchService, entityEventsService store.EntityEventsService,
|
||||
secretsService *secretsManager.SecretsService, remoteCache *remotecache.RemoteCache, StorageService store.StorageService, entityEventsService store.EntityEventsService,
|
||||
saService *samanager.ServiceAccountsService, grpcServerProvider grpcserver.Provider,
|
||||
secretMigrationProvider secretsMigrations.SecretMigrationProvider, loginAttemptService *loginattemptimpl.Service,
|
||||
bundleService *supportbundlesimpl.Service, publicDashboardsMetric *publicdashboardsmetric.Service,
|
||||
@@ -101,7 +100,6 @@ func ProvideBackgroundServiceRegistry(
|
||||
remoteCache,
|
||||
secretsService,
|
||||
StorageService,
|
||||
searchService,
|
||||
entityEventsService,
|
||||
grpcServerProvider,
|
||||
saService,
|
||||
|
||||
@@ -140,7 +140,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/rendering"
|
||||
"github.com/grafana/grafana/pkg/services/search"
|
||||
"github.com/grafana/grafana/pkg/services/search/sort"
|
||||
"github.com/grafana/grafana/pkg/services/searchV2"
|
||||
"github.com/grafana/grafana/pkg/services/secrets"
|
||||
secretsDatabase "github.com/grafana/grafana/pkg/services/secrets/database"
|
||||
secretsStore "github.com/grafana/grafana/pkg/services/secrets/kvstore"
|
||||
@@ -275,8 +274,6 @@ var wireBasicSet = wire.NewSet(
|
||||
datasourceproxy.ProvideService,
|
||||
sort.ProvideService,
|
||||
search.ProvideService,
|
||||
searchV2.ProvideService,
|
||||
searchV2.ProvideSearchHTTPService,
|
||||
store.ProvideService,
|
||||
store.ProvideSystemUsersService,
|
||||
live.ProvideService,
|
||||
|
||||
249
pkg/server/wire_gen.go
generated
249
pkg/server/wire_gen.go
generated
File diff suppressed because one or more lines are too long
@@ -15,6 +15,8 @@ var _ authorizer.Authorizer = &roleAuthorizer{}
|
||||
|
||||
var orgRoleNoneAsViewerAPIGroups = []string{
|
||||
"productactivation.ext.grafana.com",
|
||||
// playlist can be removed after this issue is resolved: https://github.com/grafana/grafana/issues/115712
|
||||
"playlist.grafana.app",
|
||||
}
|
||||
|
||||
type roleAuthorizer struct{}
|
||||
|
||||
@@ -20,9 +20,10 @@ const (
|
||||
|
||||
// Typed errors
|
||||
var (
|
||||
ErrUserTokenNotFound = errors.New("user token not found")
|
||||
ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken
|
||||
ErrExternalSessionNotFound = errors.New("external session not found")
|
||||
ErrUserTokenNotFound = errors.New("user token not found")
|
||||
ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken
|
||||
ErrExternalSessionNotFound = errors.New("external session not found")
|
||||
ErrExternalSessionTokenNotFound = errors.New("session token was nil")
|
||||
)
|
||||
|
||||
type (
|
||||
|
||||
@@ -572,13 +572,6 @@ var (
|
||||
FrontendOnly: false, // The restore backend feature changes behavior based on this flag
|
||||
Owner: grafanaDashboardsSquad,
|
||||
},
|
||||
{
|
||||
Name: "kubernetesDashboardsV2",
|
||||
Description: "Use the v2 kubernetes API in the frontend for dashboards",
|
||||
Stage: FeatureStageExperimental,
|
||||
FrontendOnly: false,
|
||||
Owner: grafanaDashboardsSquad,
|
||||
},
|
||||
{
|
||||
Name: "dashboardUndoRedo",
|
||||
Description: "Enables undo/redo in dynamic dashboards",
|
||||
@@ -688,6 +681,14 @@ var (
|
||||
HideFromDocs: true,
|
||||
RequiresRestart: true,
|
||||
},
|
||||
{
|
||||
Name: "auditLoggingAppPlatform",
|
||||
Description: "Enable audit logging with Kubernetes under app platform",
|
||||
Stage: FeatureStageExperimental,
|
||||
Owner: grafanaOperatorExperienceSquad,
|
||||
HideFromDocs: true,
|
||||
RequiresRestart: true,
|
||||
},
|
||||
{
|
||||
Name: "secretsManagementAppPlatform",
|
||||
Description: "Enable the secrets management API and services under app platform",
|
||||
|
||||
2
pkg/services/featuremgmt/toggles_gen.csv
generated
2
pkg/services/featuremgmt/toggles_gen.csv
generated
@@ -79,7 +79,6 @@ dashboardSceneForViewers,GA,@grafana/dashboards-squad,false,false,true
|
||||
dashboardSceneSolo,GA,@grafana/dashboards-squad,false,false,true
|
||||
dashboardScene,GA,@grafana/dashboards-squad,false,false,true
|
||||
dashboardNewLayouts,experimental,@grafana/dashboards-squad,false,false,false
|
||||
kubernetesDashboardsV2,experimental,@grafana/dashboards-squad,false,false,false
|
||||
dashboardUndoRedo,experimental,@grafana/dashboards-squad,false,false,true
|
||||
unlimitedLayoutsNesting,experimental,@grafana/dashboards-squad,false,false,true
|
||||
drilldownRecommendations,experimental,@grafana/dashboards-squad,false,false,true
|
||||
@@ -95,6 +94,7 @@ kubernetesFeatureToggles,experimental,@grafana/grafana-operator-experience-squad
|
||||
cloudRBACRoles,preview,@grafana/identity-access-team,false,true,false
|
||||
alertingQueryOptimization,GA,@grafana/alerting-squad,false,false,false
|
||||
jitterAlertRulesWithinGroups,preview,@grafana/alerting-squad,false,true,false
|
||||
auditLoggingAppPlatform,experimental,@grafana/grafana-operator-experience-squad,false,true,false
|
||||
secretsManagementAppPlatform,experimental,@grafana/grafana-operator-experience-squad,false,false,false
|
||||
secretsManagementAppPlatformUI,experimental,@grafana/grafana-operator-experience-squad,false,false,false
|
||||
alertingSaveStatePeriodic,privatePreview,@grafana/alerting-squad,false,false,false
|
||||
|
||||
|
8
pkg/services/featuremgmt/toggles_gen.go
generated
8
pkg/services/featuremgmt/toggles_gen.go
generated
@@ -259,10 +259,6 @@ const (
|
||||
// Enables experimental new dashboard layouts
|
||||
FlagDashboardNewLayouts = "dashboardNewLayouts"
|
||||
|
||||
// FlagKubernetesDashboardsV2
|
||||
// Use the v2 kubernetes API in the frontend for dashboards
|
||||
FlagKubernetesDashboardsV2 = "kubernetesDashboardsV2"
|
||||
|
||||
// FlagPdfTables
|
||||
// Enables generating table data as PDF in reporting
|
||||
FlagPdfTables = "pdfTables"
|
||||
@@ -279,6 +275,10 @@ const (
|
||||
// Distributes alert rule evaluations more evenly over time, including spreading out rules within the same group. Disables sequential evaluation if enabled.
|
||||
FlagJitterAlertRulesWithinGroups = "jitterAlertRulesWithinGroups"
|
||||
|
||||
// FlagAuditLoggingAppPlatform
|
||||
// Enable audit logging with Kubernetes under app platform
|
||||
FlagAuditLoggingAppPlatform = "auditLoggingAppPlatform"
|
||||
|
||||
// FlagSecretsManagementAppPlatform
|
||||
// Enable the secrets management API and services under app platform
|
||||
FlagSecretsManagementAppPlatform = "secretsManagementAppPlatform"
|
||||
|
||||
19
pkg/services/featuremgmt/toggles_gen.json
generated
19
pkg/services/featuremgmt/toggles_gen.json
generated
@@ -658,6 +658,20 @@
|
||||
"frontend": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"metadata": {
|
||||
"name": "auditLoggingAppPlatform",
|
||||
"resourceVersion": "1767013056996",
|
||||
"creationTimestamp": "2025-12-29T12:57:36Z"
|
||||
},
|
||||
"spec": {
|
||||
"description": "Enable audit logging with Kubernetes under app platform",
|
||||
"stage": "experimental",
|
||||
"codeowner": "@grafana/grafana-operator-experience-squad",
|
||||
"requiresRestart": true,
|
||||
"hideFromDocs": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"metadata": {
|
||||
"name": "authZGRPCServer",
|
||||
@@ -2003,8 +2017,9 @@
|
||||
{
|
||||
"metadata": {
|
||||
"name": "kubernetesDashboardsV2",
|
||||
"resourceVersion": "1764664939750",
|
||||
"creationTimestamp": "2025-12-02T08:42:19Z"
|
||||
"resourceVersion": "1764236054307",
|
||||
"creationTimestamp": "2025-11-27T09:34:14Z",
|
||||
"deletionTimestamp": "2025-12-05T13:43:57Z"
|
||||
},
|
||||
"spec": {
|
||||
"description": "Use the v2 kubernetes API in the frontend for dashboards",
|
||||
|
||||
@@ -660,6 +660,10 @@ func (o *Service) getExternalSession(ctx context.Context, usr identity.Requester
|
||||
return externalSessions[0], nil
|
||||
}
|
||||
|
||||
if sessionToken == nil {
|
||||
return nil, auth.ErrExternalSessionTokenNotFound
|
||||
}
|
||||
|
||||
// For regular users, we use the session token ID to fetch the external session
|
||||
return o.sessionService.GetExternalSession(ctx, sessionToken.ExternalSessionId)
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ import (
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/fs"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/plugins"
|
||||
@@ -24,8 +23,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/org"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration"
|
||||
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
|
||||
"github.com/grafana/grafana/pkg/services/searchV2"
|
||||
"github.com/grafana/grafana/pkg/services/sqlstore"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/tests/testinfra"
|
||||
"github.com/grafana/grafana/pkg/tests/testsuite"
|
||||
@@ -164,9 +161,7 @@ func TestIntegrationPluginManager(t *testing.T) {
|
||||
pg := postgres.ProvideService()
|
||||
my := mysql.ProvideService()
|
||||
ms := mssql.ProvideService(cfg)
|
||||
db := db.InitTestDB(t, sqlstore.InitTestDBOpt{Cfg: cfg})
|
||||
sv2 := searchV2.ProvideService(cfg, db, nil, nil, tracer, features, nil, nil, nil)
|
||||
graf := grafanads.ProvideService(sv2, nil, features)
|
||||
graf := grafanads.ProvideService(nil, features)
|
||||
pyroscope := pyroscope.ProvideService(hcp)
|
||||
parca := parca.ProvideService(hcp)
|
||||
zipkin := zipkin.ProvideService(hcp)
|
||||
|
||||
@@ -35,7 +35,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/provisioning/datasources"
|
||||
"github.com/grafana/grafana/pkg/services/provisioning/plugins"
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/services/searchV2"
|
||||
"github.com/grafana/grafana/pkg/services/secrets"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
|
||||
@@ -57,7 +56,6 @@ func ProvideService(
|
||||
dashboardService dashboardservice.DashboardService,
|
||||
folderService folder.Service,
|
||||
pluginSettings pluginsettings.Service,
|
||||
searchService searchV2.SearchService,
|
||||
quotaService quota.Service,
|
||||
secrectService secrets.Service,
|
||||
orgService org.Service,
|
||||
@@ -84,7 +82,6 @@ func ProvideService(
|
||||
datasourceService: datasourceService,
|
||||
correlationsService: correlationsService,
|
||||
pluginsSettings: pluginSettings,
|
||||
searchService: searchService,
|
||||
quotaService: quotaService,
|
||||
secretService: secrectService,
|
||||
log: log.New("provisioning"),
|
||||
@@ -138,9 +135,6 @@ func (ps *ProvisioningServiceImpl) starting(ctx context.Context) error {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if ps.dashboardProvisioner.HasDashboardSources() {
|
||||
ps.searchService.TriggerReIndex()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -194,7 +188,6 @@ func newProvisioningServiceImpl(
|
||||
provisionDatasources func(context.Context, string, datasources.BaseDataSourceService, datasources.CorrelationsStore, org.Service) error,
|
||||
provisionPlugins func(context.Context, string, pluginstore.Store, pluginsettings.Service, org.Service) error,
|
||||
migratePrometheusType func(context.Context) error,
|
||||
searchService searchV2.SearchService,
|
||||
) (*ProvisioningServiceImpl, error) {
|
||||
s := &ProvisioningServiceImpl{
|
||||
log: log.New("provisioning"),
|
||||
@@ -202,7 +195,6 @@ func newProvisioningServiceImpl(
|
||||
provisionDatasources: provisionDatasources,
|
||||
provisionPlugins: provisionPlugins,
|
||||
Cfg: setting.NewCfg(),
|
||||
searchService: searchService,
|
||||
migratePrometheusType: migratePrometheusType,
|
||||
}
|
||||
|
||||
@@ -238,7 +230,6 @@ type ProvisioningServiceImpl struct {
|
||||
datasourceService datasourceservice.DataSourceService
|
||||
correlationsService correlations.Service
|
||||
pluginsSettings pluginsettings.Service
|
||||
searchService searchV2.SearchService
|
||||
quotaService quota.Service
|
||||
secretService secrets.Service
|
||||
folderService folder.Service
|
||||
|
||||
@@ -20,7 +20,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/provisioning/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/provisioning/datasources"
|
||||
"github.com/grafana/grafana/pkg/services/provisioning/utils"
|
||||
"github.com/grafana/grafana/pkg/services/searchV2"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
|
||||
)
|
||||
@@ -159,8 +158,6 @@ func setup(t *testing.T) *serviceTestStruct {
|
||||
pollChangesChannel <- ctx
|
||||
}
|
||||
|
||||
searchStub := searchV2.NewStubSearchService()
|
||||
|
||||
service, err := newProvisioningServiceImpl(
|
||||
func(context.Context, string, dashboardstore.DashboardProvisioningService, *setting.Cfg, org.Service, utils.DashboardStore, folder.Service, dualwrite.Service, *serverlock.ServerLockService) (dashboards.DashboardProvisioner, error) {
|
||||
serviceTest.dashboardProvisionerInstantiations++
|
||||
@@ -175,7 +172,6 @@ func setup(t *testing.T) *serviceTestStruct {
|
||||
func(context.Context) error {
|
||||
return nil
|
||||
},
|
||||
searchStub,
|
||||
)
|
||||
service.provisionAlerting = func(context.Context, prov_alerting.ProvisionerConfig) error {
|
||||
return nil
|
||||
|
||||
@@ -1,255 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"fmt"
|
||||
"sort"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/datasources"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
)
|
||||
|
||||
func (s *StandardSearchService) addAllowedActionsField(ctx context.Context, orgId int64, user *user.SignedInUser, response *backend.DataResponse) error {
|
||||
references, err := getEntityReferences(response)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
allAllowedActions, err := s.createAllowedActions(ctx, orgId, user, references)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(response.Frames) == 0 {
|
||||
return errors.New("empty response")
|
||||
}
|
||||
|
||||
frame := response.Frames[0]
|
||||
|
||||
allowedActionsField := data.NewFieldFromFieldType(data.FieldTypeJSON, len(allAllowedActions))
|
||||
allowedActionsField.Name = "allowed_actions"
|
||||
frame.Fields = append(frame.Fields, allowedActionsField)
|
||||
|
||||
for i, actions := range allAllowedActions {
|
||||
js, _ := json.Marshal(actions)
|
||||
jsb := json.RawMessage(js)
|
||||
allowedActionsField.Set(i, jsb)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
type allowedActions struct {
|
||||
EntityKind entityKind `json:"kind"`
|
||||
UID string `json:"uid"`
|
||||
Actions []string `json:"actions"`
|
||||
}
|
||||
|
||||
func (s *StandardSearchService) createAllowedActions(ctx context.Context, orgId int64, user *user.SignedInUser, references []entityReferences) ([][]allowedActions, error) {
|
||||
uidsPerKind := make(map[entityKind][]string)
|
||||
for _, refs := range references {
|
||||
if _, ok := uidsPerKind[refs.entityKind]; !ok {
|
||||
uidsPerKind[refs.entityKind] = []string{}
|
||||
}
|
||||
|
||||
uidsPerKind[refs.entityKind] = append(uidsPerKind[refs.entityKind], refs.uid)
|
||||
|
||||
if len(refs.dsUids) > 0 {
|
||||
if _, ok := uidsPerKind[entityKindDatasource]; !ok {
|
||||
uidsPerKind[entityKindDatasource] = []string{}
|
||||
}
|
||||
|
||||
uidsPerKind[entityKindDatasource] = append(uidsPerKind[entityKindDatasource], refs.dsUids...)
|
||||
}
|
||||
}
|
||||
|
||||
allowedActionsByUid := make(map[entityKind]map[string][]string)
|
||||
|
||||
for entKind, uids := range uidsPerKind {
|
||||
if entKind == entityKindPanel {
|
||||
emptyAllowedActions := make(map[string][]string)
|
||||
for _, uid := range uids {
|
||||
emptyAllowedActions[uid] = []string{}
|
||||
}
|
||||
allowedActionsByUid[entityKindPanel] = emptyAllowedActions
|
||||
}
|
||||
|
||||
var prefix string
|
||||
switch entKind {
|
||||
case entityKindFolder:
|
||||
prefix = dashboards.ScopeFoldersPrefix
|
||||
case entityKindDatasource:
|
||||
prefix = datasources.ScopePrefix
|
||||
case entityKindDashboard:
|
||||
prefix = dashboards.ScopeDashboardsPrefix
|
||||
default:
|
||||
continue
|
||||
}
|
||||
|
||||
allowedActionsByUid[entKind] = s.getAllowedActionsByUid(ctx, user, orgId, prefix, uids)
|
||||
}
|
||||
|
||||
dsActionsByUid, ok := allowedActionsByUid[entityKindDatasource]
|
||||
if !ok {
|
||||
dsActionsByUid = make(map[string][]string)
|
||||
}
|
||||
|
||||
out := make([][]allowedActions, 0, len(references))
|
||||
for _, ref := range references {
|
||||
var actions []allowedActions
|
||||
|
||||
selfActions := make([]string, 0)
|
||||
if selfKindActions, ok := allowedActionsByUid[ref.entityKind]; ok {
|
||||
if self, ok := selfKindActions[ref.uid]; ok && len(self) > 0 {
|
||||
selfActions = self
|
||||
}
|
||||
}
|
||||
|
||||
actions = append(actions, allowedActions{
|
||||
EntityKind: ref.entityKind,
|
||||
UID: ref.uid,
|
||||
Actions: selfActions,
|
||||
})
|
||||
|
||||
for _, dsUid := range ref.dsUids {
|
||||
dsActions := make([]string, 0)
|
||||
if dsAct, ok := dsActionsByUid[dsUid]; ok {
|
||||
dsActions = dsAct
|
||||
}
|
||||
|
||||
actions = append(actions, allowedActions{
|
||||
EntityKind: entityKindDatasource,
|
||||
UID: dsUid,
|
||||
Actions: dsActions,
|
||||
})
|
||||
}
|
||||
|
||||
out = append(out, actions)
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
|
||||
func (s *StandardSearchService) getAllowedActionsByUid(ctx context.Context, user *user.SignedInUser,
|
||||
orgID int64, prefix string, resourceIDs []string) map[string][]string {
|
||||
if user.Permissions == nil {
|
||||
return map[string][]string{}
|
||||
}
|
||||
|
||||
permissions, ok := user.Permissions[orgID]
|
||||
if !ok {
|
||||
return map[string][]string{}
|
||||
}
|
||||
|
||||
uidsAsMap := make(map[string]bool)
|
||||
for _, uid := range resourceIDs {
|
||||
uidsAsMap[uid] = true
|
||||
}
|
||||
|
||||
out := make(map[string][]string)
|
||||
resp := accesscontrol.GetResourcesMetadata(ctx, permissions, prefix, uidsAsMap)
|
||||
for uid, meta := range resp {
|
||||
var actions []string
|
||||
for action := range meta {
|
||||
actions = append(actions, action)
|
||||
}
|
||||
sort.Strings(actions)
|
||||
out[uid] = actions
|
||||
}
|
||||
return out
|
||||
}
|
||||
|
||||
type entityReferences struct {
|
||||
entityKind entityKind
|
||||
uid string
|
||||
dsUids []string
|
||||
}
|
||||
|
||||
func getEntityReferences(resp *backend.DataResponse) ([]entityReferences, error) {
|
||||
if resp == nil {
|
||||
return nil, errors.New("nil response")
|
||||
}
|
||||
|
||||
if resp.Error != nil {
|
||||
return nil, resp.Error
|
||||
}
|
||||
|
||||
if len(resp.Frames) == 0 {
|
||||
return nil, errors.New("empty response")
|
||||
}
|
||||
|
||||
frame := resp.Frames[0]
|
||||
|
||||
kindField, idx := frame.FieldByName("kind")
|
||||
if idx == -1 {
|
||||
return nil, errors.New("no kind field")
|
||||
}
|
||||
|
||||
dsUidField, idx := frame.FieldByName("ds_uid")
|
||||
if idx == -1 {
|
||||
return nil, errors.New("no ds_uid field")
|
||||
}
|
||||
uidField, idx := frame.FieldByName("uid")
|
||||
if idx == -1 {
|
||||
return nil, errors.New("no dash_uid field")
|
||||
}
|
||||
|
||||
if dsUidField.Len() != uidField.Len() {
|
||||
return nil, errors.New("mismatched lengths")
|
||||
}
|
||||
|
||||
var out []entityReferences
|
||||
for i := 0; i < dsUidField.Len(); i++ {
|
||||
kind, ok := kindField.At(i).(string)
|
||||
if !ok || kind == "" {
|
||||
return nil, errors.New("invalid value in kind field")
|
||||
}
|
||||
|
||||
uid, ok := uidField.At(i).(string)
|
||||
if !ok || uid == "" {
|
||||
return nil, errors.New("invalid value in uid field")
|
||||
}
|
||||
|
||||
if entityKind(kind) != entityKindDashboard {
|
||||
out = append(out, entityReferences{
|
||||
entityKind: entityKind(kind),
|
||||
uid: uid,
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
uidField, ok := uidField.At(i).(string)
|
||||
if !ok || uidField == "" {
|
||||
return nil, errors.New("invalid value in dash_uid field")
|
||||
}
|
||||
|
||||
rawDsUids, ok := dsUidField.At(i).(json.RawMessage)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("invalid value for uid %s in ds_uid field: %s", uidField, dsUidField.At(i))
|
||||
}
|
||||
|
||||
var uids []string
|
||||
if rawDsUids != nil {
|
||||
jsonValue, err := rawDsUids.MarshalJSON()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = json.Unmarshal(jsonValue, &uids)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
out = append(out, entityReferences{entityKind: entityKindDashboard, uid: uid, dsUids: uids})
|
||||
}
|
||||
|
||||
return out, nil
|
||||
}
|
||||
@@ -1,125 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"context"
|
||||
_ "embed"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/experimental"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
ac "github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
accesscontrolmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/datasources"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
var (
|
||||
//go:embed testdata/search_response_frame.json
|
||||
exampleListFrameJSON string
|
||||
|
||||
orgId = int64(1)
|
||||
permissionsWithScopeAll = map[string][]string{
|
||||
datasources.ActionIDRead: {datasources.ScopeAll},
|
||||
datasources.ActionDelete: {datasources.ScopeAll},
|
||||
ac.ActionDatasourcesExplore: {datasources.ScopeAll},
|
||||
datasources.ActionQuery: {datasources.ScopeAll},
|
||||
datasources.ActionRead: {datasources.ScopeAll},
|
||||
datasources.ActionWrite: {datasources.ScopeAll},
|
||||
datasources.ActionPermissionsRead: {datasources.ScopeAll},
|
||||
datasources.ActionPermissionsWrite: {datasources.ScopeAll},
|
||||
|
||||
dashboards.ActionFoldersCreate: {dashboards.ScopeFoldersAll},
|
||||
dashboards.ActionFoldersRead: {dashboards.ScopeFoldersAll},
|
||||
dashboards.ActionFoldersWrite: {dashboards.ScopeFoldersAll},
|
||||
dashboards.ActionFoldersDelete: {dashboards.ScopeFoldersAll},
|
||||
dashboards.ActionFoldersPermissionsRead: {dashboards.ScopeFoldersAll},
|
||||
dashboards.ActionFoldersPermissionsWrite: {dashboards.ScopeFoldersAll},
|
||||
|
||||
dashboards.ActionDashboardsCreate: {dashboards.ScopeDashboardsAll},
|
||||
dashboards.ActionDashboardsRead: {dashboards.ScopeDashboardsAll},
|
||||
dashboards.ActionDashboardsWrite: {dashboards.ScopeDashboardsAll},
|
||||
dashboards.ActionDashboardsDelete: {dashboards.ScopeDashboardsAll},
|
||||
dashboards.ActionDashboardsPermissionsRead: {dashboards.ScopeDashboardsAll},
|
||||
dashboards.ActionDashboardsPermissionsWrite: {dashboards.ScopeDashboardsAll},
|
||||
}
|
||||
permissionsWithUidScopes = map[string][]string{
|
||||
datasources.ActionIDRead: {},
|
||||
datasources.ActionDelete: {},
|
||||
ac.ActionDatasourcesExplore: {},
|
||||
datasources.ActionQuery: {},
|
||||
datasources.ActionRead: {
|
||||
datasources.ScopeProvider.GetResourceScopeUID("datasource-2"),
|
||||
datasources.ScopeProvider.GetResourceScopeUID("datasource-3"),
|
||||
},
|
||||
datasources.ActionWrite: {},
|
||||
datasources.ActionPermissionsRead: {},
|
||||
datasources.ActionPermissionsWrite: {},
|
||||
|
||||
dashboards.ActionFoldersCreate: {},
|
||||
dashboards.ActionFoldersRead: {
|
||||
dashboards.ScopeFoldersProvider.GetResourceScopeUID("ujaM1h6nz"),
|
||||
},
|
||||
dashboards.ActionFoldersWrite: {},
|
||||
dashboards.ActionFoldersDelete: {},
|
||||
dashboards.ActionFoldersPermissionsRead: {},
|
||||
dashboards.ActionFoldersPermissionsWrite: {},
|
||||
|
||||
dashboards.ActionDashboardsCreate: {},
|
||||
dashboards.ActionDashboardsRead: {},
|
||||
dashboards.ActionDashboardsWrite: {
|
||||
dashboards.ScopeDashboardsProvider.GetResourceScopeUID("7MeksYbmk"),
|
||||
},
|
||||
dashboards.ActionDashboardsDelete: {},
|
||||
dashboards.ActionDashboardsPermissionsRead: {},
|
||||
dashboards.ActionDashboardsPermissionsWrite: {},
|
||||
}
|
||||
)
|
||||
|
||||
func service(t *testing.T) *StandardSearchService {
|
||||
service, ok := ProvideService(&setting.Cfg{Search: setting.SearchSettings{}},
|
||||
nil, nil, accesscontrolmock.New(), tracing.InitializeTracerForTest(), featuremgmt.WithFeatures(),
|
||||
nil, nil, nil).(*StandardSearchService)
|
||||
require.True(t, ok)
|
||||
return service
|
||||
}
|
||||
|
||||
func TestAllowedActionsForPermissionsWithScopeAll(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
permissions map[string][]string
|
||||
}{
|
||||
{
|
||||
name: "scope_all",
|
||||
permissions: permissionsWithScopeAll,
|
||||
},
|
||||
{
|
||||
name: "scope_uids",
|
||||
permissions: permissionsWithUidScopes,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
frame := &data.Frame{}
|
||||
err := frame.UnmarshalJSON([]byte(exampleListFrameJSON))
|
||||
require.NoError(t, err)
|
||||
|
||||
err = service(t).addAllowedActionsField(context.Background(), orgId, &user.SignedInUser{
|
||||
Permissions: map[int64]map[string][]string{
|
||||
orgId: tt.permissions,
|
||||
},
|
||||
}, &backend.DataResponse{
|
||||
Frames: []*data.Frame{frame},
|
||||
})
|
||||
require.NoError(t, err)
|
||||
|
||||
experimental.CheckGoldenJSONFrame(t, "testdata", fmt.Sprintf("allowed_actions_%s.golden", tt.name), frame, true)
|
||||
}
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/services/accesscontrol"
|
||||
"github.com/grafana/grafana/pkg/services/dashboards"
|
||||
"github.com/grafana/grafana/pkg/services/folder"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
)
|
||||
|
||||
// ResourceFilter checks if a given a uid (resource identifier) check if we have the requested permission
|
||||
type ResourceFilter func(kind entityKind, uid, parentUID string) bool
|
||||
|
||||
// FutureAuthService eventually implemented by the security service
|
||||
type FutureAuthService interface {
|
||||
GetDashboardReadFilter(ctx context.Context, orgID int64, user *user.SignedInUser) (ResourceFilter, error)
|
||||
}
|
||||
|
||||
var _ FutureAuthService = (*simpleAuthService)(nil)
|
||||
|
||||
type simpleAuthService struct {
|
||||
sql db.DB
|
||||
ac accesscontrol.Service
|
||||
folderService folder.Service
|
||||
logger log.Logger
|
||||
}
|
||||
|
||||
func (a *simpleAuthService) GetDashboardReadFilter(ctx context.Context, orgID int64, user *user.SignedInUser) (ResourceFilter, error) {
|
||||
canReadDashboard, canReadFolder := accesscontrol.Checker(user, dashboards.ActionDashboardsRead), accesscontrol.Checker(user, dashboards.ActionFoldersRead)
|
||||
return func(kind entityKind, uid, parent string) bool {
|
||||
switch kind {
|
||||
case entityKindFolder:
|
||||
scopes, err := dashboards.GetInheritedScopes(ctx, orgID, uid, a.folderService)
|
||||
if err != nil {
|
||||
a.logger.Debug("Could not retrieve inherited folder scopes:", "err", err)
|
||||
}
|
||||
scopes = append(scopes, dashboards.ScopeFoldersProvider.GetResourceScopeUID(uid))
|
||||
return canReadFolder(scopes...)
|
||||
case entityKindDashboard:
|
||||
scopes, err := dashboards.GetInheritedScopes(ctx, orgID, parent, a.folderService)
|
||||
if err != nil {
|
||||
a.logger.Debug("Could not retrieve inherited folder scopes:", "err", err)
|
||||
}
|
||||
scopes = append(scopes, dashboards.ScopeDashboardsProvider.GetResourceScopeUID(uid))
|
||||
scopes = append(scopes, dashboards.ScopeFoldersProvider.GetResourceScopeUID(parent))
|
||||
return canReadDashboard(scopes...)
|
||||
|
||||
case entityKindPanel, entityKindDatasource, entityKindQuery:
|
||||
// Not a dashboard or folder. Assume no access.
|
||||
fallthrough
|
||||
default:
|
||||
return false
|
||||
}
|
||||
}, nil
|
||||
}
|
||||
@@ -1,767 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/blugelabs/bluge"
|
||||
"github.com/blugelabs/bluge/search"
|
||||
"github.com/blugelabs/bluge/search/aggregations"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/infra/slugify"
|
||||
"github.com/grafana/grafana/pkg/services/folder"
|
||||
"github.com/grafana/grafana/pkg/services/store/entity"
|
||||
)
|
||||
|
||||
const (
|
||||
documentFieldUID = "_id" // actually UID!! but bluge likes "_id"
|
||||
documentFieldKind = "kind"
|
||||
documentFieldTag = "tag"
|
||||
documentFieldURL = "url"
|
||||
documentFieldName = "name"
|
||||
documentFieldName_sort = "name_sort"
|
||||
documentFieldName_ngram = "name_ngram"
|
||||
documentFieldLocation = "location" // parent path
|
||||
documentFieldPanelType = "panel_type"
|
||||
documentFieldTransformer = "transformer"
|
||||
documentFieldDSUID = "ds_uid"
|
||||
documentFieldDSType = "ds_type"
|
||||
DocumentFieldCreatedAt = "created_at"
|
||||
DocumentFieldUpdatedAt = "updated_at"
|
||||
)
|
||||
|
||||
func initOrgIndex(dashboards []dashboard, logger log.Logger, extendDoc ExtendDashboardFunc) (*orgIndex, error) {
|
||||
dashboardWriter, err := bluge.OpenWriter(bluge.InMemoryOnlyConfig())
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("error opening writer: %v", err)
|
||||
}
|
||||
// Not closing Writer here since we use it later while processing dashboard change events.
|
||||
|
||||
start := time.Now()
|
||||
label := start
|
||||
|
||||
batch := bluge.NewBatch()
|
||||
|
||||
// In order to reduce memory usage while initial indexing we are limiting
|
||||
// the size of batch here.
|
||||
docsInBatch := 0
|
||||
maxBatchSize := 100
|
||||
|
||||
flushIfRequired := func(force bool) error {
|
||||
docsInBatch++
|
||||
needFlush := force || (maxBatchSize > 0 && docsInBatch >= maxBatchSize)
|
||||
if !needFlush {
|
||||
return nil
|
||||
}
|
||||
err := dashboardWriter.Batch(batch)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
docsInBatch = 0
|
||||
batch.Reset()
|
||||
return nil
|
||||
}
|
||||
|
||||
// First index the folders to construct folderIdLookup.
|
||||
folderIdLookup := make(map[int64]string, 50)
|
||||
folderIdLookup[0] = folder.GeneralFolderUID
|
||||
for _, dash := range dashboards {
|
||||
if !dash.isFolder {
|
||||
continue
|
||||
}
|
||||
doc := getFolderDashboardDoc(dash)
|
||||
if err := extendDoc(dash.uid, doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
batch.Insert(doc)
|
||||
if err := flushIfRequired(false); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
uid := dash.uid
|
||||
folderIdLookup[dash.id] = uid
|
||||
}
|
||||
|
||||
// Then each dashboard.
|
||||
for _, dash := range dashboards {
|
||||
if dash.isFolder {
|
||||
continue
|
||||
}
|
||||
folderUID := folderIdLookup[dash.folderID]
|
||||
location := folderUID
|
||||
|
||||
doc := getNonFolderDashboardDoc(dash, location)
|
||||
if err := extendDoc(dash.uid, doc); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
batch.Insert(doc)
|
||||
if err := flushIfRequired(false); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Index each panel in dashboard.
|
||||
if location != "" {
|
||||
location += "/"
|
||||
}
|
||||
location += dash.uid
|
||||
docs := getDashboardPanelDocs(dash, location)
|
||||
|
||||
for _, panelDoc := range docs {
|
||||
batch.Insert(panelDoc)
|
||||
if err := flushIfRequired(false); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Flush docs in batch with force as we are in the end.
|
||||
if err := flushIfRequired(true); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
logger.Info("Finish inserting docs into index", "elapsed", time.Since(label))
|
||||
logger.Info("Finish building index", "totalElapsed", time.Since(start))
|
||||
return &orgIndex{
|
||||
writers: map[indexType]*bluge.Writer{
|
||||
indexTypeDashboard: dashboardWriter,
|
||||
},
|
||||
}, err
|
||||
}
|
||||
|
||||
func getFolderDashboardDoc(dash dashboard) *bluge.Document {
|
||||
uid := dash.uid
|
||||
url := fmt.Sprintf("/dashboards/f/%s/%s", dash.uid, dash.slug)
|
||||
if uid == "" {
|
||||
uid = "general"
|
||||
url = "/dashboards"
|
||||
dash.summary.Name = "General"
|
||||
dash.summary.Description = ""
|
||||
}
|
||||
|
||||
return newSearchDocument(uid, dash.summary.Name, dash.summary.Description, url).
|
||||
AddField(bluge.NewKeywordField(documentFieldKind, string(entityKindFolder)).Aggregatable().StoreValue()).
|
||||
AddField(bluge.NewDateTimeField(DocumentFieldCreatedAt, dash.created).Sortable().StoreValue()).
|
||||
AddField(bluge.NewDateTimeField(DocumentFieldUpdatedAt, dash.updated).Sortable().StoreValue())
|
||||
}
|
||||
|
||||
func getNonFolderDashboardDoc(dash dashboard, location string) *bluge.Document {
|
||||
url := fmt.Sprintf("/d/%s/%s", dash.uid, dash.slug)
|
||||
|
||||
// Dashboard document
|
||||
doc := newSearchDocument(dash.uid, dash.summary.Name, dash.summary.Description, url).
|
||||
AddField(bluge.NewKeywordField(documentFieldKind, string(entityKindDashboard)).Aggregatable().StoreValue()).
|
||||
AddField(bluge.NewKeywordField(documentFieldLocation, location).Aggregatable().StoreValue()).
|
||||
AddField(bluge.NewDateTimeField(DocumentFieldCreatedAt, dash.created).Sortable().StoreValue()).
|
||||
AddField(bluge.NewDateTimeField(DocumentFieldUpdatedAt, dash.updated).Sortable().StoreValue())
|
||||
|
||||
// dashboards only use the key part of labels
|
||||
for k := range dash.summary.Labels {
|
||||
doc.AddField(bluge.NewKeywordField(documentFieldTag, k).
|
||||
StoreValue().
|
||||
Aggregatable().
|
||||
SearchTermPositions())
|
||||
}
|
||||
|
||||
for _, ref := range dash.summary.References {
|
||||
if ref.Family == entity.StandardKindDataSource {
|
||||
if ref.Type != "" {
|
||||
doc.AddField(bluge.NewKeywordField(documentFieldDSType, ref.Type).
|
||||
StoreValue().
|
||||
Aggregatable().
|
||||
SearchTermPositions())
|
||||
}
|
||||
if ref.Identifier != "" {
|
||||
doc.AddField(bluge.NewKeywordField(documentFieldDSUID, ref.Identifier).
|
||||
StoreValue().
|
||||
Aggregatable().
|
||||
SearchTermPositions())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return doc
|
||||
}
|
||||
|
||||
// getDashboardPanelDocs builds one search document per panel of the given
// dashboard. Row panels are skipped, as are panels whose UID suffix (after
// the last '#') does not parse as a numeric panel id.
func getDashboardPanelDocs(dash dashboard, location string) []*bluge.Document {
	dashURL := fmt.Sprintf("/d/%s/%s", dash.uid, slugify.Slugify(dash.summary.Name))

	// pre-allocating a little bit more than necessary, possibly
	docs := make([]*bluge.Document, 0, len(dash.summary.Nested))

	for _, panel := range dash.summary.Nested {
		if panel.Fields["type"] == "row" {
			continue // skip rows
		}
		// Panel UIDs have the form "<dashboard_uid>#<panel_id>".
		idx := strings.LastIndex(panel.UID, "#")
		panelId, err := strconv.Atoi(panel.UID[idx+1:])
		if err != nil {
			continue
		}

		url := fmt.Sprintf("%s?viewPanel=%d", dashURL, panelId)
		doc := newSearchDocument(panel.UID, panel.Name, panel.Description, url).
			AddField(bluge.NewKeywordField(documentFieldLocation, location).Aggregatable().StoreValue()).
			AddField(bluge.NewKeywordField(documentFieldKind, string(entityKindPanel)).Aggregatable().StoreValue()) // likely want independent index for this

		for _, ref := range panel.References {
			switch ref.Family {
			case entity.StandardKindDashboard:
				// NOTE(review): this case matches family "dashboard" but writes
				// datasource type/UID fields — confirm this is intended.
				if ref.Type != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldDSType, ref.Type).
						StoreValue().
						Aggregatable().
						SearchTermPositions())
				}
				if ref.Identifier != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldDSUID, ref.Identifier).
						StoreValue().
						Aggregatable().
						SearchTermPositions())
				}
			case entity.ExternalEntityReferencePlugin:
				// Panel plugin type (e.g. which visualization renders the panel).
				if ref.Type == entity.StandardKindPanel && ref.Identifier != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldPanelType, ref.Identifier).Aggregatable().StoreValue())
				}
			case entity.ExternalEntityReferenceRuntime:
				// Transformers applied by the panel.
				if ref.Type == entity.ExternalEntityReferenceRuntime_Transformer && ref.Identifier != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldTransformer, ref.Identifier).Aggregatable())
				}
			}
		}

		docs = append(docs, doc)
	}
	return docs
}
|
||||
|
||||
// Names need to be indexed a few ways to support key features
|
||||
func newSearchDocument(uid, name, descr, url string) *bluge.Document {
|
||||
doc := bluge.NewDocument(uid)
|
||||
|
||||
if name != "" {
|
||||
doc.AddField(bluge.NewTextField(documentFieldName, name).StoreValue().SearchTermPositions())
|
||||
doc.AddField(bluge.NewTextField(documentFieldName_ngram, name).WithAnalyzer(ngramIndexAnalyzer))
|
||||
|
||||
// Don't add a field for empty names
|
||||
sortStr := formatForNameSortField(name)
|
||||
if len(sortStr) > 0 {
|
||||
doc.AddField(bluge.NewKeywordField(documentFieldName_sort, sortStr).Sortable())
|
||||
}
|
||||
}
|
||||
if url != "" {
|
||||
doc.AddField(bluge.NewKeywordField(documentFieldURL, url).StoreValue())
|
||||
}
|
||||
return doc
|
||||
}
|
||||
|
||||
func getDashboardPanelIDs(index *orgIndex, panelLocation string) ([]string, error) {
|
||||
var panelIDs []string
|
||||
|
||||
reader, cancel, err := index.readerForIndex(indexTypeDashboard)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer cancel()
|
||||
|
||||
fullQuery := bluge.NewBooleanQuery()
|
||||
fullQuery.AddMust(bluge.NewTermQuery(panelLocation).SetField(documentFieldLocation))
|
||||
fullQuery.AddMust(bluge.NewTermQuery(string(entityKindPanel)).SetField(documentFieldKind))
|
||||
req := bluge.NewAllMatches(fullQuery)
|
||||
documentMatchIterator, err := reader.Search(context.Background(), req)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
match, err := documentMatchIterator.Next()
|
||||
for err == nil && match != nil {
|
||||
// load the identifier for this match
|
||||
err = match.VisitStoredFields(func(field string, value []byte) bool {
|
||||
if field == documentFieldUID {
|
||||
panelIDs = append(panelIDs, string(value))
|
||||
}
|
||||
return true
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// load the next document match
|
||||
match, err = documentMatchIterator.Next()
|
||||
}
|
||||
return panelIDs, err
|
||||
}
|
||||
|
||||
// getDocsIDsByLocationPrefix returns the UIDs of all documents whose location
// starts with prefix (e.g. everything under a folder). A non-nil error from
// the iterator is returned together with the partially collected IDs.
func getDocsIDsByLocationPrefix(index *orgIndex, prefix string) ([]string, error) {
	var ids []string

	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		return nil, fmt.Errorf("error getting reader: %w", err)
	}
	defer cancel()

	fullQuery := bluge.NewBooleanQuery()
	fullQuery.AddMust(bluge.NewPrefixQuery(prefix).SetField(documentFieldLocation))
	req := bluge.NewAllMatches(fullQuery)
	documentMatchIterator, err := reader.Search(context.Background(), req)
	if err != nil {
		return nil, fmt.Errorf("error search: %w", err)
	}
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {
		// load the identifier for this match
		err = match.VisitStoredFields(func(field string, value []byte) bool {
			if field == documentFieldUID {
				ids = append(ids, string(value))
			}
			return true
		})
		if err != nil {
			return nil, err
		}
		// load the next document match
		match, err = documentMatchIterator.Next()
	}
	return ids, err
}
|
||||
|
||||
// getDashboardLocation looks up the stored location (folder path) of the
// dashboard with the given UID. found is false when no dashboard document
// matched; a non-nil iterator error is returned as the third value.
func getDashboardLocation(index *orgIndex, dashboardUID string) (string, bool, error) {
	var dashboardLocation string
	var found bool

	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		return "", false, err
	}
	defer cancel()

	fullQuery := bluge.NewBooleanQuery()
	fullQuery.AddMust(bluge.NewTermQuery(dashboardUID).SetField(documentFieldUID))
	fullQuery.AddMust(bluge.NewTermQuery(string(entityKindDashboard)).SetField(documentFieldKind))
	req := bluge.NewAllMatches(fullQuery)
	documentMatchIterator, err := reader.Search(context.Background(), req)
	if err != nil {
		return "", false, err
	}
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {
		// load the location for this match; returning false stops the visit
		// early once the field is found
		err = match.VisitStoredFields(func(field string, value []byte) bool {
			if field == documentFieldLocation {
				dashboardLocation = string(value)
				found = true
				return false
			}
			return true
		})
		if err != nil {
			return "", false, err
		}
		// load the next document match
		match, err = documentMatchIterator.Next()
	}
	return dashboardLocation, found, err
}
|
||||
|
||||
//nolint:gocyclo
|
||||
func doSearchQuery(
|
||||
ctx context.Context,
|
||||
logger log.Logger,
|
||||
index *orgIndex,
|
||||
filter ResourceFilter,
|
||||
q DashboardQuery,
|
||||
extender QueryExtender,
|
||||
appSubUrl string,
|
||||
) *backend.DataResponse {
|
||||
response := &backend.DataResponse{}
|
||||
header := &customMeta{}
|
||||
|
||||
reader, cancel, err := index.readerForIndex(indexTypeDashboard)
|
||||
if err != nil {
|
||||
logger.Error("Error getting reader for dashboard index: %v", err)
|
||||
response.Error = err
|
||||
return response
|
||||
}
|
||||
defer cancel()
|
||||
|
||||
hasConstraints := false
|
||||
fullQuery := bluge.NewBooleanQuery()
|
||||
fullQuery.AddMust(newPermissionFilter(filter, logger))
|
||||
|
||||
// Only show dashboard / folders / panels.
|
||||
if len(q.Kind) > 0 {
|
||||
bq := bluge.NewBooleanQuery()
|
||||
for _, k := range q.Kind {
|
||||
bq.AddShould(bluge.NewTermQuery(k).SetField(documentFieldKind))
|
||||
}
|
||||
fullQuery.AddMust(bq)
|
||||
hasConstraints = true
|
||||
}
|
||||
|
||||
// Explicit UID lookup (stars etc)
|
||||
if len(q.UIDs) > 0 {
|
||||
count := len(q.UIDs) + 3
|
||||
bq := bluge.NewBooleanQuery()
|
||||
for i, v := range q.UIDs {
|
||||
bq.AddShould(bluge.NewTermQuery(v).
|
||||
SetField(documentFieldUID).
|
||||
SetBoost(float64(count - i)))
|
||||
}
|
||||
fullQuery.AddMust(bq)
|
||||
hasConstraints = true
|
||||
}
|
||||
|
||||
// Tags
|
||||
if len(q.Tags) > 0 {
|
||||
bq := bluge.NewBooleanQuery()
|
||||
for _, v := range q.Tags {
|
||||
bq.AddMust(bluge.NewTermQuery(v).SetField(documentFieldTag))
|
||||
}
|
||||
fullQuery.AddMust(bq)
|
||||
hasConstraints = true
|
||||
}
|
||||
|
||||
// Panel type
|
||||
if q.PanelType != "" {
|
||||
fullQuery.AddMust(bluge.NewTermQuery(q.PanelType).SetField(documentFieldPanelType))
|
||||
hasConstraints = true
|
||||
}
|
||||
|
||||
// Datasource
|
||||
if q.Datasource != "" {
|
||||
fullQuery.AddMust(bluge.NewTermQuery(q.Datasource).SetField(documentFieldDSUID))
|
||||
hasConstraints = true
|
||||
}
|
||||
|
||||
// DatasourceType
|
||||
if q.DatasourceType != "" {
|
||||
fullQuery.AddMust(bluge.NewTermQuery(q.DatasourceType).SetField(documentFieldDSType))
|
||||
hasConstraints = true
|
||||
}
|
||||
|
||||
// Folder
|
||||
if q.Location != "" {
|
||||
fullQuery.AddMust(bluge.NewTermQuery(q.Location).SetField(documentFieldLocation))
|
||||
hasConstraints = true
|
||||
}
|
||||
|
||||
isMatchAllQuery := q.Query == "*" || q.Query == ""
|
||||
if isMatchAllQuery {
|
||||
if !hasConstraints {
|
||||
fullQuery.AddShould(bluge.NewMatchAllQuery())
|
||||
}
|
||||
} else {
|
||||
bq := bluge.NewBooleanQuery()
|
||||
|
||||
bq.AddShould(NewSubstringQuery(formatForNameSortField(q.Query)).
|
||||
SetField(documentFieldName_sort).
|
||||
SetBoost(6))
|
||||
|
||||
if shouldUseNgram(q) {
|
||||
bq.AddShould(bluge.NewMatchQuery(q.Query).
|
||||
SetField(documentFieldName_ngram).
|
||||
SetOperator(bluge.MatchQueryOperatorAnd). // all terms must match
|
||||
SetAnalyzer(ngramQueryAnalyzer).SetBoost(1))
|
||||
}
|
||||
|
||||
fullQuery.AddMust(bq)
|
||||
}
|
||||
|
||||
limit := 50 // default view
|
||||
if q.Limit > 0 {
|
||||
limit = q.Limit
|
||||
}
|
||||
|
||||
req := bluge.NewTopNSearch(limit, fullQuery)
|
||||
if q.From > 0 {
|
||||
req.SetFrom(q.From)
|
||||
}
|
||||
if q.Explain {
|
||||
req.ExplainScores()
|
||||
}
|
||||
req.WithStandardAggregations()
|
||||
|
||||
if q.Sort != "" {
|
||||
req.SortBy([]string{q.Sort})
|
||||
header.SortBy = strings.TrimPrefix(q.Sort, "-")
|
||||
}
|
||||
|
||||
for _, t := range q.Facet {
|
||||
lim := t.Limit
|
||||
if lim < 1 {
|
||||
lim = 50
|
||||
}
|
||||
req.AddAggregation(t.Field, aggregations.NewTermsAggregation(search.Field(t.Field), lim))
|
||||
}
|
||||
|
||||
// execute this search on the reader
|
||||
documentMatchIterator, err := reader.Search(ctx, req)
|
||||
if err != nil {
|
||||
logger.Error("Error executing search", "err", err)
|
||||
response.Error = err
|
||||
return response
|
||||
}
|
||||
|
||||
fScore := data.NewFieldFromFieldType(data.FieldTypeFloat64, 0)
|
||||
fUID := data.NewFieldFromFieldType(data.FieldTypeString, 0)
|
||||
fKind := data.NewFieldFromFieldType(data.FieldTypeString, 0)
|
||||
fPType := data.NewFieldFromFieldType(data.FieldTypeString, 0)
|
||||
fName := data.NewFieldFromFieldType(data.FieldTypeString, 0)
|
||||
fURL := data.NewFieldFromFieldType(data.FieldTypeString, 0)
|
||||
fLocation := data.NewFieldFromFieldType(data.FieldTypeString, 0)
|
||||
fTags := data.NewFieldFromFieldType(data.FieldTypeNullableJSON, 0) //nolint:staticcheck
|
||||
fDSUIDs := data.NewFieldFromFieldType(data.FieldTypeJSON, 0)
|
||||
fExplain := data.NewFieldFromFieldType(data.FieldTypeNullableJSON, 0) //nolint:staticcheck
|
||||
|
||||
fScore.Name = "score"
|
||||
fUID.Name = "uid"
|
||||
fKind.Name = "kind"
|
||||
fName.Name = "name"
|
||||
fLocation.Name = "location"
|
||||
fURL.Name = "url"
|
||||
fURL.Config = &data.FieldConfig{
|
||||
Links: []data.DataLink{
|
||||
{Title: "link", URL: "${__value.text}"},
|
||||
},
|
||||
}
|
||||
fPType.Name = "panel_type"
|
||||
fDSUIDs.Name = "ds_uid"
|
||||
fTags.Name = "tags"
|
||||
fExplain.Name = "explain"
|
||||
|
||||
frame := data.NewFrame("Query results", fKind, fUID, fName, fPType, fURL, fTags, fDSUIDs, fLocation)
|
||||
if q.Explain {
|
||||
frame.Fields = append(frame.Fields, fScore, fExplain)
|
||||
}
|
||||
frame.SetMeta(&data.FrameMeta{
|
||||
Type: "search-results",
|
||||
Custom: header,
|
||||
})
|
||||
|
||||
fieldLen := 0
|
||||
ext := extender.GetFramer(frame)
|
||||
|
||||
locationItems := make(map[string]bool, 50)
|
||||
|
||||
// iterate through the document matches
|
||||
match, err := documentMatchIterator.Next()
|
||||
for err == nil && match != nil {
|
||||
uid := ""
|
||||
kind := ""
|
||||
ptype := ""
|
||||
name := ""
|
||||
url := ""
|
||||
loc := ""
|
||||
var dsUIDs []string
|
||||
var tags []string
|
||||
|
||||
err = match.VisitStoredFields(func(field string, value []byte) bool {
|
||||
switch field {
|
||||
case documentFieldUID:
|
||||
uid = string(value)
|
||||
case documentFieldKind:
|
||||
kind = string(value)
|
||||
case documentFieldPanelType:
|
||||
ptype = string(value)
|
||||
case documentFieldName:
|
||||
name = string(value)
|
||||
case documentFieldURL:
|
||||
url = appSubUrl + string(value)
|
||||
case documentFieldLocation:
|
||||
loc = string(value)
|
||||
case documentFieldDSUID:
|
||||
dsUIDs = append(dsUIDs, string(value))
|
||||
case documentFieldTag:
|
||||
tags = append(tags, string(value))
|
||||
default:
|
||||
ext(field, value)
|
||||
}
|
||||
return true
|
||||
})
|
||||
if err != nil {
|
||||
logger.Error("Error loading stored fields", "err", err)
|
||||
response.Error = err
|
||||
return response
|
||||
}
|
||||
|
||||
fKind.Append(kind)
|
||||
fUID.Append(uid)
|
||||
fPType.Append(ptype)
|
||||
fName.Append(name)
|
||||
fURL.Append(url)
|
||||
fLocation.Append(loc)
|
||||
|
||||
// set a key for all path parts we return
|
||||
if !q.SkipLocation {
|
||||
for _, v := range strings.Split(loc, "/") {
|
||||
locationItems[v] = true
|
||||
}
|
||||
}
|
||||
|
||||
if len(tags) > 0 {
|
||||
js, _ := json.Marshal(tags)
|
||||
jsb := json.RawMessage(js)
|
||||
fTags.Append(&jsb)
|
||||
} else {
|
||||
fTags.Append(nil)
|
||||
}
|
||||
|
||||
if len(dsUIDs) == 0 {
|
||||
dsUIDs = []string{}
|
||||
}
|
||||
|
||||
js, _ := json.Marshal(dsUIDs)
|
||||
jsb := json.RawMessage(js)
|
||||
fDSUIDs.Append(jsb)
|
||||
|
||||
if q.Explain {
|
||||
if isMatchAllQuery {
|
||||
fScore.Append(float64(fieldLen + q.From))
|
||||
} else {
|
||||
fScore.Append(match.Score)
|
||||
}
|
||||
if match.Explanation != nil {
|
||||
js, _ := json.Marshal(&match.Explanation)
|
||||
jsb := json.RawMessage(js)
|
||||
fExplain.Append(&jsb)
|
||||
} else {
|
||||
fExplain.Append(nil)
|
||||
}
|
||||
}
|
||||
|
||||
// extend fields to match the longest field
|
||||
fieldLen++
|
||||
for _, f := range frame.Fields {
|
||||
if fieldLen > f.Len() {
|
||||
f.Extend(fieldLen - f.Len())
|
||||
}
|
||||
}
|
||||
|
||||
// load the next document match
|
||||
match, err = documentMatchIterator.Next()
|
||||
}
|
||||
|
||||
// Must call after iterating :)
|
||||
aggs := documentMatchIterator.Aggregations()
|
||||
|
||||
header.Count = aggs.Count() // Total count
|
||||
if q.Explain {
|
||||
header.MaxScore = aggs.Metric("max_score")
|
||||
}
|
||||
|
||||
if len(locationItems) > 0 && !q.SkipLocation {
|
||||
header.Locations = getLocationLookupInfo(ctx, reader, locationItems)
|
||||
}
|
||||
|
||||
response.Frames = append(response.Frames, frame)
|
||||
|
||||
for _, t := range q.Facet {
|
||||
bbb := aggs.Buckets(t.Field)
|
||||
if bbb != nil {
|
||||
size := len(bbb)
|
||||
|
||||
fName := data.NewFieldFromFieldType(data.FieldTypeString, size)
|
||||
fName.Name = t.Field
|
||||
|
||||
fCount := data.NewFieldFromFieldType(data.FieldTypeUint64, size)
|
||||
fCount.Name = "Count"
|
||||
|
||||
for i, v := range bbb {
|
||||
fName.Set(i, v.Name())
|
||||
fCount.Set(i, v.Count())
|
||||
}
|
||||
|
||||
response.Frames = append(response.Frames, data.NewFrame("Facet: "+t.Field, fName, fCount))
|
||||
}
|
||||
}
|
||||
|
||||
return response
|
||||
}
|
||||
|
||||
func shouldUseNgram(q DashboardQuery) bool {
|
||||
var tokens []string
|
||||
if len(q.Query) > ngramEdgeFilterMaxLength {
|
||||
tokens = strings.Fields(q.Query)
|
||||
for _, k := range tokens {
|
||||
// ngram will never match if at least one input token exceeds the max token length,
|
||||
// as all tokens must match simultaneously with the `bluge.MatchQueryOperatorAnd` operator
|
||||
if len(k) > ngramEdgeFilterMaxLength {
|
||||
return false
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// formatForNameSortField normalizes a display name into the canonical form
// stored in the name-sort field: upper-cased with leading/trailing spaces
// removed. Only the space character is trimmed (not all whitespace), matching
// how the field has always been indexed.
func formatForNameSortField(name string) string {
	upper := strings.ToUpper(name)
	return strings.TrimRight(strings.TrimLeft(upper, " "), " ")
}
|
||||
|
||||
func getLocationLookupInfo(ctx context.Context, reader *bluge.Reader, uids map[string]bool) map[string]locationItem {
|
||||
res := make(map[string]locationItem, len(uids))
|
||||
bq := bluge.NewBooleanQuery()
|
||||
for k := range uids {
|
||||
bq.AddShould(bluge.NewTermQuery(k).SetField(documentFieldUID))
|
||||
}
|
||||
|
||||
req := bluge.NewAllMatches(bq)
|
||||
|
||||
documentMatchIterator, err := reader.Search(ctx, req)
|
||||
if err != nil {
|
||||
return res
|
||||
}
|
||||
|
||||
dvfieldNames := []string{"type"}
|
||||
sctx := search.NewSearchContext(0, 0)
|
||||
|
||||
// execute this search on the reader
|
||||
// iterate through the document matches
|
||||
match, err := documentMatchIterator.Next()
|
||||
for err == nil && match != nil {
|
||||
err = match.LoadDocumentValues(sctx, dvfieldNames)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
|
||||
uid := ""
|
||||
item := locationItem{}
|
||||
|
||||
_ = match.VisitStoredFields(func(field string, value []byte) bool {
|
||||
switch field {
|
||||
case documentFieldUID:
|
||||
uid = string(value)
|
||||
case documentFieldKind:
|
||||
item.Kind = string(value)
|
||||
case documentFieldName:
|
||||
item.Name = string(value)
|
||||
case documentFieldURL:
|
||||
item.URL = string(value)
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
res[uid] = item
|
||||
|
||||
// load the next document match
|
||||
match, err = documentMatchIterator.Next()
|
||||
}
|
||||
|
||||
return res
|
||||
}
|
||||
|
||||
// locationItem is the resolved display info for one element of a dashboard
// location path; it is keyed by UID in customMeta.Locations.
type locationItem struct {
	Name string `json:"name"`
	Kind string `json:"kind"`
	URL  string `json:"url"`
}
|
||||
|
||||
// customMeta is attached as custom frame metadata to search responses.
type customMeta struct {
	Count     uint64                  `json:"count"`                  // total matching documents, not just the returned page
	MaxScore  float64                 `json:"max_score,omitempty"`    // only populated when the query requests explanations
	Locations map[string]locationItem `json:"locationInfo,omitempty"` // UID -> display info for location path parts
	SortBy    string                  `json:"sortBy,omitempty"`       // sort field with any leading "-" stripped
}
|
||||
@@ -1,48 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"github.com/blugelabs/bluge"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
)
|
||||
|
||||
// ExtendDashboardFunc lets an extender add extra fields to a dashboard
// document before it is indexed.
type ExtendDashboardFunc func(uid string, doc *bluge.Document) error

// FramerFunc receives stored fields the core search code does not recognize,
// so an extender can surface them in the result frame.
type FramerFunc func(field string, value []byte)
|
||||
|
||||
// QueryExtender supplies a FramerFunc bound to a specific result frame.
type QueryExtender interface {
	GetFramer(frame *data.Frame) FramerFunc
}
|
||||
|
||||
// DocumentExtender supplies a per-org ExtendDashboardFunc for the given
// dashboard UIDs.
type DocumentExtender interface {
	GetDashboardExtender(orgID int64, uids ...string) ExtendDashboardFunc
}
|
||||
|
||||
// DashboardIndexExtender is the extension point for enriching both indexing
// (documents) and querying (result frames).
type DashboardIndexExtender interface {
	GetDocumentExtender() DocumentExtender
	GetQueryExtender(query DashboardQuery) QueryExtender
}
|
||||
|
||||
// NoopExtender is a DashboardIndexExtender that adds nothing to documents or
// query results.
type NoopExtender struct{}

// GetDocumentExtender returns a no-op document extender.
func (n NoopExtender) GetDocumentExtender() DocumentExtender {
	return &NoopDocumentExtender{}
}

// GetQueryExtender returns a no-op query extender.
func (n NoopExtender) GetQueryExtender(query DashboardQuery) QueryExtender {
	return &NoopQueryExtender{}
}
|
||||
|
||||
// NoopDocumentExtender provides ExtendDashboardFuncs that leave documents
// unchanged.
type NoopDocumentExtender struct{}

// GetDashboardExtender returns a function that does nothing and reports no
// error, regardless of org or UIDs.
func (n NoopDocumentExtender) GetDashboardExtender(_ int64, _ ...string) ExtendDashboardFunc {
	return func(uid string, doc *bluge.Document) error {
		return nil
	}
}
|
||||
|
||||
// NoopQueryExtender provides FramerFuncs that ignore every extra field.
type NoopQueryExtender struct{}

// GetFramer returns a FramerFunc that discards all input.
func (n NoopQueryExtender) GetFramer(_ *data.Frame) FramerFunc {
	return func(field string, value []byte) {
		// really noop
	}
}
|
||||
@@ -1,142 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"regexp"
|
||||
"strings"
|
||||
|
||||
"github.com/blugelabs/bluge"
|
||||
"github.com/blugelabs/bluge/search"
|
||||
"github.com/blugelabs/bluge/search/searcher"
|
||||
"github.com/blugelabs/bluge/search/similarity"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/services/store/entity"
|
||||
)
|
||||
|
||||
// PermissionFilter is a bluge.Query that post-filters matched documents by
// the caller's resource permissions (see Searcher).
type PermissionFilter struct {
	log    log.Logger
	filter ResourceFilter
}
|
||||
|
||||
// entityKind enumerates the entity kinds stored in the search index.
type entityKind string

const (
	entityKindPanel      entityKind = entity.StandardKindPanel
	entityKindDashboard  entityKind = entity.StandardKindDashboard
	entityKindFolder     entityKind = entity.StandardKindFolder
	entityKindDatasource entityKind = entity.StandardKindDataSource
	entityKindQuery      entityKind = entity.StandardKindQuery
)
|
||||
|
||||
func (r entityKind) IsValid() bool {
|
||||
return r == entityKindPanel || r == entityKindDashboard || r == entityKindFolder
|
||||
}
|
||||
|
||||
// supportsAuthzCheck reports whether an authorization decision can be made
// for this kind. Currently the same set as IsValid (panel/dashboard/folder).
func (r entityKind) supportsAuthzCheck() bool {
	return r == entityKindPanel || r == entityKindDashboard || r == entityKindFolder
}
|
||||
|
||||
var (
	// Document values loaded for every candidate match during permission filtering.
	permissionFilterFields = []string{documentFieldUID, documentFieldKind, documentFieldLocation}

	// Panel UIDs have the form "<dashboard_uid>#<panel_id>".
	panelIdFieldRegex                      = regexp.MustCompile(`^(.*)#([0-9]{1,4})$`)
	panelIdFieldDashboardUidSubmatchIndex  = 1
	panelIdFieldPanelIdSubmatchIndex       = 2
	panelIdFieldRegexExpectedSubmatchCount = 3 // submatches[0] - whole string

	// Compile-time check that PermissionFilter implements bluge.Query.
	_ bluge.Query = (*PermissionFilter)(nil)
)
|
||||
|
||||
func newPermissionFilter(resourceFilter ResourceFilter, log log.Logger) *PermissionFilter {
|
||||
return &PermissionFilter{
|
||||
filter: resourceFilter,
|
||||
log: log,
|
||||
}
|
||||
}
|
||||
|
||||
// logAccessDecision logs the outcome of a single permission check.
// Logging is currently short-circuited (the `if true` below) because it was
// producing too much output; remove the early return to re-enable it.
func (q *PermissionFilter) logAccessDecision(decision bool, kind any, id string, reason string, ctx ...any) {
	if true {
		return // TOO much logging right now
	}

	ctx = append(ctx, "kind", kind, "id", id, "reason", reason)
	if decision {
		q.log.Debug("Allowing access", ctx...)
	} else {
		q.log.Info("Denying access", ctx...)
	}
}
|
||||
|
||||
// canAccess decides whether the document identified by (kind, id, location)
// is visible through the caller's ResourceFilter.
//
// Folders and dashboards are checked directly. Panels are delegated to their
// containing dashboard: the panel UID encodes "<dashboard_uid>#<panel_id>"
// and the location is "<folder_uid>/<dashboard_uid>".
func (q *PermissionFilter) canAccess(kind entityKind, id, location string) bool {
	if !kind.supportsAuthzCheck() {
		q.logAccessDecision(false, kind, id, "entityDoesNotSupportAuthz")
		return false
	}

	// TODO add `kind` to the `ResourceFilter` interface so that we can move the switch out of here
	//
	switch kind {
	case entityKindFolder, entityKindDashboard:
		decision := q.filter(kind, id, location)
		q.logAccessDecision(decision, kind, id, "resourceFilter")
		return decision
	case entityKindPanel:
		// Extract the dashboard UID from the panel UID.
		matches := panelIdFieldRegex.FindStringSubmatch(id)
		submatchCount := len(matches)
		if submatchCount != panelIdFieldRegexExpectedSubmatchCount {
			q.logAccessDecision(false, kind, id, "invalidPanelIdFieldRegexSubmatchCount", "submatchCount", submatchCount, "expectedSubmatchCount", panelIdFieldRegexExpectedSubmatchCount)
			return false
		}
		dashboardUid := matches[panelIdFieldDashboardUidSubmatchIndex]

		// Location is <folder_uid>/<dashboard_uid>
		if !strings.HasSuffix(location, "/"+dashboardUid) {
			q.logAccessDecision(false, kind, id, "invalidLocation", "location", location, "dashboardUid", dashboardUid)
			return false
		}
		// Strip "/<dashboard_uid>" to recover the folder UID.
		folderUid := location[:len(location)-len(dashboardUid)-1]

		// A panel is accessible iff its dashboard is.
		decision := q.filter(entityKindDashboard, dashboardUid, folderUid)
		q.logAccessDecision(decision, kind, id, "resourceFilter", "folderUid", folderUid, "dashboardUid", dashboardUid, "panelId", matches[panelIdFieldPanelIdSubmatchIndex])
		return decision
	default:
		q.logAccessDecision(false, kind, id, "reason", "unknownKind")
		return false
	}
}
|
||||
|
||||
func (q *PermissionFilter) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error) {
|
||||
dvReader, err := i.DocumentValueReader(permissionFilterFields)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s, err := searcher.NewMatchAllSearcher(i, 1, similarity.ConstantScorer(1), options)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return searcher.NewFilteringSearcher(s, func(d *search.DocumentMatch) bool {
|
||||
var kind, id, location string
|
||||
err := dvReader.VisitDocumentValues(d.Number, func(field string, term []byte) {
|
||||
switch field {
|
||||
case documentFieldKind:
|
||||
kind = string(term)
|
||||
case documentFieldUID:
|
||||
id = string(term)
|
||||
case documentFieldLocation:
|
||||
location = string(term)
|
||||
}
|
||||
})
|
||||
if err != nil {
|
||||
q.logAccessDecision(false, kind, id, "errorWhenVisitingDocumentValues")
|
||||
return false
|
||||
}
|
||||
|
||||
e := entityKind(kind)
|
||||
if !e.IsValid() {
|
||||
q.logAccessDecision(false, kind, id, "invalidEntityKind")
|
||||
return false
|
||||
}
|
||||
|
||||
return q.canAccess(e, id, location)
|
||||
}), err
|
||||
}
|
||||
@@ -1,80 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
"net/http"
|
||||
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
|
||||
"github.com/grafana/grafana/pkg/api/response"
|
||||
"github.com/grafana/grafana/pkg/api/routing"
|
||||
"github.com/grafana/grafana/pkg/middleware"
|
||||
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
|
||||
)
|
||||
|
||||
// SearchHTTPService registers the HTTP route that serves dashboard search.
type SearchHTTPService interface {
	RegisterHTTPRoutes(storageRoute routing.RouteRegister)
}
|
||||
|
||||
// searchHTTPService exposes a SearchService over HTTP.
type searchHTTPService struct {
	search SearchService
}

// ProvideSearchHTTPService is the dependency-injection constructor.
func ProvideSearchHTTPService(search SearchService) SearchHTTPService {
	return &searchHTTPService{search: search}
}

// RegisterHTTPRoutes mounts the POST search endpoint for signed-in users.
func (s *searchHTTPService) RegisterHTTPRoutes(storageRoute routing.RouteRegister) {
	storageRoute.Post("/", middleware.ReqSignedIn, routing.Wrap(s.doQuery))
}
|
||||
|
||||
// doQuery handles a search POST. While the index for the caller's org is not
// ready it returns an empty "Loading" frame (and counts the request by
// readiness reason); otherwise it parses the body into a DashboardQuery,
// executes it, and returns the resulting frames as JSON.
func (s *searchHTTPService) doQuery(c *contextmodel.ReqContext) response.Response {
	ctx, span := tracer.Start(c.Req.Context(), "searchV2.doQuery")
	defer span.End()
	searchReadinessCheckResp := s.search.IsReady(ctx, c.GetOrgID())
	if !searchReadinessCheckResp.IsReady {
		// Track not-served requests per reason so readiness issues are visible.
		dashboardSearchNotServedRequestsCounter.With(prometheus.Labels{
			"reason": searchReadinessCheckResp.Reason,
		}).Inc()

		return response.JSON(http.StatusOK, &backend.DataResponse{
			Frames: []*data.Frame{{
				Name: "Loading",
			}},
			Error: nil,
		})
	}

	body, err := io.ReadAll(c.Req.Body)
	if err != nil {
		return response.Error(http.StatusInternalServerError, "error reading bytes", err)
	}

	query := &DashboardQuery{}
	err = json.Unmarshal(body, query)
	if err != nil {
		return response.Error(http.StatusBadRequest, "error parsing body", err)
	}

	resp := s.search.doDashboardQuery(ctx, c.SignedInUser, c.GetOrgID(), *query)

	if resp.Error != nil {
		return response.Error(http.StatusInternalServerError, "error handling search request", resp.Error)
	}

	if len(resp.Frames) == 0 {
		msg := "invalid search response"
		return response.Error(http.StatusInternalServerError, msg, errors.New(msg))
	}

	bytes, err := resp.MarshalJSON()
	if err != nil {
		return response.Error(http.StatusInternalServerError, "error marshalling response", err)
	}

	// NOTE(review): this hands pre-marshalled bytes to response.JSON — confirm
	// that response.JSON passes []byte through without re-encoding it.
	return response.JSON(http.StatusOK, bytes)
}
|
||||
@@ -1,971 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"os"
|
||||
"os/exec"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/blugelabs/bluge"
|
||||
"go.opentelemetry.io/otel/attribute"
|
||||
"go.opentelemetry.io/otel/trace"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/db"
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/folder"
|
||||
"github.com/grafana/grafana/pkg/services/store"
|
||||
"github.com/grafana/grafana/pkg/services/store/entity"
|
||||
kdash "github.com/grafana/grafana/pkg/services/store/kind/dashboard"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
// dashboardLoader abstracts loading dashboards for indexing.
type dashboardLoader interface {
	// LoadDashboards returns slice of dashboards. If dashboardUID is empty – then
	// implementation must return all dashboards in instance to build an entire
	// dashboard index for an organization. If dashboardUID is not empty – then only
	// return dashboard with specified UID or empty slice if not found (this is required
	// to apply partial update).
	LoadDashboards(ctx context.Context, orgID int64, dashboardUID string) ([]dashboard, error)
}
|
||||
|
||||
// eventStore provides access to the entity-event log used to keep the index
// up to date incrementally.
type eventStore interface {
	GetLastEvent(ctx context.Context) (*store.EntityEvent, error)
	GetAllEventsAfter(ctx context.Context, id int64) ([]*store.EntityEvent, error)
}
|
||||
|
||||
// dashboard is the loader's flattened view of a dashboard (or folder) row,
// together with the generic entity summary used to build index documents.
type dashboard struct {
	id        int64
	uid       string
	isFolder  bool
	folderID  int64
	folderUID string
	slug      string
	created   time.Time
	updated   time.Time

	// Use generic structure
	summary *entity.EntitySummary
}
|
||||
|
||||
// buildSignal is sent when search index is accessed in organization for which
// we have not constructed an index yet.
type buildSignal struct {
	orgID int64
	done  chan error // receives the build result (nil on success)
}
|
||||
|
||||
// orgIndex holds one bluge writer per index type for a single organization.
type orgIndex struct {
	writers map[indexType]*bluge.Writer
}
|
||||
|
||||
// indexType distinguishes the indexes an organization can have.
type indexType string

const (
	// indexTypeDashboard is the only index type defined here.
	indexTypeDashboard indexType = "dashboard"
)
|
||||
|
||||
// writerForIndex returns the writer for the given index type (nil if absent).
func (i *orgIndex) writerForIndex(idxType indexType) *bluge.Writer {
	return i.writers[idxType]
}
|
||||
|
||||
func (i *orgIndex) readerForIndex(idxType indexType) (*bluge.Reader, func(), error) {
|
||||
reader, err := i.writers[idxType].Reader()
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
return reader, func() { _ = reader.Close() }, nil
|
||||
}
|
||||
|
||||
// searchIndex maintains per-organization bluge indexes and keeps them updated
// from the entity event store.
type searchIndex struct {
	mu                      sync.RWMutex // NOTE(review): appears to guard perOrgIndex — confirm against the run loop
	loader                  dashboardLoader
	perOrgIndex             map[int64]*orgIndex
	initializedOrgs         map[int64]bool
	initialIndexingComplete bool
	initializationMutex     sync.RWMutex // guards initializedOrgs and initialIndexingComplete
	eventStore              eventStore
	logger                  log.Logger
	buildSignals            chan buildSignal // on-demand requests to build an org's index
	extender                DocumentExtender
	syncCh                  chan chan struct{} // sync requests; see sync()
	tracer                  tracing.Tracer
	features                featuremgmt.FeatureToggles
	settings                setting.SearchSettings
}
|
||||
|
||||
func newSearchIndex(dashLoader dashboardLoader, evStore eventStore, extender DocumentExtender, tracer tracing.Tracer, features featuremgmt.FeatureToggles, settings setting.SearchSettings) *searchIndex {
|
||||
return &searchIndex{
|
||||
loader: dashLoader,
|
||||
eventStore: evStore,
|
||||
perOrgIndex: map[int64]*orgIndex{},
|
||||
initializedOrgs: map[int64]bool{},
|
||||
logger: log.New("searchIndex"),
|
||||
buildSignals: make(chan buildSignal),
|
||||
extender: extender,
|
||||
syncCh: make(chan chan struct{}),
|
||||
tracer: tracer,
|
||||
features: features,
|
||||
settings: settings,
|
||||
}
|
||||
}
|
||||
|
||||
func (i *searchIndex) isInitialized(_ context.Context, orgId int64) IsSearchReadyResponse {
|
||||
i.initializationMutex.RLock()
|
||||
orgInitialized := i.initializedOrgs[orgId]
|
||||
initialInitComplete := i.initialIndexingComplete
|
||||
i.initializationMutex.RUnlock()
|
||||
|
||||
if orgInitialized && initialInitComplete {
|
||||
return IsSearchReadyResponse{IsReady: true}
|
||||
}
|
||||
|
||||
if !initialInitComplete {
|
||||
return IsSearchReadyResponse{IsReady: false, Reason: "initial-indexing-ongoing"}
|
||||
}
|
||||
|
||||
i.triggerBuildingOrgIndex(orgId)
|
||||
return IsSearchReadyResponse{IsReady: false, Reason: "org-indexing-ongoing"}
|
||||
}
|
||||
|
||||
// triggerBuildingOrgIndex asynchronously asks the run loop to build an index
// for the given org. It waits (up to 5 minutes) for the result only to log the
// outcome; callers are not blocked.
func (i *searchIndex) triggerBuildingOrgIndex(orgId int64) {
	go func() {
		ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
		defer cancel()

		// Buffered so the run loop's send cannot block if we time out first.
		doneIndexing := make(chan error, 1)
		signal := buildSignal{orgID: orgId, done: doneIndexing}
		select {
		case i.buildSignals <- signal:
		case <-ctx.Done():
			i.logger.Warn("Failed to send a build signal to initialize org index", "orgId", orgId)
			return
		}
		select {
		case err := <-doneIndexing:
			if err != nil {
				i.logger.Error("Failed to build org index", "orgId", orgId, "error", err)
			} else {
				i.logger.Debug("Successfully built org index", "orgId", orgId)
			}
		case <-ctx.Done():
			i.logger.Warn("Building org index timeout", "orgId", orgId)
		}
	}()
}
|
||||
|
||||
func (i *searchIndex) sync(ctx context.Context) error {
|
||||
doneCh := make(chan struct{}, 1)
|
||||
select {
|
||||
case i.syncCh <- doneCh:
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
}
|
||||
select {
|
||||
case <-doneCh:
|
||||
return nil
|
||||
case <-ctx.Done():
|
||||
return ctx.Err()
|
||||
}
|
||||
}
|
||||
|
||||
// run is the single event loop that owns all index mutations: it performs the
// initial index build for the given orgs, then serves sync requests, applies
// entity-event updates, builds indexes for newly seen orgs, and periodically
// re-indexes everything from scratch. It returns when ctx is canceled.
func (i *searchIndex) run(ctx context.Context, orgIDs []int64, reIndexSignalCh chan struct{}) error {
	i.logger.Info("Initializing SearchV2", "dashboardLoadingBatchSize", i.settings.DashboardLoadingBatchSize, "fullReindexInterval", i.settings.FullReindexInterval, "indexUpdateInterval", i.settings.IndexUpdateInterval)
	initialSetupCtx, initialSetupSpan := i.tracer.Start(ctx, "searchV2 initialSetup")

	reIndexInterval := i.settings.FullReindexInterval
	fullReIndexTimer := time.NewTimer(reIndexInterval)
	defer fullReIndexTimer.Stop()

	partialUpdateInterval := i.settings.IndexUpdateInterval
	partialUpdateTimer := time.NewTimer(partialUpdateInterval)
	defer partialUpdateTimer.Stop()

	// Record the latest event ID before building, so events arriving during the
	// initial build are re-applied afterwards.
	var lastEventID int64
	lastEvent, err := i.eventStore.GetLastEvent(initialSetupCtx)
	if err != nil {
		initialSetupSpan.End()
		return err
	}
	if lastEvent != nil {
		lastEventID = lastEvent.Id
	}

	err = i.buildInitialIndexes(initialSetupCtx, orgIDs)
	if err != nil {
		initialSetupSpan.End()
		return err
	}

	// This semaphore channel allows limiting concurrent async re-indexing routines to 1.
	asyncReIndexSemaphore := make(chan struct{}, 1)

	// Channel to handle signals about asynchronous full re-indexing completion.
	reIndexDoneCh := make(chan int64, 1)

	i.initializationMutex.Lock()
	i.initialIndexingComplete = true
	i.initializationMutex.Unlock()

	initialSetupSpan.End()

	for {
		select {
		case doneCh := <-i.syncCh:
			// Executed on search read requests to make sure index is consistent.
			lastEventID = i.applyIndexUpdates(ctx, lastEventID)
			close(doneCh)
		case <-partialUpdateTimer.C:
			// Periodically apply updates collected in entity events table.
			partialIndexUpdateCtx, span := i.tracer.Start(ctx, "searchV2 partial update timer")
			lastEventID = i.applyIndexUpdates(partialIndexUpdateCtx, lastEventID)
			span.End()
			partialUpdateTimer.Reset(partialUpdateInterval)
		case <-reIndexSignalCh:
			// External systems may trigger re-indexing, at this moment provisioning does this.
			i.logger.Info("Full re-indexing due to external signal")
			fullReIndexTimer.Reset(0)
		case signal := <-i.buildSignals:
			buildSignalCtx, span := i.tracer.Start(ctx, "searchV2 build signal")

			// When search read request meets new not-indexed org we build index for it.
			i.mu.RLock()
			_, ok := i.perOrgIndex[signal.orgID]
			if ok {
				span.End()
				// Index for org already exists, do nothing.
				i.mu.RUnlock()
				close(signal.done)
				continue
			}
			i.mu.RUnlock()
			lastIndexedEventID := lastEventID
			// Prevent full re-indexing while we are building index for new org.
			// Full re-indexing will be later re-started in `case lastIndexedEventID := <-reIndexDoneCh`
			// branch.
			fullReIndexTimer.Stop()
			go func() {
				defer span.End()
				// We need semaphore here since asynchronous re-indexing may be in progress already.
				asyncReIndexSemaphore <- struct{}{}
				defer func() { <-asyncReIndexSemaphore }()
				_, err = i.buildOrgIndex(buildSignalCtx, signal.orgID)
				signal.done <- err
				reIndexDoneCh <- lastIndexedEventID
			}()
		case <-fullReIndexTimer.C:
			fullReindexCtx, span := i.tracer.Start(ctx, "searchV2 full reindex timer")

			// Periodically rebuild indexes since we could miss updates. At this moment we are issuing
			// entity events non-atomically (outside of transaction) and do not cover all possible dashboard
			// change places, so periodic re-indexing fixes possibly broken state. But ideally we should
			// come to an approach which does not require periodic re-indexing at all. One possible way
			// is to use DB triggers, see https://github.com/grafana/grafana/pull/47712.
			lastIndexedEventID := lastEventID
			go func() {
				defer span.End()
				// Do full re-index asynchronously to avoid blocking index synchronization
				// on read for a long time.

				// We need semaphore here since re-indexing due to build signal may be in progress already.
				asyncReIndexSemaphore <- struct{}{}
				defer func() { <-asyncReIndexSemaphore }()

				started := time.Now()
				i.logger.Info("Start re-indexing", i.withCtxData(fullReindexCtx)...)
				i.reIndexFromScratch(fullReindexCtx)
				i.logger.Info("Full re-indexing finished", i.withCtxData(fullReindexCtx, "fullReIndexElapsed", time.Since(started))...)
				reIndexDoneCh <- lastIndexedEventID
			}()
		case lastIndexedEventID := <-reIndexDoneCh:
			// Asynchronous re-indexing is finished. Set lastEventID to the value which
			// was actual at the re-indexing start – so that we could re-apply all the
			// events happened during async index build process and make sure it's consistent.
			if lastEventID != lastIndexedEventID {
				i.logger.Info("Re-apply event ID to last indexed", "currentEventID", lastEventID, "lastIndexedEventID", lastIndexedEventID)
				lastEventID = lastIndexedEventID
				// Apply events immediately.
				partialUpdateTimer.Reset(0)
			}
			fullReIndexTimer.Reset(reIndexInterval)
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}
|
||||
|
||||
func (i *searchIndex) buildInitialIndexes(ctx context.Context, orgIDs []int64) error {
|
||||
started := time.Now()
|
||||
i.logger.Info("Start building in-memory indexes")
|
||||
for _, orgID := range orgIDs {
|
||||
err := i.buildInitialIndex(ctx, orgID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("can't build initial dashboard search index for org %d: %w", orgID, err)
|
||||
}
|
||||
}
|
||||
i.logger.Info("Finish building in-memory indexes", "elapsed", time.Since(started))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (i *searchIndex) buildInitialIndex(ctx context.Context, orgID int64) error {
|
||||
debugCtx, debugCtxCancel := context.WithCancel(ctx)
|
||||
if os.Getenv("GF_SEARCH_DEBUG") != "" {
|
||||
go i.debugResourceUsage(debugCtx, 200*time.Millisecond)
|
||||
}
|
||||
|
||||
started := time.Now()
|
||||
numDashboards, err := i.buildOrgIndex(ctx, orgID)
|
||||
if err != nil {
|
||||
debugCtxCancel()
|
||||
return fmt.Errorf("can't build dashboard search index for org ID 1: %w", err)
|
||||
}
|
||||
i.logger.Info("Indexing for org finished", "orgIndexElapsed", time.Since(started), "orgId", orgID, "numDashboards", numDashboards)
|
||||
debugCtxCancel()
|
||||
|
||||
if os.Getenv("GF_SEARCH_DEBUG") != "" {
|
||||
// May help to estimate size of index when introducing changes. Though it's not a direct
|
||||
// match to a memory consumption, but at least make give some relative difference understanding.
|
||||
// Moreover, changes in indexing can cause additional memory consumption upon initial index build
|
||||
// which is not reflected here.
|
||||
i.reportSizeOfIndexDiskBackup(orgID)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// This is a naive implementation of process CPU getting (credits to
// https://stackoverflow.com/a/11357813/1288429). Should work on both Linux and Darwin.
// Since we only use this during development – seems simple and cheap solution to get
// process CPU usage in cross-platform way.
//
// getProcessCPU parses `ps aux` output and returns the %CPU column for the
// line whose PID column matches currentPid. It returns an error if ps fails,
// the %CPU value cannot be parsed, or the process is not found.
func getProcessCPU(currentPid int) (float64, error) {
	cmd := exec.Command("ps", "aux")
	var out bytes.Buffer
	cmd.Stdout = &out
	if err := cmd.Run(); err != nil {
		return 0, err
	}
	for {
		line, err := out.ReadString('\n')
		if err != nil {
			break
		}
		// strings.Fields splits on any whitespace run and drops empty tokens —
		// equivalent to the previous manual space-splitting, but simpler.
		ft := strings.Fields(line)
		// Fix: guard against short lines (e.g. blank trailing lines) that would
		// previously panic with an index out of range on ft[1]/ft[2].
		if len(ft) < 3 {
			continue
		}
		pid, err := strconv.Atoi(ft[1])
		if err != nil || pid != currentPid {
			// Header line (non-numeric PID) or a different process.
			continue
		}
		cpu, err := strconv.ParseFloat(ft[2], 64)
		if err != nil {
			return 0, err
		}
		return cpu, nil
	}
	return 0, errors.New("process not found")
}
|
||||
|
||||
func (i *searchIndex) debugResourceUsage(ctx context.Context, frequency time.Duration) {
|
||||
var maxHeapInuse uint64
|
||||
var maxSys uint64
|
||||
|
||||
captureMemStats := func() {
|
||||
var m runtime.MemStats
|
||||
runtime.ReadMemStats(&m)
|
||||
if m.HeapInuse > maxHeapInuse {
|
||||
maxHeapInuse = m.HeapInuse
|
||||
}
|
||||
if m.Sys > maxSys {
|
||||
maxSys = m.Sys
|
||||
}
|
||||
}
|
||||
|
||||
var cpuUtilization []float64
|
||||
|
||||
captureCPUStats := func() {
|
||||
cpu, err := getProcessCPU(os.Getpid())
|
||||
if err != nil {
|
||||
i.logger.Error("CPU stats error", "error", err)
|
||||
return
|
||||
}
|
||||
// Just collect CPU utilization to a slice and show in the of index build.
|
||||
cpuUtilization = append(cpuUtilization, cpu)
|
||||
}
|
||||
|
||||
captureMemStats()
|
||||
captureCPUStats()
|
||||
|
||||
for {
|
||||
select {
|
||||
case <-ctx.Done():
|
||||
i.logger.Warn("Resource usage during indexing", "maxHeapInUse", formatBytes(maxHeapInuse), "maxSys", formatBytes(maxSys), "cpuPercent", cpuUtilization)
|
||||
return
|
||||
case <-time.After(frequency):
|
||||
captureMemStats()
|
||||
captureCPUStats()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// reportSizeOfIndexDiskBackup writes a backup of the org's dashboard index to
// a temporary directory and logs its on-disk size. Debug-only helper
// (GF_SEARCH_DEBUG); gives a rough proxy for index size, not memory usage.
// NOTE(review): assumes an index for orgID exists — callers invoke this only
// after a successful build; a missing index would nil-panic. Confirm if reused.
func (i *searchIndex) reportSizeOfIndexDiskBackup(orgID int64) {
	index, _ := i.getOrgIndex(orgID)
	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		i.logger.Warn("Error getting reader", "error", err)
		return
	}
	defer cancel()

	// create a temp directory to store the index
	tmpDir, err := os.MkdirTemp("", "grafana.dashboard_index")
	if err != nil {
		i.logger.Error("Can't create temp dir", "error", err)
		return
	}
	defer func() {
		err := os.RemoveAll(tmpDir)
		if err != nil {
			i.logger.Error("Can't remove temp dir", "error", err, "tmpDir", tmpDir)
			return
		}
	}()

	cancelCh := make(chan struct{})
	err = reader.Backup(tmpDir, cancelCh)
	if err != nil {
		i.logger.Error("Can't create index disk backup", "error", err)
		return
	}

	size, err := dirSize(tmpDir)
	if err != nil {
		i.logger.Error("Can't calculate dir size", "error", err)
		return
	}

	i.logger.Warn("Size of index disk backup", "size", formatBytes(uint64(size)))
}
|
||||
|
||||
// buildOrgIndex loads all dashboards of an organization, builds a fresh Bluge
// index from them, atomically swaps it into perOrgIndex (closing the writers
// of any index it replaces), and marks the org initialized. It returns the
// number of dashboards indexed. Loading is bounded by a one-minute timeout.
func (i *searchIndex) buildOrgIndex(ctx context.Context, orgID int64) (int, error) {
	spanCtx, span := i.tracer.Start(ctx, "searchV2 buildOrgIndex", trace.WithAttributes(
		attribute.Int64("org_id", orgID),
	))

	started := time.Now()
	ctx, cancel := context.WithTimeout(spanCtx, time.Minute)
	ctx = log.InitCounter(ctx)

	defer func() {
		span.End()
		cancel()
	}()

	i.logger.Info("Start building org index", "orgId", orgID)
	// Empty UID means "load every dashboard of the org".
	dashboards, err := i.loader.LoadDashboards(ctx, orgID, "")
	orgSearchIndexLoadTime := time.Since(started)

	if err != nil {
		return 0, fmt.Errorf("error loading dashboards: %w, elapsed: %s", err, orgSearchIndexLoadTime.String())
	}
	i.logger.Info("Finish loading org dashboards", "elapsed", orgSearchIndexLoadTime, "orgId", orgID)

	dashboardExtender := i.extender.GetDashboardExtender(orgID)

	_, initOrgIndexSpan := i.tracer.Start(ctx, "searchV2 buildOrgIndex init org index", trace.WithAttributes(
		attribute.Int64("org_id", orgID),
		attribute.Int("dashboardCount", len(dashboards)),
	))

	index, err := initOrgIndex(dashboards, i.logger, dashboardExtender)

	initOrgIndexSpan.End()

	if err != nil {
		return 0, fmt.Errorf("error initializing index: %w", err)
	}
	orgSearchIndexTotalTime := time.Since(started)
	orgSearchIndexBuildTime := orgSearchIndexTotalTime - orgSearchIndexLoadTime

	i.logger.Info("Re-indexed dashboards for organization",
		i.withCtxData(ctx, "orgId", orgID,
			"orgSearchIndexLoadTime", orgSearchIndexLoadTime,
			"orgSearchIndexBuildTime", orgSearchIndexBuildTime,
			"orgSearchIndexTotalTime", orgSearchIndexTotalTime,
			"orgSearchDashboardCount", len(dashboards))...)

	i.mu.Lock()
	if oldIndex, ok := i.perOrgIndex[orgID]; ok {
		// Close writers of the index being replaced to release its resources.
		for _, w := range oldIndex.writers {
			_ = w.Close()
		}
	}
	i.perOrgIndex[orgID] = index
	i.mu.Unlock()

	i.initializationMutex.Lock()
	i.initializedOrgs[orgID] = true
	i.initializationMutex.Unlock()

	// Usage stats are only collected for org 1 (the main organization).
	if orgID == 1 {
		go func() {
			if reader, cancel, err := index.readerForIndex(indexTypeDashboard); err == nil {
				defer cancel()
				updateUsageStats(context.Background(), reader, i.logger, i.tracer)
			}
		}()
	}
	return len(dashboards), nil
}
|
||||
|
||||
func (i *searchIndex) getOrgIndex(orgID int64) (*orgIndex, bool) {
|
||||
i.mu.RLock()
|
||||
defer i.mu.RUnlock()
|
||||
r, ok := i.perOrgIndex[orgID]
|
||||
return r, ok
|
||||
}
|
||||
|
||||
// getOrCreateOrgIndex returns the index for orgID, requesting a build from the
// run loop and blocking until it completes (or ctx is canceled) if the org has
// no index yet.
func (i *searchIndex) getOrCreateOrgIndex(ctx context.Context, orgID int64) (*orgIndex, error) {
	index, ok := i.getOrgIndex(orgID)
	if !ok {
		// For non-main organization indexes are built lazily.
		// If we don't have an index then we are blocking here until an index for
		// an organization is ready. This actually takes time only during the first
		// access, all the consequent search requests do not fall into this branch.
		doneIndexing := make(chan error, 1)
		signal := buildSignal{orgID: orgID, done: doneIndexing}
		select {
		case i.buildSignals <- signal:
		case <-ctx.Done():
			return nil, ctx.Err()
		}
		select {
		case err := <-doneIndexing:
			if err != nil {
				return nil, err
			}
		case <-ctx.Done():
			return nil, ctx.Err()
		}
		// The run loop has stored the new index by now; re-read it.
		index, _ = i.getOrgIndex(orgID)
	}
	return index, nil
}
|
||||
|
||||
func (i *searchIndex) reIndexFromScratch(ctx context.Context) {
|
||||
i.mu.RLock()
|
||||
orgIDs := make([]int64, 0, len(i.perOrgIndex))
|
||||
for orgID := range i.perOrgIndex {
|
||||
orgIDs = append(orgIDs, orgID)
|
||||
}
|
||||
i.mu.RUnlock()
|
||||
|
||||
for _, orgID := range orgIDs {
|
||||
_, err := i.buildOrgIndex(ctx, orgID)
|
||||
if err != nil {
|
||||
i.logger.Error("Error re-indexing dashboards for organization", "orgId", orgID, "error", err)
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (i *searchIndex) withCtxData(ctx context.Context, params ...any) []any {
|
||||
traceID := tracing.TraceIDFromContext(ctx, false)
|
||||
if traceID != "" {
|
||||
params = append(params, "traceID", traceID)
|
||||
}
|
||||
|
||||
return params
|
||||
}
|
||||
|
||||
// applyIndexUpdates fetches entity events newer than lastEventID and applies
// them to the in-memory indexes. It returns the ID of the last successfully
// applied event; on failure the input ID is returned unchanged so the failed
// events are retried on the next invocation.
func (i *searchIndex) applyIndexUpdates(ctx context.Context, lastEventID int64) int64 {
	ctx = log.InitCounter(ctx)
	events, err := i.eventStore.GetAllEventsAfter(ctx, lastEventID)
	if err != nil {
		i.logger.Error("Can't load events", "error", err)
		return lastEventID
	}
	if len(events) == 0 {
		return lastEventID
	}
	started := time.Now()
	for _, e := range events {
		err := i.applyEventOnIndex(ctx, e)
		if err != nil {
			// Stop at the first failure; the cursor stays before this event.
			i.logger.Error("Can't apply event", "error", err)
			return lastEventID
		}
		lastEventID = e.Id
	}
	i.logger.Info("Index updates applied", i.withCtxData(ctx, "indexEventsAppliedElapsed", time.Since(started), "numEvents", len(events))...)
	return lastEventID
}
|
||||
|
||||
// applyEventOnIndex parses an entity event ID of the form
// "database/{orgID}/{entityType}/{uid}" and applies the event to the matching
// org index. Malformed or unknown events are logged and skipped (nil return)
// so they do not block the event cursor.
func (i *searchIndex) applyEventOnIndex(ctx context.Context, e *store.EntityEvent) error {
	i.logger.Debug("Processing event", "event", e)

	if !strings.HasPrefix(e.EntityId, "database/") {
		i.logger.Warn("Unknown storage", "entityId", e.EntityId)
		return nil
	}
	// database/org/entityType/path*
	parts := strings.SplitN(strings.TrimPrefix(e.EntityId, "database/"), "/", 3)
	if len(parts) != 3 {
		i.logger.Error("Can't parse entityId", "entityId", e.EntityId)
		return nil
	}
	orgIDStr := parts[0]
	orgID, err := strconv.ParseInt(orgIDStr, 10, 64)
	if err != nil {
		i.logger.Error("Can't extract org ID", "entityId", e.EntityId)
		return nil
	}
	kind := store.EntityType(parts[1])
	uid := parts[2]
	return i.applyEvent(ctx, orgID, kind, uid, e.EventType)
}
|
||||
|
||||
func (i *searchIndex) applyEvent(ctx context.Context, orgID int64, kind store.EntityType, uid string, _ store.EntityEventType) error {
|
||||
i.mu.Lock()
|
||||
_, ok := i.perOrgIndex[orgID]
|
||||
if !ok {
|
||||
// Skip event for org not yet indexed.
|
||||
i.mu.Unlock()
|
||||
return nil
|
||||
}
|
||||
i.mu.Unlock()
|
||||
|
||||
// Both dashboard and folder share same DB table.
|
||||
dbDashboards, err := i.loader.LoadDashboards(ctx, orgID, uid)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
i.mu.Lock()
|
||||
defer i.mu.Unlock()
|
||||
|
||||
index, ok := i.perOrgIndex[orgID]
|
||||
if !ok {
|
||||
// Skip event for org not yet fully indexed.
|
||||
return nil
|
||||
}
|
||||
|
||||
// In the future we can rely on operation types to reduce work here.
|
||||
if len(dbDashboards) == 0 {
|
||||
switch kind {
|
||||
case store.EntityTypeDashboard:
|
||||
err = i.removeDashboard(ctx, index, uid)
|
||||
case store.EntityTypeFolder:
|
||||
err = i.removeFolder(ctx, index, uid)
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
} else {
|
||||
err = i.updateDashboard(ctx, orgID, index, dbDashboards[0])
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// removeDashboard deletes a dashboard document and all of its panel documents
// from the org index in a single batch.
func (i *searchIndex) removeDashboard(_ context.Context, index *orgIndex, dashboardUID string) error {
	dashboardLocation, ok, err := getDashboardLocation(index, dashboardUID)
	if err != nil {
		return err
	}
	if !ok {
		// No dashboard, nothing to remove.
		return nil
	}

	// Find all panel docs to remove with dashboard.
	// Panels live under "{folderLocation}/{dashboardUID}" (or just the UID for
	// dashboards with no folder location).
	panelLocation := dashboardUID
	if dashboardLocation != "" {
		panelLocation = dashboardLocation + "/" + dashboardUID
	}
	panelIDs, err := getDocsIDsByLocationPrefix(index, panelLocation)
	if err != nil {
		return err
	}

	writer := index.writerForIndex(indexTypeDashboard)

	batch := bluge.NewBatch()
	batch.Delete(bluge.NewDocument(dashboardUID).ID())
	for _, panelID := range panelIDs {
		batch.Delete(bluge.NewDocument(panelID).ID())
	}

	return writer.Batch(batch)
}
|
||||
|
||||
func (i *searchIndex) removeFolder(_ context.Context, index *orgIndex, folderUID string) error {
|
||||
ids, err := getDocsIDsByLocationPrefix(index, folderUID)
|
||||
if err != nil {
|
||||
return fmt.Errorf("error getting by location prefix: %w", err)
|
||||
}
|
||||
|
||||
batch := bluge.NewBatch()
|
||||
batch.Delete(bluge.NewDocument(folderUID).ID())
|
||||
for _, id := range ids {
|
||||
batch.Delete(bluge.NewDocument(id).ID())
|
||||
}
|
||||
writer := index.writerForIndex(indexTypeDashboard)
|
||||
return writer.Batch(batch)
|
||||
}
|
||||
|
||||
// stringInSlice reports whether str is present in slice.
func stringInSlice(str string, slice []string) bool {
	for idx := range slice {
		if slice[idx] == str {
			return true
		}
	}
	return false
}
|
||||
|
||||
// updateDashboard re-indexes a single dashboard or folder document. For
// dashboards it also updates the panel documents and deletes indexed panels
// that no longer exist on the dashboard, all in one batch.
func (i *searchIndex) updateDashboard(ctx context.Context, orgID int64, index *orgIndex, dash dashboard) error {
	extendDoc := i.extender.GetDashboardExtender(orgID, dash.uid)

	writer := index.writerForIndex(indexTypeDashboard)

	var doc *bluge.Document
	if dash.isFolder {
		// Folders are a single document — no panels to reconcile.
		doc = getFolderDashboardDoc(dash)
		if err := extendDoc(dash.uid, doc); err != nil {
			return err
		}
		return writer.Update(doc.ID(), doc)
	}

	batch := bluge.NewBatch()

	// folderID == 0 means the dashboard lives in the General folder.
	var folderUID string
	if dash.folderID == 0 {
		folderUID = folder.GeneralFolderUID
	} else {
		folderUID = dash.folderUID
	}

	location := folderUID
	doc = getNonFolderDashboardDoc(dash, location)
	if err := extendDoc(dash.uid, doc); err != nil {
		return err
	}

	// Panels are located under "{folderUID}/{dashboardUID}".
	if location != "" {
		location += "/"
	}
	location += dash.uid
	panelDocs := getDashboardPanelDocs(dash, location)
	actualPanelIDs := make([]string, 0, len(panelDocs))
	for _, panelDoc := range panelDocs {
		actualPanelIDs = append(actualPanelIDs, string(panelDoc.ID().Term()))
		batch.Update(panelDoc.ID(), panelDoc)
	}

	// Delete panels that were indexed before but are gone from the dashboard now.
	indexedPanelIDs, err := getDashboardPanelIDs(index, location)
	if err != nil {
		return err
	}

	for _, panelID := range indexedPanelIDs {
		if !stringInSlice(panelID, actualPanelIDs) {
			batch.Delete(bluge.NewDocument(panelID).ID())
		}
	}

	batch.Update(doc.ID(), doc)

	return writer.Batch(batch)
}
|
||||
|
||||
// sqlDashboardLoader loads dashboards for indexing directly from the SQL database.
type sqlDashboardLoader struct {
	sql      db.DB
	logger   log.Logger
	tracer   tracing.Tracer
	settings setting.SearchSettings
}
|
||||
|
||||
// newSQLDashboardLoader creates a dashboard loader backed by the SQL database.
func newSQLDashboardLoader(sql db.DB, tracer tracing.Tracer, settings setting.SearchSettings) *sqlDashboardLoader {
	return &sqlDashboardLoader{sql: sql, logger: log.New("sqlDashboardLoader"), tracer: tracer, settings: settings}
}
|
||||
|
||||
// dashboardsRes is one batch of dashboard rows (optionally carrying a terminal
// error) sent over the loadAllDashboards channel.
type dashboardsRes struct {
	dashboards []*dashboardQueryResult
	err        error
}
|
||||
|
||||
// loadAllDashboards streams dashboard rows from the database over the returned
// channel in batches of `limit`, using keyset pagination on the dashboard ID.
// A non-empty dashboardUID restricts the query to that single dashboard. The
// goroutine stops (closing the channel) after the last short batch, on error,
// or when ctx is canceled; the final message may carry the error.
func (l sqlDashboardLoader) loadAllDashboards(ctx context.Context, limit int, orgID int64, dashboardUID string) chan *dashboardsRes {
	ch := make(chan *dashboardsRes, 3)

	go func() {
		defer close(ch)

		// Keyset pagination cursor: highest dashboard ID seen so far.
		var lastID int64
		for {
			select {
			case <-ctx.Done():
				err := ctx.Err()
				if err != nil {
					ch <- &dashboardsRes{
						dashboards: nil,
						err:        err,
					}
				}
				return
			default:
			}

			dashboardQueryCtx, dashboardQuerySpan := l.tracer.Start(ctx, "sqlDashboardLoader dashboardQuery", trace.WithAttributes(
				attribute.Int64("orgID", orgID),
				attribute.String("dashboardUID", dashboardUID),
				attribute.Int64("lastID", lastID),
			))

			rows := make([]*dashboardQueryResult, 0, limit)
			err := l.sql.WithDbSession(dashboardQueryCtx, func(sess *db.Session) error {
				sess.Table("dashboard").
					Where("org_id = ?", orgID).
					Where("deleted IS NULL") // don't index soft delete files

				if lastID > 0 {
					sess.Where("id > ?", lastID)
				}

				if dashboardUID != "" {
					sess.Where("uid = ?", dashboardUID)
				}

				sess.Cols("id", "uid", "is_folder", "folder_id", "folder_uid", "data", "slug", "created", "updated")

				sess.OrderBy("id ASC")
				sess.Limit(limit)

				return sess.Find(&rows)
			})

			dashboardQuerySpan.End()

			// Terminal batch: query error, last (short) page, or single-UID lookup.
			if err != nil || len(rows) < limit || dashboardUID != "" {
				ch <- &dashboardsRes{
					dashboards: rows,
					err:        err,
				}
				break
			}

			ch <- &dashboardsRes{
				dashboards: rows,
			}

			if len(rows) > 0 {
				lastID = rows[len(rows)-1].Id
			}
		}
	}()

	return ch
}
|
||||
|
||||
func (l sqlDashboardLoader) LoadDashboards(ctx context.Context, orgID int64, dashboardUID string) ([]dashboard, error) {
|
||||
ctx, span := l.tracer.Start(ctx, "sqlDashboardLoader LoadDashboards", trace.WithAttributes(
|
||||
attribute.Int64("orgID", orgID),
|
||||
))
|
||||
defer span.End()
|
||||
|
||||
var dashboards []dashboard
|
||||
|
||||
limit := 1
|
||||
|
||||
if dashboardUID == "" {
|
||||
limit = l.settings.DashboardLoadingBatchSize
|
||||
dashboards = make([]dashboard, 0, limit)
|
||||
}
|
||||
|
||||
loadDatasourceCtx, loadDatasourceSpan := l.tracer.Start(ctx, "sqlDashboardLoader LoadDatasourceLookup", trace.WithAttributes(
|
||||
attribute.Int64("orgID", orgID),
|
||||
))
|
||||
|
||||
// key will allow name or uid
|
||||
lookup, err := kdash.LoadDatasourceLookup(loadDatasourceCtx, orgID, l.sql)
|
||||
if err != nil {
|
||||
loadDatasourceSpan.End()
|
||||
return dashboards, err
|
||||
}
|
||||
loadDatasourceSpan.End()
|
||||
|
||||
loadingDashboardCtx, cancelLoadingDashboardCtx := context.WithCancel(ctx)
|
||||
defer cancelLoadingDashboardCtx()
|
||||
|
||||
dashboardsChannel := l.loadAllDashboards(loadingDashboardCtx, limit, orgID, dashboardUID)
|
||||
|
||||
for {
|
||||
res, ok := <-dashboardsChannel
|
||||
if res != nil && res.err != nil {
|
||||
l.logger.Error("Error when loading dashboards", "error", err, "orgID", orgID, "dashboardUID", dashboardUID)
|
||||
break
|
||||
}
|
||||
|
||||
if res == nil || !ok {
|
||||
break
|
||||
}
|
||||
|
||||
rows := res.dashboards
|
||||
|
||||
_, readDashboardSpan := l.tracer.Start(ctx, "sqlDashboardLoader readDashboard", trace.WithAttributes(
|
||||
attribute.Int64("orgID", orgID),
|
||||
attribute.Int("dashboardCount", len(rows)),
|
||||
))
|
||||
|
||||
reader := kdash.NewStaticDashboardSummaryBuilder(lookup, false)
|
||||
|
||||
for _, row := range rows {
|
||||
summary, _, err := reader(ctx, row.Uid, row.Data)
|
||||
if err != nil {
|
||||
l.logger.Warn("Error indexing dashboard data", "error", err, "dashboardId", row.Id, "dashboardSlug", row.Slug)
|
||||
// But append info anyway for now, since we possibly extracted useful information.
|
||||
}
|
||||
dashboards = append(dashboards, dashboard{
|
||||
id: row.Id,
|
||||
uid: row.Uid,
|
||||
isFolder: row.IsFolder,
|
||||
folderID: row.FolderID,
|
||||
folderUID: row.FolderUID,
|
||||
slug: row.Slug,
|
||||
created: row.Created,
|
||||
updated: row.Updated,
|
||||
summary: summary,
|
||||
})
|
||||
}
|
||||
readDashboardSpan.End()
|
||||
}
|
||||
|
||||
return dashboards, err
|
||||
}
|
||||
|
||||
// dashboardQueryResult mirrors the columns selected from the dashboard table;
// xorm tags cover the fields whose names do not map automatically.
type dashboardQueryResult struct {
	Id        int64
	Uid       string
	IsFolder  bool   `xorm:"is_folder"`
	FolderID  int64  `xorm:"folder_id"`
	FolderUID string `xorm:"folder_uid"`
	Slug      string `xorm:"slug"`
	Data      []byte
	Created   time.Time
	Updated   time.Time
}
|
||||
@@ -1,735 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/blugelabs/bluge"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/data"
|
||||
"github.com/grafana/grafana-plugin-sdk-go/experimental"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"github.com/grafana/grafana/pkg/infra/log"
|
||||
"github.com/grafana/grafana/pkg/infra/tracing"
|
||||
"github.com/grafana/grafana/pkg/services/featuremgmt"
|
||||
"github.com/grafana/grafana/pkg/services/store"
|
||||
"github.com/grafana/grafana/pkg/services/store/entity"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
)
|
||||
|
||||
// testDashboardLoader is a dashboardLoader stub returning a fixed dashboard set.
type testDashboardLoader struct {
	dashboards []dashboard
}
|
||||
|
||||
// LoadDashboards implements dashboardLoader by returning the fixture set,
// ignoring the org ID and UID filters.
func (t *testDashboardLoader) LoadDashboards(_ context.Context, _ int64, _ string) ([]dashboard, error) {
	return t.dashboards, nil
}
|
||||
|
||||
// testLogger is shared by all index tests.
var testLogger = log.New("index-test-logger")

// testAllowAllFilter permits every entity kind/uid in search queries.
var testAllowAllFilter = func(kind entityKind, uid, parent string) bool {
	return true
}

// testDisallowAllFilter rejects every entity.
var testDisallowAllFilter = func(kind entityKind, uid, parent string) bool {
	return false
}

// testOrgID is the organization used throughout the index tests.
var testOrgID int64 = 1
|
||||
|
||||
// initTestOrgIndexFromDashes builds a search index from the given dashboards
// (with a no-op extender) and returns the per-org index for the test org.
func initTestOrgIndexFromDashes(t *testing.T, dashboards []dashboard) *orgIndex {
	t.Helper()
	searchIdx := initTestIndexFromDashesExtended(t, dashboards, &NoopDocumentExtender{})
	return searchIdx.perOrgIndex[testOrgID]
}
|
||||
|
||||
// initTestOrgIndexFromDashesExtended is like initTestOrgIndexFromDashes but
// uses the provided document extender.
func initTestOrgIndexFromDashesExtended(t *testing.T, dashboards []dashboard, extender DocumentExtender) *orgIndex {
	t.Helper()
	searchIdx := initTestIndexFromDashesExtended(t, dashboards, extender)
	return searchIdx.perOrgIndex[testOrgID]
}
|
||||
|
||||
// initTestIndexFromDashes builds a full searchIndex from the given dashboards
// with a no-op document extender.
func initTestIndexFromDashes(t *testing.T, dashboards []dashboard) *searchIndex {
	t.Helper()
	return initTestIndexFromDashesExtended(t, dashboards, &NoopDocumentExtender{})
}
|
||||
|
||||
// initTestIndexFromDashesExtended builds a searchIndex backed by a stub loader
// and synchronously indexes the test org, asserting all dashboards got indexed.
func initTestIndexFromDashesExtended(t *testing.T, dashboards []dashboard, extender DocumentExtender) *searchIndex {
	t.Helper()
	dashboardLoader := &testDashboardLoader{
		dashboards: dashboards,
	}
	index := newSearchIndex(dashboardLoader, &store.MockEntityEventsService{}, extender, tracing.InitializeTracerForTest(), featuremgmt.WithFeatures(), setting.SearchSettings{})
	require.NotNil(t, index)
	numDashboards, err := index.buildOrgIndex(context.Background(), testOrgID)
	require.NoError(t, err)
	require.Equal(t, len(dashboardLoader.dashboards), numDashboards)
	return index
}
|
||||
|
||||
// checkSearchResponse runs a search with a no-op query extender and compares
// the response against the golden file named fileName.
func checkSearchResponse(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery) {
	t.Helper()
	checkSearchResponseExtended(t, fileName, index, filter, query, &NoopQueryExtender{})
}
|
||||
|
||||
func checkSearchResponseExtended(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery, extender QueryExtender) {
|
||||
t.Helper()
|
||||
resp := doSearchQuery(context.Background(), testLogger, index, filter, query, extender, "/pfix")
|
||||
experimental.CheckGoldenJSONResponse(t, "testdata", fileName, resp, true)
|
||||
}
|
||||
|
||||
func getFrameWithNames(resp *backend.DataResponse) *data.Frame {
|
||||
if resp == nil || len(resp.Frames) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
frame := resp.Frames[0]
|
||||
nameField, idx := frame.FieldByName(documentFieldName)
|
||||
if nameField.Len() == 0 || idx == -1 {
|
||||
return nil
|
||||
}
|
||||
|
||||
scoreField, _ := frame.FieldByName("score")
|
||||
return data.NewFrame("ordering frame", nameField, scoreField)
|
||||
}
|
||||
|
||||
func checkSearchResponseOrdering(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery) {
|
||||
t.Helper()
|
||||
checkSearchResponseOrderingExtended(t, fileName, index, filter, query, &NoopQueryExtender{})
|
||||
}
|
||||
|
||||
func checkSearchResponseOrderingExtended(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery, extender QueryExtender) {
|
||||
t.Helper()
|
||||
query.Explain = true
|
||||
resp := doSearchQuery(context.Background(), testLogger, index, filter, query, extender, "/pfix")
|
||||
experimental.CheckGoldenJSONFrame(t, "testdata", fileName, getFrameWithNames(resp), true)
|
||||
}
|
||||
|
||||
// testDashboards is the minimal two-dashboard fixture for basic search tests.
var testDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "test",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "boom",
		},
	},
}
|
||||
|
||||
func TestDashboardIndex(t *testing.T) {
|
||||
t.Run("basic-search", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "boom"},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("basic-filter", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testDisallowAllFilter,
|
||||
DashboardQuery{Query: "boom"},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// TestDashboardIndexUpdates verifies incremental index maintenance: removing a
// dashboard, adding a new one, and updating an existing one, each checked
// against a golden file.
func TestDashboardIndexUpdates(t *testing.T) {
	t.Run("dashboard-delete", func(t *testing.T) {
		index := initTestIndexFromDashes(t, testDashboards)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		// Remove dashboard uid "2" ("boom"); the query below should then miss.
		err := index.removeDashboard(context.Background(), orgIdx, "2")
		require.NoError(t, err)
		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "boom"},
		)
	})

	t.Run("dashboard-create", func(t *testing.T) {
		index := initTestIndexFromDashes(t, testDashboards)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)

		// Add a brand-new dashboard (id/uid 3) and search for it.
		err := index.updateDashboard(context.Background(), testOrgID, orgIdx, dashboard{
			id:  3,
			uid: "3",
			summary: &entity.EntitySummary{
				Name: "created",
			},
		})
		require.NoError(t, err)

		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "created"},
		)
	})

	t.Run("dashboard-update", func(t *testing.T) {
		index := initTestIndexFromDashes(t, testDashboards)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)

		// Rename existing dashboard uid "2" from "boom" to "nginx".
		err := index.updateDashboard(context.Background(), testOrgID, orgIdx, dashboard{
			id:  2,
			uid: "2",
			summary: &entity.EntitySummary{
				Name: "nginx",
			},
		})
		require.NoError(t, err)

		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "nginx"},
		)
	})
}
|
||||
|
||||
// testSortDashboards holds two dashboards with names chosen to sort to opposite
// ends ("a-test" vs "z-test") for the sort tests.
var testSortDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "a-test",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "z-test",
		},
	},
}
|
||||
|
||||
// testExtender bundles a DocumentExtender (index-time enrichment) with a
// QueryExtender (search-time framing) so tests can inject custom behavior on
// both sides of the pipeline.
type testExtender struct {
	documentExtender DocumentExtender
	queryExtender    QueryExtender
}

// GetDocumentExtender returns the index-time extender.
func (t *testExtender) GetDocumentExtender() DocumentExtender {
	return t.documentExtender
}

// GetQueryExtender returns the search-time extender.
func (t *testExtender) GetQueryExtender() QueryExtender {
	return t.queryExtender
}
|
||||
|
||||
// testDocumentExtender adapts a single ExtendDashboardFunc into the
// DocumentExtender interface, ignoring the org id and uid filter arguments.
type testDocumentExtender struct {
	ExtendDashboardFunc ExtendDashboardFunc
}

// GetDashboardExtender returns the wrapped function for any org/uids.
func (t *testDocumentExtender) GetDashboardExtender(_ int64, _ ...string) ExtendDashboardFunc {
	return t.ExtendDashboardFunc
}
|
||||
|
||||
// testQueryExtender adapts a framer-factory function into the QueryExtender
// interface.
type testQueryExtender struct {
	getFramer func(frame *data.Frame) FramerFunc
}

// GetFramer delegates to the injected factory.
func (t *testQueryExtender) GetFramer(frame *data.Frame) FramerFunc {
	return t.getFramer(frame)
}
|
||||
|
||||
// TestDashboardIndexSort checks sorting by a custom numeric field: a document
// extender stamps each indexed dashboard with an increasing "test" value, and a
// query extender surfaces that value as a "test num" column in the response.
func TestDashboardIndexSort(t *testing.T) {
	// i is incremented once per indexed document, so each dashboard gets a
	// distinct sortable value determined by indexing order.
	var i float64
	extender := &testExtender{
		documentExtender: &testDocumentExtender{
			ExtendDashboardFunc: func(uid string, doc *bluge.Document) error {
				doc.AddField(bluge.NewNumericField("test", i).StoreValue().Sortable())
				i++
				return nil
			},
		},
		queryExtender: &testQueryExtender{
			getFramer: func(frame *data.Frame) FramerFunc {
				// Append a float64 "test num" field to the frame, then fill it
				// from the stored "test" field of each hit as results stream in.
				testNum := data.NewFieldFromFieldType(data.FieldTypeFloat64, 0)
				testNum.Name = "test num"
				frame.Fields = append(
					frame.Fields,
					testNum,
				)
				return func(field string, value []byte) {
					if field == "test" {
						if num, err := bluge.DecodeNumericFloat64(value); err == nil {
							testNum.Append(num)
							return
						}
					}
				}
			},
		},
	}

	t.Run("sort-asc", func(t *testing.T) {
		index := initTestOrgIndexFromDashesExtended(t, testSortDashboards, extender.GetDocumentExtender())
		checkSearchResponseExtended(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "*", Sort: "test"}, extender.GetQueryExtender(),
		)
	})

	t.Run("sort-desc", func(t *testing.T) {
		// Leading "-" requests descending order on the same field.
		index := initTestOrgIndexFromDashesExtended(t, testSortDashboards, extender.GetDocumentExtender())
		checkSearchResponseExtended(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "*", Sort: "-test"}, extender.GetQueryExtender(),
		)
	})
}
|
||||
|
||||
// testPrefixDashboards provides multi-word names for prefix / ngram match tests.
var testPrefixDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "Archer Data System",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "Document Sync repo",
		},
	},
}
|
||||
|
||||
func TestDashboardIndex_PrefixSearch(t *testing.T) {
|
||||
t.Run("prefix-search-beginning", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "Arch"},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("prefix-search-middle", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "Syn"},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("prefix-search-beginning-lower", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "arch"},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("prefix-search-middle-lower", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "syn"},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
func TestDashboardIndex_MultipleTokensInRow(t *testing.T) {
|
||||
t.Run("multiple-tokens-beginning", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "Archer da"},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("multiple-tokens-beginning-lower", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "da archer"},
|
||||
)
|
||||
})
|
||||
|
||||
// Not sure it is great this matches, but
|
||||
t.Run("multiple-tokens-middle", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "ar Da"},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("multiple-tokens-middle-lower", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "doc sy"},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// longPrefixDashboards has a name whose first word is longer than the edge
// ngram limit, to exercise queries exceeding ngramEdgeFilterMaxLength.
var longPrefixDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "Eyjafjallajökull Eruption data",
		},
	},
}
|
||||
|
||||
func TestDashboardIndex_PrefixNgramExceeded(t *testing.T) {
|
||||
t.Run("prefix-search-ngram-exceeded", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, longPrefixDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "Eyjafjallajöku"},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// scatteredTokensDashboards has long names where query tokens appear far apart
// and out of order, for the scattered-token matching tests.
var scatteredTokensDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "Three can keep a secret, if two of them are dead (Benjamin Franklin)",
		},
	},
	{
		id:  3,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "A secret is powerful when it is empty (Umberto Eco)",
		},
	},
}
|
||||
|
||||
func TestDashboardIndex_MultipleTokensScattered(t *testing.T) {
|
||||
t.Run("scattered-tokens-match", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, scatteredTokensDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "dead secret"},
|
||||
)
|
||||
})
|
||||
t.Run("scattered-tokens-match-reversed", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, scatteredTokensDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "powerful secret"},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// dashboardsWithFolders models a folder (uid "1") containing two dashboards with
// nested panels, plus one dashboard (uid "4") outside any folder — used to test
// folder indexing and cascading removal.
var dashboardsWithFolders = []dashboard{
	{
		id:       1,
		uid:      "1",
		isFolder: true,
		summary: &entity.EntitySummary{
			Name: "My folder",
		},
	},
	{
		id:        2,
		uid:       "2",
		folderID:  1,
		folderUID: "1",
		summary: &entity.EntitySummary{
			Name: "Dashboard in folder 1",
			Nested: []*entity.EntitySummary{
				newNestedPanel(1, 2, "Panel 1"),
				newNestedPanel(2, 2, "Panel 2"),
			},
		},
	},
	{
		id:        3,
		uid:       "3",
		folderID:  1,
		folderUID: "1",
		summary: &entity.EntitySummary{
			Name: "Dashboard in folder 2",
			Nested: []*entity.EntitySummary{
				newNestedPanel(3, 3, "Panel 3"),
			},
		},
	},
	{
		// Dashboard outside any folder; survives folder removal in the tests.
		id:  4,
		uid: "4",
		summary: &entity.EntitySummary{
			Name: "One more dash",
			Nested: []*entity.EntitySummary{
				newNestedPanel(4, 4, "Panel 4"),
			},
		},
	},
}
|
||||
|
||||
func TestDashboardIndex_Folders(t *testing.T) {
|
||||
t.Run("folders-indexed", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, dashboardsWithFolders)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "My folder", Kind: []string{string(entityKindFolder)}},
|
||||
)
|
||||
})
|
||||
t.Run("folders-dashboard-has-folder", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, dashboardsWithFolders)
|
||||
// TODO: golden file compare does not work here.
|
||||
resp := doSearchQuery(context.Background(), testLogger, index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "Dashboard in folder", Kind: []string{string(entityKindDashboard)}},
|
||||
&NoopQueryExtender{}, "")
|
||||
custom, ok := resp.Frames[0].Meta.Custom.(*customMeta)
|
||||
require.Equal(t, uint64(2), custom.Count)
|
||||
require.True(t, ok, fmt.Sprintf("actual type: %T", resp.Frames[0].Meta.Custom))
|
||||
require.Equal(t, "/dashboards/f/1/", custom.Locations["1"].URL)
|
||||
})
|
||||
t.Run("folders-dashboard-removed-on-folder-removed", func(t *testing.T) {
|
||||
index := initTestIndexFromDashes(t, dashboardsWithFolders)
|
||||
orgIdx, ok := index.getOrgIndex(testOrgID)
|
||||
require.True(t, ok)
|
||||
err := index.removeFolder(context.Background(), orgIdx, "1")
|
||||
require.NoError(t, err)
|
||||
// In response we expect one dashboard which does not belong to removed folder.
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
|
||||
DashboardQuery{Query: "dash", Kind: []string{string(entityKindDashboard)}},
|
||||
)
|
||||
})
|
||||
t.Run("folders-panels-removed-on-folder-removed", func(t *testing.T) {
|
||||
index := initTestIndexFromDashes(t, dashboardsWithFolders)
|
||||
orgIdx, ok := index.getOrgIndex(testOrgID)
|
||||
require.True(t, ok)
|
||||
err := index.removeFolder(context.Background(), orgIdx, "1")
|
||||
require.NoError(t, err)
|
||||
resp := doSearchQuery(context.Background(), testLogger, orgIdx, testAllowAllFilter,
|
||||
DashboardQuery{Query: "Panel", Kind: []string{string(entityKindPanel)}},
|
||||
&NoopQueryExtender{}, "")
|
||||
custom, ok := resp.Frames[0].Meta.Custom.(*customMeta)
|
||||
require.True(t, ok)
|
||||
require.Equal(t, uint64(1), custom.Count) // 1 panel which does not belong to dashboards in removed folder.
|
||||
})
|
||||
}
|
||||
|
||||
// dashboardsWithPanels is a single dashboard with two nested panels, for
// panel-indexing tests.
var dashboardsWithPanels = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "My Dash",
			Nested: []*entity.EntitySummary{
				newNestedPanel(1, 1, "Panel 1"),
				newNestedPanel(2, 1, "Panel 2"),
			},
		},
	},
}
|
||||
|
||||
func newNestedPanel(id, dashId int64, name string) *entity.EntitySummary {
|
||||
summary := &entity.EntitySummary{
|
||||
Kind: "panel",
|
||||
UID: fmt.Sprintf("%d#%d", dashId, id),
|
||||
}
|
||||
summary.Name = name
|
||||
return summary
|
||||
}
|
||||
|
||||
// TestDashboardIndex_Panels verifies that nested panels are indexed with a
// location pointing at their dashboard, and are removed together with it.
func TestDashboardIndex_Panels(t *testing.T) {
	t.Run("panels-indexed", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, dashboardsWithPanels)
		// TODO: golden file compare does not work here.
		resp := doSearchQuery(
			context.Background(), testLogger, index, testAllowAllFilter,
			DashboardQuery{Query: "Panel", Kind: []string{string(entityKindPanel)}},
			&NoopQueryExtender{}, "")
		custom, ok := resp.Frames[0].Meta.Custom.(*customMeta)
		require.True(t, ok, fmt.Sprintf("actual type: %T", resp.Frames[0].Meta.Custom))
		// Both panels of dashboard "1" should be found, located under /d/1/.
		require.Equal(t, uint64(2), custom.Count)
		require.Equal(t, "/d/1/", custom.Locations["1"].URL)
	})
	t.Run("panels-panel-removed-on-dashboard-removed", func(t *testing.T) {
		index := initTestIndexFromDashes(t, dashboardsWithPanels)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		// Removing the dashboard must also drop its nested panels.
		err := index.removeDashboard(context.Background(), orgIdx, "1")
		require.NoError(t, err)
		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "Panel", Kind: []string{string(entityKindPanel)}},
		)
	})
}
|
||||
|
||||
// punctuationSplitNgramDashboards has a hyphenated name and a plain two-word
// name, for punctuation-splitting ngram tests.
var punctuationSplitNgramDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "heat-torkel",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "topology heatmap",
		},
	},
}
|
||||
|
||||
func TestDashboardIndex_PunctuationNgram(t *testing.T) {
|
||||
t.Run("ngram-punctuation-split", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, punctuationSplitNgramDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "tork he"},
|
||||
)
|
||||
})
|
||||
|
||||
t.Run("ngram-simple", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, punctuationSplitNgramDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "hea"},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
// camelCaseNgramDashboards has a camelCase name, for the camel-case token
// splitting test.
var camelCaseNgramDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "heatTorkel",
		},
	},
}
|
||||
|
||||
func TestDashboardIndex_CamelCaseNgram(t *testing.T) {
|
||||
t.Run("ngram-camel-case-split", func(t *testing.T) {
|
||||
index := initTestOrgIndexFromDashes(t, camelCaseNgramDashboards)
|
||||
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
|
||||
DashboardQuery{Query: "tork"},
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
func dashboardsWithTitles(names ...string) []dashboard {
|
||||
out := make([]dashboard, 0)
|
||||
for i, name := range names {
|
||||
no := int64(i + 1)
|
||||
out = append(out, dashboard{
|
||||
id: no,
|
||||
uid: fmt.Sprintf("%d", no),
|
||||
summary: &entity.EntitySummary{
|
||||
Name: name,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return out
|
||||
}
|
||||
|
||||
// TestDashboardIndex_MultiTermPrefixMatch golden-file-checks result ORDERING for
// multi-term and prefix queries over realistic title sets; each case indexes its
// own dashboard list and compares the reduced name/score frame.
func TestDashboardIndex_MultiTermPrefixMatch(t *testing.T) {
	var tests = []struct {
		dashboards []dashboard
		query      string
	}{
		{
			// Term plus a punctuation-terminated numeric prefix.
			dashboards: dashboardsWithTitles(
				"Panel Tests - Bar Gauge 2",
				"Prometheus 2.0",
				"Prometheus 2.0 Stats",
				"Prometheus 20.0",
				"Prometheus Second Word",
				"Prometheus Stats",
				"dynamic (2)",
				"prometheus histogram",
				"prometheus histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Prometheus 2.",
		},
		{
			// Single short prefix against a mixed title set.
			dashboards: dashboardsWithTitles(
				"From AAA",
				"Grafana Dev Overview & Home",
				"Home automation",
				"Prometheus 2.0",
				"Prometheus 2.0 Stats",
				"Prometheus 20.0",
				"Prometheus Stats",
				"Transforms - config from query",
				"iot-testing",
				"prom style with exemplars",
				"prop history",
				"simple frame",
				"with-hide-from",
				"xy broke",
			),
			query: "Prome",
		},
		{
			// Full term plus a prefix of a second term.
			dashboards: dashboardsWithTitles(
				"Panel Tests - Bar Gauge 2",
				"Prometheus 2.0",
				"Prometheus 2.0 Stats",
				"Prometheus 20.0",
				"Prometheus Second Word",
				"Prometheus Stats",
				"dynamic (2)",
				"prometheus histogram",
				"prometheus histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Prometheus stat",
		},
		{
			// Same shapes with a shorter first term ("Loki").
			dashboards: dashboardsWithTitles(
				"Loki Tests - Bar Gauge 2",
				"Loki 2.0",
				"Loki 2.0 Stats",
				"Loki 20.0",
				"Loki Second Word",
				"Loki Stats",
				"dynamic (2)",
				"Loki histogram",
				"Loki histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Loki 2.",
		},
		{
			dashboards: dashboardsWithTitles(
				"Loki Tests - Bar Gauge 2",
				"Loki 2.0",
				"Loki 2.0 Stats",
				"Loki 20.0",
				"Loki Second Word",
				"Loki Stats",
				"dynamic (2)",
				"Loki histogram",
				"Loki histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Lok",
		},
		{
			dashboards: dashboardsWithTitles(
				"Loki Tests - Bar Gauge 2",
				"Loki 2.0",
				"Loki 2.0 Stats",
				"Loki 20.0",
				"Loki Second Word",
				"Loki Stats",
				"dynamic (2)",
				"Loki histogram",
				"Loki histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Loki stats",
		},
	}

	for i, tt := range tests {
		t.Run(fmt.Sprintf("ordering-tests-%d-[%s]", i+1, tt.query), func(t *testing.T) {
			index := initTestOrgIndexFromDashes(t, tt.dashboards)
			checkSearchResponseOrdering(t, filepath.Base(t.Name()), index, testAllowAllFilter,
				DashboardQuery{Query: tt.query},
			)
		})
	}
}
|
||||
@@ -1,47 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"strings"
|
||||
|
||||
"github.com/blugelabs/bluge/analysis"
|
||||
"github.com/blugelabs/bluge/analysis/token"
|
||||
"github.com/blugelabs/bluge/analysis/tokenizer"
|
||||
)
|
||||
|
||||
// punctuationReplacer maps every ASCII punctuation character to a single space;
// built once at package init.
var punctuationReplacer *strings.Replacer

func init() {
	const punctuation = "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
	pairs := make([]string, 0, 2*len(punctuation))
	for _, ch := range punctuation {
		pairs = append(pairs, string(ch), " ")
	}
	punctuationReplacer = strings.NewReplacer(pairs...)
}

// punctuationCharFilter is a char filter that replaces punctuation with spaces,
// so punctuation-joined words tokenize as separate terms.
type punctuationCharFilter struct{}

// Filter returns the input with every punctuation character replaced by a space.
func (t *punctuationCharFilter) Filter(input []byte) []byte {
	return []byte(punctuationReplacer.Replace(string(input)))
}
|
||||
|
||||
// ngramEdgeFilterMaxLength caps emitted front edge-ngrams at 7 characters, so
// query prefixes longer than this will not match via ngrams alone.
const ngramEdgeFilterMaxLength = 7

// ngramIndexAnalyzer is the index-time analyzer: punctuation becomes spaces,
// tokens split on whitespace and camelCase, are lowercased, and expanded into
// front edge-ngrams of length 1..ngramEdgeFilterMaxLength.
var ngramIndexAnalyzer = &analysis.Analyzer{
	CharFilters: []analysis.CharFilter{&punctuationCharFilter{}},
	Tokenizer:   tokenizer.NewWhitespaceTokenizer(),
	TokenFilters: []analysis.TokenFilter{
		token.NewCamelCaseFilter(),
		token.NewLowerCaseFilter(),
		token.NewEdgeNgramFilter(token.FRONT, 1, ngramEdgeFilterMaxLength),
	},
}

// ngramQueryAnalyzer mirrors ngramIndexAnalyzer but WITHOUT the edge-ngram
// filter: query tokens stay whole so they match indexed ngrams as prefixes.
var ngramQueryAnalyzer = &analysis.Analyzer{
	CharFilters: []analysis.CharFilter{&punctuationCharFilter{}},
	Tokenizer:   tokenizer.NewWhitespaceTokenizer(),
	TokenFilters: []analysis.TokenFilter{
		token.NewCamelCaseFilter(),
		token.NewLowerCaseFilter(),
	},
}
|
||||
@@ -1,61 +0,0 @@
|
||||
package searchV2
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func Test_punctuationCharFilter_Filter(t1 *testing.T) {
|
||||
type args struct {
|
||||
input []byte
|
||||
}
|
||||
tests := []struct {
|
||||
name string
|
||||
args args
|
||||
want []byte
|
||||
}{
|
||||
{
|
||||
name: "1",
|
||||
args: args{
|
||||
input: []byte("x-Rays"),
|
||||
},
|
||||
want: []byte("x Rays"),
|
||||
},
|
||||
{
|
||||
name: "2",
|
||||
args: args{
|
||||
input: []byte("x.Rays"),
|
||||
},
|
||||
want: []byte("x Rays"),
|
||||
},
|
||||
{
|
||||
name: "3",
|
||||
args: args{
|
||||
input: []byte("[x,Rays]"),
|
||||
},
|
||||
want: []byte(" x Rays "),
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t1.Run(tt.name, func(t1 *testing.T) {
|
||||
t := &punctuationCharFilter{}
|
||||
if got := t.Filter(tt.args.input); !reflect.DeepEqual(got, tt.want) {
|
||||
t1.Errorf("Filter() = %v, want %v", string(got), string(tt.want))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestNgramIndexAnalyzer(t *testing.T) {
|
||||
stream := ngramIndexAnalyzer.Analyze([]byte("x-rays.and.xRays, and НемногоКириллицы"))
|
||||
expectedTerms := []string{"x", "r", "ra", "ray", "rays", "a", "an", "and", "x", "r", "ra", "ray", "rays", "a", "an", "and", "н", "не", "нем", "немн", "немно", "немног", "немного", "к", "ки", "кир", "кири", "кирил", "кирилл", "кирилли"}
|
||||
|
||||
actualTerms := make([]string, 0, len(stream))
|
||||
for _, t := range stream {
|
||||
actualTerms = append(actualTerms, string(t.Term))
|
||||
}
|
||||
|
||||
require.Equal(t, expectedTerms, actualTerms)
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user