Compare commits

..

15 Commits

Author SHA1 Message Date
Nathan Verzemnieks
79a61a2b63 CloudWatch: fix error source for some query errors 2026-01-02 14:57:50 +01:00
Mustafa Sencer Özcan
dc4c106e91 fix: use memory index if index file already open (#115720)
* feat: add lock structure into bleve index files

* fix: another approach

* fix: new check

* fix: build in memory if index file already open

* fix: update workspace

* fix: add test

* refactor: update func signature

* fix: address comments

* fix: make const
2026-01-02 13:51:51 +01:00
Kristina Demeshchik
33a1c60433 Dashboard: Add lazy loading for repeated panels (#115047)
Co-authored-by: Haris Rozajac <haris.rozajac12@gmail.com>
Co-authored-by: Ivan Ortega <ivanortegaalba@gmail.com>
2026-01-02 08:15:40 +01:00
Stephanie Hingtgen
521670981a Zanzana: Add metric for last reconciliation (#115768) 2025-12-31 12:42:09 -06:00
alerting-team[bot]
79ca4e5aec Alerting: Update alerting module to b7821017d69f2e31500fc0e49cd0ba3b85372a1b (#115767)
* [create-pull-request] automated change

* Fix tests

---------

Co-authored-by: alexander-akhmetov <1875873+alexander-akhmetov@users.noreply.github.com>
Co-authored-by: Alexander Akhmetov <me@alx.cx>
2025-12-31 16:04:41 +00:00
Paul Marbach
e3bc61e7d2 Suggestions: Add intermediate state to avoid unexpected saved states (#115709)
* Suggestions: Add intermediate state to avoid unexpected saved states

* cleanup

* update and add e2es to confirm behavior

* fix some of the change dispatch

* codeowners

* fix js error that this exposed

* Apply suggestion from @fastfrwrd
2025-12-31 10:56:47 -05:00
Stephanie Hingtgen
cc6a75d021 Zanzana: Add folder integration tests (#115766) 2025-12-31 15:15:20 +00:00
Stephanie Hingtgen
6d0f7f3567 AccessControl: Seed basic roles on startup (#115729) 2025-12-31 08:43:29 -06:00
Stephanie Hingtgen
913c0ba3c5 Dashboards: Cleanup integration tests (#115765) 2025-12-31 14:29:01 +00:00
Matheus Macabu
552b6aa717 Secrets: Don't update createdBy when updating a secure value (#115760) 2025-12-31 15:01:22 +01:00
Alexander Akhmetov
2ddb4049c6 Alerting: Fix target datasource description (#115666) 2025-12-31 12:35:35 +00:00
Jo
318a0ebb36 IAM: Authorize writes to zanzana on token permissions (#115645)
* validate writes to zanzana, not reads

* lint ignore
2025-12-31 09:15:00 +00:00
grafana-pr-automation[bot]
bba5c44dc4 I18n: Download translations from Crowdin (#115757)
New Crowdin translations by GitHub Action

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-12-31 00:42:54 +00:00
Paul Marbach
44e6ea3d8b Gauge: Fix issues found during bug bash (#115740)
* fix warning for VizRepeater styles

* Gauge: Update test dashboard to round two of the segment panels to whole numbers

* Gauge: E2E tests

* add test for sparklines

* Gauge: Change inner glow to be friendlier to our a11y tests

* remove unused CODEOWNER declaration

* expose text mode so that old displayName usage is somewhat preserved

* update migrations to use the value_and_text mode if displayName has a non-empty value

* more test cases

* update unit tests for fixture updates
2025-12-30 15:27:32 -05:00
Kristina Demeshchik
014d4758c6 Dashboards: Prevent row selection when clicking canvas add actions (#115580)
* event propagation issues

* Action items width

* prevent pointer up event
2025-12-30 12:27:38 -07:00
178 changed files with 12799 additions and 523 deletions

4
.github/CODEOWNERS vendored
View File

@@ -384,6 +384,7 @@
# Grafana app platform
/pkg/services/live/ @grafana/grafana-app-platform-squad
/pkg/services/searchV2/ @grafana/grafana-app-platform-squad
/pkg/services/store/ @grafana/grafana-app-platform-squad
/pkg/infra/filestorage/ @grafana/grafana-app-platform-squad
/pkg/modules/ @grafana/grafana-app-platform-squad
@@ -500,7 +501,6 @@ i18next.config.ts @grafana/grafana-frontend-platform
/e2e-playwright/various-suite/filter-annotations.spec.ts @grafana/dashboards-squad
/e2e-playwright/various-suite/frontend-sandbox-app.spec.ts @grafana/plugins-platform-frontend
/e2e-playwright/various-suite/frontend-sandbox-datasource.spec.ts @grafana/plugins-platform-frontend
/e2e-playwright/various-suite/gauge.spec.ts @grafana/dataviz-squad
/e2e-playwright/various-suite/grafana-datasource-random-walk.spec.ts @grafana/grafana-frontend-platform
/e2e-playwright/various-suite/graph-auto-migrate.spec.ts @grafana/dataviz-squad
/e2e-playwright/various-suite/inspect-drawer.spec.ts @grafana/dashboards-squad
@@ -519,7 +519,7 @@ i18next.config.ts @grafana/grafana-frontend-platform
/e2e-playwright/various-suite/solo-route.spec.ts @grafana/dashboards-squad
/e2e-playwright/various-suite/trace-view-scrolling.spec.ts @grafana/observability-traces-and-profiling
/e2e-playwright/various-suite/verify-i18n.spec.ts @grafana/grafana-frontend-platform
/e2e-playwright/various-suite/visualization-suggestions.spec.ts @grafana/dataviz-squad
/e2e-playwright/various-suite/visualization-suggestions*.spec.ts @grafana/dataviz-squad
/e2e-playwright/various-suite/perf-test.spec.ts @grafana/grafana-frontend-platform
# Packages

View File

@@ -157,7 +157,7 @@ require (
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/google/wire v0.7.0 // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // indirect

View File

@@ -619,8 +619,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -4,7 +4,7 @@ go 1.25.5
require (
github.com/go-kit/log v0.2.1
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4
github.com/grafana/grafana-app-sdk v0.48.7
github.com/grafana/grafana-app-sdk/logging v0.48.7

View File

@@ -243,8 +243,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 h1:jSojuc7njleS3UOz223WDlXOinmuLAIPI0z2vtq8EgI=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4/go.mod h1:VahT+GtfQIM+o8ht2StR6J9g+Ef+C2Vokh5uuSmOD/4=
github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfUHB32u2ZMo=

View File

@@ -628,6 +628,20 @@
}
],
"title": "Only nulls and no user set min \u0026 max",
"transformations": [
{
"id": "convertFieldType",
"options": {
"conversions": [
{
"destinationType": "number",
"targetField": "A-series"
}
],
"fields": {}
}
}
],
"type": "gauge"
},
{
@@ -1179,4 +1193,4 @@
"title": "Panel Tests - Gauge",
"uid": "_5rDmaQiz",
"weekStart": ""
}
}

View File

@@ -1760,6 +1760,22 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active gateways",
"type": "radialbar"
},
@@ -1843,6 +1859,22 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active pods",
"type": "radialbar"
},

View File

@@ -485,6 +485,7 @@
},
"id": 12,
"options": {
"displayName": "My gauge",
"minVizHeight": 75,
"minVizWidth": 75,
"orientation": "auto",

View File

@@ -223,7 +223,7 @@ require (
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/gorilla/mux v1.8.1 // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect

View File

@@ -186,6 +186,8 @@ github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/ProtonMail/go-crypto v1.1.6 h1:ZcV+Ropw6Qn0AX9brlQLAUXfqLBc7Bl+f/DmNxpLfdw=
github.com/ProtonMail/go-crypto v1.1.6/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE=
github.com/RoaringBitmap/roaring v1.9.3 h1:t4EbC5qQwnisr5PrP9nt0IRhRTb9gMUgQF4t4S2OByM=
github.com/RoaringBitmap/roaring v1.9.3/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90=
github.com/RoaringBitmap/roaring/v2 v2.4.5 h1:uGrrMreGjvAtTBobc0g5IrW1D5ldxDQYe2JW2gggRdg=
github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
@@ -284,6 +286,8 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
github.com/aws/smithy-go v1.23.1 h1:sLvcH6dfAFwGkHLZ7dGiYF7aK6mg4CgKA/iDKjLDt9M=
github.com/aws/smithy-go v1.23.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 h1:60m4tnanN1ctzIu4V3bfCNJ39BiOPSm1gHFlFjTkRE0=
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27/go.mod h1:k08r+Yj1PRAmuayFiRK6MYuR5Ve4IuZtTfxErMIh0+c=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df h1:GSoSVRLoBaFpOOds6QyY1L8AX7uoY+Ln3BHc22W40X0=
@@ -345,6 +349,14 @@ github.com/blevesearch/zapx/v16 v16.2.2 h1:MifKJVRTEhMTgSlle2bDRTb39BGc9jXFRLPZc
github.com/blevesearch/zapx/v16 v16.2.2/go.mod h1:B9Pk4G1CqtErgQV9DyCSA9Lb7WZe4olYfGw7fVDZ4sk=
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
github.com/blugelabs/bluge v0.2.2 h1:gat8CqE6P6tOgeX30XGLOVNTC26cpM2RWVcreXWtYcM=
github.com/blugelabs/bluge v0.2.2/go.mod h1:am1LU9jS8dZgWkRzkGLQN3757EgMs3upWrU2fdN9foE=
github.com/blugelabs/bluge_segment_api v0.2.0 h1:cCX1Y2y8v0LZ7+EEJ6gH7dW6TtVTW4RhG0vp3R+N2Lo=
github.com/blugelabs/bluge_segment_api v0.2.0/go.mod h1:95XA+ZXfRj/IXADm7gZ+iTcWOJPg5jQTY1EReIzl3LA=
github.com/blugelabs/ice v1.0.0 h1:um7wf9e6jbkTVCrOyQq3tKK43fBMOvLUYxbj3Qtc4eo=
github.com/blugelabs/ice v1.0.0/go.mod h1:gNfFPk5zM+yxJROhthxhVQYjpBO9amuxWXJQ2Lo+IbQ=
github.com/blugelabs/ice/v2 v2.0.1 h1:mzHbntLjk2v7eDRgoXCgzOsPKN1Tenu9Svo6l9cTLS4=
github.com/blugelabs/ice/v2 v2.0.1/go.mod h1:QxAWSPNwZwsIqS25c3lbIPFQrVvT1sphf5x5DfMLH5M=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf h1:TqhNAT4zKbTdLa62d2HDBFdvgSbIGB3eJE8HqhgiL9I=
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf/go.mod h1:r5xuitiExdLAJ09PR7vBVENGvp4ZuTBeWTGtxuX3K+c=
@@ -360,6 +372,8 @@ github.com/bwmarrin/snowflake v0.3.0 h1:xm67bEhkKh6ij1790JB83OujPR5CzNe8QuQqAgIS
github.com/bwmarrin/snowflake v0.3.0/go.mod h1:NdZxfVWX+oR6y2K0o6qAYv6gIOP9rjG0/E9WsDpxqwE=
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 h1:6lhrsTEnloDPXyeZBvSYvQf8u86jbKehZPVDDlkgDl4=
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500/go.mod h1:S/7n9copUssQ56c7aAgHqftWO4LTf4xY6CGWt8Bc+3M=
github.com/caio/go-tdigest v3.1.0+incompatible h1:uoVMJ3Q5lXmVLCCqaMGHLBWnbGoN6Lpu7OAUPR60cds=
github.com/caio/go-tdigest v3.1.0+incompatible/go.mod h1:sHQM/ubZStBUmF1WbB8FAm8q9GjDajLC5T7ydxE3JHI=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM=
@@ -447,6 +461,8 @@ github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINA
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dhui/dktest v0.3.0/go.mod h1:cyzIUfGsBEbZ6BT7tnXqAShHSXCZhSNmFl70sZ7c1yc=
@@ -811,8 +827,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -90,7 +90,7 @@ require (
github.com/google/gnostic-models v0.7.1 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect

View File

@@ -213,8 +213,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -600,6 +600,20 @@
"stringInput": "null,null"
}
],
"transformations": [
{
"id": "convertFieldType",
"options": {
"fields": {},
"conversions": [
{
"targetField": "A-series",
"destinationType": "number"
}
]
}
}
],
"title": "Only nulls and no user set min & max",
"type": "gauge"
},

View File

@@ -1718,6 +1718,22 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active gateways",
"type": "radialbar"
},
@@ -1799,6 +1815,22 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active pods",
"type": "radialbar"
},

View File

@@ -474,6 +474,7 @@
},
"id": 12,
"options": {
"displayName": "My gauge",
"minVizHeight": 75,
"minVizWidth": 75,
"orientation": "auto",

View File

@@ -134,7 +134,7 @@ To convert data source-managed alert rules to Grafana managed alerts:
Pausing stops alert rule evaluation behavior for the newly created Grafana-managed alert rules.
9. (Optional) In the **Target data source** of the **Recording rules** section, you can select the data source that the imported recording rules will query. By default, it is the data source selected in the **Data source** dropdown.
9. (Optional) In the **Target data source** of the **Recording rules** section, you can select the data source to which the imported recording rules will write metrics. By default, it is the data source selected in the **Data source** dropdown.
10. Click **Import**.

View File

@@ -0,0 +1,101 @@
import { test, expect } from '@grafana/plugin-e2e';
// this test requires a larger viewport so all gauge panels load properly
test.use({
featureToggles: { newGauge: true },
viewport: { width: 1280, height: 3000 },
});
const OLD_GAUGES_DASHBOARD_UID = '_5rDmaQiz';
const NEW_GAUGES_DASHBOARD_UID = 'panel-tests-gauge-new';
test.describe(
'Gauge Panel',
{
tag: ['@panels', '@gauge'],
},
() => {
test('successfully migrates all gauge panels', async ({ gotoDashboardPage, selectors }) => {
const dashboardPage = await gotoDashboardPage({ uid: OLD_GAUGES_DASHBOARD_UID });
// check that gauges are rendered
const gaugeElements = dashboardPage.getByGrafanaSelector(
selectors.components.Panels.Visualization.Gauge.Container
);
await expect(gaugeElements).toHaveCount(16);
// check that no panel errors exist
const errorInfo = dashboardPage.getByGrafanaSelector(selectors.components.Panels.Panel.headerCornerInfo('error'));
await expect(errorInfo).toBeHidden();
});
test('renders new gauge panels', async ({ gotoDashboardPage, selectors }) => {
// open Panel Tests - Gauge
const dashboardPage = await gotoDashboardPage({ uid: NEW_GAUGES_DASHBOARD_UID });
// check that gauges are rendered
const gaugeElements = dashboardPage.getByGrafanaSelector(
selectors.components.Panels.Visualization.Gauge.Container
);
await expect(gaugeElements).toHaveCount(32);
// check that no panel errors exist
const errorInfo = dashboardPage.getByGrafanaSelector(selectors.components.Panels.Panel.headerCornerInfo('error'));
await expect(errorInfo).toBeHidden();
});
test('renders sparklines in gauge panels', async ({ gotoDashboardPage, page }) => {
await gotoDashboardPage({
uid: NEW_GAUGES_DASHBOARD_UID,
queryParams: new URLSearchParams({ editPanel: '11' }),
});
await expect(page.locator('.uplot')).toHaveCount(5);
});
test('"no data"', async ({ gotoDashboardPage, selectors }) => {
const dashboardPage = await gotoDashboardPage({
uid: NEW_GAUGES_DASHBOARD_UID,
queryParams: new URLSearchParams({ editPanel: '36' }),
});
await expect(
dashboardPage.getByGrafanaSelector(selectors.components.Panels.Visualization.Gauge.Container),
'that the gauge does not appear'
).toBeHidden();
await expect(
dashboardPage.getByGrafanaSelector(selectors.components.Panels.Panel.PanelDataErrorMessage),
'that the empty text appears'
).toHaveText('No data');
// update the "No value" option and see if the panel updates
const noValueOption = dashboardPage
.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.fieldLabel('Standard options No value'))
.locator('input');
await noValueOption.fill('My empty value');
await noValueOption.blur();
await expect(
dashboardPage.getByGrafanaSelector(selectors.components.Panels.Visualization.Gauge.Container),
'that the empty text shows up in an empty gauge'
).toHaveText('My empty value');
// test the "no numeric fields" message on the next panel
const dashboardPage2 = await gotoDashboardPage({
uid: NEW_GAUGES_DASHBOARD_UID,
queryParams: new URLSearchParams({ editPanel: '37' }),
});
await expect(
dashboardPage2.getByGrafanaSelector(selectors.components.Panels.Visualization.Gauge.Container),
'that the gauge does not appear'
).toBeHidden();
await expect(
dashboardPage2.getByGrafanaSelector(selectors.components.Panels.Panel.PanelDataErrorMessage),
'that the empty text appears'
).toHaveText('Data is missing a number field');
});
}
);

View File

@@ -1,4 +1,4 @@
import { BootData } from '@grafana/data';
import { BootData, PanelPluginMeta } from '@grafana/data';
import { test, expect } from '@grafana/plugin-e2e';
test.describe(
@@ -22,7 +22,7 @@ test.describe(
await dashboardPage.addPanel();
// Get panel types from window object
const panelTypes = await page.evaluate(() => {
const panelTypes: PanelPluginMeta[] = await page.evaluate(() => {
// @grafana/plugin-e2e doesn't export the full bootdata config
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
const win = window as typeof window & { grafanaBootData: BootData };

View File

@@ -1,27 +0,0 @@
import { test, expect } from '@grafana/plugin-e2e';
// this test requires a larger viewport so all gauge panels load properly
test.use({
viewport: { width: 1280, height: 1080 },
});
test.describe(
'Gauge Panel',
{
tag: ['@various'],
},
() => {
test('Gauge rendering e2e tests', async ({ gotoDashboardPage, selectors, page }) => {
// open Panel Tests - Gauge
const dashboardPage = await gotoDashboardPage({ uid: '_5rDmaQiz' });
// check that gauges are rendered
const gaugeElements = page.locator('.flot-base');
await expect(gaugeElements).toHaveCount(16);
// check that no panel errors exist
const errorInfo = dashboardPage.getByGrafanaSelector(selectors.components.Panels.Panel.headerCornerInfo('error'));
await expect(errorInfo).toBeHidden();
});
}
);

View File

@@ -0,0 +1,178 @@
import { test, expect } from '@grafana/plugin-e2e';
test.use({
featureToggles: {
newVizSuggestions: true,
externalVizSuggestions: false,
},
viewport: {
width: 800,
height: 1500,
},
});
test.describe(
'Visualization suggestions v2',
{
tag: ['@various', '@suggestions'],
},
() => {
test('Should be shown and clickable', async ({ selectors, gotoPanelEditPage }) => {
// Open dashboard with edit panel
const panelEditPage = await gotoPanelEditPage({
dashboard: {
uid: 'aBXrJ0R7z',
},
id: '9',
});
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.Panels.Panel.content).locator('.uplot'),
'time series to be rendered inside panel'
).toBeVisible();
// Try visualization suggestions
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('Suggestions')).click();
// Verify we see suggestions
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Line chart')),
'line chart suggestion to be rendered'
).toBeVisible();
// TODO: in this part of the test, we will change the query and the transforms and observe suggestions being updated.
// Select a visualization and verify table header is visible from preview
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Table')).click();
await expect(
panelEditPage
.getByGrafanaSelector(selectors.components.Panels.Panel.content)
.getByRole('grid')
.getByRole('row')
.first(),
'table to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton),
'discard changes button disabled since panel has not yet changed'
).toBeDisabled();
// apply the suggestion and verify panel options are visible
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.confirm('Table')).click();
await expect(
panelEditPage
.getByGrafanaSelector(selectors.components.Panels.Panel.content)
.getByRole('grid')
.getByRole('row')
.first(),
'table to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
'options pane to be rendered'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton),
'discard changes button enabled now that panel is dirty'
).toBeEnabled();
});
test('should not apply suggestion if you navigate toggle the viz picker back off', async ({
selectors,
gotoPanelEditPage,
}) => {
// Open dashboard with edit panel
const panelEditPage = await gotoPanelEditPage({
dashboard: {
uid: 'aBXrJ0R7z',
},
id: '9',
});
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.Panels.Panel.content).locator('.uplot'),
'time series to be rendered inside panel;'
).toBeVisible();
// Try visualization suggestions
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('Suggestions')).click();
// Verify we see suggestions
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Line chart')),
'line chart suggestion to be rendered'
).toBeVisible();
// Select a visualization
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Table')).click();
await expect(
panelEditPage
.getByGrafanaSelector(selectors.components.Panels.Panel.content)
.getByRole('grid')
.getByRole('row')
.first(),
'table to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton)
).toBeDisabled();
// Verify that toggling the viz picker back cancels the suggestion, restores the line chart, shows panel options
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.Panels.Panel.content).locator('.uplot'),
'time series to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
'options pane to be rendered'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton),
'discard changes button is still disabled since no changes were applied'
).toBeDisabled();
});
test('should not apply suggestion if you navigate back to the dashboard', async ({
page,
selectors,
gotoPanelEditPage,
}) => {
// Open dashboard with edit panel
const panelEditPage = await gotoPanelEditPage({
dashboard: {
uid: 'aBXrJ0R7z',
},
id: '9',
});
// Try visualization suggestions
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('Suggestions')).click();
// Verify we see suggestions
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Line chart')),
'line chart suggestion to be rendered'
).toBeVisible();
// Select a visualization
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Table')).click();
await expect(page.getByRole('grid').getByRole('row').first(), 'table row to be rendered').toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton)
).toBeDisabled();
// Verify that navigating back to the dashboard cancels the suggestion and restores the line chart.
await panelEditPage
.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.backToDashboardButton)
.click();
await expect(
page.locator('[data-viz-panel-key="panel-9"]').locator('.uplot'),
'time series to be rendered inside the panel'
).toBeVisible();
});
}
);

View File

@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
test.describe(
'Visualization suggestions',
{
tag: ['@various'],
tag: ['@various', '@suggestions'],
},
() => {
test('Should be shown and clickable', async ({ page, selectors, gotoPanelEditPage }) => {

12
go.mod
View File

@@ -44,6 +44,8 @@ require (
github.com/blang/semver/v4 v4.0.0 // indirect; @grafana/grafana-developer-enablement-squad
github.com/blevesearch/bleve/v2 v2.5.0 // @grafana/grafana-search-and-storage
github.com/blevesearch/bleve_index_api v1.2.7 // @grafana/grafana-search-and-storage
github.com/blugelabs/bluge v0.2.2 // @grafana/grafana-backend-group
github.com/blugelabs/bluge_segment_api v0.2.0 // @grafana/grafana-backend-group
github.com/bradfitz/gomemcache v0.0.0-20250403215159-8d39553ac7cf // @grafana/grafana-backend-group
github.com/bwmarrin/snowflake v0.3.0 // @grafana/grafana-app-platform-squad
github.com/centrifugal/centrifuge v0.38.0 // @grafana/grafana-app-platform-squad
@@ -85,7 +87,7 @@ require (
github.com/googleapis/gax-go/v2 v2.15.0 // @grafana/grafana-backend-group
github.com/gorilla/mux v1.8.1 // @grafana/grafana-backend-group
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // @grafana/grafana-app-platform-squad
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // @grafana/alerting-backend
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // @grafana/alerting-backend
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // @grafana/identity-access-team
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // @grafana/identity-access-team
github.com/grafana/dataplane/examples v0.0.1 // @grafana/observability-metrics
@@ -179,6 +181,7 @@ require (
github.com/xlab/treeprint v1.2.0 // @grafana/observability-traces-and-profiling
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // @grafana/grafana-operator-experience-squad
github.com/yudai/gojsondiff v1.0.0 // @grafana/grafana-backend-group
go.etcd.io/bbolt v1.4.2 // @grafana/grafana-search-and-storage
go.opentelemetry.io/collector/pdata v1.44.0 // @grafana/grafana-backend-group
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.64.0 // @grafana/plugins-platform-backend
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // @grafana/grafana-operator-experience-squad
@@ -322,6 +325,7 @@ require (
github.com/Masterminds/squirrel v1.5.4 // indirect
github.com/Microsoft/go-winio v0.6.2 // indirect
github.com/NYTimes/gziphandler v1.1.1 // indirect
github.com/RoaringBitmap/roaring v1.9.3 // indirect
github.com/RoaringBitmap/roaring/v2 v2.4.5 // indirect
github.com/Yiling-J/theine-go v0.6.2 // indirect
github.com/agext/levenshtein v1.2.1 // indirect
@@ -352,6 +356,7 @@ require (
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 // indirect
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df // indirect
github.com/beorn7/perks v1.0.1 // indirect
@@ -374,9 +379,12 @@ require (
github.com/blevesearch/zapx/v15 v15.4.1 // indirect
github.com/blevesearch/zapx/v16 v16.2.2 // indirect
github.com/bluele/gcache v0.0.2 // indirect
github.com/blugelabs/ice v1.0.0 // indirect
github.com/blugelabs/ice/v2 v2.0.1 // indirect
github.com/bufbuild/protocompile v0.14.1 // indirect
github.com/buger/jsonparser v1.1.1 // indirect
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 // indirect
github.com/caio/go-tdigest v3.1.0+incompatible // indirect
github.com/cenkalti/backoff/v4 v4.3.0 // @grafana/alerting-backend
github.com/cenkalti/backoff/v5 v5.0.3 // indirect
github.com/centrifugal/protocol v0.17.0 // indirect
@@ -395,6 +403,7 @@ require (
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/dennwc/varint v1.0.0 // indirect
github.com/dgraph-io/ristretto/v2 v2.2.0 // indirect
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/diegoholiveira/jsonlogic/v3 v3.7.4 // indirect
github.com/distribution/reference v0.6.0 // indirect
@@ -595,7 +604,6 @@ require (
github.com/yuin/gopher-lua v1.1.1 // indirect
github.com/zclconf/go-cty v1.16.3 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.etcd.io/bbolt v1.4.2 // indirect
go.etcd.io/etcd/api/v3 v3.6.6 // indirect
go.etcd.io/etcd/client/pkg/v3 v3.6.6 // indirect
go.etcd.io/etcd/client/v3 v3.6.6 // indirect

54
go.sum
View File

@@ -768,6 +768,11 @@ github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbt
github.com/PuerkitoBio/urlesc v0.0.0-20160726150825-5bd2802263f2/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/RaveNoX/go-jsoncommentstrip v1.0.0/go.mod h1:78ihd09MekBnJnxpICcwzCMzGrKSKYe4AqU6PDYYpjk=
github.com/RoaringBitmap/gocroaring v0.4.0/go.mod h1:NieMwz7ZqwU2DD73/vvYwv7r4eWBKuPVSXZIpsaMwCI=
github.com/RoaringBitmap/roaring v0.9.1/go.mod h1:h1B7iIUOmnAeb5ytYMvnHJwxMc6LUrwBnzXWRuqTQUc=
github.com/RoaringBitmap/roaring v0.9.4/go.mod h1:icnadbWcNyfEHlYdr+tDlOTih1Bf/h+rzPpv4sbomAA=
github.com/RoaringBitmap/roaring v1.9.3 h1:t4EbC5qQwnisr5PrP9nt0IRhRTb9gMUgQF4t4S2OByM=
github.com/RoaringBitmap/roaring v1.9.3/go.mod h1:6AXUsoIEzDTFFQCe1RbGA6uFONMhvejWj5rqITANK90=
github.com/RoaringBitmap/roaring/v2 v2.4.5 h1:uGrrMreGjvAtTBobc0g5IrW1D5ldxDQYe2JW2gggRdg=
github.com/RoaringBitmap/roaring/v2 v2.4.5/go.mod h1:FiJcsfkGje/nZBZgCu0ZxCPOKD/hVXDS2dXi7/eUFE0=
github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo=
@@ -820,6 +825,7 @@ github.com/apapsch/go-jsonmerge/v2 v2.0.0/go.mod h1:lvDnEdqiQrp0O42VQGgmlKpxL1AP
github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY=
github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4=
github.com/armon/circbuf v0.0.0-20150827004946-bbbad097214e/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878/go.mod h1:3AMJUQhVx52RsWOnlkpikZr01T/yAVN2gn0861vByNg=
github.com/armon/go-metrics v0.3.10/go.mod h1:4O98XIr/9W0sxpJ8UaYkvjk10Iff7SnFrb4QAOwNTFc=
@@ -892,6 +898,9 @@ github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
github.com/aws/smithy-go v1.23.1 h1:sLvcH6dfAFwGkHLZ7dGiYF7aK6mg4CgKA/iDKjLDt9M=
github.com/aws/smithy-go v1.23.1/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/axiomhq/hyperloglog v0.0.0-20191112132149-a4c4c47bc57f/go.mod h1:2stgcRjl6QmW+gU2h5E7BQXg4HU0gzxKWDuT5HviN9s=
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 h1:60m4tnanN1ctzIu4V3bfCNJ39BiOPSm1gHFlFjTkRE0=
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27/go.mod h1:k08r+Yj1PRAmuayFiRK6MYuR5Ve4IuZtTfxErMIh0+c=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
github.com/bahlo/generic-list-go v0.2.0/go.mod h1:2KvAjgMlE5NNynlg/5iLrrCCZ2+5xWbdbCW3pNTGyYg=
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df h1:GSoSVRLoBaFpOOds6QyY1L8AX7uoY+Ln3BHc22W40X0=
@@ -910,6 +919,7 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-hostpool v0.0.0-20171023180738-a3a6125de932/go.mod h1:NOuUCSz6Q9T7+igc/hlvDOUdtWKryOrtFyIVABv/p7k=
github.com/bits-and-blooms/bitset v1.2.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
github.com/bits-and-blooms/bitset v1.22.0 h1:Tquv9S8+SGaS3EhyA+up3FXzmkhxPGjQQCkcs2uw7w4=
github.com/bits-and-blooms/bitset v1.22.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
@@ -929,16 +939,21 @@ github.com/blevesearch/go-porterstemmer v1.0.3 h1:GtmsqID0aZdCSNiY8SkuPJ12pD4jI+
github.com/blevesearch/go-porterstemmer v1.0.3/go.mod h1:angGc5Ht+k2xhJdZi511LtmxuEf0OVpvUUNrwmM1P7M=
github.com/blevesearch/gtreap v0.1.1 h1:2JWigFrzDMR+42WGIN/V2p0cUvn4UP3C4Q5nmaZGW8Y=
github.com/blevesearch/gtreap v0.1.1/go.mod h1:QaQyDRAT51sotthUWAH4Sj08awFSSWzgYICSZ3w0tYk=
github.com/blevesearch/mmap-go v1.0.2/go.mod h1:ol2qBqYaOUsGdm7aRMRrYGgPvnwLe6Y+7LMvAB5IbSA=
github.com/blevesearch/mmap-go v1.0.3/go.mod h1:pYvKl/grLQrBxuaRYgoTssa4rVujYYeenDp++2E+yvs=
github.com/blevesearch/mmap-go v1.0.4 h1:OVhDhT5B/M1HNPpYPBKIEJaD0F3Si+CrEKULGCDPWmc=
github.com/blevesearch/mmap-go v1.0.4/go.mod h1:EWmEAOmdAS9z/pi/+Toxu99DnsbhG1TIxUoRmJw/pSs=
github.com/blevesearch/scorch_segment_api/v2 v2.3.9 h1:X6nJXnNHl7nasXW+U6y2Ns2Aw8F9STszkYkyBfQ+p0o=
github.com/blevesearch/scorch_segment_api/v2 v2.3.9/go.mod h1:IrzspZlVjhf4X29oJiEhBxEteTqOY9RlYlk1lCmYHr4=
github.com/blevesearch/segment v0.9.0/go.mod h1:9PfHYUdQCgHktBgvtUOF4x+pc4/l8rdH0u5spnW85UQ=
github.com/blevesearch/segment v0.9.1 h1:+dThDy+Lvgj5JMxhmOVlgFfkUtZV2kw49xax4+jTfSU=
github.com/blevesearch/segment v0.9.1/go.mod h1:zN21iLm7+GnBHWTao9I+Au/7MBiL8pPFtJBJTsk6kQw=
github.com/blevesearch/snowballstem v0.9.0 h1:lMQ189YspGP6sXvZQ4WZ+MLawfV8wOmPoD/iWeNXm8s=
github.com/blevesearch/snowballstem v0.9.0/go.mod h1:PivSj3JMc8WuaFkTSRDW2SlrulNWPl4ABg1tC/hlgLs=
github.com/blevesearch/upsidedown_store_api v1.0.2 h1:U53Q6YoWEARVLd1OYNc9kvhBMGZzVrdmaozG2MfoB+A=
github.com/blevesearch/upsidedown_store_api v1.0.2/go.mod h1:M01mh3Gpfy56Ps/UXHjEO/knbqyQ1Oamg8If49gRwrQ=
github.com/blevesearch/vellum v1.0.5/go.mod h1:atE0EH3fvk43zzS7t1YNdNC7DbmcC3uz+eMD5xZ2OyQ=
github.com/blevesearch/vellum v1.0.7/go.mod h1:doBZpmRhwTsASB4QdUZANlJvqVAUdUyX0ZK7QJCTeBE=
github.com/blevesearch/vellum v1.1.0 h1:CinkGyIsgVlYf8Y2LUQHvdelgXr6PYuvoDIajq6yR9w=
github.com/blevesearch/vellum v1.1.0/go.mod h1:QgwWryE8ThtNPxtgWJof5ndPfx0/YMBh+W2weHKPw8Y=
github.com/blevesearch/zapx/v11 v11.4.1 h1:qFCPlFbsEdwbbckJkysptSQOsHn4s6ZOHL5GMAIAVHA=
@@ -955,6 +970,14 @@ github.com/blevesearch/zapx/v16 v16.2.2 h1:MifKJVRTEhMTgSlle2bDRTb39BGc9jXFRLPZc
github.com/blevesearch/zapx/v16 v16.2.2/go.mod h1:B9Pk4G1CqtErgQV9DyCSA9Lb7WZe4olYfGw7fVDZ4sk=
github.com/bluele/gcache v0.0.2 h1:WcbfdXICg7G/DGBh1PFfcirkWOQV+v077yF1pSy3DGw=
github.com/bluele/gcache v0.0.2/go.mod h1:m15KV+ECjptwSPxKhOhQoAFQVtUFjTVkc3H8o0t/fp0=
github.com/blugelabs/bluge v0.2.2 h1:gat8CqE6P6tOgeX30XGLOVNTC26cpM2RWVcreXWtYcM=
github.com/blugelabs/bluge v0.2.2/go.mod h1:am1LU9jS8dZgWkRzkGLQN3757EgMs3upWrU2fdN9foE=
github.com/blugelabs/bluge_segment_api v0.2.0 h1:cCX1Y2y8v0LZ7+EEJ6gH7dW6TtVTW4RhG0vp3R+N2Lo=
github.com/blugelabs/bluge_segment_api v0.2.0/go.mod h1:95XA+ZXfRj/IXADm7gZ+iTcWOJPg5jQTY1EReIzl3LA=
github.com/blugelabs/ice v1.0.0 h1:um7wf9e6jbkTVCrOyQq3tKK43fBMOvLUYxbj3Qtc4eo=
github.com/blugelabs/ice v1.0.0/go.mod h1:gNfFPk5zM+yxJROhthxhVQYjpBO9amuxWXJQ2Lo+IbQ=
github.com/blugelabs/ice/v2 v2.0.1 h1:mzHbntLjk2v7eDRgoXCgzOsPKN1Tenu9Svo6l9cTLS4=
github.com/blugelabs/ice/v2 v2.0.1/go.mod h1:QxAWSPNwZwsIqS25c3lbIPFQrVvT1sphf5x5DfMLH5M=
github.com/bmatcuk/doublestar v1.1.1/go.mod h1:UD6OnuiIn0yFxxA2le/rnRU1G4RaI4UvFv1sNto9p6w=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
@@ -973,6 +996,8 @@ github.com/bwmarrin/snowflake v0.3.0 h1:xm67bEhkKh6ij1790JB83OujPR5CzNe8QuQqAgIS
github.com/bwmarrin/snowflake v0.3.0/go.mod h1:NdZxfVWX+oR6y2K0o6qAYv6gIOP9rjG0/E9WsDpxqwE=
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500 h1:6lhrsTEnloDPXyeZBvSYvQf8u86jbKehZPVDDlkgDl4=
github.com/c2h5oh/datasize v0.0.0-20231215233829-aa82cc1e6500/go.mod h1:S/7n9copUssQ56c7aAgHqftWO4LTf4xY6CGWt8Bc+3M=
github.com/caio/go-tdigest v3.1.0+incompatible h1:uoVMJ3Q5lXmVLCCqaMGHLBWnbGoN6Lpu7OAUPR60cds=
github.com/caio/go-tdigest v3.1.0+incompatible/go.mod h1:sHQM/ubZStBUmF1WbB8FAm8q9GjDajLC5T7ydxE3JHI=
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8=
github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
@@ -1032,6 +1057,9 @@ github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
github.com/containerd/platforms v0.2.1 h1:zvwtM3rz2YHPQsF2CHYM8+KtB5dvhISiXh5ZpSBQv6A=
github.com/containerd/platforms v0.2.1/go.mod h1:XHCb+2/hzowdiut9rkudds9bE5yJ7npe7dG/wG+uFPw=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-semver v0.3.1 h1:yi21YpKnrx1gt5R+la8n5WgS0kCrsPp33dmEyHReZr4=
github.com/coreos/go-semver v0.3.1/go.mod h1:irMmmIw/7yzSRPWryHsK7EYSg09caPQL03VsM8rvUec=
@@ -1041,6 +1069,7 @@ github.com/coreos/go-systemd/v22 v22.6.0 h1:aGVa/v8B7hpb0TKl0MWoAavPDmHvobFe5R5z
github.com/coreos/go-systemd/v22 v22.6.0/go.mod h1:iG+pp635Fo7ZmV/j14KUcmEyWF+0X7Lua8rrTWzYgWU=
github.com/cpuguy83/dockercfg v0.3.2 h1:DlJTyZGBDlXqUZ2Dk2Q3xHs/FtnooJJVaad2S9GKorA=
github.com/cpuguy83/dockercfg v0.3.2/go.mod h1:sugsbF4//dDlL/i+S+rtpIWp+5h0BHJHfjj5/jFyUJc=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cpuguy83/go-md2man/v2 v2.0.6/go.mod h1:oOW0eioCTA6cOiMLiUPZOpcVxMig6NIQQ7OS05n1F4g=
github.com/cpuguy83/go-md2man/v2 v2.0.7 h1:zbFlGlXEAKlwXpmvle3d8Oe3YnkKIK4xSRTd3sHPnBo=
@@ -1072,6 +1101,8 @@ github.com/dgraph-io/ristretto/v2 v2.2.0 h1:bkY3XzJcXoMuELV8F+vS8kzNgicwQFAaGINA
github.com/dgraph-io/ristretto/v2 v2.2.0/go.mod h1:RZrm63UmcBAaYWC1DotLYBmTvgkrs0+XhBd7Npn7/zI=
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da h1:aIftn67I1fkbMa512G+w+Pxci9hJPB8oMnkcP3iZF38=
github.com/dgryski/go-farm v0.0.0-20240924180020-3414d57e47da/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78=
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc=
github.com/dgryski/go-sip13 v0.0.0-20190329191031-25c5027a8c7b/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
@@ -1591,8 +1622,8 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=
@@ -1786,8 +1817,10 @@ github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
github.com/influxdata/influxdb v1.7.6/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY=
github.com/influxdata/influxdb v1.7.7/go.mod h1:qZna6X/4elxqT3yI9iZYdZrWWdeFOOprn86kgg4+IzY=
github.com/influxdata/influxdb-client-go/v2 v2.13.0 h1:ioBbLmR5NMbAjP4UVA5r9b5xGjpABD7j65pI8kFphDM=
github.com/influxdata/influxdb-client-go/v2 v2.13.0/go.mod h1:k+spCbt9hcvqvUiz0sr5D8LolXHqAAOfPw9v/RIRHl4=
@@ -1888,6 +1921,7 @@ github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCy
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
github.com/klauspost/compress v1.13.6/go.mod h1:/3/Vjq9QcHkK5uEr5lBEmyoZ1iFhe47etQ6QUkpK6sk=
github.com/klauspost/compress v1.15.2/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
github.com/klauspost/compress v1.15.9/go.mod h1:PhcZ0MbTNciWF3rruxRgKxI5NkcHHrHUDtV4Yw2GlzU=
github.com/klauspost/compress v1.17.9/go.mod h1:Di0epgTjJY877eYKx5yC51cX2A2Vl2ibi7bDH9ttBbw=
github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo=
@@ -1920,6 +1954,8 @@ github.com/lann/builder v0.0.0-20180802200727-47ae307949d0 h1:SOEGU9fKiNWd/HOJuq
github.com/lann/builder v0.0.0-20180802200727-47ae307949d0/go.mod h1:dXGbAdH5GtBTC4WfIxhKZfyBF/HBFgRZSWwZ9g/He9o=
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0 h1:P6pPBnrTSX3DEVR4fDembhRWSsG5rVo6hYhAB/ADZrk=
github.com/lann/ps v0.0.0-20150810152359-62de8c46ede0/go.mod h1:vmVJ0l/dxyfGW6FmdpVm2joNMFikkuWg0EoCKLGUMNw=
github.com/leesper/go_rng v0.0.0-20190531154944-a612b043e353 h1:X/79QL0b4YJVO5+OsPH9rF2u428CIrGL/jLmPsoOQQ4=
github.com/leesper/go_rng v0.0.0-20190531154944-a612b043e353/go.mod h1:N0SVk0uhy+E1PZ3C9ctsPRlvOPAFPkCNlcPBDkt0N3U=
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc h1:RKf14vYWi2ttpEmkA4aQ3j4u9dStX2t4M8UM6qqNsG8=
github.com/lestrrat-go/envload v0.0.0-20180220234015-a3eb8ddeffcc/go.mod h1:kopuH9ugFRkIXf3YoqHKyrJ9YfUFsckUU9S7B+XP+is=
@@ -1942,6 +1978,7 @@ github.com/madflojo/testcerts v1.4.0 h1:I09gN0C1ly9IgeVNcAqKk8RAKIJTe3QnFrrPBDyv
github.com/madflojo/testcerts v1.4.0/go.mod h1:MW8sh39gLnkKh4K0Nc55AyHEDl9l/FBLDUsQhpmkuo0=
github.com/magefile/mage v1.15.0 h1:BvGheCMAsG3bWUDbZ8AyXXpCNwU9u5CB6sM+HNb9HYg=
github.com/magefile/mage v1.15.0/go.mod h1:z5UZb/iS3GoOSn0JgWuiw7dxlurVYTu+/jHXqQg881A=
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/magiconair/properties v1.8.10 h1:s31yESBquKXCV9a/ScB3ESkOjUYYv+X0rg8SYxI99mE=
github.com/magiconair/properties v1.8.10/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
@@ -2183,6 +2220,7 @@ github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaR
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/pborman/getopt v0.0.0-20170112200414-7148bc3a4c30/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o=
github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
github.com/pelletier/go-toml/v2 v2.2.4 h1:mye9XuhQ6gvn5h28+VilKrrPoQVanw5PMw/TB0t5Ec4=
@@ -2337,6 +2375,7 @@ github.com/rs/xid v1.6.0 h1:fV591PaemRlL6JfRxGDEPl69wICngIQ3shQtzfy2gxU=
github.com/rs/xid v1.6.0/go.mod h1:7XoLgs4eV+QndskICGsho+ADou8ySMSjJKDIan90Nz0=
github.com/russellhaering/goxmldsig v1.4.0 h1:8UcDh/xGyQiyrW+Fq5t8f+l2DLB1+zlhYzkPUJ7Qhys=
github.com/russellhaering/goxmldsig v1.4.0/go.mod h1:gM4MDENBQf7M+V824SGfyIUVFWydB7n0KkEubVJl+Tw=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
@@ -2402,6 +2441,7 @@ github.com/sourcegraph/conc v0.3.1-0.20240121214520-5f936abd7ae8/go.mod h1:3n1Cw
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4=
github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z93I=
@@ -2409,12 +2449,15 @@ github.com/spf13/afero v1.8.2/go.mod h1:CtAatgMJh6bJEIs48Ay/FOnkljP3WeGUG0MC1RfA
github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
github.com/spf13/afero v1.15.0/go.mod h1:NC2ByUVxtQs4b3sIUphxK0NioZnmxgyCrfzeuq8lxMg=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
github.com/spf13/cast v1.10.0 h1:h2x0u2shc1QuLHfxi+cTJvs30+ZAHOGRic8uyGTDWxY=
github.com/spf13/cast v1.10.0/go.mod h1:jNfB8QC9IA6ZuY2ZjDp0KtFO2LZZlg4S/7bzP6qqeHo=
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
github.com/spf13/cobra v1.10.2 h1:DMTTonx5m65Ic0GOoRY2c16WCbHxOOw6xxezuLaBpcU=
github.com/spf13/cobra v1.10.2/go.mod h1:7C1pvHqHw5A4vrJfjNwvOdzYu0Gml16OCs2GRiTUUS4=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v0.0.0-20170130214245-9ff6c6923cff/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
@@ -2422,6 +2465,7 @@ github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An
github.com/spf13/pflag v1.0.9/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/pflag v1.0.10 h1:4EBh2KAYBwaONj6b2Ye1GiHfwjqyROoF4RwYO+vPwFk=
github.com/spf13/pflag v1.0.10/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.13.0/go.mod h1:Icm2xNL3/8uyh/wFuB1jI7TiTNKp8632Nwegu+zgdYw=
github.com/spf13/viper v1.21.0 h1:x5S+0EU27Lbphp4UKm1C+1oQO+rKx36vfCoaVebLFSU=
github.com/spf13/viper v1.21.0/go.mod h1:P0lhsswPGWD/1lZJ9ny3fYnVqxiegrlNrEmgLjbTCAY=
@@ -2486,6 +2530,7 @@ github.com/uber/jaeger-client-go v2.30.0+incompatible h1:D6wyKGCecFaSRUpo8lCVbaO
github.com/uber/jaeger-client-go v2.30.0+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
github.com/uber/jaeger-lib v2.4.1+incompatible h1:td4jdvLcExb4cBISKIpHuGoVXh+dVKhn2Um6rjCsSsg=
github.com/uber/jaeger-lib v2.4.1+incompatible/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg=
github.com/unknwon/bra v0.0.0-20200517080246-1e3013ecaff8 h1:aVGB3YnaS/JNfOW3tiHIlmNmTDg618va+eT0mVomgyI=
@@ -2526,6 +2571,7 @@ github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 h1:S2dVYn90KE98chq
github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xlab/treeprint v1.2.0 h1:HzHnuAF1plUN2zGlAFHbSQP2qJ0ZAD3XF5XD7OesXRQ=
github.com/xlab/treeprint v1.2.0/go.mod h1:gj5Gd3gPdKtR1ikdDK6fnFLdmIS0X30kTTuNd/WEJu0=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 h1:gEOO8jv9F4OT7lGCjxCBTO/36wtF6j2nSip77qHd4x4=
github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1/go.mod h1:Ohn+xnUBiLI6FVj/9LpzZWtj1/D6lUovWYBkxHVV3aM=
github.com/xyproto/randomstring v1.0.5 h1:YtlWPoRdgMu3NZtP45drfy1GKoojuR7hmRcnhZqKjWU=
@@ -2712,6 +2758,7 @@ gocloud.dev/secrets/hashivault v0.43.0 h1:A966rEMpCRUE9209/+k+A2HP2v2qDnrxGpQn+n
gocloud.dev/secrets/hashivault v0.43.0/go.mod h1:KdWKL+TXDi0cXgEd/MTeaidKlotvyJtnTDi71B3rR9U=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190211182817-74369b46fc67/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190320223903-b7391e95e576/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
@@ -2976,6 +3023,8 @@ golang.org/x/sys v0.0.0-20181026203630-95b1ffbd15a5/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181122145206-62eef0e2fa9b/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181221143128-b4a75ba826a6/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190102155601-82a175fd1598/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190209173611-3b5209105503/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -3248,6 +3297,7 @@ golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6f
gomodules.xyz/jsonpatch/v2 v2.5.0 h1:JELs8RLM12qJGXU4u/TO3V25KW8GreMKl9pdkk14RM0=
gomodules.xyz/jsonpatch/v2 v2.5.0/go.mod h1:AH3dM2RI6uoBZxn3LVrfvJ3E0/9dG4cSrbuBJT4moAY=
gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
gonum.org/v1/gonum v0.7.0/go.mod h1:L02bwd0sqlsvRv41G7wGWFCsVNZFv/k1xzGIxeANHGM=
gonum.org/v1/gonum v0.8.2/go.mod h1:oe/vMfY3deqTw+1EZJhuvEW2iwGF1bW9wwu7XCu0+v0=
gonum.org/v1/gonum v0.9.3/go.mod h1:TZumC3NeyVQskjXqmyWt4S3bINhy7B4eYwW69EbyX+0=
gonum.org/v1/gonum v0.11.0/go.mod h1:fSG4YDCxxUZQJ7rKsQrj0gMOg00Il0Z96/qMA4bVQhA=

View File

@@ -493,8 +493,6 @@ github.com/aws/smithy-go v1.22.5/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp
github.com/aws/smithy-go v1.23.0/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=
github.com/awslabs/aws-lambda-go-api-proxy v0.16.2 h1:CJyGEyO1CIwOnXTU40urf0mchf6t3voxpvUDikOU9LY=
github.com/awslabs/aws-lambda-go-api-proxy v0.16.2/go.mod h1:vxxjwBHe/KbgFeNlAP/Tvp4SsVRL3WQamcWRxqVh0z0=
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 h1:60m4tnanN1ctzIu4V3bfCNJ39BiOPSm1gHFlFjTkRE0=
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27/go.mod h1:k08r+Yj1PRAmuayFiRK6MYuR5Ve4IuZtTfxErMIh0+c=
github.com/aymanbagabas/go-udiff v0.2.0 h1:TK0fH4MteXUDspT88n8CKzvK0X9O2xu9yQjWpi6yML8=
github.com/aymanbagabas/go-udiff v0.2.0/go.mod h1:RE4Ex0qsGkTAJoQdQQCA0uG+nAzJO/pI/QwceO5fgrA=
github.com/aymerick/douceur v0.2.0 h1:Mv+mAeH1Q+n9Fr+oyamOlAkUNPWPlA8PPGR0QAaYuPk=
@@ -535,12 +533,12 @@ github.com/campoy/embedmd v1.0.0 h1:V4kI2qTJJLf4J29RzI/MAt2c3Bl4dQSYPuflzwFH2hY=
github.com/campoy/embedmd v1.0.0/go.mod h1:oxyr9RCiSXg0M3VJ3ks0UGfp98BpSSGr0kpiX3MzVl8=
github.com/cenkalti/backoff/v5 v5.0.2/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw=
github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g=
github.com/centrifugal/centrifuge v0.37.2/go.mod h1:aj4iRJGhzi3SlL8iUtVezxway1Xf8g+hmNQkLLO7sS8=
github.com/centrifugal/protocol v0.16.2/go.mod h1:Q7OpS/8HMXDnL7f9DpNx24IhG96MP88WPpVTTCdrokI=
github.com/charmbracelet/harmonica v0.2.0 h1:8NxJWRWg/bzKqqEaaeFNipOu77YR5t8aSwG4pgaUBiQ=
github.com/charmbracelet/harmonica v0.2.0/go.mod h1:KSri/1RMQOZLbw7AHqgcBycp8pgJnQMYYT8QZRqZ1Ao=
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91 h1:payRxjMjKgx2PaCWLZ4p3ro9y97+TVLZNaRZgJwSVDQ=
github.com/charmbracelet/x/exp/golden v0.0.0-20241011142426-46044092ad91/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U=
github.com/centrifugal/centrifuge v0.37.2/go.mod h1:aj4iRJGhzi3SlL8iUtVezxway1Xf8g+hmNQkLLO7sS8=
github.com/centrifugal/protocol v0.16.2/go.mod h1:Q7OpS/8HMXDnL7f9DpNx24IhG96MP88WPpVTTCdrokI=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d h1:77cEq6EriyTZ0g/qfRdp61a3Uu/AWrgIq2s0ClJV1g0=
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d/go.mod h1:8EPpVsBuRksnlj1mLy4AWzRNQYxauNi62uWcE3to6eA=
github.com/chenzhuoyu/iasm v0.9.0 h1:9fhXjVzq5hUy2gkhhgHl95zG2cEAhw9OSGs8toWWAwo=
@@ -659,8 +657,6 @@ github.com/denisenkom/go-mssqldb v0.10.0/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27
github.com/devigned/tab v0.1.1/go.mod h1:XG9mPq0dFghrYvoBF3xdRrJzSTX1b7IQrvaL9mzjeJY=
github.com/dgryski/go-ddmin v0.0.0-20210904190556-96a6d69f1034 h1:BuCyszxPxUjBrYW2HNVrimC0rBUs2U27jCJGVh0IKTM=
github.com/dgryski/go-ddmin v0.0.0-20210904190556-96a6d69f1034/go.mod h1:zz4KxBkcXUWKjIcrc+uphJ1gPh/t18ymGm3PmQ+VGTk=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc h1:8WFBn63wegobsYAX0YjD+8suexZDga5CctH4CCTx2+8=
github.com/dgryski/go-metro v0.0.0-20180109044635-280f6062b5bc/go.mod h1:c9O8+fpSOX1DM8cPNSkX/qsBWdkD4yd2dpciOWQjpBw=
github.com/dgryski/go-sip13 v0.0.0-20190329191031-25c5027a8c7b h1:Yqiad0+sloMPdd/0Fg22actpFx0dekpzt1xJmVNVkU0=
github.com/dhui/dktest v0.3.0 h1:kwX5a7EkLcjo7VpsPQSYJcKGbXBXdjI9FGjuUj1jn6I=
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=

View File

@@ -535,6 +535,11 @@ export const versionedComponents = {
'12.3.0': 'data-testid viz-tooltip-wrapper',
},
},
Gauge: {
Container: {
'12.4.0': 'data-testid gauge container',
},
},
},
},
VizLegend: {
@@ -1288,6 +1293,9 @@ export const versionedComponents = {
card: {
[MIN_GRAFANA_VERSION]: (name: string) => `data-testid suggestion-${name}`,
},
confirm: {
'12.4.0': (name: string) => `data-testid suggestion-${name} confirm button`,
},
},
ColorSwatch: {
name: {

View File

@@ -35,6 +35,7 @@ export interface Options extends common.SingleStatBaseOptions {
showThresholdLabels: boolean;
showThresholdMarkers: boolean;
sparkline?: boolean;
textMode?: ('auto' | 'value_and_name' | 'value' | 'name' | 'none');
}
export const defaultOptions: Partial<Options> = {
@@ -48,4 +49,5 @@ export const defaultOptions: Partial<Options> = {
showThresholdLabels: false,
showThresholdMarkers: true,
sparkline: true,
textMode: 'auto',
};

View File

@@ -248,15 +248,17 @@ export function PanelChrome({
const onContentPointerDown = React.useCallback(
(evt: React.PointerEvent) => {
// Ignore clicks inside buttons, links, canvas and svg elments
// When selected, ignore clicks inside buttons, links, canvas and svg elments
// This does prevent a clicks inside a graphs from selecting panel as there is normal div above the canvas element that intercepts the click
if (evt.target instanceof Element && evt.target.closest('button,a,canvas,svg')) {
if (isSelected && evt.target instanceof Element && evt.target.closest('button,a,canvas,svg')) {
// Stop propagation otherwise row config editor will get selected
evt.stopPropagation();
return;
}
onSelect?.(evt);
},
[onSelect]
[isSelected, onSelect]
);
const headerContent = (

View File

@@ -32,24 +32,6 @@ const meta: Meta<StoryProps> = {
controls: {
exclude: ['theme', 'values', 'vizCount'],
},
a11y: {
config: {
rules: [
{
id: 'scrollable-region-focusable',
selector: 'body',
enabled: false,
},
// NOTE: this is necessary due to a false positive with the filered svg glow in one of the examples.
// The color-contrast in this component should be accessible!
{
id: 'color-contrast',
selector: 'text',
enabled: false,
},
],
},
},
},
args: {
barWidthFactor: 0.2,

View File

@@ -2,6 +2,7 @@ import { css, cx } from '@emotion/css';
import { useId } from 'react';
import { DisplayValueAlignmentFactors, FALLBACK_COLOR, FieldDisplay, GrafanaTheme2, TimeRange } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { t } from '@grafana/i18n';
import { useStyles2, useTheme2 } from '../../themes/ThemeContext';
@@ -275,7 +276,11 @@ export function RadialGauge(props: RadialGaugeProps) {
}
return (
<div className={styles.vizWrapper} style={{ width, height }}>
<div
data-testid={selectors.components.Panels.Visualization.Gauge.Container}
className={styles.vizWrapper}
style={{ width, height }}
>
{body}
</div>
);

View File

@@ -1,4 +1,4 @@
import { GrafanaTheme2 } from '@grafana/data';
import { colorManipulator, GrafanaTheme2 } from '@grafana/data';
import { RadialGaugeDimensions } from './types';
@@ -25,13 +25,14 @@ export function GlowGradient({ id, barWidth }: GlowGradientProps) {
);
}
const CENTER_GLOW_OPACITY = 0.15;
const CENTER_GLOW_OPACITY = 0.25;
export function CenterGlowGradient({ gaugeId, color }: { gaugeId: string; color: string }) {
const transparentColor = colorManipulator.alpha(color, CENTER_GLOW_OPACITY);
return (
<radialGradient id={`circle-glow-${gaugeId}`} r="50%" fr="0%">
<stop offset="0%" stopColor={color} stopOpacity={CENTER_GLOW_OPACITY} />
<stop offset="90%" stopColor={color} stopOpacity={0} />
<stop offset="0%" stopColor={transparentColor} />
<stop offset="90%" stopColor={'#ffffff00'} />
</radialGradient>
);
}
@@ -44,13 +45,14 @@ export interface CenterGlowProps {
export function MiddleCircleGlow({ dimensions, gaugeId, color }: CenterGlowProps) {
const gradientId = `circle-glow-${gaugeId}`;
const transparentColor = color ? colorManipulator.alpha(color, CENTER_GLOW_OPACITY) : color;
return (
<>
<defs>
<radialGradient id={gradientId} r="50%" fr="0%">
<stop offset="0%" stopColor={color} stopOpacity={CENTER_GLOW_OPACITY} />
<stop offset="90%" stopColor={color} stopOpacity={0} />
<stop offset="0%" stopColor={transparentColor} />
<stop offset="90%" stopColor="#ffffff00" />
</radialGradient>
</defs>
<g>
@@ -86,9 +88,9 @@ export function SpotlightGradient({
return (
<linearGradient x1={x1} y1={y1} x2={x2} y2={y2} id={id} gradientUnits="userSpaceOnUse">
<stop offset="0%" stopColor={'white'} stopOpacity={0.0} />
<stop offset="95%" stopColor={'white'} stopOpacity={0.5} />
{roundedBars && <stop offset="100%" stopColor={'white'} stopOpacity={roundedBars ? 0.7 : 1} />}
<stop offset="0%" stopColor="#ffffff00" />
<stop offset="95%" stopColor="#ffffff88" />
{roundedBars && <stop offset="100%" stopColor={roundedBars ? '#ffffffbb' : 'white'} />}
</linearGradient>
);
}

View File

@@ -167,7 +167,8 @@ export class VizRepeater<V, D = {}> extends PureComponent<PropsWithDefaults<V, D
const repeaterStyle: React.CSSProperties = {
display: 'flex',
overflow: `${minVizWidth ? 'auto' : 'hidden'} ${minVizHeight ? 'auto' : 'hidden'}`,
overflowX: `${minVizWidth ? 'auto' : 'hidden'}`,
overflowY: `${minVizHeight ? 'auto' : 'hidden'}`,
};
let vizHeight = height;

View File

@@ -327,6 +327,11 @@ func (hs *HTTPServer) registerRoutes() {
apiRoute.Group("/storage", hs.StorageService.RegisterHTTPRoutes)
}
//nolint:staticcheck // not yet migrated to OpenFeature
if hs.Features.IsEnabledGlobally(featuremgmt.FlagPanelTitleSearch) {
apiRoute.Group("/search-v2", hs.SearchV2HTTPService.RegisterHTTPRoutes)
}
// current org
apiRoute.Group("/org", func(orgRoute routing.RouteRegister) {
userIDScope := ac.Scope("users", "id", ac.Parameter(":userId"))

View File

@@ -25,6 +25,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
"github.com/youmark/pkcs8"
"github.com/grafana/grafana/pkg/api/avatar"
@@ -94,6 +95,7 @@ import (
"github.com/grafana/grafana/pkg/services/quota"
"github.com/grafana/grafana/pkg/services/rendering"
"github.com/grafana/grafana/pkg/services/search"
"github.com/grafana/grafana/pkg/services/searchV2"
"github.com/grafana/grafana/pkg/services/searchusers"
"github.com/grafana/grafana/pkg/services/secrets"
secretsKV "github.com/grafana/grafana/pkg/services/secrets/kvstore"
@@ -158,6 +160,7 @@ type HTTPServer struct {
Live *live.GrafanaLive
LivePushGateway *pushhttp.Gateway
StorageService store.StorageService
SearchV2HTTPService searchV2.SearchHTTPService
ContextHandler *contexthandler.ContextHandler
LoggerMiddleware loggermw.Logger
SQLStore db.DB
@@ -268,7 +271,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
publicDashboardsApi *publicdashboardsApi.Api, userService user.Service, tempUserService tempUser.Service,
loginAttemptService loginAttempt.Service, orgService org.Service, orgDeletionService org.DeletionService, teamService team.Service,
accesscontrolService accesscontrol.Service, navTreeService navtree.Service,
annotationRepo annotations.Repository, tagService tag.Service, oauthTokenService oauthtoken.OAuthTokenService,
annotationRepo annotations.Repository, tagService tag.Service, searchv2HTTPService searchV2.SearchHTTPService, oauthTokenService oauthtoken.OAuthTokenService,
statsService stats.Service, authnService authn.Service, pluginsCDNService *pluginscdn.Service, promGatherer prometheus.Gatherer,
starApi *starApi.API, promRegister prometheus.Registerer, clientConfigProvider grafanaapiserver.DirectRestConfigProvider, anonService anonymous.Service,
userVerifier user.Verifier, pluginPreinstall pluginchecker.Preinstall,
@@ -310,6 +313,7 @@ func ProvideHTTPServer(opts ServerOptions, cfg *setting.Cfg, routeRegister routi
ProvisioningService: provisioningService,
AccessControl: accessControl,
DataProxy: dataSourceProxy,
SearchV2HTTPService: searchv2HTTPService,
SearchService: searchService,
Live: live,
LivePushGateway: livePushGateway,

View File

@@ -12,6 +12,8 @@ import (
_ "github.com/Azure/go-autorest/autorest"
_ "github.com/Azure/go-autorest/autorest/adal"
_ "github.com/beevik/etree"
_ "github.com/blugelabs/bluge"
_ "github.com/blugelabs/bluge_segment_api"
_ "github.com/crewjam/saml"
_ "github.com/docker/go-connections/nat"
_ "github.com/go-jose/go-jose/v4"

View File

@@ -38,6 +38,7 @@ import (
"github.com/grafana/grafana/pkg/services/provisioning"
publicdashboardsmetric "github.com/grafana/grafana/pkg/services/publicdashboards/metric"
"github.com/grafana/grafana/pkg/services/rendering"
"github.com/grafana/grafana/pkg/services/searchV2"
secretsMigrations "github.com/grafana/grafana/pkg/services/secrets/kvstore/migrations"
secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
"github.com/grafana/grafana/pkg/services/serviceaccounts"
@@ -57,7 +58,7 @@ func ProvideBackgroundServiceRegistry(
provisioning *provisioning.ProvisioningServiceImpl, usageStats *uss.UsageStats,
statsCollector *statscollector.Service, grafanaUpdateChecker *updatemanager.GrafanaService,
pluginsUpdateChecker *updatemanager.PluginsService, metrics *metrics.InternalMetricsService,
secretsService *secretsManager.SecretsService, remoteCache *remotecache.RemoteCache, StorageService store.StorageService, entityEventsService store.EntityEventsService,
secretsService *secretsManager.SecretsService, remoteCache *remotecache.RemoteCache, StorageService store.StorageService, searchService searchV2.SearchService, entityEventsService store.EntityEventsService,
saService *samanager.ServiceAccountsService, grpcServerProvider grpcserver.Provider,
secretMigrationProvider secretsMigrations.SecretMigrationProvider, loginAttemptService *loginattemptimpl.Service,
bundleService *supportbundlesimpl.Service, publicDashboardsMetric *publicdashboardsmetric.Service,
@@ -100,6 +101,7 @@ func ProvideBackgroundServiceRegistry(
remoteCache,
secretsService,
StorageService,
searchService,
entityEventsService,
grpcServerProvider,
saService,

View File

@@ -140,6 +140,7 @@ import (
"github.com/grafana/grafana/pkg/services/rendering"
"github.com/grafana/grafana/pkg/services/search"
"github.com/grafana/grafana/pkg/services/search/sort"
"github.com/grafana/grafana/pkg/services/searchV2"
"github.com/grafana/grafana/pkg/services/secrets"
secretsDatabase "github.com/grafana/grafana/pkg/services/secrets/database"
secretsStore "github.com/grafana/grafana/pkg/services/secrets/kvstore"
@@ -274,6 +275,8 @@ var wireBasicSet = wire.NewSet(
datasourceproxy.ProvideService,
sort.ProvideService,
search.ProvideService,
searchV2.ProvideService,
searchV2.ProvideSearchHTTPService,
store.ProvideService,
store.ProvideSystemUsersService,
live.ProvideService,

249
pkg/server/wire_gen.go generated

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,44 @@
package acimpl
import (
"context"
"time"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
const (
ossBasicRoleSeedLockName = "oss-ac-basic-role-seeder"
ossBasicRoleSeedTimeout = 2 * time.Minute
)
// refreshBasicRolePermissionsInDB ensures basic role permissions are fully derived from in-memory registrations
func (s *Service) refreshBasicRolePermissionsInDB(ctx context.Context, rolesSnapshot map[string][]accesscontrol.Permission) error {
if s.sql == nil || s.seeder == nil {
return nil
}
run := func(ctx context.Context) error {
desired := map[accesscontrol.SeedPermission]struct{}{}
for role, permissions := range rolesSnapshot {
for _, permission := range permissions {
desired[accesscontrol.SeedPermission{BuiltInRole: role, Action: permission.Action, Scope: permission.Scope}] = struct{}{}
}
}
s.seeder.SetDesiredPermissions(desired)
return s.seeder.Seed(ctx)
}
if s.serverLock == nil {
return run(ctx)
}
var err error
errLock := s.serverLock.LockExecuteAndRelease(ctx, ossBasicRoleSeedLockName, ossBasicRoleSeedTimeout, func(ctx context.Context) {
err = run(ctx)
})
if errLock != nil {
return errLock
}
return err
}

View File

@@ -0,0 +1,128 @@
package acimpl
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/localcache"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/database"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/resourcepermissions"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/testutil"
)
func TestIntegration_OSSBasicRolePermissions_PersistAndRefreshOnRegisterFixedRoles(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
ctx := context.Background()
sql := db.InitTestDB(t)
store := database.ProvideService(sql)
svc := ProvideOSSService(
setting.NewCfg(),
store,
&resourcepermissions.FakeActionSetSvc{},
localcache.ProvideService(),
featuremgmt.WithFeatures(),
tracing.InitializeTracerForTest(),
sql,
permreg.ProvidePermissionRegistry(),
nil,
)
require.NoError(t, svc.DeclareFixedRoles(accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:test:role",
Permissions: []accesscontrol.Permission{
{Action: "test:read", Scope: ""},
},
},
Grants: []string{string(org.RoleViewer)},
}))
require.NoError(t, svc.RegisterFixedRoles(ctx))
// verify permission is persisted to DB for basic:viewer
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
var count int64
count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
require.NoError(t, err)
require.Equal(t, int64(1), count)
return nil
}))
// ensure RegisterFixedRoles refreshes it back to defaults
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
ts := time.Now()
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
_, err = sess.Exec("DELETE FROM permission WHERE role_id = ?", role.ID)
require.NoError(t, err)
p := accesscontrol.Permission{
RoleID: role.ID,
Action: "custom:keep",
Scope: "",
Created: ts,
Updated: ts,
}
p.Kind, p.Attribute, p.Identifier = accesscontrol.SplitScope(p.Scope)
_, err = sess.Table("permission").Insert(&p)
return err
}))
svc2 := ProvideOSSService(
setting.NewCfg(),
store,
&resourcepermissions.FakeActionSetSvc{},
localcache.ProvideService(),
featuremgmt.WithFeatures(),
tracing.InitializeTracerForTest(),
sql,
permreg.ProvidePermissionRegistry(),
nil,
)
require.NoError(t, svc2.DeclareFixedRoles(accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:test:role",
Permissions: []accesscontrol.Permission{
{Action: "test:read", Scope: ""},
},
},
Grants: []string{string(org.RoleViewer)},
}))
require.NoError(t, svc2.RegisterFixedRoles(ctx))
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
var count int64
count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
require.NoError(t, err)
require.Equal(t, int64(1), count)
count, err = sess.Table("permission").Where("role_id = ? AND action = ?", role.ID, "custom:keep").Count()
require.NoError(t, err)
require.Equal(t, int64(0), count)
return nil
}))
}

View File

@@ -30,6 +30,7 @@ import (
"github.com/grafana/grafana/pkg/services/accesscontrol/migrator"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -96,6 +97,12 @@ func ProvideOSSService(
roles: accesscontrol.BuildBasicRoleDefinitions(),
store: store,
permRegistry: permRegistry,
sql: db,
serverLock: lock,
}
if backend, ok := store.(*database.AccessControlStore); ok {
s.seeder = seeding.New(log.New("accesscontrol.seeder"), backend, backend)
}
return s
@@ -112,8 +119,11 @@ type Service struct {
rolesMu sync.RWMutex
roles map[string]*accesscontrol.RoleDTO
store accesscontrol.Store
seeder *seeding.Seeder
permRegistry permreg.PermissionRegistry
isInitialized bool
sql db.DB
serverLock *serverlock.ServerLockService
}
func (s *Service) GetUsageStats(_ context.Context) map[string]any {
@@ -431,17 +441,54 @@ func (s *Service) RegisterFixedRoles(ctx context.Context) error {
defer span.End()
s.rolesMu.Lock()
defer s.rolesMu.Unlock()
registrations := s.registrations.Slice()
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
s.registerRolesLocked(registration)
return true
})
s.isInitialized = true
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.Unlock()
if s.seeder != nil {
if err := s.seeder.SeedRoles(ctx, registrations); err != nil {
return err
}
if err := s.seeder.RemoveAbsentRoles(ctx); err != nil {
return err
}
}
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
return nil
}
// getBasicRolePermissionsSnapshotFromRegistrationsLocked computes the desired basic role permissions from the
// current registration list, using the shared seeding registration logic.
//
// it has to be called while holding the roles lock
func (s *Service) getBasicRolePermissionsLocked() map[string][]accesscontrol.Permission {
desired := map[accesscontrol.SeedPermission]struct{}{}
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
seeding.AppendDesiredPermissions(desired, s.log, &registration.Role, registration.Grants, registration.Exclude, true)
return true
})
out := make(map[string][]accesscontrol.Permission)
for sp := range desired {
out[sp.BuiltInRole] = append(out[sp.BuiltInRole], accesscontrol.Permission{
Action: sp.Action,
Scope: sp.Scope,
})
}
return out
}
// registerRolesLocked processes a single role registration and adds permissions to basic roles.
// Must be called with s.rolesMu locked.
func (s *Service) registerRolesLocked(registration accesscontrol.RoleRegistration) {
@@ -474,6 +521,7 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
defer span.End()
acRegs := pluginutils.ToRegistrations(ID, name, regs)
updatedBasicRoles := false
for _, r := range acRegs {
if err := pluginutils.ValidatePluginRole(ID, r.Role); err != nil {
return err
@@ -500,11 +548,23 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
if initialized {
s.rolesMu.Lock()
s.registerRolesLocked(r)
updatedBasicRoles = true
s.rolesMu.Unlock()
s.cache.Flush()
}
}
if updatedBasicRoles {
s.rolesMu.RLock()
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.RUnlock()
// plugin roles can be declared after startup - keep DB in sync
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
}
return nil
}

View File

@@ -0,0 +1,623 @@
package database
import (
"context"
"strings"
"time"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
"github.com/grafana/grafana/pkg/util/xorm/core"
)
const basicRolePermBatchSize = 500
// LoadRoles returns all fixed and plugin roles (global org) with permissions, indexed by role name.
func (s *AccessControlStore) LoadRoles(ctx context.Context) (map[string]*accesscontrol.RoleDTO, error) {
out := map[string]*accesscontrol.RoleDTO{}
err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
type roleRow struct {
ID int64 `xorm:"id"`
OrgID int64 `xorm:"org_id"`
Version int64 `xorm:"version"`
UID string `xorm:"uid"`
Name string `xorm:"name"`
DisplayName string `xorm:"display_name"`
Description string `xorm:"description"`
Group string `xorm:"group_name"`
Hidden bool `xorm:"hidden"`
Updated time.Time `xorm:"updated"`
Created time.Time `xorm:"created"`
}
roles := []roleRow{}
if err := sess.Table("role").
Where("org_id = ?", accesscontrol.GlobalOrgID).
Where("(name LIKE ? OR name LIKE ?)", accesscontrol.FixedRolePrefix+"%", accesscontrol.PluginRolePrefix+"%").
Find(&roles); err != nil {
return err
}
if len(roles) == 0 {
return nil
}
roleIDs := make([]any, 0, len(roles))
roleByID := make(map[int64]*accesscontrol.RoleDTO, len(roles))
for _, r := range roles {
dto := &accesscontrol.RoleDTO{
ID: r.ID,
OrgID: r.OrgID,
Version: r.Version,
UID: r.UID,
Name: r.Name,
DisplayName: r.DisplayName,
Description: r.Description,
Group: r.Group,
Hidden: r.Hidden,
Updated: r.Updated,
Created: r.Created,
}
out[dto.Name] = dto
roleByID[dto.ID] = dto
roleIDs = append(roleIDs, dto.ID)
}
type permRow struct {
RoleID int64 `xorm:"role_id"`
Action string `xorm:"action"`
Scope string `xorm:"scope"`
}
perms := []permRow{}
if err := sess.Table("permission").In("role_id", roleIDs...).Find(&perms); err != nil {
return err
}
for _, p := range perms {
dto := roleByID[p.RoleID]
if dto == nil {
continue
}
dto.Permissions = append(dto.Permissions, accesscontrol.Permission{
RoleID: p.RoleID,
Action: p.Action,
Scope: p.Scope,
})
}
return nil
})
return out, err
}
func (s *AccessControlStore) SetRole(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
if existingRole == nil {
return nil
}
return s.sql.WithDbSession(ctx, func(sess *db.Session) error {
_, err := sess.Table("role").
Where("id = ? AND org_id = ?", existingRole.ID, accesscontrol.GlobalOrgID).
Update(map[string]any{
"display_name": wantedRole.DisplayName,
"description": wantedRole.Description,
"group_name": wantedRole.Group,
"hidden": wantedRole.Hidden,
"updated": time.Now(),
})
return err
})
}
func (s *AccessControlStore) SetPermissions(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
if existingRole == nil {
return nil
}
type key struct{ Action, Scope string }
existing := map[key]struct{}{}
for _, p := range existingRole.Permissions {
existing[key{p.Action, p.Scope}] = struct{}{}
}
desired := map[key]struct{}{}
for _, p := range wantedRole.Permissions {
desired[key{p.Action, p.Scope}] = struct{}{}
}
toAdd := make([]accesscontrol.Permission, 0)
toRemove := make([]accesscontrol.SeedPermission, 0)
now := time.Now()
for k := range desired {
if _, ok := existing[k]; ok {
continue
}
perm := accesscontrol.Permission{
RoleID: existingRole.ID,
Action: k.Action,
Scope: k.Scope,
Created: now,
Updated: now,
}
perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
toAdd = append(toAdd, perm)
}
for k := range existing {
if _, ok := desired[k]; ok {
continue
}
toRemove = append(toRemove, accesscontrol.SeedPermission{Action: k.Action, Scope: k.Scope})
}
if len(toAdd) == 0 && len(toRemove) == 0 {
return nil
}
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
if len(toRemove) > 0 {
if err := DeleteRolePermissionTuples(sess, s.sql.GetDBType(), existingRole.ID, toRemove); err != nil {
return err
}
}
if len(toAdd) > 0 {
_, err := sess.InsertMulti(toAdd)
return err
}
return nil
})
}
func (s *AccessControlStore) CreateRole(ctx context.Context, role accesscontrol.RoleDTO) error {
now := time.Now()
uid := role.UID
if uid == "" && (strings.HasPrefix(role.Name, accesscontrol.FixedRolePrefix) || strings.HasPrefix(role.Name, accesscontrol.PluginRolePrefix)) {
uid = accesscontrol.PrefixedRoleUID(role.Name)
}
r := accesscontrol.Role{
OrgID: accesscontrol.GlobalOrgID,
Version: role.Version,
UID: uid,
Name: role.Name,
DisplayName: role.DisplayName,
Description: role.Description,
Group: role.Group,
Hidden: role.Hidden,
Created: now,
Updated: now,
}
if r.Version == 0 {
r.Version = 1
}
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
if _, err := sess.Insert(&r); err != nil {
return err
}
if len(role.Permissions) == 0 {
return nil
}
// De-duplicate permissions on (action, scope) to avoid unique constraint violations.
// Some role definitions may accidentally include duplicates.
type permKey struct{ Action, Scope string }
seen := make(map[permKey]struct{}, len(role.Permissions))
perms := make([]accesscontrol.Permission, 0, len(role.Permissions))
for _, p := range role.Permissions {
k := permKey{Action: p.Action, Scope: p.Scope}
if _, ok := seen[k]; ok {
continue
}
seen[k] = struct{}{}
perm := accesscontrol.Permission{
RoleID: r.ID,
Action: p.Action,
Scope: p.Scope,
Created: now,
Updated: now,
}
perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
perms = append(perms, perm)
}
_, err := sess.InsertMulti(perms)
return err
})
}
func (s *AccessControlStore) DeleteRoles(ctx context.Context, roleUIDs []string) error {
if len(roleUIDs) == 0 {
return nil
}
uids := make([]any, 0, len(roleUIDs))
for _, uid := range roleUIDs {
uids = append(uids, uid)
}
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
type row struct {
ID int64 `xorm:"id"`
UID string `xorm:"uid"`
}
rows := []row{}
if err := sess.Table("role").
Where("org_id = ?", accesscontrol.GlobalOrgID).
In("uid", uids...).
Find(&rows); err != nil {
return err
}
if len(rows) == 0 {
return nil
}
roleIDs := make([]any, 0, len(rows))
for _, r := range rows {
roleIDs = append(roleIDs, r.ID)
}
// Remove permissions and assignments first to avoid FK issues (if enabled).
{
args := append([]any{"DELETE FROM permission WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
{
args := append([]any{"DELETE FROM user_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
{
args := append([]any{"DELETE FROM team_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
{
args := append([]any{"DELETE FROM builtin_role WHERE role_id IN (?" + strings.Repeat(",?", len(roleIDs)-1) + ")"}, roleIDs...)
if _, err := sess.Exec(args...); err != nil {
return err
}
}
args := append([]any{"DELETE FROM role WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")", accesscontrol.GlobalOrgID}, uids...)
_, err := sess.Exec(args...)
return err
})
}
// OSS basic-role permission refresh uses seeding.Seeder.Seed() with a desired set computed in memory.
// These methods implement the permission seeding part of seeding.SeedingBackend against the current permission table.
func (s *AccessControlStore) LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
var out map[accesscontrol.SeedPermission]struct{}
err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
rows, err := LoadBasicRoleSeedPermissions(sess)
if err != nil {
return err
}
out = make(map[accesscontrol.SeedPermission]struct{}, len(rows))
for _, r := range rows {
r.Origin = ""
out[r] = struct{}{}
}
return nil
})
return out, err
}
func (s *AccessControlStore) Apply(ctx context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
rolesToUpgrade := seeding.RolesToUpgrade(added, removed)
// Run the same OSS apply logic as ossBasicRoleSeedBackend.Apply inside a single transaction.
return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
defs := accesscontrol.BuildBasicRoleDefinitions()
builtinToRoleID, err := EnsureBasicRolesExist(sess, defs)
if err != nil {
return err
}
backend := &ossBasicRoleSeedBackend{
sess: sess,
now: time.Now(),
builtinToRoleID: builtinToRoleID,
desired: nil,
dbType: s.sql.GetDBType(),
}
if err := backend.Apply(ctx, added, removed, updated); err != nil {
return err
}
return BumpBasicRoleVersions(sess, rolesToUpgrade)
})
}
// EnsureBasicRolesExist ensures the built-in basic roles exist in the role table and are bound in builtin_role.
// It returns a mapping from builtin role name (for example "Admin") to role ID.
func EnsureBasicRolesExist(sess *db.Session, defs map[string]*accesscontrol.RoleDTO) (map[string]int64, error) {
uidToBuiltin := make(map[string]string, len(defs))
uids := make([]any, 0, len(defs))
for builtin, def := range defs {
uidToBuiltin[def.UID] = builtin
uids = append(uids, def.UID)
}
type roleRow struct {
ID int64 `xorm:"id"`
UID string `xorm:"uid"`
}
rows := []roleRow{}
if err := sess.Table("role").
Where("org_id = ?", accesscontrol.GlobalOrgID).
In("uid", uids...).
Find(&rows); err != nil {
return nil, err
}
ts := time.Now()
builtinToRoleID := make(map[string]int64, len(defs))
for _, r := range rows {
br, ok := uidToBuiltin[r.UID]
if !ok {
continue
}
builtinToRoleID[br] = r.ID
}
for builtin, def := range defs {
roleID, ok := builtinToRoleID[builtin]
if !ok {
role := accesscontrol.Role{
OrgID: def.OrgID,
Version: def.Version,
UID: def.UID,
Name: def.Name,
DisplayName: def.DisplayName,
Description: def.Description,
Group: def.Group,
Hidden: def.Hidden,
Created: ts,
Updated: ts,
}
if _, err := sess.Insert(&role); err != nil {
return nil, err
}
roleID = role.ID
builtinToRoleID[builtin] = roleID
}
has, err := sess.Table("builtin_role").
Where("role_id = ? AND role = ? AND org_id = ?", roleID, builtin, accesscontrol.GlobalOrgID).
Exist()
if err != nil {
return nil, err
}
if !has {
br := accesscontrol.BuiltinRole{
RoleID: roleID,
OrgID: accesscontrol.GlobalOrgID,
Role: builtin,
Created: ts,
Updated: ts,
}
if _, err := sess.Table("builtin_role").Insert(&br); err != nil {
return nil, err
}
}
}
return builtinToRoleID, nil
}
// DeleteRolePermissionTuples deletes permissions for a single role by (action, scope) pairs.
//
// It uses a row-constructor IN clause where supported (MySQL, Postgres, SQLite) and falls back
// to a WHERE ... OR ... form for MSSQL.
func DeleteRolePermissionTuples(sess *db.Session, dbType core.DbType, roleID int64, perms []accesscontrol.SeedPermission) error {
if len(perms) == 0 {
return nil
}
if dbType == migrator.MSSQL {
// MSSQL doesn't support (action, scope) IN ((?,?),(?,?)) row constructors.
where := make([]string, 0, len(perms))
args := make([]any, 0, 1+len(perms)*2)
args = append(args, roleID)
for _, p := range perms {
where = append(where, "(action = ? AND scope = ?)")
args = append(args, p.Action, p.Scope)
}
_, err := sess.Exec(
append([]any{
"DELETE FROM permission WHERE role_id = ? AND (" + strings.Join(where, " OR ") + ")",
}, args...)...,
)
return err
}
args := make([]any, 0, 1+len(perms)*2)
args = append(args, roleID)
for _, p := range perms {
args = append(args, p.Action, p.Scope)
}
sql := "DELETE FROM permission WHERE role_id = ? AND (action, scope) IN (" +
strings.Repeat("(?, ?),", len(perms)-1) + "(?, ?))"
_, err := sess.Exec(append([]any{sql}, args...)...)
return err
}
type ossBasicRoleSeedBackend struct {
sess *db.Session
now time.Time
builtinToRoleID map[string]int64
desired map[accesscontrol.SeedPermission]struct{}
dbType core.DbType
}
func (b *ossBasicRoleSeedBackend) LoadPrevious(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
rows, err := LoadBasicRoleSeedPermissions(b.sess)
if err != nil {
return nil, err
}
out := make(map[accesscontrol.SeedPermission]struct{}, len(rows))
for _, r := range rows {
// Ensure the key matches what OSS seeding uses (Origin is always empty for basic role refresh).
r.Origin = ""
out[r] = struct{}{}
}
return out, nil
}
func (b *ossBasicRoleSeedBackend) LoadDesired(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
return b.desired, nil
}
// Apply reconciles the permission table with the computed seed diff:
// deletes `removed`, then inserts `added` plus the new targets of `updated`.
// The context is unused; all statements run on the backend's session.
func (b *ossBasicRoleSeedBackend) Apply(_ context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
	// Delete removed permissions (this includes user-defined permissions that aren't in desired).
	if len(removed) > 0 {
		// Group deletions by role so each DELETE targets a single role_id.
		permsByRoleID := map[int64][]accesscontrol.SeedPermission{}
		for _, p := range removed {
			roleID, ok := b.builtinToRoleID[p.BuiltInRole]
			if !ok {
				// Unknown builtin role name: nothing stored under it to delete.
				continue
			}
			permsByRoleID[roleID] = append(permsByRoleID[roleID], p)
		}
		for roleID, perms := range permsByRoleID {
			// Chunk to keep statement sizes and parameter counts bounded.
			if err := batch(len(perms), basicRolePermBatchSize, func(start, end int) error {
				return DeleteRolePermissionTuples(b.sess, b.dbType, roleID, perms[start:end])
			}); err != nil {
				return err
			}
		}
	}
	// Insert added permissions and updated-target permissions.
	toInsertSeed := make([]accesscontrol.SeedPermission, 0, len(added)+len(updated))
	toInsertSeed = append(toInsertSeed, added...)
	for _, v := range updated {
		toInsertSeed = append(toInsertSeed, v)
	}
	if len(toInsertSeed) == 0 {
		return nil
	}
	// De-duplicate on (role_id, action, scope). This avoids unique constraint violations when:
	// - the same permission appears in both added and updated
	// - multiple plugin origins grant the same permission (Origin is not persisted in permission table)
	type permKey struct {
		RoleID int64
		Action string
		Scope  string
	}
	seen := make(map[permKey]struct{}, len(toInsertSeed))
	toInsert := make([]accesscontrol.Permission, 0, len(toInsertSeed))
	for _, p := range toInsertSeed {
		roleID, ok := b.builtinToRoleID[p.BuiltInRole]
		if !ok {
			continue
		}
		k := permKey{RoleID: roleID, Action: p.Action, Scope: p.Scope}
		if _, ok := seen[k]; ok {
			continue
		}
		seen[k] = struct{}{}
		perm := accesscontrol.Permission{
			RoleID:  roleID,
			Action:  p.Action,
			Scope:   p.Scope,
			Created: b.now,
			Updated: b.now,
		}
		// Derive the denormalized kind/attribute/identifier columns from the scope string.
		perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
		toInsert = append(toInsert, perm)
	}
	return batch(len(toInsert), basicRolePermBatchSize, func(start, end int) error {
		// MySQL: ignore conflicts to make seeding idempotent under retries/concurrency.
		// Conflicts can happen if the same permission already exists (unique on role_id, action, scope).
		if b.dbType == migrator.MySQL {
			args := make([]any, 0, (end-start)*8)
			for i := start; i < end; i++ {
				p := toInsert[i]
				args = append(args, p.RoleID, p.Action, p.Scope, p.Kind, p.Attribute, p.Identifier, p.Updated, p.Created)
			}
			// Build one multi-row VALUES list; end-start >= 1 is guaranteed by batch().
			sql := append([]any{`INSERT IGNORE INTO permission (role_id, action, scope, kind, attribute, identifier, updated, created) VALUES ` +
				strings.Repeat("(?, ?, ?, ?, ?, ?, ?, ?),", end-start-1) + "(?, ?, ?, ?, ?, ?, ?, ?)"}, args...)
			_, err := b.sess.Exec(sql...)
			return err
		}
		_, err := b.sess.InsertMulti(toInsert[start:end])
		return err
	})
}
// batch invokes eachFn over consecutive half-open chunks [start, end) of at
// most size elements, covering [0, count). It stops at the first error and
// returns it; count <= 0 results in no calls. size is assumed positive.
func batch(count, size int, eachFn func(start, end int) error) error {
	for start := 0; start < count; start += size {
		end := start + size
		if end > count {
			end = count
		}
		if err := eachFn(start, end); err != nil {
			return err
		}
	}
	return nil
}
// BumpBasicRoleVersions increments the role version for the given builtin basic roles (Viewer/Editor/Admin/Grafana Admin).
// Unknown role names are ignored. A no-op (nil error) when no known roles are given.
func BumpBasicRoleVersions(sess *db.Session, basicRoles []string) error {
	if len(basicRoles) == 0 {
		return nil
	}
	// Resolve role names to their stable UIDs; names without a basic role
	// definition are silently skipped.
	definitions := accesscontrol.BuildBasicRoleDefinitions()
	roleUIDs := make([]any, 0, len(basicRoles))
	for _, name := range basicRoles {
		if def, ok := definitions[name]; ok {
			roleUIDs = append(roleUIDs, def.UID)
		}
	}
	if len(roleUIDs) == 0 {
		return nil
	}
	// One "?" placeholder per UID, comma separated.
	placeholders := "?" + strings.Repeat(",?", len(roleUIDs)-1)
	query := "UPDATE role SET version = version + 1 WHERE org_id = ? AND uid IN (" + placeholders + ")"
	args := append([]any{query, accesscontrol.GlobalOrgID}, roleUIDs...)
	_, err := sess.Exec(args...)
	return err
}
// LoadBasicRoleSeedPermissions returns the current (builtin_role, action, scope) permissions granted to basic roles.
// It sets Origin to empty.
func LoadBasicRoleSeedPermissions(sess *db.Session) ([]accesscontrol.SeedPermission, error) {
	rows := []accesscontrol.SeedPermission{}
	// The role's display name (e.g. "Viewer") serves as the builtin-role key;
	// origin is selected as a constant empty string so rows match the shape of
	// the in-memory seed set used for diffing.
	err := sess.SQL(
		`SELECT role.display_name AS builtin_role, p.action, p.scope, '' AS origin
		FROM role INNER JOIN permission AS p ON p.role_id = role.id
		WHERE role.org_id = ? AND role.name LIKE 'basic:%'`,
		accesscontrol.GlobalOrgID,
	).Find(&rows)
	return rows, err
}

View File

@@ -6,6 +6,8 @@ import (
"strconv"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"go.opentelemetry.io/otel"
claims "github.com/grafana/authlib/types"
@@ -13,6 +15,7 @@ import (
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/serverlock"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -33,12 +36,15 @@ type ZanzanaReconciler struct {
store db.DB
client zanzana.Client
lock *serverlock.ServerLockService
metrics struct {
lastSuccess prometheus.Gauge
}
// reconcilers are migrations that tries to reconcile the state of grafana db to zanzana store.
// These are run periodically to try to maintain a consistent state.
reconcilers []resourceReconciler
}
func ProvideZanzanaReconciler(cfg *setting.Cfg, features featuremgmt.FeatureToggles, client zanzana.Client, store db.DB, lock *serverlock.ServerLockService, folderService folder.Service) *ZanzanaReconciler {
func ProvideZanzanaReconciler(cfg *setting.Cfg, features featuremgmt.FeatureToggles, client zanzana.Client, store db.DB, lock *serverlock.ServerLockService, folderService folder.Service, reg prometheus.Registerer) *ZanzanaReconciler {
zanzanaReconciler := &ZanzanaReconciler{
cfg: cfg,
log: reconcilerLogger,
@@ -92,6 +98,13 @@ func ProvideZanzanaReconciler(cfg *setting.Cfg, features featuremgmt.FeatureTogg
},
}
if reg != nil {
zanzanaReconciler.metrics.lastSuccess = promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "grafana_zanzana_reconcile_last_success_timestamp_seconds",
Help: "Unix timestamp (seconds) when the Zanzana reconciler last completed a reconciliation cycle.",
})
}
if cfg.Anonymous.Enabled {
zanzanaReconciler.reconcilers = append(zanzanaReconciler.reconcilers,
newResourceReconciler(
@@ -118,6 +131,9 @@ func (r *ZanzanaReconciler) Run(ctx context.Context) error {
// Reconcile schedules as job that will run and reconcile resources between
// legacy access control and zanzana.
func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
// Ensure we don't reconcile an empty/partial RBAC state before OSS has seeded basic role permissions.
// This matters most during startup where fixed-role loading + basic-role permission refresh runs as another background service.
r.waitForBasicRolesSeeded(ctx)
r.reconcile(ctx)
// FIXME:
@@ -133,6 +149,57 @@ func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
}
}
// hasBasicRolePermissions reports whether at least one permission is attached
// to a basic ("basic:%") role in the global org.
// NOTE(review): the session error is deliberately discarded (best-effort
// probe); any query failure reads as "no permissions yet".
func (r *ZanzanaReconciler) hasBasicRolePermissions(ctx context.Context) bool {
	var count int64
	// Basic role permissions are stored on "basic:%" roles in the global org (0).
	// In a fresh DB, this will be empty until fixed roles are registered and the basic role permission refresh runs.
	type row struct {
		Count int64 `xorm:"count"`
	}
	_ = r.store.WithDbSession(ctx, func(sess *db.Session) error {
		var rr row
		_, err := sess.SQL(
			`SELECT COUNT(*) AS count
			FROM role INNER JOIN permission AS p ON p.role_id = role.id
			WHERE role.org_id = ? AND role.name LIKE ?`,
			accesscontrol.GlobalOrgID,
			accesscontrol.BasicRolePrefix+"%",
		).Get(&rr)
		if err != nil {
			return err
		}
		count = rr.Count
		return nil
	})
	return count > 0
}
// waitForBasicRolesSeeded polls until basic-role permissions exist, the
// context is cancelled, or the max wait elapses — then returns either way.
func (r *ZanzanaReconciler) waitForBasicRolesSeeded(ctx context.Context) {
	// Best-effort: don't block forever. If we can't observe basic roles, proceed anyway.
	const (
		maxWait  = 15 * time.Second
		interval = 1 * time.Second
	)
	deadline := time.NewTimer(maxWait)
	defer deadline.Stop()
	ticker := time.NewTicker(interval)
	defer ticker.Stop()
	for {
		// Check before waiting so an already-seeded DB returns immediately.
		if r.hasBasicRolePermissions(ctx) {
			return
		}
		select {
		case <-ctx.Done():
			return
		case <-deadline.C:
			return
		case <-ticker.C:
			// Re-probe on the next loop iteration.
		}
	}
}
func (r *ZanzanaReconciler) reconcile(ctx context.Context) {
run := func(ctx context.Context, namespace string) {
now := time.Now()
@@ -144,6 +211,9 @@ func (r *ZanzanaReconciler) reconcile(ctx context.Context) {
r.log.Warn("Failed to perform reconciliation for resource", "err", err)
}
}
if r.metrics.lastSuccess != nil {
r.metrics.lastSuccess.SetToCurrentTime()
}
r.log.Debug("Finished reconciliation", "elapsed", time.Since(now))
}

View File

@@ -0,0 +1,67 @@
package dualwrite
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
// TestZanzanaReconciler_hasBasicRolePermissions verifies the probe is false on
// an empty database and flips to true once a basic role with at least one
// permission exists in the global org.
func TestZanzanaReconciler_hasBasicRolePermissions(t *testing.T) {
	env := setupTestEnv(t)
	r := &ZanzanaReconciler{
		store: env.db,
	}
	ctx := context.Background()
	// Fresh DB: no basic roles, so the probe must report false.
	require.False(t, r.hasBasicRolePermissions(ctx))
	err := env.db.WithDbSession(ctx, func(sess *db.Session) error {
		now := time.Now()
		// Insert a minimal "basic:viewer" role in the global org.
		_, err := sess.Exec(
			`INSERT INTO role (org_id, uid, name, display_name, group_name, description, hidden, version, created, updated)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
			accesscontrol.GlobalOrgID,
			"basic_viewer_uid_test",
			accesscontrol.BasicRolePrefix+"viewer",
			"Viewer",
			"Basic",
			"Viewer role",
			false,
			1,
			now,
			now,
		)
		if err != nil {
			return err
		}
		// Look up the generated primary key to attach a permission to it.
		var roleID int64
		if _, err := sess.SQL(`SELECT id FROM role WHERE org_id = ? AND uid = ?`, accesscontrol.GlobalOrgID, "basic_viewer_uid_test").Get(&roleID); err != nil {
			return err
		}
		_, err = sess.Exec(
			`INSERT INTO permission (role_id, action, scope, kind, attribute, identifier, created, updated)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
			roleID,
			"dashboards:read",
			"dashboards:*",
			"",
			"",
			"",
			now,
			now,
		)
		return err
	})
	require.NoError(t, err)
	// With one permission attached, the probe must now report true.
	require.True(t, r.hasBasicRolePermissions(ctx))
}

View File

@@ -1,6 +1,7 @@
package accesscontrol
import (
"context"
"encoding/json"
"errors"
"fmt"
@@ -594,3 +595,18 @@ type QueryWithOrg struct {
OrgId *int64 `json:"orgId"`
Global bool `json:"global"`
}
type SeedPermission struct {
BuiltInRole string `xorm:"builtin_role"`
Action string `xorm:"action"`
Scope string `xorm:"scope"`
Origin string `xorm:"origin"`
}
type RoleStore interface {
LoadRoles(ctx context.Context) (map[string]*RoleDTO, error)
SetRole(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
SetPermissions(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
CreateRole(ctx context.Context, role RoleDTO) error
DeleteRoles(ctx context.Context, roleUIDs []string) error
}

View File

@@ -0,0 +1,451 @@
package seeding
import (
"context"
"fmt"
"regexp"
"slices"
"strings"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol"
)
// Seeder reconciles role registrations (fixed and plugin roles) and the
// basic-role permission seed set against the role store.
type Seeder struct {
	log       log.Logger
	roleStore accesscontrol.RoleStore
	backend   SeedingBackend
	// builtinsPermissions is the desired seed set accumulated from role registrations.
	builtinsPermissions map[accesscontrol.SeedPermission]struct{}
	// seededFixedRoles tracks fixed role names registered this run.
	seededFixedRoles map[string]bool
	// seededPluginRoles tracks plugin role names registered this run.
	seededPluginRoles map[string]bool
	// seededPlugins tracks plugin IDs that registered this run; used to skip
	// cleanup for plugins that failed to load.
	seededPlugins map[string]bool
	// hasSeededAlready records whether a seeding pass has completed.
	hasSeededAlready bool
}
// SeedingBackend provides the seed-set specific operations needed to seed.
type SeedingBackend interface {
	// LoadPrevious returns the currently stored permissions for previously seeded roles.
	LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error)
	// Apply updates the database to match the desired permissions:
	// added are inserted, removed are deleted, and for updated entries the new
	// target values are inserted (the old keys arrive via removed).
	Apply(ctx context.Context,
		added, removed []accesscontrol.SeedPermission,
		updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission,
	) error
}
// New constructs a Seeder with empty tracking state for fixed roles, plugin
// roles, registered plugins, and the desired permission set.
func New(log log.Logger, roleStore accesscontrol.RoleStore, backend SeedingBackend) *Seeder {
	seeder := &Seeder{
		log:                 log,
		roleStore:           roleStore,
		backend:             backend,
		builtinsPermissions: make(map[accesscontrol.SeedPermission]struct{}),
		seededFixedRoles:    make(map[string]bool),
		seededPluginRoles:   make(map[string]bool),
		seededPlugins:       make(map[string]bool),
	}
	// hasSeededAlready defaults to false (zero value).
	return seeder
}
// SetDesiredPermissions replaces the in-memory desired permission set used by Seed().
func (s *Seeder) SetDesiredPermissions(desired map[accesscontrol.SeedPermission]struct{}) {
	if desired != nil {
		s.builtinsPermissions = desired
		return
	}
	// A nil argument resets to an empty (non-nil) set so later writes stay safe.
	s.builtinsPermissions = map[accesscontrol.SeedPermission]struct{}{}
}
// Seed loads current and desired permissions, diffs them (including scope updates), applies changes, and bumps versions.
func (s *Seeder) Seed(ctx context.Context) error {
	previous, err := s.backend.LoadPrevious(ctx)
	if err != nil {
		return err
	}
	// Filter the stored set before diffing:
	// - Do not remove plugin permissions when the plugin didn't register this run (Origin set but not in seededPlugins).
	// - Preserve legacy plugin app access permissions in the persisted seed set (these are granted by default).
	//   Dropping them from `previous` here means they are neither added nor removed by the diff.
	if len(previous) > 0 {
		filtered := make(map[accesscontrol.SeedPermission]struct{}, len(previous))
		for p := range previous {
			if p.Action == pluginaccesscontrol.ActionAppAccess {
				continue
			}
			if p.Origin != "" && !s.seededPlugins[p.Origin] {
				continue
			}
			filtered[p] = struct{}{}
		}
		previous = filtered
	}
	// NOTE: permissionDiff consumes (mutates) `previous`; that is safe here
	// because it is a locally owned map.
	added, removed, updated := s.permissionDiff(previous, s.builtinsPermissions)
	if err := s.backend.Apply(ctx, added, removed, updated); err != nil {
		return err
	}
	return nil
}
// SeedRoles populates the database with the roles and their assignments
// It will create roles that do not exist and update roles that have changed
// Do not use for provisioning. Validation is not enforced.
func (s *Seeder) SeedRoles(ctx context.Context, registrationList []accesscontrol.RoleRegistration) error {
	roleMap, err := s.roleStore.LoadRoles(ctx)
	if err != nil {
		return err
	}
	missingRoles := make([]accesscontrol.RoleRegistration, 0, len(registrationList))
	// Diff existing roles with the ones we want to seed.
	// If a role is missing, we add it to the missingRoles list
	for _, registration := range registrationList {
		// Shadow the loop variable so closures/pointers below don't alias across iterations.
		registration := registration
		role, ok := roleMap[registration.Role.Name]
		// Track what registered this run so later cleanup passes know what to keep.
		switch {
		case registration.Role.IsFixed():
			s.seededFixedRoles[registration.Role.Name] = true
		case registration.Role.IsPlugin():
			s.seededPluginRoles[registration.Role.Name] = true
			// To be resilient to failed plugin loadings, we remember the plugins that have registered,
			// later we'll ignore permissions and roles of other plugins
			s.seededPlugins[pluginutils.PluginIDFromName(registration.Role.Name)] = true
		}
		// Accumulate the role's permissions onto the granted basic roles.
		s.rememberPermissionAssignments(&registration.Role, registration.Grants, registration.Exclude)
		if !ok {
			missingRoles = append(missingRoles, registration)
			continue
		}
		// Role exists: update metadata and/or permissions only if they changed.
		if needsRoleUpdate(role, registration.Role) {
			if err := s.roleStore.SetRole(ctx, role, registration.Role); err != nil {
				return err
			}
		}
		if needsPermissionsUpdate(role, registration.Role) {
			if err := s.roleStore.SetPermissions(ctx, role, registration.Role); err != nil {
				return err
			}
		}
	}
	// Create the roles that did not exist yet.
	for _, registration := range missingRoles {
		if err := s.roleStore.CreateRole(ctx, registration.Role); err != nil {
			return err
		}
	}
	return nil
}
// needsPermissionsUpdate reports whether the stored role's permissions differ
// from the wanted role's, comparing on (action, scope) pairs.
// A nil existing role always needs an update.
func needsPermissionsUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
	if existingRole == nil {
		return true
	}
	if len(existingRole.Permissions) != len(wantedRole.Permissions) {
		return true
	}
	// Index the existing permissions once so the comparison is O(n) instead of
	// the previous O(n^2) nested scan; membership semantics are unchanged.
	type actionScope struct {
		action string
		scope  string
	}
	existing := make(map[actionScope]struct{}, len(existingRole.Permissions))
	for _, ep := range existingRole.Permissions {
		existing[actionScope{action: ep.Action, scope: ep.Scope}] = struct{}{}
	}
	for _, p := range wantedRole.Permissions {
		if _, ok := existing[actionScope{action: p.Action, scope: p.Scope}]; !ok {
			return true
		}
	}
	return false
}
// needsRoleUpdate reports whether the stored role's metadata differs from the
// wanted role's. A nil existing role always needs an update; mismatched names
// never do (renames are not handled here).
func needsRoleUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
	if existingRole == nil {
		return true
	}
	if existingRole.Name != wantedRole.Name {
		return false
	}
	// Any changed metadata field triggers an update.
	return existingRole.DisplayName != wantedRole.DisplayName ||
		existingRole.Description != wantedRole.Description ||
		existingRole.Group != wantedRole.Group ||
		existingRole.Hidden != wantedRole.Hidden
}
// Deprecated: SeedRole is deprecated and should not be used.
// SeedRoles only does boot up seeding and should not be used for runtime seeding.
func (s *Seeder) SeedRole(ctx context.Context, role accesscontrol.RoleDTO, builtInRoles []string) error {
	// De-duplicate the incoming permissions on (action, scope).
	addedPermissions := make(map[string]struct{}, len(role.Permissions))
	permissions := make([]accesscontrol.Permission, 0, len(role.Permissions))
	for _, p := range role.Permissions {
		key := fmt.Sprintf("%s:%s", p.Action, p.Scope)
		if _, ok := addedPermissions[key]; !ok {
			addedPermissions[key] = struct{}{}
			permissions = append(permissions, accesscontrol.Permission{Action: p.Action, Scope: p.Scope})
		}
	}
	// Normalize the role into the global org with the de-duplicated permissions.
	wantedRole := accesscontrol.RoleDTO{
		OrgID:       accesscontrol.GlobalOrgID,
		Version:     role.Version,
		UID:         role.UID,
		Name:        role.Name,
		DisplayName: role.DisplayName,
		Description: role.Description,
		Group:       role.Group,
		Permissions: permissions,
		Hidden:      role.Hidden,
	}
	roleMap, err := s.roleStore.LoadRoles(ctx)
	if err != nil {
		return err
	}
	existingRole := roleMap[wantedRole.Name]
	if existingRole == nil {
		// New role: create it outright.
		if err := s.roleStore.CreateRole(ctx, wantedRole); err != nil {
			return err
		}
	} else {
		// Existing role: update metadata and/or permissions only when changed.
		if needsRoleUpdate(existingRole, wantedRole) {
			if err := s.roleStore.SetRole(ctx, existingRole, wantedRole); err != nil {
				return err
			}
		}
		if needsPermissionsUpdate(existingRole, wantedRole) {
			if err := s.roleStore.SetPermissions(ctx, existingRole, wantedRole); err != nil {
				return err
			}
		}
	}
	// Remember seeded roles
	if wantedRole.IsFixed() {
		s.seededFixedRoles[wantedRole.Name] = true
	}
	isPluginRole := wantedRole.IsPlugin()
	if isPluginRole {
		s.seededPluginRoles[wantedRole.Name] = true
		// To be resilient to failed plugin loadings, we remember the plugins that have registered,
		// later we'll ignore permissions and roles of other plugins
		s.seededPlugins[pluginutils.PluginIDFromName(role.Name)] = true
	}
	s.rememberPermissionAssignments(&wantedRole, builtInRoles, []string{})
	return nil
}
// rememberPermissionAssignments records the role's permissions for every
// granted basic role into the seeder's desired seed set, skipping plugin
// app-access permissions (they are granted by default).
func (s *Seeder) rememberPermissionAssignments(role *accesscontrol.RoleDTO, builtInRoles []string, excludedRoles []string) {
	AppendDesiredPermissions(s.builtinsPermissions, s.log, role, builtInRoles, excludedRoles, true)
}
// AppendDesiredPermissions accumulates permissions from a role registration onto basic roles (Viewer/Editor/Admin/Grafana Admin).
// - It expands parents via accesscontrol.BuiltInRolesWithParents.
// - It can optionally ignore plugin app access permissions (which are granted by default).
// The result is written into out; nil out or nil role is a no-op.
func AppendDesiredPermissions(
	out map[accesscontrol.SeedPermission]struct{},
	logger log.Logger,
	role *accesscontrol.RoleDTO,
	builtInRoles []string,
	excludedRoles []string,
	ignorePluginAppAccess bool,
) {
	if out == nil || role == nil {
		return
	}
	for builtInRole := range accesscontrol.BuiltInRolesWithParents(builtInRoles) {
		// Skip excluded grants
		if slices.Contains(excludedRoles, builtInRole) {
			continue
		}
		for _, perm := range role.Permissions {
			if ignorePluginAppAccess && perm.Action == pluginaccesscontrol.ActionAppAccess {
				logger.Debug("Role is attempting to grant access permission, but this permission is already granted by default and will be ignored",
					"role", role.Name, "permission", perm.Action, "scope", perm.Scope)
				continue
			}
			sp := accesscontrol.SeedPermission{
				BuiltInRole: builtInRole,
				Action:      perm.Action,
				Scope:       perm.Scope,
			}
			// Tag plugin-sourced permissions with their plugin ID so cleanup
			// can later scope removals per plugin.
			if role.IsPlugin() {
				sp.Origin = pluginutils.PluginIDFromName(role.Name)
			}
			out[sp] = struct{}{}
		}
	}
}
// permissionDiff returns:
// - added: present in desired permissions, not in previous permissions
// - removed: present in previous permissions, not in desired permissions
// - updated: same role + action, but scope changed
//
// NOTE: this consumes `previous` — entries common to both sets are deleted
// from it in the first pass, so callers must hand in a map they own.
// Scope-updated entries intentionally appear in BOTH updated and removed:
// Apply realizes a scope change by deleting the old row and inserting the new.
func (s *Seeder) permissionDiff(previous, desired map[accesscontrol.SeedPermission]struct{}) (added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) {
	addedSet := make(map[accesscontrol.SeedPermission]struct{}, 0)
	for n := range desired {
		if _, already := previous[n]; !already {
			addedSet[n] = struct{}{}
		} else {
			// Present in both: neither added nor removed; drop it from previous.
			delete(previous, n)
		}
	}
	// Check if any of the new permissions is actually an old permission with an updated scope
	updated = make(map[accesscontrol.SeedPermission]accesscontrol.SeedPermission, 0)
	for n := range addedSet {
		for p := range previous {
			if n.BuiltInRole == p.BuiltInRole && n.Action == p.Action {
				// Deleting a key during a map range is safe in Go.
				updated[p] = n
				delete(addedSet, n)
			}
		}
	}
	for p := range addedSet {
		added = append(added, p)
	}
	for p := range previous {
		if p.Action == pluginaccesscontrol.ActionAppAccess &&
			p.Scope != pluginaccesscontrol.ScopeProvider.GetResourceAllScope() {
			// Allows backward compatibility with plugins that have been seeded before the grant ignore rule was added
			s.log.Info("This permission already existed so it will not be removed",
				"role", p.BuiltInRole, "permission", p.Action, "scope", p.Scope)
			continue
		}
		removed = append(removed, p)
	}
	return added, removed, updated
}
// ClearBasicRolesPluginPermissions drops from the desired seed set every
// basic-role permission originating from the plugin with the given ID.
func (s *Seeder) ClearBasicRolesPluginPermissions(ID string) {
	// Collect matches first, then delete them from the map.
	var toDelete []accesscontrol.SeedPermission
	for perm := range s.builtinsPermissions {
		if matchPermissionByPluginID(perm, ID) {
			toDelete = append(toDelete, perm)
		}
	}
	for _, perm := range toDelete {
		delete(s.builtinsPermissions, perm)
	}
}
// matchPermissionByPluginID reports whether the seeded permission belongs to
// the given plugin: its Origin equals the plugin ID, and either its action is
// prefixed by the plugin ID (followed by '.' or ':') or its scope ends with
// ":<plugin-id>".
func matchPermissionByPluginID(perm accesscontrol.SeedPermission, pluginID string) bool {
	if perm.Origin != pluginID {
		return false
	}
	// Quote the plugin ID so it is matched literally: plugin IDs routinely
	// contain '.', which would otherwise match any character, and an ID with
	// other regex metacharacters would make MustCompile panic.
	actionTemplate := regexp.MustCompile(fmt.Sprintf("%s[.:]", regexp.QuoteMeta(pluginID)))
	scopeTemplate := fmt.Sprintf(":%s", pluginID)
	return actionTemplate.MatchString(perm.Action) || strings.HasSuffix(perm.Scope, scopeTemplate)
}
// RolesToUpgrade returns the unique basic roles that should have their version incremented.
func RolesToUpgrade(added, removed []accesscontrol.SeedPermission) []string {
	unique := make(map[string]struct{}, len(added)+len(removed))
	for _, perms := range [][]accesscontrol.SeedPermission{added, removed} {
		for _, p := range perms {
			unique[p.BuiltInRole] = struct{}{}
		}
	}
	roles := make([]string, 0, len(unique))
	for name := range unique {
		roles = append(roles, name)
	}
	return roles
}
// ClearPluginRoles forgets every seeded plugin role belonging to the plugin
// with the given ID (role names of the form "<PluginRolePrefix><ID>:...").
func (s *Seeder) ClearPluginRoles(ID string) {
	prefix := accesscontrol.PluginRolePrefix + ID + ":"
	for roleName := range s.seededPluginRoles {
		if strings.HasPrefix(roleName, prefix) {
			delete(s.seededPluginRoles, roleName)
		}
	}
}
// MarkSeededAlready flags that a seeding pass has completed.
func (s *Seeder) MarkSeededAlready() {
	s.hasSeededAlready = true
}

// HasSeededAlready reports whether a seeding pass has completed.
func (s *Seeder) HasSeededAlready() bool {
	return s.hasSeededAlready
}
// RemoveAbsentRoles deletes stored fixed and plugin roles that were not
// registered during this run. Plugin roles are only removed when their plugin
// did register this run (so a plugin that failed to load keeps its roles).
func (s *Seeder) RemoveAbsentRoles(ctx context.Context) error {
	roleMap, errGet := s.roleStore.LoadRoles(ctx)
	if errGet != nil {
		s.log.Error("failed to get fixed roles from store", "err", errGet)
		return errGet
	}
	toRemove := []string{}
	for _, r := range roleMap {
		if r == nil {
			continue
		}
		if r.IsFixed() {
			// Fixed role not registered this run: stale, delete it.
			if !s.seededFixedRoles[r.Name] {
				s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
			continue
		}
		if r.IsPlugin() {
			if !s.seededPlugins[pluginutils.PluginIDFromName(r.Name)] {
				// To be resilient to failed plugin loadings
				// ignore stored roles related to plugins that have not registered this time
				s.log.Debug("plugin role has not been registered on this run skipping its removal", "role", r.Name)
				continue
			}
			// Plugin registered, but this particular role didn't: delete it.
			if !s.seededPluginRoles[r.Name] {
				s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
		}
	}
	if errDelete := s.roleStore.DeleteRoles(ctx, toRemove); errDelete != nil {
		s.log.Error("failed to delete absent fixed and plugin roles", "err", errDelete)
		return errDelete
	}
	return nil
}

View File

@@ -78,6 +78,9 @@ func ProvideZanzanaClient(cfg *setting.Cfg, db db.DB, tracer tracing.Tracer, fea
ctx = types.WithAuthInfo(ctx, authnlib.NewAccessTokenAuthInfo(authnlib.Claims[authnlib.AccessTokenClaims]{
Rest: authnlib.AccessTokenClaims{
Namespace: "*",
Permissions: []string{
zanzana.TokenPermissionUpdate,
},
},
}))
return ctx, nil

View File

@@ -4,7 +4,9 @@ import (
"context"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/setting"
"golang.org/x/exp/slices"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -30,3 +32,20 @@ func authorize(ctx context.Context, namespace string, ss setting.ZanzanaServerSe
}
return nil
}
// authorizeWrite performs the regular namespace authorization check and
// additionally requires the caller's token to carry the zanzana update
// permission. Returns codes.Unauthenticated when no auth info is attached to
// the context and codes.PermissionDenied when the permission is missing.
func authorizeWrite(ctx context.Context, namespace string, ss setting.ZanzanaServerSettings) error {
	if err := authorize(ctx, namespace, ss); err != nil {
		return err
	}
	c, ok := claims.AuthInfoFrom(ctx)
	if !ok {
		return status.Errorf(codes.Unauthenticated, "unauthenticated")
	}
	if !slices.Contains(c.GetTokenPermissions(), zanzana.TokenPermissionUpdate) {
		return status.Errorf(codes.PermissionDenied, "missing token permission %s", zanzana.TokenPermissionUpdate)
	}
	return nil
}

View File

@@ -391,7 +391,7 @@ func setupBenchmarkServer(b *testing.B) (*Server, *benchmarkData) {
b.Logf("Total tuples to write: %d", len(allTuples))
// Get store info
ctx := newContextWithNamespace()
ctx := newContextWithZanzanaUpdatePermission()
storeInf, err := srv.getStoreInfo(ctx, benchNamespace)
require.NoError(b, err)

View File

@@ -8,6 +8,7 @@ import (
openfgav1 "github.com/openfga/api/proto/openfga/v1"
"go.opentelemetry.io/otel/codes"
"google.golang.org/grpc/status"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
)
@@ -35,6 +36,9 @@ func (s *Server) Mutate(ctx context.Context, req *authzextv1.MutateRequest) (*au
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
if _, ok := status.FromError(err); ok {
return nil, err
}
s.logger.Error("failed to perform mutate request", "error", err, "namespace", req.GetNamespace())
return nil, errors.New("failed to perform mutate request")
}
@@ -43,7 +47,7 @@ func (s *Server) Mutate(ctx context.Context, req *authzextv1.MutateRequest) (*au
}
func (s *Server) mutate(ctx context.Context, req *authzextv1.MutateRequest) (*authzextv1.MutateResponse, error) {
if err := authorize(ctx, req.GetNamespace(), s.cfg); err != nil {
if err := authorizeWrite(ctx, req.GetNamespace(), s.cfg); err != nil {
return nil, err
}

View File

@@ -30,7 +30,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
setupMutateFolders(t, srv)
t.Run("should create new folder parent relation", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -61,7 +61,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
})
t.Run("should delete folder parent relation", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -88,7 +88,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
})
t.Run("should clean up all parent relations", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -115,7 +115,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
})
t.Run("should perform batch mutate if multiple operations are provided", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateOrgRoles(t *testing.T, srv *Server) {
setupMutateOrgRoles(t, srv)
t.Run("should update user org role and delete old role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -63,7 +63,7 @@ func testMutateOrgRoles(t *testing.T, srv *Server) {
})
t.Run("should add user org role and delete old role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -28,7 +28,7 @@ func testMutateResourcePermissions(t *testing.T, srv *Server) {
setupMutateResourcePermissions(t, srv)
t.Run("should create new resource permission", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -76,7 +76,7 @@ func testMutateResourcePermissions(t *testing.T, srv *Server) {
require.NoError(t, err)
require.Len(t, res.Tuples, 2)
_, err = srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err = srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateRoleBindings(t *testing.T, srv *Server) {
setupMutateRoleBindings(t, srv)
t.Run("should update user role and delete old role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -75,7 +75,7 @@ func testMutateRoleBindings(t *testing.T, srv *Server) {
})
t.Run("should assign role to basic role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateRoles(t *testing.T, srv *Server) {
setupMutateRoles(t, srv)
t.Run("should update role and delete old role permissions", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateTeamBindings(t *testing.T, srv *Server) {
setupMutateTeamBindings(t, srv)
t.Run("should update user team binding and delete old team binding", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -5,6 +5,8 @@ import (
openfgav1 "github.com/openfga/api/proto/openfga/v1"
"github.com/stretchr/testify/require"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/types/known/structpb"
iamv0 "github.com/grafana/grafana/apps/iam/pkg/apis/iam/v0alpha1"
@@ -33,7 +35,7 @@ func testMutate(t *testing.T, srv *Server) {
setupMutate(t, srv)
t.Run("should perform multiple mutate operations", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -133,6 +135,25 @@ func testMutate(t *testing.T, srv *Server) {
require.NoError(t, err)
require.Len(t, res.Tuples, 0)
})
t.Run("should reject mutate without zanzana:update", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
Operation: &v1.MutateOperation_SetFolderParent{
SetFolderParent: &v1.SetFolderParentOperation{
Folder: "new-folder",
Parent: "1",
DeleteExisting: false,
},
},
},
},
})
require.Error(t, err)
require.Equal(t, codes.PermissionDenied, status.Code(err))
})
}
func TestDeduplicateTupleKeys(t *testing.T) {

View File

@@ -14,6 +14,7 @@ import (
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
"github.com/grafana/grafana/pkg/services/authz/zanzana/store"
"github.com/grafana/grafana/pkg/services/sqlstore"
@@ -218,11 +219,21 @@ func setupOpenFGADatabase(t *testing.T, srv *Server, tuples []*openfgav1.TupleKe
}
func newContextWithNamespace() context.Context {
return newContextWithNamespaceAndPermissions()
}
func newContextWithNamespaceAndPermissions(perms ...string) context.Context {
ctx := context.Background()
ctx = claims.WithAuthInfo(ctx, authnlib.NewAccessTokenAuthInfo(authnlib.Claims[authnlib.AccessTokenClaims]{
Rest: authnlib.AccessTokenClaims{
Namespace: "*",
Namespace: "*",
Permissions: perms,
DelegatedPermissions: perms,
},
}))
return ctx
}
func newContextWithZanzanaUpdatePermission() context.Context {
return newContextWithNamespaceAndPermissions(zanzana.TokenPermissionUpdate)
}

View File

@@ -8,6 +8,7 @@ import (
openfgav1 "github.com/openfga/api/proto/openfga/v1"
"go.opentelemetry.io/otel/codes"
"google.golang.org/grpc/status"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
@@ -25,6 +26,9 @@ func (s *Server) Write(ctx context.Context, req *authzextv1.WriteRequest) (*auth
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
if _, ok := status.FromError(err); ok {
return nil, err
}
s.logger.Error("failed to perform write request", "error", err, "namespace", req.GetNamespace())
return nil, errors.New("failed to perform write request")
}
@@ -33,7 +37,7 @@ func (s *Server) Write(ctx context.Context, req *authzextv1.WriteRequest) (*auth
}
func (s *Server) write(ctx context.Context, req *authzextv1.WriteRequest) (*authzextv1.WriteResponse, error) {
if err := authorize(ctx, req.GetNamespace(), s.cfg); err != nil {
if err := authorizeWrite(ctx, req.GetNamespace(), s.cfg); err != nil {
return nil, err
}

View File

@@ -0,0 +1,46 @@
package server
import (
"testing"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
"github.com/stretchr/testify/require"
)
// TestWriteAuthorization verifies that Server.Write enforces the
// zanzana:update token permission: requests without it are rejected with
// PermissionDenied, requests with it succeed.
func TestWriteAuthorization(t *testing.T) {
	cfg := setting.NewCfg()
	testStore := sqlstore.NewTestStore(t, sqlstore.WithCfg(cfg))
	srv := setupOpenFGAServer(t, testStore, cfg)
	setup(t, srv)
	// A single valid write request reused by both subtests.
	req := &authzextv1.WriteRequest{
		Namespace: namespace,
		Writes: &authzextv1.WriteRequestWrites{
			TupleKeys: []*authzextv1.TupleKey{
				{
					// Folder parent tuples are valid without any relationship condition.
					User:     "folder:1",
					Relation: common.RelationParent,
					Object:   "folder:write-authz-test",
				},
			},
		},
	}
	t.Run("denies Write without zanzana:update", func(t *testing.T) {
		_, err := srv.Write(newContextWithNamespace(), req)
		require.Error(t, err)
		// The server must surface a gRPC PermissionDenied status, not a generic error.
		require.Equal(t, codes.PermissionDenied, status.Code(err))
	})
	t.Run("allows Write with zanzana:update", func(t *testing.T) {
		_, err := srv.Write(newContextWithZanzanaUpdatePermission(), req)
		require.NoError(t, err)
	})
}

View File

@@ -16,6 +16,9 @@ const (
TypeNamespace = common.TypeGroupResouce
)
// TokenPermissionUpdate is required for callers to perform write operations against Zanzana (Mutate/Write).
const TokenPermissionUpdate = "zanzana:update" //nolint:gosec // G101: permission identifier, not a credential.
const (
RelationTeamMember = common.RelationTeamMember
RelationTeamAdmin = common.RelationTeamAdmin

View File

@@ -15,6 +15,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/backend/httpclient"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/fs"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/plugins"
@@ -23,6 +24,8 @@ import (
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/services/pluginsintegration"
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginstore"
"github.com/grafana/grafana/pkg/services/searchV2"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tests/testinfra"
"github.com/grafana/grafana/pkg/tests/testsuite"
@@ -161,7 +164,9 @@ func TestIntegrationPluginManager(t *testing.T) {
pg := postgres.ProvideService()
my := mysql.ProvideService()
ms := mssql.ProvideService(cfg)
graf := grafanads.ProvideService(nil, features)
db := db.InitTestDB(t, sqlstore.InitTestDBOpt{Cfg: cfg})
sv2 := searchV2.ProvideService(cfg, db, nil, nil, tracer, features, nil, nil, nil)
graf := grafanads.ProvideService(sv2, nil, features)
pyroscope := pyroscope.ProvideService(hcp)
parca := parca.ProvideService(hcp)
zipkin := zipkin.ProvideService(hcp)

View File

@@ -35,6 +35,7 @@ import (
"github.com/grafana/grafana/pkg/services/provisioning/datasources"
"github.com/grafana/grafana/pkg/services/provisioning/plugins"
"github.com/grafana/grafana/pkg/services/quota"
"github.com/grafana/grafana/pkg/services/searchV2"
"github.com/grafana/grafana/pkg/services/secrets"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
@@ -56,6 +57,7 @@ func ProvideService(
dashboardService dashboardservice.DashboardService,
folderService folder.Service,
pluginSettings pluginsettings.Service,
searchService searchV2.SearchService,
quotaService quota.Service,
secrectService secrets.Service,
orgService org.Service,
@@ -82,6 +84,7 @@ func ProvideService(
datasourceService: datasourceService,
correlationsService: correlationsService,
pluginsSettings: pluginSettings,
searchService: searchService,
quotaService: quotaService,
secretService: secrectService,
log: log.New("provisioning"),
@@ -135,6 +138,9 @@ func (ps *ProvisioningServiceImpl) starting(ctx context.Context) error {
return err
}
}
if ps.dashboardProvisioner.HasDashboardSources() {
ps.searchService.TriggerReIndex()
}
return nil
}
@@ -188,6 +194,7 @@ func newProvisioningServiceImpl(
provisionDatasources func(context.Context, string, datasources.BaseDataSourceService, datasources.CorrelationsStore, org.Service) error,
provisionPlugins func(context.Context, string, pluginstore.Store, pluginsettings.Service, org.Service) error,
migratePrometheusType func(context.Context) error,
searchService searchV2.SearchService,
) (*ProvisioningServiceImpl, error) {
s := &ProvisioningServiceImpl{
log: log.New("provisioning"),
@@ -195,6 +202,7 @@ func newProvisioningServiceImpl(
provisionDatasources: provisionDatasources,
provisionPlugins: provisionPlugins,
Cfg: setting.NewCfg(),
searchService: searchService,
migratePrometheusType: migratePrometheusType,
}
@@ -230,6 +238,7 @@ type ProvisioningServiceImpl struct {
datasourceService datasourceservice.DataSourceService
correlationsService correlations.Service
pluginsSettings pluginsettings.Service
searchService searchV2.SearchService
quotaService quota.Service
secretService secrets.Service
folderService folder.Service

View File

@@ -20,6 +20,7 @@ import (
"github.com/grafana/grafana/pkg/services/provisioning/dashboards"
"github.com/grafana/grafana/pkg/services/provisioning/datasources"
"github.com/grafana/grafana/pkg/services/provisioning/utils"
"github.com/grafana/grafana/pkg/services/searchV2"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
)
@@ -158,6 +159,8 @@ func setup(t *testing.T) *serviceTestStruct {
pollChangesChannel <- ctx
}
searchStub := searchV2.NewStubSearchService()
service, err := newProvisioningServiceImpl(
func(context.Context, string, dashboardstore.DashboardProvisioningService, *setting.Cfg, org.Service, utils.DashboardStore, folder.Service, dualwrite.Service, *serverlock.ServerLockService) (dashboards.DashboardProvisioner, error) {
serviceTest.dashboardProvisionerInstantiations++
@@ -172,6 +175,7 @@ func setup(t *testing.T) *serviceTestStruct {
func(context.Context) error {
return nil
},
searchStub,
)
service.provisionAlerting = func(context.Context, prov_alerting.ProvisionerConfig) error {
return nil

View File

@@ -0,0 +1,255 @@
package searchV2
import (
"context"
"encoding/json"
"errors"
"fmt"
"sort"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/user"
)
// addAllowedActionsField appends an "allowed_actions" JSON field to the first
// frame of the search response. Each row holds the RBAC actions the user may
// perform on the row's entity and, for dashboards, on each referenced
// datasource (see createAllowedActions).
//
// Returns an error if the response has no frames, if entity references cannot
// be extracted, or if marshalling a row's actions fails.
func (s *StandardSearchService) addAllowedActionsField(ctx context.Context, orgId int64, user *user.SignedInUser, response *backend.DataResponse) error {
	// Fail fast on an empty response before doing any permission resolution;
	// previously this was only checked after all the work was done.
	if len(response.Frames) == 0 {
		return errors.New("empty response")
	}
	references, err := getEntityReferences(response)
	if err != nil {
		return err
	}
	allAllowedActions, err := s.createAllowedActions(ctx, orgId, user, references)
	if err != nil {
		return err
	}
	frame := response.Frames[0]
	allowedActionsField := data.NewFieldFromFieldType(data.FieldTypeJSON, len(allAllowedActions))
	allowedActionsField.Name = "allowed_actions"
	frame.Fields = append(frame.Fields, allowedActionsField)
	for i, actions := range allAllowedActions {
		js, err := json.Marshal(actions)
		if err != nil {
			// Previously ignored; surfacing the error avoids silently
			// writing corrupt/empty JSON cells into the frame.
			return err
		}
		allowedActionsField.Set(i, json.RawMessage(js))
	}
	return nil
}
// allowedActions is the per-entity payload serialized into each cell of the
// "allowed_actions" response field.
type allowedActions struct {
	EntityKind entityKind `json:"kind"`
	UID        string     `json:"uid"`
	Actions    []string   `json:"actions"`
}
// createAllowedActions resolves, for every entity reference, the list of RBAC
// actions the user may perform. The result is parallel to references: each
// element holds the entity's own actions first, followed by one entry per
// referenced datasource UID.
func (s *StandardSearchService) createAllowedActions(ctx context.Context, orgId int64, user *user.SignedInUser, references []entityReferences) ([][]allowedActions, error) {
	// Group all UIDs by entity kind so permissions are resolved once per kind.
	uidsPerKind := make(map[entityKind][]string)
	for _, refs := range references {
		if _, ok := uidsPerKind[refs.entityKind]; !ok {
			uidsPerKind[refs.entityKind] = []string{}
		}
		uidsPerKind[refs.entityKind] = append(uidsPerKind[refs.entityKind], refs.uid)
		// Referenced datasources are collected under their own kind.
		if len(refs.dsUids) > 0 {
			if _, ok := uidsPerKind[entityKindDatasource]; !ok {
				uidsPerKind[entityKindDatasource] = []string{}
			}
			uidsPerKind[entityKindDatasource] = append(uidsPerKind[entityKindDatasource], refs.dsUids...)
		}
	}
	allowedActionsByUid := make(map[entityKind]map[string][]string)
	for entKind, uids := range uidsPerKind {
		// Panels have no RBAC scope of their own: record empty action lists.
		if entKind == entityKindPanel {
			emptyAllowedActions := make(map[string][]string)
			for _, uid := range uids {
				emptyAllowedActions[uid] = []string{}
			}
			allowedActionsByUid[entityKindPanel] = emptyAllowedActions
		}
		// Map each kind to its RBAC scope prefix; unknown kinds are skipped.
		var prefix string
		switch entKind {
		case entityKindFolder:
			prefix = dashboards.ScopeFoldersPrefix
		case entityKindDatasource:
			prefix = datasources.ScopePrefix
		case entityKindDashboard:
			prefix = dashboards.ScopeDashboardsPrefix
		default:
			continue
		}
		allowedActionsByUid[entKind] = s.getAllowedActionsByUid(ctx, user, orgId, prefix, uids)
	}
	// Datasource actions are looked up per-row below; default to empty map.
	dsActionsByUid, ok := allowedActionsByUid[entityKindDatasource]
	if !ok {
		dsActionsByUid = make(map[string][]string)
	}
	out := make([][]allowedActions, 0, len(references))
	for _, ref := range references {
		var actions []allowedActions
		// The entity's own actions come first (empty slice when none found,
		// so JSON serializes as [] rather than null).
		selfActions := make([]string, 0)
		if selfKindActions, ok := allowedActionsByUid[ref.entityKind]; ok {
			if self, ok := selfKindActions[ref.uid]; ok && len(self) > 0 {
				selfActions = self
			}
		}
		actions = append(actions, allowedActions{
			EntityKind: ref.entityKind,
			UID:        ref.uid,
			Actions:    selfActions,
		})
		// Then one entry per referenced datasource.
		for _, dsUid := range ref.dsUids {
			dsActions := make([]string, 0)
			if dsAct, ok := dsActionsByUid[dsUid]; ok {
				dsActions = dsAct
			}
			actions = append(actions, allowedActions{
				EntityKind: entityKindDatasource,
				UID:        dsUid,
				Actions:    dsActions,
			})
		}
		out = append(out, actions)
	}
	return out, nil
}
// getAllowedActionsByUid resolves, per resource UID, the sorted list of
// actions granted by the user's permissions in orgID against scopes built
// from the given prefix. Returns an empty map when the user has no
// permissions (at all, or for this org).
func (s *StandardSearchService) getAllowedActionsByUid(ctx context.Context, user *user.SignedInUser,
	orgID int64, prefix string, resourceIDs []string) map[string][]string {
	if user.Permissions == nil {
		return map[string][]string{}
	}
	permissions, ok := user.Permissions[orgID]
	if !ok {
		return map[string][]string{}
	}
	// GetResourcesMetadata expects the UIDs of interest as a set.
	wanted := make(map[string]bool, len(resourceIDs))
	for _, id := range resourceIDs {
		wanted[id] = true
	}
	result := make(map[string][]string)
	for uid, meta := range accesscontrol.GetResourcesMetadata(ctx, permissions, prefix, wanted) {
		var granted []string
		for action := range meta {
			granted = append(granted, action)
		}
		// Map iteration order is random; sort for deterministic output.
		sort.Strings(granted)
		result[uid] = granted
	}
	return result
}
// entityReferences ties one search-result row (kind + uid) to the datasource
// UIDs it references; dsUids is only populated for dashboard rows (see
// getEntityReferences).
type entityReferences struct {
	entityKind entityKind
	uid        string
	dsUids     []string
}
// getEntityReferences extracts, from the first frame of the search response,
// one entityReferences per row: the row's kind and uid, plus — for dashboard
// rows only — the datasource UIDs parsed from the JSON "ds_uid" column.
//
// Returns an error if the response is nil/errored/empty, a required column
// ("kind", "ds_uid", "uid") is missing, the columns have mismatched lengths,
// or a cell holds an unexpected value.
func getEntityReferences(resp *backend.DataResponse) ([]entityReferences, error) {
	if resp == nil {
		return nil, errors.New("nil response")
	}
	if resp.Error != nil {
		return nil, resp.Error
	}
	if len(resp.Frames) == 0 {
		return nil, errors.New("empty response")
	}
	frame := resp.Frames[0]
	kindField, idx := frame.FieldByName("kind")
	if idx == -1 {
		return nil, errors.New("no kind field")
	}
	dsUidField, idx := frame.FieldByName("ds_uid")
	if idx == -1 {
		return nil, errors.New("no ds_uid field")
	}
	uidField, idx := frame.FieldByName("uid")
	if idx == -1 {
		// Fixed: this previously reported "no dash_uid field" although the
		// lookup is for the "uid" column.
		return nil, errors.New("no uid field")
	}
	if dsUidField.Len() != uidField.Len() {
		return nil, errors.New("mismatched lengths")
	}
	var out []entityReferences
	for i := 0; i < dsUidField.Len(); i++ {
		kind, ok := kindField.At(i).(string)
		if !ok || kind == "" {
			return nil, errors.New("invalid value in kind field")
		}
		uid, ok := uidField.At(i).(string)
		if !ok || uid == "" {
			return nil, errors.New("invalid value in uid field")
		}
		// Non-dashboard rows carry no datasource references.
		if entityKind(kind) != entityKindDashboard {
			out = append(out, entityReferences{
				entityKind: entityKind(kind),
				uid:        uid,
			})
			continue
		}
		// Dashboards additionally carry a JSON array of datasource UIDs.
		// (The original re-extracted and shadowed uidField as a string here;
		// the already-validated uid is identical, so that step is dropped.)
		rawDsUids, ok := dsUidField.At(i).(json.RawMessage)
		if !ok {
			return nil, fmt.Errorf("invalid value for uid %s in ds_uid field: %s", uid, dsUidField.At(i))
		}
		var uids []string
		if rawDsUids != nil {
			jsonValue, err := rawDsUids.MarshalJSON()
			if err != nil {
				return nil, err
			}
			if err := json.Unmarshal(jsonValue, &uids); err != nil {
				return nil, err
			}
		}
		out = append(out, entityReferences{entityKind: entityKindDashboard, uid: uid, dsUids: uids})
	}
	return out, nil
}

View File

@@ -0,0 +1,125 @@
package searchV2
import (
"context"
_ "embed"
"fmt"
"testing"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/tracing"
ac "github.com/grafana/grafana/pkg/services/accesscontrol"
accesscontrolmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/user"
"github.com/grafana/grafana/pkg/setting"
)
var (
	// Embedded fixture: a serialized search response frame used as test input.
	//go:embed testdata/search_response_frame.json
	exampleListFrameJSON string

	orgId = int64(1)

	// permissionsWithScopeAll grants every datasource/folder/dashboard action
	// with wildcard ("all") scopes.
	permissionsWithScopeAll = map[string][]string{
		datasources.ActionIDRead:           {datasources.ScopeAll},
		datasources.ActionDelete:           {datasources.ScopeAll},
		ac.ActionDatasourcesExplore:        {datasources.ScopeAll},
		datasources.ActionQuery:            {datasources.ScopeAll},
		datasources.ActionRead:             {datasources.ScopeAll},
		datasources.ActionWrite:            {datasources.ScopeAll},
		datasources.ActionPermissionsRead:  {datasources.ScopeAll},
		datasources.ActionPermissionsWrite: {datasources.ScopeAll},

		dashboards.ActionFoldersCreate:           {dashboards.ScopeFoldersAll},
		dashboards.ActionFoldersRead:             {dashboards.ScopeFoldersAll},
		dashboards.ActionFoldersWrite:            {dashboards.ScopeFoldersAll},
		dashboards.ActionFoldersDelete:           {dashboards.ScopeFoldersAll},
		dashboards.ActionFoldersPermissionsRead:  {dashboards.ScopeFoldersAll},
		dashboards.ActionFoldersPermissionsWrite: {dashboards.ScopeFoldersAll},

		dashboards.ActionDashboardsCreate:           {dashboards.ScopeDashboardsAll},
		dashboards.ActionDashboardsRead:             {dashboards.ScopeDashboardsAll},
		dashboards.ActionDashboardsWrite:            {dashboards.ScopeDashboardsAll},
		dashboards.ActionDashboardsDelete:           {dashboards.ScopeDashboardsAll},
		dashboards.ActionDashboardsPermissionsRead:  {dashboards.ScopeDashboardsAll},
		dashboards.ActionDashboardsPermissionsWrite: {dashboards.ScopeDashboardsAll},
	}

	// permissionsWithUidScopes grants only a handful of actions, each scoped
	// to specific resource UIDs; all other actions have empty scope lists.
	permissionsWithUidScopes = map[string][]string{
		datasources.ActionIDRead:    {},
		datasources.ActionDelete:    {},
		ac.ActionDatasourcesExplore: {},
		datasources.ActionQuery:     {},
		datasources.ActionRead: {
			datasources.ScopeProvider.GetResourceScopeUID("datasource-2"),
			datasources.ScopeProvider.GetResourceScopeUID("datasource-3"),
		},
		datasources.ActionWrite:            {},
		datasources.ActionPermissionsRead:  {},
		datasources.ActionPermissionsWrite: {},

		dashboards.ActionFoldersCreate: {},
		dashboards.ActionFoldersRead: {
			dashboards.ScopeFoldersProvider.GetResourceScopeUID("ujaM1h6nz"),
		},
		dashboards.ActionFoldersWrite:            {},
		dashboards.ActionFoldersDelete:           {},
		dashboards.ActionFoldersPermissionsRead:  {},
		dashboards.ActionFoldersPermissionsWrite: {},

		dashboards.ActionDashboardsCreate: {},
		dashboards.ActionDashboardsRead:   {},
		dashboards.ActionDashboardsWrite: {
			dashboards.ScopeDashboardsProvider.GetResourceScopeUID("7MeksYbmk"),
		},
		dashboards.ActionDashboardsDelete:           {},
		dashboards.ActionDashboardsPermissionsRead:  {},
		dashboards.ActionDashboardsPermissionsWrite: {},
	}
)
// service builds a StandardSearchService for tests; most dependencies are nil
// or mocks (only the access-control mock, tracer and feature flags are real).
func service(t *testing.T) *StandardSearchService {
	service, ok := ProvideService(&setting.Cfg{Search: setting.SearchSettings{}},
		nil, nil, accesscontrolmock.New(), tracing.InitializeTracerForTest(), featuremgmt.WithFeatures(),
		nil, nil, nil).(*StandardSearchService)
	// ProvideService returns an interface; tests need the concrete type.
	require.True(t, ok)
	return service
}
// TestAllowedActionsForPermissionsWithScopeAll golden-tests the
// allowed_actions frame decoration for two permission shapes: wildcard
// ("all") scopes and per-UID scopes.
func TestAllowedActionsForPermissionsWithScopeAll(t *testing.T) {
	tests := []struct {
		name        string
		permissions map[string][]string
	}{
		{
			name:        "scope_all",
			permissions: permissionsWithScopeAll,
		},
		{
			name:        "scope_uids",
			permissions: permissionsWithUidScopes,
		},
	}
	for _, tt := range tests {
		tt := tt
		// Run each case as a named subtest so failures are attributable to a
		// specific permission shape (the original looped without t.Run).
		t.Run(tt.name, func(t *testing.T) {
			frame := &data.Frame{}
			err := frame.UnmarshalJSON([]byte(exampleListFrameJSON))
			require.NoError(t, err)
			err = service(t).addAllowedActionsField(context.Background(), orgId, &user.SignedInUser{
				Permissions: map[int64]map[string][]string{
					orgId: tt.permissions,
				},
			}, &backend.DataResponse{
				Frames: []*data.Frame{frame},
			})
			require.NoError(t, err)
			// Compare the decorated frame against the golden file per case.
			experimental.CheckGoldenJSONFrame(t, "testdata", fmt.Sprintf("allowed_actions_%s.golden", tt.name), frame, true)
		})
	}
}

View File

@@ -0,0 +1,58 @@
package searchV2
import (
"context"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/folder"
"github.com/grafana/grafana/pkg/services/user"
)
// ResourceFilter reports whether the caller has the requested permission on a
// resource, identified by its kind, uid and parent folder UID.
type ResourceFilter func(kind entityKind, uid, parentUID string) bool

// FutureAuthService is eventually to be implemented by the security service.
type FutureAuthService interface {
	GetDashboardReadFilter(ctx context.Context, orgID int64, user *user.SignedInUser) (ResourceFilter, error)
}

// Compile-time check that simpleAuthService satisfies FutureAuthService.
var _ FutureAuthService = (*simpleAuthService)(nil)

// simpleAuthService implements FutureAuthService on top of the SQL store,
// access control and folder services.
type simpleAuthService struct {
	sql           db.DB
	ac            accesscontrol.Service
	folderService folder.Service
	logger        log.Logger
}
// GetDashboardReadFilter returns a ResourceFilter that grants read access to
// folders and dashboards based on the user's folder/dashboard read
// permissions, including scopes inherited from parent folders. All other
// entity kinds (panels, datasources, queries) are denied.
func (a *simpleAuthService) GetDashboardReadFilter(ctx context.Context, orgID int64, user *user.SignedInUser) (ResourceFilter, error) {
	canReadDashboard, canReadFolder := accesscontrol.Checker(user, dashboards.ActionDashboardsRead), accesscontrol.Checker(user, dashboards.ActionFoldersRead)
	return func(kind entityKind, uid, parent string) bool {
		switch kind {
		case entityKindFolder:
			// Inherited scopes from ancestor folders; a lookup failure only
			// narrows the scope set, it does not deny outright.
			scopes, err := dashboards.GetInheritedScopes(ctx, orgID, uid, a.folderService)
			if err != nil {
				a.logger.Debug("Could not retrieve inherited folder scopes:", "err", err)
			}
			scopes = append(scopes, dashboards.ScopeFoldersProvider.GetResourceScopeUID(uid))
			return canReadFolder(scopes...)
		case entityKindDashboard:
			// Dashboards inherit from their parent folder's ancestors.
			scopes, err := dashboards.GetInheritedScopes(ctx, orgID, parent, a.folderService)
			if err != nil {
				a.logger.Debug("Could not retrieve inherited folder scopes:", "err", err)
			}
			scopes = append(scopes, dashboards.ScopeDashboardsProvider.GetResourceScopeUID(uid))
			scopes = append(scopes, dashboards.ScopeFoldersProvider.GetResourceScopeUID(parent))
			return canReadDashboard(scopes...)
		case entityKindPanel, entityKindDatasource, entityKindQuery:
			// Not a dashboard or folder. Assume no access.
			fallthrough
		default:
			return false
		}
	}, nil
}

View File

@@ -0,0 +1,767 @@
package searchV2
import (
"context"
"encoding/json"
"fmt"
"strconv"
"strings"
"time"
"github.com/blugelabs/bluge"
"github.com/blugelabs/bluge/search"
"github.com/blugelabs/bluge/search/aggregations"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/slugify"
"github.com/grafana/grafana/pkg/services/folder"
"github.com/grafana/grafana/pkg/services/store/entity"
)
// Bluge document field names used by the dashboard search index.
const (
	documentFieldUID         = "_id" // actually UID!! but bluge likes "_id"
	documentFieldKind        = "kind"
	documentFieldTag         = "tag"
	documentFieldURL         = "url"
	documentFieldName        = "name"
	documentFieldName_sort   = "name_sort"  // normalized name used for sorting
	documentFieldName_ngram  = "name_ngram" // ngram-analyzed name for partial matching
	documentFieldLocation    = "location"   // parent path
	documentFieldPanelType   = "panel_type"
	documentFieldTransformer = "transformer"
	documentFieldDSUID       = "ds_uid"
	documentFieldDSType      = "ds_type"
	DocumentFieldCreatedAt   = "created_at"
	DocumentFieldUpdatedAt   = "updated_at"
)
// initOrgIndex builds an in-memory bluge index for one org from the given
// dashboards (folders and dashboards alike). Folders are indexed first so
// that dashboards can resolve their folder UID via folderIdLookup; each
// dashboard's panels are indexed under the "<folderUID>/<dashUID>" location.
// extendDoc lets callers add extra fields to every folder/dashboard document.
func initOrgIndex(dashboards []dashboard, logger log.Logger, extendDoc ExtendDashboardFunc) (*orgIndex, error) {
	dashboardWriter, err := bluge.OpenWriter(bluge.InMemoryOnlyConfig())
	if err != nil {
		return nil, fmt.Errorf("error opening writer: %v", err)
	}
	// Not closing Writer here since we use it later while processing dashboard change events.

	start := time.Now()
	label := start

	batch := bluge.NewBatch()

	// In order to reduce memory usage while initial indexing we are limiting
	// the size of batch here.
	docsInBatch := 0
	maxBatchSize := 100

	// flushIfRequired writes the batch to the index once it reaches
	// maxBatchSize documents (or unconditionally when force is true) and
	// resets the batch and counter.
	flushIfRequired := func(force bool) error {
		docsInBatch++
		needFlush := force || (maxBatchSize > 0 && docsInBatch >= maxBatchSize)
		if !needFlush {
			return nil
		}
		err := dashboardWriter.Batch(batch)
		if err != nil {
			return err
		}
		docsInBatch = 0
		batch.Reset()
		return nil
	}

	// First index the folders to construct folderIdLookup.
	folderIdLookup := make(map[int64]string, 50)
	// Meaning: a dashboard without a folder (folderID 0) lives in "General".
	folderIdLookup[0] = folder.GeneralFolderUID
	for _, dash := range dashboards {
		if !dash.isFolder {
			continue
		}
		doc := getFolderDashboardDoc(dash)
		if err := extendDoc(dash.uid, doc); err != nil {
			return nil, err
		}
		batch.Insert(doc)
		if err := flushIfRequired(false); err != nil {
			return nil, err
		}
		uid := dash.uid
		folderIdLookup[dash.id] = uid
	}

	// Then each dashboard.
	for _, dash := range dashboards {
		if dash.isFolder {
			continue
		}
		folderUID := folderIdLookup[dash.folderID]
		location := folderUID
		doc := getNonFolderDashboardDoc(dash, location)
		if err := extendDoc(dash.uid, doc); err != nil {
			return nil, err
		}
		batch.Insert(doc)
		if err := flushIfRequired(false); err != nil {
			return nil, err
		}
		// Index each panel in dashboard.
		if location != "" {
			location += "/"
		}
		location += dash.uid
		docs := getDashboardPanelDocs(dash, location)
		for _, panelDoc := range docs {
			batch.Insert(panelDoc)
			if err := flushIfRequired(false); err != nil {
				return nil, err
			}
		}
	}

	// Flush docs in batch with force as we are in the end.
	if err := flushIfRequired(true); err != nil {
		return nil, err
	}

	logger.Info("Finish inserting docs into index", "elapsed", time.Since(label))
	logger.Info("Finish building index", "totalElapsed", time.Since(start))

	// err is necessarily nil here (any non-nil err returned earlier).
	return &orgIndex{
		writers: map[indexType]*bluge.Writer{
			indexTypeDashboard: dashboardWriter,
		},
	}, err
}
// getFolderDashboardDoc builds the bluge document for a folder entry. An
// empty UID is mapped to the synthetic "General" folder at /dashboards.
func getFolderDashboardDoc(dash dashboard) *bluge.Document {
	uid := dash.uid
	url := fmt.Sprintf("/dashboards/f/%s/%s", dash.uid, dash.slug)
	if uid == "" {
		uid = "general"
		url = "/dashboards"
		dash.summary.Name = "General"
		dash.summary.Description = ""
	}
	doc := newSearchDocument(uid, dash.summary.Name, dash.summary.Description, url)
	doc.AddField(bluge.NewKeywordField(documentFieldKind, string(entityKindFolder)).Aggregatable().StoreValue())
	doc.AddField(bluge.NewDateTimeField(DocumentFieldCreatedAt, dash.created).Sortable().StoreValue())
	doc.AddField(bluge.NewDateTimeField(DocumentFieldUpdatedAt, dash.updated).Sortable().StoreValue())
	return doc
}
// getNonFolderDashboardDoc builds the bluge document for a dashboard entry:
// kind, parent location, timestamps, tag keys, and the type/UID of every
// datasource the dashboard references.
func getNonFolderDashboardDoc(dash dashboard, location string) *bluge.Document {
	url := fmt.Sprintf("/d/%s/%s", dash.uid, dash.slug)

	// Dashboard document
	doc := newSearchDocument(dash.uid, dash.summary.Name, dash.summary.Description, url).
		AddField(bluge.NewKeywordField(documentFieldKind, string(entityKindDashboard)).Aggregatable().StoreValue()).
		AddField(bluge.NewKeywordField(documentFieldLocation, location).Aggregatable().StoreValue()).
		AddField(bluge.NewDateTimeField(DocumentFieldCreatedAt, dash.created).Sortable().StoreValue()).
		AddField(bluge.NewDateTimeField(DocumentFieldUpdatedAt, dash.updated).Sortable().StoreValue())

	// dashboards only use the key part of labels
	for k := range dash.summary.Labels {
		doc.AddField(bluge.NewKeywordField(documentFieldTag, k).
			StoreValue().
			Aggregatable().
			SearchTermPositions())
	}

	// Index datasource references so searches can filter by ds type/uid.
	for _, ref := range dash.summary.References {
		if ref.Family == entity.StandardKindDataSource {
			if ref.Type != "" {
				doc.AddField(bluge.NewKeywordField(documentFieldDSType, ref.Type).
					StoreValue().
					Aggregatable().
					SearchTermPositions())
			}
			if ref.Identifier != "" {
				doc.AddField(bluge.NewKeywordField(documentFieldDSUID, ref.Identifier).
					StoreValue().
					Aggregatable().
					SearchTermPositions())
			}
		}
	}

	return doc
}
// getDashboardPanelDocs builds one bluge document per panel of the dashboard
// (row panels are skipped). Each panel's URL deep-links into the dashboard
// via ?viewPanel=<id>; datasource, panel-type and transformer references are
// indexed as additional fields.
func getDashboardPanelDocs(dash dashboard, location string) []*bluge.Document {
	dashURL := fmt.Sprintf("/d/%s/%s", dash.uid, slugify.Slugify(dash.summary.Name))

	// pre-allocating a little bit more than necessary, possibly
	docs := make([]*bluge.Document, 0, len(dash.summary.Nested))
	for _, panel := range dash.summary.Nested {
		if panel.Fields["type"] == "row" {
			continue // skip rows
		}
		// Panel UIDs end in "#<panelId>"; panels with a non-numeric suffix
		// are silently skipped.
		idx := strings.LastIndex(panel.UID, "#")
		panelId, err := strconv.Atoi(panel.UID[idx+1:])
		if err != nil {
			continue
		}

		url := fmt.Sprintf("%s?viewPanel=%d", dashURL, panelId)
		doc := newSearchDocument(panel.UID, panel.Name, panel.Description, url).
			AddField(bluge.NewKeywordField(documentFieldLocation, location).Aggregatable().StoreValue()).
			AddField(bluge.NewKeywordField(documentFieldKind, string(entityKindPanel)).Aggregatable().StoreValue()) // likely want independent index for this

		for _, ref := range panel.References {
			switch ref.Family {
			case entity.StandardKindDashboard:
				if ref.Type != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldDSType, ref.Type).
						StoreValue().
						Aggregatable().
						SearchTermPositions())
				}
				if ref.Identifier != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldDSUID, ref.Identifier).
						StoreValue().
						Aggregatable().
						SearchTermPositions())
				}
			case entity.ExternalEntityReferencePlugin:
				if ref.Type == entity.StandardKindPanel && ref.Identifier != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldPanelType, ref.Identifier).Aggregatable().StoreValue())
				}
			case entity.ExternalEntityReferenceRuntime:
				if ref.Type == entity.ExternalEntityReferenceRuntime_Transformer && ref.Identifier != "" {
					doc.AddField(bluge.NewKeywordField(documentFieldTransformer, ref.Identifier).Aggregatable())
				}
			}
		}

		docs = append(docs, doc)
	}
	return docs
}
// newSearchDocument creates the base bluge document shared by all entity
// kinds. The name is indexed three ways — full-text, ngram (partial match)
// and a normalized sortable keyword — and the URL is stored verbatim.
func newSearchDocument(uid, name, descr, url string) *bluge.Document {
	doc := bluge.NewDocument(uid)
	if name != "" {
		doc.AddField(bluge.NewTextField(documentFieldName, name).StoreValue().SearchTermPositions())
		doc.AddField(bluge.NewTextField(documentFieldName_ngram, name).WithAnalyzer(ngramIndexAnalyzer))
		// Names that normalize to an empty sort key get no sort field.
		if sortStr := formatForNameSortField(name); sortStr != "" {
			doc.AddField(bluge.NewKeywordField(documentFieldName_sort, sortStr).Sortable())
		}
	}
	if url != "" {
		doc.AddField(bluge.NewKeywordField(documentFieldURL, url).StoreValue())
	}
	return doc
}
// getDashboardPanelIDs returns the UIDs of all panel documents indexed at the
// given location (i.e. all panels of one dashboard).
func getDashboardPanelIDs(index *orgIndex, panelLocation string) ([]string, error) {
	var panelIDs []string

	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		return nil, err
	}
	defer cancel()

	// Match panels exactly at panelLocation.
	fullQuery := bluge.NewBooleanQuery()
	fullQuery.AddMust(bluge.NewTermQuery(panelLocation).SetField(documentFieldLocation))
	fullQuery.AddMust(bluge.NewTermQuery(string(entityKindPanel)).SetField(documentFieldKind))
	req := bluge.NewAllMatches(fullQuery)
	documentMatchIterator, err := reader.Search(context.Background(), req)
	if err != nil {
		return nil, err
	}
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {
		// load the identifier for this match
		err = match.VisitStoredFields(func(field string, value []byte) bool {
			if field == documentFieldUID {
				panelIDs = append(panelIDs, string(value))
			}
			return true
		})
		if err != nil {
			return nil, err
		}
		// load the next document match
		match, err = documentMatchIterator.Next()
	}
	// err carries any iterator error (nil when the iterator was exhausted cleanly).
	return panelIDs, err
}
// getDocsIDsByLocationPrefix returns the UIDs of all documents whose location
// starts with the given prefix — i.e. everything nested under a folder or
// dashboard path.
func getDocsIDsByLocationPrefix(index *orgIndex, prefix string) ([]string, error) {
	var ids []string
	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		return nil, fmt.Errorf("error getting reader: %w", err)
	}
	defer cancel()

	fullQuery := bluge.NewBooleanQuery()
	fullQuery.AddMust(bluge.NewPrefixQuery(prefix).SetField(documentFieldLocation))
	req := bluge.NewAllMatches(fullQuery)
	documentMatchIterator, err := reader.Search(context.Background(), req)
	if err != nil {
		return nil, fmt.Errorf("error search: %w", err)
	}
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {
		// load the identifier for this match
		err = match.VisitStoredFields(func(field string, value []byte) bool {
			if field == documentFieldUID {
				ids = append(ids, string(value))
			}
			return true
		})
		if err != nil {
			return nil, err
		}
		// load the next document match
		match, err = documentMatchIterator.Next()
	}
	// err carries any iterator error (nil on clean exhaustion).
	return ids, err
}
// getDashboardLocation looks up the stored location (parent folder path) of
// the dashboard with the given UID. found is false when no matching
// dashboard document exists in the index.
func getDashboardLocation(index *orgIndex, dashboardUID string) (string, bool, error) {
	var dashboardLocation string
	var found bool
	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		return "", false, err
	}
	defer cancel()

	// Exact match on UID, restricted to dashboard-kind documents.
	fullQuery := bluge.NewBooleanQuery()
	fullQuery.AddMust(bluge.NewTermQuery(dashboardUID).SetField(documentFieldUID))
	fullQuery.AddMust(bluge.NewTermQuery(string(entityKindDashboard)).SetField(documentFieldKind))
	req := bluge.NewAllMatches(fullQuery)
	documentMatchIterator, err := reader.Search(context.Background(), req)
	if err != nil {
		return "", false, err
	}
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {
		// load the identifier for this match
		err = match.VisitStoredFields(func(field string, value []byte) bool {
			if field == documentFieldLocation {
				dashboardLocation = string(value)
				found = true
				// Stop visiting further stored fields once found.
				return false
			}
			return true
		})
		if err != nil {
			return "", false, err
		}
		// load the next document match
		match, err = documentMatchIterator.Next()
	}
	return dashboardLocation, found, err
}
//nolint:gocyclo
// doSearchQuery executes a dashboard search against the org index and renders
// the matches into a "search-results" data frame. The query is a conjunction of
// the permission filter plus any kind/UID/tag/panel-type/datasource/location
// constraints present in q; free-text queries add name-prefix and (optionally)
// ngram clauses. Errors are reported via response.Error.
func doSearchQuery(
	ctx context.Context,
	logger log.Logger,
	index *orgIndex,
	filter ResourceFilter,
	q DashboardQuery,
	extender QueryExtender,
	appSubUrl string,
) *backend.DataResponse {
	response := &backend.DataResponse{}
	header := &customMeta{}

	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		// FIX: this logger takes structured key/value pairs, not printf verbs.
		// Previously: logger.Error("Error getting reader for dashboard index: %v", err)
		// which logged a literal "%v" and an unpaired value.
		logger.Error("Error getting reader for dashboard index", "err", err)
		response.Error = err
		return response
	}
	defer cancel()

	hasConstraints := false
	fullQuery := bluge.NewBooleanQuery()
	// Permission filter is always applied first so no unauthorized doc can match.
	fullQuery.AddMust(newPermissionFilter(filter, logger))

	// Only show dashboard / folders / panels.
	if len(q.Kind) > 0 {
		bq := bluge.NewBooleanQuery()
		for _, k := range q.Kind {
			bq.AddShould(bluge.NewTermQuery(k).SetField(documentFieldKind))
		}
		fullQuery.AddMust(bq)
		hasConstraints = true
	}

	// Explicit UID lookup (stars etc)
	if len(q.UIDs) > 0 {
		count := len(q.UIDs) + 3
		bq := bluge.NewBooleanQuery()
		for i, v := range q.UIDs {
			// Decreasing boost preserves the caller-supplied UID ordering in results.
			bq.AddShould(bluge.NewTermQuery(v).
				SetField(documentFieldUID).
				SetBoost(float64(count - i)))
		}
		fullQuery.AddMust(bq)
		hasConstraints = true
	}

	// Tags: all requested tags must be present (AddMust within the sub-query).
	if len(q.Tags) > 0 {
		bq := bluge.NewBooleanQuery()
		for _, v := range q.Tags {
			bq.AddMust(bluge.NewTermQuery(v).SetField(documentFieldTag))
		}
		fullQuery.AddMust(bq)
		hasConstraints = true
	}

	// Panel type
	if q.PanelType != "" {
		fullQuery.AddMust(bluge.NewTermQuery(q.PanelType).SetField(documentFieldPanelType))
		hasConstraints = true
	}

	// Datasource
	if q.Datasource != "" {
		fullQuery.AddMust(bluge.NewTermQuery(q.Datasource).SetField(documentFieldDSUID))
		hasConstraints = true
	}

	// DatasourceType
	if q.DatasourceType != "" {
		fullQuery.AddMust(bluge.NewTermQuery(q.DatasourceType).SetField(documentFieldDSType))
		hasConstraints = true
	}

	// Folder
	if q.Location != "" {
		fullQuery.AddMust(bluge.NewTermQuery(q.Location).SetField(documentFieldLocation))
		hasConstraints = true
	}

	isMatchAllQuery := q.Query == "*" || q.Query == ""
	if isMatchAllQuery {
		if !hasConstraints {
			// No constraints and no text: return everything the filter allows.
			fullQuery.AddShould(bluge.NewMatchAllQuery())
		}
	} else {
		bq := bluge.NewBooleanQuery()
		// Name-prefix match is boosted well above ngram matches.
		bq.AddShould(NewSubstringQuery(formatForNameSortField(q.Query)).
			SetField(documentFieldName_sort).
			SetBoost(6))
		if shouldUseNgram(q) {
			bq.AddShould(bluge.NewMatchQuery(q.Query).
				SetField(documentFieldName_ngram).
				SetOperator(bluge.MatchQueryOperatorAnd). // all terms must match
				SetAnalyzer(ngramQueryAnalyzer).SetBoost(1))
		}
		fullQuery.AddMust(bq)
	}

	limit := 50 // default view
	if q.Limit > 0 {
		limit = q.Limit
	}

	req := bluge.NewTopNSearch(limit, fullQuery)
	if q.From > 0 {
		req.SetFrom(q.From)
	}
	if q.Explain {
		req.ExplainScores()
	}
	req.WithStandardAggregations()

	if q.Sort != "" {
		req.SortBy([]string{q.Sort})
		header.SortBy = strings.TrimPrefix(q.Sort, "-")
	}

	for _, t := range q.Facet {
		lim := t.Limit
		if lim < 1 {
			lim = 50
		}
		req.AddAggregation(t.Field, aggregations.NewTermsAggregation(search.Field(t.Field), lim))
	}

	// execute this search on the reader
	documentMatchIterator, err := reader.Search(ctx, req)
	if err != nil {
		logger.Error("Error executing search", "err", err)
		response.Error = err
		return response
	}

	// One column per stored field; score/explain columns only when requested.
	fScore := data.NewFieldFromFieldType(data.FieldTypeFloat64, 0)
	fUID := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	fKind := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	fPType := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	fName := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	fURL := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	fLocation := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	fTags := data.NewFieldFromFieldType(data.FieldTypeNullableJSON, 0) //nolint:staticcheck
	fDSUIDs := data.NewFieldFromFieldType(data.FieldTypeJSON, 0)
	fExplain := data.NewFieldFromFieldType(data.FieldTypeNullableJSON, 0) //nolint:staticcheck

	fScore.Name = "score"
	fUID.Name = "uid"
	fKind.Name = "kind"
	fName.Name = "name"
	fLocation.Name = "location"
	fURL.Name = "url"
	fURL.Config = &data.FieldConfig{
		Links: []data.DataLink{
			{Title: "link", URL: "${__value.text}"},
		},
	}
	fPType.Name = "panel_type"
	fDSUIDs.Name = "ds_uid"
	fTags.Name = "tags"
	fExplain.Name = "explain"

	frame := data.NewFrame("Query results", fKind, fUID, fName, fPType, fURL, fTags, fDSUIDs, fLocation)
	if q.Explain {
		frame.Fields = append(frame.Fields, fScore, fExplain)
	}
	frame.SetMeta(&data.FrameMeta{
		Type:   "search-results",
		Custom: header,
	})

	fieldLen := 0
	ext := extender.GetFramer(frame)

	locationItems := make(map[string]bool, 50)

	// iterate through the document matches
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {
		uid := ""
		kind := ""
		ptype := ""
		name := ""
		url := ""
		loc := ""
		var dsUIDs []string
		var tags []string

		err = match.VisitStoredFields(func(field string, value []byte) bool {
			switch field {
			case documentFieldUID:
				uid = string(value)
			case documentFieldKind:
				kind = string(value)
			case documentFieldPanelType:
				ptype = string(value)
			case documentFieldName:
				name = string(value)
			case documentFieldURL:
				url = appSubUrl + string(value)
			case documentFieldLocation:
				loc = string(value)
			case documentFieldDSUID:
				dsUIDs = append(dsUIDs, string(value))
			case documentFieldTag:
				tags = append(tags, string(value))
			default:
				// Unknown fields are handed to the query extender.
				ext(field, value)
			}
			return true
		})
		if err != nil {
			logger.Error("Error loading stored fields", "err", err)
			response.Error = err
			return response
		}

		fKind.Append(kind)
		fUID.Append(uid)
		fPType.Append(ptype)
		fName.Append(name)
		fURL.Append(url)
		fLocation.Append(loc)

		// set a key for all path parts we return
		if !q.SkipLocation {
			for _, v := range strings.Split(loc, "/") {
				locationItems[v] = true
			}
		}

		if len(tags) > 0 {
			js, _ := json.Marshal(tags)
			jsb := json.RawMessage(js)
			fTags.Append(&jsb)
		} else {
			fTags.Append(nil)
		}

		if len(dsUIDs) == 0 {
			dsUIDs = []string{}
		}
		js, _ := json.Marshal(dsUIDs)
		jsb := json.RawMessage(js)
		fDSUIDs.Append(jsb)

		if q.Explain {
			if isMatchAllQuery {
				// Match-all scores are meaningless; use the row index instead.
				fScore.Append(float64(fieldLen + q.From))
			} else {
				fScore.Append(match.Score)
			}
			if match.Explanation != nil {
				js, _ := json.Marshal(&match.Explanation)
				jsb := json.RawMessage(js)
				fExplain.Append(&jsb)
			} else {
				fExplain.Append(nil)
			}
		}

		// extend fields to match the longest field (extender may append unevenly)
		fieldLen++
		for _, f := range frame.Fields {
			if fieldLen > f.Len() {
				f.Extend(fieldLen - f.Len())
			}
		}

		// load the next document match
		match, err = documentMatchIterator.Next()
	}

	// Must call after iterating :)
	aggs := documentMatchIterator.Aggregations()

	header.Count = aggs.Count() // Total count
	if q.Explain {
		header.MaxScore = aggs.Metric("max_score")
	}

	if len(locationItems) > 0 && !q.SkipLocation {
		header.Locations = getLocationLookupInfo(ctx, reader, locationItems)
	}

	response.Frames = append(response.Frames, frame)

	for _, t := range q.Facet {
		bbb := aggs.Buckets(t.Field)
		if bbb != nil {
			size := len(bbb)

			fName := data.NewFieldFromFieldType(data.FieldTypeString, size)
			fName.Name = t.Field

			fCount := data.NewFieldFromFieldType(data.FieldTypeUint64, size)
			fCount.Name = "Count"

			for i, v := range bbb {
				fName.Set(i, v.Name())
				fCount.Set(i, v.Count())
			}

			response.Frames = append(response.Frames, data.NewFrame("Facet: "+t.Field, fName, fCount))
		}
	}

	return response
}
// shouldUseNgram reports whether the ngram clause can contribute to a match.
// When the full query exceeds the ngram edge-filter length, the clause is only
// useful if every individual token still fits: with the AND match operator a
// single over-long token guarantees zero matches.
func shouldUseNgram(q DashboardQuery) bool {
	if len(q.Query) <= ngramEdgeFilterMaxLength {
		return true
	}
	for _, token := range strings.Fields(q.Query) {
		// One over-long token makes the whole AND-combined ngram query unmatchable.
		if len(token) > ngramEdgeFilterMaxLength {
			return false
		}
	}
	return true
}
// formatForNameSortField normalizes a name for comparison against the
// name-sort field: upper-cased, with surrounding space characters removed.
// Note: only ASCII spaces are trimmed, not tabs or other whitespace.
func formatForNameSortField(name string) string {
	upper := strings.ToUpper(name)
	return strings.Trim(upper, " ")
}
// getLocationLookupInfo resolves a set of folder/dashboard UIDs to display
// info (name, kind, url) via a secondary search over the same reader.
// Errors are best-effort: on search failure an empty map is returned, and a
// match that fails to load is skipped rather than aborting the iteration.
func getLocationLookupInfo(ctx context.Context, reader *bluge.Reader, uids map[string]bool) map[string]locationItem {
	res := make(map[string]locationItem, len(uids))
	bq := bluge.NewBooleanQuery()
	for k := range uids {
		bq.AddShould(bluge.NewTermQuery(k).SetField(documentFieldUID))
	}

	req := bluge.NewAllMatches(bq)

	documentMatchIterator, err := reader.Search(ctx, req)
	if err != nil {
		return res
	}

	dvfieldNames := []string{"type"}
	sctx := search.NewSearchContext(0, 0)

	// execute this search on the reader
	// iterate through the document matches
	match, err := documentMatchIterator.Next()
	for err == nil && match != nil {
		if loadErr := match.LoadDocumentValues(sctx, dvfieldNames); loadErr != nil {
			// FIX: previously `err = ...; continue` left err non-nil, which made the
			// loop condition silently abort the whole iteration. Skip only this match.
			match, err = documentMatchIterator.Next()
			continue
		}
		uid := ""
		item := locationItem{}
		_ = match.VisitStoredFields(func(field string, value []byte) bool {
			switch field {
			case documentFieldUID:
				uid = string(value)
			case documentFieldKind:
				item.Kind = string(value)
			case documentFieldName:
				item.Name = string(value)
			case documentFieldURL:
				item.URL = string(value)
			}
			return true
		})
		res[uid] = item

		// load the next document match
		match, err = documentMatchIterator.Next()
	}
	return res
}
// locationItem describes one element of a result's location path (a folder or
// dashboard) for display purposes.
type locationItem struct {
	Name string `json:"name"`
	Kind string `json:"kind"`
	URL  string `json:"url"`
}

// customMeta is attached to the search-results frame as custom metadata.
type customMeta struct {
	Count     uint64                  `json:"count"`               // total matches (from standard aggregations)
	MaxScore  float64                 `json:"max_score,omitempty"` // only populated when Explain is requested
	Locations map[string]locationItem `json:"locationInfo,omitempty"`
	SortBy    string                  `json:"sortBy,omitempty"` // sort field with any leading "-" stripped
}

View File

@@ -0,0 +1,48 @@
package searchV2
import (
"github.com/blugelabs/bluge"
"github.com/grafana/grafana-plugin-sdk-go/data"
)
// ExtendDashboardFunc can add extra fields to a dashboard document before it
// is written to the index.
type ExtendDashboardFunc func(uid string, doc *bluge.Document) error

// FramerFunc receives stored fields that the core search code does not
// recognize, so an extender can surface them in the result frame.
type FramerFunc func(field string, value []byte)

// QueryExtender supplies a FramerFunc bound to the result frame.
type QueryExtender interface {
	GetFramer(frame *data.Frame) FramerFunc
}

// DocumentExtender supplies per-org document extension hooks.
type DocumentExtender interface {
	GetDashboardExtender(orgID int64, uids ...string) ExtendDashboardFunc
}

// DashboardIndexExtender bundles the indexing-time and query-time extension points.
type DashboardIndexExtender interface {
	GetDocumentExtender() DocumentExtender
	GetQueryExtender(query DashboardQuery) QueryExtender
}
// NoopExtender is a DashboardIndexExtender that adds nothing at either
// indexing or query time.
type NoopExtender struct{}

// GetDocumentExtender returns a document extender whose hooks do nothing.
func (n NoopExtender) GetDocumentExtender() DocumentExtender {
	return new(NoopDocumentExtender)
}

// GetQueryExtender returns a query extender whose framer ignores all fields.
func (n NoopExtender) GetQueryExtender(query DashboardQuery) QueryExtender {
	return new(NoopQueryExtender)
}

// NoopDocumentExtender provides an ExtendDashboardFunc that leaves every
// document unchanged.
type NoopDocumentExtender struct{}

func (n NoopDocumentExtender) GetDashboardExtender(_ int64, _ ...string) ExtendDashboardFunc {
	noop := func(uid string, doc *bluge.Document) error {
		return nil
	}
	return noop
}

// NoopQueryExtender provides a FramerFunc that drops every field it receives.
type NoopQueryExtender struct{}

func (n NoopQueryExtender) GetFramer(_ *data.Frame) FramerFunc {
	return func(field string, value []byte) {
		// really noop
	}
}

View File

@@ -0,0 +1,142 @@
package searchV2
import (
"regexp"
"strings"
"github.com/blugelabs/bluge"
"github.com/blugelabs/bluge/search"
"github.com/blugelabs/bluge/search/searcher"
"github.com/blugelabs/bluge/search/similarity"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/store/entity"
)
// PermissionFilter is a bluge query that restricts search results to entities
// the caller may access, as decided by the wrapped ResourceFilter.
type PermissionFilter struct {
	log    log.Logger
	filter ResourceFilter
}

// entityKind mirrors the entity-store standard kinds used in the index.
type entityKind string

const (
	entityKindPanel      entityKind = entity.StandardKindPanel
	entityKindDashboard  entityKind = entity.StandardKindDashboard
	entityKindFolder     entityKind = entity.StandardKindFolder
	entityKindDatasource entityKind = entity.StandardKindDataSource
	entityKindQuery      entityKind = entity.StandardKindQuery
)
// IsValid reports whether the kind is one search results may contain.
// Note: datasource and query kinds are deliberately excluded here.
func (r entityKind) IsValid() bool {
	switch r {
	case entityKindPanel, entityKindDashboard, entityKindFolder:
		return true
	default:
		return false
	}
}

// supportsAuthzCheck reports whether an authorization decision can be made for
// this kind (currently the same set as IsValid).
func (r entityKind) supportsAuthzCheck() bool {
	switch r {
	case entityKindPanel, entityKindDashboard, entityKindFolder:
		return true
	default:
		return false
	}
}
var (
	// Document values loaded per match to make the access decision.
	permissionFilterFields = []string{documentFieldUID, documentFieldKind, documentFieldLocation}

	// Panel ids are stored as "<dashboard_uid>#<panel_id>".
	panelIdFieldRegex                      = regexp.MustCompile(`^(.*)#([0-9]{1,4})$`)
	panelIdFieldDashboardUidSubmatchIndex  = 1
	panelIdFieldPanelIdSubmatchIndex       = 2
	panelIdFieldRegexExpectedSubmatchCount = 3 // submatches[0] - whole string

	// Compile-time assertion that PermissionFilter implements bluge.Query.
	_ bluge.Query = (*PermissionFilter)(nil)
)
// newPermissionFilter wraps a ResourceFilter in a bluge query usable as a
// mandatory clause on every search.
func newPermissionFilter(resourceFilter ResourceFilter, log log.Logger) *PermissionFilter {
	pf := &PermissionFilter{
		log:    log,
		filter: resourceFilter,
	}
	return pf
}
// logAccessDecision records an allow/deny decision with context. It is
// currently short-circuited on purpose (see comment) because per-document
// logging is far too verbose; flip the guard to re-enable during debugging.
func (q *PermissionFilter) logAccessDecision(decision bool, kind any, id string, reason string, ctx ...any) {
	if true {
		return // TOO much logging right now
	}
	// ctx is expected to hold alternating key/value pairs for the structured logger.
	ctx = append(ctx, "kind", kind, "id", id, "reason", reason)
	if decision {
		q.log.Debug("Allowing access", ctx...)
	} else {
		q.log.Info("Denying access", ctx...)
	}
}
// canAccess decides whether an indexed entity may be returned to the caller.
// Folders and dashboards are checked directly against the resource filter.
// Panels (id "<dashboard_uid>#<panel_id>", location "<folder_uid>/<dashboard_uid>")
// are resolved to their parent dashboard and checked as dashboards.
// Every other kind is denied.
func (q *PermissionFilter) canAccess(kind entityKind, id, location string) bool {
	if !kind.supportsAuthzCheck() {
		q.logAccessDecision(false, kind, id, "entityDoesNotSupportAuthz")
		return false
	}

	// TODO add `kind` to the `ResourceFilter` interface so that we can move the switch out of here
	//
	switch kind {
	case entityKindFolder, entityKindDashboard:
		decision := q.filter(kind, id, location)
		q.logAccessDecision(decision, kind, id, "resourceFilter")
		return decision
	case entityKindPanel:
		matches := panelIdFieldRegex.FindStringSubmatch(id)
		submatchCount := len(matches)
		if submatchCount != panelIdFieldRegexExpectedSubmatchCount {
			q.logAccessDecision(false, kind, id, "invalidPanelIdFieldRegexSubmatchCount", "submatchCount", submatchCount, "expectedSubmatchCount", panelIdFieldRegexExpectedSubmatchCount)
			return false
		}
		dashboardUid := matches[panelIdFieldDashboardUidSubmatchIndex]

		// Location is <folder_uid>/<dashboard_uid>
		if !strings.HasSuffix(location, "/"+dashboardUid) {
			q.logAccessDecision(false, kind, id, "invalidLocation", "location", location, "dashboardUid", dashboardUid)
			return false
		}

		folderUid := location[:len(location)-len(dashboardUid)-1]
		decision := q.filter(entityKindDashboard, dashboardUid, folderUid)
		q.logAccessDecision(decision, kind, id, "resourceFilter", "folderUid", folderUid, "dashboardUid", dashboardUid, "panelId", matches[panelIdFieldPanelIdSubmatchIndex])
		return decision
	default:
		// FIX: previously `logAccessDecision(false, kind, id, "reason", "unknownKind")`
		// passed the literal "reason" as the reason and left "unknownKind" as an
		// unpaired context value, corrupting the key/value list.
		q.logAccessDecision(false, kind, id, "unknownKind")
		return false
	}
}
// Searcher implements bluge.Query: it wraps a match-all searcher in a filter
// that loads each candidate's uid/kind/location document values and keeps only
// documents the caller can access.
func (q *PermissionFilter) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error) {
	dvReader, err := i.DocumentValueReader(permissionFilterFields)
	if err != nil {
		return nil, err
	}

	s, err := searcher.NewMatchAllSearcher(i, 1, similarity.ConstantScorer(1), options)
	if err != nil {
		return nil, err
	}
	return searcher.NewFilteringSearcher(s, func(d *search.DocumentMatch) bool {
		var kind, id, location string
		err := dvReader.VisitDocumentValues(d.Number, func(field string, term []byte) {
			switch field {
			case documentFieldKind:
				kind = string(term)
			case documentFieldUID:
				id = string(term)
			case documentFieldLocation:
				location = string(term)
			}
		})
		if err != nil {
			// Deny on load failure rather than leaking a document we can't vet.
			q.logAccessDecision(false, kind, id, "errorWhenVisitingDocumentValues")
			return false
		}

		e := entityKind(kind)
		if !e.IsValid() {
			q.logAccessDecision(false, kind, id, "invalidEntityKind")
			return false
		}

		return q.canAccess(e, id, location)
	}), nil // FIX: was `err`, which is provably nil here; returning it was misleading
}

View File

@@ -0,0 +1,80 @@
package searchV2
import (
"encoding/json"
"errors"
"io"
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/prometheus/client_golang/prometheus"
"github.com/grafana/grafana/pkg/api/response"
"github.com/grafana/grafana/pkg/api/routing"
"github.com/grafana/grafana/pkg/middleware"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
)
// SearchHTTPService exposes the searchV2 service over HTTP.
type SearchHTTPService interface {
	RegisterHTTPRoutes(storageRoute routing.RouteRegister)
}

type searchHTTPService struct {
	search SearchService
}

// ProvideSearchHTTPService wires the HTTP layer to the search service
// (dependency-injection provider).
func ProvideSearchHTTPService(search SearchService) SearchHTTPService {
	return &searchHTTPService{search: search}
}

// RegisterHTTPRoutes mounts the search endpoint; POST only, signed-in users.
func (s *searchHTTPService) RegisterHTTPRoutes(storageRoute routing.RouteRegister) {
	storageRoute.Post("/", middleware.ReqSignedIn, routing.Wrap(s.doQuery))
}
// doQuery handles a search POST: it verifies the index is ready for the org,
// parses the body into a DashboardQuery, runs the query, and returns the
// resulting frames as JSON.
func (s *searchHTTPService) doQuery(c *contextmodel.ReqContext) response.Response {
	ctx, span := tracer.Start(c.Req.Context(), "searchV2.doQuery")
	defer span.End()

	searchReadinessCheckResp := s.search.IsReady(ctx, c.GetOrgID())
	if !searchReadinessCheckResp.IsReady {
		// Index still building: count the miss and return a 200 with a frame
		// named "Loading" instead of an error, so clients can poll.
		dashboardSearchNotServedRequestsCounter.With(prometheus.Labels{
			"reason": searchReadinessCheckResp.Reason,
		}).Inc()

		return response.JSON(http.StatusOK, &backend.DataResponse{
			Frames: []*data.Frame{{
				Name: "Loading",
			}},
			Error: nil,
		})
	}

	body, err := io.ReadAll(c.Req.Body)
	if err != nil {
		return response.Error(http.StatusInternalServerError, "error reading bytes", err)
	}

	query := &DashboardQuery{}
	err = json.Unmarshal(body, query)
	if err != nil {
		return response.Error(http.StatusBadRequest, "error parsing body", err)
	}

	resp := s.search.doDashboardQuery(ctx, c.SignedInUser, c.GetOrgID(), *query)

	if resp.Error != nil {
		return response.Error(http.StatusInternalServerError, "error handling search request", resp.Error)
	}

	if len(resp.Frames) == 0 {
		msg := "invalid search response"
		return response.Error(http.StatusInternalServerError, msg, errors.New(msg))
	}

	// Frames are marshalled here; NOTE(review): this assumes response.JSON
	// passes []byte bodies through without re-encoding — confirm against the
	// response package before changing.
	bytes, err := resp.MarshalJSON()
	if err != nil {
		return response.Error(http.StatusInternalServerError, "error marshalling response", err)
	}

	return response.JSON(http.StatusOK, bytes)
}

View File

@@ -0,0 +1,971 @@
package searchV2
import (
"bytes"
"context"
"errors"
"fmt"
"os"
"os/exec"
"runtime"
"strconv"
"strings"
"sync"
"time"
"github.com/blugelabs/bluge"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
"github.com/grafana/grafana/pkg/services/store"
"github.com/grafana/grafana/pkg/services/store/entity"
kdash "github.com/grafana/grafana/pkg/services/store/kind/dashboard"
"github.com/grafana/grafana/pkg/setting"
)
type dashboardLoader interface {
	// LoadDashboards returns slice of dashboards. If dashboardUID is empty then
	// implementation must return all dashboards in instance to build an entire
	// dashboard index for an organization. If dashboardUID is not empty then only
	// return dashboard with specified UID or empty slice if not found (this is required
	// to apply partial update).
	LoadDashboards(ctx context.Context, orgID int64, dashboardUID string) ([]dashboard, error)
}

// eventStore reads entity events used for incremental index updates.
type eventStore interface {
	GetLastEvent(ctx context.Context) (*store.EntityEvent, error)
	GetAllEventsAfter(ctx context.Context, id int64) ([]*store.EntityEvent, error)
}

// dashboard is the loader's view of a dashboard or folder row.
type dashboard struct {
	id        int64
	uid       string
	isFolder  bool // folders share the dashboard table
	folderID  int64
	folderUID string
	slug      string
	created   time.Time
	updated   time.Time

	// Use generic structure
	summary *entity.EntitySummary
}

// buildSignal is sent when search index is accessed in organization for which
// we have not constructed an index yet.
type buildSignal struct {
	orgID int64
	done  chan error // receives the build result (nil on success)
}

// orgIndex holds one bluge writer per index type for a single organization.
type orgIndex struct {
	writers map[indexType]*bluge.Writer
}

type indexType string

const (
	indexTypeDashboard indexType = "dashboard"
)
// writerForIndex returns the writer for the given index type (nil if absent).
func (i *orgIndex) writerForIndex(idxType indexType) *bluge.Writer {
	return i.writers[idxType]
}

// readerForIndex opens a point-in-time reader for the given index type and
// returns it together with a cleanup func that closes the reader.
func (i *orgIndex) readerForIndex(idxType indexType) (*bluge.Reader, func(), error) {
	writer := i.writers[idxType]
	reader, err := writer.Reader()
	if err != nil {
		return nil, nil, err
	}
	closeFn := func() { _ = reader.Close() }
	return reader, closeFn, nil
}
// searchIndex maintains per-organization bluge indexes and keeps them up to
// date via entity events and periodic full re-indexing.
type searchIndex struct {
	mu                      sync.RWMutex // guards perOrgIndex
	loader                  dashboardLoader
	perOrgIndex             map[int64]*orgIndex
	initializedOrgs         map[int64]bool
	initialIndexingComplete bool
	initializationMutex     sync.RWMutex // guards initializedOrgs + initialIndexingComplete
	eventStore              eventStore
	logger                  log.Logger
	buildSignals            chan buildSignal // requests to lazily build an org index
	extender                DocumentExtender
	syncCh                  chan chan struct{} // read-path requests to flush pending events
	tracer                  tracing.Tracer
	features                featuremgmt.FeatureToggles
	settings                setting.SearchSettings
}

// newSearchIndex constructs a searchIndex; indexes themselves are built later
// by run()/buildInitialIndexes.
func newSearchIndex(dashLoader dashboardLoader, evStore eventStore, extender DocumentExtender, tracer tracing.Tracer, features featuremgmt.FeatureToggles, settings setting.SearchSettings) *searchIndex {
	return &searchIndex{
		loader:          dashLoader,
		eventStore:      evStore,
		perOrgIndex:     map[int64]*orgIndex{},
		initializedOrgs: map[int64]bool{},
		logger:          log.New("searchIndex"),
		buildSignals:    make(chan buildSignal),
		extender:        extender,
		syncCh:          make(chan chan struct{}),
		tracer:          tracer,
		features:        features,
		settings:        settings,
	}
}
// isInitialized reports whether search can be served for the org. If the
// initial pass is done but this org has no index yet, an async build is
// triggered so a later request can succeed.
func (i *searchIndex) isInitialized(_ context.Context, orgId int64) IsSearchReadyResponse {
	i.initializationMutex.RLock()
	orgReady := i.initializedOrgs[orgId]
	initialDone := i.initialIndexingComplete
	i.initializationMutex.RUnlock()

	switch {
	case orgReady && initialDone:
		return IsSearchReadyResponse{IsReady: true}
	case !initialDone:
		return IsSearchReadyResponse{IsReady: false, Reason: "initial-indexing-ongoing"}
	default:
		// Kick off a lazy build for this org; the caller should retry later.
		i.triggerBuildingOrgIndex(orgId)
		return IsSearchReadyResponse{IsReady: false, Reason: "org-indexing-ongoing"}
	}
}
// triggerBuildingOrgIndex asynchronously asks the run() loop to build an index
// for the org. Fire-and-forget: outcomes are only logged. Both the signal send
// and the wait for completion are bounded by a 5-minute timeout.
func (i *searchIndex) triggerBuildingOrgIndex(orgId int64) {
	go func() {
		ctx, cancel := context.WithTimeout(context.Background(), 5*time.Minute)
		defer cancel()

		doneIndexing := make(chan error, 1)
		signal := buildSignal{orgID: orgId, done: doneIndexing}
		select {
		case i.buildSignals <- signal:
		case <-ctx.Done():
			i.logger.Warn("Failed to send a build signal to initialize org index", "orgId", orgId)
			return
		}
		select {
		case err := <-doneIndexing:
			if err != nil {
				i.logger.Error("Failed to build org index", "orgId", orgId, "error", err)
			} else {
				i.logger.Debug("Successfully built org index", "orgId", orgId)
			}
		case <-ctx.Done():
			i.logger.Warn("Building org index timeout", "orgId", orgId)
		}
	}()
}
// sync asks the run() loop to apply all pending entity events and blocks until
// it has done so (or ctx is cancelled). Used on the read path to make search
// results consistent with recent writes.
func (i *searchIndex) sync(ctx context.Context) error {
	doneCh := make(chan struct{}, 1)
	select {
	case i.syncCh <- doneCh:
	case <-ctx.Done():
		return ctx.Err()
	}
	select {
	case <-doneCh:
		return nil
	case <-ctx.Done():
		return ctx.Err()
	}
}
// run is the index maintenance loop: it builds the initial indexes, then
// services sync requests, periodic partial updates (entity events), lazy
// per-org builds, and periodic full re-indexing until ctx is cancelled.
func (i *searchIndex) run(ctx context.Context, orgIDs []int64, reIndexSignalCh chan struct{}) error {
	i.logger.Info("Initializing SearchV2", "dashboardLoadingBatchSize", i.settings.DashboardLoadingBatchSize, "fullReindexInterval", i.settings.FullReindexInterval, "indexUpdateInterval", i.settings.IndexUpdateInterval)
	initialSetupCtx, initialSetupSpan := i.tracer.Start(ctx, "searchV2 initialSetup")

	reIndexInterval := i.settings.FullReindexInterval
	fullReIndexTimer := time.NewTimer(reIndexInterval)
	defer fullReIndexTimer.Stop()

	partialUpdateInterval := i.settings.IndexUpdateInterval
	partialUpdateTimer := time.NewTimer(partialUpdateInterval)
	defer partialUpdateTimer.Stop()

	var lastEventID int64
	lastEvent, err := i.eventStore.GetLastEvent(initialSetupCtx)
	if err != nil {
		initialSetupSpan.End()
		return err
	}
	if lastEvent != nil {
		lastEventID = lastEvent.Id
	}

	err = i.buildInitialIndexes(initialSetupCtx, orgIDs)
	if err != nil {
		initialSetupSpan.End()
		return err
	}

	// This semaphore channel allows limiting concurrent async re-indexing routines to 1.
	asyncReIndexSemaphore := make(chan struct{}, 1)

	// Channel to handle signals about asynchronous full re-indexing completion.
	reIndexDoneCh := make(chan int64, 1)

	i.initializationMutex.Lock()
	i.initialIndexingComplete = true
	i.initializationMutex.Unlock()

	initialSetupSpan.End()

	for {
		select {
		case doneCh := <-i.syncCh:
			// Executed on search read requests to make sure index is consistent.
			lastEventID = i.applyIndexUpdates(ctx, lastEventID)
			close(doneCh)
		case <-partialUpdateTimer.C:
			// Periodically apply updates collected in entity events table.
			partialIndexUpdateCtx, span := i.tracer.Start(ctx, "searchV2 partial update timer")
			lastEventID = i.applyIndexUpdates(partialIndexUpdateCtx, lastEventID)
			span.End()
			partialUpdateTimer.Reset(partialUpdateInterval)
		case <-reIndexSignalCh:
			// External systems may trigger re-indexing, at this moment provisioning does this.
			i.logger.Info("Full re-indexing due to external signal")
			fullReIndexTimer.Reset(0)
		case signal := <-i.buildSignals:
			buildSignalCtx, span := i.tracer.Start(ctx, "searchV2 build signal")

			// When search read request meets new not-indexed org we build index for it.
			i.mu.RLock()
			_, ok := i.perOrgIndex[signal.orgID]
			if ok {
				span.End()
				// Index for org already exists, do nothing.
				i.mu.RUnlock()
				close(signal.done)
				continue
			}
			i.mu.RUnlock()
			lastIndexedEventID := lastEventID
			// Prevent full re-indexing while we are building index for new org.
			// Full re-indexing will be later re-started in `case lastIndexedEventID := <-reIndexDoneCh`
			// branch.
			fullReIndexTimer.Stop()
			go func() {
				defer span.End()
				// We need semaphore here since asynchronous re-indexing may be in progress already.
				asyncReIndexSemaphore <- struct{}{}
				defer func() { <-asyncReIndexSemaphore }()
				// FIX: use a goroutine-local error. Previously this assigned the
				// outer `err` variable from inside the goroutine — a data race with
				// the loop and any concurrently running build goroutine.
				_, buildErr := i.buildOrgIndex(buildSignalCtx, signal.orgID)
				signal.done <- buildErr
				reIndexDoneCh <- lastIndexedEventID
			}()
		case <-fullReIndexTimer.C:
			fullReindexCtx, span := i.tracer.Start(ctx, "searchV2 full reindex timer")

			// Periodically rebuild indexes since we could miss updates. At this moment we are issuing
			// entity events non-atomically (outside of transaction) and do not cover all possible dashboard
			// change places, so periodic re-indexing fixes possibly broken state. But ideally we should
			// come to an approach which does not require periodic re-indexing at all. One possible way
			// is to use DB triggers, see https://github.com/grafana/grafana/pull/47712.
			lastIndexedEventID := lastEventID
			go func() {
				defer span.End()
				// Do full re-index asynchronously to avoid blocking index synchronization
				// on read for a long time.

				// We need semaphore here since re-indexing due to build signal may be in progress already.
				asyncReIndexSemaphore <- struct{}{}
				defer func() { <-asyncReIndexSemaphore }()

				started := time.Now()
				i.logger.Info("Start re-indexing", i.withCtxData(fullReindexCtx)...)
				i.reIndexFromScratch(fullReindexCtx)
				i.logger.Info("Full re-indexing finished", i.withCtxData(fullReindexCtx, "fullReIndexElapsed", time.Since(started))...)
				reIndexDoneCh <- lastIndexedEventID
			}()
		case lastIndexedEventID := <-reIndexDoneCh:
			// Asynchronous re-indexing is finished. Set lastEventID to the value which
			// was actual at the re-indexing start so that we could re-apply all the
			// events happened during async index build process and make sure it's consistent.
			if lastEventID != lastIndexedEventID {
				i.logger.Info("Re-apply event ID to last indexed", "currentEventID", lastEventID, "lastIndexedEventID", lastIndexedEventID)
				lastEventID = lastIndexedEventID
				// Apply events immediately.
				partialUpdateTimer.Reset(0)
			}
			fullReIndexTimer.Reset(reIndexInterval)
		case <-ctx.Done():
			return ctx.Err()
		}
	}
}
// buildInitialIndexes builds an index for each given org sequentially,
// stopping at (and wrapping) the first failure.
func (i *searchIndex) buildInitialIndexes(ctx context.Context, orgIDs []int64) error {
	start := time.Now()
	i.logger.Info("Start building in-memory indexes")
	for _, orgID := range orgIDs {
		if err := i.buildInitialIndex(ctx, orgID); err != nil {
			return fmt.Errorf("can't build initial dashboard search index for org %d: %w", orgID, err)
		}
	}
	i.logger.Info("Finish building in-memory indexes", "elapsed", time.Since(start))
	return nil
}
// buildInitialIndex builds the index for one org, optionally sampling resource
// usage and reporting an on-disk backup size when GF_SEARCH_DEBUG is set.
func (i *searchIndex) buildInitialIndex(ctx context.Context, orgID int64) error {
	debugCtx, debugCtxCancel := context.WithCancel(ctx)
	if os.Getenv("GF_SEARCH_DEBUG") != "" {
		go i.debugResourceUsage(debugCtx, 200*time.Millisecond)
	}

	started := time.Now()
	numDashboards, err := i.buildOrgIndex(ctx, orgID)
	if err != nil {
		debugCtxCancel()
		// FIX: previously this hard-coded "org ID 1" regardless of the org being built.
		return fmt.Errorf("can't build dashboard search index for org ID %d: %w", orgID, err)
	}
	i.logger.Info("Indexing for org finished", "orgIndexElapsed", time.Since(started), "orgId", orgID, "numDashboards", numDashboards)
	// Cancel before the (slow) backup report so the resource-usage summary is logged first.
	debugCtxCancel()

	if os.Getenv("GF_SEARCH_DEBUG") != "" {
		// May help to estimate size of index when introducing changes. Though it's not a direct
		// match to a memory consumption, but at least make give some relative difference understanding.
		// Moreover, changes in indexing can cause additional memory consumption upon initial index build
		// which is not reflected here.
		i.reportSizeOfIndexDiskBackup(orgID)
	}
	return nil
}
// This is a naive implementation of process CPU getting (credits to
// https://stackoverflow.com/a/11357813/1288429). Should work on both Linux and Darwin.
// Since we only use this during development seems simple and cheap solution to get
// process CPU usage in cross-platform way.
//
// It parses `ps aux` output and returns the %CPU column for currentPid, or an
// error if the process line is not found.
func getProcessCPU(currentPid int) (float64, error) {
	cmd := exec.Command("ps", "aux")
	var out bytes.Buffer
	cmd.Stdout = &out
	if err := cmd.Run(); err != nil {
		return 0, err
	}

	for {
		line, err := out.ReadString('\n')
		if err != nil {
			break
		}
		// FIX: use strings.Fields instead of splitting on a single space and
		// filtering manually — that version could panic on ft[1] for lines with
		// fewer than two whitespace-separated columns (and missed tab-adjacent runs).
		ft := strings.Fields(line)
		if len(ft) < 3 {
			continue // header/short line; PID is column 2, %CPU is column 3
		}
		pid, err := strconv.Atoi(ft[1])
		if err != nil || pid != currentPid {
			continue
		}
		cpu, err := strconv.ParseFloat(ft[2], 64)
		if err != nil {
			return 0, err
		}
		return cpu, nil
	}
	return 0, errors.New("process not found")
}
// debugResourceUsage samples heap/sys memory and process CPU at the given
// frequency until ctx is cancelled, then logs a single summary line.
// Development-only (enabled via GF_SEARCH_DEBUG).
func (i *searchIndex) debugResourceUsage(ctx context.Context, frequency time.Duration) {
	var maxHeapInuse uint64
	var maxSys uint64

	captureMemStats := func() {
		var m runtime.MemStats
		runtime.ReadMemStats(&m)
		if m.HeapInuse > maxHeapInuse {
			maxHeapInuse = m.HeapInuse
		}
		if m.Sys > maxSys {
			maxSys = m.Sys
		}
	}

	var cpuUtilization []float64

	captureCPUStats := func() {
		cpu, err := getProcessCPU(os.Getpid())
		if err != nil {
			i.logger.Error("CPU stats error", "error", err)
			return
		}
		// Collect CPU utilization samples to a slice and log them at the end of the index build.
		cpuUtilization = append(cpuUtilization, cpu)
	}

	captureMemStats()
	captureCPUStats()

	for {
		select {
		case <-ctx.Done():
			i.logger.Warn("Resource usage during indexing", "maxHeapInUse", formatBytes(maxHeapInuse), "maxSys", formatBytes(maxSys), "cpuPercent", cpuUtilization)
			return
		case <-time.After(frequency):
			captureMemStats()
			captureCPUStats()
		}
	}
}
// reportSizeOfIndexDiskBackup backs the org's dashboard index up into a temp
// directory and logs the resulting on-disk size. Development-only; all
// failures are logged and swallowed.
func (i *searchIndex) reportSizeOfIndexDiskBackup(orgID int64) {
	// FIX: previously the ok result was discarded; a missing org index produced
	// a nil *orgIndex and readerForIndex panicked on the nil dereference.
	index, ok := i.getOrgIndex(orgID)
	if !ok {
		i.logger.Warn("Index not found for org", "orgId", orgID)
		return
	}
	reader, cancel, err := index.readerForIndex(indexTypeDashboard)
	if err != nil {
		i.logger.Warn("Error getting reader", "error", err)
		return
	}
	defer cancel()

	// create a temp directory to store the index
	tmpDir, err := os.MkdirTemp("", "grafana.dashboard_index")
	if err != nil {
		i.logger.Error("Can't create temp dir", "error", err)
		return
	}
	defer func() {
		err := os.RemoveAll(tmpDir)
		if err != nil {
			i.logger.Error("Can't remove temp dir", "error", err, "tmpDir", tmpDir)
			return
		}
	}()

	cancelCh := make(chan struct{})
	err = reader.Backup(tmpDir, cancelCh)
	if err != nil {
		i.logger.Error("Can't create index disk backup", "error", err)
		return
	}

	size, err := dirSize(tmpDir)
	if err != nil {
		i.logger.Error("Can't calculate dir size", "error", err)
		return
	}

	i.logger.Warn("Size of index disk backup", "size", formatBytes(uint64(size)))
}
// buildOrgIndex loads all dashboards for the org and (re)builds its index,
// swapping it into perOrgIndex and closing any previous writers. Returns the
// number of dashboards indexed. The whole build is bounded by a one-minute
// timeout.
func (i *searchIndex) buildOrgIndex(ctx context.Context, orgID int64) (int, error) {
	spanCtx, span := i.tracer.Start(ctx, "searchV2 buildOrgIndex", trace.WithAttributes(
		attribute.Int64("org_id", orgID),
	))

	started := time.Now()
	ctx, cancel := context.WithTimeout(spanCtx, time.Minute)
	ctx = log.InitCounter(ctx)

	defer func() {
		span.End()
		cancel()
	}()

	i.logger.Info("Start building org index", "orgId", orgID)
	// Empty UID means "load everything for this org" (see dashboardLoader docs).
	dashboards, err := i.loader.LoadDashboards(ctx, orgID, "")
	orgSearchIndexLoadTime := time.Since(started)

	if err != nil {
		return 0, fmt.Errorf("error loading dashboards: %w, elapsed: %s", err, orgSearchIndexLoadTime.String())
	}
	i.logger.Info("Finish loading org dashboards", "elapsed", orgSearchIndexLoadTime, "orgId", orgID)

	dashboardExtender := i.extender.GetDashboardExtender(orgID)

	_, initOrgIndexSpan := i.tracer.Start(ctx, "searchV2 buildOrgIndex init org index", trace.WithAttributes(
		attribute.Int64("org_id", orgID),
		attribute.Int("dashboardCount", len(dashboards)),
	))

	index, err := initOrgIndex(dashboards, i.logger, dashboardExtender)

	initOrgIndexSpan.End()

	if err != nil {
		return 0, fmt.Errorf("error initializing index: %w", err)
	}

	orgSearchIndexTotalTime := time.Since(started)
	orgSearchIndexBuildTime := orgSearchIndexTotalTime - orgSearchIndexLoadTime

	i.logger.Info("Re-indexed dashboards for organization",
		i.withCtxData(ctx, "orgId", orgID,
			"orgSearchIndexLoadTime", orgSearchIndexLoadTime,
			"orgSearchIndexBuildTime", orgSearchIndexBuildTime,
			"orgSearchIndexTotalTime", orgSearchIndexTotalTime,
			"orgSearchDashboardCount", len(dashboards))...)

	// Swap in the new index and close the writers of the one it replaces.
	i.mu.Lock()
	if oldIndex, ok := i.perOrgIndex[orgID]; ok {
		for _, w := range oldIndex.writers {
			_ = w.Close()
		}
	}
	i.perOrgIndex[orgID] = index
	i.mu.Unlock()

	i.initializationMutex.Lock()
	i.initializedOrgs[orgID] = true
	i.initializationMutex.Unlock()

	if orgID == 1 {
		// Usage stats are only reported for the main org, asynchronously.
		go func() {
			if reader, cancel, err := index.readerForIndex(indexTypeDashboard); err == nil {
				defer cancel()
				updateUsageStats(context.Background(), reader, i.logger, i.tracer)
			}
		}()
	}
	return len(dashboards), nil
}
// getOrgIndex returns the org's index and whether one exists.
func (i *searchIndex) getOrgIndex(orgID int64) (*orgIndex, bool) {
	i.mu.RLock()
	index, ok := i.perOrgIndex[orgID]
	i.mu.RUnlock()
	return index, ok
}
// getOrCreateOrgIndex returns the org's index, lazily building it (and
// blocking until the build completes) if it does not exist yet.
func (i *searchIndex) getOrCreateOrgIndex(ctx context.Context, orgID int64) (*orgIndex, error) {
	index, ok := i.getOrgIndex(orgID)
	if !ok {
		// For non-main organization indexes are built lazily.
		// If we don't have an index then we are blocking here until an index for
		// an organization is ready. This actually takes time only during the first
		// access, all the consequent search requests do not fall into this branch.
		doneIndexing := make(chan error, 1)
		signal := buildSignal{orgID: orgID, done: doneIndexing}
		select {
		case i.buildSignals <- signal:
		case <-ctx.Done():
			return nil, ctx.Err()
		}
		select {
		case err := <-doneIndexing:
			if err != nil {
				return nil, err
			}
		case <-ctx.Done():
			return nil, ctx.Err()
		}
		// NOTE(review): the ok result is ignored here on the assumption the build
		// just installed the index; confirm a concurrent rebuild cannot remove it.
		index, _ = i.getOrgIndex(orgID)
	}
	return index, nil
}
// reIndexFromScratch rebuilds the index of every currently-indexed org.
// Per-org failures are logged and do not stop the remaining rebuilds.
func (i *searchIndex) reIndexFromScratch(ctx context.Context) {
	// Snapshot org IDs under the read lock; builds happen outside it.
	i.mu.RLock()
	orgIDs := make([]int64, 0, len(i.perOrgIndex))
	for orgID := range i.perOrgIndex {
		orgIDs = append(orgIDs, orgID)
	}
	i.mu.RUnlock()

	for _, orgID := range orgIDs {
		if _, err := i.buildOrgIndex(ctx, orgID); err != nil {
			i.logger.Error("Error re-indexing dashboards for organization", "orgId", orgID, "error", err)
		}
	}
}
// withCtxData appends the trace ID (when present in ctx) to a set of
// structured-log key/value pairs.
func (i *searchIndex) withCtxData(ctx context.Context, params ...any) []any {
	if traceID := tracing.TraceIDFromContext(ctx, false); traceID != "" {
		return append(params, "traceID", traceID)
	}
	return params
}
// applyIndexUpdates loads all entity events recorded after lastEventID and
// applies them to the index in order. It returns the ID of the last event
// applied successfully (the input value when nothing was applied), so the
// caller can resume from the right place on the next tick.
func (i *searchIndex) applyIndexUpdates(ctx context.Context, lastEventID int64) int64 {
	ctx = log.InitCounter(ctx)
	events, err := i.eventStore.GetAllEventsAfter(ctx, lastEventID)
	if err != nil {
		i.logger.Error("Can't load events", "error", err)
		return lastEventID
	}
	if len(events) == 0 {
		return lastEventID
	}
	start := time.Now()
	for _, event := range events {
		// Stop at the first failure so the failed event is retried next time.
		if applyErr := i.applyEventOnIndex(ctx, event); applyErr != nil {
			i.logger.Error("Can't apply event", "error", applyErr)
			return lastEventID
		}
		lastEventID = event.Id
	}
	i.logger.Info("Index updates applied", i.withCtxData(ctx, "indexEventsAppliedElapsed", time.Since(start), "numEvents", len(events))...)
	return lastEventID
}
// applyEventOnIndex parses an entity event ID of the form
// "database/{orgID}/{entityType}/{uid}" and forwards it to applyEvent.
// Malformed events are logged and skipped (nil is returned) so a single bad
// event cannot wedge the update loop.
func (i *searchIndex) applyEventOnIndex(ctx context.Context, e *store.EntityEvent) error {
	i.logger.Debug("Processing event", "event", e)
	if !strings.HasPrefix(e.EntityId, "database/") {
		i.logger.Warn("Unknown storage", "entityId", e.EntityId)
		return nil
	}
	// database/org/entityType/path*
	parts := strings.SplitN(strings.TrimPrefix(e.EntityId, "database/"), "/", 3)
	if len(parts) != 3 {
		i.logger.Error("Can't parse entityId", "entityId", e.EntityId)
		return nil
	}
	orgID, err := strconv.ParseInt(parts[0], 10, 64)
	if err != nil {
		i.logger.Error("Can't extract org ID", "entityId", e.EntityId)
		return nil
	}
	return i.applyEvent(ctx, orgID, store.EntityType(parts[1]), parts[2], e.EventType)
}
// applyEvent refreshes (or removes) a single dashboard/folder in the org
// index after a change event. Events for organizations that are not (fully)
// indexed yet are skipped silently.
func (i *searchIndex) applyEvent(ctx context.Context, orgID int64, kind store.EntityType, uid string, _ store.EntityEventType) error {
	i.mu.Lock()
	_, indexed := i.perOrgIndex[orgID]
	i.mu.Unlock()
	if !indexed {
		// Skip event for org not yet indexed.
		return nil
	}
	// Both dashboard and folder share same DB table.
	dbDashboards, err := i.loader.LoadDashboards(ctx, orgID, uid)
	if err != nil {
		return err
	}
	i.mu.Lock()
	defer i.mu.Unlock()
	index, ok := i.perOrgIndex[orgID]
	if !ok {
		// Skip event for org not yet fully indexed.
		return nil
	}
	// In the future we can rely on operation types to reduce work here.
	if len(dbDashboards) > 0 {
		// Entity still exists in the DB: (re)write its documents.
		return i.updateDashboard(ctx, orgID, index, dbDashboards[0])
	}
	// Entity no longer exists in the DB: remove its documents.
	switch kind {
	case store.EntityTypeDashboard:
		return i.removeDashboard(ctx, index, uid)
	case store.EntityTypeFolder:
		return i.removeFolder(ctx, index, uid)
	default:
		return nil
	}
}
// removeDashboard deletes a dashboard document and all of its panel
// documents from the dashboard index. A missing dashboard is a no-op.
func (i *searchIndex) removeDashboard(_ context.Context, index *orgIndex, dashboardUID string) error {
	location, found, err := getDashboardLocation(index, dashboardUID)
	if err != nil {
		return err
	}
	if !found {
		// No dashboard, nothing to remove.
		return nil
	}
	// Panels live "under" the dashboard: their location is the dashboard's
	// own location plus the dashboard UID.
	panelLocation := dashboardUID
	if location != "" {
		panelLocation = location + "/" + dashboardUID
	}
	panelIDs, err := getDocsIDsByLocationPrefix(index, panelLocation)
	if err != nil {
		return err
	}
	batch := bluge.NewBatch()
	batch.Delete(bluge.NewDocument(dashboardUID).ID())
	for _, id := range panelIDs {
		batch.Delete(bluge.NewDocument(id).ID())
	}
	return index.writerForIndex(indexTypeDashboard).Batch(batch)
}
// removeFolder deletes a folder document and every document located under it
// (dashboards and their panels) from the dashboard index.
func (i *searchIndex) removeFolder(_ context.Context, index *orgIndex, folderUID string) error {
	childIDs, err := getDocsIDsByLocationPrefix(index, folderUID)
	if err != nil {
		return fmt.Errorf("error getting by location prefix: %w", err)
	}
	batch := bluge.NewBatch()
	batch.Delete(bluge.NewDocument(folderUID).ID())
	for _, childID := range childIDs {
		batch.Delete(bluge.NewDocument(childID).ID())
	}
	return index.writerForIndex(indexTypeDashboard).Batch(batch)
}
// stringInSlice reports whether str is present in slice.
func stringInSlice(str string, slice []string) bool {
	for _, candidate := range slice {
		if candidate == str {
			return true
		}
	}
	return false
}
// updateDashboard writes (or re-writes) a dashboard or folder document in
// the org index. For dashboards it also refreshes the nested panel documents
// and deletes panels that no longer exist on the dashboard.
func (i *searchIndex) updateDashboard(ctx context.Context, orgID int64, index *orgIndex, dash dashboard) error {
	extendDoc := i.extender.GetDashboardExtender(orgID, dash.uid)
	writer := index.writerForIndex(indexTypeDashboard)
	var doc *bluge.Document
	if dash.isFolder {
		// Folders have no nested panels: a single document update suffices.
		doc = getFolderDashboardDoc(dash)
		if err := extendDoc(dash.uid, doc); err != nil {
			return err
		}
		return writer.Update(doc.ID(), doc)
	}
	batch := bluge.NewBatch()
	var folderUID string
	if dash.folderID == 0 {
		folderUID = folder.GeneralFolderUID
	} else {
		folderUID = dash.folderUID
	}
	location := folderUID
	doc = getNonFolderDashboardDoc(dash, location)
	if err := extendDoc(dash.uid, doc); err != nil {
		return err
	}
	// Panels are located under "folderUID/dashboardUID".
	if location != "" {
		location += "/"
	}
	location += dash.uid
	panelDocs := getDashboardPanelDocs(dash, location)
	// Collect the IDs of panels that still exist so stale indexed panels can
	// be detected below. A set gives O(1) membership checks instead of the
	// previous O(n^2) slice scan via stringInSlice.
	actualPanelIDs := make(map[string]struct{}, len(panelDocs))
	for _, panelDoc := range panelDocs {
		actualPanelIDs[string(panelDoc.ID().Term())] = struct{}{}
		batch.Update(panelDoc.ID(), panelDoc)
	}
	indexedPanelIDs, err := getDashboardPanelIDs(index, location)
	if err != nil {
		return err
	}
	// Delete panels that are indexed but no longer present on the dashboard.
	for _, panelID := range indexedPanelIDs {
		if _, stillExists := actualPanelIDs[panelID]; !stillExists {
			batch.Delete(bluge.NewDocument(panelID).ID())
		}
	}
	batch.Update(doc.ID(), doc)
	return writer.Batch(batch)
}
// sqlDashboardLoader loads dashboards (and folders, which share the same
// table) from the SQL database for indexing.
type sqlDashboardLoader struct {
	sql      db.DB                  // database handle used for dashboard queries
	logger   log.Logger             // loader-scoped logger
	tracer   tracing.Tracer         // tracer for the query spans
	settings setting.SearchSettings // search settings (e.g. loading batch size)
}
// newSQLDashboardLoader creates a sqlDashboardLoader with its own named logger.
func newSQLDashboardLoader(sql db.DB, tracer tracing.Tracer, settings setting.SearchSettings) *sqlDashboardLoader {
	return &sqlDashboardLoader{sql: sql, logger: log.New("sqlDashboardLoader"), tracer: tracer, settings: settings}
}
// dashboardsRes is one batch of dashboard rows (or a terminal error) sent
// over the channel returned by loadAllDashboards.
type dashboardsRes struct {
	dashboards []*dashboardQueryResult
	err        error
}
// loadAllDashboards streams dashboard rows from the database over the
// returned buffered channel, paginating by ID in batches of `limit` rows.
// When dashboardUID is non-empty a single lookup is performed instead.
// The channel is closed when loading finishes, fails, or ctx is cancelled;
// a batch carrying a non-nil err is terminal.
func (l sqlDashboardLoader) loadAllDashboards(ctx context.Context, limit int, orgID int64, dashboardUID string) chan *dashboardsRes {
	ch := make(chan *dashboardsRes, 3)
	go func() {
		defer close(ch)
		// Keyset-pagination cursor: highest dashboard ID seen so far.
		var lastID int64
		for {
			// Bail out early if the caller gave up.
			select {
			case <-ctx.Done():
				err := ctx.Err()
				if err != nil {
					ch <- &dashboardsRes{
						dashboards: nil,
						err:        err,
					}
				}
				return
			default:
			}
			dashboardQueryCtx, dashboardQuerySpan := l.tracer.Start(ctx, "sqlDashboardLoader dashboardQuery", trace.WithAttributes(
				attribute.Int64("orgID", orgID),
				attribute.String("dashboardUID", dashboardUID),
				attribute.Int64("lastID", lastID),
			))
			rows := make([]*dashboardQueryResult, 0, limit)
			err := l.sql.WithDbSession(dashboardQueryCtx, func(sess *db.Session) error {
				sess.Table("dashboard").
					Where("org_id = ?", orgID).
					Where("deleted IS NULL") // don't index soft delete files
				if lastID > 0 {
					sess.Where("id > ?", lastID)
				}
				if dashboardUID != "" {
					sess.Where("uid = ?", dashboardUID)
				}
				sess.Cols("id", "uid", "is_folder", "folder_id", "folder_uid", "data", "slug", "created", "updated")
				sess.OrderBy("id ASC")
				sess.Limit(limit)
				return sess.Find(&rows)
			})
			dashboardQuerySpan.End()
			// Terminal batch: the query failed, fewer rows than the limit came
			// back (no more pages), or this was a single-UID lookup.
			if err != nil || len(rows) < limit || dashboardUID != "" {
				ch <- &dashboardsRes{
					dashboards: rows,
					err:        err,
				}
				break
			}
			ch <- &dashboardsRes{
				dashboards: rows,
			}
			if len(rows) > 0 {
				lastID = rows[len(rows)-1].Id
			}
		}
	}()
	return ch
}
// LoadDashboards returns the dashboards of the given org, converted to the
// internal dashboard representation (with summaries). When dashboardUID is
// empty all dashboards are loaded in batches; otherwise only the matching
// dashboard is loaded. Per-dashboard parse problems are logged but the row
// is still included; a batch-level load failure aborts and is returned.
func (l sqlDashboardLoader) LoadDashboards(ctx context.Context, orgID int64, dashboardUID string) ([]dashboard, error) {
	ctx, span := l.tracer.Start(ctx, "sqlDashboardLoader LoadDashboards", trace.WithAttributes(
		attribute.Int64("orgID", orgID),
	))
	defer span.End()
	var dashboards []dashboard
	limit := 1
	if dashboardUID == "" {
		limit = l.settings.DashboardLoadingBatchSize
		dashboards = make([]dashboard, 0, limit)
	}
	loadDatasourceCtx, loadDatasourceSpan := l.tracer.Start(ctx, "sqlDashboardLoader LoadDatasourceLookup", trace.WithAttributes(
		attribute.Int64("orgID", orgID),
	))
	// key will allow name or uid
	lookup, err := kdash.LoadDatasourceLookup(loadDatasourceCtx, orgID, l.sql)
	if err != nil {
		loadDatasourceSpan.End()
		return dashboards, err
	}
	loadDatasourceSpan.End()
	loadingDashboardCtx, cancelLoadingDashboardCtx := context.WithCancel(ctx)
	defer cancelLoadingDashboardCtx()
	dashboardsChannel := l.loadAllDashboards(loadingDashboardCtx, limit, orgID, dashboardUID)
	for {
		res, ok := <-dashboardsChannel
		if res != nil && res.err != nil {
			// Fix: log and propagate the batch error (res.err). Previously the
			// stale outer err — always nil at this point — was logged and the
			// failure was silently swallowed from the caller's perspective.
			err = res.err
			l.logger.Error("Error when loading dashboards", "error", err, "orgID", orgID, "dashboardUID", dashboardUID)
			break
		}
		if res == nil || !ok {
			// Channel closed: all batches consumed.
			break
		}
		rows := res.dashboards
		_, readDashboardSpan := l.tracer.Start(ctx, "sqlDashboardLoader readDashboard", trace.WithAttributes(
			attribute.Int64("orgID", orgID),
			attribute.Int("dashboardCount", len(rows)),
		))
		reader := kdash.NewStaticDashboardSummaryBuilder(lookup, false)
		for _, row := range rows {
			summary, _, err := reader(ctx, row.Uid, row.Data)
			if err != nil {
				l.logger.Warn("Error indexing dashboard data", "error", err, "dashboardId", row.Id, "dashboardSlug", row.Slug)
				// But append info anyway for now, since we possibly extracted useful information.
			}
			dashboards = append(dashboards, dashboard{
				id:        row.Id,
				uid:       row.Uid,
				isFolder:  row.IsFolder,
				folderID:  row.FolderID,
				folderUID: row.FolderUID,
				slug:      row.Slug,
				created:   row.Created,
				updated:   row.Updated,
				summary:   summary,
			})
		}
		readDashboardSpan.End()
	}
	return dashboards, err
}
// dashboardQueryResult maps one row of the "dashboard" table (see the
// column list selected in loadAllDashboards).
type dashboardQueryResult struct {
	Id        int64
	Uid       string
	IsFolder  bool   `xorm:"is_folder"`
	FolderID  int64  `xorm:"folder_id"`
	FolderUID string `xorm:"folder_uid"`
	Slug      string `xorm:"slug"`
	Data      []byte // raw dashboard JSON
	Created   time.Time
	Updated   time.Time
}

View File

@@ -0,0 +1,735 @@
package searchV2
import (
"context"
"fmt"
"path/filepath"
"testing"
"github.com/blugelabs/bluge"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana-plugin-sdk-go/experimental"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/store"
"github.com/grafana/grafana/pkg/services/store/entity"
"github.com/grafana/grafana/pkg/setting"
)
// testDashboardLoader is a dashboard-loader stub that returns a fixed set of
// dashboards regardless of org or UID.
type testDashboardLoader struct {
	dashboards []dashboard
}

func (t *testDashboardLoader) LoadDashboards(_ context.Context, _ int64, _ string) ([]dashboard, error) {
	return t.dashboards, nil
}
var testLogger = log.New("index-test-logger")

// testAllowAllFilter grants access to every entity; testDisallowAllFilter
// denies everything. Used to exercise both filter outcomes.
var testAllowAllFilter = func(kind entityKind, uid, parent string) bool {
	return true
}
var testDisallowAllFilter = func(kind entityKind, uid, parent string) bool {
	return false
}

// testOrgID is the single organization used by the index tests.
var testOrgID int64 = 1
// initTestOrgIndexFromDashes builds a search index over the given dashboards
// with a no-op document extender and returns the test org's index.
func initTestOrgIndexFromDashes(t *testing.T, dashboards []dashboard) *orgIndex {
	t.Helper()
	return initTestIndexFromDashesExtended(t, dashboards, &NoopDocumentExtender{}).perOrgIndex[testOrgID]
}
// initTestOrgIndexFromDashesExtended is like initTestOrgIndexFromDashes but
// indexes with a custom document extender.
func initTestOrgIndexFromDashesExtended(t *testing.T, dashboards []dashboard, extender DocumentExtender) *orgIndex {
	t.Helper()
	return initTestIndexFromDashesExtended(t, dashboards, extender).perOrgIndex[testOrgID]
}
// initTestIndexFromDashes builds a full searchIndex over the given dashboards
// with a no-op document extender.
func initTestIndexFromDashes(t *testing.T, dashboards []dashboard) *searchIndex {
	t.Helper()
	return initTestIndexFromDashesExtended(t, dashboards, &NoopDocumentExtender{})
}
// initTestIndexFromDashesExtended builds a searchIndex from a stub loader and
// asserts that every supplied dashboard was indexed for the test org.
func initTestIndexFromDashesExtended(t *testing.T, dashboards []dashboard, extender DocumentExtender) *searchIndex {
	t.Helper()
	loader := &testDashboardLoader{dashboards: dashboards}
	idx := newSearchIndex(loader, &store.MockEntityEventsService{}, extender, tracing.InitializeTracerForTest(), featuremgmt.WithFeatures(), setting.SearchSettings{})
	require.NotNil(t, idx)
	count, err := idx.buildOrgIndex(context.Background(), testOrgID)
	require.NoError(t, err)
	require.Equal(t, len(loader.dashboards), count)
	return idx
}
// checkSearchResponse runs a search and compares the full response against a
// golden JSON file, using a no-op query extender.
func checkSearchResponse(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery) {
	t.Helper()
	checkSearchResponseExtended(t, fileName, index, filter, query, &NoopQueryExtender{})
}
// checkSearchResponseExtended runs a search with the given query extender and
// golden-compares the full response under testdata/<fileName>.
func checkSearchResponseExtended(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery, extender QueryExtender) {
	t.Helper()
	resp := doSearchQuery(context.Background(), testLogger, index, filter, query, extender, "/pfix")
	experimental.CheckGoldenJSONResponse(t, "testdata", fileName, resp, true)
}
// getFrameWithNames extracts the name (and score) fields of the first
// response frame into a fresh frame, for order-sensitive golden comparisons.
// Returns nil when the response has no frames, no name field, or an empty
// name field.
func getFrameWithNames(resp *backend.DataResponse) *data.Frame {
	if resp == nil || len(resp.Frames) == 0 {
		return nil
	}
	frame := resp.Frames[0]
	nameField, idx := frame.FieldByName(documentFieldName)
	// Fix: check idx first — when the field is missing, FieldByName returns a
	// nil field and calling Len() on it panics. The original evaluated
	// nameField.Len() before idx == -1.
	if idx == -1 || nameField.Len() == 0 {
		return nil
	}
	scoreField, _ := frame.FieldByName("score")
	return data.NewFrame("ordering frame", nameField, scoreField)
}
// checkSearchResponseOrdering golden-compares only the ordering of results
// (name/score fields), using a no-op query extender.
func checkSearchResponseOrdering(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery) {
	t.Helper()
	checkSearchResponseOrderingExtended(t, fileName, index, filter, query, &NoopQueryExtender{})
}
// checkSearchResponseOrderingExtended runs the query with Explain enabled (so
// scores are populated) and golden-compares the extracted name/score frame.
func checkSearchResponseOrderingExtended(t *testing.T, fileName string, index *orgIndex, filter ResourceFilter, query DashboardQuery, extender QueryExtender) {
	t.Helper()
	query.Explain = true
	resp := doSearchQuery(context.Background(), testLogger, index, filter, query, extender, "/pfix")
	experimental.CheckGoldenJSONFrame(t, "testdata", fileName, getFrameWithNames(resp), true)
}
// testDashboards is the minimal fixture: two dashboards with distinct names.
var testDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "test",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "boom",
		},
	},
}
func TestDashboardIndex(t *testing.T) {
t.Run("basic-search", func(t *testing.T) {
index := initTestOrgIndexFromDashes(t, testDashboards)
checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
DashboardQuery{Query: "boom"},
)
})
t.Run("basic-filter", func(t *testing.T) {
index := initTestOrgIndexFromDashes(t, testDashboards)
checkSearchResponse(t, filepath.Base(t.Name()), index, testDisallowAllFilter,
DashboardQuery{Query: "boom"},
)
})
}
// TestDashboardIndexUpdates verifies incremental index mutations: dashboard
// delete, create, and update.
func TestDashboardIndexUpdates(t *testing.T) {
	t.Run("dashboard-delete", func(t *testing.T) {
		index := initTestIndexFromDashes(t, testDashboards)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		err := index.removeDashboard(context.Background(), orgIdx, "2")
		require.NoError(t, err)
		// "boom" was the name of dashboard 2; the search should now be empty.
		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "boom"},
		)
	})
	t.Run("dashboard-create", func(t *testing.T) {
		index := initTestIndexFromDashes(t, testDashboards)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		err := index.updateDashboard(context.Background(), testOrgID, orgIdx, dashboard{
			id:  3,
			uid: "3",
			summary: &entity.EntitySummary{
				Name: "created",
			},
		})
		require.NoError(t, err)
		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "created"},
		)
	})
	t.Run("dashboard-update", func(t *testing.T) {
		index := initTestIndexFromDashes(t, testDashboards)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		// Rename dashboard 2 from "boom" to "nginx" and search the new name.
		err := index.updateDashboard(context.Background(), testOrgID, orgIdx, dashboard{
			id:  2,
			uid: "2",
			summary: &entity.EntitySummary{
				Name: "nginx",
			},
		})
		require.NoError(t, err)
		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "nginx"},
		)
	})
}
// testSortDashboards provides names at both ends of lexical order for the
// sorting tests.
var testSortDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "a-test",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "z-test",
		},
	},
}
// testExtender bundles a document extender (applied at index time) with a
// query extender (applied at search time) for the sort tests.
type testExtender struct {
	documentExtender DocumentExtender
	queryExtender    QueryExtender
}

func (t *testExtender) GetDocumentExtender() DocumentExtender {
	return t.documentExtender
}

func (t *testExtender) GetQueryExtender() QueryExtender {
	return t.queryExtender
}
// testDocumentExtender delegates dashboard extension to a configurable
// function, ignoring org ID and UIDs.
type testDocumentExtender struct {
	ExtendDashboardFunc ExtendDashboardFunc
}

func (t *testDocumentExtender) GetDashboardExtender(_ int64, _ ...string) ExtendDashboardFunc {
	return t.ExtendDashboardFunc
}
// testQueryExtender delegates framer construction to a configurable function.
type testQueryExtender struct {
	getFramer func(frame *data.Frame) FramerFunc
}

func (t *testQueryExtender) GetFramer(frame *data.Frame) FramerFunc {
	return t.getFramer(frame)
}
// TestDashboardIndexSort checks ascending/descending sorting on a custom
// numeric field that is attached by the document extender at index time and
// surfaced into the response frame by the query extender.
func TestDashboardIndexSort(t *testing.T) {
	// i increments across extended documents, giving each an increasing value.
	var i float64
	extender := &testExtender{
		documentExtender: &testDocumentExtender{
			ExtendDashboardFunc: func(uid string, doc *bluge.Document) error {
				doc.AddField(bluge.NewNumericField("test", i).StoreValue().Sortable())
				i++
				return nil
			},
		},
		queryExtender: &testQueryExtender{
			// Decode the stored "test" values into a "test num" frame field.
			getFramer: func(frame *data.Frame) FramerFunc {
				testNum := data.NewFieldFromFieldType(data.FieldTypeFloat64, 0)
				testNum.Name = "test num"
				frame.Fields = append(
					frame.Fields,
					testNum,
				)
				return func(field string, value []byte) {
					if field == "test" {
						if num, err := bluge.DecodeNumericFloat64(value); err == nil {
							testNum.Append(num)
							return
						}
					}
				}
			},
		},
	}
	t.Run("sort-asc", func(t *testing.T) {
		index := initTestOrgIndexFromDashesExtended(t, testSortDashboards, extender.GetDocumentExtender())
		checkSearchResponseExtended(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "*", Sort: "test"}, extender.GetQueryExtender(),
		)
	})
	t.Run("sort-desc", func(t *testing.T) {
		index := initTestOrgIndexFromDashesExtended(t, testSortDashboards, extender.GetDocumentExtender())
		checkSearchResponseExtended(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "*", Sort: "-test"}, extender.GetQueryExtender(),
		)
	})
}
// testPrefixDashboards provides multi-word names for prefix-matching tests.
var testPrefixDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "Archer Data System",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "Document Sync repo",
		},
	},
}
// TestDashboardIndex_PrefixSearch checks prefix matching at the start and in
// the middle of a name, in both original and lower case.
func TestDashboardIndex_PrefixSearch(t *testing.T) {
	t.Run("prefix-search-beginning", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "Arch"},
		)
	})
	t.Run("prefix-search-middle", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "Syn"},
		)
	})
	t.Run("prefix-search-beginning-lower", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "arch"},
		)
	})
	t.Run("prefix-search-middle-lower", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "syn"},
		)
	})
}
// TestDashboardIndex_MultipleTokensInRow checks queries containing several
// adjacent tokens, each matched as a prefix, in varying case.
func TestDashboardIndex_MultipleTokensInRow(t *testing.T) {
	t.Run("multiple-tokens-beginning", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "Archer da"},
		)
	})
	t.Run("multiple-tokens-beginning-lower", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "da archer"},
		)
	})
	// Not sure it is great this matches, but
	t.Run("multiple-tokens-middle", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "ar Da"},
		)
	})
	t.Run("multiple-tokens-middle-lower", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, testPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "doc sy"},
		)
	})
}
// longPrefixDashboards has a name whose first word exceeds the indexed edge
// n-gram length (see ngramEdgeFilterMaxLength).
var longPrefixDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "Eyjafjallajökull Eruption data",
		},
	},
}
// TestDashboardIndex_PrefixNgramExceeded checks matching for a query prefix
// longer than the maximum indexed edge n-gram.
func TestDashboardIndex_PrefixNgramExceeded(t *testing.T) {
	t.Run("prefix-search-ngram-exceeded", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, longPrefixDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "Eyjafjallajöku"},
		)
	})
}
// scatteredTokensDashboards have long names whose words overlap, so queries
// with non-adjacent tokens can be tested.
var scatteredTokensDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "Three can keep a secret, if two of them are dead (Benjamin Franklin)",
		},
	},
	{
		id:  3,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "A secret is powerful when it is empty (Umberto Eco)",
		},
	},
}
// TestDashboardIndex_MultipleTokensScattered checks that query tokens match
// even when they appear far apart (and out of order) in the name.
func TestDashboardIndex_MultipleTokensScattered(t *testing.T) {
	t.Run("scattered-tokens-match", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, scatteredTokensDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "dead secret"},
		)
	})
	t.Run("scattered-tokens-match-reversed", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, scatteredTokensDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "powerful secret"},
		)
	})
}
// dashboardsWithFolders: one folder (uid "1"), two dashboards inside it with
// panels, and one general-folder dashboard with a panel.
var dashboardsWithFolders = []dashboard{
	{
		id:       1,
		uid:      "1",
		isFolder: true,
		summary: &entity.EntitySummary{
			Name: "My folder",
		},
	},
	{
		id:        2,
		uid:       "2",
		folderID:  1,
		folderUID: "1",
		summary: &entity.EntitySummary{
			Name: "Dashboard in folder 1",
			Nested: []*entity.EntitySummary{
				newNestedPanel(1, 2, "Panel 1"),
				newNestedPanel(2, 2, "Panel 2"),
			},
		},
	},
	{
		id:        3,
		uid:       "3",
		folderID:  1,
		folderUID: "1",
		summary: &entity.EntitySummary{
			Name: "Dashboard in folder 2",
			Nested: []*entity.EntitySummary{
				newNestedPanel(3, 3, "Panel 3"),
			},
		},
	},
	{
		id:  4,
		uid: "4",
		summary: &entity.EntitySummary{
			Name: "One more dash",
			Nested: []*entity.EntitySummary{
				newNestedPanel(4, 4, "Panel 4"),
			},
		},
	},
}
// TestDashboardIndex_Folders covers folder indexing, the folder location
// reported for dashboards, and cascading removal of dashboards/panels when
// their folder is removed.
func TestDashboardIndex_Folders(t *testing.T) {
	t.Run("folders-indexed", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, dashboardsWithFolders)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "My folder", Kind: []string{string(entityKindFolder)}},
		)
	})
	t.Run("folders-dashboard-has-folder", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, dashboardsWithFolders)
		// TODO: golden file compare does not work here.
		resp := doSearchQuery(context.Background(), testLogger, index, testAllowAllFilter,
			DashboardQuery{Query: "Dashboard in folder", Kind: []string{string(entityKindDashboard)}},
			&NoopQueryExtender{}, "")
		custom, ok := resp.Frames[0].Meta.Custom.(*customMeta)
		// Fix: assert the type assertion succeeded BEFORE dereferencing custom.
		// Previously custom.Count was read first, so a failed assertion caused a
		// nil-pointer panic instead of the intended failure message. This also
		// matches the ordering used in TestDashboardIndex_Panels.
		require.True(t, ok, fmt.Sprintf("actual type: %T", resp.Frames[0].Meta.Custom))
		require.Equal(t, uint64(2), custom.Count)
		require.Equal(t, "/dashboards/f/1/", custom.Locations["1"].URL)
	})
	t.Run("folders-dashboard-removed-on-folder-removed", func(t *testing.T) {
		index := initTestIndexFromDashes(t, dashboardsWithFolders)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		err := index.removeFolder(context.Background(), orgIdx, "1")
		require.NoError(t, err)
		// In response we expect one dashboard which does not belong to removed folder.
		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "dash", Kind: []string{string(entityKindDashboard)}},
		)
	})
	t.Run("folders-panels-removed-on-folder-removed", func(t *testing.T) {
		index := initTestIndexFromDashes(t, dashboardsWithFolders)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		err := index.removeFolder(context.Background(), orgIdx, "1")
		require.NoError(t, err)
		resp := doSearchQuery(context.Background(), testLogger, orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "Panel", Kind: []string{string(entityKindPanel)}},
			&NoopQueryExtender{}, "")
		custom, ok := resp.Frames[0].Meta.Custom.(*customMeta)
		require.True(t, ok)
		require.Equal(t, uint64(1), custom.Count) // 1 panel which does not belong to dashboards in removed folder.
	})
}
// dashboardsWithPanels: a single dashboard carrying two nested panels.
var dashboardsWithPanels = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "My Dash",
			Nested: []*entity.EntitySummary{
				newNestedPanel(1, 1, "Panel 1"),
				newNestedPanel(2, 1, "Panel 2"),
			},
		},
	},
}
// newNestedPanel builds a panel entity summary with UID "<dashId>#<id>".
func newNestedPanel(id, dashId int64, name string) *entity.EntitySummary {
	return &entity.EntitySummary{
		Kind: "panel",
		UID:  fmt.Sprintf("%d#%d", dashId, id),
		Name: name,
	}
}
// TestDashboardIndex_Panels covers panel indexing (including the reported
// dashboard location) and panel removal when the owning dashboard is removed.
func TestDashboardIndex_Panels(t *testing.T) {
	t.Run("panels-indexed", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, dashboardsWithPanels)
		// TODO: golden file compare does not work here.
		resp := doSearchQuery(
			context.Background(), testLogger, index, testAllowAllFilter,
			DashboardQuery{Query: "Panel", Kind: []string{string(entityKindPanel)}},
			&NoopQueryExtender{}, "")
		custom, ok := resp.Frames[0].Meta.Custom.(*customMeta)
		require.True(t, ok, fmt.Sprintf("actual type: %T", resp.Frames[0].Meta.Custom))
		require.Equal(t, uint64(2), custom.Count)
		require.Equal(t, "/d/1/", custom.Locations["1"].URL)
	})
	t.Run("panels-panel-removed-on-dashboard-removed", func(t *testing.T) {
		index := initTestIndexFromDashes(t, dashboardsWithPanels)
		orgIdx, ok := index.getOrgIndex(testOrgID)
		require.True(t, ok)
		err := index.removeDashboard(context.Background(), orgIdx, "1")
		require.NoError(t, err)
		checkSearchResponse(t, filepath.Base(t.Name()), orgIdx, testAllowAllFilter,
			DashboardQuery{Query: "Panel", Kind: []string{string(entityKindPanel)}},
		)
	})
}
// punctuationSplitNgramDashboards exercise tokenization across punctuation
// ("heat-torkel" splits into "heat" and "torkel").
var punctuationSplitNgramDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "heat-torkel",
		},
	},
	{
		id:  2,
		uid: "2",
		summary: &entity.EntitySummary{
			Name: "topology heatmap",
		},
	},
}
// TestDashboardIndex_PunctuationNgram checks n-gram matching against names
// whose tokens are separated by punctuation.
func TestDashboardIndex_PunctuationNgram(t *testing.T) {
	t.Run("ngram-punctuation-split", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, punctuationSplitNgramDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "tork he"},
		)
	})
	t.Run("ngram-simple", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, punctuationSplitNgramDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "hea"},
		)
	})
}
// camelCaseNgramDashboards exercise camelCase token splitting
// ("heatTorkel" splits into "heat" and "torkel").
var camelCaseNgramDashboards = []dashboard{
	{
		id:  1,
		uid: "1",
		summary: &entity.EntitySummary{
			Name: "heatTorkel",
		},
	},
}
// TestDashboardIndex_CamelCaseNgram checks that a camelCase name matches a
// query for its second (inner) word.
func TestDashboardIndex_CamelCaseNgram(t *testing.T) {
	t.Run("ngram-camel-case-split", func(t *testing.T) {
		index := initTestOrgIndexFromDashes(t, camelCaseNgramDashboards)
		checkSearchResponse(t, filepath.Base(t.Name()), index, testAllowAllFilter,
			DashboardQuery{Query: "tork"},
		)
	})
}
// dashboardsWithTitles creates one dashboard per name, assigning sequential
// IDs/UIDs starting at 1.
func dashboardsWithTitles(names ...string) []dashboard {
	out := make([]dashboard, 0, len(names))
	for idx, title := range names {
		id := int64(idx + 1)
		out = append(out, dashboard{
			id:  id,
			uid: fmt.Sprintf("%d", id),
			summary: &entity.EntitySummary{
				Name: title,
			},
		})
	}
	return out
}
// TestDashboardIndex_MultiTermPrefixMatch golden-checks the result ORDERING
// for multi-term prefix queries against realistic dashboard title sets.
func TestDashboardIndex_MultiTermPrefixMatch(t *testing.T) {
	var tests = []struct {
		dashboards []dashboard
		query      string
	}{
		{
			dashboards: dashboardsWithTitles(
				"Panel Tests - Bar Gauge 2",
				"Prometheus 2.0",
				"Prometheus 2.0 Stats",
				"Prometheus 20.0",
				"Prometheus Second Word",
				"Prometheus Stats",
				"dynamic (2)",
				"prometheus histogram",
				"prometheus histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Prometheus 2.",
		},
		{
			dashboards: dashboardsWithTitles(
				"From AAA",
				"Grafana Dev Overview & Home",
				"Home automation",
				"Prometheus 2.0",
				"Prometheus 2.0 Stats",
				"Prometheus 20.0",
				"Prometheus Stats",
				"Transforms - config from query",
				"iot-testing",
				"prom style with exemplars",
				"prop history",
				"simple frame",
				"with-hide-from",
				"xy broke",
			),
			query: "Prome",
		},
		{
			dashboards: dashboardsWithTitles(
				"Panel Tests - Bar Gauge 2",
				"Prometheus 2.0",
				"Prometheus 2.0 Stats",
				"Prometheus 20.0",
				"Prometheus Second Word",
				"Prometheus Stats",
				"dynamic (2)",
				"prometheus histogram",
				"prometheus histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Prometheus stat",
		},
		{
			dashboards: dashboardsWithTitles(
				"Loki Tests - Bar Gauge 2",
				"Loki 2.0",
				"Loki 2.0 Stats",
				"Loki 20.0",
				"Loki Second Word",
				"Loki Stats",
				"dynamic (2)",
				"Loki histogram",
				"Loki histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Loki 2.",
		},
		{
			dashboards: dashboardsWithTitles(
				"Loki Tests - Bar Gauge 2",
				"Loki 2.0",
				"Loki 2.0 Stats",
				"Loki 20.0",
				"Loki Second Word",
				"Loki Stats",
				"dynamic (2)",
				"Loki histogram",
				"Loki histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Lok",
		},
		{
			dashboards: dashboardsWithTitles(
				"Loki Tests - Bar Gauge 2",
				"Loki 2.0",
				"Loki 2.0 Stats",
				"Loki 20.0",
				"Loki Second Word",
				"Loki Stats",
				"dynamic (2)",
				"Loki histogram",
				"Loki histogram2",
				"roci-simple-2",
				"x not y",
			),
			query: "Loki stats",
		},
	}
	// Each subtest name embeds the test index and query, and doubles as the
	// golden file name.
	for i, tt := range tests {
		t.Run(fmt.Sprintf("ordering-tests-%d-[%s]", i+1, tt.query), func(t *testing.T) {
			index := initTestOrgIndexFromDashes(t, tt.dashboards)
			checkSearchResponseOrdering(t, filepath.Base(t.Name()), index, testAllowAllFilter,
				DashboardQuery{Query: tt.query},
			)
		})
	}
}

View File

@@ -0,0 +1,47 @@
package searchV2
import (
"strings"
"github.com/blugelabs/bluge/analysis"
"github.com/blugelabs/bluge/analysis/token"
"github.com/blugelabs/bluge/analysis/tokenizer"
)
// punctuationReplacer rewrites every ASCII punctuation character to a single
// space; it is built once at package init.
var punctuationReplacer *strings.Replacer

func init() {
	const punctuation = "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~"
	pairs := make([]string, 0, len(punctuation)*2)
	for _, ch := range punctuation {
		pairs = append(pairs, string(ch), " ")
	}
	punctuationReplacer = strings.NewReplacer(pairs...)
}
// punctuationCharFilter is a char filter that replaces punctuation with
// spaces so the whitespace tokenizer splits on punctuation boundaries.
type punctuationCharFilter struct{}

func (t *punctuationCharFilter) Filter(input []byte) []byte {
	cleaned := punctuationReplacer.Replace(string(input))
	return []byte(cleaned)
}
// ngramEdgeFilterMaxLength caps the edge n-gram length produced at index
// time; query prefixes longer than this cannot match via n-grams alone.
const ngramEdgeFilterMaxLength = 7

// ngramIndexAnalyzer: punctuation removal, whitespace tokenization,
// camelCase splitting, lowercasing, then front edge n-grams of length
// 1..ngramEdgeFilterMaxLength.
var ngramIndexAnalyzer = &analysis.Analyzer{
	CharFilters: []analysis.CharFilter{&punctuationCharFilter{}},
	Tokenizer:   tokenizer.NewWhitespaceTokenizer(),
	TokenFilters: []analysis.TokenFilter{
		token.NewCamelCaseFilter(),
		token.NewLowerCaseFilter(),
		token.NewEdgeNgramFilter(token.FRONT, 1, ngramEdgeFilterMaxLength),
	},
}

// ngramQueryAnalyzer applies the same normalization as ngramIndexAnalyzer but
// without n-gram expansion, so whole query tokens match indexed grams.
var ngramQueryAnalyzer = &analysis.Analyzer{
	CharFilters: []analysis.CharFilter{&punctuationCharFilter{}},
	Tokenizer:   tokenizer.NewWhitespaceTokenizer(),
	TokenFilters: []analysis.TokenFilter{
		token.NewCamelCaseFilter(),
		token.NewLowerCaseFilter(),
	},
}

View File

@@ -0,0 +1,61 @@
package searchV2
import (
"reflect"
"testing"
"github.com/stretchr/testify/require"
)
// Test_punctuationCharFilter_Filter verifies punctuation-to-space
// replacement on representative inputs.
func Test_punctuationCharFilter_Filter(t *testing.T) {
	cases := []struct {
		name  string
		input []byte
		want  []byte
	}{
		{name: "1", input: []byte("x-Rays"), want: []byte("x Rays")},
		{name: "2", input: []byte("x.Rays"), want: []byte("x Rays")},
		{name: "3", input: []byte("[x,Rays]"), want: []byte(" x Rays ")},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			filter := &punctuationCharFilter{}
			got := filter.Filter(tc.input)
			if !reflect.DeepEqual(got, tc.want) {
				t.Errorf("Filter() = %v, want %v", string(got), string(tc.want))
			}
		})
	}
}
// TestNgramIndexAnalyzer verifies the full index-time analysis chain:
// punctuation and camelCase splitting, lowercasing, edge n-gram expansion,
// and the 7-rune n-gram cap — including non-ASCII (Cyrillic) input.
func TestNgramIndexAnalyzer(t *testing.T) {
	stream := ngramIndexAnalyzer.Analyze([]byte("x-rays.and.xRays, and НемногоКириллицы"))
	expectedTerms := []string{"x", "r", "ra", "ray", "rays", "a", "an", "and", "x", "r", "ra", "ray", "rays", "a", "an", "and", "н", "не", "нем", "немн", "немно", "немног", "немного", "к", "ки", "кир", "кири", "кирил", "кирилл", "кирилли"}
	actualTerms := make([]string, 0, len(stream))
	for _, t := range stream {
		actualTerms = append(actualTerms, string(t.Term))
	}
	require.Equal(t, expectedTerms, actualTerms)
}

View File

@@ -0,0 +1,90 @@
{
"name": "Panel Tests - Graph - Gradient Area Fills",
"labels": {
"gdev": "",
"graph": "",
"panel-tests": ""
},
"URL": "/d/graph-gradient-area-fills.json/panel-tests-graph-gradient-area-fills",
"nested": [
{
"uid": "graph-gradient-area-fills.json#2",
"kind": "panel",
"name": "Req/s",
"URL": "/d/graph-gradient-area-fills.json/panel-tests-graph-gradient-area-fills?viewPanel=2",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-gradient-area-fills.json#11",
"kind": "panel",
"name": "Req/s",
"URL": "/d/graph-gradient-area-fills.json/panel-tests-graph-gradient-area-fills?viewPanel=11",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-gradient-area-fills.json#7",
"kind": "panel",
"name": "Memory",
"URL": "/d/graph-gradient-area-fills.json/panel-tests-graph-gradient-area-fills?viewPanel=7",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-gradient-area-fills.json#10",
"kind": "panel",
"name": "Req/s",
"URL": "/d/graph-gradient-area-fills.json/panel-tests-graph-gradient-area-fills?viewPanel=10",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}
],
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}

View File

@@ -0,0 +1,178 @@
{
"name": "Panel Tests - shared tooltips",
"labels": {
"gdev": "",
"graph-ng": "",
"panel-tests": ""
},
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips",
"nested": [
{
"uid": "graph-shared-tooltips.json#4",
"kind": "panel",
"name": "two units",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=4",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "timeseries"
}
]
},
{
"uid": "graph-shared-tooltips.json#13",
"kind": "panel",
"name": "Speed vs Temperature (XY)",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=13",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "xychart"
},
{
"kind": "transform",
"type": "organize"
},
{
"kind": "transform",
"type": "seriesToColumns"
}
]
},
{
"uid": "graph-shared-tooltips.json#2",
"kind": "panel",
"name": "Cursor info",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=2",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "debug"
}
]
},
{
"uid": "graph-shared-tooltips.json#5",
"kind": "panel",
"name": "Only temperature",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=5",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "timeseries"
}
]
},
{
"uid": "graph-shared-tooltips.json#9",
"kind": "panel",
"name": "Only Speed",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=9",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "timeseries"
}
]
},
{
"uid": "graph-shared-tooltips.json#11",
"kind": "panel",
"name": "Panel Title",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=11",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "timeseries"
}
]
},
{
"uid": "graph-shared-tooltips.json#8",
"kind": "panel",
"name": "flot panel (temperature)",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=8",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-shared-tooltips.json#10",
"kind": "panel",
"name": "flot panel (no units)",
"URL": "/d/graph-shared-tooltips.json/panel-tests-shared-tooltips?viewPanel=10",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}
],
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "debug"
},
{
"kind": "panel",
"type": "graph"
},
{
"kind": "panel",
"type": "timeseries"
},
{
"kind": "panel",
"type": "xychart"
}
]
}

View File

@@ -0,0 +1,107 @@
{
"name": "Panel Tests - Graph Time Regions",
"labels": {
"gdev": "",
"graph": "",
"panel-tests": ""
},
"URL": "/d/graph-time-regions.json/panel-tests-graph-time-regions",
"nested": [
{
"uid": "graph-time-regions.json#2",
"kind": "panel",
"name": "Business Hours",
"URL": "/d/graph-time-regions.json/panel-tests-graph-time-regions?viewPanel=2",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-time-regions.json#4",
"kind": "panel",
"name": "Sunday's 20-23",
"URL": "/d/graph-time-regions.json/panel-tests-graph-time-regions?viewPanel=4",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-time-regions.json#3",
"kind": "panel",
"name": "Each day of week",
"URL": "/d/graph-time-regions.json/panel-tests-graph-time-regions?viewPanel=3",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-time-regions.json#5",
"kind": "panel",
"name": "05:00",
"URL": "/d/graph-time-regions.json/panel-tests-graph-time-regions?viewPanel=5",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph-time-regions.json#7",
"kind": "panel",
"name": "From 22:00 to 00:30 (crossing midnight)",
"URL": "/d/graph-time-regions.json/panel-tests-graph-time-regions?viewPanel=7",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}
],
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}

View File

@@ -0,0 +1,377 @@
{
"name": "Panel Tests - Graph",
"labels": {
"gdev": "",
"graph": "",
"panel-tests": ""
},
"URL": "/d/graph_tests.json/panel-tests-graph",
"nested": [
{
"uid": "graph_tests.json#1",
"kind": "panel",
"name": "No Data Points Warning",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=1",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#2",
"kind": "panel",
"name": "Datapoints Outside Range Warning",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=2",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#3",
"kind": "panel",
"name": "Random walk series",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=3",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#4",
"kind": "panel",
"name": "Millisecond res x-axis and tooltip",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=4",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#6",
"kind": "panel",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=6",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "text"
}
]
},
{
"uid": "graph_tests.json#5",
"kind": "panel",
"name": "2 yaxis and axis labels",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=5",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#7",
"kind": "panel",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=7",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "text"
}
]
},
{
"uid": "graph_tests.json#8",
"kind": "panel",
"name": "null value connected",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=8",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#10",
"kind": "panel",
"name": "null value null as zero",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=10",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#13",
"kind": "panel",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=13",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "text"
}
]
},
{
"uid": "graph_tests.json#9",
"kind": "panel",
"name": "Stacking value ontop of nulls",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=9",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#14",
"kind": "panel",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=14",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "text"
}
]
},
{
"uid": "graph_tests.json#12",
"kind": "panel",
"name": "Stacking all series null segment",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=12",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#15",
"kind": "panel",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=15",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "text"
}
]
},
{
"uid": "graph_tests.json#21",
"kind": "panel",
"name": "Null between points",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=21",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#22",
"kind": "panel",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=22",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "text"
}
]
},
{
"uid": "graph_tests.json#20",
"kind": "panel",
"name": "Legend Table Single Series Should Take Minimum Height",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=20",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#16",
"kind": "panel",
"name": "Legend Table No Scroll Visible",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=16",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#17",
"kind": "panel",
"name": "Legend Table Should Scroll",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=17",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#18",
"kind": "panel",
"name": "Legend Table No Scroll Visible",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=18",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_tests.json#19",
"kind": "panel",
"name": "Legend Table No Scroll Visible",
"URL": "/d/graph_tests.json/panel-tests-graph?viewPanel=19",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}
],
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
},
{
"kind": "panel",
"type": "text"
}
]
}

View File

@@ -0,0 +1,174 @@
{
"name": "Panel Tests - Graph - Y axis ticks",
"labels": {
"gdev": "",
"panel-tests": ""
},
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks",
"nested": [
{
"uid": "graph_y_axis.json#7",
"kind": "panel",
"name": "Data from 0 - 10K (unit short)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=7",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#5",
"kind": "panel",
"name": "Data from 0 - 10K (unit bytes metric)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=5",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#4",
"kind": "panel",
"name": "Data from 0 - 10K (unit bytes IEC)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=4",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#2",
"kind": "panel",
"name": "Data from 0 - 10K (unit short)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=2",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#3",
"kind": "panel",
"name": "Data from 0.0002 - 0.001 (unit short)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=3",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#6",
"kind": "panel",
"name": "Data from 12000 - 30000 (unit ms)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=6",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#9",
"kind": "panel",
"name": "Data from 0 - 1B (unit short)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=9",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#10",
"kind": "panel",
"name": "Data from 0 - 1B (unit bytes)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=10",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
},
{
"uid": "graph_y_axis.json#8",
"kind": "panel",
"name": "Data from 12000 - 30000 (unit ms)",
"URL": "/d/graph_y_axis.json/panel-tests-graph-y-axis-ticks?viewPanel=8",
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}
],
"references": [
{
"kind": "ds",
"type": "default.type",
"UID": "default.uid"
},
{
"kind": "panel",
"type": "graph"
}
]
}

View File

@@ -0,0 +1,136 @@
// Code generated by mockery v2.53.4. DO NOT EDIT.
package searchV2
import (
context "context"
backend "github.com/grafana/grafana-plugin-sdk-go/backend"
mock "github.com/stretchr/testify/mock"
user "github.com/grafana/grafana/pkg/services/user"
)
// MockSearchService is an autogenerated mock type for the SearchService type
type MockSearchService struct {
	mock.Mock
}

// DoDashboardQuery provides a mock function with given fields: ctx, _a1, orgId, query
func (_m *MockSearchService) DoDashboardQuery(ctx context.Context, _a1 *backend.User, orgId int64, query DashboardQuery) *backend.DataResponse {
	ret := _m.Called(ctx, _a1, orgId, query)

	if len(ret) == 0 {
		panic("no return value specified for DoDashboardQuery")
	}

	var r0 *backend.DataResponse
	// A registered return function takes precedence over a plain return value.
	if rf, ok := ret.Get(0).(func(context.Context, *backend.User, int64, DashboardQuery) *backend.DataResponse); ok {
		r0 = rf(ctx, _a1, orgId, query)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*backend.DataResponse)
		}
	}

	return r0
}

// IsDisabled provides a mock function with no fields
func (_m *MockSearchService) IsDisabled() bool {
	ret := _m.Called()

	if len(ret) == 0 {
		panic("no return value specified for IsDisabled")
	}

	var r0 bool
	if rf, ok := ret.Get(0).(func() bool); ok {
		r0 = rf()
	} else {
		r0 = ret.Get(0).(bool)
	}

	return r0
}

// IsReady provides a mock function with given fields: ctx, orgId
func (_m *MockSearchService) IsReady(ctx context.Context, orgId int64) IsSearchReadyResponse {
	ret := _m.Called(ctx, orgId)

	if len(ret) == 0 {
		panic("no return value specified for IsReady")
	}

	var r0 IsSearchReadyResponse
	if rf, ok := ret.Get(0).(func(context.Context, int64) IsSearchReadyResponse); ok {
		r0 = rf(ctx, orgId)
	} else {
		r0 = ret.Get(0).(IsSearchReadyResponse)
	}

	return r0
}

// RegisterDashboardIndexExtender provides a mock function with given fields: ext
func (_m *MockSearchService) RegisterDashboardIndexExtender(ext DashboardIndexExtender) {
	_m.Called(ext)
}

// Run provides a mock function with given fields: ctx
func (_m *MockSearchService) Run(ctx context.Context) error {
	ret := _m.Called(ctx)

	if len(ret) == 0 {
		panic("no return value specified for Run")
	}

	var r0 error
	if rf, ok := ret.Get(0).(func(context.Context) error); ok {
		r0 = rf(ctx)
	} else {
		r0 = ret.Error(0)
	}

	return r0
}

// TriggerReIndex provides a mock function with no fields
func (_m *MockSearchService) TriggerReIndex() {
	_m.Called()
}

// doDashboardQuery provides a mock function with given fields: ctx, _a1, orgId, query
func (_m *MockSearchService) doDashboardQuery(ctx context.Context, _a1 *user.SignedInUser, orgId int64, query DashboardQuery) *backend.DataResponse {
	ret := _m.Called(ctx, _a1, orgId, query)

	if len(ret) == 0 {
		panic("no return value specified for doDashboardQuery")
	}

	var r0 *backend.DataResponse
	if rf, ok := ret.Get(0).(func(context.Context, *user.SignedInUser, int64, DashboardQuery) *backend.DataResponse); ok {
		r0 = rf(ctx, _a1, orgId, query)
	} else {
		if ret.Get(0) != nil {
			r0 = ret.Get(0).(*backend.DataResponse)
		}
	}

	return r0
}

// NewMockSearchService creates a new instance of MockSearchService. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockSearchService(t interface {
	mock.TestingT
	Cleanup(func())
}) *MockSearchService {
	mock := &MockSearchService{}
	mock.Mock.Test(t)

	t.Cleanup(func() { mock.AssertExpectations(t) })

	return mock
}

View File

@@ -0,0 +1,289 @@
package searchV2
import (
"context"
"errors"
"fmt"
"time"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"go.opentelemetry.io/otel"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/services/store"
"github.com/grafana/grafana/pkg/services/user"
"github.com/grafana/grafana/pkg/setting"
)
var (
	namespace = "grafana"
	subsystem = "search"

	// Incremented when a dashboard search request cannot be served yet
	// (indexing still in progress), partitioned by reason.
	dashboardSearchNotServedRequestsCounter = promauto.NewCounterVec(
		prometheus.CounterOpts{
			Namespace: namespace,
			Subsystem: subsystem,
			Name:      "dashboard_search_requests_not_served_total",
			Help:      "A counter for dashboard search requests that could not be served due to an ongoing search engine indexing",
		},
		[]string{"reason"},
	)

	// Incremented once per failed dashboard search request, partitioned by
	// failure reason.
	dashboardSearchFailureRequestsCounter = promauto.NewCounterVec(
		prometheus.CounterOpts{
			Namespace: namespace,
			Subsystem: subsystem,
			Name:      "dashboard_search_failures_total",
			Help:      "A counter for failed dashboard search requests",
		},
		[]string{"reason"},
	)

	// Latency histogram for successful dashboard search requests.
	dashboardSearchSuccessRequestsDuration = promauto.NewHistogram(
		prometheus.HistogramOpts{
			Name:      "dashboard_search_successes_duration_seconds",
			Buckets:   []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 25, 50, 100},
			Namespace: namespace,
			Subsystem: subsystem,
		})

	// Latency histogram for failed dashboard search requests.
	dashboardSearchFailureRequestsDuration = promauto.NewHistogram(
		prometheus.HistogramOpts{
			Name:      "dashboard_search_failures_duration_seconds",
			Buckets:   []float64{.005, .01, .025, .05, .1, .25, .5, 1, 2.5, 5, 10, 25, 50, 100},
			Namespace: namespace,
			Subsystem: subsystem,
		})

	tracer = otel.Tracer("github.com/grafana/grafana/pkg/services/searchv2")
)
// StandardSearchService implements SearchService backed by an in-process
// dashboard index (dashboardIndex) kept in sync with the database.
type StandardSearchService struct {
	registry.BackgroundService

	cfg  *setting.Cfg
	sql  db.DB
	auth FutureAuthService // eventually injected from elsewhere
	ac   accesscontrol.Service

	orgService  org.Service
	userService user.Service

	logger         log.Logger
	dashboardIndex *searchIndex
	extender       DashboardIndexExtender
	reIndexCh      chan struct{} // buffered (size 1) manual re-index trigger; see TriggerReIndex
	features       featuremgmt.FeatureToggles
}
// IsReady reports whether the dashboard index for the given org has been
// initialized and can serve queries.
func (s *StandardSearchService) IsReady(ctx context.Context, orgId int64) IsSearchReadyResponse {
	return s.dashboardIndex.isInitialized(ctx, orgId)
}
// ProvideService wires up the v2 dashboard search service with a no-op
// extender, a simple SQL/access-control auth service, and a fresh dashboard
// index fed by the SQL loader and the entity-event store.
func ProvideService(cfg *setting.Cfg, sql db.DB, entityEventStore store.EntityEventsService,
	ac accesscontrol.Service, tracer tracing.Tracer, features featuremgmt.FeatureToggles, orgService org.Service,
	userService user.Service, folderService folder.Service) SearchService {
	logger := log.New("searchV2")
	extender := &NoopExtender{}

	authService := &simpleAuthService{
		sql:           sql,
		ac:            ac,
		folderService: folderService,
		logger:        logger,
	}

	index := newSearchIndex(
		newSQLDashboardLoader(sql, tracer, cfg.Search),
		entityEventStore,
		extender.GetDocumentExtender(),
		tracer,
		features,
		cfg.Search,
	)

	return &StandardSearchService{
		cfg:            cfg,
		sql:            sql,
		ac:             ac,
		auth:           authService,
		dashboardIndex: index,
		logger:         logger,
		extender:       extender,
		reIndexCh:      make(chan struct{}, 1),
		orgService:     orgService,
		userService:    userService,
		features:       features,
	}
}
// IsDisabled reports whether this background service should be skipped; the
// service only runs when the panelTitleSearch feature flag is enabled.
func (s *StandardSearchService) IsDisabled() bool {
	//nolint:staticcheck // not yet migrated to OpenFeature
	return !s.features.IsEnabledGlobally(featuremgmt.FlagPanelTitleSearch)
}
// Run starts the background indexer: it looks up all orgs and hands their IDs
// to the dashboard index loop together with the manual re-index channel.
// Blocks until the index loop returns.
func (s *StandardSearchService) Run(ctx context.Context) error {
	ctx, span := tracer.Start(ctx, "searchv2.Run")
	defer span.End()

	searchResult, err := s.orgService.Search(ctx, &org.SearchOrgsQuery{})
	if err != nil {
		return fmt.Errorf("can't get org list: %w", err)
	}

	ids := make([]int64, 0, len(searchResult))
	for _, o := range searchResult {
		ids = append(ids, o.ID)
	}

	return s.dashboardIndex.run(ctx, ids, s.reIndexCh)
}
// TriggerReIndex asks the background indexer to rebuild the index soon.
// Non-blocking: if a trigger is already pending the request is dropped.
func (s *StandardSearchService) TriggerReIndex() {
	select {
	case s.reIndexCh <- struct{}{}:
	default:
		// channel is full => re-index will happen soon anyway.
	}
}
// RegisterDashboardIndexExtender replaces the extender used both for query
// extension and for building index documents (effective on the next build).
func (s *StandardSearchService) RegisterDashboardIndexExtender(ext DashboardIndexExtender) {
	s.extender = ext
	s.dashboardIndex.extender = ext.GetDocumentExtender()
}
// getUser resolves the backend plugin user into a SignedInUser with
// permissions loaded for orgId.
//
// When anonymous access is enabled and the backend user carries neither email
// nor login, the configured anonymous org/role is used; otherwise the user is
// looked up by login/email. Lookup failures are logged in detail but returned
// to the caller as a generic "auth error" so no account details leak.
func (s *StandardSearchService) getUser(ctx context.Context, backendUser *backend.User, orgId int64) (*user.SignedInUser, error) {
	ctx, span := tracer.Start(ctx, "searchv2.getUser")
	defer span.End()

	// TODO: get user & user's permissions from the request context
	var usr *user.SignedInUser
	if s.cfg.Anonymous.Enabled && backendUser.Email == "" && backendUser.Login == "" {
		getOrg := org.GetOrgByNameQuery{Name: s.cfg.Anonymous.OrgName}
		orga, err := s.orgService.GetByName(ctx, &getOrg)
		if err != nil {
			s.logger.Error("Anonymous access organization error.", "org_name", s.cfg.Anonymous.OrgName, "error", err)
			return nil, err
		}

		usr = &user.SignedInUser{
			OrgID:       orga.ID,
			OrgName:     orga.Name,
			OrgRole:     org.RoleType(s.cfg.Anonymous.OrgRole),
			IsAnonymous: true,
		}
	} else {
		getSignedInUserQuery := &user.GetSignedInUserQuery{
			Login: backendUser.Login,
			Email: backendUser.Email,
			OrgID: orgId,
		}

		var err error
		usr, err = s.userService.GetSignedInUser(ctx, getSignedInUserQuery)
		if err != nil {
			s.logger.Error("Error while retrieving user", "error", err, "email", backendUser.Email, "login", getSignedInUserQuery.Login)
			return nil, errors.New("auth error")
		}

		if usr == nil {
			s.logger.Error("No user found", "email", backendUser.Email)
			return nil, errors.New("auth error")
		}
	}

	if usr.Permissions == nil {
		usr.Permissions = make(map[int64]map[string][]string)
	}

	if _, ok := usr.Permissions[orgId]; ok {
		// permissions as part of the `s.sql.GetSignedInUser` query - return early
		return usr, nil
	}

	// TODO: ensure this is cached
	permissions, err := s.ac.GetUserPermissions(ctx, usr,
		accesscontrol.Options{ReloadCache: false})
	if err != nil {
		s.logger.Error("Failed to retrieve user permissions", "error", err, "email", backendUser.Email)
		return nil, errors.New("auth error")
	}

	usr.Permissions[orgId] = accesscontrol.GroupScopesByActionContext(ctx, permissions)
	return usr, nil
}
// DoDashboardQuery authenticates the backend user and runs the dashboard
// search, recording request latency in the success or failure histogram and
// counting user-resolution failures.
func (s *StandardSearchService) DoDashboardQuery(ctx context.Context, user *backend.User, orgID int64, q DashboardQuery) *backend.DataResponse {
	ctx, span := tracer.Start(ctx, "searchv2.DoDashboardQuery")
	defer span.End()

	startedAt := time.Now()

	signedInUser, err := s.getUser(ctx, user, orgID)
	if err != nil {
		dashboardSearchFailureRequestsCounter.With(prometheus.Labels{
			"reason": "get_user_error",
		}).Inc()
		dashboardSearchFailureRequestsDuration.Observe(time.Since(startedAt).Seconds())
		return &backend.DataResponse{Error: err}
	}

	rsp := s.doDashboardQuery(ctx, signedInUser, orgID, q)

	elapsed := time.Since(startedAt).Seconds()
	if rsp.Error == nil {
		dashboardSearchSuccessRequestsDuration.Observe(elapsed)
	} else {
		dashboardSearchFailureRequestsDuration.Observe(elapsed)
	}
	return rsp
}
// doDashboardQuery runs the index query for an already-authenticated user:
// builds the permission read filter, gets (or creates) the org index, syncs
// pending entity events, then executes the search. Each failure path
// increments the failure counter with a distinct reason label.
func (s *StandardSearchService) doDashboardQuery(ctx context.Context, signedInUser *user.SignedInUser, orgID int64, q DashboardQuery) *backend.DataResponse {
	ctx, span := tracer.Start(ctx, "searchv2.doDashboardQuery")
	defer span.End()

	rsp := &backend.DataResponse{}

	// Filter restricting results to dashboards the user may read.
	filter, err := s.auth.GetDashboardReadFilter(ctx, orgID, signedInUser)
	if err != nil {
		dashboardSearchFailureRequestsCounter.With(prometheus.Labels{
			"reason": "get_dashboard_filter_error",
		}).Inc()
		rsp.Error = err
		return rsp
	}

	index, err := s.dashboardIndex.getOrCreateOrgIndex(ctx, orgID)
	if err != nil {
		dashboardSearchFailureRequestsCounter.With(prometheus.Labels{
			"reason": "get_index_error",
		}).Inc()
		rsp.Error = err
		return rsp
	}

	// Apply any pending entity events so the query sees fresh data.
	err = s.dashboardIndex.sync(ctx)
	if err != nil {
		dashboardSearchFailureRequestsCounter.With(prometheus.Labels{
			"reason": "dashboard_index_sync_error",
		}).Inc()
		rsp.Error = err
		return rsp
	}

	response := doSearchQuery(ctx, s.logger, index, filter, q, s.extender.GetQueryExtender(q), s.cfg.AppSubURL)

	if q.WithAllowedActions {
		// Best effort: a failure here is only logged, the search result is
		// still returned.
		if err := s.addAllowedActionsField(ctx, orgID, signedInUser, response); err != nil {
			s.logger.Error("Error when adding the allowedActions field", "err", err)
		}
	}

	if response.Error != nil {
		dashboardSearchFailureRequestsCounter.With(prometheus.Labels{
			"reason": "search_query_error",
		}).Inc()
	}

	return response
}

View File

@@ -0,0 +1,214 @@
package searchV2
import (
"context"
"fmt"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/accesscontrol/actest"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder/foldertest"
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/services/org/orgtest"
"github.com/grafana/grafana/pkg/services/store"
"github.com/grafana/grafana/pkg/services/user"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/tests/testsuite"
)
// TestMain delegates to the shared test-suite runner (handles test database
// setup and teardown for the package).
func TestMain(m *testing.M) {
	testsuite.Run(m)
}
// setupBenchEnv will set up a database with folderCount folders and dashboardsPerFolder dashboards per folder
// It will also set up and run the search service
// and create a signed in user object with explicit permissions on each dashboard and folder.
// The error return is always nil: failures abort the benchmark via require.
func setupBenchEnv(b *testing.B, folderCount, dashboardsPerFolder int) (*StandardSearchService, *user.SignedInUser, error) {
	sqlStore := db.InitTestDB(b)
	err := populateDB(folderCount, dashboardsPerFolder, sqlStore)
	require.NoError(b, err, "error when populating the database")

	// load all dashboards and folders
	dbLoadingBatchSize := (dashboardsPerFolder + 1) * folderCount
	cfg := &setting.Cfg{Search: setting.SearchSettings{DashboardLoadingBatchSize: dbLoadingBatchSize}}
	features := featuremgmt.WithFeatures()
	orgSvc := &orgtest.FakeOrgService{
		ExpectedOrgs: []*org.OrgDTO{{ID: 1}},
	}
	searchService, ok := ProvideService(cfg, sqlStore, store.NewDummyEntityEventsService(), actest.FakeService{},
		tracing.InitializeTracerForTest(), features, orgSvc, nil, foldertest.NewFakeService()).(*StandardSearchService)
	require.True(b, ok)

	err = runSearchService(searchService)
	require.NoError(b, err, "error when running search service")

	user := getSignedInUser(folderCount, dashboardsPerFolder)

	return searchService, user, nil
}
// getSignedInUser builds a signed-in user (user 1, org 1) holding explicit
// read permissions on every folder and dashboard created by populateDB.
// Folder UIDs are "folder1".."folder<folderCount>"; dashboard UIDs continue
// the ID sequence right after the folders.
func getSignedInUser(folderCount, dashboardsPerFolder int) *user.SignedInUser {
	folderScopes := make([]string, 0, folderCount)
	for i := 1; i <= folderCount; i++ {
		folderScopes = append(folderScopes, dashboards.ScopeFoldersProvider.GetResourceScopeUID(fmt.Sprintf("folder%d", i)))
	}

	dashCount := folderCount * dashboardsPerFolder
	dashScopes := make([]string, 0, dashCount)
	for i := 0; i < dashCount; i++ {
		// Dashboard IDs start immediately after the folder IDs.
		dashScopes = append(dashScopes, dashboards.ScopeDashboardsProvider.GetResourceScopeUID(fmt.Sprintf("dashboard%d", folderCount+1+i)))
	}

	return &user.SignedInUser{
		UserID: 1,
		OrgID:  1,
		Permissions: map[int64]map[string][]string{
			1: {
				dashboards.ActionDashboardsRead: dashScopes,
				dashboards.ActionFoldersRead:    folderScopes,
			},
		},
	}
}
// Runs initial indexing of search service (org 1 only) and starts a goroutine
// that immediately acknowledges sync requests so index syncs during searches
// never block.
func runSearchService(searchService *StandardSearchService) error {
	if err := searchService.dashboardIndex.buildInitialIndexes(context.Background(), []int64{int64(1)}); err != nil {
		return err
	}
	searchService.dashboardIndex.initialIndexingComplete = true

	// Required for sync that is called during dashboard search
	go func() {
		for {
			doneCh := <-searchService.dashboardIndex.syncCh
			close(doneCh)
		}
	}()
	return nil
}
// Populates database with dashboards and folders.
// Folders take IDs 1..folderCount; dashboards take the next
// dashboardsPerFolder*folderCount IDs and are spread round-robin across the
// folders. Inserts run in concurrent batches via actest.ConcurrentBatch.
func populateDB(folderCount, dashboardsPerFolder int, sqlStore db.DB) error {
	// Insert folders
	offset := 1 // IDs are 1-based
	if errInsert := actest.ConcurrentBatch(actest.Concurrency, folderCount, actest.BatchSize, func(start, end int) error {
		n := end - start
		folders := make([]dashboards.Dashboard, 0, n)
		now := time.Now()
		for u := start; u < end; u++ {
			folderID := int64(u + offset)
			folders = append(folders, dashboards.Dashboard{
				ID:       folderID,
				UID:      fmt.Sprintf("folder%v", folderID),
				Title:    fmt.Sprintf("folder%v", folderID),
				IsFolder: true,
				OrgID:    1,
				Created:  now,
				Updated:  now,
			})
		}

		err := sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
			if _, err := sess.Insert(folders); err != nil {
				return err
			}
			return nil
		})
		return err
	}); errInsert != nil {
		return errInsert
	}

	// Insert dashboards
	offset += folderCount // dashboard IDs continue after the folder IDs
	if errInsert := actest.ConcurrentBatch(actest.Concurrency, dashboardsPerFolder*folderCount, actest.BatchSize, func(start, end int) error {
		n := end - start
		dbs := make([]dashboards.Dashboard, 0, n)
		now := time.Now()
		for u := start; u < end; u++ {
			dashID := int64(u + offset)
			// Round-robin assignment of dashboards to folders.
			folderUID := fmt.Sprintf("folder%v", int64((u+offset)%folderCount+1))
			dbs = append(dbs, dashboards.Dashboard{
				ID:        dashID,
				UID:       fmt.Sprintf("dashboard%v", dashID),
				Title:     fmt.Sprintf("dashboard%v", dashID),
				IsFolder:  false,
				FolderUID: folderUID,
				OrgID:     1,
				Created:   now,
				Updated:   now,
			})
		}

		err := sqlStore.WithDbSession(context.Background(), func(sess *db.Session) error {
			if _, err := sess.Insert(dbs); err != nil {
				return err
			}
			return nil
		})
		return err
	}); errInsert != nil {
		return errInsert
	}

	return nil
}
// benchSearchV2 benchmarks doDashboardQuery against a database with the given
// number of folders and dashboards per folder, asserting that every folder
// and dashboard shows up in the result.
func benchSearchV2(b *testing.B, folderCount, dashboardsPerFolder int) {
	searchService, testUser, err := setupBenchEnv(b, folderCount, dashboardsPerFolder)
	require.NoError(b, err)
	b.ResetTimer()

	// Every folder plus every dashboard should be returned.
	expectedResultCount := (dashboardsPerFolder + 1) * folderCount
	for n := 0; n < b.N; n++ {
		result := searchService.doDashboardQuery(context.Background(), testUser, 1, DashboardQuery{Limit: expectedResultCount})
		require.NoError(b, result.Error)
		require.NotZero(b, len(result.Frames))

		// The "uid" field carries one entry per hit; its length is the hit count.
		for _, field := range result.Frames[0].Fields {
			if field.Name == "uid" {
				require.Equal(b, expectedResultCount, field.Len())
				break
			}
		}
	}
}
// Benchmark matrix: BenchmarkSearchV2_<folders>_<dashboardsPerFolder>.
// Trailing comments record observed per-op timings.

// Test with some dashboards and some folders
func BenchmarkSearchV2_10_10(b *testing.B) {
	benchSearchV2(b, 10, 10)
} // ~0.0002 s/op

func BenchmarkSearchV2_10_100(b *testing.B) {
	benchSearchV2(b, 10, 100)
} // ~0.002 s/op

// Test with many dashboards and only one folder
func BenchmarkSearchV2_1_1k(b *testing.B) {
	benchSearchV2(b, 1, 1000)
} // ~0.002 s/op

func BenchmarkSearchV2_1_10k(b *testing.B) {
	benchSearchV2(b, 1, 10000)
} // ~0.019 s/op

// Test with a large number of dashboards and folders
func BenchmarkSearchV2_100_100(b *testing.B) {
	benchSearchV2(b, 100, 100)
} // ~0.02 s/op

func BenchmarkSearchV2_100_1k(b *testing.B) {
	benchSearchV2(b, 100, 1000)
} // ~0.22 s/op

func BenchmarkSearchV2_1k_100(b *testing.B) {
	benchSearchV2(b, 1000, 100)
} // ~0.22 s/op

View File

@@ -0,0 +1,56 @@
package searchV2
import (
"context"
"github.com/grafana/grafana-plugin-sdk-go/backend"
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/grafana/grafana/pkg/services/user"
)
type stubSearchService struct {
}
func (s *stubSearchService) doDashboardQuery(ctx context.Context, user *user.SignedInUser, orgId int64, query DashboardQuery) *backend.DataResponse {
return s.DoDashboardQuery(ctx, nil, orgId, query)
}
// IsReady always returns the zero-value readiness response.
func (s *stubSearchService) IsReady(ctx context.Context, orgId int64) IsSearchReadyResponse {
return IsSearchReadyResponse{}
}
// IsDisabled reports the stub as disabled so callers can fall back to other
// search implementations.
func (s *stubSearchService) IsDisabled() bool {
return true
}
// TriggerReIndex is a no-op: the stub maintains no index to rebuild.
func (s *stubSearchService) TriggerReIndex() {
// noop.
}
// NewStubSearchService returns a SearchService that serves a single canned
// result and performs no indexing work.
func NewStubSearchService() SearchService {
return &stubSearchService{}
}
// DoDashboardQuery returns one canned dashboard row (id 2, uid "hello")
// regardless of the user, org, or query supplied.
// NOTE(review): the frame name "dasboards" looks like a typo for
// "dashboards"; kept as-is since fixtures or assertions may match on it.
func (s *stubSearchService) DoDashboardQuery(ctx context.Context, user *backend.User, orgId int64, query DashboardQuery) *backend.DataResponse {
	idField := data.NewFieldFromFieldType(data.FieldTypeInt64, 0)
	idField.Append(int64(2))
	uidField := data.NewFieldFromFieldType(data.FieldTypeString, 0)
	uidField.Append("hello")

	response := &backend.DataResponse{}
	response.Frames = append(response.Frames, data.NewFrame("dasboards", idField, uidField))
	return response
}
// RegisterDashboardIndexExtender is a no-op: the stub has no index to extend.
func (s *stubSearchService) RegisterDashboardIndexExtender(ext DashboardIndexExtender) {
// noop
}
// Run satisfies the background-service contract without doing any work.
func (s *stubSearchService) Run(_ context.Context) error {
return nil
}

View File

@@ -0,0 +1,102 @@
// based on https://github.com/blugelabs/bluge/blob/57414197005148539c5dc5db8ab581594969df79/query.go#L1407-L1482, license:
// Copyright (c) 2020 Couchbase, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package searchV2
import (
"strings"
"github.com/blugelabs/bluge/search"
"github.com/blugelabs/bluge/search/searcher"
"github.com/blugelabs/bluge/search/similarity"
)
// boost is an optional score multiplier for a query; a nil *boost means
// "unset" and is treated as a multiplier of 1.
type boost float64

// Value resolves the effective boost factor, defaulting to 1 when the
// receiver is nil.
func (b *boost) Value() float64 {
	if b != nil {
		return float64(*b)
	}
	return 1
}
// SubstringQuery matches documents whose indexed field value contains the
// given substring anywhere; it is compiled into a ".*<escaped>.*" regexp
// searcher (see Searcher below).
type SubstringQuery struct {
substring string // raw, unescaped substring to match literally
field string // target field; empty means options.DefaultSearchField
boost *boost // optional score multiplier; nil = 1.0
scorer search.Scorer // optional custom scorer; nil lets the searcher decide
}
// NewSubstringQuery builds a query matching values containing the given
// substring. NOTE(review): the parameter is named "wildcard" but is treated
// as a literal substring (metacharacters get escaped) — likely a leftover
// from the bluge wildcard query this was derived from.
func NewSubstringQuery(wildcard string) *SubstringQuery {
return &SubstringQuery{
substring: wildcard,
}
}
// Wildcard returns the substring being queried
func (q *SubstringQuery) Wildcard() string {
return q.substring
}
// SetBoost sets the score multiplier for this query; returns the query for
// chaining.
func (q *SubstringQuery) SetBoost(b float64) *SubstringQuery {
boostVal := boost(b)
q.boost = &boostVal
return q
}
// Boost reports the effective boost, defaulting to 1 when unset (nil-safe,
// see boost.Value).
func (q *SubstringQuery) Boost() float64 {
return q.boost.Value()
}
// SetField targets the query at a specific field; returns the query for
// chaining. An empty field falls back to the default search field.
func (q *SubstringQuery) SetField(f string) *SubstringQuery {
q.field = f
return q
}
// Field returns the field this query targets (may be empty).
func (q *SubstringQuery) Field() string {
return q.field
}
// regexpEscaper escapes every regexp metacharacter that may appear in the
// user-supplied substring, so the substring matches literally when embedded
// in the ".*<substring>.*" pattern built by Searcher.
// strings.Replacer does a single non-overlapping pass, so backslashes it
// inserts are never re-escaped by later rules.
var regexpEscaper = strings.NewReplacer(
	// characters in the substring that must
	// be escaped in the regexp
	"+", `\+`,
	"*", `\*`,
	"?", `\?`, // fix: '?' is an RE2 quantifier and was previously left unescaped
	"(", `\(`,
	")", `\)`,
	"^", `\^`,
	"$", `\$`,
	".", `\.`,
	"{", `\{`,
	"}", `\}`,
	"[", `\[`,
	"]", `\]`,
	`|`, `\|`,
	`\`, `\\`)
// Searcher compiles the escaped substring into a ".*<escaped>.*" regexp and
// returns a regexp string searcher over the requested field, falling back to
// the default search field when no field was set.
func (q *SubstringQuery) Searcher(i search.Reader, options search.SearcherOptions) (search.Searcher, error) {
field := q.field
if q.field == "" {
field = options.DefaultSearchField
}
regexpString := ".*" + regexpEscaper.Replace(q.substring) + ".*"
return searcher.NewRegexpStringSearcher(i, regexpString, field,
q.boost.Value(), q.scorer, similarity.NewCompositeSumScorer(), options)
}
// Validate is a no-op; any real validation happens when the searcher is
// constructed in Searcher.
func (q *SubstringQuery) Validate() error {
return nil // real validation delayed until searcher constructor
}

View File

@@ -0,0 +1,329 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 106,
// "locationInfo": {
// "yboVMzb7z": {
// "kind": "folder",
// "name": "gdev dashboards",
// "url": "/dashboards/f/yboVMzb7z/gdev-dashboards"
// }
// },
// "sortBy": "name_sort"
// }
// }
// Name: Query results
// Dimensions: 9 Fields by 4 Rows
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location | Name: allowed_actions |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string | Type: []json.RawMessage |
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// | folder | ujaM1h6nz | abc2 | | /dashboards/f/ujaM1h6nz/abc2 | null | [] | | [{"kind":"folder","uid":"ujaM1h6nz","actions":["folders.permissions:read","folders.permissions:write","folders:create","folders:delete","folders:read","folders:write"]}] |
// | dashboard | 7MeksYbmk | Alerting with TestData | | /d/7MeksYbmk/alerting-with-testdata | [ | [ | yboVMzb7z | [{"kind":"dashboard","uid":"7MeksYbmk","actions":["dashboards.permissions:read","dashboards.permissions:write","dashboards:create","dashboards:delete","dashboards:read","dashboards:write"]},{"kind":"ds","uid":"datasource-1","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]}] |
// | | | | | | "gdev", | "datasource-1" | | |
// | | | | | | "alerting" | ] | | |
// | | | | | | ] | | | |
// | dashboard | vmie2cmWz | Bar Gauge Demo | | /d/vmie2cmWz/bar-gauge-demo | [ | [ | yboVMzb7z | [{"kind":"dashboard","uid":"vmie2cmWz","actions":["dashboards.permissions:read","dashboards.permissions:write","dashboards:create","dashboards:delete","dashboards:read","dashboards:write"]},{"kind":"ds","uid":"datasource-2","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]},{"kind":"ds","uid":"datasource-3","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]},{"kind":"ds","uid":"datasource-4","actions":["datasources.id:read","datasources.permissions:read","datasources.permissions:write","datasources:delete","datasources:explore","datasources:query","datasources:read","datasources:write"]}] |
// | | | | | | "gdev", | "datasource-2", | | |
// | | | | | | "demo" | "datasource-3", | | |
// | | | | | | ] | "datasource-4" | | |
// | | | | | | | ] | | |
// | dashboard | xMsQdBfWz | Bar Gauge Demo Unfilled | | /d/xMsQdBfWz/bar-gauge-demo-unfilled | [ | [] | yboVMzb7z | [{"kind":"dashboard","uid":"xMsQdBfWz","actions":["dashboards.permissions:read","dashboards.permissions:write","dashboards:create","dashboards:delete","dashboards:read","dashboards:write"]}] |
// | | | | | | "gdev", | | | |
// | | | | | | "demo" | | | |
// | | | | | | ] | | | |
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"refId": "Search",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 106,
"locationInfo": {
"yboVMzb7z": {
"kind": "folder",
"name": "gdev dashboards",
"url": "/dashboards/f/yboVMzb7z/gdev-dashboards"
}
},
"sortBy": "name_sort"
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "allowed_actions",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
}
]
},
"data": {
"values": [
[
"folder",
"dashboard",
"dashboard",
"dashboard"
],
[
"ujaM1h6nz",
"7MeksYbmk",
"vmie2cmWz",
"xMsQdBfWz"
],
[
"abc2",
"Alerting with TestData",
"Bar Gauge Demo",
"Bar Gauge Demo Unfilled"
],
[
"",
"",
"",
""
],
[
"/dashboards/f/ujaM1h6nz/abc2",
"/d/7MeksYbmk/alerting-with-testdata",
"/d/vmie2cmWz/bar-gauge-demo",
"/d/xMsQdBfWz/bar-gauge-demo-unfilled"
],
[
null,
[
"gdev",
"alerting"
],
[
"gdev",
"demo"
],
[
"gdev",
"demo"
]
],
[
[],
[
"datasource-1"
],
[
"datasource-2",
"datasource-3",
"datasource-4"
],
[]
],
[
"",
"yboVMzb7z",
"yboVMzb7z",
"yboVMzb7z"
],
[
[
{
"kind": "folder",
"uid": "ujaM1h6nz",
"actions": [
"folders.permissions:read",
"folders.permissions:write",
"folders:create",
"folders:delete",
"folders:read",
"folders:write"
]
}
],
[
{
"kind": "dashboard",
"uid": "7MeksYbmk",
"actions": [
"dashboards.permissions:read",
"dashboards.permissions:write",
"dashboards:create",
"dashboards:delete",
"dashboards:read",
"dashboards:write"
]
},
{
"kind": "ds",
"uid": "datasource-1",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
}
],
[
{
"kind": "dashboard",
"uid": "vmie2cmWz",
"actions": [
"dashboards.permissions:read",
"dashboards.permissions:write",
"dashboards:create",
"dashboards:delete",
"dashboards:read",
"dashboards:write"
]
},
{
"kind": "ds",
"uid": "datasource-2",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
},
{
"kind": "ds",
"uid": "datasource-3",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
},
{
"kind": "ds",
"uid": "datasource-4",
"actions": [
"datasources.id:read",
"datasources.permissions:read",
"datasources.permissions:write",
"datasources:delete",
"datasources:explore",
"datasources:query",
"datasources:read",
"datasources:write"
]
}
],
[
{
"kind": "dashboard",
"uid": "xMsQdBfWz",
"actions": [
"dashboards.permissions:read",
"dashboards.permissions:write",
"dashboards:create",
"dashboards:delete",
"dashboards:read",
"dashboards:write"
]
}
]
]
]
}
}
]
}

View File

@@ -0,0 +1,273 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 106,
// "locationInfo": {
// "yboVMzb7z": {
// "kind": "folder",
// "name": "gdev dashboards",
// "url": "/dashboards/f/yboVMzb7z/gdev-dashboards"
// }
// },
// "sortBy": "name_sort"
// }
// }
// Name: Query results
// Dimensions: 9 Fields by 4 Rows
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location | Name: allowed_actions |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string | Type: []json.RawMessage |
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
// | folder | ujaM1h6nz | abc2 | | /dashboards/f/ujaM1h6nz/abc2 | null | [] | | [{"kind":"folder","uid":"ujaM1h6nz","actions":["folders:read"]}] |
// | dashboard | 7MeksYbmk | Alerting with TestData | | /d/7MeksYbmk/alerting-with-testdata | [ | [ | yboVMzb7z | [{"kind":"dashboard","uid":"7MeksYbmk","actions":["dashboards:write"]},{"kind":"ds","uid":"datasource-1","actions":[]}] |
// | | | | | | "gdev", | "datasource-1" | | |
// | | | | | | "alerting" | ] | | |
// | | | | | | ] | | | |
// | dashboard | vmie2cmWz | Bar Gauge Demo | | /d/vmie2cmWz/bar-gauge-demo | [ | [ | yboVMzb7z | [{"kind":"dashboard","uid":"vmie2cmWz","actions":[]},{"kind":"ds","uid":"datasource-2","actions":["datasources:read"]},{"kind":"ds","uid":"datasource-3","actions":["datasources:read"]},{"kind":"ds","uid":"datasource-4","actions":[]}] |
// | | | | | | "gdev", | "datasource-2", | | |
// | | | | | | "demo" | "datasource-3", | | |
// | | | | | | ] | "datasource-4" | | |
// | | | | | | | ] | | |
// | dashboard | xMsQdBfWz | Bar Gauge Demo Unfilled | | /d/xMsQdBfWz/bar-gauge-demo-unfilled | [ | [] | yboVMzb7z | [{"kind":"dashboard","uid":"xMsQdBfWz","actions":[]}] |
// | | | | | | "gdev", | | | |
// | | | | | | "demo" | | | |
// | | | | | | ] | | | |
// +----------------+----------------+-------------------------+------------------+--------------------------------------+--------------------------+---------------------------+----------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"refId": "Search",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 106,
"locationInfo": {
"yboVMzb7z": {
"kind": "folder",
"name": "gdev dashboards",
"url": "/dashboards/f/yboVMzb7z/gdev-dashboards"
}
},
"sortBy": "name_sort"
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "allowed_actions",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
}
]
},
"data": {
"values": [
[
"folder",
"dashboard",
"dashboard",
"dashboard"
],
[
"ujaM1h6nz",
"7MeksYbmk",
"vmie2cmWz",
"xMsQdBfWz"
],
[
"abc2",
"Alerting with TestData",
"Bar Gauge Demo",
"Bar Gauge Demo Unfilled"
],
[
"",
"",
"",
""
],
[
"/dashboards/f/ujaM1h6nz/abc2",
"/d/7MeksYbmk/alerting-with-testdata",
"/d/vmie2cmWz/bar-gauge-demo",
"/d/xMsQdBfWz/bar-gauge-demo-unfilled"
],
[
null,
[
"gdev",
"alerting"
],
[
"gdev",
"demo"
],
[
"gdev",
"demo"
]
],
[
[],
[
"datasource-1"
],
[
"datasource-2",
"datasource-3",
"datasource-4"
],
[]
],
[
"",
"yboVMzb7z",
"yboVMzb7z",
"yboVMzb7z"
],
[
[
{
"kind": "folder",
"uid": "ujaM1h6nz",
"actions": [
"folders:read"
]
}
],
[
{
"kind": "dashboard",
"uid": "7MeksYbmk",
"actions": [
"dashboards:write"
]
},
{
"kind": "ds",
"uid": "datasource-1",
"actions": []
}
],
[
{
"kind": "dashboard",
"uid": "vmie2cmWz",
"actions": []
},
{
"kind": "ds",
"uid": "datasource-2",
"actions": [
"datasources:read"
]
},
{
"kind": "ds",
"uid": "datasource-3",
"actions": [
"datasources:read"
]
},
{
"kind": "ds",
"uid": "datasource-4",
"actions": []
}
],
[
{
"kind": "dashboard",
"uid": "xMsQdBfWz",
"actions": []
}
]
]
]
}
}
]
}

View File

@@ -0,0 +1,122 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 0
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 0 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 0
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[],
[],
[],
[],
[],
[],
[],
[]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 2 | boom | | /pfix/d/2/ | null | [] | general |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"2"
],
[
"boom"
],
[
""
],
[
"/pfix/d/2/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 3 | created | | /pfix/d/3/ | null | [] | general |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"3"
],
[
"created"
],
[
""
],
[
"/pfix/d/3/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,122 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 0
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 0 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 0
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[],
[],
[],
[],
[],
[],
[],
[]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 2 | nginx | | /pfix/d/2/ | null | [] | general |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"2"
],
[
"nginx"
],
[
""
],
[
"/pfix/d/2/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 4 | One more dash | | /pfix/d/4/ | null | [] | general |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"4"
],
[
"One more dash"
],
[
""
],
[
"/pfix/d/4/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+-----------------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+-----------------------+--------------------------+-------------------------+----------------+
// | folder | 1 | My folder | | /pfix/dashboards/f/1/ | null | [] | |
// +----------------+----------------+----------------+------------------+-----------------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"folder"
],
[
"1"
],
[
"My folder"
],
[
""
],
[
"/pfix/dashboards/f/1/"
],
[
null
],
[
[]
],
[
""
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 1 | Archer Data System | | /pfix/d/1/ | null | [] | general |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"1"
],
[
"Archer Data System"
],
[
""
],
[
"/pfix/d/1/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 1 | Archer Data System | | /pfix/d/1/ | null | [] | general |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"1"
],
[
"Archer Data System"
],
[
""
],
[
"/pfix/d/1/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 2 | Document Sync repo | | /pfix/d/2/ | null | [] | general |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"2"
],
[
"Document Sync repo"
],
[
""
],
[
"/pfix/d/2/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 1 | Archer Data System | | /pfix/d/1/ | null | [] | general |
// +----------------+----------------+--------------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"1"
],
[
"Archer Data System"
],
[
""
],
[
"/pfix/d/1/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 1 | heatTorkel | | /pfix/d/1/ | null | [] | general |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"1"
],
[
"heatTorkel"
],
[
""
],
[
"/pfix/d/1/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,139 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 1
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 1 Rows
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 1 | heat-torkel | | /pfix/d/1/ | null | [] | general |
// +----------------+----------------+----------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 1
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard"
],
[
"1"
],
[
"heat-torkel"
],
[
""
],
[
"/pfix/d/1/"
],
[
null
],
[
[]
],
[
"general"
]
]
}
}
]
}

View File

@@ -0,0 +1,148 @@
// 🌟 This was machine generated. Do not edit. 🌟
//
// Frame[0] {
// "type": "search-results",
// "typeVersion": [
// 0,
// 0
// ],
// "custom": {
// "count": 2
// }
// }
// Name: Query results
// Dimensions: 8 Fields by 2 Rows
// +----------------+----------------+------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | Name: kind | Name: uid | Name: name | Name: panel_type | Name: url | Name: tags | Name: ds_uid | Name: location |
// | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: | Labels: |
// | Type: []string | Type: []string | Type: []string | Type: []string | Type: []string | Type: []*json.RawMessage | Type: []json.RawMessage | Type: []string |
// +----------------+----------------+------------------+------------------+----------------+--------------------------+-------------------------+----------------+
// | dashboard | 1 | heat-torkel | | /pfix/d/1/ | null | [] | general |
// | dashboard | 2 | topology heatmap | | /pfix/d/2/ | null | [] | general |
// +----------------+----------------+------------------+------------------+----------------+--------------------------+-------------------------+----------------+
//
//
// 🌟 This was machine generated. Do not edit. 🌟
{
"status": 200,
"frames": [
{
"schema": {
"name": "Query results",
"meta": {
"type": "search-results",
"typeVersion": [
0,
0
],
"custom": {
"count": 2
}
},
"fields": [
{
"name": "kind",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "uid",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "name",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "panel_type",
"type": "string",
"typeInfo": {
"frame": "string"
}
},
{
"name": "url",
"type": "string",
"typeInfo": {
"frame": "string"
},
"config": {
"links": [
{
"title": "link",
"url": "${__value.text}"
}
]
}
},
{
"name": "tags",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage",
"nullable": true
}
},
{
"name": "ds_uid",
"type": "other",
"typeInfo": {
"frame": "json.RawMessage"
}
},
{
"name": "location",
"type": "string",
"typeInfo": {
"frame": "string"
}
}
]
},
"data": {
"values": [
[
"dashboard",
"dashboard"
],
[
"1",
"2"
],
[
"heat-torkel",
"topology heatmap"
],
[
"",
""
],
[
"/pfix/d/1/",
"/pfix/d/2/"
],
[
null,
null
],
[
[],
[]
],
[
"general",
"general"
]
]
}
}
]
}

Some files were not shown because too many files have changed in this diff Show More