Compare commits

..

139 Commits

Author SHA1 Message Date
Ryan McKinley
352ac8fc5b lint 2025-12-31 11:57:21 +03:00
Ryan McKinley
00917091d7 remove version from snapshot 2025-12-31 10:27:50 +03:00
Ryan McKinley
d73e4a229f Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-12-31 10:08:44 +03:00
Ryan McKinley
96e3fdbfd5 add plugin iniformation 2025-12-18 10:42:50 +03:00
Ryan McKinley
68a65af091 Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-12-18 10:31:57 +03:00
Ryan McKinley
82c045e501 Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-12-17 13:42:34 +03:00
Ryan McKinley
80d806a822 merge main 2025-12-15 08:52:54 +03:00
Ryan McKinley
570146fb36 openapi 2025-12-09 15:39:01 +03:00
Ryan McKinley
61ec394f59 replace 2025-12-09 15:20:31 +03:00
Ryan McKinley
ad0adf79bd fix test spec 2025-12-09 15:08:16 +03:00
Ryan McKinley
1f396581a6 with spec 2025-12-09 15:01:33 +03:00
Ryan McKinley
406502f351 merge main 2025-12-09 14:25:29 +03:00
Ryan McKinley
9648c0956f merge main 2025-11-17 13:44:20 +03:00
Ryan McKinley
008d373f7a fix lint 2025-10-26 22:03:58 +03:00
Ryan McKinley
3f1145fe3b merge main 2025-10-26 21:35:49 +03:00
Ryan McKinley
09c5311797 merge main 2025-10-26 21:01:10 +03:00
Ryan McKinley
6df663584c add conversion 2025-10-16 16:14:27 +03:00
Ryan McKinley
2614a917de Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-10-16 16:08:13 +03:00
Ryan McKinley
a1015f7a9f Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-10-16 16:04:44 +03:00
Ryan McKinley
f17765b70c add comment 2025-10-16 15:54:54 +03:00
Ryan McKinley
9a05906299 add operations test 2025-10-16 15:49:37 +03:00
Ryan McKinley
969ae75b08 update openapi specs 2025-10-16 15:20:30 +03:00
Ryan McKinley
eeb6d105ed update openapi specs 2025-10-16 15:18:53 +03:00
Ryan McKinley
b93fb964b7 Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-10-16 14:56:32 +03:00
Ryan McKinley
d1657d4684 merge main 2025-10-14 15:48:41 +03:00
Ryan McKinley
63374d29c0 merge main 2025-10-08 12:54:38 +03:00
Ryan McKinley
379aff5ff4 merge main 2025-08-28 22:36:57 +03:00
Ryan McKinley
8bf8c07878 Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-08-28 22:31:03 +03:00
Ryan McKinley
f7dc2f6e56 Merge remote-tracking branch 'origin/main' into ds-apiserver-schema-builder 2025-08-27 14:06:47 +03:00
Ryan McKinley
704f533846 generic apiserver 2025-08-27 12:51:17 +03:00
Ryan McKinley
82312c3418 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-27 12:50:11 +03:00
Ryan McKinley
87a9f26997 lint 2025-08-27 10:07:45 +03:00
Ryan McKinley
1fe4415682 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-27 10:04:41 +03:00
Ryan McKinley
a629b70c1f Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-27 00:50:13 +03:00
Ryan McKinley
36fe8c6b61 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-26 12:36:14 +03:00
Ryan McKinley
e96dd1b12a comments 2025-08-26 12:21:54 +03:00
Ryan McKinley
a4adcf8896 better comment 2025-08-26 12:09:23 +03:00
Ryan McKinley
1671a8644f add feature toggle 2025-08-26 11:31:02 +03:00
Ryan McKinley
bfcf649e8b remove plugin 2025-08-26 11:21:10 +03:00
Ryan McKinley
ebb4cfadff full coverage for convert 2025-08-26 10:56:36 +03:00
Ryan McKinley
29e9ae1918 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-26 10:51:34 +03:00
Ryan McKinley
540eb6c862 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-26 08:41:07 +03:00
Ryan McKinley
17b20fb464 make update-workspace 2025-08-22 20:58:45 +03:00
Ryan McKinley
dc986afd68 fix build 2025-08-22 20:51:11 +03:00
Ryan McKinley
eba83d8973 feedback 2025-08-22 20:44:19 +03:00
Ryan McKinley
cf89fb2a13 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-22 20:24:22 +03:00
Ryan McKinley
290e8a97f1 merge main 2025-08-22 20:05:53 +03:00
Ryan McKinley
0ed24434c2 merge main 2025-08-21 14:54:26 +03:00
Ryan McKinley
cbfb1e15ed Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-21 14:50:30 +03:00
Ryan McKinley
47e379c8d9 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-21 14:27:49 +03:00
Ryan McKinley
500b029b25 exclude false values 2025-08-20 11:37:45 +03:00
Ryan McKinley
bce28b8663 unstructured spec 2025-08-20 10:39:14 +03:00
Ryan McKinley
b0c9350580 unstructured spec 2025-08-20 10:38:52 +03:00
Ryan McKinley
76b4c687b0 update comments 2025-08-20 09:31:41 +03:00
Ryan McKinley
0d6f718255 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-20 09:13:30 +03:00
Ryan McKinley
6b72ddb4d3 cleanup 2025-08-19 23:50:26 +03:00
Ryan McKinley
2f094fdcd9 exclude empty secure values 2025-08-19 23:41:13 +03:00
Ryan McKinley
6a0ce01d18 more comments 2025-08-19 23:21:43 +03:00
Ryan McKinley
d00b8ab76d Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-19 23:17:31 +03:00
Ryan McKinley
e577b8d0b8 rename ListDataSources 2025-08-19 22:50:50 +03:00
Ryan McKinley
2270d6cb22 remove dummy validation 2025-08-19 22:01:58 +03:00
Ryan McKinley
c539be48d8 remove dummy validatio 2025-08-19 21:23:32 +03:00
Ryan McKinley
03a2153bd8 merge main 2025-08-19 20:31:38 +03:00
Ryan McKinley
eb01a3e462 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-19 20:25:01 +03:00
Ryan McKinley
f6b6b62f5e update name comment 2025-08-19 13:03:02 +03:00
Ryan McKinley
5a2351387a Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-19 11:24:22 +03:00
Ryan McKinley
78d9829d3b Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-06 23:44:34 +03:00
Ryan McKinley
d9e1adaa48 rename connections 2025-08-05 14:09:07 +03:00
Ryan McKinley
949b521ac7 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-08-05 14:04:48 +03:00
Ryan McKinley
1d9572cdb2 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-31 21:00:39 +02:00
Ryan McKinley
f5e649183b do not expose OpenAPI unless it exists 2025-07-29 13:29:28 +02:00
Ryan McKinley
4b44a83802 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-29 13:06:06 +02:00
Ryan McKinley
26dc101ecc Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-28 15:42:32 +02:00
Ryan McKinley
ca2f2e8d9e Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-28 12:50:03 +02:00
Ryan McKinley
0e10d9cd16 merge main 2025-07-28 12:06:25 +02:00
Ryan McKinley
ed9789179b Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-17 10:21:38 -07:00
Ryan McKinley
5890137232 fix unique name constraint 2025-07-11 12:52:23 -07:00
Ryan McKinley
471c4eb89d fix unique name constraint 2025-07-11 12:50:47 -07:00
Ryan McKinley
3ac63ea9c4 update comments 2025-07-11 12:08:12 -07:00
Ryan McKinley
7d2d38fd94 merge main 2025-07-11 09:16:23 -07:00
Ryan McKinley
80dda87868 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-11 09:16:19 -07:00
Ryan McKinley
04a4fd7c61 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-10 14:54:39 -07:00
Ryan McKinley
1d26b455fd use common secure values 2025-07-10 14:39:14 -07:00
Ryan McKinley
8f0109a1ee Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-10 14:29:04 -07:00
Ryan McKinley
798e9a32fc unstructured IsZero 2025-07-10 12:46:33 -07:00
Ryan McKinley
f4001d7cdc Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-10 11:59:23 -07:00
Ryan McKinley
718c28438e avoid lint issue 2025-07-10 10:00:36 -07:00
Ryan McKinley
b58b9144a5 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-10 09:51:51 -07:00
Ryan McKinley
96830de552 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-09 14:35:20 -07:00
Ryan McKinley
58c956eb19 revert depguard config changes 2025-07-08 18:03:57 -07:00
Ryan McKinley
b1d8e9ca41 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-08 18:00:28 -07:00
Ryan McKinley
4006a61964 update go.mod 2025-07-08 07:10:27 -07:00
Ryan McKinley
0a24935e45 depguard 2025-07-08 07:09:40 -07:00
Ryan McKinley
f25c9b0e03 merge main 2025-07-08 06:29:45 -07:00
Ryan McKinley
5c478e98c4 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-08 06:29:39 -07:00
Ryan McKinley
ddc99a1dca Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-07 19:06:12 -07:00
Ryan McKinley
e12d1ba6ca Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-07 13:07:20 -07:00
Ryan McKinley
420e070aea Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-07 10:38:32 -07:00
Ryan McKinley
732d4351de with hooks 2025-07-03 17:44:44 -07:00
Ryan McKinley
f842bb7af7 merge main 2025-07-03 15:01:11 -07:00
Ryan McKinley
a1e2ba6617 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-03 14:58:45 -07:00
Ryan McKinley
1cade082fc now with routes for testdata 2025-07-03 14:43:12 -07:00
Ryan McKinley
ea38c4ad5a custom jsonschema for testdata 2025-07-03 13:03:41 -07:00
Ryan McKinley
01926ab3c8 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-03 10:30:38 -07:00
Ryan McKinley
84a5282ea2 fix test 2025-07-03 10:21:28 -07:00
Ryan McKinley
35d7bb880a Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-03 10:18:39 -07:00
Ryan McKinley
251b7b4b4e hardcode custom spec 2025-07-02 23:08:20 -07:00
Ryan McKinley
4e3197a58f fix spelling 2025-07-02 19:40:19 -07:00
Ryan McKinley
1a883d1167 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-02 19:34:48 -07:00
Ryan McKinley
cc2e96a558 update openapi specs 2025-07-02 14:58:34 -07:00
Ryan McKinley
6b2ebb2d65 lint 2025-07-02 14:43:37 -07:00
Ryan McKinley
7cc36672bb fix integration test 2025-07-02 14:41:52 -07:00
Ryan McKinley
5be096833a run codegen 2025-07-02 14:27:31 -07:00
Ryan McKinley
076d1a5ad5 revert go.mod changes 2025-07-02 13:53:22 -07:00
Ryan McKinley
775ed81b58 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-02 13:37:12 -07:00
Ryan McKinley
fb0aaa321e merge main 2025-07-02 13:33:33 -07:00
Ryan McKinley
c1c1f3a85c merge main 2025-07-02 10:06:43 -07:00
Ryan McKinley
c0f1a6423c Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-02 09:56:20 -07:00
Ryan McKinley
a44fdee0ef merge upstream service changes 2025-07-01 17:00:04 -07:00
Ryan McKinley
97a089ef05 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-07-01 16:53:45 -07:00
Ryan McKinley
b2799f977f use config from secrets service 2025-07-01 13:44:47 -07:00
Ryan McKinley
2d4fd99e7a update openapi 2025-07-01 09:14:01 -07:00
Ryan McKinley
c4842845e7 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-06-30 13:47:39 -07:00
Ryan McKinley
e9105e6303 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-06-27 20:08:48 -07:00
Ryan McKinley
841e0bd5e5 update openapi 2025-06-27 16:29:22 -07:00
Ryan McKinley
5c80bc12f0 organize models 2025-06-27 16:03:59 -07:00
Ryan McKinley
60981e813b organize models 2025-06-27 16:03:24 -07:00
Ryan McKinley
94eea040e2 Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-06-27 15:15:34 -07:00
Ryan McKinley
6869160e97 merge main 2025-06-27 12:49:54 -07:00
Ryan McKinley
4dc6ad9257 use plural 2025-06-20 10:14:18 +03:00
Ryan McKinley
b2c368e1cd use plural 2025-06-20 10:09:34 +03:00
Ryan McKinley
1ea3b5440c secure value update 2025-06-20 00:18:14 +03:00
Ryan McKinley
9bbe311bdf Merge remote-tracking branch 'origin/main' into ds-apiserver-with-configs 2025-06-20 00:18:05 +03:00
Ryan McKinley
5a338adf47 add basic tests 2025-06-19 23:42:39 +03:00
Ryan McKinley
1d218d01b2 wire up dual writer 2025-06-19 20:19:48 +03:00
Ryan McKinley
ae6ba4ba44 fill stubs for cloud config 2025-06-19 20:05:36 +03:00
Ryan McKinley
90614fcb91 use readme 2025-06-19 17:57:05 +03:00
Ryan McKinley
1bedd399d6 with configs 2025-06-19 17:33:38 +03:00
Ryan McKinley
0bff610506 with configs 2025-06-19 17:27:40 +03:00
84 changed files with 1519 additions and 2483 deletions

2
.github/CODEOWNERS vendored
View File

@@ -519,7 +519,7 @@ i18next.config.ts @grafana/grafana-frontend-platform
/e2e-playwright/various-suite/solo-route.spec.ts @grafana/dashboards-squad
/e2e-playwright/various-suite/trace-view-scrolling.spec.ts @grafana/observability-traces-and-profiling
/e2e-playwright/various-suite/verify-i18n.spec.ts @grafana/grafana-frontend-platform
/e2e-playwright/various-suite/visualization-suggestions*.spec.ts @grafana/dataviz-squad
/e2e-playwright/various-suite/visualization-suggestions.spec.ts @grafana/dataviz-squad
/e2e-playwright/various-suite/perf-test.spec.ts @grafana/grafana-frontend-platform
# Packages

View File

@@ -157,7 +157,7 @@ require (
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/google/wire v0.7.0 // indirect
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // indirect

View File

@@ -619,8 +619,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -4,7 +4,7 @@ go 1.25.5
require (
github.com/go-kit/log v0.2.1
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4
github.com/grafana/grafana-app-sdk v0.48.7
github.com/grafana/grafana-app-sdk/logging v0.48.7

View File

@@ -243,8 +243,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 h1:jSojuc7njleS3UOz223WDlXOinmuLAIPI0z2vtq8EgI=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4/go.mod h1:VahT+GtfQIM+o8ht2StR6J9g+Ef+C2Vokh5uuSmOD/4=
github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfUHB32u2ZMo=

View File

@@ -223,7 +223,7 @@ require (
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/gorilla/mux v1.8.1 // indirect
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect

View File

@@ -827,8 +827,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -90,7 +90,7 @@ require (
github.com/google/gnostic-models v0.7.1 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect

View File

@@ -213,8 +213,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -134,7 +134,7 @@ To convert data source-managed alert rules to Grafana managed alerts:
Pausing stops alert rule evaluation behavior for the newly created Grafana-managed alert rules.
9. (Optional) In the **Target data source** of the **Recording rules** section, you can select the data source to which the imported recording rules will write metrics. By default, it is the data source selected in the **Data source** dropdown.
9. (Optional) In the **Target data source** of the **Recording rules** section, you can select the data source that the imported recording rules will query. By default, it is the data source selected in the **Data source** dropdown.
10. Click **Import**.

View File

@@ -1,4 +1,4 @@
import { BootData, PanelPluginMeta } from '@grafana/data';
import { BootData } from '@grafana/data';
import { test, expect } from '@grafana/plugin-e2e';
test.describe(
@@ -22,7 +22,7 @@ test.describe(
await dashboardPage.addPanel();
// Get panel types from window object
const panelTypes: PanelPluginMeta[] = await page.evaluate(() => {
const panelTypes = await page.evaluate(() => {
// @grafana/plugin-e2e doesn't export the full bootdata config
// eslint-disable-next-line @typescript-eslint/consistent-type-assertions
const win = window as typeof window & { grafanaBootData: BootData };

View File

@@ -1,178 +0,0 @@
import { test, expect } from '@grafana/plugin-e2e';
test.use({
featureToggles: {
newVizSuggestions: true,
externalVizSuggestions: false,
},
viewport: {
width: 800,
height: 1500,
},
});
test.describe(
'Visualization suggestions v2',
{
tag: ['@various', '@suggestions'],
},
() => {
test('Should be shown and clickable', async ({ selectors, gotoPanelEditPage }) => {
// Open dashboard with edit panel
const panelEditPage = await gotoPanelEditPage({
dashboard: {
uid: 'aBXrJ0R7z',
},
id: '9',
});
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.Panels.Panel.content).locator('.uplot'),
'time series to be rendered inside panel'
).toBeVisible();
// Try visualization suggestions
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('Suggestions')).click();
// Verify we see suggestions
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Line chart')),
'line chart suggestion to be rendered'
).toBeVisible();
// TODO: in this part of the test, we will change the query and the transforms and observe suggestions being updated.
// Select a visualization and verify table header is visible from preview
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Table')).click();
await expect(
panelEditPage
.getByGrafanaSelector(selectors.components.Panels.Panel.content)
.getByRole('grid')
.getByRole('row')
.first(),
'table to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton),
'discard changes button disabled since panel has not yet changed'
).toBeDisabled();
// apply the suggestion and verify panel options are visible
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.confirm('Table')).click();
await expect(
panelEditPage
.getByGrafanaSelector(selectors.components.Panels.Panel.content)
.getByRole('grid')
.getByRole('row')
.first(),
'table to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
'options pane to be rendered'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton),
'discard changes button enabled now that panel is dirty'
).toBeEnabled();
});
test('should not apply suggestion if you navigate toggle the viz picker back off', async ({
selectors,
gotoPanelEditPage,
}) => {
// Open dashboard with edit panel
const panelEditPage = await gotoPanelEditPage({
dashboard: {
uid: 'aBXrJ0R7z',
},
id: '9',
});
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.Panels.Panel.content).locator('.uplot'),
'time series to be rendered inside panel;'
).toBeVisible();
// Try visualization suggestions
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('Suggestions')).click();
// Verify we see suggestions
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Line chart')),
'line chart suggestion to be rendered'
).toBeVisible();
// Select a visualization
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Table')).click();
await expect(
panelEditPage
.getByGrafanaSelector(selectors.components.Panels.Panel.content)
.getByRole('grid')
.getByRole('row')
.first(),
'table to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton)
).toBeDisabled();
// Verify that toggling the viz picker back cancels the suggestion, restores the line chart, shows panel options
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.Panels.Panel.content).locator('.uplot'),
'time series to be rendered inside panel'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
'options pane to be rendered'
).toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton),
'discard changes button is still disabled since no changes were applied'
).toBeDisabled();
});
test('should not apply suggestion if you navigate back to the dashboard', async ({
page,
selectors,
gotoPanelEditPage,
}) => {
// Open dashboard with edit panel
const panelEditPage = await gotoPanelEditPage({
dashboard: {
uid: 'aBXrJ0R7z',
},
id: '9',
});
// Try visualization suggestions
await panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker).click();
await panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('Suggestions')).click();
// Verify we see suggestions
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Line chart')),
'line chart suggestion to be rendered'
).toBeVisible();
// Select a visualization
await panelEditPage.getByGrafanaSelector(selectors.components.VisualizationPreview.card('Table')).click();
await expect(page.getByRole('grid').getByRole('row').first(), 'table row to be rendered').toBeVisible();
await expect(
panelEditPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.discardChangesButton)
).toBeDisabled();
// Verify that navigating back to the dashboard cancels the suggestion and restores the line chart.
await panelEditPage
.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.backToDashboardButton)
.click();
await expect(
page.locator('[data-viz-panel-key="panel-9"]').locator('.uplot'),
'time series to be rendered inside the panel'
).toBeVisible();
});
}
);

View File

@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
test.describe(
'Visualization suggestions',
{
tag: ['@various', '@suggestions'],
tag: ['@various'],
},
() => {
test('Should be shown and clickable', async ({ page, selectors, gotoPanelEditPage }) => {

4
go.mod
View File

@@ -87,7 +87,7 @@ require (
github.com/googleapis/gax-go/v2 v2.15.0 // @grafana/grafana-backend-group
github.com/gorilla/mux v1.8.1 // @grafana/grafana-backend-group
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // @grafana/grafana-app-platform-squad
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // @grafana/alerting-backend
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // @grafana/alerting-backend
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // @grafana/identity-access-team
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // @grafana/identity-access-team
github.com/grafana/dataplane/examples v0.0.1 // @grafana/observability-metrics
@@ -181,7 +181,6 @@ require (
github.com/xlab/treeprint v1.2.0 // @grafana/observability-traces-and-profiling
github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // @grafana/grafana-operator-experience-squad
github.com/yudai/gojsondiff v1.0.0 // @grafana/grafana-backend-group
go.etcd.io/bbolt v1.4.2 // @grafana/grafana-search-and-storage
go.opentelemetry.io/collector/pdata v1.44.0 // @grafana/grafana-backend-group
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.64.0 // @grafana/plugins-platform-backend
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.63.0 // @grafana/grafana-operator-experience-squad
@@ -604,6 +603,7 @@ require (
github.com/yuin/gopher-lua v1.1.1 // indirect
github.com/zclconf/go-cty v1.16.3 // indirect
github.com/zeebo/xxh3 v1.0.2 // indirect
go.etcd.io/bbolt v1.4.2 // indirect
go.etcd.io/etcd/api/v3 v3.6.6 // indirect
go.etcd.io/etcd/client/pkg/v3 v3.6.6 // indirect
go.etcd.io/etcd/client/v3 v3.6.6 // indirect

4
go.sum
View File

@@ -1622,8 +1622,8 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -1293,9 +1293,6 @@ export const versionedComponents = {
card: {
[MIN_GRAFANA_VERSION]: (name: string) => `data-testid suggestion-${name}`,
},
confirm: {
'12.4.0': (name: string) => `data-testid suggestion-${name} confirm button`,
},
},
ColorSwatch: {
name: {

View File

@@ -0,0 +1,44 @@
package v0alpha1
import (
"k8s.io/kube-openapi/pkg/spec3"
"k8s.io/kube-openapi/pkg/validation/spec"
)
// DataSourceOpenAPIExtension holds optional, per-datasource-type additions to
// the generated OpenAPI document.
// NOTE: the properties from this structure will be populated by reading an
// app-sdk manifest.json.
type DataSourceOpenAPIExtension struct {
	// DataSourceSpec, when specified, replaces the default spec schema.
	DataSourceSpec *spec.Schema `json:"spec,omitempty"`

	// SecureValues defines which secure values this datasource type accepts
	// and which of them are required.
	// NOTE(review): unlike the other fields this one has no omitempty, so an
	// empty list serializes as "secureValues": null — confirm this is intended.
	SecureValues []SecureValueInfo `json:"secureValues"`

	// Examples are added to the POST command.
	Examples map[string]*spec3.Example `json:"examples,omitempty"`

	// Schemas are additional schemas added to the response.
	Schemas map[string]*spec.Schema `json:"schemas,omitempty"`

	// TODO: define query types dynamically here (currently hardcoded)
	// Queries *queryV0.QueryTypeDefinitionList `json:"queries,omitempty"`

	// Routes are resource routes -- the paths exposed under:
	//   {group}/{version}/namespaces/{ns}/datasource/{name}/resource/{route}
	Routes map[string]*spec3.Path `json:"routes,omitempty"`

	// Proxy routes -- the paths exposed under:
	//   {group}/{version}/namespaces/{ns}/datasource/{name}/proxy/{proxy}
	Proxy map[string]*spec3.Path `json:"proxy,omitempty"`
}
type SecureValueInfo struct {
// The key
Key string `json:"string"`
// Description
Description string `json:"description,omitempty"`
// Required secure values
Required bool `json:"required,omitempty"`
}

View File

@@ -12,6 +12,7 @@ import (
"errors"
"fmt"
"log"
"reflect"
)
// returns the current implementation version
@@ -117,6 +118,23 @@ func (j *Json) Interface() any {
return j.data
}
// IsEmpty reports whether the wrapped value is empty: a nil value, or a
// zero-length slice, array, map, or string. Values of any other kind
// (numbers, booleans, structs, ...) are never considered empty.
func (j *Json) IsEmpty() bool {
	if j.data == nil {
		return true
	}
	v := reflect.ValueOf(j.data)
	switch v.Kind() {
	case reflect.Slice, reflect.Array, reflect.Map, reflect.String:
		return v.Len() == 0
	}
	return false
}
// Encode returns its marshaled data as `[]byte`
func (j *Json) Encode() ([]byte, error) {
return j.MarshalJSON()

View File

@@ -5,6 +5,7 @@ import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestSimplejson(t *testing.T) {
@@ -272,3 +273,45 @@ func TestMustJson(t *testing.T) {
MustJson([]byte(`{`))
})
}
func TestEmpty(t *testing.T) {
testCases := []struct {
name string
input any
empty bool
}{
{
name: "empty map (any)",
input: map[string]any{},
empty: true,
}, {
name: "empty map (string)",
input: map[string]string{},
empty: true,
}, {
name: "empty array (any)",
input: []any{},
empty: true,
}, {
name: "empty array (string)",
input: []string{},
empty: true,
}, {
name: "empty string",
input: "",
empty: true,
}, {
name: "non empty string",
input: "hello",
}, {
name: "key value",
input: map[string]any{"key": "value"},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
js := NewFromAny(tc.input)
require.Equal(t, tc.empty, js.IsEmpty())
})
}
}

View File

@@ -55,19 +55,33 @@ func (r *converter) asDataSource(ds *datasources.DataSource) (*datasourceV0.Data
SetBasicAuthUser(ds.BasicAuthUser).
SetWithCredentials(ds.WithCredentials).
SetIsDefault(ds.IsDefault).
SetReadOnly(ds.ReadOnly).
SetJSONData(ds.JsonData)
SetReadOnly(ds.ReadOnly)
if ds.JsonData != nil && !ds.JsonData.IsEmpty() {
obj.Spec.SetJSONData(ds.JsonData.Interface())
}
rv := int64(0)
if !ds.Created.IsZero() {
obj.CreationTimestamp = metav1.NewTime(ds.Created)
rv = ds.Created.UnixMilli()
}
// Only mark updated if the times have actually changed
if !ds.Updated.IsZero() {
obj.ResourceVersion = fmt.Sprintf("%d", ds.Updated.UnixMilli())
obj.Annotations = map[string]string{
utils.AnnoKeyUpdatedTimestamp: ds.Updated.Format(time.RFC3339),
rv = ds.Updated.UnixMilli()
delta := rv - obj.CreationTimestamp.UnixMilli()
if delta > 1500 {
obj.Annotations = map[string]string{
utils.AnnoKeyUpdatedTimestamp: ds.Updated.UTC().Format(time.RFC3339),
}
}
}
if rv > 0 {
obj.ResourceVersion = strconv.FormatInt(rv, 10)
}
if ds.APIVersion != "" {
obj.APIVersion = fmt.Sprintf("%s/%s", r.group, ds.APIVersion)
}

View File

@@ -0,0 +1,275 @@
package hardcoded
import (
"k8s.io/kube-openapi/pkg/spec3"
"k8s.io/kube-openapi/pkg/validation/spec"
datasourceV0 "github.com/grafana/grafana/pkg/apis/datasource/v0alpha1"
)
// TestdataOpenAPIExtension returns a hand-written OpenAPI extension for the
// grafana-testdata datasource: an explicit spec schema, POST examples, and
// explicit resource routes that replace the generic catch-all paths.
// NOTE: this is a hardcoded stand-in until the information is read from an
// app-sdk manifest.json.
func TestdataOpenAPIExtension() (*datasourceV0.DataSourceOpenAPIExtension, error) {
	oas := &datasourceV0.DataSourceOpenAPIExtension{
		SecureValues: []datasourceV0.SecureValueInfo{ // empty
			// {
			// 	Key:         "aaa",
			// 	Description: "describe aaa",
			// 	Required:    true,
			// }, {
			// 	Key:         "bbb",
			// 	Description: "describe bbb",
			// },
		},
		// Examples shown on the create (POST) request body
		Examples: map[string]*spec3.Example{
			"": {
				ExampleProps: spec3.ExampleProps{
					Summary: "Empty testdata",
					Value: map[string]any{
						"apiVersion": "testdata.datasource.grafana.app/v0alpha1",
						"kind":       "DataSource",
						"metadata": map[string]any{
							"name": "my-testdata-datasource",
						},
						"spec": map[string]any{
							"title": "My TestData Datasource",
						},
					},
				},
			},
			"with-url": {
				ExampleProps: spec3.ExampleProps{
					Summary: "Testdata with URL (not used)",
					Value: map[string]any{
						"apiVersion": "testdata.datasource.grafana.app/v0alpha1",
						"kind":       "DataSource",
						"metadata": map[string]any{
							"name": "testdata-with-url",
						},
						"spec": map[string]any{
							"title": "TestData with URL",
							"url":   "http://example.com",
						},
					},
				},
			},
		},
	}

	// Dummy spec
	p := &spec.Schema{} //SchemaProps: spec.SchemaProps{Type: []string{"object"}}}
	p.Description = "Test data does not require any explicit configuration"
	p.Required = []string{"title"}
	p.AdditionalProperties = &spec.SchemaOrBool{Allows: false}
	p.Properties = map[string]spec.Schema{
		"title": *spec.StringProperty().WithDescription("display name"),
		"url":   *spec.StringProperty().WithDescription("not used"),
	}
	p.Example = map[string]any{
		"url": "http://xxxx",
	}
	oas.DataSourceSpec = p

	// Resource routes
	// https://github.com/grafana/grafana/blob/main/pkg/tsdb/grafana-testdata-datasource/resource_handler.go#L20

	// Shared free-form (object) schema for request/response bodies
	unstructured := spec.MapProperty(nil)
	unstructuredResponse := &spec3.Responses{
		ResponsesProps: spec3.ResponsesProps{
			Default: &spec3.Response{
				ResponseProps: spec3.ResponseProps{
					Content: map[string]*spec3.MediaType{
						"application/json": {
							MediaTypeProps: spec3.MediaTypeProps{
								Schema: unstructured,
							},
						},
					},
				},
			},
		},
	}
	unstructuredRequest := &spec3.RequestBody{
		RequestBodyProps: spec3.RequestBodyProps{
			Content: map[string]*spec3.MediaType{
				"application/json": {
					MediaTypeProps: spec3.MediaTypeProps{
						Schema: unstructured,
					},
				},
			},
		},
	}

	oas.Routes = map[string]*spec3.Path{
		"": {
			PathProps: spec3.PathProps{
				Summary: "hello world",
				Get: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Responses: &spec3.Responses{
							ResponsesProps: spec3.ResponsesProps{
								Default: &spec3.Response{
									ResponseProps: spec3.ResponseProps{
										Content: map[string]*spec3.MediaType{
											"text/plain": {
												MediaTypeProps: spec3.MediaTypeProps{
													Schema: spec.StringProperty(),
												},
											},
										},
									},
								},
							},
						},
					},
				},
			},
		},
		"/scenarios": {
			PathProps: spec3.PathProps{
				Summary: "hello world",
				Get: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Responses: unstructuredResponse,
					},
				},
			},
		},
		"/stream": {
			PathProps: spec3.PathProps{
				Summary: "Get streaming response",
				Get: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Parameters: []*spec3.Parameter{
							{
								ParameterProps: spec3.ParameterProps{
									Name:        "count",
									In:          "query",
									Schema:      spec.Int64Property(),
									Description: "number of points that will be returned",
									Example:     10,
								},
							},
							{
								ParameterProps: spec3.ParameterProps{
									Name:        "start",
									In:          "query",
									Schema:      spec.Int64Property(),
									Description: "the start value",
								},
							},
							{
								ParameterProps: spec3.ParameterProps{
									Name:        "flush",
									In:          "query",
									Schema:      spec.Int64Property(),
									Description: "How often the result is flushed (1-100%)",
									Example:     100,
								},
							},
							{
								ParameterProps: spec3.ParameterProps{
									Name:        "speed",
									In:          "query",
									Schema:      spec.StringProperty(),
									Description: "the clock cycle",
									Example:     "100ms",
								},
							},
							{
								ParameterProps: spec3.ParameterProps{
									Name:        "format",
									In:          "query",
									Schema:      spec.StringProperty().WithEnum("json", "influx"),
									Description: "the response format",
								},
							},
						},
						Responses: unstructuredResponse,
					},
				},
			},
		},
		"/boom": {
			PathProps: spec3.PathProps{
				Summary: "force a panic",
				Get: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Responses: unstructuredResponse,
					},
				},
				Post: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Responses: unstructuredResponse,
					},
				},
			},
		},
		"/test": {
			PathProps: spec3.PathProps{
				Summary: "Echo any request",
				Post: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						RequestBody: unstructuredRequest,
						Responses:   unstructuredResponse,
					},
				},
			},
		},
		"/test/json": {
			PathProps: spec3.PathProps{
				Summary: "Echo json request",
				Post: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						RequestBody: unstructuredRequest,
						Responses:   unstructuredResponse,
					},
				},
			},
		},
		"/sims": {
			PathProps: spec3.PathProps{
				Description: "Get list of simulations",
				Get: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Responses: unstructuredResponse,
					},
				},
			},
		},
		"/sim/{key}": {
			PathProps: spec3.PathProps{
				// FIX: was a copy-paste of the /sims description
				Description: "Read or update a single simulation by key",
				Get: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Parameters: []*spec3.Parameter{
							{
								ParameterProps: spec3.ParameterProps{
									Name: "key",
									In:   "path",
									// OpenAPI 3 requires path parameters to be required
									Required:    true,
									Schema:      spec.StringProperty(),
									Description: "simulation key (should include hz)",
								},
							},
						},
						Responses: unstructuredResponse,
					},
				},
				Post: &spec3.Operation{
					OperationProps: spec3.OperationProps{
						Parameters: []*spec3.Parameter{
							{
								ParameterProps: spec3.ParameterProps{
									Name: "key",
									In:   "path",
									// OpenAPI 3 requires path parameters to be required
									Required:    true,
									Schema:      spec.StringProperty(),
									Description: "simulation key (should include hz)",
								},
							},
						},
						RequestBody: unstructuredRequest,
						Responses:   unstructuredResponse,
					},
				},
			},
		},
	}
	return oas, nil
}

View File

@@ -0,0 +1,24 @@
package hardcoded
import (
"encoding/json"
"fmt"
"testing"
"github.com/stretchr/testify/require"
// "sigs.k8s.io/yaml" // uses the same structure as json!
)
// TestSpec verifies the hardcoded testdata OpenAPI extension can be built
// and marshaled, and dumps the indented JSON for manual inspection.
func TestSpec(t *testing.T) {
	info, err := TestdataOpenAPIExtension()
	require.NoError(t, err)
	require.NotNil(t, info)

	var jj []byte
	jj, err = json.MarshalIndent(info, "", " ")
	require.NoError(t, err)
	fmt.Printf("%s\n", jj)

	// jj, err = yaml.Marshal(info)
	// require.NoError(t, err)
	// fmt.Printf("%s\n", string(jj))
}

View File

@@ -2,6 +2,7 @@ package datasource
import (
"context"
"errors"
"fmt"
"time"
@@ -9,11 +10,15 @@ import (
"k8s.io/apimachinery/pkg/apis/meta/internalversion"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/validation/field"
"k8s.io/apiserver/pkg/registry/rest"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/apis/datasource/v0alpha1"
"github.com/grafana/grafana/pkg/infra/metrics/metricutil"
"github.com/grafana/grafana/pkg/services/datasources"
)
var (
@@ -84,7 +89,16 @@ func (s *legacyStorage) Create(ctx context.Context, obj runtime.Object, createVa
if !ok {
return nil, fmt.Errorf("expected a datasource object")
}
return s.datasources.CreateDataSource(ctx, ds)
obj, err := s.datasources.CreateDataSource(ctx, ds)
if err != nil {
switch {
case errors.Is(err, datasources.ErrDataSourceNameExists):
return nil, apierrors.NewInvalid(s.resourceInfo.GroupVersionKind().GroupKind(), ds.Name, field.ErrorList{
field.Invalid(field.NewPath("spec", "title"), ds.Spec.Title(), "a datasource with this title already exists")})
}
return nil, err
}
return obj, nil
}
// Update implements rest.Updater.

View File

@@ -2,17 +2,35 @@ package datasource
import (
"fmt"
"maps"
"slices"
"strings"
"k8s.io/kube-openapi/pkg/spec3"
"k8s.io/kube-openapi/pkg/validation/spec"
common "github.com/grafana/grafana/pkg/apimachinery/apis/common/v0alpha1"
datasourceV0 "github.com/grafana/grafana/pkg/apis/datasource/v0alpha1"
"github.com/grafana/grafana/pkg/registry/apis/query/queryschema"
"github.com/grafana/grafana/pkg/services/apiserver/builder"
)
func (b *DataSourceAPIBuilder) PostProcessOpenAPI(oas *spec3.OpenAPI) (*spec3.OpenAPI, error) {
// The plugin description
oas.Info.Description = b.pluginJSON.Info.Description
// Add plugin information
info := map[string]any{
"plugin": b.pluginJSON.ID,
}
if b.pluginJSON.Info.Version != "" {
info["version"] = b.pluginJSON.Info.Version
}
if b.pluginJSON.Info.Build.Time > 0 {
info["build"] = b.pluginJSON.Info.Build.Time
}
oas.Info.AddExtension("plugin", info)
// The root api URL
root := "/apis/" + b.datasourceResourceInfo.GroupVersion().String() + "/"
@@ -28,6 +46,17 @@ func (b *DataSourceAPIBuilder) PostProcessOpenAPI(oas *spec3.OpenAPI) (*spec3.Op
return nil, err
}
// Set the operation ID for the query path
query := oas.Paths.Paths[root+"namespaces/{namespace}/datasources/{name}/query"]
if query != nil && query.Post != nil {
query.Post.OperationId = "queryDataSource"
for _, p := range query.Parameters {
if p.Name == "name" {
p.Description = "DataSource identifier"
}
}
}
// Hide the resource routes -- explicit ones will be added if defined below
prefix := root + "namespaces/{namespace}/datasources/{name}/resource"
r := oas.Paths.Paths[prefix]
@@ -52,7 +81,7 @@ func (b *DataSourceAPIBuilder) PostProcessOpenAPI(oas *spec3.OpenAPI) (*spec3.Op
// Mark connections as deprecated
delete(oas.Paths.Paths, root+"namespaces/{namespace}/connections/{name}")
query := oas.Paths.Paths[root+"namespaces/{namespace}/connections/{name}/query"]
query = oas.Paths.Paths[root+"namespaces/{namespace}/connections/{name}/query"]
for query == nil || query.Post == nil {
return nil, fmt.Errorf("missing temporary connection path")
}
@@ -70,5 +99,111 @@ func (b *DataSourceAPIBuilder) PostProcessOpenAPI(oas *spec3.OpenAPI) (*spec3.Op
},
}
if b.schemaProvider == nil {
return oas, nil
}
custom, err := b.schemaProvider()
if err != nil {
return nil, err
}
return applyCustomSchemas(root, ds, oas, custom)
}
// applyCustomSchemas merges a plugin-provided OpenAPI extension into the
// generated spec:
//   - adds extra component schemas
//   - optionally replaces the generic DataSourceSpec
//   - documents the supported secure values
//   - attaches examples to the create (POST) request
//   - replaces the catch-all resource routes with explicit ones
//
// root is the API root path ("/apis/{group}/{version}/") and ds is the schema
// of the DataSource resource itself. Returns oas (mutated in place).
func applyCustomSchemas(root string, ds *spec.Schema, oas *spec3.OpenAPI, custom *datasourceV0.DataSourceOpenAPIExtension) (*spec3.OpenAPI, error) {
	if custom == nil {
		return oas, nil // nothing special
	}

	// Add custom schemas.
	// FIX: maps.Copy panics when the destination map is nil -- guard it.
	if oas.Components.Schemas == nil {
		oas.Components.Schemas = make(map[string]*spec.Schema, len(custom.Schemas))
	}
	maps.Copy(oas.Components.Schemas, custom.Schemas)

	// FIX: guard against a nil property map before writing "spec"/"secure"
	if ds.Properties == nil {
		ds.Properties = make(map[string]spec.Schema)
	}

	// Replace the generic DataSourceSpec with the explicit one
	if custom.DataSourceSpec != nil {
		oas.Components.Schemas["DataSourceSpec"] = custom.DataSourceSpec
		ds.Properties["spec"] = spec.Schema{
			SchemaProps: spec.SchemaProps{
				Ref: spec.MustCreateRef("#/components/schemas/DataSourceSpec"),
			},
		}
	}

	if len(custom.SecureValues) > 0 {
		example := common.InlineSecureValues{}
		ref := spec.MustCreateRef("#/components/schemas/com.github.grafana.grafana.pkg.apimachinery.apis.common.v0alpha1.InlineSecureValue")
		secure := &spec.Schema{
			SchemaProps: spec.SchemaProps{
				Properties:           make(map[string]spec.Schema),
				AdditionalProperties: &spec.SchemaOrBool{Allows: false},
			}}
		secure.Description = "custom secure value definition"
		for _, v := range custom.SecureValues {
			secure.Properties[v.Key] = spec.Schema{
				SchemaProps: spec.SchemaProps{
					Description: v.Description,
					Ref:         ref,
				},
			}
			if v.Required {
				secure.Required = append(secure.Required, v.Key)
				example[v.Key] = common.InlineSecureValue{Create: "***"}
			}
		}
		if len(example) > 0 {
			secure.Example = example
		}

		// Link the explicit secure values in the resource
		oas.Components.Schemas["SecureValues"] = secure
		ds.Properties["secure"] = spec.Schema{
			SchemaProps: spec.SchemaProps{
				Ref: spec.MustCreateRef("#/components/schemas/SecureValues"),
			},
		}
	}

	// Add examples to the POST request
	// (local renamed from `ds` -- it shadowed the schema parameter)
	if len(custom.Examples) > 0 {
		create := oas.Paths.Paths[root+"namespaces/{namespace}/datasources"]
		if create != nil && create.Post != nil {
			for _, c := range create.Post.RequestBody.Content {
				c.Examples = custom.Examples
			}
		}
	}

	if len(custom.Routes) > 0 {
		base := oas.Paths.Paths[root+"namespaces/{namespace}/datasources/{name}"]
		if base == nil || len(base.Parameters) < 2 {
			return nil, fmt.Errorf("missing Parameters")
		}

		// Remove the generic catch-all resource paths; explicit routes follow
		prefix := root + "namespaces/{namespace}/datasources/{name}/resource"
		for k := range oas.Paths.Paths {
			if strings.HasPrefix(k, prefix) {
				delete(oas.Paths.Paths, k)
			}
		}

		for k, v := range custom.Routes {
			if k != "" && !strings.HasPrefix(k, "/") {
				return nil, fmt.Errorf("path must have slash prefix")
			}
			// Inherit the {namespace} and {name} parameters from the base path
			v.Parameters = append(v.Parameters, base.Parameters[0:2]...)
			for m, op := range builder.GetPathOperations(v) {
				if op.Extensions == nil {
					op.Extensions = make(spec.Extensions)
				}
				if !slices.Contains(op.Tags, "Route") {
					op.Tags = append(op.Tags, "Route") // Custom resource?
				}
				// Derive a stable operation id from method + path
				tmp := strings.ReplaceAll(strings.ReplaceAll(k, "{", ""), "}", "")
				op.OperationId = fmt.Sprintf("%s_route%s", strings.ToLower(m), strings.ReplaceAll(tmp, "/", "_"))
			}
			oas.Paths.Paths[prefix+k] = v
		}
	}
	return oas, nil
}

View File

@@ -25,6 +25,7 @@ import (
"github.com/grafana/grafana/pkg/plugins"
"github.com/grafana/grafana/pkg/plugins/manager/sources"
"github.com/grafana/grafana/pkg/promlib/models"
"github.com/grafana/grafana/pkg/registry/apis/datasource/hardcoded"
"github.com/grafana/grafana/pkg/registry/apis/query/queryschema"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/apiserver/builder"
@@ -39,14 +40,16 @@ var (
// DataSourceAPIBuilder is used just so wire has something unique to return
type DataSourceAPIBuilder struct {
datasourceResourceInfo utils.ResourceInfo
pluginJSON plugins.JSONData
client PluginClient // will only ever be called with the same plugin id!
datasources PluginDatasourceProvider
contextProvider PluginContextWrapper
accessControl accesscontrol.AccessControl
queryTypes *queryV0.QueryTypeDefinitionList
configCrudUseNewApis bool
dataSourceCRUDMetric *prometheus.HistogramVec
pluginJSON plugins.JSONData
client PluginClient // will only ever be called with the same plugin id!
datasources PluginDatasourceProvider
contextProvider PluginContextWrapper
accessControl accesscontrol.AccessControl
queryTypes *queryV0.QueryTypeDefinitionList
schemaProvider func() (*datasourceV0.DataSourceOpenAPIExtension, error)
configCrudUseNewApis bool
dataSourceCRUDMetric *prometheus.HistogramVec
}
func RegisterAPIService(
@@ -59,8 +62,17 @@ func RegisterAPIService(
reg prometheus.Registerer,
pluginSources sources.Registry,
) (*DataSourceAPIBuilder, error) {
//nolint:staticcheck
useQueryTypes := features.IsEnabledGlobally(featuremgmt.FlagDatasourceQueryTypes)
//nolint:staticcheck
configCrudUseNewApis := features.IsEnabledGlobally(featuremgmt.FlagQueryServiceWithConnections)
//nolint:staticcheck
isExperimental := features.IsEnabledGlobally(featuremgmt.FlagGrafanaAPIServerWithExperimentalAPIs)
//nolint:staticcheck // not yet migrated to OpenFeature
if !features.IsEnabledGlobally(featuremgmt.FlagQueryServiceWithConnections) && !features.IsEnabledGlobally(featuremgmt.FlagGrafanaAPIServerWithExperimentalAPIs) {
if !configCrudUseNewApis && !isExperimental {
return nil, nil
}
@@ -82,28 +94,33 @@ func RegisterAPIService(
return nil, fmt.Errorf("error getting list of datasource plugins: %s", err)
}
for _, pluginJSON := range pluginJSONs {
client, ok := pluginClient.(PluginClient)
if !ok {
return nil, fmt.Errorf("plugin client is not a PluginClient: %T", pluginClient)
}
// For the ST runner, the client can talk to any plugin
client, ok := pluginClient.(PluginClient)
if !ok {
return nil, fmt.Errorf("plugin client is not a PluginClient: %T", pluginClient)
}
for _, pluginJSON := range pluginJSONs {
builder, err = NewDataSourceAPIBuilder(
pluginJSON,
client,
datasources.GetDatasourceProvider(pluginJSON),
contextProvider,
accessControl,
//nolint:staticcheck // not yet migrated to OpenFeature
features.IsEnabledGlobally(featuremgmt.FlagDatasourceQueryTypes),
//nolint:staticcheck // not yet migrated to OpenFeature
features.IsEnabledGlobally(featuremgmt.FlagQueryServiceWithConnections),
useQueryTypes, // Exposes the query type OpenAPI schema
configCrudUseNewApis, // Enables the new connections-based datasource config CRUD APIs
)
if err != nil {
return nil, err
}
builder.SetDataSourceCRUDMetrics(dataSourceCRUDMetric)
// Hardcoded schemas for testdata
// NOTE: this will be driven by the pluginJSON/manifest soon
if pluginJSON.ID == "grafana-testdata-datasource" {
builder.schemaProvider = hardcoded.TestdataOpenAPIExtension
}
apiRegistrar.RegisterAPI(builder)
}
return builder, nil // only used for wire
@@ -247,6 +264,7 @@ func (b *DataSourceAPIBuilder) UpdateAPIGroupInfo(apiGroupInfo *genericapiserver
return err
}
} else {
// Read-only access to datasource connection info
storage[ds.StoragePath()] = &connectionAccess{
datasources: b.datasources,
resourceInfo: ds,

View File

@@ -3,6 +3,7 @@
"name": "unique-identifier",
"namespace": "org-0",
"uid": "YpaSG5GQAdxtLZtF6BqQWCeYXOhbVi5C4Cg4oILnJC0X",
"resourceVersion": "1015203600000",
"generation": 8,
"creationTimestamp": "2002-03-04T01:00:00Z",
"labels": {
@@ -10,7 +11,6 @@
}
},
"spec": {
"jsonData": null,
"title": "Display name"
}
}

View File

@@ -5,7 +5,6 @@
"uid": "boDNh7zU3nXj46rOXIJI7r44qaxjs8yy9I9dOj1MyBoX"
},
"spec": {
"jsonData": null,
"title": "Hello testdata"
}
}

View File

@@ -847,7 +847,7 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
if err != nil {
return nil, err
}
zanzanaReconciler := dualwrite2.ProvideZanzanaReconciler(cfg, featureToggles, zanzanaClient, sqlStore, serverLockService, folderimplService, registerer)
zanzanaReconciler := dualwrite2.ProvideZanzanaReconciler(cfg, featureToggles, zanzanaClient, sqlStore, serverLockService, folderimplService)
investigationsAppProvider := investigations.RegisterApp(cfg)
appregistryService, err := appregistry.ProvideBuilderRunners(apiserverService, eventualRestConfigProvider, featureToggles, investigationsAppProvider, cfg)
if err != nil {
@@ -1509,7 +1509,7 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
if err != nil {
return nil, err
}
zanzanaReconciler := dualwrite2.ProvideZanzanaReconciler(cfg, featureToggles, zanzanaClient, sqlStore, serverLockService, folderimplService, registerer)
zanzanaReconciler := dualwrite2.ProvideZanzanaReconciler(cfg, featureToggles, zanzanaClient, sqlStore, serverLockService, folderimplService)
investigationsAppProvider := investigations.RegisterApp(cfg)
appregistryService, err := appregistry.ProvideBuilderRunners(apiserverService, eventualRestConfigProvider, featureToggles, investigationsAppProvider, cfg)
if err != nil {

View File

@@ -1,44 +0,0 @@
package acimpl
import (
"context"
"time"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
const (
	// ossBasicRoleSeedLockName is the server-lock key that guards basic role
	// permission seeding so only one instance seeds at a time
	ossBasicRoleSeedLockName = "oss-ac-basic-role-seeder"
	// ossBasicRoleSeedTimeout bounds how long the seeding lock may be held
	ossBasicRoleSeedTimeout = 2 * time.Minute
)
// refreshBasicRolePermissionsInDB ensures basic role permissions are fully
// derived from in-memory registrations. When a server lock service is
// available the refresh runs under a distributed lock; otherwise it runs
// directly. A missing DB or seeder makes this a no-op.
func (s *Service) refreshBasicRolePermissionsInDB(ctx context.Context, rolesSnapshot map[string][]accesscontrol.Permission) error {
	if s.sql == nil || s.seeder == nil {
		return nil
	}

	run := func(ctx context.Context) error {
		// Flatten the snapshot into the set of desired seed permissions
		desired := make(map[accesscontrol.SeedPermission]struct{})
		for role, perms := range rolesSnapshot {
			for _, p := range perms {
				desired[accesscontrol.SeedPermission{
					BuiltInRole: role,
					Action:      p.Action,
					Scope:       p.Scope,
				}] = struct{}{}
			}
		}
		s.seeder.SetDesiredPermissions(desired)
		return s.seeder.Seed(ctx)
	}

	// No distributed lock configured -- seed directly
	if s.serverLock == nil {
		return run(ctx)
	}

	// The callback's error must be captured separately from the lock error
	var runErr error
	if lockErr := s.serverLock.LockExecuteAndRelease(ctx, ossBasicRoleSeedLockName, ossBasicRoleSeedTimeout, func(ctx context.Context) {
		runErr = run(ctx)
	}); lockErr != nil {
		return lockErr
	}
	return runErr
}

View File

@@ -1,128 +0,0 @@
package acimpl
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/localcache"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/database"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/resourcepermissions"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/testutil"
)
// TestIntegration_OSSBasicRolePermissions_PersistAndRefreshOnRegisterFixedRoles
// verifies that basic-role permissions derived from fixed role registrations
// are persisted to the database, and that a later RegisterFixedRoles resets
// manual drift (deleted or extra rows) back to the registered defaults.
func TestIntegration_OSSBasicRolePermissions_PersistAndRefreshOnRegisterFixedRoles(t *testing.T) {
	testutil.SkipIntegrationTestInShortMode(t)

	ctx := context.Background()
	sql := db.InitTestDB(t)
	store := database.ProvideService(sql)

	// Service wired against a real test database
	svc := ProvideOSSService(
		setting.NewCfg(),
		store,
		&resourcepermissions.FakeActionSetSvc{},
		localcache.ProvideService(),
		featuremgmt.WithFeatures(),
		tracing.InitializeTracerForTest(),
		sql,
		permreg.ProvidePermissionRegistry(),
		nil,
	)

	// Register a fixed role granted to Viewer, then run the seeding
	require.NoError(t, svc.DeclareFixedRoles(accesscontrol.RoleRegistration{
		Role: accesscontrol.RoleDTO{
			Name: "fixed:test:role",
			Permissions: []accesscontrol.Permission{
				{Action: "test:read", Scope: ""},
			},
		},
		Grants: []string{string(org.RoleViewer)},
	}))
	require.NoError(t, svc.RegisterFixedRoles(ctx))

	// verify permission is persisted to DB for basic:viewer
	require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
		var role accesscontrol.Role
		ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
		require.NoError(t, err)
		require.True(t, ok)

		var count int64
		count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
		require.NoError(t, err)
		require.Equal(t, int64(1), count)
		return nil
	}))

	// ensure RegisterFixedRoles refreshes it back to defaults:
	// drop the seeded row and insert an unexpected one by hand
	require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
		ts := time.Now()
		var role accesscontrol.Role
		ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
		require.NoError(t, err)
		require.True(t, ok)
		_, err = sess.Exec("DELETE FROM permission WHERE role_id = ?", role.ID)
		require.NoError(t, err)
		p := accesscontrol.Permission{
			RoleID:  role.ID,
			Action:  "custom:keep",
			Scope:   "",
			Created: ts,
			Updated: ts,
		}
		p.Kind, p.Attribute, p.Identifier = accesscontrol.SplitScope(p.Scope)
		_, err = sess.Table("permission").Insert(&p)
		return err
	}))

	// A fresh service instance with the same registrations...
	svc2 := ProvideOSSService(
		setting.NewCfg(),
		store,
		&resourcepermissions.FakeActionSetSvc{},
		localcache.ProvideService(),
		featuremgmt.WithFeatures(),
		tracing.InitializeTracerForTest(),
		sql,
		permreg.ProvidePermissionRegistry(),
		nil,
	)
	require.NoError(t, svc2.DeclareFixedRoles(accesscontrol.RoleRegistration{
		Role: accesscontrol.RoleDTO{
			Name: "fixed:test:role",
			Permissions: []accesscontrol.Permission{
				{Action: "test:read", Scope: ""},
			},
		},
		Grants: []string{string(org.RoleViewer)},
	}))
	require.NoError(t, svc2.RegisterFixedRoles(ctx))

	// ...restores the registered permission and removes the manual one
	require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
		var role accesscontrol.Role
		ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
		require.NoError(t, err)
		require.True(t, ok)

		var count int64
		count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
		require.NoError(t, err)
		require.Equal(t, int64(1), count)

		count, err = sess.Table("permission").Where("role_id = ? AND action = ?", role.ID, "custom:keep").Count()
		require.NoError(t, err)
		require.Equal(t, int64(0), count)
		return nil
	}))
}

View File

@@ -30,7 +30,6 @@ import (
"github.com/grafana/grafana/pkg/services/accesscontrol/migrator"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -97,12 +96,6 @@ func ProvideOSSService(
roles: accesscontrol.BuildBasicRoleDefinitions(),
store: store,
permRegistry: permRegistry,
sql: db,
serverLock: lock,
}
if backend, ok := store.(*database.AccessControlStore); ok {
s.seeder = seeding.New(log.New("accesscontrol.seeder"), backend, backend)
}
return s
@@ -119,11 +112,8 @@ type Service struct {
rolesMu sync.RWMutex
roles map[string]*accesscontrol.RoleDTO
store accesscontrol.Store
seeder *seeding.Seeder
permRegistry permreg.PermissionRegistry
isInitialized bool
sql db.DB
serverLock *serverlock.ServerLockService
}
func (s *Service) GetUsageStats(_ context.Context) map[string]any {
@@ -441,54 +431,17 @@ func (s *Service) RegisterFixedRoles(ctx context.Context) error {
defer span.End()
s.rolesMu.Lock()
registrations := s.registrations.Slice()
defer s.rolesMu.Unlock()
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
s.registerRolesLocked(registration)
return true
})
s.isInitialized = true
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.Unlock()
if s.seeder != nil {
if err := s.seeder.SeedRoles(ctx, registrations); err != nil {
return err
}
if err := s.seeder.RemoveAbsentRoles(ctx); err != nil {
return err
}
}
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
return nil
}
// getBasicRolePermissionsSnapshotFromRegistrationsLocked computes the desired basic role permissions from the
// current registration list, using the shared seeding registration logic.
//
// it has to be called while holding the roles lock
func (s *Service) getBasicRolePermissionsLocked() map[string][]accesscontrol.Permission {
desired := map[accesscontrol.SeedPermission]struct{}{}
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
seeding.AppendDesiredPermissions(desired, s.log, &registration.Role, registration.Grants, registration.Exclude, true)
return true
})
out := make(map[string][]accesscontrol.Permission)
for sp := range desired {
out[sp.BuiltInRole] = append(out[sp.BuiltInRole], accesscontrol.Permission{
Action: sp.Action,
Scope: sp.Scope,
})
}
return out
}
// registerRolesLocked processes a single role registration and adds permissions to basic roles.
// Must be called with s.rolesMu locked.
func (s *Service) registerRolesLocked(registration accesscontrol.RoleRegistration) {
@@ -521,7 +474,6 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
defer span.End()
acRegs := pluginutils.ToRegistrations(ID, name, regs)
updatedBasicRoles := false
for _, r := range acRegs {
if err := pluginutils.ValidatePluginRole(ID, r.Role); err != nil {
return err
@@ -548,23 +500,11 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
if initialized {
s.rolesMu.Lock()
s.registerRolesLocked(r)
updatedBasicRoles = true
s.rolesMu.Unlock()
s.cache.Flush()
}
}
if updatedBasicRoles {
s.rolesMu.RLock()
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.RUnlock()
// plugin roles can be declared after startup - keep DB in sync
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
}
return nil
}

View File

@@ -1,623 +0,0 @@
package database
import (
"context"
"strings"
"time"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
"github.com/grafana/grafana/pkg/util/xorm/core"
)
const basicRolePermBatchSize = 500
// LoadRoles returns all fixed and plugin roles (global org) with permissions, indexed by role name.
func (s *AccessControlStore) LoadRoles(ctx context.Context) (map[string]*accesscontrol.RoleDTO, error) {
	out := map[string]*accesscontrol.RoleDTO{}
	err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
		// Local row type so the column mapping is explicit
		type roleRow struct {
			ID          int64     `xorm:"id"`
			OrgID       int64     `xorm:"org_id"`
			Version     int64     `xorm:"version"`
			UID         string    `xorm:"uid"`
			Name        string    `xorm:"name"`
			DisplayName string    `xorm:"display_name"`
			Description string    `xorm:"description"`
			Group       string    `xorm:"group_name"`
			Hidden      bool      `xorm:"hidden"`
			Updated     time.Time `xorm:"updated"`
			Created     time.Time `xorm:"created"`
		}

		// Fetch all global "fixed:" and plugin-prefixed roles in one query
		roles := []roleRow{}
		if err := sess.Table("role").
			Where("org_id = ?", accesscontrol.GlobalOrgID).
			Where("(name LIKE ? OR name LIKE ?)", accesscontrol.FixedRolePrefix+"%", accesscontrol.PluginRolePrefix+"%").
			Find(&roles); err != nil {
			return err
		}
		if len(roles) == 0 {
			return nil // nothing stored yet
		}

		// Index DTOs by id so permissions can be attached in a single pass
		roleIDs := make([]any, 0, len(roles))
		roleByID := make(map[int64]*accesscontrol.RoleDTO, len(roles))
		for _, r := range roles {
			dto := &accesscontrol.RoleDTO{
				ID:          r.ID,
				OrgID:       r.OrgID,
				Version:     r.Version,
				UID:         r.UID,
				Name:        r.Name,
				DisplayName: r.DisplayName,
				Description: r.Description,
				Group:       r.Group,
				Hidden:      r.Hidden,
				Updated:     r.Updated,
				Created:     r.Created,
			}
			out[dto.Name] = dto
			roleByID[dto.ID] = dto
			roleIDs = append(roleIDs, dto.ID)
		}

		// Load all permissions for the collected roles with one IN query
		type permRow struct {
			RoleID int64  `xorm:"role_id"`
			Action string `xorm:"action"`
			Scope  string `xorm:"scope"`
		}
		perms := []permRow{}
		if err := sess.Table("permission").In("role_id", roleIDs...).Find(&perms); err != nil {
			return err
		}
		for _, p := range perms {
			dto := roleByID[p.RoleID]
			if dto == nil {
				continue // permission row for a role we did not load
			}
			dto.Permissions = append(dto.Permissions, accesscontrol.Permission{
				RoleID: p.RoleID,
				Action: p.Action,
				Scope:  p.Scope,
			})
		}
		return nil
	})
	return out, err
}
// SetRole updates the mutable metadata (display name, description, group,
// hidden flag) of an existing global-org role and stamps "updated".
// A nil existingRole is a no-op.
func (s *AccessControlStore) SetRole(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
	if existingRole == nil {
		return nil
	}
	return s.sql.WithDbSession(ctx, func(sess *db.Session) error {
		fields := map[string]any{
			"display_name": wantedRole.DisplayName,
			"description":  wantedRole.Description,
			"group_name":   wantedRole.Group,
			"hidden":       wantedRole.Hidden,
			"updated":      time.Now(),
		}
		_, errUpdate := sess.Table("role").
			Where("id = ? AND org_id = ?", existingRole.ID, accesscontrol.GlobalOrgID).
			Update(fields)
		return errUpdate
	})
}
// SetPermissions reconciles the stored permissions of an existing global-org
// role with the wanted set: (action, scope) pairs missing from storage are
// inserted and stale ones are deleted, all inside one transaction.
// A nil existingRole or an empty diff is a no-op.
func (s *AccessControlStore) SetPermissions(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
	if existingRole == nil {
		return nil
	}
	type key struct{ Action, Scope string }
	have := make(map[key]struct{}, len(existingRole.Permissions))
	for _, p := range existingRole.Permissions {
		have[key{p.Action, p.Scope}] = struct{}{}
	}
	want := make(map[key]struct{}, len(wantedRole.Permissions))
	for _, p := range wantedRole.Permissions {
		want[key{p.Action, p.Scope}] = struct{}{}
	}
	inserts := make([]accesscontrol.Permission, 0)
	deletions := make([]accesscontrol.SeedPermission, 0)
	ts := time.Now()
	// Wanted but not stored: insert.
	for k := range want {
		if _, stored := have[k]; stored {
			continue
		}
		p := accesscontrol.Permission{
			RoleID:  existingRole.ID,
			Action:  k.Action,
			Scope:   k.Scope,
			Created: ts,
			Updated: ts,
		}
		p.Kind, p.Attribute, p.Identifier = accesscontrol.SplitScope(p.Scope)
		inserts = append(inserts, p)
	}
	// Stored but no longer wanted: delete.
	for k := range have {
		if _, wanted := want[k]; wanted {
			continue
		}
		deletions = append(deletions, accesscontrol.SeedPermission{Action: k.Action, Scope: k.Scope})
	}
	if len(inserts) == 0 && len(deletions) == 0 {
		return nil
	}
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		if len(deletions) > 0 {
			if err := DeleteRolePermissionTuples(sess, s.sql.GetDBType(), existingRole.ID, deletions); err != nil {
				return err
			}
		}
		if len(inserts) > 0 {
			_, err := sess.InsertMulti(inserts)
			return err
		}
		return nil
	})
}
// CreateRole inserts a new global-org role and its permissions in one
// transaction. For fixed/plugin roles without an explicit UID, the UID is
// derived from the role name; version defaults to 1. Duplicate
// (action, scope) pairs in the input are inserted only once.
func (s *AccessControlStore) CreateRole(ctx context.Context, role accesscontrol.RoleDTO) error {
	ts := time.Now()
	roleUID := role.UID
	if roleUID == "" && (strings.HasPrefix(role.Name, accesscontrol.FixedRolePrefix) || strings.HasPrefix(role.Name, accesscontrol.PluginRolePrefix)) {
		roleUID = accesscontrol.PrefixedRoleUID(role.Name)
	}
	newRole := accesscontrol.Role{
		OrgID:       accesscontrol.GlobalOrgID,
		Version:     role.Version,
		UID:         roleUID,
		Name:        role.Name,
		DisplayName: role.DisplayName,
		Description: role.Description,
		Group:       role.Group,
		Hidden:      role.Hidden,
		Created:     ts,
		Updated:     ts,
	}
	if newRole.Version == 0 {
		newRole.Version = 1
	}
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		if _, err := sess.Insert(&newRole); err != nil {
			return err
		}
		if len(role.Permissions) == 0 {
			return nil
		}
		// De-duplicate permissions on (action, scope) to avoid unique constraint violations.
		// Some role definitions may accidentally include duplicates.
		type permKey struct{ Action, Scope string }
		dedup := make(map[permKey]struct{}, len(role.Permissions))
		inserts := make([]accesscontrol.Permission, 0, len(role.Permissions))
		for _, p := range role.Permissions {
			k := permKey{Action: p.Action, Scope: p.Scope}
			if _, dup := dedup[k]; dup {
				continue
			}
			dedup[k] = struct{}{}
			next := accesscontrol.Permission{
				RoleID:  newRole.ID,
				Action:  p.Action,
				Scope:   p.Scope,
				Created: ts,
				Updated: ts,
			}
			next.Kind, next.Attribute, next.Identifier = accesscontrol.SplitScope(next.Scope)
			inserts = append(inserts, next)
		}
		_, err := sess.InsertMulti(inserts)
		return err
	})
}
// DeleteRoles removes the global-org roles with the given UIDs together with
// their permissions and user/team/builtin assignments, in one transaction.
// Unknown UIDs are ignored; an empty slice is a no-op.
//
// Fix: the four copy-pasted DELETE statement builders for the dependent
// tables are collapsed into a single loop over the table names — same SQL,
// same execution order, no duplicated placeholder construction.
func (s *AccessControlStore) DeleteRoles(ctx context.Context, roleUIDs []string) error {
	if len(roleUIDs) == 0 {
		return nil
	}
	uids := make([]any, 0, len(roleUIDs))
	for _, uid := range roleUIDs {
		uids = append(uids, uid)
	}
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		type row struct {
			ID  int64  `xorm:"id"`
			UID string `xorm:"uid"`
		}
		rows := []row{}
		if err := sess.Table("role").
			Where("org_id = ?", accesscontrol.GlobalOrgID).
			In("uid", uids...).
			Find(&rows); err != nil {
			return err
		}
		if len(rows) == 0 {
			return nil
		}
		roleIDs := make([]any, 0, len(rows))
		for _, r := range rows {
			roleIDs = append(roleIDs, r.ID)
		}
		// Remove permissions and assignments first to avoid FK issues (if enabled).
		placeholders := "(?" + strings.Repeat(",?", len(roleIDs)-1) + ")"
		for _, table := range []string{"permission", "user_role", "team_role", "builtin_role"} {
			args := append([]any{"DELETE FROM " + table + " WHERE role_id IN " + placeholders}, roleIDs...)
			if _, err := sess.Exec(args...); err != nil {
				return err
			}
		}
		args := append([]any{"DELETE FROM role WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")", accesscontrol.GlobalOrgID}, uids...)
		_, err := sess.Exec(args...)
		return err
	})
}
// OSS basic-role permission refresh uses seeding.Seeder.Seed() with a desired set computed in memory.
// These methods implement the permission seeding part of seeding.SeedingBackend against the current permission table.

// LoadPrevious returns the currently persisted basic-role permissions as a
// set keyed by SeedPermission, with Origin cleared to match the key shape
// the OSS seeder uses.
func (s *AccessControlStore) LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
	var result map[accesscontrol.SeedPermission]struct{}
	err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
		stored, errLoad := LoadBasicRoleSeedPermissions(sess)
		if errLoad != nil {
			return errLoad
		}
		result = make(map[accesscontrol.SeedPermission]struct{}, len(stored))
		for _, perm := range stored {
			perm.Origin = ""
			result[perm] = struct{}{}
		}
		return nil
	})
	return result, err
}
// Apply reconciles the persisted basic-role permissions with a precomputed
// diff (added/removed/updated) inside a single transaction, then bumps the
// version of every affected basic role.
func (s *AccessControlStore) Apply(ctx context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
	rolesToUpgrade := seeding.RolesToUpgrade(added, removed)
	// Run the same OSS apply logic as ossBasicRoleSeedBackend.Apply inside a single transaction.
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		defs := accesscontrol.BuildBasicRoleDefinitions()
		// Make sure the basic roles themselves exist before touching their permissions.
		builtinToRoleID, err := EnsureBasicRolesExist(sess, defs)
		if err != nil {
			return err
		}
		backend := &ossBasicRoleSeedBackend{
			sess:            sess,
			now:             time.Now(),
			builtinToRoleID: builtinToRoleID,
			desired:         nil, // unused here: the caller already computed the diff
			dbType:          s.sql.GetDBType(),
		}
		if err := backend.Apply(ctx, added, removed, updated); err != nil {
			return err
		}
		return BumpBasicRoleVersions(sess, rolesToUpgrade)
	})
}
// EnsureBasicRolesExist ensures the built-in basic roles exist in the role table and are bound in builtin_role.
// It returns a mapping from builtin role name (for example "Admin") to role ID.
func EnsureBasicRolesExist(sess *db.Session, defs map[string]*accesscontrol.RoleDTO) (map[string]int64, error) {
	// Index definitions by UID so fetched rows can be mapped back to builtin names.
	uidToBuiltin := make(map[string]string, len(defs))
	uids := make([]any, 0, len(defs))
	for builtin, def := range defs {
		uidToBuiltin[def.UID] = builtin
		uids = append(uids, def.UID)
	}
	type roleRow struct {
		ID  int64  `xorm:"id"`
		UID string `xorm:"uid"`
	}
	rows := []roleRow{}
	if err := sess.Table("role").
		Where("org_id = ?", accesscontrol.GlobalOrgID).
		In("uid", uids...).
		Find(&rows); err != nil {
		return nil, err
	}
	ts := time.Now()
	builtinToRoleID := make(map[string]int64, len(defs))
	// Record IDs of basic roles that already exist.
	for _, r := range rows {
		br, ok := uidToBuiltin[r.UID]
		if !ok {
			continue
		}
		builtinToRoleID[br] = r.ID
	}
	for builtin, def := range defs {
		roleID, ok := builtinToRoleID[builtin]
		if !ok {
			// Role missing: insert it from its definition.
			role := accesscontrol.Role{
				OrgID:       def.OrgID,
				Version:     def.Version,
				UID:         def.UID,
				Name:        def.Name,
				DisplayName: def.DisplayName,
				Description: def.Description,
				Group:       def.Group,
				Hidden:      def.Hidden,
				Created:     ts,
				Updated:     ts,
			}
			if _, err := sess.Insert(&role); err != nil {
				return nil, err
			}
			roleID = role.ID
			builtinToRoleID[builtin] = roleID
		}
		// Ensure the builtin_role binding (role_id, role, org_id) exists.
		has, err := sess.Table("builtin_role").
			Where("role_id = ? AND role = ? AND org_id = ?", roleID, builtin, accesscontrol.GlobalOrgID).
			Exist()
		if err != nil {
			return nil, err
		}
		if !has {
			br := accesscontrol.BuiltinRole{
				RoleID:  roleID,
				OrgID:   accesscontrol.GlobalOrgID,
				Role:    builtin,
				Created: ts,
				Updated: ts,
			}
			if _, err := sess.Table("builtin_role").Insert(&br); err != nil {
				return nil, err
			}
		}
	}
	return builtinToRoleID, nil
}
// DeleteRolePermissionTuples deletes permissions for a single role by (action, scope) pairs.
//
// It uses a row-constructor IN clause where supported (MySQL, Postgres, SQLite) and falls back
// to a WHERE ... OR ... form for MSSQL. An empty pair list is a no-op.
func DeleteRolePermissionTuples(sess *db.Session, dbType core.DbType, roleID int64, perms []accesscontrol.SeedPermission) error {
	if len(perms) == 0 {
		return nil
	}
	params := make([]any, 0, 1+len(perms)*2)
	params = append(params, roleID)
	if dbType == migrator.MSSQL {
		// MSSQL doesn't support (action, scope) IN ((?,?),(?,?)) row constructors.
		clauses := make([]string, 0, len(perms))
		for _, p := range perms {
			clauses = append(clauses, "(action = ? AND scope = ?)")
			params = append(params, p.Action, p.Scope)
		}
		query := "DELETE FROM permission WHERE role_id = ? AND (" + strings.Join(clauses, " OR ") + ")"
		_, err := sess.Exec(append([]any{query}, params...)...)
		return err
	}
	for _, p := range perms {
		params = append(params, p.Action, p.Scope)
	}
	query := "DELETE FROM permission WHERE role_id = ? AND (action, scope) IN (" +
		strings.Repeat("(?, ?),", len(perms)-1) + "(?, ?))"
	_, err := sess.Exec(append([]any{query}, params...)...)
	return err
}
// ossBasicRoleSeedBackend applies basic-role permission diffs against the
// permission table through an already-open (typically transactional) session.
type ossBasicRoleSeedBackend struct {
	sess *db.Session // session all reads/writes go through; owned by the caller
	now  time.Time   // timestamp stamped onto inserted permissions
	// builtinToRoleID maps builtin role names (e.g. "Admin") to role table IDs.
	builtinToRoleID map[string]int64
	// desired is the full desired permission set; may be nil when the diff is precomputed by the caller.
	desired map[accesscontrol.SeedPermission]struct{}
	dbType  core.DbType // selects dialect-specific SQL (MySQL INSERT IGNORE, MSSQL delete form)
}
// LoadPrevious returns the stored basic-role permissions as a set, clearing
// Origin so the keys match what the OSS seeder uses.
func (b *ossBasicRoleSeedBackend) LoadPrevious(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
	stored, err := LoadBasicRoleSeedPermissions(b.sess)
	if err != nil {
		return nil, err
	}
	result := make(map[accesscontrol.SeedPermission]struct{}, len(stored))
	for _, perm := range stored {
		// Ensure the key matches what OSS seeding uses (Origin is always empty for basic role refresh).
		perm.Origin = ""
		result[perm] = struct{}{}
	}
	return result, nil
}
// LoadDesired returns the preconfigured desired permission set.
// May be nil when the caller computes the diff itself (see AccessControlStore.Apply).
func (b *ossBasicRoleSeedBackend) LoadDesired(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
	return b.desired, nil
}
// Apply deletes the removed basic-role permissions and inserts the added and
// updated-target ones. Deletes are grouped per role and chunked; inserts are
// de-duplicated, chunked, and on MySQL issued as INSERT IGNORE so reseeding
// is idempotent.
func (b *ossBasicRoleSeedBackend) Apply(_ context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
	// Delete removed permissions (this includes user-defined permissions that aren't in desired).
	if len(removed) > 0 {
		// Group deletions per role so each DELETE targets a single role_id.
		permsByRoleID := map[int64][]accesscontrol.SeedPermission{}
		for _, p := range removed {
			roleID, ok := b.builtinToRoleID[p.BuiltInRole]
			if !ok {
				// Unknown builtin role name: nothing stored to delete.
				continue
			}
			permsByRoleID[roleID] = append(permsByRoleID[roleID], p)
		}
		for roleID, perms := range permsByRoleID {
			// Chunk to keep statement sizes and parameter counts bounded.
			if err := batch(len(perms), basicRolePermBatchSize, func(start, end int) error {
				return DeleteRolePermissionTuples(b.sess, b.dbType, roleID, perms[start:end])
			}); err != nil {
				return err
			}
		}
	}
	// Insert added permissions and updated-target permissions.
	toInsertSeed := make([]accesscontrol.SeedPermission, 0, len(added)+len(updated))
	toInsertSeed = append(toInsertSeed, added...)
	for _, v := range updated {
		toInsertSeed = append(toInsertSeed, v)
	}
	if len(toInsertSeed) == 0 {
		return nil
	}
	// De-duplicate on (role_id, action, scope). This avoids unique constraint violations when:
	// - the same permission appears in both added and updated
	// - multiple plugin origins grant the same permission (Origin is not persisted in permission table)
	type permKey struct {
		RoleID int64
		Action string
		Scope  string
	}
	seen := make(map[permKey]struct{}, len(toInsertSeed))
	toInsert := make([]accesscontrol.Permission, 0, len(toInsertSeed))
	for _, p := range toInsertSeed {
		roleID, ok := b.builtinToRoleID[p.BuiltInRole]
		if !ok {
			// Unknown builtin role name: no role row to attach this permission to.
			continue
		}
		k := permKey{RoleID: roleID, Action: p.Action, Scope: p.Scope}
		if _, ok := seen[k]; ok {
			continue
		}
		seen[k] = struct{}{}
		perm := accesscontrol.Permission{
			RoleID:  roleID,
			Action:  p.Action,
			Scope:   p.Scope,
			Created: b.now,
			Updated: b.now,
		}
		perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
		toInsert = append(toInsert, perm)
	}
	return batch(len(toInsert), basicRolePermBatchSize, func(start, end int) error {
		// MySQL: ignore conflicts to make seeding idempotent under retries/concurrency.
		// Conflicts can happen if the same permission already exists (unique on role_id, action, scope).
		if b.dbType == migrator.MySQL {
			args := make([]any, 0, (end-start)*8)
			for i := start; i < end; i++ {
				p := toInsert[i]
				args = append(args, p.RoleID, p.Action, p.Scope, p.Kind, p.Attribute, p.Identifier, p.Updated, p.Created)
			}
			sql := append([]any{`INSERT IGNORE INTO permission (role_id, action, scope, kind, attribute, identifier, updated, created) VALUES ` +
				strings.Repeat("(?, ?, ?, ?, ?, ?, ?, ?),", end-start-1) + "(?, ?, ?, ?, ?, ?, ?, ?)"}, args...)
			_, err := b.sess.Exec(sql...)
			return err
		}
		_, err := b.sess.InsertMulti(toInsert[start:end])
		return err
	})
}
func batch(count, size int, eachFn func(start, end int) error) error {
for i := 0; i < count; {
end := i + size
if end > count {
end = count
}
if err := eachFn(i, end); err != nil {
return err
}
i = end
}
return nil
}
// BumpBasicRoleVersions increments the role version for the given builtin basic roles (Viewer/Editor/Admin/Grafana Admin).
// Unknown role names are ignored; an empty or fully-unknown list is a no-op.
func BumpBasicRoleVersions(sess *db.Session, basicRoles []string) error {
	if len(basicRoles) == 0 {
		return nil
	}
	defs := accesscontrol.BuildBasicRoleDefinitions()
	roleUIDs := make([]any, 0, len(basicRoles))
	for _, name := range basicRoles {
		if def, known := defs[name]; known {
			roleUIDs = append(roleUIDs, def.UID)
		}
	}
	if len(roleUIDs) == 0 {
		return nil
	}
	query := "UPDATE role SET version = version + 1 WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(roleUIDs)-1) + ")"
	_, err := sess.Exec(append([]any{query, accesscontrol.GlobalOrgID}, roleUIDs...)...)
	return err
}
// LoadBasicRoleSeedPermissions returns the current (builtin_role, action, scope) permissions granted to basic roles.
// It sets Origin to empty.
//
// The builtin role name is taken from role.display_name; rows are matched by
// the "basic:" name prefix in the global org.
func LoadBasicRoleSeedPermissions(sess *db.Session) ([]accesscontrol.SeedPermission, error) {
	rows := []accesscontrol.SeedPermission{}
	err := sess.SQL(
		`SELECT role.display_name AS builtin_role, p.action, p.scope, '' AS origin
		FROM role INNER JOIN permission AS p ON p.role_id = role.id
		WHERE role.org_id = ? AND role.name LIKE 'basic:%'`,
		accesscontrol.GlobalOrgID,
	).Find(&rows)
	return rows, err
}

View File

@@ -6,8 +6,6 @@ import (
"strconv"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"go.opentelemetry.io/otel"
claims "github.com/grafana/authlib/types"
@@ -15,7 +13,6 @@ import (
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/serverlock"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -36,15 +33,12 @@ type ZanzanaReconciler struct {
store db.DB
client zanzana.Client
lock *serverlock.ServerLockService
metrics struct {
lastSuccess prometheus.Gauge
}
// reconcilers are migrations that tries to reconcile the state of grafana db to zanzana store.
// These are run periodically to try to maintain a consistent state.
reconcilers []resourceReconciler
}
func ProvideZanzanaReconciler(cfg *setting.Cfg, features featuremgmt.FeatureToggles, client zanzana.Client, store db.DB, lock *serverlock.ServerLockService, folderService folder.Service, reg prometheus.Registerer) *ZanzanaReconciler {
func ProvideZanzanaReconciler(cfg *setting.Cfg, features featuremgmt.FeatureToggles, client zanzana.Client, store db.DB, lock *serverlock.ServerLockService, folderService folder.Service) *ZanzanaReconciler {
zanzanaReconciler := &ZanzanaReconciler{
cfg: cfg,
log: reconcilerLogger,
@@ -98,13 +92,6 @@ func ProvideZanzanaReconciler(cfg *setting.Cfg, features featuremgmt.FeatureTogg
},
}
if reg != nil {
zanzanaReconciler.metrics.lastSuccess = promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "grafana_zanzana_reconcile_last_success_timestamp_seconds",
Help: "Unix timestamp (seconds) when the Zanzana reconciler last completed a reconciliation cycle.",
})
}
if cfg.Anonymous.Enabled {
zanzanaReconciler.reconcilers = append(zanzanaReconciler.reconcilers,
newResourceReconciler(
@@ -131,9 +118,6 @@ func (r *ZanzanaReconciler) Run(ctx context.Context) error {
// Reconcile schedules as job that will run and reconcile resources between
// legacy access control and zanzana.
func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
// Ensure we don't reconcile an empty/partial RBAC state before OSS has seeded basic role permissions.
// This matters most during startup where fixed-role loading + basic-role permission refresh runs as another background service.
r.waitForBasicRolesSeeded(ctx)
r.reconcile(ctx)
// FIXME:
@@ -149,57 +133,6 @@ func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
}
}
func (r *ZanzanaReconciler) hasBasicRolePermissions(ctx context.Context) bool {
var count int64
// Basic role permissions are stored on "basic:%" roles in the global org (0).
// In a fresh DB, this will be empty until fixed roles are registered and the basic role permission refresh runs.
type row struct {
Count int64 `xorm:"count"`
}
_ = r.store.WithDbSession(ctx, func(sess *db.Session) error {
var rr row
_, err := sess.SQL(
`SELECT COUNT(*) AS count
FROM role INNER JOIN permission AS p ON p.role_id = role.id
WHERE role.org_id = ? AND role.name LIKE ?`,
accesscontrol.GlobalOrgID,
accesscontrol.BasicRolePrefix+"%",
).Get(&rr)
if err != nil {
return err
}
count = rr.Count
return nil
})
return count > 0
}
func (r *ZanzanaReconciler) waitForBasicRolesSeeded(ctx context.Context) {
// Best-effort: don't block forever. If we can't observe basic roles, proceed anyway.
const (
maxWait = 15 * time.Second
interval = 1 * time.Second
)
deadline := time.NewTimer(maxWait)
defer deadline.Stop()
ticker := time.NewTicker(interval)
defer ticker.Stop()
for {
if r.hasBasicRolePermissions(ctx) {
return
}
select {
case <-ctx.Done():
return
case <-deadline.C:
return
case <-ticker.C:
}
}
}
func (r *ZanzanaReconciler) reconcile(ctx context.Context) {
run := func(ctx context.Context, namespace string) {
now := time.Now()
@@ -211,9 +144,6 @@ func (r *ZanzanaReconciler) reconcile(ctx context.Context) {
r.log.Warn("Failed to perform reconciliation for resource", "err", err)
}
}
if r.metrics.lastSuccess != nil {
r.metrics.lastSuccess.SetToCurrentTime()
}
r.log.Debug("Finished reconciliation", "elapsed", time.Since(now))
}

View File

@@ -1,67 +0,0 @@
package dualwrite
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
func TestZanzanaReconciler_hasBasicRolePermissions(t *testing.T) {
env := setupTestEnv(t)
r := &ZanzanaReconciler{
store: env.db,
}
ctx := context.Background()
require.False(t, r.hasBasicRolePermissions(ctx))
err := env.db.WithDbSession(ctx, func(sess *db.Session) error {
now := time.Now()
_, err := sess.Exec(
`INSERT INTO role (org_id, uid, name, display_name, group_name, description, hidden, version, created, updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
accesscontrol.GlobalOrgID,
"basic_viewer_uid_test",
accesscontrol.BasicRolePrefix+"viewer",
"Viewer",
"Basic",
"Viewer role",
false,
1,
now,
now,
)
if err != nil {
return err
}
var roleID int64
if _, err := sess.SQL(`SELECT id FROM role WHERE org_id = ? AND uid = ?`, accesscontrol.GlobalOrgID, "basic_viewer_uid_test").Get(&roleID); err != nil {
return err
}
_, err = sess.Exec(
`INSERT INTO permission (role_id, action, scope, kind, attribute, identifier, created, updated)
VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
roleID,
"dashboards:read",
"dashboards:*",
"",
"",
"",
now,
now,
)
return err
})
require.NoError(t, err)
require.True(t, r.hasBasicRolePermissions(ctx))
}

View File

@@ -1,7 +1,6 @@
package accesscontrol
import (
"context"
"encoding/json"
"errors"
"fmt"
@@ -595,18 +594,3 @@ type QueryWithOrg struct {
OrgId *int64 `json:"orgId"`
Global bool `json:"global"`
}
type SeedPermission struct {
BuiltInRole string `xorm:"builtin_role"`
Action string `xorm:"action"`
Scope string `xorm:"scope"`
Origin string `xorm:"origin"`
}
type RoleStore interface {
LoadRoles(ctx context.Context) (map[string]*RoleDTO, error)
SetRole(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
SetPermissions(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
CreateRole(ctx context.Context, role RoleDTO) error
DeleteRoles(ctx context.Context, roleUIDs []string) error
}

View File

@@ -1,451 +0,0 @@
package seeding
import (
"context"
"fmt"
"regexp"
"slices"
"strings"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol"
)
type Seeder struct {
log log.Logger
roleStore accesscontrol.RoleStore
backend SeedingBackend
builtinsPermissions map[accesscontrol.SeedPermission]struct{}
seededFixedRoles map[string]bool
seededPluginRoles map[string]bool
seededPlugins map[string]bool
hasSeededAlready bool
}
// SeedingBackend provides the seed-set specific operations needed to seed.
type SeedingBackend interface {
// LoadPrevious returns the currently stored permissions for previously seeded roles.
LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error)
// Apply updates the database to match the desired permissions.
Apply(ctx context.Context,
added, removed []accesscontrol.SeedPermission,
updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission,
) error
}
func New(log log.Logger, roleStore accesscontrol.RoleStore, backend SeedingBackend) *Seeder {
return &Seeder{
log: log,
roleStore: roleStore,
backend: backend,
builtinsPermissions: map[accesscontrol.SeedPermission]struct{}{},
seededFixedRoles: map[string]bool{},
seededPluginRoles: map[string]bool{},
seededPlugins: map[string]bool{},
hasSeededAlready: false,
}
}
// SetDesiredPermissions replaces the in-memory desired permission set used by Seed().
func (s *Seeder) SetDesiredPermissions(desired map[accesscontrol.SeedPermission]struct{}) {
if desired == nil {
s.builtinsPermissions = map[accesscontrol.SeedPermission]struct{}{}
return
}
s.builtinsPermissions = desired
}
// Seed loads current and desired permissions, diffs them (including scope updates), applies changes, and bumps versions.
func (s *Seeder) Seed(ctx context.Context) error {
previous, err := s.backend.LoadPrevious(ctx)
if err != nil {
return err
}
// - Do not remove plugin permissions when the plugin didn't register this run (Origin set but not in seededPlugins).
// - Preserve legacy plugin app access permissions in the persisted seed set (these are granted by default).
if len(previous) > 0 {
filtered := make(map[accesscontrol.SeedPermission]struct{}, len(previous))
for p := range previous {
if p.Action == pluginaccesscontrol.ActionAppAccess {
continue
}
if p.Origin != "" && !s.seededPlugins[p.Origin] {
continue
}
filtered[p] = struct{}{}
}
previous = filtered
}
added, removed, updated := s.permissionDiff(previous, s.builtinsPermissions)
if err := s.backend.Apply(ctx, added, removed, updated); err != nil {
return err
}
return nil
}
// SeedRoles populates the database with the roles and their assignments
// It will create roles that do not exist and update roles that have changed
// Do not use for provisioning. Validation is not enforced.
func (s *Seeder) SeedRoles(ctx context.Context, registrationList []accesscontrol.RoleRegistration) error {
roleMap, err := s.roleStore.LoadRoles(ctx)
if err != nil {
return err
}
missingRoles := make([]accesscontrol.RoleRegistration, 0, len(registrationList))
// Diff existing roles with the ones we want to seed.
// If a role is missing, we add it to the missingRoles list
for _, registration := range registrationList {
registration := registration
role, ok := roleMap[registration.Role.Name]
switch {
case registration.Role.IsFixed():
s.seededFixedRoles[registration.Role.Name] = true
case registration.Role.IsPlugin():
s.seededPluginRoles[registration.Role.Name] = true
// To be resilient to failed plugin loadings, we remember the plugins that have registered,
// later we'll ignore permissions and roles of other plugins
s.seededPlugins[pluginutils.PluginIDFromName(registration.Role.Name)] = true
}
s.rememberPermissionAssignments(&registration.Role, registration.Grants, registration.Exclude)
if !ok {
missingRoles = append(missingRoles, registration)
continue
}
if needsRoleUpdate(role, registration.Role) {
if err := s.roleStore.SetRole(ctx, role, registration.Role); err != nil {
return err
}
}
if needsPermissionsUpdate(role, registration.Role) {
if err := s.roleStore.SetPermissions(ctx, role, registration.Role); err != nil {
return err
}
}
}
for _, registration := range missingRoles {
if err := s.roleStore.CreateRole(ctx, registration.Role); err != nil {
return err
}
}
return nil
}
func needsPermissionsUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
if existingRole == nil {
return true
}
if len(existingRole.Permissions) != len(wantedRole.Permissions) {
return true
}
for _, p := range wantedRole.Permissions {
found := false
for _, ep := range existingRole.Permissions {
if ep.Action == p.Action && ep.Scope == p.Scope {
found = true
break
}
}
if !found {
return true
}
}
return false
}
func needsRoleUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
if existingRole == nil {
return true
}
if existingRole.Name != wantedRole.Name {
return false
}
if existingRole.DisplayName != wantedRole.DisplayName {
return true
}
if existingRole.Description != wantedRole.Description {
return true
}
if existingRole.Group != wantedRole.Group {
return true
}
if existingRole.Hidden != wantedRole.Hidden {
return true
}
return false
}
// Deprecated: SeedRole is deprecated and should not be used.
// SeedRoles only does boot up seeding and should not be used for runtime seeding.
func (s *Seeder) SeedRole(ctx context.Context, role accesscontrol.RoleDTO, builtInRoles []string) error {
addedPermissions := make(map[string]struct{}, len(role.Permissions))
permissions := make([]accesscontrol.Permission, 0, len(role.Permissions))
for _, p := range role.Permissions {
key := fmt.Sprintf("%s:%s", p.Action, p.Scope)
if _, ok := addedPermissions[key]; !ok {
addedPermissions[key] = struct{}{}
permissions = append(permissions, accesscontrol.Permission{Action: p.Action, Scope: p.Scope})
}
}
wantedRole := accesscontrol.RoleDTO{
OrgID: accesscontrol.GlobalOrgID,
Version: role.Version,
UID: role.UID,
Name: role.Name,
DisplayName: role.DisplayName,
Description: role.Description,
Group: role.Group,
Permissions: permissions,
Hidden: role.Hidden,
}
roleMap, err := s.roleStore.LoadRoles(ctx)
if err != nil {
return err
}
existingRole := roleMap[wantedRole.Name]
if existingRole == nil {
if err := s.roleStore.CreateRole(ctx, wantedRole); err != nil {
return err
}
} else {
if needsRoleUpdate(existingRole, wantedRole) {
if err := s.roleStore.SetRole(ctx, existingRole, wantedRole); err != nil {
return err
}
}
if needsPermissionsUpdate(existingRole, wantedRole) {
if err := s.roleStore.SetPermissions(ctx, existingRole, wantedRole); err != nil {
return err
}
}
}
// Remember seeded roles
if wantedRole.IsFixed() {
s.seededFixedRoles[wantedRole.Name] = true
}
isPluginRole := wantedRole.IsPlugin()
if isPluginRole {
s.seededPluginRoles[wantedRole.Name] = true
// To be resilient to failed plugin loadings, we remember the plugins that have registered,
// later we'll ignore permissions and roles of other plugins
s.seededPlugins[pluginutils.PluginIDFromName(role.Name)] = true
}
s.rememberPermissionAssignments(&wantedRole, builtInRoles, []string{})
return nil
}
func (s *Seeder) rememberPermissionAssignments(role *accesscontrol.RoleDTO, builtInRoles []string, excludedRoles []string) {
AppendDesiredPermissions(s.builtinsPermissions, s.log, role, builtInRoles, excludedRoles, true)
}
// AppendDesiredPermissions accumulates permissions from a role registration onto basic roles (Viewer/Editor/Admin/Grafana Admin).
// - It expands parents via accesscontrol.BuiltInRolesWithParents.
// - It can optionally ignore plugin app access permissions (which are granted by default).
func AppendDesiredPermissions(
out map[accesscontrol.SeedPermission]struct{},
logger log.Logger,
role *accesscontrol.RoleDTO,
builtInRoles []string,
excludedRoles []string,
ignorePluginAppAccess bool,
) {
if out == nil || role == nil {
return
}
for builtInRole := range accesscontrol.BuiltInRolesWithParents(builtInRoles) {
// Skip excluded grants
if slices.Contains(excludedRoles, builtInRole) {
continue
}
for _, perm := range role.Permissions {
if ignorePluginAppAccess && perm.Action == pluginaccesscontrol.ActionAppAccess {
logger.Debug("Role is attempting to grant access permission, but this permission is already granted by default and will be ignored",
"role", role.Name, "permission", perm.Action, "scope", perm.Scope)
continue
}
sp := accesscontrol.SeedPermission{
BuiltInRole: builtInRole,
Action: perm.Action,
Scope: perm.Scope,
}
if role.IsPlugin() {
sp.Origin = pluginutils.PluginIDFromName(role.Name)
}
out[sp] = struct{}{}
}
}
}
// permissionDiff returns:
// - added: present in desired permissions, not in previous permissions
// - removed: present in previous permissions, not in desired permissions
// - updated: same role + action, but scope changed
func (s *Seeder) permissionDiff(previous, desired map[accesscontrol.SeedPermission]struct{}) (added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) {
addedSet := make(map[accesscontrol.SeedPermission]struct{}, 0)
for n := range desired {
if _, already := previous[n]; !already {
addedSet[n] = struct{}{}
} else {
delete(previous, n)
}
}
// Check if any of the new permissions is actually an old permission with an updated scope
updated = make(map[accesscontrol.SeedPermission]accesscontrol.SeedPermission, 0)
for n := range addedSet {
for p := range previous {
if n.BuiltInRole == p.BuiltInRole && n.Action == p.Action {
updated[p] = n
delete(addedSet, n)
}
}
}
for p := range addedSet {
added = append(added, p)
}
for p := range previous {
if p.Action == pluginaccesscontrol.ActionAppAccess &&
p.Scope != pluginaccesscontrol.ScopeProvider.GetResourceAllScope() {
// Allows backward compatibility with plugins that have been seeded before the grant ignore rule was added
s.log.Info("This permission already existed so it will not be removed",
"role", p.BuiltInRole, "permission", p.Action, "scope", p.Scope)
continue
}
removed = append(removed, p)
}
return added, removed, updated
}
func (s *Seeder) ClearBasicRolesPluginPermissions(ID string) {
removable := []accesscontrol.SeedPermission{}
for key := range s.builtinsPermissions {
if matchPermissionByPluginID(key, ID) {
removable = append(removable, key)
}
}
for _, perm := range removable {
delete(s.builtinsPermissions, perm)
}
}
func matchPermissionByPluginID(perm accesscontrol.SeedPermission, pluginID string) bool {
if perm.Origin != pluginID {
return false
}
actionTemplate := regexp.MustCompile(fmt.Sprintf("%s[.:]", pluginID))
scopeTemplate := fmt.Sprintf(":%s", pluginID)
return actionTemplate.MatchString(perm.Action) || strings.HasSuffix(perm.Scope, scopeTemplate)
}
// RolesToUpgrade returns the unique basic roles that should have their version
// incremented, i.e. every built-in role touched by an added or removed
// permission. The order of the returned slice is unspecified.
func RolesToUpgrade(added, removed []accesscontrol.SeedPermission) []string {
	unique := make(map[string]struct{}, len(added)+len(removed))
	for _, perm := range added {
		unique[perm.BuiltInRole] = struct{}{}
	}
	for _, perm := range removed {
		unique[perm.BuiltInRole] = struct{}{}
	}
	roles := make([]string, 0, len(unique))
	for role := range unique {
		roles = append(roles, role)
	}
	return roles
}
// ClearPluginRoles removes every seeded role entry belonging to the plugin
// identified by ID (role names of the form "<PluginRolePrefix><ID>:...").
func (s *Seeder) ClearPluginRoles(ID string) {
	prefix := accesscontrol.PluginRolePrefix + ID + ":"
	stale := make([]string, 0)
	for name := range s.seededPluginRoles {
		if strings.HasPrefix(name, prefix) {
			stale = append(stale, name)
		}
	}
	for _, name := range stale {
		delete(s.seededPluginRoles, name)
	}
}
// MarkSeededAlready records that the seeding process has run at least once.
func (s *Seeder) MarkSeededAlready() {
	s.hasSeededAlready = true
}
// HasSeededAlready reports whether MarkSeededAlready has been called,
// i.e. whether seeding has completed at least once.
func (s *Seeder) HasSeededAlready() bool {
	return s.hasSeededAlready
}
// RemoveAbsentRoles deletes stored fixed and plugin roles that were not seeded
// during the current run. Returns the first store error encountered (load or
// delete); errors are also logged.
func (s *Seeder) RemoveAbsentRoles(ctx context.Context) error {
	roleMap, errGet := s.roleStore.LoadRoles(ctx)
	if errGet != nil {
		s.log.Error("failed to get fixed roles from store", "err", errGet)
		return errGet
	}
	toRemove := []string{}
	for _, r := range roleMap {
		if r == nil {
			continue
		}
		if r.IsFixed() {
			// A fixed role present in the store but absent from this run's
			// seed set is marked for deletion.
			if !s.seededFixedRoles[r.Name] {
				s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
			continue
		}
		if r.IsPlugin() {
			if !s.seededPlugins[pluginutils.PluginIDFromName(r.Name)] {
				// To be resilient to failed plugin loadings
				// ignore stored roles related to plugins that have not registered this time
				s.log.Debug("plugin role has not been registered on this run skipping its removal", "role", r.Name)
				continue
			}
			// Plugin registered this run, but this particular role was not
			// seeded: mark it for deletion.
			if !s.seededPluginRoles[r.Name] {
				s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
		}
	}
	// NOTE(review): DeleteRoles is invoked even when toRemove is empty —
	// presumably a cheap no-op in the store; confirm whether a guard is wanted.
	if errDelete := s.roleStore.DeleteRoles(ctx, toRemove); errDelete != nil {
		s.log.Error("failed to delete absent fixed and plugin roles", "err", errDelete)
		return errDelete
	}
	return nil
}

View File

@@ -4,6 +4,7 @@ import (
"bytes"
"encoding/json"
"maps"
"net/http"
"strings"
"sync"
@@ -232,11 +233,9 @@ func getOpenAPIPostProcessor(version string, builders []APIGroupBuilder, gvs []s
parent := copy.Paths.Paths[path[:idx+6]]
if parent != nil && parent.Get != nil {
for _, op := range GetPathOperations(spec) {
if op != nil && op.Extensions != nil {
action, ok := op.Extensions.GetString("x-kubernetes-action")
if ok && action == "connect" {
op.Tags = parent.Get.Tags
}
action, ok := op.Extensions.GetString("x-kubernetes-action")
if ok && action == "connect" {
op.Tags = parent.Get.Tags
}
}
}
@@ -249,15 +248,32 @@ func getOpenAPIPostProcessor(version string, builders []APIGroupBuilder, gvs []s
}
}
func GetPathOperations(path *spec3.Path) []*spec3.Operation {
return []*spec3.Operation{
path.Get,
path.Head,
path.Delete,
path.Patch,
path.Post,
path.Put,
path.Trace,
path.Options,
// GetPathOperations returns the set of non-nil operations defined on a path
func GetPathOperations(path *spec3.Path) map[string]*spec3.Operation {
ops := make(map[string]*spec3.Operation)
if path.Get != nil {
ops[http.MethodGet] = path.Get
}
if path.Head != nil {
ops[http.MethodHead] = path.Head
}
if path.Delete != nil {
ops[http.MethodDelete] = path.Delete
}
if path.Post != nil {
ops[http.MethodPost] = path.Post
}
if path.Put != nil {
ops[http.MethodPut] = path.Put
}
if path.Patch != nil {
ops[http.MethodPatch] = path.Patch
}
if path.Trace != nil {
ops[http.MethodTrace] = path.Trace
}
if path.Options != nil {
ops[http.MethodOptions] = path.Options
}
return ops
}

View File

@@ -0,0 +1,76 @@
package builder
import (
"slices"
"strings"
"testing"
"github.com/stretchr/testify/require"
"k8s.io/kube-openapi/pkg/spec3"
)
func TestOpenAPI_GetPathOperations(t *testing.T) {
testCases := []struct {
name string
input *spec3.Path
expect []string // the methods we should see
exclude []string // the methods we should never see
}{
{
name: "some operations",
input: &spec3.Path{
PathProps: spec3.PathProps{
Get: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "get"}},
Post: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "post"}},
Delete: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "delete"}},
},
},
expect: []string{"GET", "POST", "DELETE"},
exclude: []string{"PUT", "PATCH", "OPTIONS", "HEAD", "TRACE"},
},
{
name: "all operations",
input: &spec3.Path{
PathProps: spec3.PathProps{
Get: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "get"}},
Post: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "post"}},
Delete: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "delete"}},
Put: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "put"}},
Patch: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "patch"}},
Options: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "options"}},
Head: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "head"}},
Trace: &spec3.Operation{OperationProps: spec3.OperationProps{Summary: "trace"}},
},
},
expect: []string{"GET", "POST", "DELETE", "PUT", "PATCH", "OPTIONS", "HEAD", "TRACE"},
exclude: []string{},
},
}
for _, tt := range testCases {
t.Run(tt.name, func(t *testing.T) {
expect := make(map[string]bool)
for _, k := range tt.expect {
expect[k] = true
}
for k, op := range GetPathOperations(tt.input) {
require.NotNil(t, op)
require.Equal(t, strings.ToLower(k), op.Summary)
if !expect[k] {
if slices.Contains(tt.expect, k) {
require.Fail(t, "method returned multiple times", k)
} else {
require.Fail(t, "unexpected method", k)
}
}
delete(expect, k)
require.NotContains(t, tt.exclude, k, "exclude")
}
if len(expect) > 0 {
require.Fail(t, "missing expected method", expect)
}
})
}
}

View File

@@ -78,9 +78,6 @@ func ProvideZanzanaClient(cfg *setting.Cfg, db db.DB, tracer tracing.Tracer, fea
ctx = types.WithAuthInfo(ctx, authnlib.NewAccessTokenAuthInfo(authnlib.Claims[authnlib.AccessTokenClaims]{
Rest: authnlib.AccessTokenClaims{
Namespace: "*",
Permissions: []string{
zanzana.TokenPermissionUpdate,
},
},
}))
return ctx, nil

View File

@@ -4,9 +4,7 @@ import (
"context"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/setting"
"golang.org/x/exp/slices"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
@@ -32,20 +30,3 @@ func authorize(ctx context.Context, namespace string, ss setting.ZanzanaServerSe
}
return nil
}
func authorizeWrite(ctx context.Context, namespace string, ss setting.ZanzanaServerSettings) error {
if err := authorize(ctx, namespace, ss); err != nil {
return err
}
c, ok := claims.AuthInfoFrom(ctx)
if !ok {
return status.Errorf(codes.Unauthenticated, "unauthenticated")
}
if !slices.Contains(c.GetTokenPermissions(), zanzana.TokenPermissionUpdate) {
return status.Errorf(codes.PermissionDenied, "missing token permission %s", zanzana.TokenPermissionUpdate)
}
return nil
}

View File

@@ -391,7 +391,7 @@ func setupBenchmarkServer(b *testing.B) (*Server, *benchmarkData) {
b.Logf("Total tuples to write: %d", len(allTuples))
// Get store info
ctx := newContextWithZanzanaUpdatePermission()
ctx := newContextWithNamespace()
storeInf, err := srv.getStoreInfo(ctx, benchNamespace)
require.NoError(b, err)

View File

@@ -8,7 +8,6 @@ import (
openfgav1 "github.com/openfga/api/proto/openfga/v1"
"go.opentelemetry.io/otel/codes"
"google.golang.org/grpc/status"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
)
@@ -36,9 +35,6 @@ func (s *Server) Mutate(ctx context.Context, req *authzextv1.MutateRequest) (*au
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
if _, ok := status.FromError(err); ok {
return nil, err
}
s.logger.Error("failed to perform mutate request", "error", err, "namespace", req.GetNamespace())
return nil, errors.New("failed to perform mutate request")
}
@@ -47,7 +43,7 @@ func (s *Server) Mutate(ctx context.Context, req *authzextv1.MutateRequest) (*au
}
func (s *Server) mutate(ctx context.Context, req *authzextv1.MutateRequest) (*authzextv1.MutateResponse, error) {
if err := authorizeWrite(ctx, req.GetNamespace(), s.cfg); err != nil {
if err := authorize(ctx, req.GetNamespace(), s.cfg); err != nil {
return nil, err
}

View File

@@ -30,7 +30,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
setupMutateFolders(t, srv)
t.Run("should create new folder parent relation", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -61,7 +61,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
})
t.Run("should delete folder parent relation", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -88,7 +88,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
})
t.Run("should clean up all parent relations", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -115,7 +115,7 @@ func testMutateFolders(t *testing.T, srv *Server) {
})
t.Run("should perform batch mutate if multiple operations are provided", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateOrgRoles(t *testing.T, srv *Server) {
setupMutateOrgRoles(t, srv)
t.Run("should update user org role and delete old role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -63,7 +63,7 @@ func testMutateOrgRoles(t *testing.T, srv *Server) {
})
t.Run("should add user org role and delete old role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -28,7 +28,7 @@ func testMutateResourcePermissions(t *testing.T, srv *Server) {
setupMutateResourcePermissions(t, srv)
t.Run("should create new resource permission", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -76,7 +76,7 @@ func testMutateResourcePermissions(t *testing.T, srv *Server) {
require.NoError(t, err)
require.Len(t, res.Tuples, 2)
_, err = srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err = srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateRoleBindings(t *testing.T, srv *Server) {
setupMutateRoleBindings(t, srv)
t.Run("should update user role and delete old role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -75,7 +75,7 @@ func testMutateRoleBindings(t *testing.T, srv *Server) {
})
t.Run("should assign role to basic role", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateRoles(t *testing.T, srv *Server) {
setupMutateRoles(t, srv)
t.Run("should update role and delete old role permissions", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -25,7 +25,7 @@ func testMutateTeamBindings(t *testing.T, srv *Server) {
setupMutateTeamBindings(t, srv)
t.Run("should update user team binding and delete old team binding", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{

View File

@@ -5,8 +5,6 @@ import (
openfgav1 "github.com/openfga/api/proto/openfga/v1"
"github.com/stretchr/testify/require"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
"google.golang.org/protobuf/types/known/structpb"
iamv0 "github.com/grafana/grafana/apps/iam/pkg/apis/iam/v0alpha1"
@@ -35,7 +33,7 @@ func testMutate(t *testing.T, srv *Server) {
setupMutate(t, srv)
t.Run("should perform multiple mutate operations", func(t *testing.T) {
_, err := srv.Mutate(newContextWithZanzanaUpdatePermission(), &v1.MutateRequest{
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
@@ -135,25 +133,6 @@ func testMutate(t *testing.T, srv *Server) {
require.NoError(t, err)
require.Len(t, res.Tuples, 0)
})
t.Run("should reject mutate without zanzana:update", func(t *testing.T) {
_, err := srv.Mutate(newContextWithNamespace(), &v1.MutateRequest{
Namespace: "default",
Operations: []*v1.MutateOperation{
{
Operation: &v1.MutateOperation_SetFolderParent{
SetFolderParent: &v1.SetFolderParentOperation{
Folder: "new-folder",
Parent: "1",
DeleteExisting: false,
},
},
},
},
})
require.Error(t, err)
require.Equal(t, codes.PermissionDenied, status.Code(err))
})
}
func TestDeduplicateTupleKeys(t *testing.T) {

View File

@@ -14,7 +14,6 @@ import (
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
"github.com/grafana/grafana/pkg/services/authz/zanzana/store"
"github.com/grafana/grafana/pkg/services/sqlstore"
@@ -219,21 +218,11 @@ func setupOpenFGADatabase(t *testing.T, srv *Server, tuples []*openfgav1.TupleKe
}
func newContextWithNamespace() context.Context {
return newContextWithNamespaceAndPermissions()
}
func newContextWithNamespaceAndPermissions(perms ...string) context.Context {
ctx := context.Background()
ctx = claims.WithAuthInfo(ctx, authnlib.NewAccessTokenAuthInfo(authnlib.Claims[authnlib.AccessTokenClaims]{
Rest: authnlib.AccessTokenClaims{
Namespace: "*",
Permissions: perms,
DelegatedPermissions: perms,
Namespace: "*",
},
}))
return ctx
}
func newContextWithZanzanaUpdatePermission() context.Context {
return newContextWithNamespaceAndPermissions(zanzana.TokenPermissionUpdate)
}

View File

@@ -8,7 +8,6 @@ import (
openfgav1 "github.com/openfga/api/proto/openfga/v1"
"go.opentelemetry.io/otel/codes"
"google.golang.org/grpc/status"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
@@ -26,9 +25,6 @@ func (s *Server) Write(ctx context.Context, req *authzextv1.WriteRequest) (*auth
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
if _, ok := status.FromError(err); ok {
return nil, err
}
s.logger.Error("failed to perform write request", "error", err, "namespace", req.GetNamespace())
return nil, errors.New("failed to perform write request")
}
@@ -37,7 +33,7 @@ func (s *Server) Write(ctx context.Context, req *authzextv1.WriteRequest) (*auth
}
func (s *Server) write(ctx context.Context, req *authzextv1.WriteRequest) (*authzextv1.WriteResponse, error) {
if err := authorizeWrite(ctx, req.GetNamespace(), s.cfg); err != nil {
if err := authorize(ctx, req.GetNamespace(), s.cfg); err != nil {
return nil, err
}

View File

@@ -1,46 +0,0 @@
package server
import (
"testing"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
"github.com/stretchr/testify/require"
)
func TestWriteAuthorization(t *testing.T) {
cfg := setting.NewCfg()
testStore := sqlstore.NewTestStore(t, sqlstore.WithCfg(cfg))
srv := setupOpenFGAServer(t, testStore, cfg)
setup(t, srv)
req := &authzextv1.WriteRequest{
Namespace: namespace,
Writes: &authzextv1.WriteRequestWrites{
TupleKeys: []*authzextv1.TupleKey{
{
// Folder parent tuples are valid without any relationship condition.
User: "folder:1",
Relation: common.RelationParent,
Object: "folder:write-authz-test",
},
},
},
}
t.Run("denies Write without zanzana:update", func(t *testing.T) {
_, err := srv.Write(newContextWithNamespace(), req)
require.Error(t, err)
require.Equal(t, codes.PermissionDenied, status.Code(err))
})
t.Run("allows Write with zanzana:update", func(t *testing.T) {
_, err := srv.Write(newContextWithZanzanaUpdatePermission(), req)
require.NoError(t, err)
})
}

View File

@@ -16,9 +16,6 @@ const (
TypeNamespace = common.TypeGroupResouce
)
// TokenPermissionUpdate is required for callers to perform write operations against Zanzana (Mutate/Write).
const TokenPermissionUpdate = "zanzana:update" //nolint:gosec // G101: permission identifier, not a credential.
const (
RelationTeamMember = common.RelationTeamMember
RelationTeamAdmin = common.RelationTeamAdmin

View File

@@ -1,13 +1,12 @@
SELECT
{{ .Ident "created" }},
{{ .Ident "created_by" }},
{{ .Ident "version" }},
{{ .Ident "active" }},
{{ .Ident "namespace" }},
{{ .Ident "name" }}
FROM
{{ .Ident "secret_secure_value" }}
WHERE
WHERE
{{ .Ident "namespace" }} = {{ .Arg .Namespace }} AND
{{ .Ident "name" }} = {{ .Arg .Name }}
ORDER BY {{ .Ident "version" }} DESC

View File

@@ -122,7 +122,7 @@ func (sv *secureValueDB) toKubernetes() (*secretv1beta1.SecureValue, error) {
}
// toCreateRow maps a Kubernetes resource into a DB row for new resources being created/inserted.
func toCreateRow(createdAt, updatedAt int64, keeper string, sv *secretv1beta1.SecureValue, createdBy, updatedBy string) (*secureValueDB, error) {
func toCreateRow(createdAt, updatedAt int64, keeper string, sv *secretv1beta1.SecureValue, actorUID string) (*secureValueDB, error) {
row, err := toRow(keeper, sv, "")
if err != nil {
return nil, fmt.Errorf("failed to convert SecureValue to secureValueDB: %w", err)
@@ -130,9 +130,9 @@ func toCreateRow(createdAt, updatedAt int64, keeper string, sv *secretv1beta1.Se
row.GUID = uuid.New().String()
row.Created = createdAt
row.CreatedBy = createdBy
row.CreatedBy = actorUID
row.Updated = updatedAt
row.UpdatedBy = updatedBy
row.UpdatedBy = actorUID
return row, nil
}

View File

@@ -85,7 +85,7 @@ func (s *secureValueMetadataStorage) Create(ctx context.Context, keeper string,
var row *secureValueDB
err := s.db.Transaction(ctx, func(ctx context.Context) error {
latest, err := s.getLatestVersionAndCreated(ctx, xkube.Namespace(sv.Namespace), sv.Name)
latest, err := s.getLatestVersionAndCreatedAt(ctx, xkube.Namespace(sv.Namespace), sv.Name)
if err != nil {
return fmt.Errorf("fetching latest secure value version: %w", err)
}
@@ -110,13 +110,7 @@ func (s *secureValueMetadataStorage) Create(ctx context.Context, keeper string,
}
updatedAt := now
createdBy := actorUID
if latest.createdBy != "" {
createdBy = latest.createdBy
}
updatedBy := actorUID
row, err = toCreateRow(createdAt, updatedAt, keeper, sv, createdBy, updatedBy)
row, err = toCreateRow(createdAt, updatedAt, keeper, sv, actorUID)
if err != nil {
return fmt.Errorf("to create row: %w", err)
}
@@ -167,14 +161,13 @@ func (s *secureValueMetadataStorage) Create(ctx context.Context, keeper string,
return createdSecureValue, nil
}
type versionAndCreated struct {
type versionAndCreatedAt struct {
createdAt int64
createdBy string
version int64
}
func (s *secureValueMetadataStorage) getLatestVersionAndCreated(ctx context.Context, namespace xkube.Namespace, name string) (versionAndCreated, error) {
ctx, span := s.tracer.Start(ctx, "SecureValueMetadataStorage.getLatestVersionAndCreated", trace.WithAttributes(
func (s *secureValueMetadataStorage) getLatestVersionAndCreatedAt(ctx context.Context, namespace xkube.Namespace, name string) (versionAndCreatedAt, error) {
ctx, span := s.tracer.Start(ctx, "SecureValueMetadataStorage.getLatestVersionAndCreatedAt", trace.WithAttributes(
attribute.String("name", name),
attribute.String("namespace", namespace.String()),
))
@@ -188,48 +181,45 @@ func (s *secureValueMetadataStorage) getLatestVersionAndCreated(ctx context.Cont
q, err := sqltemplate.Execute(sqlGetLatestSecureValueVersionAndCreatedAt, req)
if err != nil {
return versionAndCreated{}, fmt.Errorf("execute template %q: %w", sqlGetLatestSecureValueVersionAndCreatedAt.Name(), err)
return versionAndCreatedAt{}, fmt.Errorf("execute template %q: %w", sqlGetLatestSecureValueVersionAndCreatedAt.Name(), err)
}
rows, err := s.db.QueryContext(ctx, q, req.GetArgs()...)
if err != nil {
return versionAndCreated{}, fmt.Errorf("fetching latest version for secure value: namespace=%+v name=%+v %w", namespace, name, err)
return versionAndCreatedAt{}, fmt.Errorf("fetching latest version for secure value: namespace=%+v name=%+v %w", namespace, name, err)
}
defer func() { _ = rows.Close() }()
if err := rows.Err(); err != nil {
return versionAndCreated{}, fmt.Errorf("error executing query: %w", err)
return versionAndCreatedAt{}, fmt.Errorf("error executing query: %w", err)
}
if !rows.Next() {
return versionAndCreated{}, nil
return versionAndCreatedAt{}, nil
}
var (
createdAt int64
createdBy string
version int64
active bool
namespaceFromDB string
nameFromDB string
)
if err := rows.Scan(&createdAt, &createdBy, &version, &active, &namespaceFromDB, &nameFromDB); err != nil {
return versionAndCreated{}, fmt.Errorf("scanning version and created from returned rows: %w", err)
if err := rows.Scan(&createdAt, &version, &active, &namespaceFromDB, &nameFromDB); err != nil {
return versionAndCreatedAt{}, fmt.Errorf("scanning version from returned rows: %w", err)
}
if namespaceFromDB != namespace.String() || nameFromDB != name {
return versionAndCreated{}, fmt.Errorf("bug: expected to find version and created for namespace=%+v name=%+v but got for namespace=%+v name=%+v",
return versionAndCreatedAt{}, fmt.Errorf("bug: expected to find latest version for namespace=%+v name=%+v but got version for namespace=%+v name=%+v",
namespace, name, namespaceFromDB, nameFromDB)
}
if !active {
createdAt = 0
createdBy = ""
}
return versionAndCreated{
return versionAndCreatedAt{
createdAt: createdAt,
createdBy: createdBy,
version: version,
}, nil
}

View File

@@ -1,13 +1,12 @@
SELECT
`created`,
`created_by`,
`version`,
`active`,
`namespace`,
`name`
FROM
`secret_secure_value`
WHERE
WHERE
`namespace` = 'ns' AND
`name` = 'name'
ORDER BY `version` DESC

View File

@@ -1,13 +1,12 @@
SELECT
"created",
"created_by",
"version",
"active",
"namespace",
"name"
FROM
"secret_secure_value"
WHERE
WHERE
"namespace" = 'ns' AND
"name" = 'name'
ORDER BY "version" DESC

View File

@@ -1,13 +1,12 @@
SELECT
"created",
"created_by",
"version",
"active",
"namespace",
"name"
FROM
"secret_secure_value"
WHERE
WHERE
"namespace" = 'ns' AND
"name" = 'name'
ORDER BY "version" DESC

View File

@@ -25,7 +25,6 @@ import (
bleveSearch "github.com/blevesearch/bleve/v2/search/searcher"
index "github.com/blevesearch/bleve_index_api"
"github.com/prometheus/client_golang/prometheus"
bolterrors "go.etcd.io/bbolt/errors"
"go.opentelemetry.io/otel"
"go.opentelemetry.io/otel/attribute"
"go.uber.org/atomic"
@@ -45,7 +44,6 @@ import (
const (
indexStorageMemory = "memory"
indexStorageFile = "file"
boltTimeout = "500ms"
)
// Keys used to store internal data in index.
@@ -417,25 +415,14 @@ func (b *bleveBackend) BuildIndex(
// This happens on startup, or when memory-based index has expired. (We don't expire file-based indexes)
// If we do have an unexpired cached index already, we always build a new index from scratch.
if cachedIndex == nil && !rebuild {
result := b.findPreviousFileBasedIndex(resourceDir)
if result != nil && result.IsOpen {
// Index file exists but is opened by another process, fallback to memory.
// Keep the name so we can skip cleanup of that directory.
newIndexType = indexStorageMemory
fileIndexName = result.Name
} else if result != nil && result.Index != nil {
// Found and opened existing index successfully
index = result.Index
fileIndexName = result.Name
indexRV = result.RV
}
index, fileIndexName, indexRV = b.findPreviousFileBasedIndex(resourceDir)
}
if newIndexType == indexStorageFile && index != nil {
if index != nil {
build = false
logWithDetails.Debug("Existing index found on filesystem", "indexRV", indexRV, "directory", filepath.Join(resourceDir, fileIndexName))
defer closeIndexOnExit(index, "") // Close index, but don't delete directory.
} else if newIndexType == indexStorageFile {
} else {
// Building index from scratch. Index name has a time component in it to be unique, but if
// we happen to create non-unique name, we bump the time and try again.
@@ -462,9 +449,7 @@ func (b *bleveBackend) BuildIndex(
logWithDetails.Info("Building index using filesystem", "directory", indexDir)
defer closeIndexOnExit(index, indexDir) // Close index, and delete new index directory.
}
}
if newIndexType == indexStorageMemory {
} else {
index, err = newBleveIndex("", mapper, time.Now(), b.opts.BuildVersion)
if err != nil {
return nil, fmt.Errorf("error creating new in-memory bleve index: %w", err)
@@ -567,30 +552,30 @@ func cleanFileSegment(input string) string {
return input
}
// cleanOldIndexes deletes all subdirectories inside resourceDir, skipping directory with "skipName".
// cleanOldIndexes deletes all subdirectories inside dir, skipping directory with "skipName".
// "skipName" can be empty.
func (b *bleveBackend) cleanOldIndexes(resourceDir string, skipName string) {
entries, err := os.ReadDir(resourceDir)
func (b *bleveBackend) cleanOldIndexes(dir string, skipName string) {
files, err := os.ReadDir(dir)
if err != nil {
if os.IsNotExist(err) {
return
}
b.log.Warn("error cleaning folders from", "directory", resourceDir, "error", err)
b.log.Warn("error cleaning folders from", "directory", dir, "error", err)
return
}
for _, ent := range entries {
if ent.IsDir() && ent.Name() != skipName {
indexDir := filepath.Join(resourceDir, ent.Name())
if !isPathWithinRoot(indexDir, b.opts.Root) {
b.log.Warn("Skipping cleanup of directory", "directory", indexDir)
for _, file := range files {
if file.IsDir() && file.Name() != skipName {
fpath := filepath.Join(dir, file.Name())
if !isPathWithinRoot(fpath, b.opts.Root) {
b.log.Warn("Skipping cleanup of directory", "directory", fpath)
continue
}
err = os.RemoveAll(indexDir)
err = os.RemoveAll(fpath)
if err != nil {
b.log.Error("Unable to remove old index folder", "directory", indexDir, "error", err)
b.log.Error("Unable to remove old index folder", "directory", fpath, "error", err)
} else {
b.log.Info("Removed old index folder", "directory", indexDir)
b.log.Info("Removed old index folder", "directory", fpath)
}
}
}
@@ -637,17 +622,10 @@ func formatIndexName(now time.Time) string {
return now.Format("20060102-150405")
}
type fileIndex struct {
Index bleve.Index
Name string
RV int64
IsOpen bool
}
func (b *bleveBackend) findPreviousFileBasedIndex(resourceDir string) *fileIndex {
func (b *bleveBackend) findPreviousFileBasedIndex(resourceDir string) (bleve.Index, string, int64) {
entries, err := os.ReadDir(resourceDir)
if err != nil {
return nil
return nil, "", 0
}
for _, ent := range entries {
@@ -657,13 +635,8 @@ func (b *bleveBackend) findPreviousFileBasedIndex(resourceDir string) *fileIndex
indexName := ent.Name()
indexDir := filepath.Join(resourceDir, indexName)
idx, err := bleve.OpenUsing(indexDir, map[string]interface{}{"bolt_timeout": boltTimeout})
idx, err := bleve.Open(indexDir)
if err != nil {
if errors.Is(err, bolterrors.ErrTimeout) {
b.log.Debug("Index is opened by another process (timeout), skipping", "indexDir", indexDir)
return &fileIndex{Name: indexName, IsOpen: true}
}
b.log.Debug("error opening index", "indexDir", indexDir, "err", err)
continue
}
@@ -675,14 +648,10 @@ func (b *bleveBackend) findPreviousFileBasedIndex(resourceDir string) *fileIndex
continue
}
return &fileIndex{
Index: idx,
Name: indexName,
RV: indexRV,
}
return idx, indexName, indexRV
}
return nil
return nil, "", 0
}
// Stop closes all indexes and stops background tasks.

View File

@@ -1583,76 +1583,3 @@ func docCount(t *testing.T, idx resource.ResourceIndex) int {
require.NoError(t, err)
return int(cnt)
}
func TestBleveBackendFallsBackToMemory(t *testing.T) {
ns := resource.NamespacedResource{
Namespace: "test",
Group: "group",
Resource: "resource",
}
tmpDir := t.TempDir()
// First, create a file-based index with one backend and keep it open
backend1, reg1 := setupBleveBackend(t, withRootDir(tmpDir))
index1, err := backend1.BuildIndex(context.Background(), ns, 100 /* file based */, nil, "test", indexTestDocs(ns, 10, 100), nil, false)
require.NoError(t, err)
require.NotNil(t, index1)
// Verify first index is file-based
bleveIdx1, ok := index1.(*bleveIndex)
require.True(t, ok)
require.Equal(t, indexStorageFile, bleveIdx1.indexStorage)
checkOpenIndexes(t, reg1, 0, 1)
// Now create a second backend using the same directory
// This simulates another instance trying to open the same index
backend2, reg2 := setupBleveBackend(t, withRootDir(tmpDir))
// BuildIndex should detect the file is locked and fallback to memory
index2, err := backend2.BuildIndex(context.Background(), ns, 100 /* file based */, nil, "test", indexTestDocs(ns, 10, 100), nil, false)
require.NoError(t, err)
require.NotNil(t, index2)
// Verify second index fell back to in-memory despite size being above file threshold
bleveIdx2, ok := index2.(*bleveIndex)
require.True(t, ok)
require.Equal(t, indexStorageMemory, bleveIdx2.indexStorage)
// Verify metrics show 1 memory index and 0 file indexes for backend2
checkOpenIndexes(t, reg2, 1, 0)
// Verify the in-memory index works correctly
require.Equal(t, 10, docCount(t, index2))
// Clean up: close first backend to release the file lock
backend1.Stop()
}
func TestBleveSkipCleanOldIndexesOnMemoryFallback(t *testing.T) {
ns := resource.NamespacedResource{
Namespace: "test",
Group: "group",
Resource: "resource",
}
tmpDir := t.TempDir()
backend1, _ := setupBleveBackend(t, withRootDir(tmpDir))
_, err := backend1.BuildIndex(context.Background(), ns, 100 /* file based */, nil, "test", indexTestDocs(ns, 10, 100), nil, false)
require.NoError(t, err)
// Now create a second backend using the same directory
// This simulates another instance trying to open the same index
backend2, _ := setupBleveBackend(t, withRootDir(tmpDir))
// BuildIndex should detect the file is locked and fallback to memory
_, err = backend2.BuildIndex(context.Background(), ns, 100 /* file based */, nil, "test", indexTestDocs(ns, 10, 100), nil, false)
require.NoError(t, err)
// Verify that the index directory still exists (i.e., cleanOldIndexes was skipped)
verifyDirEntriesCount(t, backend2.getResourceDir(ns), 1)
// Clean up: close first backend to release the file lock
backend1.Stop()
}

View File

@@ -2470,7 +2470,7 @@ var expNonEmailNotifications = map[string][]string{
"title_link": "http://localhost:3000/alerting/grafana/UID_SlackAlert1/view?orgId=1",
"text": "Integration Test ",
"fallback": "Integration Test [FIRING:1] SlackAlert1 (default)",
"footer": "Grafana",
"footer": "Grafana v",
"footer_icon": "https://grafana.com/static/assets/img/fav32.png",
"color": "#D63232",
"ts": %s,
@@ -2490,7 +2490,7 @@ var expNonEmailNotifications = map[string][]string{
"title_link": "http://localhost:3000/alerting/grafana/UID_SlackAlert2/view?orgId=1",
"text": "**Firing**\n\nValue: A=1\nLabels:\n - alertname = SlackAlert2\n - grafana_folder = default\nAnnotations:\nSource: http://localhost:3000/alerting/grafana/UID_SlackAlert2/view?orgId=1\nSilence: http://localhost:3000/alerting/silence/new?alertmanager=grafana&matcher=__alert_rule_uid__%%3DUID_SlackAlert2&orgId=1\n",
"fallback": "[FIRING:1] SlackAlert2 (default)",
"footer": "Grafana",
"footer": "Grafana v",
"footer_icon": "https://grafana.com/static/assets/img/fav32.png",
"color": "#D63232",
"ts": %s,

View File

@@ -2699,24 +2699,6 @@
"secure": false,
"dependsOn": "",
"subformOptions": null
},
{
"element": "input",
"inputType": "text",
"label": "Footer",
"description": "Templated footer of the slack message",
"placeholder": "{{ template \"slack.default.footer\" . }}",
"propertyName": "footer",
"selectOptions": null,
"showWhen": {
"field": "",
"is": ""
},
"required": false,
"validationRule": "",
"secure": false,
"dependsOn": "",
"subformOptions": null
}
]
},

View File

@@ -7017,24 +7017,6 @@
"secure": false,
"dependsOn": "",
"subformOptions": null
},
{
"element": "input",
"inputType": "text",
"label": "Footer",
"description": "Templated footer of the slack message",
"placeholder": "{{ template \"slack.default.footer\" . }}",
"propertyName": "footer",
"selectOptions": null,
"showWhen": {
"field": "",
"is": ""
},
"required": false,
"validationRule": "",
"secure": false,
"dependsOn": "",
"subformOptions": null
}
]
},

View File

@@ -132,6 +132,53 @@ func TestIntegrationDashboardAPIValidation(t *testing.T) {
}
}
// TestIntegrationDashboardAPIAuthorization tests the dashboard K8s API with authorization checks
func TestIntegrationDashboardAPIAuthorization(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
dualWriterModes := []rest.DualWriterMode{rest.Mode0, rest.Mode1, rest.Mode2, rest.Mode3, rest.Mode4, rest.Mode5}
for _, dualWriterMode := range dualWriterModes {
t.Run(fmt.Sprintf("DualWriterMode %d", dualWriterMode), func(t *testing.T) {
helper := apis.NewK8sTestHelper(t, testinfra.GrafanaOpts{
DisableDataMigrations: true,
DisableAnonymous: true,
UnifiedStorageConfig: map[string]setting.UnifiedStorageConfig{
"dashboards.dashboard.grafana.app": {
DualWriterMode: dualWriterMode,
},
"folders.folder.grafana.app": {
DualWriterMode: dualWriterMode,
},
},
UnifiedStorageEnableSearch: true,
})
t.Cleanup(func() {
helper.Shutdown()
})
org1Ctx := createTestContext(t, helper, helper.Org1, dualWriterMode)
org2Ctx := createTestContext(t, helper, helper.OrgB, dualWriterMode)
t.Run("Authorization tests for all identity types", func(t *testing.T) {
runAuthorizationTests(t, org1Ctx)
})
t.Run("Dashboard permission tests", func(t *testing.T) {
runDashboardPermissionTests(t, org1Ctx, true)
})
t.Run("Cross-organization tests", func(t *testing.T) {
runCrossOrgTests(t, org1Ctx, org2Ctx)
})
t.Run("Dashboard HTTP API test", func(t *testing.T) {
runDashboardHttpTest(t, org1Ctx, org2Ctx)
})
})
}
}
// TestIntegrationDashboardAPI tests the dashboard K8s API
func TestIntegrationDashboardAPI(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)

View File

@@ -8,7 +8,6 @@ import (
"net/http"
"strings"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/xlab/treeprint"
@@ -32,33 +31,6 @@ import (
"github.com/grafana/grafana/pkg/util/testutil"
)
func TestIntegrationFolderTreeZanzana(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
runIntegrationFolderTree(t, testinfra.GrafanaOpts{
DisableDataMigrations: true,
AppModeProduction: true,
DisableAnonymous: true,
APIServerStorageType: "unified",
UnifiedStorageConfig: map[string]setting.UnifiedStorageConfig{
"dashboards.dashboard.grafana.app": {
DualWriterMode: grafanarest.Mode5,
},
folderV1.RESOURCEGROUP: {
DualWriterMode: grafanarest.Mode5,
},
},
EnableFeatureToggles: []string{
"zanzana",
"zanzanaNoLegacyClient",
"kubernetesAuthzZanzanaSync",
},
UnifiedStorageEnableSearch: true,
ZanzanaReconciliationInterval: 100 * time.Millisecond,
DisableZanzanaCache: true,
})
}
func TestIntegrationFolderTree(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
@@ -75,7 +47,7 @@ func TestIntegrationFolderTree(t *testing.T) {
}
for _, mode := range modes {
t.Run(fmt.Sprintf("mode %d", mode), func(t *testing.T) {
runIntegrationFolderTree(t, testinfra.GrafanaOpts{
helper := apis.NewK8sTestHelper(t, testinfra.GrafanaOpts{
DisableDataMigrations: true,
AppModeProduction: true,
DisableAnonymous: true,
@@ -90,122 +62,113 @@ func TestIntegrationFolderTree(t *testing.T) {
},
UnifiedStorageEnableSearch: mode >= grafanarest.Mode3, // make sure modes 0-3 work without search enabled
})
})
}
}
defer helper.Shutdown()
func runIntegrationFolderTree(t *testing.T, opts testinfra.GrafanaOpts) {
if !db.IsTestDbSQLite() {
t.Skip("test only on sqlite for now")
}
helper := apis.NewK8sTestHelper(t, opts)
defer helper.Shutdown()
apis.AwaitZanzanaReconcileNext(t, helper)
tests := []struct {
Name string
Definition FolderDefinition
Expected []ExpectedTree
}{
{
Name: "admin-only-tree",
Definition: FolderDefinition{
Children: []FolderDefinition{
{Name: "top",
Creator: helper.Org1.Admin,
tests := []struct {
Name string
Definition FolderDefinition
Expected []ExpectedTree
}{
{
Name: "admin-only-tree",
Definition: FolderDefinition{
Children: []FolderDefinition{
{Name: "middle",
{Name: "top",
Creator: helper.Org1.Admin,
Children: []FolderDefinition{
{Name: "child",
{Name: "middle",
Creator: helper.Org1.Admin,
Permissions: []FolderPermission{{
Permission: "View",
User: helper.Org1.None,
}},
Children: []FolderDefinition{
{Name: "child",
Creator: helper.Org1.Admin,
Permissions: []FolderPermission{{
Permission: "View",
User: helper.Org1.None,
}},
},
},
},
},
},
},
},
},
},
Expected: []ExpectedTree{
{User: helper.Org1.Admin, Listing: `
Expected: []ExpectedTree{
{User: helper.Org1.Admin, Listing: `
└── top (admin,edit,save,delete)
....└── middle (admin,edit,save,delete)
........└── child (admin,edit,save,delete)`},
{User: helper.Org1.Viewer, Listing: `
{User: helper.Org1.Viewer, Listing: `
└── top (view)
....└── middle (view)
........└── child (view)`},
{User: helper.Org1.None, Listing: `
{User: helper.Org1.None, Listing: `
└── sharedwithme (???)
....└── child (view)`,
E403: []string{"top", "middle"},
E403: []string{"top", "middle"},
},
},
},
},
},
}
}
var statusCode int
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
tt.Definition.RequireUniqueName(t, make(map[string]bool))
var statusCode int
for _, tt := range tests {
t.Run(tt.Name, func(t *testing.T) {
tt.Definition.RequireUniqueName(t, make(map[string]bool))
tt.Definition.CreateWithLegacyAPI(t, helper, "")
tt.Definition.CreateWithLegacyAPI(t, helper, "")
// CreateWithLegacyAPI
for _, expect := range tt.Expected {
unstructured, client := getFolderClients(t, expect.User)
t.Run(fmt.Sprintf("query as %s", expect.User.Identity.GetLogin()), func(t *testing.T) {
legacy := getFoldersFromLegacyAPISearch(t, client)
legacy.requireEqual(t, expect.Listing, "legacy")
for _, expect := range tt.Expected {
unstructured, client := getFolderClients(t, expect.User)
t.Run(fmt.Sprintf("query as %s", expect.User.Identity.GetLogin()), func(t *testing.T) {
legacy := getFoldersFromLegacyAPISearch(t, client)
legacy.requireEqual(t, expect.Listing, "legacy")
listed := getFoldersFromAPIServerList(t, unstructured)
listed.requireEqual(t, expect.Listing, "listed")
listed := getFoldersFromAPIServerList(t, unstructured)
listed.requireEqual(t, expect.Listing, "listed")
search := getFoldersFromDashboardV0Search(t, client, expect.User.Identity.GetNamespace())
search.requireEqual(t, expect.Listing, "search")
search := getFoldersFromDashboardV0Search(t, client, expect.User.Identity.GetNamespace())
search.requireEqual(t, expect.Listing, "search")
// ensure sure GET also works on each folder we can list
listed.forEach(func(fv *FolderView) {
if fv.Name == folder.SharedWithMeFolderUID {
return // skip it
}
found, err := unstructured.Get(context.Background(), fv.Name, v1.GetOptions{})
require.NoErrorf(t, err, "getting folder: %s", fv.Name)
require.Equal(t, found.GetName(), fv.Name)
})
// ensure sure GET also works on each folder we can list
listed.forEach(func(fv *FolderView) {
if fv.Name == folder.SharedWithMeFolderUID {
return // skip it
}
found, err := unstructured.Get(context.Background(), fv.Name, v1.GetOptions{})
require.NoErrorf(t, err, "getting folder: %s", fv.Name)
require.Equal(t, found.GetName(), fv.Name)
})
// Forbidden things should really be hidden
for _, name := range expect.E403 {
_, err := unstructured.Get(context.Background(), name, v1.GetOptions{})
require.Error(t, err)
require.Truef(t, apierrors.IsForbidden(err), "error: %w", err) // 404 vs 403 ????
// Forbidden things should really be hidden
for _, name := range expect.E403 {
_, err := unstructured.Get(context.Background(), name, v1.GetOptions{})
require.Error(t, err)
require.Truef(t, apierrors.IsForbidden(err), "error: %w", err) // 404 vs 403 ????
result := client.Get().AbsPath("api", "folders", name).
Do(context.Background()).
StatusCode(&statusCode)
require.Equal(t, int(http.StatusForbidden), statusCode)
require.Error(t, result.Error())
result := client.Get().AbsPath("api", "folders", name).
Do(context.Background()).
StatusCode(&statusCode)
require.Equal(t, int(http.StatusForbidden), statusCode)
require.Error(t, result.Error())
// Verify sub-resources are hidden
for _, sub := range []string{"access", "parents", "children", "counts"} {
_, err := unstructured.Get(context.Background(), name, v1.GetOptions{}, sub)
require.Error(t, err, "expect error for subresource", sub)
require.Truef(t, apierrors.IsForbidden(err), "error: %w", err) // 404 vs 403 ????
}
// Verify sub-resources are hidden
for _, sub := range []string{"access", "parents", "children", "counts"} {
_, err := unstructured.Get(context.Background(), name, v1.GetOptions{}, sub)
require.Error(t, err, "expect error for subresource", sub)
require.Truef(t, apierrors.IsForbidden(err), "error: %w", err) // 404 vs 403 ????
}
// Verify legacy API access is also hidden
for _, sub := range []string{"permissions", "counts"} {
result := client.Get().AbsPath("api", "folders", name, sub).
Do(context.Background()).
StatusCode(&statusCode)
require.Equalf(t, int(http.StatusForbidden), statusCode, "legacy access to: %s", sub)
require.Error(t, result.Error())
}
// Verify legacy API access is also hidden
for _, sub := range []string{"permissions", "counts"} {
result := client.Get().AbsPath("api", "folders", name, sub).
Do(context.Background()).
StatusCode(&statusCode)
require.Equalf(t, int(http.StatusForbidden), statusCode, "legacy access to: %s", sub)
require.Error(t, result.Error())
}
}
})
}
})
}
@@ -249,8 +212,6 @@ func (f *FolderDefinition) CreateWithLegacyAPI(t *testing.T, h *apis.K8sTestHelp
})
require.NoError(t, err)
apis.AwaitZanzanaReconcileNext(t, h)
var statusCode int
result := client.Post().AbsPath("api", "folders").
Body(body).

View File

@@ -873,10 +873,19 @@ func VerifyOpenAPISnapshots(t *testing.T, dir string, gv schema.GroupVersion, h
require.Failf(t, "Not OK", "Code[%d] %s", rsp.Response.StatusCode, string(rsp.Body))
}
var prettyJSON bytes.Buffer
err := json.Indent(&prettyJSON, rsp.Body, "", " ")
schema := map[string]any{}
err := json.Unmarshal(rsp.Body, &schema)
require.NoError(t, err)
info, found, err := unstructured.NestedMap(schema, "info", "plugin")
require.NoError(t, err)
if found {
delete(info, "version") // the version is unstable in test environments
err = unstructured.SetNestedMap(schema, info, "info", "plugin")
require.NoError(t, err)
}
pretty, err := json.MarshalIndent(schema, "", " ")
require.NoError(t, err)
pretty := prettyJSON.String()
write := false
fpath := filepath.Join(dir, fmt.Sprintf("%s-%s.json", gv.Group, gv.Version))
@@ -885,7 +894,7 @@ func VerifyOpenAPISnapshots(t *testing.T, dir string, gv schema.GroupVersion, h
// We can ignore the gosec G304 warning since this is a test and the function is only called with explicit paths
body, err := os.ReadFile(fpath)
if err == nil {
if !assert.JSONEq(t, string(body), pretty) {
if !assert.JSONEq(t, string(body), string(pretty)) {
t.Logf("openapi spec has changed: %s", path)
t.Fail()
write = true
@@ -896,7 +905,7 @@ func VerifyOpenAPISnapshots(t *testing.T, dir string, gv schema.GroupVersion, h
}
if write {
e2 := os.WriteFile(fpath, []byte(pretty), 0o644)
e2 := os.WriteFile(fpath, pretty, 0o644)
if e2 != nil {
t.Errorf("error writing file: %s", e2.Error())
}

View File

@@ -2,7 +2,10 @@
"openapi": "3.0.0",
"info": {
"description": "Generates test data in different forms",
"title": "testdata.datasource.grafana.app/v0alpha1"
"title": "testdata.datasource.grafana.app/v0alpha1",
"plugin": {
"plugin": "grafana-testdata-datasource"
}
},
"paths": {
"/apis/testdata.datasource.grafana.app/v0alpha1/": {
@@ -377,7 +380,7 @@
"DataSource"
],
"description": "Query the TestData datasources",
"operationId": "createDataSourceQuery",
"operationId": "queryDataSource",
"requestBody": {
"content": {
"application/json": {
@@ -410,7 +413,7 @@
{
"name": "name",
"in": "path",
"description": "name of the QueryDataResponse",
"description": "DataSource identifier",
"required": true,
"schema": {
"type": "string",
@@ -430,36 +433,462 @@
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource": {
"summary": "hello world",
"get": {
"tags": [
"DataSource"
"Route"
],
"description": "Get resources in the datasource plugin. NOTE, additional routes may exist, but are not exposed via OpenAPI",
"operationId": "getDataSourceResource",
"operationId": "get_route",
"responses": {
"200": {
"description": "OK",
"default": {
"content": {
"*/*": {
"text/plain": {
"schema": {
"type": "string"
}
}
}
}
},
"x-kubernetes-action": "connect",
"x-kubernetes-group-version-kind": {
"group": "testdata.datasource.grafana.app",
"version": "v0alpha1",
"kind": "Status"
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the Status",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
},
{
"name": "namespace",
"in": "path",
"description": "object name and auth scope, such as for teams and projects",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
}
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource/boom": {
"summary": "force a panic",
"get": {
"tags": [
"Route"
],
"operationId": "get_route_boom",
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"post": {
"tags": [
"Route"
],
"operationId": "post_route_boom",
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
},
{
"name": "namespace",
"in": "path",
"description": "object name and auth scope, such as for teams and projects",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
}
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource/scenarios": {
"summary": "hello world",
"get": {
"tags": [
"Route"
],
"operationId": "get_route_scenarios",
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
},
{
"name": "namespace",
"in": "path",
"description": "object name and auth scope, such as for teams and projects",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
}
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource/sim/{key}": {
"description": "Get list of simulations",
"get": {
"tags": [
"Route"
],
"operationId": "get_route_sim_key",
"parameters": [
{
"name": "key",
"in": "path",
"description": "simulation key (should include hz)"
}
],
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"post": {
"tags": [
"Route"
],
"operationId": "post_route_sim_key",
"parameters": [
{
"name": "key",
"in": "path",
"description": "simulation key (should include hz)"
}
],
"requestBody": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
},
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
},
{
"name": "namespace",
"in": "path",
"description": "object name and auth scope, such as for teams and projects",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
}
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource/sims": {
"description": "Get list of simulations",
"get": {
"tags": [
"Route"
],
"operationId": "get_route_sims",
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
},
{
"name": "namespace",
"in": "path",
"description": "object name and auth scope, such as for teams and projects",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
}
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource/stream": {
"summary": "Get streaming response",
"get": {
"tags": [
"Route"
],
"operationId": "get_route_stream",
"parameters": [
{
"name": "count",
"in": "query",
"description": "number of points that will be returned",
"schema": {
"type": "integer",
"format": "int64"
},
"example": 10
},
{
"name": "start",
"in": "query",
"description": "the start value",
"schema": {
"type": "integer",
"format": "int64"
}
},
{
"name": "flush",
"in": "query",
"description": "How often the result is flushed (1-100%)",
"schema": {
"type": "integer",
"format": "int64"
},
"example": 100
},
{
"name": "speed",
"in": "query",
"description": "the clock cycle",
"schema": {
"type": "string"
},
"example": "100ms"
},
{
"name": "format",
"in": "query",
"description": "the response format",
"schema": {
"type": "string",
"enum": [
"json",
"influx"
]
}
}
],
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
},
{
"name": "namespace",
"in": "path",
"description": "object name and auth scope, such as for teams and projects",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
}
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource/test": {
"summary": "Echo any request",
"post": {
"tags": [
"Route"
],
"operationId": "post_route_test",
"requestBody": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
},
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
},
{
"name": "namespace",
"in": "path",
"description": "object name and auth scope, such as for teams and projects",
"required": true,
"schema": {
"type": "string",
"uniqueItems": true
}
}
]
},
"/apis/testdata.datasource.grafana.app/v0alpha1/namespaces/{namespace}/datasources/{name}/resource/test/json": {
"summary": "Echo json request",
"post": {
"tags": [
"Route"
],
"operationId": "post_route_test_json",
"requestBody": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
},
"responses": {
"default": {
"content": {
"application/json": {
"schema": {
"type": "object",
"additionalProperties": true
}
}
}
}
}
},
"parameters": [
{
"name": "name",
"in": "path",
"description": "name of the DataSource",
"required": true,
"schema": {
"type": "string",
@@ -530,6 +959,26 @@
},
"components": {
"schemas": {
"DataSourceSpec": {
"description": "Test data does not require any explicit configuration",
"required": [
"title"
],
"properties": {
"title": {
"description": "display name",
"type": "string"
},
"url": {
"description": "not used",
"type": "string"
}
},
"additionalProperties": false,
"example": {
"url": "http://xxxx"
}
},
"QueryRequestSchema": {
"description": "Schema for a set of queries sent to the query method",
"type": "object",
@@ -650,12 +1099,7 @@
}
},
"spec": {
"description": "DataSource configuration -- these properties are all visible to anyone able to query the data source from their browser",
"allOf": [
{
"$ref": "#/components/schemas/com.github.grafana.grafana.pkg.apis.datasource.v0alpha1.UnstructuredSpec"
}
]
"$ref": "#/components/schemas/DataSourceSpec"
}
},
"x-kubernetes-group-version-kind": [

View File

@@ -1,87 +0,0 @@
package apis
import (
"bytes"
"context"
"net/http"
"testing"
"time"
dto "github.com/prometheus/client_model/go"
"github.com/prometheus/common/expfmt"
"github.com/prometheus/common/model"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/services/featuremgmt"
)
const zanzanaReconcileLastSuccessMetric = "grafana_zanzana_reconcile_last_success_timestamp_seconds"
// AwaitZanzanaReconcileNext waits for the next Zanzana reconciliation cycle to complete.
// It is a no-op unless the `zanzana` feature toggle is enabled for the running test env.
func AwaitZanzanaReconcileNext(t *testing.T, helper *K8sTestHelper) {
t.Helper()
enabled := false
if helper != nil {
enabled = helper.GetEnv().FeatureToggles.GetEnabled(context.Background())[featuremgmt.FlagZanzana]
}
if helper == nil || !enabled {
return
}
prev, ok := getZanzanaReconcileLastSuccessTimestampSeconds(t, helper)
if !ok {
prev = 0
}
require.EventuallyWithT(t, func(c *assert.CollectT) {
ts, ok := getZanzanaReconcileLastSuccessTimestampSeconds(t, helper)
assert.True(c, ok, "expected to find %s in /metrics", zanzanaReconcileLastSuccessMetric)
if !ok {
return
}
assert.Greater(c, ts, prev, "expected %s (%v) > %v", zanzanaReconcileLastSuccessMetric, ts, prev)
}, 30*time.Second, 50*time.Millisecond)
}
func getZanzanaReconcileLastSuccessTimestampSeconds(t *testing.T, helper *K8sTestHelper) (float64, bool) {
t.Helper()
rsp := DoRequest(helper, RequestParams{
User: helper.Org1.Admin,
Path: "/metrics",
Accept: "text/plain",
}, &struct{}{})
if rsp.Response == nil || rsp.Response.StatusCode != http.StatusOK {
return 0, false
}
parser := expfmt.NewTextParser(model.UTF8Validation)
metrics, err := parser.TextToMetricFamilies(bytes.NewReader(rsp.Body))
if err != nil {
return 0, false
}
metric := metrics[zanzanaReconcileLastSuccessMetric]
if metric == nil || len(metric.Metric) == 0 {
return 0, false
}
m := metric.Metric[0]
switch metric.GetType() {
case dto.MetricType_GAUGE:
if m.Gauge == nil {
return 0, false
}
return m.Gauge.GetValue(), true
case dto.MetricType_UNTYPED:
if m.Untyped == nil {
return 0, false
}
return m.Untyped.GetValue(), true
default:
return 0, false
}
}

View File

@@ -609,20 +609,6 @@ func CreateGrafDir(t *testing.T, opts GrafanaOpts) (string, string) {
require.NoError(t, err)
}
if opts.ZanzanaReconciliationInterval != 0 {
rbacSect, err := cfg.NewSection("rbac")
require.NoError(t, err)
_, err = rbacSect.NewKey("zanzana_reconciliation_interval", opts.ZanzanaReconciliationInterval.String())
require.NoError(t, err)
}
if opts.DisableZanzanaCache {
rbacSect, err := cfg.NewSection("rbac")
require.NoError(t, err)
_, err = rbacSect.NewKey("disable_zanzana_cache", "true")
require.NoError(t, err)
}
dashboardsSection, err := getOrCreateSection("dashboards")
require.NoError(t, err)
_, err = dashboardsSection.NewKey("min_refresh_interval", "10s")
@@ -701,8 +687,6 @@ type GrafanaOpts struct {
SecretsManagerEnableDBMigrations bool
OpenFeatureAPIEnabled bool
DisableAuthZClientCache bool
ZanzanaReconciliationInterval time.Duration
DisableZanzanaCache bool
// Allow creating grafana dir beforehand
Dir string

View File

@@ -33,11 +33,11 @@ func (ds *DataSource) parseResponse(ctx context.Context, metricDataOutputs []*cl
dataRes := backend.DataResponse{}
if response.HasArithmeticError {
dataRes.Error = backend.DownstreamErrorf("ArithmeticError in query %q: %s", queryRow.RefId, response.ArithmeticErrorMessage)
dataRes.Error = fmt.Errorf("ArithmeticError in query %q: %s", queryRow.RefId, response.ArithmeticErrorMessage)
}
if response.HasPermissionError {
dataRes.Error = backend.DownstreamErrorf("PermissionError in query %q: %s", queryRow.RefId, response.PermissionErrorMessage)
dataRes.Error = fmt.Errorf("PermissionError in query %q: %s", queryRow.RefId, response.PermissionErrorMessage)
}
var err error

View File

@@ -192,7 +192,7 @@ export const preparePlotConfigBuilder: UPlotConfigPrepFn<UPlotConfigOptions> = (
});
const xField = frame.fields[0];
const xAxisHidden = xField.config.custom?.axisPlacement === AxisPlacement.Hidden;
const xAxisHidden = xField.config.custom.axisPlacement === AxisPlacement.Hidden;
builder.addAxis({
show: !xAxisHidden,

View File

@@ -51,7 +51,6 @@ export interface PanelEditorState extends SceneObjectState {
panelRef: SceneObjectRef<VizPanel>;
showLibraryPanelSaveModal?: boolean;
showLibraryPanelUnlinkModal?: boolean;
editPreview?: VizPanel;
tableView?: VizPanel;
pluginLoadErrror?: string;
/**
@@ -151,9 +150,6 @@ export class PanelEditor extends SceneObjectBase<PanelEditorState> {
const changedState = layoutItem.state;
const originalState = this._layoutItemState!;
this.setState({ editPreview: undefined });
this.state.optionsPane?.setState({ editPreviewRef: undefined });
// Temp fix for old edit mode
if (this._layoutItem instanceof DashboardGridItem && !config.featureToggles.dashboardNewLayouts) {
this._layoutItem.handleEditChange();
@@ -260,40 +256,16 @@ export class PanelEditor extends SceneObjectBase<PanelEditorState> {
);
// Setup options pane
const optionsPane = new PanelOptionsPane({
panelRef: this.state.panelRef,
editPreviewRef: this.state.editPreview?.getRef(),
searchQuery: '',
listMode: OptionFilter.All,
isVizPickerOpen: isUnconfigured,
isNewPanel: this.state.isNewPanel,
});
this.setState({
optionsPane,
optionsPane: new PanelOptionsPane({
panelRef: this.state.panelRef,
searchQuery: '',
listMode: OptionFilter.All,
isVizPickerOpen: isUnconfigured,
isNewPanel: this.state.isNewPanel,
}),
isInitializing: false,
});
this._subs.add(
optionsPane.subscribeToState((newState, oldState) => {
if (newState.isVizPickerOpen !== oldState.isVizPickerOpen) {
const panel = this.state.panelRef.resolve();
let editPreview: VizPanel | undefined;
if (newState.isVizPickerOpen) {
// we just "pick" timeseries, viz type will likely be overridden by Suggestions.
const editPreviewBuilder = PanelBuilders.timeseries()
.setTitle(panel.state.title)
.setDescription(panel.state.description);
if (panel.state.$data) {
editPreviewBuilder.setData(new DataProviderSharer({ source: panel.state.$data.getRef() }));
}
editPreview = editPreviewBuilder.build();
}
this.setState({ editPreview });
optionsPane.setState({ editPreviewRef: editPreview?.getRef() });
}
})
);
} else {
// plugin changed after first time initialization
// Just update data pane

View File

@@ -81,7 +81,7 @@ export function PanelEditorRenderer({ model }: SceneComponentProps<PanelEditor>)
function VizAndDataPane({ model }: SceneComponentProps<PanelEditor>) {
const dashboard = getDashboardSceneFor(model);
const { dataPane, showLibraryPanelSaveModal, showLibraryPanelUnlinkModal, tableView, editPreview } = model.useState();
const { dataPane, showLibraryPanelSaveModal, showLibraryPanelUnlinkModal, tableView } = model.useState();
const panel = model.getPanel();
const libraryPanel = getLibraryPanelBehavior(panel);
const { controls } = dashboard.useState();
@@ -113,7 +113,7 @@ function VizAndDataPane({ model }: SceneComponentProps<PanelEditor>) {
)}
<div {...containerProps}>
<div {...primaryProps} className={cx(primaryProps.className, isScrollingLayout && styles.fixedSizeViz)}>
<VizWrapper panel={editPreview ?? panel} tableView={tableView} />
<VizWrapper panel={panel} tableView={tableView} />
</div>
{showLibraryPanelSaveModal && libraryPanel && (
<SaveLibraryVizPanelModal

View File

@@ -27,7 +27,7 @@ describe('PanelOptionsPane', () => {
expect(panel.state.pluginId).toBe('timeseries');
optionsPane.onChangePanel({ pluginId: 'table' });
optionsPane.onChangePanelPlugin({ pluginId: 'table' });
expect(optionsPane['_cachedPluginOptions']['timeseries']?.options).toBe(panel.state.options);
expect(optionsPane['_cachedPluginOptions']['timeseries']?.fieldConfig).toBe(panel.state.fieldConfig);
@@ -52,7 +52,7 @@ describe('PanelOptionsPane', () => {
panel.setState({ $data: undefined });
panel.activate();
optionsPane.onChangePanel({
optionsPane.onChangePanelPlugin({
pluginId: 'table',
options: { showHeader: false },
fieldConfig: {
@@ -114,7 +114,7 @@ describe('PanelOptionsPane', () => {
expect(panel.state.fieldConfig.overrides[1].properties).toHaveLength(1);
expect(panel.state.fieldConfig.defaults.custom).toHaveProperty('axisBorderShow');
optionsPane.onChangePanel({ pluginId: 'table' });
optionsPane.onChangePanelPlugin({ pluginId: 'table' });
expect(mockFn).toHaveBeenCalled();
expect(mockFn.mock.calls[0][2].defaults.color?.mode).toBe('palette-classic');
@@ -146,8 +146,8 @@ describe('PanelOptionsPane', () => {
const mockOnFieldConfigChange = jest.fn();
panel.onFieldConfigChange = mockOnFieldConfigChange;
// Call onChangePanel with fieldConfig that has overrides
optionsPane.onChangePanel({
// Call onChangePanelPlugin with fieldConfig that has overrides
optionsPane.onChangePanelPlugin({
pluginId: 'table',
fieldConfig: {
defaults: { unit: 'percent' },
@@ -178,7 +178,7 @@ describe('PanelOptionsPane', () => {
panel.onFieldConfigChange = mockOnFieldConfigChange;
// Call without fieldConfig
optionsPane.onChangePanel({
optionsPane.onChangePanelPlugin({
pluginId: 'table',
options: { showHeader: false },
});

View File

@@ -41,7 +41,6 @@ export interface PanelOptionsPaneState extends SceneObjectState {
panelRef: SceneObjectRef<VizPanel>;
isNewPanel?: boolean;
hasPickedViz?: boolean;
editPreviewRef?: SceneObjectRef<VizPanel>;
}
interface PluginOptionsCache {
@@ -64,7 +63,8 @@ export class PanelOptionsPane extends SceneObjectBase<PanelOptionsPaneState> {
});
};
onChangePanel = (options: VizTypeChangeDetails, panel = this.state.panelRef.resolve()) => {
onChangePanelPlugin = (options: VizTypeChangeDetails) => {
const panel = this.state.panelRef.resolve();
const { options: prevOptions, fieldConfig: prevFieldConfig, pluginId: prevPluginId } = panel.state;
const pluginId = options.pluginId;
@@ -137,10 +137,8 @@ export class PanelOptionsPane extends SceneObjectBase<PanelOptionsPaneState> {
}
function PanelOptionsPaneComponent({ model }: SceneComponentProps<PanelOptionsPane>) {
const { isVizPickerOpen, searchQuery, listMode, panelRef, isNewPanel, hasPickedViz, editPreviewRef } =
model.useState();
const { isVizPickerOpen, searchQuery, listMode, panelRef, isNewPanel, hasPickedViz } = model.useState();
const panel = panelRef.resolve();
const editPreview = editPreviewRef?.resolve() ?? panel; // if something goes wrong, at least update the panel.
const { pluginId } = panel.useState();
const { data } = sceneGraph.getData(panel).useState();
const styles = useStyles2(getStyles);
@@ -231,8 +229,7 @@ function PanelOptionsPaneComponent({ model }: SceneComponentProps<PanelOptionsPa
{isVizPickerOpen && (
<PanelVizTypePicker
panel={panel}
editPreview={editPreview}
onChange={model.onChangePanel}
onChange={model.onChangePanelPlugin}
onClose={model.onToggleVizPicker}
data={data}
showBackButton={config.featureToggles.newVizSuggestions ? hasPickedViz || !isNewPanel : true}

View File

@@ -23,8 +23,7 @@ export interface Props {
data?: PanelData;
showBackButton?: boolean;
panel: VizPanel;
editPreview: VizPanel;
onChange: (options: VizTypeChangeDetails, panel?: VizPanel) => void;
onChange: (options: VizTypeChangeDetails) => void;
onClose: () => void;
}
@@ -42,7 +41,7 @@ const getTabs = (): Array<{ label: string; value: VisualizationSelectPaneTab }>
: [allVisualizationsTab, suggestionsTab];
};
export function PanelVizTypePicker({ panel, editPreview, data, onChange, onClose, showBackButton }: Props) {
export function PanelVizTypePicker({ panel, data, onChange, onClose, showBackButton }: Props) {
const styles = useStyles2(getStyles);
const panelModel = useMemo(() => new PanelModelCompatibilityWrapper(panel), [panel]);
const filterId = useId();
@@ -98,55 +97,49 @@ export function PanelVizTypePicker({ panel, editPreview, data, onChange, onClose
</TabsBar>
<ScrollContainer>
<TabContent className={styles.tabContent}>
<Stack gap={1} direction="column">
<Field
tabIndex={0}
className={styles.searchField}
noMargin
htmlFor={filterId}
aria-label={t('dashboard-scene.panel-viz-type-picker.placeholder-search-for', 'Search for...')}
>
<Stack direction="row" gap={1}>
{showBackButton && (
<Button
aria-label={t('dashboard-scene.panel-viz-type-picker.title-close', 'Close')}
fill="text"
variant="secondary"
icon="arrow-left"
data-testid={selectors.components.PanelEditor.toggleVizPicker}
onClick={onClose}
>
<Trans i18nKey="dashboard-scene.panel-viz-type-picker.button.close">Back</Trans>
</Button>
)}
<FilterInput
id={filterId}
className={styles.filter}
value={searchQuery}
onChange={setSearchQuery}
placeholder={t('dashboard-scene.panel-viz-type-picker.placeholder-search-for', 'Search for...')}
/>
</Stack>
</Field>
{listMode === VisualizationSelectPaneTab.Suggestions && (
<VisualizationSuggestions onChange={onChange} panel={panelModel} data={data} />
)}
{listMode === VisualizationSelectPaneTab.Visualizations && (
<Stack gap={1} direction="column">
<Field
tabIndex={0}
className={styles.searchField}
noMargin
htmlFor={filterId}
aria-label={t('dashboard-scene.panel-viz-type-picker.placeholder-search-for', 'Search for...')}
>
<Stack direction="row" gap={1}>
{showBackButton && (
<Button
aria-label={t('dashboard-scene.panel-viz-type-picker.title-close', 'Close')}
fill="text"
variant="secondary"
icon="arrow-left"
data-testid={selectors.components.PanelEditor.toggleVizPicker}
onClick={onClose}
>
<Trans i18nKey="dashboard-scene.panel-viz-type-picker.button.close">Back</Trans>
</Button>
)}
<FilterInput
id={filterId}
className={styles.filter}
value={searchQuery}
onChange={setSearchQuery}
placeholder={t('dashboard-scene.panel-viz-type-picker.placeholder-search-for', 'Search for...')}
/>
</Stack>
</Field>
{listMode === VisualizationSelectPaneTab.Suggestions && (
<VisualizationSuggestions
onChange={onChange}
panel={panelModel}
editPreview={editPreview}
data={data}
searchQuery={searchQuery}
/>
)}
{listMode === VisualizationSelectPaneTab.Visualizations && (
<VizTypePicker
pluginId={panel.state.pluginId}
searchQuery={searchQuery}
trackSearch={trackSearch}
onChange={onChange}
/>
)}
</Stack>
</Stack>
)}
</TabContent>
</ScrollContainer>
</div>
@@ -162,7 +155,7 @@ const getStyles = (theme: GrafanaTheme2) => ({
gap: theme.spacing(2),
}),
searchField: css({
margin: theme.spacing(0.5, 0, 1, 0), // input glow with the boundary without this
marginTop: theme.spacing(0.5), // input glow with the boundary without this
}),
tabs: css({
width: '100%',

View File

@@ -90,6 +90,7 @@ import { DashboardGridItem } from './layout-default/DashboardGridItem';
import { DefaultGridLayoutManager } from './layout-default/DefaultGridLayoutManager';
import { addNewRowTo } from './layouts-shared/addNew';
import { clearClipboard } from './layouts-shared/paste';
import { getIsLazy } from './layouts-shared/utils';
import { DashboardLayoutManager } from './types/DashboardLayoutManager';
import { LayoutParent } from './types/LayoutParent';
@@ -198,7 +199,7 @@ export class DashboardScene extends SceneObjectBase<DashboardSceneState> impleme
meta: {},
editable: true,
$timeRange: state.$timeRange ?? new SceneTimeRange({}),
body: state.body ?? DefaultGridLayoutManager.fromVizPanels([]),
body: state.body ?? DefaultGridLayoutManager.fromVizPanels([], getIsLazy(state.preload)),
links: state.links ?? [],
...state,
editPane: new DashboardEditPane(),

View File

@@ -1,7 +1,7 @@
import React, { useContext, useEffect, useState } from 'react';
import { Trans } from '@grafana/i18n';
import { LazyLoader, VizPanel } from '@grafana/scenes';
import { VizPanel } from '@grafana/scenes';
import { Box, Spinner } from '@grafana/ui';
import { DashboardScene } from './DashboardScene';
@@ -51,23 +51,11 @@ export function useSoloPanelContext() {
return useContext(SoloPanelContext);
}
export function renderMatchingSoloPanels(
soloPanelContext: SoloPanelContextValue,
panels: VizPanel[],
isLazy?: boolean
) {
export function renderMatchingSoloPanels(soloPanelContext: SoloPanelContextValue, panels: VizPanel[]) {
const matches: React.ReactNode[] = [];
for (const panel of panels) {
if (soloPanelContext.matches(panel)) {
if (isLazy) {
matches.push(
<LazyLoader key={panel.state.key!}>
<panel.Component model={panel} />
</LazyLoader>
);
} else {
matches.push(<panel.Component model={panel} key={panel.state.key} />);
}
matches.push(<panel.Component model={panel} key={panel.state.key} />);
}
}

View File

@@ -8,7 +8,6 @@ import { useStyles2 } from '@grafana/ui';
import { ConditionalRenderingGroup } from '../../conditional-rendering/group/ConditionalRenderingGroup';
import { useIsConditionallyHidden } from '../../conditional-rendering/hooks/useIsConditionallyHidden';
import { useDashboardState } from '../../utils/utils';
import { SoloPanelContextValueWithSearchStringFilter } from '../PanelSearchLayout';
import { renderMatchingSoloPanels, useSoloPanelContext } from '../SoloPanelContext';
import { getIsLazy } from '../layouts-shared/utils';
@@ -90,11 +89,7 @@ export function AutoGridItemRenderer({ model }: SceneComponentProps<AutoGridItem
);
if (soloPanelContext) {
// Use lazy loading only for panel search layout (SoloPanelContextValueWithSearchStringFilter)
// as it renders multiple panels in a grid. Skip lazy loading for viewPanel URL param
// (SoloPanelContextWithPathIdFilter) since single panels should render immediately.
const useLazyForSoloPanel = isLazy && soloPanelContext instanceof SoloPanelContextValueWithSearchStringFilter;
return renderMatchingSoloPanels(soloPanelContext, [body, ...repeatedPanels], useLazyForSoloPanel);
return renderMatchingSoloPanels(soloPanelContext, [body, ...repeatedPanels]);
}
const isDragging = !!draggingKey;

View File

@@ -1,43 +1,17 @@
import { css } from '@emotion/css';
import { RefObject, useMemo } from 'react';
import { useMemo } from 'react';
import { config } from '@grafana/runtime';
import { LazyLoader, SceneComponentProps, VizPanel } from '@grafana/scenes';
import { SceneComponentProps } from '@grafana/scenes';
import { GRID_CELL_HEIGHT, GRID_CELL_VMARGIN } from 'app/core/constants';
import { useDashboardState } from '../../utils/utils';
import { SoloPanelContextValueWithSearchStringFilter } from '../PanelSearchLayout';
import { renderMatchingSoloPanels, useSoloPanelContext } from '../SoloPanelContext';
import { getIsLazy } from '../layouts-shared/utils';
import { DashboardGridItem, RepeatDirection } from './DashboardGridItem';
interface PanelWrapperProps {
panel: VizPanel;
isLazy: boolean;
containerRef?: RefObject<HTMLDivElement>;
}
function PanelWrapper({ panel, isLazy, containerRef }: PanelWrapperProps) {
if (isLazy) {
return (
<LazyLoader key={panel.state.key!} ref={containerRef} className={panelWrapper}>
<panel.Component model={panel} />
</LazyLoader>
);
}
return (
<div className={panelWrapper} ref={containerRef}>
<panel.Component model={panel} />
</div>
);
}
export function DashboardGridItemRenderer({ model }: SceneComponentProps<DashboardGridItem>) {
const { repeatedPanels = [], itemHeight, variableName, body } = model.useState();
const soloPanelContext = useSoloPanelContext();
const { preload } = useDashboardState(model);
const isLazy = useMemo(() => getIsLazy(preload), [preload]);
const layoutStyle = useLayoutStyle(
model.getRepeatDirection(),
model.getChildCount(),
@@ -46,22 +20,26 @@ export function DashboardGridItemRenderer({ model }: SceneComponentProps<Dashboa
);
if (soloPanelContext) {
// Use lazy loading only for panel search layout (SoloPanelContextValueWithSearchStringFilter)
// as it renders multiple panels in a grid. Skip lazy loading for viewPanel URL param
// (SoloPanelContextWithPathIdFilter) since single panels should render immediately.
const useLazyForSoloPanel = isLazy && soloPanelContext instanceof SoloPanelContextValueWithSearchStringFilter;
return renderMatchingSoloPanels(soloPanelContext, [body, ...repeatedPanels], useLazyForSoloPanel);
return renderMatchingSoloPanels(soloPanelContext, [body, ...repeatedPanels]);
}
if (!variableName) {
return <PanelWrapper panel={body} isLazy={isLazy} containerRef={model.containerRef} />;
return (
<div className={panelWrapper} ref={model.containerRef}>
<body.Component model={body} key={body.state.key} />
</div>
);
}
return (
<div className={layoutStyle} ref={model.containerRef}>
<PanelWrapper panel={body} isLazy={isLazy} />
<div className={panelWrapper} key={body.state.key}>
<body.Component model={body} key={body.state.key} />
</div>
{repeatedPanels.map((panel) => (
<PanelWrapper key={panel.state.key!} panel={panel} isLazy={isLazy} />
<div className={panelWrapper} key={panel.state.key}>
<panel.Component model={panel} key={panel.state.key} />
</div>
))}
</div>
);

View File

@@ -47,6 +47,7 @@ import { AutoGridItem } from '../layout-auto-grid/AutoGridItem';
import { CanvasGridAddActions } from '../layouts-shared/CanvasGridAddActions';
import { clearClipboard, getDashboardGridItemFromClipboard } from '../layouts-shared/paste';
import { dashboardCanvasAddButtonHoverStyles } from '../layouts-shared/styles';
import { getIsLazy } from '../layouts-shared/utils';
import { DashboardLayoutGrid } from '../types/DashboardLayoutGrid';
import { DashboardLayoutManager } from '../types/DashboardLayoutManager';
import { LayoutRegistryItem } from '../types/LayoutRegistryItem';
@@ -564,10 +565,11 @@ export class DefaultGridLayoutManager
public static createFromLayout(currentLayout: DashboardLayoutManager): DefaultGridLayoutManager {
const panels = currentLayout.getVizPanels();
return DefaultGridLayoutManager.fromVizPanels(panels);
const isLazy = getIsLazy(getDashboardSceneFor(currentLayout).state.preload)!;
return DefaultGridLayoutManager.fromVizPanels(panels, isLazy);
}
public static fromVizPanels(panels: VizPanel[] = []): DefaultGridLayoutManager {
public static fromVizPanels(panels: VizPanel[] = [], isLazy?: boolean | undefined): DefaultGridLayoutManager {
const children: DashboardGridItem[] = [];
const panelHeight = 10;
const panelWidth = GRID_COLUMN_COUNT / 3;
@@ -605,6 +607,7 @@ export class DefaultGridLayoutManager
children: children,
isDraggable: true,
isResizable: true,
isLazy,
}),
});
}
@@ -612,7 +615,8 @@ export class DefaultGridLayoutManager
public static fromGridItems(
gridItems: SceneGridItemLike[],
isDraggable?: boolean,
isResizable?: boolean
isResizable?: boolean,
isLazy?: boolean | undefined
): DefaultGridLayoutManager {
const children = gridItems.reduce<SceneGridItemLike[]>((acc, gridItem) => {
gridItem.clearParent();
@@ -626,6 +630,7 @@ export class DefaultGridLayoutManager
children,
isDraggable,
isResizable,
isLazy,
}),
});
}

View File

@@ -358,7 +358,8 @@ export class RowsLayoutManager extends SceneObjectBase<RowsLayoutManagerState> i
layout: DefaultGridLayoutManager.fromGridItems(
rowConfig.children,
rowConfig.isDraggable ?? layout.state.grid.state.isDraggable,
rowConfig.isResizable ?? layout.state.grid.state.isResizable
rowConfig.isResizable ?? layout.state.grid.state.isResizable,
layout.state.grid.state.isLazy
),
})
);

View File

@@ -9,10 +9,8 @@ import {
PanelPluginMeta,
PanelPluginVisualizationSuggestion,
} from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { Trans, t } from '@grafana/i18n';
import { config } from '@grafana/runtime';
import { VizPanel } from '@grafana/scenes';
import { Alert, Button, Icon, Spinner, Text, useStyles2 } from '@grafana/ui';
import { UNCONFIGURED_PANEL_PLUGIN_ID } from 'app/features/dashboard-scene/scene/UnconfiguredPanel';
@@ -25,47 +23,25 @@ import { VisualizationSuggestionCard } from './VisualizationSuggestionCard';
import { VizTypeChangeDetails } from './types';
export interface Props {
onChange: (options: VizTypeChangeDetails, panel?: VizPanel) => void;
editPreview?: VizPanel;
onChange: (options: VizTypeChangeDetails) => void;
data?: PanelData;
panel?: PanelModel;
searchQuery?: string;
}
const useSuggestions = (data: PanelData | undefined, searchQuery: string | undefined) => {
const useSuggestions = (data: PanelData | undefined) => {
const [hasFetched, setHasFetched] = useState(false);
const { value, loading, error, retry } = useAsyncRetry(async () => {
await new Promise((resolve) => setTimeout(resolve, hasFetched ? 75 : 0));
setHasFetched(true);
return await getAllSuggestions(data);
}, [hasFetched, data]);
const filteredValue = useMemo(() => {
if (!value || !searchQuery) {
return value;
}
const lowerCaseQuery = searchQuery.toLowerCase();
const filteredSuggestions = value.suggestions.filter(
(suggestion) =>
suggestion.name.toLowerCase().includes(lowerCaseQuery) ||
suggestion.pluginId.toLowerCase().includes(lowerCaseQuery) ||
suggestion.description?.toLowerCase().includes(lowerCaseQuery)
);
return {
...value,
suggestions: filteredSuggestions,
};
}, [value, searchQuery]);
return { value: filteredValue, loading, error, retry };
return { value, loading, error, retry };
};
export function VisualizationSuggestions({ onChange, editPreview, data, panel, searchQuery }: Props) {
export function VisualizationSuggestions({ onChange, data, panel }: Props) {
const styles = useStyles2(getStyles);
const { value: result, loading, error, retry } = useSuggestions(data, searchQuery);
const { value: result, loading, error, retry } = useSuggestions(data);
const suggestions = result?.suggestions;
const hasLoadingErrors = result?.hasErrors ?? false;
@@ -97,21 +73,18 @@ export function VisualizationSuggestions({ onChange, editPreview, data, panel, s
const applySuggestion = useCallback(
(suggestion: PanelPluginVisualizationSuggestion, isPreview?: boolean) => {
onChange(
{
pluginId: suggestion.pluginId,
options: suggestion.options,
fieldConfig: suggestion.fieldConfig,
withModKey: isPreview,
},
isPreview ? editPreview : undefined
);
onChange({
pluginId: suggestion.pluginId,
options: suggestion.options,
fieldConfig: suggestion.fieldConfig,
withModKey: isPreview,
});
if (isPreview) {
setSuggestionHash(suggestion.hash);
}
},
[onChange, editPreview]
[onChange]
);
useEffect(() => {
@@ -212,13 +185,17 @@ export function VisualizationSuggestions({ onChange, editPreview, data, panel, s
variant="primary"
size={'md'}
className={styles.applySuggestionButton}
data-testid={selectors.components.VisualizationPreview.confirm(suggestion.name)}
aria-label={t(
'panel.visualization-suggestions.apply-suggestion-aria-label',
'Apply {{suggestionName}} visualization',
{ suggestionName: suggestion.name }
)}
onClick={() => applySuggestion(suggestion, false)}
onClick={() =>
onChange({
pluginId: suggestion.pluginId,
withModKey: false,
})
}
>
{t('panel.visualization-suggestions.use-this-suggestion', 'Use this suggestion')}
</Button>