Compare commits

..

7 Commits

Author SHA1 Message Date
Georges Chaudy 1022f04063 Refactor Watch method to implement batch event processing and authorization 2026-01-09 10:25:50 +01:00
Georges Chaudy 5e7d0392a3 Refactor authorization checks to utilize batch checks instead of compile across multiple services
- Integrated batch authorization checks using the authz package in List functions for IAM and SecureValue services, improving efficiency in permission validation.
- Updated List functions to handle pagination and authorization in a more streamlined manner, reducing redundant checks.
- Enhanced the server's List method to support batch authorization for resource listing, ensuring proper access control.
- Refactored related test cases to validate the new batch authorization logic and ensure comprehensive coverage of various scenarios.
2026-01-09 10:25:36 +01:00
Georges Chaudy aea8d434c9 point to authlib in branch 2026-01-08 15:33:35 +01:00
Georges Chaudy cddb1f9fa6 point to authlib in branch 2026-01-06 17:01:09 +01:00
Georges Chaudy d2c78f5799 Implement BatchCheck method in Authz service with comprehensive unit tests
- Added BatchCheck method to the Authz service, enabling multiple access checks in a single request with optimized batching.
- Implemented request validation, grouping checks by namespace and action to enhance performance.
- Developed extensive unit tests for BatchCheck, covering various scenarios including empty checks, invalid namespaces, and user permission checks.
- Enhanced caching behavior for permissions and integrated folder inheritance checks.
- Updated related test cases to ensure robust validation of the new functionality.
2026-01-06 16:59:24 +01:00
Georges Chaudy 4f3f9ebc04 Add unit tests for BatchCheck method in LegacyAccessClient
- Implemented multiple test cases to validate the behavior of the BatchCheck method, including scenarios for empty checks, unknown resources, admin permissions, unchecked verbs, and scope validation.
- Ensured proper handling of multiple checks with mixed results and the use of a resolver for resource mapping.
- Added tests for caching behavior based on action to optimize performance.
2026-01-06 16:59:24 +01:00
Georges Chaudy 1498970e74 Implement BatchCheck functionality in LegacyAccessClient and update related proto definitions
- Added BatchCheck method to LegacyAccessClient for handling batch authorization checks.
- Updated proto definitions to remove BatchCheckRequest and BatchCheckResponse messages, replacing them with a new structure.
- Adjusted related client and server implementations to align with the new BatchCheck structure.
- Modified tests to validate the new BatchCheck functionality and ensure proper integration with existing authorization logic.
2026-01-06 16:59:23 +01:00
84 changed files with 3282 additions and 4510 deletions
+6 -6
View File
@@ -69,12 +69,12 @@ require (
github.com/at-wat/mqtt-go v0.19.6 // indirect
github.com/aws/aws-sdk-go v1.55.7 // indirect
github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df // indirect
github.com/benbjohnson/clock v1.3.5 // indirect
@@ -162,14 +162,14 @@ require (
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // indirect
github.com/grafana/grafana-aws-sdk v1.4.2 // indirect
github.com/grafana/grafana-aws-sdk v1.3.0 // indirect
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 // indirect
github.com/grafana/grafana/apps/provisioning v0.0.0 // indirect
github.com/grafana/grafana/pkg/apiserver v0.0.0 // indirect
github.com/grafana/grafana/pkg/semconv v0.0.0-20250804150913-990f1c69ecc2 // indirect
github.com/grafana/otel-profiling-go v0.5.1 // indirect
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 // indirect
github.com/grafana/sqlds/v5 v5.0.3 // indirect
github.com/grafana/sqlds/v4 v4.2.7 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // indirect
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.1-0.20191002090509-6af20e3a5340 // indirect
+22 -22
View File
@@ -177,38 +177,38 @@ github.com/aws/aws-sdk-go-v2 v1.40.0 h1:/WMUA0kjhZExjOQN2z3oLALDREea1A7TobfuiBrK
github.com/aws/aws-sdk-go-v2 v1.40.0/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 h1:12SpdwU8Djs+YGklkinSSlcrPyj3H4VifVsKf78KbwA=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11/go.mod h1:dd+Lkp6YmMryke+qxW/VnKyhMBDTYP41Q2Bb+6gNZgY=
github.com/aws/aws-sdk-go-v2/config v1.31.17 h1:QFl8lL6RgakNK86vusim14P2k8BFSxjvUkcWLDjgz9Y=
github.com/aws/aws-sdk-go-v2/config v1.31.17/go.mod h1:V8P7ILjp/Uef/aX8TjGk6OHZN6IKPM5YW6S78QnRD5c=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 h1:56HGpsgnmD+2/KpG0ikvvR8+3v3COCwaF4r+oWwOeNA=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21/go.mod h1:3YELwedmQbw7cXNaII2Wywd+YY58AmLPwX4LzARgmmA=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 h1:T1brd5dR3/fzNFAQch/iBKeX07/ffu/cLu+q+RuzEWk=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13/go.mod h1:Peg/GBAQ6JDt+RoBf4meB1wylmAipb7Kg2ZFakZTlwk=
github.com/aws/aws-sdk-go-v2/config v1.31.10 h1:7LllDZAegXU3yk41mwM6KcPu0wmjKGQB1bg99bNdQm4=
github.com/aws/aws-sdk-go-v2/config v1.31.10/go.mod h1:Ge6gzXPjqu4v0oHvgAwvGzYcK921GU0hQM25WF/Kl+8=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 h1:TxkI7QI+sFkTItN/6cJuMZEIVMFXeu2dI1ZffkXngKI=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14/go.mod h1:12x4Uw/vijC11XkctTjy92TNCQ+UnNJkT7fzX0Yd93E=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8 h1:gLD09eaJUdiszm7vd1btiQUYE0Hj+0I2b8AS+75z9AY=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8/go.mod h1:4RW3oMPt1POR74qVOC4SbubxAwdP4pCT0nSw3jycOU4=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84 h1:cTXRdLkpBanlDwISl+5chq5ui1d1YWg4PWMR9c3kXyw=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84/go.mod h1:kwSy5X7tfIHN39uucmjQVs2LvDdXEjQucgQQEqCggEo=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 h1:PZHqQACxYb8mYgms4RZbhZG0a7dPW06xOjmaH0EJC/I=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14/go.mod h1:VymhrMJUWs69D8u0/lZ7jSB6WgaG/NqHi3gX0aYf6U0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 h1:bOS19y6zlJwagBfHxs0ESzr1XCOU2KXJCWcq3E2vfjY=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14/go.mod h1:1ipeGBMAxZ0xcTm6y6paC2C/J6f6OO7LBODV9afuAyM=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36 h1:GMYy2EOWfzdP3wfVAGXBNKY5vK4K8vMET4sYOYltmqs=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36/go.mod h1:gDhdAV6wL3PmPqBhiPbnlS447GoWs8HTTOYef9/9Inw=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 h1:oegbebPEMA/1Jny7kvwejowCaHz1FWZAQ94WXFNCyTM=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1/go.mod h1:kemo5Myr9ac0U9JfSjMo9yHLtw+pECEHsFtJ9tqCEI8=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4 h1:nAP2GYbfh8dd2zGZqFRSMlq+/F6cMPBUuCsGAMkN074=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4/go.mod h1:LT10DsiGjLWh4GbjInf9LQejkYEhBgBCjLG5+lvk4EE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY4AtKoACfzIGD8A0+hbT41KTKF//gq7jITfM=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 h1:M6JI2aGFEzYxsF6CXIuRBnkge9Wf9a2xU39rNeXgu10=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8/go.mod h1:Fw+MyTwlwjFsSTE31mH211Np+CUslml8mzc0AFEG09s=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17 h1:qcLWgdhq45sDM9na4cvXax9dyLitn8EYBRl8Ak4XtG4=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17/go.mod h1:M+jkjBFZ2J6DJrjMv2+vkBbuht6kxJYtJiwoVgX4p4U=
github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0 h1:0reDqfEN+tB+sozj2r92Bep8MEwBZgtAXTND1Kk9OXg=
github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0/go.mod h1:kUklwasNoCn5YpyAqC/97r6dzTA1SRKJfKq16SXeoDU=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 h1:0JPwLz1J+5lEOfy/g0SURC9cxhbQ1lIMHMa+AHZSzz0=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1/go.mod h1:fKvyjJcz63iL/ftA6RaM8sRCtN4r4zl4tjL3qw5ec7k=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 h1:OWs0/j2UYR5LOGi88sD5/lhN6TDLG6SfA7CqsQO9zF0=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5/go.mod h1:klO+ejMvYsB4QATfEOIXk8WAEwN4N0aBfJpvC+5SZBo=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gwU6s6ZzJ5wDamBUs=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4 h1:FTdEN9dtWPB0EOURNtDPmwGp6GGvMqRJCAihkSl/1No=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4/go.mod h1:mYubxV9Ff42fZH4kexj43gFPhgc/LyC7KqvUKt1watc=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0 h1:I7ghctfGXrscr7r1Ga/mDqSJKm7Fkpl5Mwq79Z+rZqU=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0/go.mod h1:Zo9id81XP6jbayIFWNuDpA6lMBWhsVy+3ou2jLa4JnA=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B0f17JdflleJRNR4=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/bahlo/generic-list-go v0.2.0 h1:5sz/EEAK+ls5wF+NeqDpk5+iNdMDXrh3z3nPnH1Wvgk=
@@ -637,8 +637,8 @@ github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfU
github.com/grafana/grafana-app-sdk v0.48.7/go.mod h1:DWsaaH39ZMHwSOSoUBaeW8paMrRaYsjRYlLwCJYd78k=
github.com/grafana/grafana-app-sdk/logging v0.48.7 h1:Oa5qg473gka5+W/WQk61Xbw4YdAv+wV2Z4bJtzeCaQw=
github.com/grafana/grafana-app-sdk/logging v0.48.7/go.mod h1:5u3KalezoBAAo2Y3ytDYDAIIPvEqFLLDSxeiK99QxDU=
github.com/grafana/grafana-aws-sdk v1.4.2 h1:GrUEoLbs46r8rG/GZL4L2b63Bo+rkIYKdtCT7kT5KkM=
github.com/grafana/grafana-aws-sdk v1.4.2/go.mod h1:1qnZdYs6gQzxxF0dDodaE7Rn9fiMzuhwvtaAZ7ySnhY=
github.com/grafana/grafana-aws-sdk v1.3.0 h1:/bfJzP93rCel1GbWoRSq0oUo424MZXt8jAp2BK9w8tM=
github.com/grafana/grafana-aws-sdk v1.3.0/go.mod h1:VGycF0JkCGKND2O5je1ucOqPJ0ZNhZYzV3c2bNBAaGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 h1:FFcEA01tW+SmuJIuDbHOdgUBL+d7DPrZ2N4zwzPhfGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1/go.mod h1:Oi4anANlCuTCc66jCyqIzfVbgLXFll8Wja+Y4vfANlc=
github.com/grafana/grafana-plugin-sdk-go v0.284.0 h1:1bK7eWsnPBLUWDcWJWe218Ik5ad0a5JpEL4mH9ry7Ws=
@@ -655,8 +655,8 @@ github.com/grafana/pyroscope-go/godeltaprof v0.1.9 h1:c1Us8i6eSmkW+Ez05d3co8kasn
github.com/grafana/pyroscope-go/godeltaprof v0.1.9/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc h1:GN2Lv3MGO7AS6PrRoT6yV5+wkrOpcszoIsO4+4ds248=
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc/go.mod h1:+JKpmjMGhpgPL+rXZ5nsZieVzvarn86asRlBg4uNGnk=
github.com/grafana/sqlds/v5 v5.0.3 h1:+yUMUxfa0WANQsmS9xtTFSRX1Q55Iv1B9EjlrW4VlBU=
github.com/grafana/sqlds/v5 v5.0.3/go.mod h1:GKeTTiC+GeR1X0z3f0Iee+hZnNgN62uQpj5XVMx5Uew=
github.com/grafana/sqlds/v4 v4.2.7 h1:sFQhsS7DBakNMdxa++yOfJ9BVvkZwFJ0B95o57K0/XA=
github.com/grafana/sqlds/v4 v4.2.7/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8=
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 h1:QGLs/O40yoNK9vmy4rhUGBVyMf1lISBGtXRpsu/Qu/o=
+11 -11
View File
@@ -108,22 +108,22 @@ require (
github.com/aws/aws-sdk-go v1.55.7 // indirect
github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.17 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.10 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8 // indirect
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df // indirect
@@ -229,7 +229,7 @@ require (
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // indirect
github.com/grafana/grafana-aws-sdk v1.4.2 // indirect
github.com/grafana/grafana-aws-sdk v1.3.0 // indirect
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 // indirect
github.com/grafana/grafana-plugin-sdk-go v0.284.0 // indirect
github.com/grafana/grafana/apps/dashboard v0.0.0 // indirect
@@ -242,7 +242,7 @@ require (
github.com/grafana/otel-profiling-go v0.5.1 // indirect
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 // indirect
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
github.com/grafana/sqlds/v5 v5.0.3 // indirect
github.com/grafana/sqlds/v4 v4.2.7 // indirect
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // indirect
+22 -22
View File
@@ -242,20 +242,20 @@ github.com/aws/aws-sdk-go-v2 v1.40.0 h1:/WMUA0kjhZExjOQN2z3oLALDREea1A7TobfuiBrK
github.com/aws/aws-sdk-go-v2 v1.40.0/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 h1:12SpdwU8Djs+YGklkinSSlcrPyj3H4VifVsKf78KbwA=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11/go.mod h1:dd+Lkp6YmMryke+qxW/VnKyhMBDTYP41Q2Bb+6gNZgY=
github.com/aws/aws-sdk-go-v2/config v1.31.17 h1:QFl8lL6RgakNK86vusim14P2k8BFSxjvUkcWLDjgz9Y=
github.com/aws/aws-sdk-go-v2/config v1.31.17/go.mod h1:V8P7ILjp/Uef/aX8TjGk6OHZN6IKPM5YW6S78QnRD5c=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 h1:56HGpsgnmD+2/KpG0ikvvR8+3v3COCwaF4r+oWwOeNA=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21/go.mod h1:3YELwedmQbw7cXNaII2Wywd+YY58AmLPwX4LzARgmmA=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 h1:T1brd5dR3/fzNFAQch/iBKeX07/ffu/cLu+q+RuzEWk=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13/go.mod h1:Peg/GBAQ6JDt+RoBf4meB1wylmAipb7Kg2ZFakZTlwk=
github.com/aws/aws-sdk-go-v2/config v1.31.10 h1:7LllDZAegXU3yk41mwM6KcPu0wmjKGQB1bg99bNdQm4=
github.com/aws/aws-sdk-go-v2/config v1.31.10/go.mod h1:Ge6gzXPjqu4v0oHvgAwvGzYcK921GU0hQM25WF/Kl+8=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 h1:TxkI7QI+sFkTItN/6cJuMZEIVMFXeu2dI1ZffkXngKI=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14/go.mod h1:12x4Uw/vijC11XkctTjy92TNCQ+UnNJkT7fzX0Yd93E=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8 h1:gLD09eaJUdiszm7vd1btiQUYE0Hj+0I2b8AS+75z9AY=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8/go.mod h1:4RW3oMPt1POR74qVOC4SbubxAwdP4pCT0nSw3jycOU4=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84 h1:cTXRdLkpBanlDwISl+5chq5ui1d1YWg4PWMR9c3kXyw=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84/go.mod h1:kwSy5X7tfIHN39uucmjQVs2LvDdXEjQucgQQEqCggEo=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 h1:PZHqQACxYb8mYgms4RZbhZG0a7dPW06xOjmaH0EJC/I=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14/go.mod h1:VymhrMJUWs69D8u0/lZ7jSB6WgaG/NqHi3gX0aYf6U0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 h1:bOS19y6zlJwagBfHxs0ESzr1XCOU2KXJCWcq3E2vfjY=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14/go.mod h1:1ipeGBMAxZ0xcTm6y6paC2C/J6f6OO7LBODV9afuAyM=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36 h1:GMYy2EOWfzdP3wfVAGXBNKY5vK4K8vMET4sYOYltmqs=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36/go.mod h1:gDhdAV6wL3PmPqBhiPbnlS447GoWs8HTTOYef9/9Inw=
github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.45.3 h1:Nn3qce+OHZuMj/edx4its32uxedAmquCDxtZkrdeiD4=
@@ -264,12 +264,12 @@ github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.51.0 h1:e5cbPZYTIY2nUEFie
github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.51.0/go.mod h1:UseIHRfrm7PqeZo6fcTb6FUCXzCnh1KJbQbmOfxArGM=
github.com/aws/aws-sdk-go-v2/service/ec2 v1.225.2 h1:IfMb3Ar8xEaWjgH/zeVHYD8izwJdQgRP5mKCTDt4GNk=
github.com/aws/aws-sdk-go-v2/service/ec2 v1.225.2/go.mod h1:35jGWx7ECvCwTsApqicFYzZ7JFEnBc6oHUuOQ3xIS54=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 h1:oegbebPEMA/1Jny7kvwejowCaHz1FWZAQ94WXFNCyTM=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1/go.mod h1:kemo5Myr9ac0U9JfSjMo9yHLtw+pECEHsFtJ9tqCEI8=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4 h1:nAP2GYbfh8dd2zGZqFRSMlq+/F6cMPBUuCsGAMkN074=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4/go.mod h1:LT10DsiGjLWh4GbjInf9LQejkYEhBgBCjLG5+lvk4EE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY4AtKoACfzIGD8A0+hbT41KTKF//gq7jITfM=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 h1:M6JI2aGFEzYxsF6CXIuRBnkge9Wf9a2xU39rNeXgu10=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8/go.mod h1:Fw+MyTwlwjFsSTE31mH211Np+CUslml8mzc0AFEG09s=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17 h1:qcLWgdhq45sDM9na4cvXax9dyLitn8EYBRl8Ak4XtG4=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17/go.mod h1:M+jkjBFZ2J6DJrjMv2+vkBbuht6kxJYtJiwoVgX4p4U=
github.com/aws/aws-sdk-go-v2/service/kms v1.41.2 h1:zJeUxFP7+XP52u23vrp4zMcVhShTWbNO8dHV6xCSvFo=
@@ -282,12 +282,12 @@ github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0 h1:0reDqfEN+tB+sozj2r92Bep8MEwBZ
github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0/go.mod h1:kUklwasNoCn5YpyAqC/97r6dzTA1SRKJfKq16SXeoDU=
github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.40.1 h1:w6a0H79HrHf3lr+zrw+pSzR5B+caiQFAKiNHlrUcnoc=
github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.40.1/go.mod h1:c6Vg0BRiU7v0MVhHupw90RyL120QBwAMLbDCzptGeMk=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 h1:0JPwLz1J+5lEOfy/g0SURC9cxhbQ1lIMHMa+AHZSzz0=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1/go.mod h1:fKvyjJcz63iL/ftA6RaM8sRCtN4r4zl4tjL3qw5ec7k=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 h1:OWs0/j2UYR5LOGi88sD5/lhN6TDLG6SfA7CqsQO9zF0=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5/go.mod h1:klO+ejMvYsB4QATfEOIXk8WAEwN4N0aBfJpvC+5SZBo=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gwU6s6ZzJ5wDamBUs=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4 h1:FTdEN9dtWPB0EOURNtDPmwGp6GGvMqRJCAihkSl/1No=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4/go.mod h1:mYubxV9Ff42fZH4kexj43gFPhgc/LyC7KqvUKt1watc=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0 h1:I7ghctfGXrscr7r1Ga/mDqSJKm7Fkpl5Mwq79Z+rZqU=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0/go.mod h1:Zo9id81XP6jbayIFWNuDpA6lMBWhsVy+3ou2jLa4JnA=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B0f17JdflleJRNR4=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 h1:60m4tnanN1ctzIu4V3bfCNJ39BiOPSm1gHFlFjTkRE0=
@@ -853,8 +853,8 @@ github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfU
github.com/grafana/grafana-app-sdk v0.48.7/go.mod h1:DWsaaH39ZMHwSOSoUBaeW8paMrRaYsjRYlLwCJYd78k=
github.com/grafana/grafana-app-sdk/logging v0.48.7 h1:Oa5qg473gka5+W/WQk61Xbw4YdAv+wV2Z4bJtzeCaQw=
github.com/grafana/grafana-app-sdk/logging v0.48.7/go.mod h1:5u3KalezoBAAo2Y3ytDYDAIIPvEqFLLDSxeiK99QxDU=
github.com/grafana/grafana-aws-sdk v1.4.2 h1:GrUEoLbs46r8rG/GZL4L2b63Bo+rkIYKdtCT7kT5KkM=
github.com/grafana/grafana-aws-sdk v1.4.2/go.mod h1:1qnZdYs6gQzxxF0dDodaE7Rn9fiMzuhwvtaAZ7ySnhY=
github.com/grafana/grafana-aws-sdk v1.3.0 h1:/bfJzP93rCel1GbWoRSq0oUo424MZXt8jAp2BK9w8tM=
github.com/grafana/grafana-aws-sdk v1.3.0/go.mod h1:VGycF0JkCGKND2O5je1ucOqPJ0ZNhZYzV3c2bNBAaGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 h1:FFcEA01tW+SmuJIuDbHOdgUBL+d7DPrZ2N4zwzPhfGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1/go.mod h1:Oi4anANlCuTCc66jCyqIzfVbgLXFll8Wja+Y4vfANlc=
github.com/grafana/grafana-cloud-migration-snapshot v1.9.0 h1:JOzchPgptwJdruYoed7x28lFDwhzs7kssResYsnC0iI=
@@ -891,8 +891,8 @@ github.com/grafana/pyroscope/api v1.2.1-0.20250415190842-3ff7247547ae h1:35W3Wjp
github.com/grafana/pyroscope/api v1.2.1-0.20250415190842-3ff7247547ae/go.mod h1:6CJ1uXmLZ13ufpO9xE4pST+DyaBt0uszzrV0YnoaVLQ=
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc h1:GN2Lv3MGO7AS6PrRoT6yV5+wkrOpcszoIsO4+4ds248=
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc/go.mod h1:+JKpmjMGhpgPL+rXZ5nsZieVzvarn86asRlBg4uNGnk=
github.com/grafana/sqlds/v5 v5.0.3 h1:+yUMUxfa0WANQsmS9xtTFSRX1Q55Iv1B9EjlrW4VlBU=
github.com/grafana/sqlds/v5 v5.0.3/go.mod h1:GKeTTiC+GeR1X0z3f0Iee+hZnNgN62uQpj5XVMx5Uew=
github.com/grafana/sqlds/v4 v4.2.7 h1:sFQhsS7DBakNMdxa++yOfJ9BVvkZwFJ0B95o57K0/XA=
github.com/grafana/sqlds/v4 v4.2.7/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
github.com/grafana/tempo v1.5.1-0.20250529124718-87c2dc380cec h1:wnzJov9RhSHGaTYGzTygL4qq986fLen8xSqnQgaMd28=
github.com/grafana/tempo v1.5.1-0.20250529124718-87c2dc380cec/go.mod h1:j1IY7J2rUz7TcTjFVVx6HCpyTlYOJPtXuGRZ7sI+vSo=
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
+6 -6
View File
@@ -31,12 +31,12 @@ require (
github.com/apache/arrow-go/v18 v18.4.1 // indirect
github.com/armon/go-metrics v0.4.1 // indirect
github.com/aws/aws-sdk-go-v2 v1.40.0 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 // indirect
github.com/aws/smithy-go v1.23.2 // indirect
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df // indirect
github.com/beorn7/perks v1.0.1 // indirect
@@ -97,14 +97,14 @@ require (
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // indirect
github.com/grafana/grafana-aws-sdk v1.4.2 // indirect
github.com/grafana/grafana-aws-sdk v1.3.0 // indirect
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 // indirect
github.com/grafana/grafana-plugin-sdk-go v0.284.0 // indirect
github.com/grafana/grafana/pkg/apiserver v0.0.0 // indirect
github.com/grafana/grafana/pkg/semconv v0.0.0-20250804150913-990f1c69ecc2 // indirect
github.com/grafana/otel-profiling-go v0.5.1 // indirect
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 // indirect
github.com/grafana/sqlds/v5 v5.0.3 // indirect
github.com/grafana/sqlds/v4 v4.2.7 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 // indirect
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 // indirect
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.1-0.20191002090509-6af20e3a5340 // indirect
+12 -12
View File
@@ -30,18 +30,18 @@ github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJ
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/aws/aws-sdk-go-v2 v1.40.0 h1:/WMUA0kjhZExjOQN2z3oLALDREea1A7TobfuiBrKlwc=
github.com/aws/aws-sdk-go-v2 v1.40.0/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 h1:56HGpsgnmD+2/KpG0ikvvR8+3v3COCwaF4r+oWwOeNA=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21/go.mod h1:3YELwedmQbw7cXNaII2Wywd+YY58AmLPwX4LzARgmmA=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 h1:TxkI7QI+sFkTItN/6cJuMZEIVMFXeu2dI1ZffkXngKI=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14/go.mod h1:12x4Uw/vijC11XkctTjy92TNCQ+UnNJkT7fzX0Yd93E=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 h1:PZHqQACxYb8mYgms4RZbhZG0a7dPW06xOjmaH0EJC/I=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14/go.mod h1:VymhrMJUWs69D8u0/lZ7jSB6WgaG/NqHi3gX0aYf6U0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 h1:bOS19y6zlJwagBfHxs0ESzr1XCOU2KXJCWcq3E2vfjY=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14/go.mod h1:1ipeGBMAxZ0xcTm6y6paC2C/J6f6OO7LBODV9afuAyM=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY4AtKoACfzIGD8A0+hbT41KTKF//gq7jITfM=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gwU6s6ZzJ5wDamBUs=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 h1:oegbebPEMA/1Jny7kvwejowCaHz1FWZAQ94WXFNCyTM=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1/go.mod h1:kemo5Myr9ac0U9JfSjMo9yHLtw+pECEHsFtJ9tqCEI8=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 h1:M6JI2aGFEzYxsF6CXIuRBnkge9Wf9a2xU39rNeXgu10=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8/go.mod h1:Fw+MyTwlwjFsSTE31mH211Np+CUslml8mzc0AFEG09s=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B0f17JdflleJRNR4=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df h1:GSoSVRLoBaFpOOds6QyY1L8AX7uoY+Ln3BHc22W40X0=
@@ -229,8 +229,8 @@ github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfU
github.com/grafana/grafana-app-sdk v0.48.7/go.mod h1:DWsaaH39ZMHwSOSoUBaeW8paMrRaYsjRYlLwCJYd78k=
github.com/grafana/grafana-app-sdk/logging v0.48.7 h1:Oa5qg473gka5+W/WQk61Xbw4YdAv+wV2Z4bJtzeCaQw=
github.com/grafana/grafana-app-sdk/logging v0.48.7/go.mod h1:5u3KalezoBAAo2Y3ytDYDAIIPvEqFLLDSxeiK99QxDU=
github.com/grafana/grafana-aws-sdk v1.4.2 h1:GrUEoLbs46r8rG/GZL4L2b63Bo+rkIYKdtCT7kT5KkM=
github.com/grafana/grafana-aws-sdk v1.4.2/go.mod h1:1qnZdYs6gQzxxF0dDodaE7Rn9fiMzuhwvtaAZ7ySnhY=
github.com/grafana/grafana-aws-sdk v1.3.0 h1:/bfJzP93rCel1GbWoRSq0oUo424MZXt8jAp2BK9w8tM=
github.com/grafana/grafana-aws-sdk v1.3.0/go.mod h1:VGycF0JkCGKND2O5je1ucOqPJ0ZNhZYzV3c2bNBAaGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 h1:FFcEA01tW+SmuJIuDbHOdgUBL+d7DPrZ2N4zwzPhfGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1/go.mod h1:Oi4anANlCuTCc66jCyqIzfVbgLXFll8Wja+Y4vfANlc=
github.com/grafana/grafana-plugin-sdk-go v0.284.0 h1:1bK7eWsnPBLUWDcWJWe218Ik5ad0a5JpEL4mH9ry7Ws=
@@ -243,8 +243,8 @@ github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604/go.mod h1:O/QP1BCm0HHIzbKvgMzqb5sSyH88rzkFk84F4TfJjBU=
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 h1:c1Us8i6eSmkW+Ez05d3co8kasnuOY813tbMN8i/a3Og=
github.com/grafana/pyroscope-go/godeltaprof v0.1.9/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
github.com/grafana/sqlds/v5 v5.0.3 h1:+yUMUxfa0WANQsmS9xtTFSRX1Q55Iv1B9EjlrW4VlBU=
github.com/grafana/sqlds/v5 v5.0.3/go.mod h1:GKeTTiC+GeR1X0z3f0Iee+hZnNgN62uQpj5XVMx5Uew=
github.com/grafana/sqlds/v4 v4.2.7 h1:sFQhsS7DBakNMdxa++yOfJ9BVvkZwFJ0B95o57K0/XA=
github.com/grafana/sqlds/v4 v4.2.7/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0 h1:QGLs/O40yoNK9vmy4rhUGBVyMf1lISBGtXRpsu/Qu/o=
github.com/grpc-ecosystem/go-grpc-middleware/providers/prometheus v1.1.0/go.mod h1:hM2alZsMUni80N33RBe6J0e423LB+odMj7d3EMP9l20=
github.com/grpc-ecosystem/go-grpc-middleware/v2 v2.3.3 h1:B+8ClL/kCQkRiU82d9xajRPKYMrB7E0MbtzWVi1K4ns=
@@ -428,25 +428,12 @@ Or using a Kubernetes format, for example `kubernetes-dashboard.json`:
You _must_ use the Kubernetes resource format to provision dashboards v2 / dynamic dashboards.
It later polls that path every `updateIntervalSeconds` for updates to the dashboard files and updates its database.
{{< admonition type="note" >}}
Grafana installs dashboards at the root level if you don't set the `folder` field.
{{< /admonition >}}
#### Detect updates to provisioned dashboards files
After Grafana provisions your dashboards, it checks the filesystem for changes and updates dashboards as needed.
The mechanism Grafana uses to do this depends on your `updateIntervalSeconds` value:
- **More than 10 seconds**: Grafana polls the path at that interval.
- **10 seconds or less**: Grafana watches the filesystem for changes and updates dashboards when it detects them.
{{< admonition type="note" >}}
When `updateIntervalSeconds` is 10 or less, Grafana relies on filesystem watch events to detect changes.
Depending on your filesystem and how you mount or sync dashboard files (for example, Docker bind mounts or some network filesystems), those events might not reach Grafana.
To work around this, set `updateIntervalSeconds` to more than 10 to force polling, or update your setup so filesystem watch events are propagated.
{{< /admonition >}}
#### Make changes to a provisioned dashboard
You can make changes to a provisioned dashboard in the Grafana UI, but it's not possible to automatically save the changes back to the provisioning source.
+22 -22
View File
@@ -89,18 +89,18 @@ require (
github.com/gorilla/mux v1.8.1 // @grafana/grafana-backend-group
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // @grafana/grafana-app-platform-squad
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // @grafana/alerting-backend
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // @grafana/identity-access-team
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // @grafana/identity-access-team
github.com/grafana/authlib v0.0.0-20260106131612-bb61e476969f // @grafana/identity-access-team
github.com/grafana/authlib/types v0.0.0-20260106131612-bb61e476969f // @grafana/identity-access-team
github.com/grafana/dataplane/examples v0.0.1 // @grafana/observability-metrics
github.com/grafana/dataplane/sdata v0.0.9 // @grafana/observability-metrics
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // @grafana/grafana-backend-group
github.com/grafana/dskit v0.0.0-20251204003651-27988664e6ff // @grafana/grafana-backend-group
github.com/grafana/e2e v0.1.1 // @grafana-app-platform-squad
github.com/grafana/gofpdf v0.0.0-20250307124105-3b9c5d35577f // @grafana/sharing-squad
github.com/grafana/gomemcache v0.0.0-20250318131618-74242eea118d // @grafana/grafana-operator-experience-squad
github.com/grafana/gomemcache v0.0.0-20251127154401-74f93547077b // @grafana/grafana-operator-experience-squad
github.com/grafana/grafana-api-golang-client v0.27.0 // @grafana/alerting-backend
github.com/grafana/grafana-app-sdk v0.48.7 // @grafana/grafana-app-platform-squad
github.com/grafana/grafana-app-sdk/logging v0.48.7 // @grafana/grafana-app-platform-squad
github.com/grafana/grafana-aws-sdk v1.4.2 // @grafana/aws-datasources
github.com/grafana/grafana-aws-sdk v1.3.0 // @grafana/aws-datasources
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 // @grafana/partner-datasources
github.com/grafana/grafana-cloud-migration-snapshot v1.9.0 // @grafana/grafana-operator-experience-squad
github.com/grafana/grafana-google-sdk-go v0.4.2 // @grafana/partner-datasources
@@ -151,7 +151,7 @@ require (
github.com/openfga/api/proto v0.0.0-20250909172242-b4b2a12f5c67 // @grafana/identity-access-team
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20251027165255-0f8f255e5f6c // @grafana/identity-access-team
github.com/openfga/openfga v1.11.1 // @grafana/identity-access-team
github.com/opentracing-contrib/go-grpc v0.1.1 // @grafana/grafana-search-and-storage
github.com/opentracing-contrib/go-grpc v0.1.2 // @grafana/grafana-search-and-storage
github.com/opentracing/opentracing-go v1.2.0 // @grafana/grafana-search-and-storage
github.com/openzipkin/zipkin-go v0.4.3 // @grafana/oss-big-tent
github.com/patrickmn/go-cache v2.1.0+incompatible // @grafana/alerting-backend
@@ -342,23 +342,23 @@ require (
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect
github.com/at-wat/mqtt-go v0.19.6 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.17 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 // indirect
github.com/aws/aws-sdk-go-v2/config v1.31.10 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 // indirect
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8 // indirect
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84 // indirect
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 // indirect
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 // indirect
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 // indirect
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17 // indirect
github.com/aws/aws-sdk-go-v2/service/kms v1.41.2 // indirect
github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 // indirect
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4 // indirect
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0 // indirect
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 // indirect
github.com/axiomhq/hyperloglog v0.0.0-20240507144631-af9851f82b27 // indirect
github.com/bahlo/generic-list-go v0.2.0 // indirect
github.com/barkimedes/go-deepcopy v0.0.0-20220514131651-17c30cfc62df // indirect
@@ -456,6 +456,7 @@ require (
github.com/gopherjs/gopherjs v1.17.2 // indirect
github.com/grafana/jsonparser v0.0.0-20240425183733-ea80629e1a32 // indirect
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc // indirect
github.com/grafana/sqlds/v4 v4.2.7 // indirect
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.1-0.20191002090509-6af20e3a5340 // indirect
github.com/hashicorp/consul/api v1.31.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
@@ -471,7 +472,7 @@ require (
github.com/hashicorp/go-uuid v1.0.3 // indirect
github.com/hashicorp/golang-lru v1.0.2 // indirect
github.com/hashicorp/hcl v1.0.1-vault-7 // indirect
github.com/hashicorp/memberlist v0.5.2 // indirect
github.com/hashicorp/memberlist v0.5.3 // indirect
github.com/hashicorp/serf v0.10.2 // indirect
github.com/hashicorp/vault/api v1.20.0 // indirect
github.com/hashicorp/yamux v0.1.2 // indirect
@@ -516,7 +517,7 @@ require (
github.com/mdlayher/socket v0.4.1 // indirect
github.com/mdlayher/vsock v1.2.1 // indirect
github.com/mfridman/interpolate v0.0.2 // indirect
github.com/miekg/dns v1.1.63 // indirect
github.com/miekg/dns v1.1.68 // indirect
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 // indirect
github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 // indirect
github.com/mitchellh/copystructure v1.2.0 // indirect
@@ -549,17 +550,17 @@ require (
github.com/open-telemetry/opentelemetry-collector-contrib/pkg/translator/jaeger v0.124.1 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.1 // indirect
github.com/opentracing-contrib/go-stdlib v1.0.0 // indirect
github.com/opentracing-contrib/go-stdlib v1.1.0 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/perimeterx/marshmallow v1.1.5 // indirect
github.com/pierrec/lz4/v4 v4.1.22 // indirect
github.com/pires/go-proxyproto v0.7.0 // indirect
github.com/pires/go-proxyproto v0.8.1 // indirect
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/common/sigv4 v0.1.0 // indirect
github.com/prometheus/exporter-toolkit v0.14.0 // indirect
github.com/prometheus/exporter-toolkit v0.15.0 // indirect
github.com/prometheus/procfs v0.19.2 // indirect
github.com/protocolbuffers/txtpbfmt v0.0.0-20251124094003-fcb97cc64c7b // indirect
github.com/puzpuzpuz/xsync/v2 v2.5.1 // indirect
@@ -573,7 +574,7 @@ require (
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect
github.com/segmentio/asm v1.2.0 // indirect
github.com/segmentio/encoding v0.5.3 // indirect
github.com/sercand/kuberesolver/v6 v6.0.0 // indirect
github.com/sercand/kuberesolver/v6 v6.0.1 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
github.com/sethvargo/go-retry v0.3.0 // indirect
github.com/shadowspore/fossil-delta v0.0.0-20241213113458-1d797d70cbe3 // indirect
@@ -682,7 +683,6 @@ require (
github.com/go-openapi/swag/typeutils v0.25.4 // indirect
github.com/go-openapi/swag/yamlutils v0.25.4 // indirect
github.com/gophercloud/gophercloud/v2 v2.9.0 // indirect
github.com/grafana/sqlds/v5 v5.0.3 // indirect
github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 // indirect
github.com/magiconair/properties v1.8.10 // indirect
github.com/moby/go-archive v0.1.0 // indirect
+36 -22
View File
@@ -854,20 +854,20 @@ github.com/aws/aws-sdk-go-v2 v1.40.0 h1:/WMUA0kjhZExjOQN2z3oLALDREea1A7TobfuiBrK
github.com/aws/aws-sdk-go-v2 v1.40.0/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11 h1:12SpdwU8Djs+YGklkinSSlcrPyj3H4VifVsKf78KbwA=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.6.11/go.mod h1:dd+Lkp6YmMryke+qxW/VnKyhMBDTYP41Q2Bb+6gNZgY=
github.com/aws/aws-sdk-go-v2/config v1.31.17 h1:QFl8lL6RgakNK86vusim14P2k8BFSxjvUkcWLDjgz9Y=
github.com/aws/aws-sdk-go-v2/config v1.31.17/go.mod h1:V8P7ILjp/Uef/aX8TjGk6OHZN6IKPM5YW6S78QnRD5c=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21 h1:56HGpsgnmD+2/KpG0ikvvR8+3v3COCwaF4r+oWwOeNA=
github.com/aws/aws-sdk-go-v2/credentials v1.18.21/go.mod h1:3YELwedmQbw7cXNaII2Wywd+YY58AmLPwX4LzARgmmA=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13 h1:T1brd5dR3/fzNFAQch/iBKeX07/ffu/cLu+q+RuzEWk=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.13/go.mod h1:Peg/GBAQ6JDt+RoBf4meB1wylmAipb7Kg2ZFakZTlwk=
github.com/aws/aws-sdk-go-v2/config v1.31.10 h1:7LllDZAegXU3yk41mwM6KcPu0wmjKGQB1bg99bNdQm4=
github.com/aws/aws-sdk-go-v2/config v1.31.10/go.mod h1:Ge6gzXPjqu4v0oHvgAwvGzYcK921GU0hQM25WF/Kl+8=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14 h1:TxkI7QI+sFkTItN/6cJuMZEIVMFXeu2dI1ZffkXngKI=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14/go.mod h1:12x4Uw/vijC11XkctTjy92TNCQ+UnNJkT7fzX0Yd93E=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8 h1:gLD09eaJUdiszm7vd1btiQUYE0Hj+0I2b8AS+75z9AY=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8/go.mod h1:4RW3oMPt1POR74qVOC4SbubxAwdP4pCT0nSw3jycOU4=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84 h1:cTXRdLkpBanlDwISl+5chq5ui1d1YWg4PWMR9c3kXyw=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.84/go.mod h1:kwSy5X7tfIHN39uucmjQVs2LvDdXEjQucgQQEqCggEo=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14 h1:PZHqQACxYb8mYgms4RZbhZG0a7dPW06xOjmaH0EJC/I=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.14/go.mod h1:VymhrMJUWs69D8u0/lZ7jSB6WgaG/NqHi3gX0aYf6U0=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14 h1:bOS19y6zlJwagBfHxs0ESzr1XCOU2KXJCWcq3E2vfjY=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.14/go.mod h1:1ipeGBMAxZ0xcTm6y6paC2C/J6f6OO7LBODV9afuAyM=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4 h1:WKuaxf++XKWlHWu9ECbMlha8WOEGm0OUEZqm4K/Gcfk=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.4/go.mod h1:ZWy7j6v1vWGmPReu0iSGvRiise4YI5SkR3OHKTZ6Wuc=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3 h1:bIqFDwgGXXN1Kpp99pDOdKMTTb5d2KyU5X/BZxjOkRo=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36 h1:GMYy2EOWfzdP3wfVAGXBNKY5vK4K8vMET4sYOYltmqs=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.36/go.mod h1:gDhdAV6wL3PmPqBhiPbnlS447GoWs8HTTOYef9/9Inw=
github.com/aws/aws-sdk-go-v2/service/cloudwatch v1.45.3 h1:Nn3qce+OHZuMj/edx4its32uxedAmquCDxtZkrdeiD4=
@@ -876,12 +876,12 @@ github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.51.0 h1:e5cbPZYTIY2nUEFie
github.com/aws/aws-sdk-go-v2/service/cloudwatchlogs v1.51.0/go.mod h1:UseIHRfrm7PqeZo6fcTb6FUCXzCnh1KJbQbmOfxArGM=
github.com/aws/aws-sdk-go-v2/service/ec2 v1.225.2 h1:IfMb3Ar8xEaWjgH/zeVHYD8izwJdQgRP5mKCTDt4GNk=
github.com/aws/aws-sdk-go-v2/service/ec2 v1.225.2/go.mod h1:35jGWx7ECvCwTsApqicFYzZ7JFEnBc6oHUuOQ3xIS54=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3 h1:x2Ibm/Af8Fi+BH+Hsn9TXGdT+hKbDd5XOTZxTMxDk7o=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.3/go.mod h1:IW1jwyrQgMdhisceG8fQLmQIydcT/jWY21rFhzgaKwo=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1 h1:oegbebPEMA/1Jny7kvwejowCaHz1FWZAQ94WXFNCyTM=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1/go.mod h1:kemo5Myr9ac0U9JfSjMo9yHLtw+pECEHsFtJ9tqCEI8=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4 h1:nAP2GYbfh8dd2zGZqFRSMlq+/F6cMPBUuCsGAMkN074=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.4/go.mod h1:LT10DsiGjLWh4GbjInf9LQejkYEhBgBCjLG5+lvk4EE=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13 h1:kDqdFvMY4AtKoACfzIGD8A0+hbT41KTKF//gq7jITfM=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.13/go.mod h1:lmKuogqSU3HzQCwZ9ZtcqOc5XGMqtDK7OIc2+DxiUEg=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8 h1:M6JI2aGFEzYxsF6CXIuRBnkge9Wf9a2xU39rNeXgu10=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8/go.mod h1:Fw+MyTwlwjFsSTE31mH211Np+CUslml8mzc0AFEG09s=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17 h1:qcLWgdhq45sDM9na4cvXax9dyLitn8EYBRl8Ak4XtG4=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.17/go.mod h1:M+jkjBFZ2J6DJrjMv2+vkBbuht6kxJYtJiwoVgX4p4U=
github.com/aws/aws-sdk-go-v2/service/kms v1.41.2 h1:zJeUxFP7+XP52u23vrp4zMcVhShTWbNO8dHV6xCSvFo=
@@ -894,12 +894,12 @@ github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0 h1:0reDqfEN+tB+sozj2r92Bep8MEwBZ
github.com/aws/aws-sdk-go-v2/service/s3 v1.84.0/go.mod h1:kUklwasNoCn5YpyAqC/97r6dzTA1SRKJfKq16SXeoDU=
github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.40.1 h1:w6a0H79HrHf3lr+zrw+pSzR5B+caiQFAKiNHlrUcnoc=
github.com/aws/aws-sdk-go-v2/service/secretsmanager v1.40.1/go.mod h1:c6Vg0BRiU7v0MVhHupw90RyL120QBwAMLbDCzptGeMk=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1 h1:0JPwLz1J+5lEOfy/g0SURC9cxhbQ1lIMHMa+AHZSzz0=
github.com/aws/aws-sdk-go-v2/service/sso v1.30.1/go.mod h1:fKvyjJcz63iL/ftA6RaM8sRCtN4r4zl4tjL3qw5ec7k=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5 h1:OWs0/j2UYR5LOGi88sD5/lhN6TDLG6SfA7CqsQO9zF0=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.5/go.mod h1:klO+ejMvYsB4QATfEOIXk8WAEwN4N0aBfJpvC+5SZBo=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1 h1:mLlUgHn02ue8whiR4BmxxGJLR2gwU6s6ZzJ5wDamBUs=
github.com/aws/aws-sdk-go-v2/service/sts v1.39.1/go.mod h1:E19xDjpzPZC7LS2knI9E6BaRFDK43Eul7vd6rSq2HWk=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4 h1:FTdEN9dtWPB0EOURNtDPmwGp6GGvMqRJCAihkSl/1No=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4/go.mod h1:mYubxV9Ff42fZH4kexj43gFPhgc/LyC7KqvUKt1watc=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0 h1:I7ghctfGXrscr7r1Ga/mDqSJKm7Fkpl5Mwq79Z+rZqU=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0/go.mod h1:Zo9id81XP6jbayIFWNuDpA6lMBWhsVy+3ou2jLa4JnA=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5 h1:+LVB0xBqEgjQoqr9bGZbRzvg212B0f17JdflleJRNR4=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
github.com/aws/smithy-go v1.23.2 h1:Crv0eatJUQhaManss33hS5r40CG3ZFH+21XSkqMrIUM=
github.com/aws/smithy-go v1.23.2/go.mod h1:LEj2LM3rBRQJxPZTB4KuzZkaZYnZPnvgIhb4pu07mx0=
github.com/axiomhq/hyperloglog v0.0.0-20191112132149-a4c4c47bc57f/go.mod h1:2stgcRjl6QmW+gU2h5E7BQXg4HU0gzxKWDuT5HviN9s=
@@ -1629,14 +1629,20 @@ github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopK
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib v0.0.0-20260106131612-bb61e476969f h1:OfVtnO3+Ficm7W69dFD5IaZWlMvOLIWBBnppE99dVkU=
github.com/grafana/authlib v0.0.0-20260106131612-bb61e476969f/go.mod h1:KUNx2Qz7mgh2tm2/TJXx0+uq5SkCrquCFI+dHln2Q50=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4/go.mod h1:qeWYbnWzaYGl88JlL9+DsP1GT2Cudm58rLtx13fKZdw=
github.com/grafana/authlib/types v0.0.0-20251203163023-dd5a97c606e3/go.mod h1:CZ5McGzO/q6lnRb8xvTODCC2bJniQoQ+gho0AVZC/zY=
github.com/grafana/authlib/types v0.0.0-20260106131612-bb61e476969f h1:5ZI6e22sGdg36MAIMJkH6PUHtZU/QuwAScNfgWNlK0I=
github.com/grafana/authlib/types v0.0.0-20260106131612-bb61e476969f/go.mod h1:j+YTXmAcD4zCNyl4QSNqYSEe/q9KgrH1btodnhK29hI=
github.com/grafana/dataplane/examples v0.0.1 h1:K9M5glueWyLoL4//H+EtTQq16lXuHLmOhb6DjSCahzA=
github.com/grafana/dataplane/examples v0.0.1/go.mod h1:h5YwY8s407/17XF5/dS8XrUtsTVV2RnuW8+m1Mp46mg=
github.com/grafana/dataplane/sdata v0.0.9 h1:AGL1LZnCUG4MnQtnWpBPbQ8ZpptaZs14w6kE/MWfg7s=
github.com/grafana/dataplane/sdata v0.0.9/go.mod h1:Jvs5ddpGmn6vcxT7tCTWAZ1mgi4sbcdFt9utQx5uMAU=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 h1:jSojuc7njleS3UOz223WDlXOinmuLAIPI0z2vtq8EgI=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4/go.mod h1:VahT+GtfQIM+o8ht2StR6J9g+Ef+C2Vokh5uuSmOD/4=
github.com/grafana/dskit v0.0.0-20251204003651-27988664e6ff/go.mod h1:/pHIcyeZJBZbtboXOjRtPaMl5KK+2VRdNJbCHDkpDYs=
github.com/grafana/e2e v0.1.1 h1:/b6xcv5BtoBnx8cZnCiey9DbjEc8z7gXHO5edoeRYxc=
github.com/grafana/e2e v0.1.1/go.mod h1:RpNLgae5VT+BUHvPE+/zSypmOXKwEu4t+tnEMS1ATaE=
github.com/grafana/go-mysql-server v0.20.1-grafana1 h1:yA4Mzt+tTdIlQutBUaiPnepULPQ7CS4hMu2GOpHqT6s=
@@ -1645,14 +1651,15 @@ github.com/grafana/gofpdf v0.0.0-20250307124105-3b9c5d35577f h1:5xkjl5Y/j2QefJKO
github.com/grafana/gofpdf v0.0.0-20250307124105-3b9c5d35577f/go.mod h1:+O5QxOwwgP10jedZHapzXY+IPKTnzHBtIs5UUb9G+kI=
github.com/grafana/gomemcache v0.0.0-20250318131618-74242eea118d h1:oXRJlb9UjVsl6LhqBdbyAQ9YFhExwsj4bjh5vwMNRZY=
github.com/grafana/gomemcache v0.0.0-20250318131618-74242eea118d/go.mod h1:j/s0jkda4UXTemDs7Pgw/vMT06alWc42CHisvYac0qw=
github.com/grafana/gomemcache v0.0.0-20251127154401-74f93547077b/go.mod h1:j/s0jkda4UXTemDs7Pgw/vMT06alWc42CHisvYac0qw=
github.com/grafana/grafana-api-golang-client v0.27.0 h1:zIwMXcbCB4n588i3O2N6HfNcQogCNTd/vPkEXTr7zX8=
github.com/grafana/grafana-api-golang-client v0.27.0/go.mod h1:uNLZEmgKtTjHBtCQMwNn3qsx2mpMb8zU+7T4Xv3NR9Y=
github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfUHB32u2ZMo=
github.com/grafana/grafana-app-sdk v0.48.7/go.mod h1:DWsaaH39ZMHwSOSoUBaeW8paMrRaYsjRYlLwCJYd78k=
github.com/grafana/grafana-app-sdk/logging v0.48.7 h1:Oa5qg473gka5+W/WQk61Xbw4YdAv+wV2Z4bJtzeCaQw=
github.com/grafana/grafana-app-sdk/logging v0.48.7/go.mod h1:5u3KalezoBAAo2Y3ytDYDAIIPvEqFLLDSxeiK99QxDU=
github.com/grafana/grafana-aws-sdk v1.4.2 h1:GrUEoLbs46r8rG/GZL4L2b63Bo+rkIYKdtCT7kT5KkM=
github.com/grafana/grafana-aws-sdk v1.4.2/go.mod h1:1qnZdYs6gQzxxF0dDodaE7Rn9fiMzuhwvtaAZ7ySnhY=
github.com/grafana/grafana-aws-sdk v1.3.0 h1:/bfJzP93rCel1GbWoRSq0oUo424MZXt8jAp2BK9w8tM=
github.com/grafana/grafana-aws-sdk v1.3.0/go.mod h1:VGycF0JkCGKND2O5je1ucOqPJ0ZNhZYzV3c2bNBAaGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1 h1:FFcEA01tW+SmuJIuDbHOdgUBL+d7DPrZ2N4zwzPhfGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.3.1/go.mod h1:Oi4anANlCuTCc66jCyqIzfVbgLXFll8Wja+Y4vfANlc=
github.com/grafana/grafana-cloud-migration-snapshot v1.9.0 h1:JOzchPgptwJdruYoed7x28lFDwhzs7kssResYsnC0iI=
@@ -1691,8 +1698,8 @@ github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc h1:GN2Lv3MGO7AS6PrR
github.com/grafana/regexp v0.0.0-20240518133315-a468a5bfb3bc/go.mod h1:+JKpmjMGhpgPL+rXZ5nsZieVzvarn86asRlBg4uNGnk=
github.com/grafana/saml v0.4.15-0.20240917091248-ae3bbdad8a56 h1:SDGrP81Vcd102L3UJEryRd1eestRw73wt+b8vnVEFe0=
github.com/grafana/saml v0.4.15-0.20240917091248-ae3bbdad8a56/go.mod h1:S4+611dxnKt8z/ulbvaJzcgSHsuhjVc1QHNTcr1R7Fw=
github.com/grafana/sqlds/v5 v5.0.3 h1:+yUMUxfa0WANQsmS9xtTFSRX1Q55Iv1B9EjlrW4VlBU=
github.com/grafana/sqlds/v5 v5.0.3/go.mod h1:GKeTTiC+GeR1X0z3f0Iee+hZnNgN62uQpj5XVMx5Uew=
github.com/grafana/sqlds/v4 v4.2.7 h1:sFQhsS7DBakNMdxa++yOfJ9BVvkZwFJ0B95o57K0/XA=
github.com/grafana/sqlds/v4 v4.2.7/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
github.com/grafana/tempo v1.5.1-0.20250529124718-87c2dc380cec h1:wnzJov9RhSHGaTYGzTygL4qq986fLen8xSqnQgaMd28=
github.com/grafana/tempo v1.5.1-0.20250529124718-87c2dc380cec/go.mod h1:j1IY7J2rUz7TcTjFVVx6HCpyTlYOJPtXuGRZ7sI+vSo=
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDaL56wXCB/5+wF6uHfaI=
@@ -1797,6 +1804,7 @@ github.com/hashicorp/memberlist v0.3.0/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOn
github.com/hashicorp/memberlist v0.5.0/go.mod h1:yvyXLpo0QaGE59Y7hDTsTzDD25JYBZ4mHgHUZ8lrOI0=
github.com/hashicorp/memberlist v0.5.2 h1:rJoNPWZ0juJBgqn48gjy59K5H4rNgvUoM1kUD7bXiuI=
github.com/hashicorp/memberlist v0.5.2/go.mod h1:Ri9p/tRShbjYnpNf4FFPXG7wxEGY4Nrcn6E7jrVa//4=
github.com/hashicorp/memberlist v0.5.3/go.mod h1:h60o12SZn/ua/j0B6iKAZezA4eDaGsIuPO70eOaJ6WE=
github.com/hashicorp/nomad/api v0.0.0-20241218080744-e3ac00f30eec h1:+YBzb977VrmffaCX/OBm17dEVJUcWn5dW+eqs3aIJ/A=
github.com/hashicorp/nomad/api v0.0.0-20241218080744-e3ac00f30eec/go.mod h1:svtxn6QnrQ69P23VvIWMR34tg3vmwLz4UdUzm1dSCgE=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
@@ -2051,6 +2059,7 @@ github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKju
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
github.com/miekg/dns v1.1.63 h1:8M5aAw6OMZfFXTT7K5V0Eu5YiiL8l7nUAkyN6C9YwaY=
github.com/miekg/dns v1.1.63/go.mod h1:6NGHfjhpmr5lt3XPLuyfDJi5AXbNIPM9PY6H6sF1Nfs=
github.com/miekg/dns v1.1.68/go.mod h1:fujopn7TB3Pu3JM69XaawiU0wqjpL9/8xGop5UrTPps=
github.com/migueleliasweb/go-github-mock v1.1.0 h1:GKaOBPsrPGkAKgtfuWY8MclS1xR6MInkx1SexJucMwE=
github.com/migueleliasweb/go-github-mock v1.1.0/go.mod h1:pYe/XlGs4BGMfRY4vmeixVsODHnVDDhJ9zoi0qzSMHc=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
@@ -2205,9 +2214,11 @@ github.com/openfga/openfga v1.11.1 h1:+cJBPi/J+RWPRg+cXOjwWEwjauiW8rdE3kEzcFy1ME
github.com/openfga/openfga v1.11.1/go.mod h1:MuTGr/ghY7t2sEGwS/59pq9SkqO0QY1kQLIe8Upt+G8=
github.com/opentracing-contrib/go-grpc v0.1.1 h1:Ws7IN1zyiL1DFqKQPhRXuKe5pLYzMfdxnC1qtajE2PE=
github.com/opentracing-contrib/go-grpc v0.1.1/go.mod h1:Nu6sz+4zzgxXu8rvKfnwjBEmHsuhTigxRwV2RhELrS8=
github.com/opentracing-contrib/go-grpc v0.1.2/go.mod h1:glU6rl1Fhfp9aXUHkE36K2mR4ht8vih0ekOVlWKEUHM=
github.com/opentracing-contrib/go-stdlib v0.0.0-20190519235532-cf7a6c988dc9/go.mod h1:PLldrQSroqzH70Xl+1DQcGnefIbqsKR7UDaiux3zV+w=
github.com/opentracing-contrib/go-stdlib v1.0.0 h1:TBS7YuVotp8myLon4Pv7BtCBzOTo1DeZCld0Z63mW2w=
github.com/opentracing-contrib/go-stdlib v1.0.0/go.mod h1:qtI1ogk+2JhVPIXVc6q+NHziSmy2W5GbdQZFUHADCBU=
github.com/opentracing-contrib/go-stdlib v1.1.0/go.mod h1:S0p+X9p6dcBkoMTL+Qq2VOvxKs9ys5PpYWXWqlCS0bQ=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs=
github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc=
@@ -2241,6 +2252,7 @@ github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU
github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4=
github.com/pires/go-proxyproto v0.7.0 h1:IukmRewDQFWC7kfnb66CSomk2q/seBuilHBYFwyq0Hs=
github.com/pires/go-proxyproto v0.7.0/go.mod h1:Vz/1JPY/OACxWGQNIRY2BeyDmpoaWmEP40O9LbuiFR4=
github.com/pires/go-proxyproto v0.8.1/go.mod h1:ZKAAyp3cgy5Y5Mo4n9AlScrkCZwUy0g3Jf+slqQVcuU=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ=
github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
@@ -2319,6 +2331,7 @@ github.com/prometheus/common/sigv4 v0.1.0/go.mod h1:2Jkxxk9yYvCkE5G1sQT7GuEXm57J
github.com/prometheus/exporter-toolkit v0.11.0/go.mod h1:BVnENhnNecpwoTLiABx7mrPB/OLRIgN74qlQbV+FK1Q=
github.com/prometheus/exporter-toolkit v0.14.0 h1:NMlswfibpcZZ+H0sZBiTjrA3/aBFHkNZqE+iCj5EmRg=
github.com/prometheus/exporter-toolkit v0.14.0/go.mod h1:Gu5LnVvt7Nr/oqTBUC23WILZepW0nffNo10XdhQcwWA=
github.com/prometheus/exporter-toolkit v0.15.0/go.mod h1:OyRWd2iTo6Xge9Kedvv0IhCrJSBu36JCfJ2yVniRIYk=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20181204211112-1dc9a6cbc91a/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
@@ -2403,6 +2416,7 @@ github.com/segmentio/encoding v0.5.3 h1:OjMgICtcSFuNvQCdwqMCv9Tg7lEOXGwm1J5RPQcc
github.com/segmentio/encoding v0.5.3/go.mod h1:HS1ZKa3kSN32ZHVZ7ZLPLXWvOVIiZtyJnO1gPH1sKt0=
github.com/sercand/kuberesolver/v6 v6.0.0 h1:ScvS2Ga9snVkpOahln/BCLySr3/iBAHJf25u66DweZ0=
github.com/sercand/kuberesolver/v6 v6.0.0/go.mod h1:Dxkqms3OJadP5zirIBPLi9FV8Qpys3T3w40XPEcVsu0=
github.com/sercand/kuberesolver/v6 v6.0.1/go.mod h1:C0tsTuRMONSY+Xf7pv7RMW1/JlewY1+wS8SZE+1lf1s=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8=
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4=
github.com/sethvargo/go-retry v0.3.0 h1:EEt31A35QhrcRZtrYFDTBg91cqZVnFL2navjDrah2SE=
+15 -18
View File
@@ -317,6 +317,7 @@ github.com/GoogleCloudPlatform/opentelemetry-operations-go/exporter/trace v1.29.
github.com/GoogleCloudPlatform/opentelemetry-operations-go/internal/resourcemapping v0.51.0/go.mod h1:otE2jQekW/PqXk1Awf5lmfokJx4uwuqcj1ab5SpGeW0=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/propagator v0.53.0 h1:RAHqDHJmNMLe6JvDoRIlXmb72w+62Ue/k5p/qP9yfAg=
github.com/GoogleCloudPlatform/opentelemetry-operations-go/propagator v0.53.0/go.mod h1:dtCRwgvytbGKWdlrjMOg9geBoRwRpCYWIOM/JhVsDIc=
github.com/HdrHistogram/hdrhistogram-go v1.2.0/go.mod h1:CiIeGiHSd06zjX+FypuEJ5EQ07KKtxZ+8J6hszwVQig=
github.com/IBM/go-sdk-core/v5 v5.17.4 h1:VGb9+mRrnS2HpHZFM5hy4J6ppIWnwNrw0G+tLSgcJLc=
github.com/IBM/go-sdk-core/v5 v5.17.4/go.mod h1:KsAAI7eStAWwQa4F96MLy+whYSh39JzNjklZRbN/8ns=
github.com/IBM/ibm-cos-sdk-go v1.11.0 h1:Jp55NLN3OvBwucMGpP5wNybyjncsmTZ9+GPHai/1cE8=
@@ -424,30 +425,23 @@ github.com/aws/aws-msk-iam-sasl-signer-go v1.0.1/go.mod h1:MVYeeOhILFFemC/XlYTCl
github.com/aws/aws-sdk-go-v2 v1.36.5/go.mod h1:EYrzvCCN9CMUTa5+6lf6MM4tq3Zjp8UhSGR/cBsjai0=
github.com/aws/aws-sdk-go-v2 v1.38.1/go.mod h1:9Q0OoGQoboYIAJyslFyF1f5K1Ryddop8gqMhWx/n4Wg=
github.com/aws/aws-sdk-go-v2 v1.39.1/go.mod h1:sDioUELIUO9Znk23YVmIk86/9DOpkbyyVb1i/gUNFXY=
github.com/aws/aws-sdk-go-v2 v1.39.6/go.mod h1:c9pm7VwuW0UPxAEYGyTmyurVcNrbF6Rt/wixFqDhcjE=
github.com/aws/aws-sdk-go-v2/config v1.29.17/go.mod h1:9P4wwACpbeXs9Pm9w1QTh6BwWwJjwYvJ1iCt5QbCXh8=
github.com/aws/aws-sdk-go-v2/config v1.31.2/go.mod h1:17ft42Yb2lF6OigqSYiDAiUcX4RIkEMY6XxEMJsrAes=
github.com/aws/aws-sdk-go-v2/config v1.31.10/go.mod h1:Ge6gzXPjqu4v0oHvgAwvGzYcK921GU0hQM25WF/Kl+8=
github.com/aws/aws-sdk-go-v2/credentials v1.17.70/go.mod h1:M+lWhhmomVGgtuPOhO85u4pEa3SmssPTdcYpP/5J/xc=
github.com/aws/aws-sdk-go-v2/credentials v1.18.6/go.mod h1:/jdQkh1iVPa01xndfECInp1v1Wnp70v3K4MvtlLGVEc=
github.com/aws/aws-sdk-go-v2/credentials v1.18.14/go.mod h1:12x4Uw/vijC11XkctTjy92TNCQ+UnNJkT7fzX0Yd93E=
github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.19.5 h1:oUEqVqonG3xuarrsze1KVJ30KagNYDemikTbdu8KlN8=
github.com/aws/aws-sdk-go-v2/feature/dynamodb/attributevalue v1.19.5/go.mod h1:VNM08cHlOsIbSHRqb6D/M2L4kKXfJv3A2/f0GNbOQSc=
github.com/aws/aws-sdk-go-v2/feature/dynamodb/expression v1.7.87 h1:oDPArGgCrG/4aTi86ij3S2PB59XXkTSKYVNQlmqRHXQ=
github.com/aws/aws-sdk-go-v2/feature/dynamodb/expression v1.7.87/go.mod h1:ZeQC4gVarhdcWeM1c90DyBLaBCNhEeAbKUXwVI/byvw=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.32/go.mod h1:h4Sg6FQdexC1yYG9RDnOvLbW1a/P986++/Y/a+GyEM8=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.4/go.mod h1:9xzb8/SV62W6gHQGC/8rrvgNXU6ZoYM3sAIJCIrXJxY=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.18.8/go.mod h1:4RW3oMPt1POR74qVOC4SbubxAwdP4pCT0nSw3jycOU4=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.17.69/go.mod h1:GJj8mmO6YT6EqgduWocwhMoxTLFitkhIrK+owzrYL2I=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.36/go.mod h1:Q1lnJArKRXkenyog6+Y+zr7WDpk4e6XlR6gs20bbeNo=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.4/go.mod h1:l4bdfCD7XyyZA9BolKBo1eLqgaJxl0/x91PL4Yqe0ao=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.8/go.mod h1:KcGkXFVU8U28qS4KvLEcPxytPZPBcRawaH2Pf/0jptE=
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.13/go.mod h1:oGnKwIYZ4XttyU2JWxFrwvhF6YKiK/9/wmE3v3Iu9K8=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.36/go.mod h1:UdyGa7Q91id/sdyHPwth+043HhmP6yP9MBHgbZM0xo8=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.4/go.mod h1:yDmJgqOiH4EA8Hndnv4KwAo8jCGTSnM5ASG1nBI+toA=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.8/go.mod h1:JnA+hPWeYAVbDssp83tv+ysAG8lTfLVXvSsyKg/7xNA=
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.13/go.mod h1:YE94ZoDArI7awZqJzBAZ3PDD2zSfuP7w6P2knOzIn8M=
github.com/aws/aws-sdk-go-v2/internal/ini v1.8.3/go.mod h1:H5O/EsxDWyU+LP/V8i5sm8cxoZgc2fdNR9bxlOFrQTo=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.3.34/go.mod h1:zf7Vcd1ViW7cPqYWEHLHJkS50X0JS2IKz9Cgaj6ugrs=
github.com/aws/aws-sdk-go-v2/service/dynamodb v1.44.0 h1:A99gjqZDbdhjtjJVZrmVzVKO2+p3MSg35bDWtbMQVxw=
github.com/aws/aws-sdk-go-v2/service/dynamodb v1.44.0/go.mod h1:mWB0GE1bqcVSvpW7OtFA0sKuHk52+IqtnsYU2jUfYAs=
@@ -455,13 +449,11 @@ github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.26.0 h1:0wOCTKrmwkyC8Bk7
github.com/aws/aws-sdk-go-v2/service/dynamodbstreams v1.26.0/go.mod h1:He/RikglWUczbkV+fkdpcV/3GdL/rTRNVy7VaUiezMo=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.4/go.mod h1:/xFi9KtvBXP97ppCz1TAEvU1Uf66qvid89rbem3wCzQ=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.0/go.mod h1:eb3gfbVIxIoGgJsi9pGne19dhCBpK6opTYpQqAmdy44=
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.1/go.mod h1:kemo5Myr9ac0U9JfSjMo9yHLtw+pECEHsFtJ9tqCEI8=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.7.0/go.mod h1:iu6FSzgt+M2/x3Dk8zhycdIcHjEFb36IS8HVUVFoMg0=
github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.17 h1:x187MqiHwBGjMGAed8Y8K1VGuCtFvQvXb24r+bwmSdo=
github.com/aws/aws-sdk-go-v2/service/internal/endpoint-discovery v1.10.17/go.mod h1:mC9qMbA6e1pwEq6X3zDGtZRXMG2YaElJkbJlMVHLs5I=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.17/go.mod h1:ygpklyoaypuyDvOM5ujWGrYWpAK3h7ugnmKCU/76Ys4=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.4/go.mod h1:nLEfLnVMmLvyIG58/6gsSA03F1voKGaCfHV7+lR8S7s=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.8/go.mod h1:Fw+MyTwlwjFsSTE31mH211Np+CUslml8mzc0AFEG09s=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.18.15/go.mod h1:ZH34PJUc8ApjBIfgQCFvkWcUDBtl/WTD+uiYHjd8igA=
github.com/aws/aws-sdk-go-v2/service/kinesis v1.33.0 h1:JPXkrQk5OS/+Q81fKH97Ll/Vmmy0p9vwHhxw+V+tVjg=
github.com/aws/aws-sdk-go-v2/service/kinesis v1.33.0/go.mod h1:dJngkoVMrq0K7QvRkdRZYM4NUp6cdWa2GBdpm8zoY8U=
@@ -495,13 +487,10 @@ github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1 h1:OwMzNDe5VVTXD4kGmeK/FtqAITiV
github.com/aws/aws-sdk-go-v2/service/ssm v1.60.1/go.mod h1:IyVabkWrs8SNdOEZLyFFcW9bUltV4G6OQS0s6H20PHg=
github.com/aws/aws-sdk-go-v2/service/sso v1.25.5/go.mod h1:b7SiVprpU+iGazDUqvRSLf5XmCdn+JtT1on7uNL6Ipc=
github.com/aws/aws-sdk-go-v2/service/sso v1.28.2/go.mod h1:n9bTZFZcBa9hGGqVz3i/a6+NG0zmZgtkB9qVVFDqPA8=
github.com/aws/aws-sdk-go-v2/service/sso v1.29.4/go.mod h1:mYubxV9Ff42fZH4kexj43gFPhgc/LyC7KqvUKt1watc=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.30.3/go.mod h1:vq/GQR1gOFLquZMSrxUK/cpvKCNVYibNyJ1m7JrU88E=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.33.2/go.mod h1:eknndR9rU8UpE/OmFpqU78V1EcXPKFTTm5l/buZYgvM=
github.com/aws/aws-sdk-go-v2/service/ssooidc v1.35.0/go.mod h1:Zo9id81XP6jbayIFWNuDpA6lMBWhsVy+3ou2jLa4JnA=
github.com/aws/aws-sdk-go-v2/service/sts v1.34.0/go.mod h1:7ph2tGpfQvwzgistp2+zga9f+bCjlQJPkPUmMgDSD7w=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.0/go.mod h1:bEPcjW7IbolPfK67G1nilqWyoxYMSPrDiIQ3RdIdKgo=
github.com/aws/aws-sdk-go-v2/service/sts v1.38.5/go.mod h1:xoaxeqnnUaZjPjaICgIy5B+MHCSb/ZSOn4MvkFNOUA0=
github.com/aws/smithy-go v1.22.4/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=
github.com/aws/smithy-go v1.22.5 h1:P9ATCXPMb2mPjYBgueqJNCA5S9UfktsW0tTxi+a7eqw=
github.com/aws/smithy-go v1.22.5/go.mod h1:t1ufH5HMublsJYulve2RKmHDC15xu1f26kHCp/HgceI=
@@ -891,7 +880,6 @@ github.com/gookit/color v1.4.2/go.mod h1:fqRyamkC1W8uxl+lxCQxOT09l/vYfZ+QeiX3rKQ
github.com/gookit/color v1.5.0/go.mod h1:43aQb+Zerm/BWh2GnrgOQm7ffz7tvQXEKV6BFMl7wAo=
github.com/gookit/color v1.5.4 h1:FZmqs7XOyGgCAxmWyPslpiok1k05wmY3SJTytgvYFs0=
github.com/gookit/color v1.5.4/go.mod h1:pZJOeOS8DM43rXbp4AZo1n9zCU2qjpcRko0b6/QJi9w=
github.com/gophercloud/gophercloud v1.13.0 h1:8iY9d1DAbzMW6Vok1AxbbK5ZaUjzMp0tdyt4fX9IeJ0=
github.com/gophercloud/gophercloud v1.13.0/go.mod h1:aAVqcocTSXh2vYFZ1JTvx4EQmfgzxRcNupUfxZbBNDM=
github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
github.com/gorilla/css v1.0.0 h1:BQqNyPTi50JCFMTw/b67hByjMVXZRwGha6wxVGkeihY=
@@ -902,19 +890,23 @@ github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB7
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/grafana/alerting v0.0.0-20250729175202-b4b881b7b263/go.mod h1:VKxaR93Gff0ZlO2sPcdPVob1a/UzArFEW5zx3Bpyhls=
github.com/grafana/alerting v0.0.0-20251009192429-9427c24835ae/go.mod h1:VGjS5gDwWEADPP6pF/drqLxEImgeuHlEW5u8E5EfIrM=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250710201142-9542f2f28d43/go.mod h1:1fWkOiL+m32NBgRHZtlZGz2ji868tPZACYbqP3nBRJI=
github.com/grafana/authlib/types v0.0.0-20250710201142-9542f2f28d43/go.mod h1:qeWYbnWzaYGl88JlL9+DsP1GT2Cudm58rLtx13fKZdw=
github.com/grafana/authlib/types v0.0.0-20250926065801-df98203cff37/go.mod h1:qeWYbnWzaYGl88JlL9+DsP1GT2Cudm58rLtx13fKZdw=
github.com/grafana/authlib/types v0.0.0-20251203163023-dd5a97c606e3 h1:T4AMrL8ZB1U25m/+FOmkqWPnz0X7u/Oqj1ISg4OrS2c=
github.com/grafana/cloudflare-go v0.0.0-20230110200409-c627cf6792f2 h1:qhugDMdQ4Vp68H0tp/0iN17DM2ehRo1rLEdOFe/gB8I=
github.com/grafana/cloudflare-go v0.0.0-20230110200409-c627cf6792f2/go.mod h1:w/aiO1POVIeXUQyl0VQSZjl5OAGDTL5aX+4v0RA1tcw=
github.com/grafana/codejen v0.0.4-0.20230321061741-77f656893a3d/go.mod h1:zmwwM/DRyQB7pfuBjTWII3CWtxcXh8LTwAYGfDfpR6s=
github.com/grafana/cog v0.0.43/go.mod h1:TDunc7TYF7EfzjwFOlC5AkMe3To/U2KqyyG3QVvrF38=
github.com/grafana/dskit v0.0.0-20250611075409-46f51e1ce914/go.mod h1:OiN4P4aC6LwLzLbEupH3Ue83VfQoNMfG48rsna8jI/E=
github.com/grafana/dskit v0.0.0-20250818234656-8ff9c6532e85/go.mod h1:kImsvJ1xnmeT9Z6StK+RdEKLzlpzBsKwJbEQfmBJdFs=
github.com/grafana/dskit v0.0.0-20251204003651-27988664e6ff h1:eDbrQsfY1Y3vMfuy5suGI2DRNC1DFBcZMFMlNbPrdiE=
github.com/grafana/go-gelf/v2 v2.0.1 h1:BOChP0h/jLeD+7F9mL7tq10xVkDG15he3T1zHuQaWak=
github.com/grafana/go-gelf/v2 v2.0.1/go.mod h1:lexHie0xzYGwCgiRGcvZ723bSNyNI8ZRD4s0CLobh90=
github.com/grafana/go-mysql-server v0.20.1-0.20251027172658-317a8d46ffa4/go.mod h1:EeYR0apo+8j2Dyxmn2ghkPlirO2S5mT1xHBrA+Efys8=
github.com/grafana/gomemcache v0.0.0-20250228145437-da7b95fd2ac1/go.mod h1:j/s0jkda4UXTemDs7Pgw/vMT06alWc42CHisvYac0qw=
github.com/grafana/gomemcache v0.0.0-20251127154401-74f93547077b h1:5qp8/5YPt/Z2RW5QHsxvwE05+LWQYIXydP2MwOkMfb8=
github.com/grafana/grafana-app-sdk v0.40.1/go.mod h1:4P8h7VB6KcDjX9bAoBQc6IP8iNylxe6bSXLR9gA39gM=
github.com/grafana/grafana-app-sdk v0.40.2/go.mod h1:BbNXPNki3mtbkWxYqJsyA1Cj9AShSyaY33z8WkyfVv0=
github.com/grafana/grafana-app-sdk v0.41.0 h1:SYHN3U7B1myRKY3UZZDkFsue9TDmAOap0UrQVTqtYBU=
@@ -946,7 +938,6 @@ github.com/grafana/grafana-aws-sdk v1.0.2 h1:98eBuHYFmgvH0xO9kKf4RBsEsgQRp8EOA/9
github.com/grafana/grafana-aws-sdk v1.0.2/go.mod h1:hO7q7yWV+t6dmiyJjMa3IbuYnYkBua+G/IAlOPVIYKE=
github.com/grafana/grafana-aws-sdk v1.1.0/go.mod h1:7e+47EdHynteYWGoT5Ere9KeOXQObsk8F0vkOLQ1tz8=
github.com/grafana/grafana-aws-sdk v1.2.0/go.mod h1:bBo7qOmM3f61vO+2JxTolNUph1l2TmtzmWcU9/Im+8A=
github.com/grafana/grafana-aws-sdk v1.3.0/go.mod h1:VGycF0JkCGKND2O5je1ucOqPJ0ZNhZYzV3c2bNBAaGk=
github.com/grafana/grafana-azure-sdk-go/v2 v2.1.6/go.mod h1:V7y2BmsWxS3A9Ohebwn4OiSfJJqi//4JQydQ8fHTduo=
github.com/grafana/grafana-azure-sdk-go/v2 v2.2.0/go.mod h1:H9sVh9A4yg5egMGZeh0mifxT1Q/uqwKe1LBjBJU6pN8=
github.com/grafana/grafana-plugin-sdk-go v0.263.0/go.mod h1:U43Cnrj/9DNYyvFcNdeUWNjMXTKNB0jcTcQGpWKd2gw=
@@ -994,7 +985,6 @@ github.com/grafana/prometheus-alertmanager v0.25.1-0.20250604130045-92c8f6389b36
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250604130045-92c8f6389b36/go.mod h1:O/QP1BCm0HHIzbKvgMzqb5sSyH88rzkFk84F4TfJjBU=
github.com/grafana/pyroscope-go/godeltaprof v0.1.8/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
github.com/grafana/sqlds/v4 v4.2.4/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
github.com/grafana/sqlds/v4 v4.2.7/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
github.com/grafana/tail v0.0.0-20230510142333-77b18831edf0 h1:bjh0PVYSVVFxzINqPFYJmAmJNrWPgnVjuSdYJGHmtFU=
github.com/grafana/tail v0.0.0-20230510142333-77b18831edf0/go.mod h1:7t5XR+2IA8P2qggOAHTj/GCZfoLBle3OvNSYh1VkRBU=
github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 h1:+ngKgrYPPJrOjhax5N+uePQ0Fh1Z7PheYoUI/0nzkPA=
@@ -1038,6 +1028,7 @@ github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI
github.com/hashicorp/mdns v1.0.5 h1:1M5hW1cunYeoXOqHwEb/GBDDHAFo0Yqb/uz/beC6LbE=
github.com/hashicorp/mdns v1.0.5/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc=
github.com/hashicorp/memberlist v0.3.1/go.mod h1:MS2lj3INKhZjWNqd3N0m3J+Jxf3DAOnAH9VT3Sh9MUE=
github.com/hashicorp/memberlist v0.5.3 h1:tQ1jOCypD0WvMemw/ZhhtH+PWpzcftQvgCorLu0hndk=
github.com/hashicorp/raft v1.7.0 h1:4u24Qn6lQ6uwziM++UgsyiT64Q8GyRn43CV41qPiz1o=
github.com/hashicorp/raft v1.7.0/go.mod h1:N1sKh6Vn47mrWvEArQgILTyng8GoDRNYlgKyK7PMjs0=
github.com/hashicorp/raft-wal v0.4.1 h1:aU8XZ6x8R9BAIB/83Z1dTDtXvDVmv9YVYeXxd/1QBSA=
@@ -1198,6 +1189,7 @@ github.com/mfridman/xflag v0.1.0/go.mod h1:/483ywM5ZO5SuMVjrIGquYNE5CzLrj5Ux/LxW
github.com/microcosm-cc/bluemonday v1.0.25 h1:4NEwSfiJ+Wva0VxN5B8OwMicaJvD8r9tlJWm9rtloEg=
github.com/microcosm-cc/bluemonday v1.0.25/go.mod h1:ZIOjCQp1OrzBBPIJmfX4qDYFuhU02nx4bn030ixfHLE=
github.com/miekg/dns v1.1.56/go.mod h1:cRm6Oo2C8TY9ZS/TqsSrseAcncm74lfK5G+ikN2SWWY=
github.com/miekg/dns v1.1.68 h1:jsSRkNozw7G/mnmXULynzMNIsgY2dHC8LO6U6Ij2JEA=
github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs=
github.com/minio/minio-go/v7 v7.0.75/go.mod h1:qydcVzV8Hqtj1VtEocfxbmVFa2siu6HGa+LDEPogjD8=
github.com/minio/sha256-simd v0.1.1 h1:5QHSlgo3nt5yKOJrC7W8w7X+NFl8cMPZm96iu8kKUJU=
@@ -1357,6 +1349,8 @@ github.com/openfga/api/proto v0.0.0-20250127102726-f9709139a369/go.mod h1:m74TNg
github.com/openfga/language/pkg/go v0.2.0-beta.2.0.20250428093642-7aeebe78bbfe/go.mod h1:5Z0pbTT7Jz/oQFLfadb+C5t5NwHrduAO7j7L07Ec1GM=
github.com/openfga/openfga v1.10.0/go.mod h1:6/m4GTwQsqECsGYQVD3t5sCX97rh3smnmxbMa3YAtJk=
github.com/opentracing-contrib/go-grpc v0.0.0-20210225150812-73cb765af46e/go.mod h1:DYR5Eij8rJl8h7gblRrOZ8g0kW1umSpKqYIBTgeDtLo=
github.com/opentracing-contrib/go-grpc v0.1.2 h1:MP16Ozc59kqqwn1v18aQxpeGZhsBanJ2iurZYaQSZ+g=
github.com/opentracing-contrib/go-stdlib v1.1.0 h1:cZBWc4pA4e65tqTJddbflK435S0tDImj6c9BMvkdUH0=
github.com/oschwald/geoip2-golang v1.11.0 h1:hNENhCn1Uyzhf9PTmquXENiWS6AlxAEnBII6r8krA3w=
github.com/oschwald/geoip2-golang v1.11.0/go.mod h1:P9zG+54KPEFOliZ29i7SeYZ/GM6tfEL+rgSn03hYuUo=
github.com/oschwald/maxminddb-golang v1.13.0 h1:R8xBorY71s84yO06NgTmQvqvTvlS/bnYZrrWX1MElnU=
@@ -1379,6 +1373,7 @@ github.com/philhofer/fwd v1.1.2/go.mod h1:qkPdfjR2SIEbspLqpe1tO4n5yICnr2DY7mqEx2
github.com/phpdave11/gofpdf v1.4.2 h1:KPKiIbfwbvC/wOncwhrpRdXVj2CZTCFlw4wnoyjtHfQ=
github.com/pierrec/lz4 v2.6.1+incompatible h1:9UY3+iC23yxF0UfGaYrGplQ+79Rg+h/q9FV9ix19jjM=
github.com/pierrec/lz4 v2.6.1+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pires/go-proxyproto v0.8.1 h1:9KEixbdJfhrbtjpz/ZwCdWDD2Xem0NZ38qMYaASJgp0=
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8/go.mod h1:HKlIX3XHQyzLZPlr7++PzdhaXEj94dEiJgZDTsxEqUI=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A=
github.com/pkg/sftp v1.13.1 h1:I2qBYMChEhIjOgazfJmV3/mZM256btk6wkCDRmW7JYs=
@@ -1403,6 +1398,7 @@ github.com/prometheus/common v0.67.1/go.mod h1:RpmT9v35q2Y+lsieQsdOh5sXZ6ajUGC8N
github.com/prometheus/common v0.67.2/go.mod h1:63W3KZb1JOKgcjlIr64WW/LvFGAqKPj0atm+knVGEko=
github.com/prometheus/common/assets v0.2.0 h1:0P5OrzoHrYBOSM1OigWL3mY8ZvV2N4zIE/5AahrSrfM=
github.com/prometheus/exporter-toolkit v0.10.1-0.20230714054209-2f4150c63f97/go.mod h1:LoBCZeRh+5hX+fSULNyFnagYlQG/gBsyA/deNzROkq8=
github.com/prometheus/exporter-toolkit v0.15.0 h1:Pcle5sSViwR1x0gdPd0wtYrPQENBieQAM7TmT0qtb2U=
github.com/prometheus/procfs v0.0.0-20190425082905-87a4384529e0/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/statsd_exporter v0.21.0/go.mod h1:rbT83sZq2V+p73lHhPZfMc3MLCHmSHelCh9hSGYNLTQ=
github.com/prometheus/statsd_exporter v0.26.1 h1:ucbIAdPmwAUcA+dU+Opok8Qt81Aw8HanlO+2N/Wjv7w=
@@ -1464,6 +1460,7 @@ github.com/sercand/kuberesolver v2.4.0+incompatible h1:WE2OlRf6wjLxHwNkkFLQGaZcV
github.com/sercand/kuberesolver v2.4.0+incompatible/go.mod h1:lWF3GL0xptCB/vCiJPl/ZshwPsX/n4Y7u0CW9E7aQIQ=
github.com/sercand/kuberesolver/v5 v5.1.1 h1:CYH+d67G0sGBj7q5wLK61yzqJJ8gLLC8aeprPTHb6yY=
github.com/sercand/kuberesolver/v5 v5.1.1/go.mod h1:Fs1KbKhVRnB2aDWN12NjKCB+RgYMWZJ294T3BtmVCpQ=
github.com/sercand/kuberesolver/v6 v6.0.1 h1:XZUTA0gy/lgDYp/UhEwv7Js24F1j8NJ833QrWv0Xux4=
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=
github.com/shirou/gopsutil/v3 v3.23.12 h1:z90NtUkp3bMtmICZKpC4+WaknU1eXtp5vtbQ11DgpE4=
@@ -1854,7 +1851,6 @@ go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.4
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.60.0/go.mod h1:rg+RlpR5dKwaS95IyyZqj5Wd4E13lk/msnTS0Xl9lJM=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.61.0/go.mod h1:snMWehoOh2wsEwnvvwtDyFCxVeDAODenXHtn5vzrKjo=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.62.0/go.mod h1:ru6KHrNtNHxM4nD/vd6QrLVWgKhxPYgblq4VAtNawTQ=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.63.0/go.mod h1:fvPi2qXDqFs8M4B4fmJhE92TyQs9Ydjlg3RvfUp+NbQ=
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.60.0/go.mod h1:CosX/aS4eHnG9D7nESYpV753l4j9q5j3SL/PUYd2lR8=
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.61.0/go.mod h1:HfvuU0kW9HewH14VCOLImqKvUgONodURG7Alj/IrnGI=
go.opentelemetry.io/contrib/instrumentation/net/http/httptrace/otelhttptrace v0.62.0/go.mod h1:WfEApdZDMlLUAev/0QQpr8EJ/z0VWDKYZ5tF5RH5T1U=
@@ -1963,6 +1959,7 @@ golang.org/x/crypto v0.41.0/go.mod h1:pO5AFd7FA68rFak7rOAGVuygIISepHftHnr8dr6+sU
golang.org/x/crypto v0.42.0/go.mod h1:4+rDnOTJhQCx2q7/j6rAN5XDw8kPjeaXEUR2eL94ix8=
golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0=
golang.org/x/crypto v0.44.0/go.mod h1:013i+Nw79BMiQiMsOPcVCB5ZIJbYkerPrGnOa00tvmc=
golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4=
golang.org/x/exp v0.0.0-20230321023759-10a507213a29/go.mod h1:CxIveKay+FTh1D0yPZemJVgC/95VzuuOLq5Qi4xnoYc=
golang.org/x/exp v0.0.0-20230515195305-f3d0a9c9a5cc/go.mod h1:V1LtkGg67GoY2N1AnLN78QLrzxkLyJw7RJb1gzOOz9w=
golang.org/x/exp v0.0.0-20240325151524-a685a6edb6d8/go.mod h1:CQ1k9gNrJ50XIzaKCRR2hssIjF07kZFEiieALBM/ARQ=
@@ -1975,7 +1972,7 @@ golang.org/x/exp v0.0.0-20250408133849-7e4ce0ab07d0/go.mod h1:S9Xr4PYopiDyqSyp5N
golang.org/x/exp v0.0.0-20250506013437-ce4c2cf36ca6/go.mod h1:U6Lno4MTRCDY+Ba7aCcauB9T60gsv5s4ralQzP72ZoQ=
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
golang.org/x/exp v0.0.0-20250811191247-51f88131bc50/go.mod h1:rT6SFzZ7oxADUDx58pcaKFTcZ+inxAa9fTrYx/uVYwg=
golang.org/x/exp v0.0.0-20251002181428-27f1f14c8bb9/go.mod h1:TwQYMMnGpvZyc+JpB/UAuTNIsVJifOlSkrZkhcvpVUk=
golang.org/x/exp v0.0.0-20251125195548-87e1e737ad39/go.mod h1:46edojNIoXTNOhySWIWdix628clX9ODXwPsQuG6hsK0=
golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e h1:qyrTQ++p1afMkO4DPEeLGq/3oTsdlvdH4vqZUBWzUKM=
golang.org/x/exp/typeparams v0.0.0-20220218215828-6cf2b201936e/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/image v0.25.0 h1:Y6uW6rH1y5y/LK1J8BPWZtr6yZ7hrsy6hFrXjgsc2fQ=
@@ -2069,7 +2066,6 @@ golang.org/x/term v0.32.0/go.mod h1:uZG1FhGx848Sqfsq4/DlJr3xGGsYMu/L5GW4abiaEPQ=
golang.org/x/term v0.33.0/go.mod h1:s18+ql9tYWp1IfpV9DmCtQDDSRBUjKaw9M1eAv5UeF0=
golang.org/x/term v0.34.0/go.mod h1:5jC53AEywhIVebHgPVeg0mj8OD3VO9OzclacVrqpaAw=
golang.org/x/term v0.35.0/go.mod h1:TPGtkTLesOwf2DE8CgVYiZinHAOuy5AYUYT1lENIZnA=
golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss=
golang.org/x/text v0.12.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
@@ -2148,6 +2144,7 @@ google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c/go.
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
google.golang.org/genproto/googleapis/api v0.0.0-20250929231259-57b25ae835d4/go.mod h1:NnuHhy+bxcg30o7FnVAZbXsPHUDQ9qKWAQKCD7VxFtk=
google.golang.org/genproto/googleapis/api v0.0.0-20251111163417-95abcf5c77ba/go.mod h1:G5IanEx8/PgI9w6CFcYQf7jMtHQhZruvfM1i3qOqk5U=
google.golang.org/genproto/googleapis/api v0.0.0-20251124214823-79d6a2a48846/go.mod h1:Fk4kyraUvqD7i5H6S43sj2W98fbZa75lpZz/eUyhfO0=
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:+rXWjjaukWZun3mLfjmVnQi18E1AsFbDN9QdJ5YXLto=
google.golang.org/genproto/googleapis/bytestream v0.0.0-20250603155806-513f23925822 h1:zWFRixYR5QlotL+Uv3YfsPRENIrQFXiGs+iwqel6fOQ=
google.golang.org/genproto/googleapis/bytestream v0.0.0-20250603155806-513f23925822/go.mod h1:h6yxum/C2qRb4txaZRLDHK8RyS0H/o2oEDeKY4onY/Y=
@@ -1,59 +0,0 @@
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
//
// Generated by:
// public/app/plugins/gen.go
// Using jennies:
// TSTypesJenny
// PluginTsTypesJenny
//
// Run 'make gen-cue' from repository root to regenerate.
export const pluginVersion = "12.4.0-pre";
export interface Options {
buildLinkToLogLine?: unknown;
controlsStorageKey?: string;
/**
* isFilterLabelActive?: _
* onClickFilterString?: _
* onClickFilterOutString?: _
* onClickShowField?: _
* onClickHideField?: _
* onLogOptionsChange?: _
* logRowMenuIconsBefore?: _
* logRowMenuIconsAfter?: _
* logLineMenuCustomItems?: _
* onNewLogsReceived?: _
*/
displayedFields?: Array<string>;
/**
* wrapLogMessage: bool
* prettifyLogMessage: bool
* enableLogDetails: bool
* syntaxHighlighting?: bool
* sortOrder: common.LogsSortOrder
* dedupStrategy: common.LogsDedupStrategy
* enableInfiniteScrolling?: bool
* noInteractions?: bool
* showLogAttributes?: bool
* fontSize?: "default" | "small" @cuetsy(kind="enum", memberNames="default|small")
* detailsMode?: "inline" | "sidebar" @cuetsy(kind="enum", memberNames="inline|sidebar")
* timestampResolution?: "ms" | "ns" @cuetsy(kind="enum", memberNames="ms|ns")
* TODO: figure out how to define callbacks
*/
onClickFilterLabel?: unknown;
onClickFilterOutLabel?: unknown;
setDisplayedFields?: unknown;
/**
* showLabels: bool
* showCommonLabels: bool
* showFieldSelector?: bool
* showTime: bool
* showLogContextToggle: bool
*/
showControls?: boolean;
}
export const defaultOptions: Partial<Options> = {
displayedFields: [],
};
@@ -1,4 +1,4 @@
import { useId, memo, HTMLAttributes, ReactNode, SVGProps } from 'react';
import { useId, memo, HTMLAttributes, ReactNode } from 'react';
import { FieldDisplay } from '@grafana/data';
@@ -50,13 +50,14 @@ export const RadialArcPath = memo(
}: RadialArcPathProps) => {
const id = useId();
const isGradient = 'gradient' in rest;
const bgDivStyle: HTMLAttributes<HTMLDivElement>['style'] = { width: '100%', height: '100%' };
if ('color' in rest) {
bgDivStyle.backgroundColor = rest.color;
} else {
bgDivStyle.backgroundImage = getGradientCss(rest.gradient, shape);
}
const { vizWidth, vizHeight, radius, centerX, centerY, barWidth } = dimensions;
const pad = Math.ceil(Math.max(2, barWidth / 2)); // pad to cover stroke caps and glow in Safari
const boxX = Math.round(centerX - radius - barWidth - pad);
const boxY = Math.round(centerY - radius - barWidth - pad);
const boxSize = Math.round((radius + barWidth) * 2 + pad * 2);
const { radius, centerX, centerY, barWidth } = dimensions;
const path = drawRadialArcPath(angle, arcLengthDeg, dimensions, roundedBars);
@@ -71,14 +72,9 @@ export const RadialArcPath = memo(
const dotRadius =
endpointMarker === 'point' ? Math.min((barWidth / 2) * DOT_RADIUS_FACTOR, MAX_DOT_RADIUS) : barWidth / 2;
const bgDivStyle: HTMLAttributes<HTMLDivElement>['style'] = { width: boxSize, height: vizHeight, marginLeft: boxX };
const pathProps: SVGProps<SVGPathElement> = {};
let barEndcapColors: [string, string] | undefined;
let endpointMarks: ReactNode = null;
if (isGradient) {
bgDivStyle.backgroundImage = getGradientCss(rest.gradient, shape);
if ('gradient' in rest) {
if (endpointMarker && (rest.gradient?.length ?? 0) > 0) {
switch (endpointMarker) {
case 'point':
@@ -119,39 +115,25 @@ export const RadialArcPath = memo(
if (barEndcaps) {
barEndcapColors = getBarEndcapColors(rest.gradient, fieldDisplay.display.percent);
}
pathProps.fill = 'none';
pathProps.stroke = 'white';
} else {
bgDivStyle.backgroundColor = rest.color;
pathProps.fill = 'none';
pathProps.stroke = rest.color;
}
const pathEl = (
<path d={path} strokeWidth={barWidth} strokeLinecap={roundedBars ? 'round' : 'butt'} {...pathProps} />
);
return (
<>
{isGradient && (
<defs>
<mask id={id} maskUnits="userSpaceOnUse" maskContentUnits="userSpaceOnUse">
<rect x={boxX} y={boxY} width={boxSize} height={boxSize} fill="black" />
{pathEl}
</mask>
</defs>
)}
{/* FIXME: optimize this by only using clippath + foreign obj for gradients */}
<clipPath id={id}>
<path d={path} />
</clipPath>
<g filter={glowFilter}>
{isGradient ? (
<foreignObject x={0} y={0} width={vizWidth} height={vizHeight} mask={`url(#${id})`}>
<div style={bgDivStyle} />
</foreignObject>
) : (
pathEl
)}
<foreignObject
x={centerX - radius - barWidth}
y={centerY - radius - barWidth}
width={(radius + barWidth) * 2}
height={(radius + barWidth) * 2}
clipPath={`url(#${id})`}
>
<div style={bgDivStyle} />
</foreignObject>
{barEndcapColors?.[0] && <circle cx={xStart} cy={yStart} r={barWidth / 2} fill={barEndcapColors[0]} />}
{barEndcapColors?.[1] && (
<circle cx={xEnd} cy={yEnd} r={barWidth / 2} fill={barEndcapColors[1]} opacity={0.5} />
@@ -1,5 +1,5 @@
import { css, cx } from '@emotion/css';
import { useId, ReactNode } from 'react';
import { useId } from 'react';
import { DisplayValueAlignmentFactors, FALLBACK_COLOR, FieldDisplay, GrafanaTheme2, TimeRange } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
@@ -107,14 +107,14 @@ export function RadialGauge(props: RadialGaugeProps) {
const startAngle = shape === 'gauge' ? 250 : 0;
const endAngle = shape === 'gauge' ? 110 : 360;
const defs: ReactNode[] = [];
const graphics: ReactNode[] = [];
let sparklineElement: ReactNode | null = null;
const defs: React.ReactNode[] = [];
const graphics: React.ReactNode[] = [];
let sparklineElement: React.ReactNode | null = null;
for (let barIndex = 0; barIndex < values.length; barIndex++) {
const displayValue = values[barIndex];
const { angle, angleRange } = getValueAngleForValue(displayValue, startAngle, endAngle);
const gradientStops = gradient ? buildGradientColors(theme, displayValue) : undefined;
const gradientStops = buildGradientColors(gradient, theme, displayValue);
const color = displayValue.display.color ?? FALLBACK_COLOR;
const dimensions = calculateDimensions(
width,
@@ -131,9 +131,7 @@ export function RadialGauge(props: RadialGaugeProps) {
// FIXME: I want to move the ids for these filters into a context which the children
// can reference via a hook, rather than passing them down as props
const spotlightGradientId = `spotlight-${barIndex}-${gaugeId}`;
const spotlightGradientRef = endpointMarker === 'glow' ? `url(#${spotlightGradientId})` : undefined;
const glowFilterId = `glow-${gaugeId}`;
const glowFilterRef = glowBar ? `url(#${glowFilterId})` : undefined;
if (endpointMarker === 'glow') {
defs.push(
@@ -156,7 +154,7 @@ export function RadialGauge(props: RadialGaugeProps) {
fieldDisplay={displayValue}
angleRange={angleRange}
startAngle={startAngle}
glowFilter={glowFilterRef}
glowFilter={`url(#${glowFilterId})`}
segmentCount={segmentCount}
segmentSpacing={segmentSpacing}
shape={shape}
@@ -172,8 +170,8 @@ export function RadialGauge(props: RadialGaugeProps) {
angleRange={angleRange}
startAngle={startAngle}
roundedBars={roundedBars}
glowFilter={glowFilterRef}
endpointMarkerGlowFilter={spotlightGradientRef}
glowFilter={`url(#${glowFilterId})`}
endpointMarkerGlowFilter={`url(#${spotlightGradientId})`}
shape={shape}
gradient={gradientStops}
fieldDisplay={displayValue}
@@ -185,7 +183,7 @@ export function RadialGauge(props: RadialGaugeProps) {
// These elements are only added for first value / bar
if (barIndex === 0) {
if (glowBar) {
defs.push(<GlowGradient key={glowFilterId} id={glowFilterId} barWidth={dimensions.barWidth} />);
defs.push(<GlowGradient key="glow-filter" id={glowFilterId} barWidth={dimensions.barWidth} />);
}
if (glowCenter) {
@@ -236,7 +234,7 @@ export function RadialGauge(props: RadialGaugeProps) {
endAngle={endAngle}
angleRange={angleRange}
roundedBars={roundedBars}
glowFilter={glowFilterRef}
glowFilter={`url(#${glowFilterId})`}
shape={shape}
gradient={gradientStops}
/>
@@ -262,7 +260,7 @@ export function RadialGauge(props: RadialGaugeProps) {
const body = (
<>
<svg width={width} height={height} role="img" aria-label={t('gauge.category-gauge', 'Gauge')}>
{defs.length > 0 && <defs>{defs}</defs>}
<defs>{defs}</defs>
{graphics}
</svg>
{sparklineElement}
@@ -1,3 +1,4 @@
import { css } from '@emotion/css';
import { memo } from 'react';
import {
@@ -8,6 +9,7 @@ import {
GrafanaTheme2,
} from '@grafana/data';
import { useStyles2 } from '../../themes/ThemeContext';
import { calculateFontSize } from '../../utils/measureText';
import { RadialShape, RadialTextMode, RadialGaugeDimensions } from './types';
@@ -48,6 +50,7 @@ export const RadialText = memo(
valueManualFontSize,
nameManualFontSize,
}: RadialTextProps) => {
const styles = useStyles2(getStyles);
const { centerX, centerY, radius, barWidth } = dimensions;
if (textMode === 'none') {
@@ -103,9 +106,10 @@ export const RadialText = memo(
const valueY = showName ? centerY - nameHeight * (1 - VALUE_SPACE_PERCENTAGE) : centerY;
const nameY = showValue ? valueY + valueHeight * VALUE_SPACE_PERCENTAGE : centerY;
const nameColor = showValue ? theme.colors.text.secondary : theme.colors.text.primary;
const suffixShift = (valueFontSize - unitFontSize * LINE_HEIGHT_FACTOR) / 2;
// adjust the text up on gauges and when sparklines are present
let yOffset = valueFontSize / 4;
let yOffset = 0;
if (shape === 'gauge') {
// we render from the center of the gauge, so move up by half of half of the total height
yOffset -= (valueHeight + nameHeight) / 4;
@@ -122,12 +126,15 @@ export const RadialText = memo(
y={valueY}
fontSize={valueFontSize}
fill={theme.colors.text.primary}
className={styles.text}
textAnchor="middle"
dominantBaseline="text-bottom"
dominantBaseline="middle"
>
<tspan fontSize={unitFontSize}>{displayValue.prefix ?? ''}</tspan>
<tspan>{displayValue.text}</tspan>
<tspan fontSize={unitFontSize}>{displayValue.suffix ?? ''}</tspan>
<tspan className={styles.text} fontSize={unitFontSize} dy={suffixShift}>
{displayValue.suffix ?? ''}
</tspan>
</text>
)}
{showName && (
@@ -136,7 +143,7 @@ export const RadialText = memo(
x={centerX}
y={nameY}
textAnchor="middle"
dominantBaseline="text-bottom"
dominantBaseline="middle"
fill={nameColor}
>
{displayValue.title}
@@ -148,3 +155,9 @@ export const RadialText = memo(
);
RadialText.displayName = 'RadialText';
const getStyles = (_theme: GrafanaTheme2) => ({
text: css({
verticalAlign: 'bottom',
}),
});
@@ -1,17 +1,17 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`RadialGauge utils drawRadialArcPath should draw correct path for center x and y 1`] = `"M 150 120 A 80 80 0 1 1 149.98603736605492 120.00000121846968"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for center x and y 1`] = `"M 150 110 A 90 90 0 1 1 149.98429203681178 110.00000137077838 A 10 10 0 0 1 149.98778269529805 130.00000106616096 A 70 70 0 1 0 150 130 A 10 10 0 0 1 150 110 Z"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for half arc 1`] = `"M 100 20 A 80 80 0 0 1 100 180"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for half arc 1`] = `"M 100 10 A 90 90 0 0 1 100 190 L 100 170 A 70 70 0 0 0 100 30 L 100 10 Z"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for narrow bar width 1`] = `"M 100 20 A 80 80 0 0 1 100 180"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for narrow bar width 1`] = `"M 100 17.5 A 82.5 82.5 0 0 1 100 182.5 L 100 177.5 A 77.5 77.5 0 0 0 100 22.5 L 100 17.5 Z"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for narrow radius 1`] = `"M 100 50 A 50 50 0 0 1 100 150"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for narrow radius 1`] = `"M 100 40 A 60 60 0 0 1 100 160 L 100 140 A 40 40 0 0 0 100 60 L 100 40 Z"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for quarter arc 1`] = `"M 100 20 A 80 80 0 0 1 180 100"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for quarter arc 1`] = `"M 100 10 A 90 90 0 0 1 190 100 L 170 100 A 70 70 0 0 0 100 30 L 100 10 Z"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for rounded bars 1`] = `"M 100 20 A 80 80 0 1 1 20 100.00000000000001"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for rounded bars 1`] = `"M 100 10 A 90 90 0 1 1 10 100.00000000000001 A 10 10 0 0 1 30 100.00000000000001 A 70 70 0 1 0 100 30 A 10 10 0 0 1 100 10 Z"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for three quarter arc 1`] = `"M 100 20 A 80 80 0 1 1 20 100.00000000000001"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for three quarter arc 1`] = `"M 100 10 A 90 90 0 1 1 10 100.00000000000001 L 30 100.00000000000001 A 70 70 0 1 0 100 30 L 100 10 Z"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for wide bar width 1`] = `"M 100 20 A 80 80 0 0 1 100 180"`;
exports[`RadialGauge utils drawRadialArcPath should draw correct path for wide bar width 1`] = `"M 100 -5 A 105 105 0 0 1 100 205 L 100 155 A 55 55 0 0 0 100 45 L 100 -5 Z"`;
@@ -1,6 +1,6 @@
import { defaultsDeep } from 'lodash';
import { createTheme, Field, FieldDisplay, FieldType, ThresholdsMode } from '@grafana/data';
import { createTheme, FALLBACK_COLOR, Field, FieldDisplay, FieldType, ThresholdsMode } from '@grafana/data';
import { FieldColorModeId } from '@grafana/schema';
import {
@@ -50,9 +50,35 @@ describe('RadialGauge color utils', () => {
},
});
it('should return the baseColor if gradient is false-y', () => {
expect(
buildGradientColors(false, createTheme(), buildFieldDisplay(createField(FieldColorModeId.Fixed)), '#FF0000')
).toEqual([
{ color: '#FF0000', percent: 0 },
{ color: '#FF0000', percent: 1 },
]);
expect(
buildGradientColors(undefined, createTheme(), buildFieldDisplay(createField(FieldColorModeId.Fixed)), '#FF0000')
).toEqual([
{ color: '#FF0000', percent: 0 },
{ color: '#FF0000', percent: 1 },
]);
});
it('uses the fallback color if no baseColor is set', () => {
expect(buildGradientColors(false, createTheme(), buildFieldDisplay(createField(FieldColorModeId.Fixed)))).toEqual(
[
{ color: FALLBACK_COLOR, percent: 0 },
{ color: FALLBACK_COLOR, percent: 1 },
]
);
});
it('should map threshold colors correctly (with baseColor if displayProcessor does not return colors)', () => {
expect(
buildGradientColors(
true,
createTheme(),
buildFieldDisplay(createField(FieldColorModeId.Thresholds), {
view: { getFieldDisplayProcessor: jest.fn(() => jest.fn(() => ({ color: '#444444' }))) },
@@ -63,13 +89,14 @@ describe('RadialGauge color utils', () => {
it('should map threshold colors correctly (with baseColor if displayProcessor does not return colors)', () => {
expect(
buildGradientColors(createTheme(), buildFieldDisplay(createField(FieldColorModeId.Thresholds)), '#FF0000')
buildGradientColors(true, createTheme(), buildFieldDisplay(createField(FieldColorModeId.Thresholds)), '#FF0000')
).toMatchSnapshot();
});
it('should return gradient colors for continuous color modes', () => {
expect(
buildGradientColors(
true,
createTheme(),
buildFieldDisplay(createField(FieldColorModeId.ContinuousCividis)),
'#00FF00'
@@ -80,6 +107,7 @@ describe('RadialGauge color utils', () => {
it.each(['dark', 'light'] as const)('should return gradient colors for by-value color mode in %s theme', (mode) => {
expect(
buildGradientColors(
true,
createTheme({ colors: { mode } }),
buildFieldDisplay(createField(FieldColorModeId.ContinuousBlues))
)
@@ -89,6 +117,7 @@ describe('RadialGauge color utils', () => {
it.each(['dark', 'light'] as const)('should return gradient colors for fixed color mode in %s theme', (mode) => {
expect(
buildGradientColors(
true,
createTheme({ colors: { mode } }),
buildFieldDisplay(createField(FieldColorModeId.Fixed)),
'#442299'
@@ -7,10 +7,18 @@ import { GradientStop, RadialShape } from './types';
import { getFieldConfigMinMax, getFieldDisplayProcessor, getValuePercentageForValue } from './utils';
export function buildGradientColors(
gradient = false,
theme: GrafanaTheme2,
fieldDisplay: FieldDisplay,
baseColor = fieldDisplay.display.color ?? FALLBACK_COLOR
): GradientStop[] {
if (!gradient) {
return [
{ color: baseColor, percent: 0 },
{ color: baseColor, percent: 1 },
];
}
const colorMode = getFieldColorMode(fieldDisplay.field.color?.mode);
// thresholds get special handling
@@ -2,20 +2,14 @@ import { colorManipulator, GrafanaTheme2 } from '@grafana/data';
import { RadialGaugeDimensions } from './types';
// some utility transparent white colors for gradients
const TRANSPARENT_WHITE = '#ffffff00';
const MOSTLY_TRANSPARENT_WHITE = '#ffffff88';
const MOSTLY_OPAQUE_WHITE = '#ffffffbb';
const OPAQUE_WHITE = '#ffffff';
const MIN_GLOW_SIZE = 0.75;
const GLOW_FACTOR = 0.08;
export interface GlowGradientProps {
id: string;
barWidth: number;
}
const MIN_GLOW_SIZE = 0.75;
const GLOW_FACTOR = 0.08;
export function GlowGradient({ id, barWidth }: GlowGradientProps) {
// 0.75 is the minimum glow size, and it scales with bar width
const glowSize = MIN_GLOW_SIZE + barWidth * GLOW_FACTOR;
@@ -33,6 +27,16 @@ export function GlowGradient({ id, barWidth }: GlowGradientProps) {
const CENTER_GLOW_OPACITY = 0.25;
export function CenterGlowGradient({ gaugeId, color }: { gaugeId: string; color: string }) {
const transparentColor = colorManipulator.alpha(color, CENTER_GLOW_OPACITY);
return (
<radialGradient id={`circle-glow-${gaugeId}`} r="50%" fr="0%">
<stop offset="0%" stopColor={transparentColor} />
<stop offset="90%" stopColor={'#ffffff00'} />
</radialGradient>
);
}
export interface CenterGlowProps {
dimensions: RadialGaugeDimensions;
gaugeId: string;
@@ -48,7 +52,7 @@ export function MiddleCircleGlow({ dimensions, gaugeId, color }: CenterGlowProps
<defs>
<radialGradient id={gradientId} r="50%" fr="0%">
<stop offset="0%" stopColor={transparentColor} />
<stop offset="90%" stopColor={TRANSPARENT_WHITE} />
<stop offset="90%" stopColor="#ffffff00" />
</radialGradient>
</defs>
<g>
@@ -58,15 +62,19 @@ export function MiddleCircleGlow({ dimensions, gaugeId, color }: CenterGlowProps
);
}
interface SpotlightGradientProps {
export function SpotlightGradient({
id,
dimensions,
roundedBars,
angle,
theme,
}: {
id: string;
dimensions: RadialGaugeDimensions;
angle: number;
roundedBars: boolean;
theme: GrafanaTheme2;
}
export function SpotlightGradient({ id, dimensions, roundedBars, angle, theme }: SpotlightGradientProps) {
}) {
if (theme.isLight) {
return null;
}
@@ -80,9 +88,9 @@ export function SpotlightGradient({ id, dimensions, roundedBars, angle, theme }:
return (
<linearGradient x1={x1} y1={y1} x2={x2} y2={y2} id={id} gradientUnits="userSpaceOnUse">
<stop offset="0%" stopColor={TRANSPARENT_WHITE} />
<stop offset="95%" stopColor={MOSTLY_TRANSPARENT_WHITE} />
{roundedBars && <stop offset="100%" stopColor={roundedBars ? MOSTLY_OPAQUE_WHITE : OPAQUE_WHITE} />}
<stop offset="0%" stopColor="#ffffff00" />
<stop offset="95%" stopColor="#ffffff88" />
{roundedBars && <stop offset="100%" stopColor={roundedBars ? '#ffffffbb' : 'white'} />}
</linearGradient>
);
}
@@ -2,8 +2,6 @@ export type RadialTextMode = 'auto' | 'value_and_name' | 'value' | 'name' | 'non
export type RadialShape = 'circle' | 'gauge';
export interface RadialGaugeDimensions {
vizHeight: number;
vizWidth: number;
margin: number;
radius: number;
centerX: number;
@@ -283,9 +283,7 @@ describe('RadialGauge utils', () => {
});
describe('drawRadialArcPath', () => {
const defaultDims = Object.freeze({
vizHeight: 220,
vizWidth: 220,
const defaultDims: RadialGaugeDimensions = Object.freeze({
centerX: 100,
centerY: 100,
radius: 80,
@@ -299,7 +297,7 @@ describe('RadialGauge utils', () => {
scaleLabelsSpacing: 0,
scaleLabelsRadius: 0,
gaugeBottomY: 0,
}) satisfies RadialGaugeDimensions;
});
it.each([
{ description: 'quarter arc', startAngle: 0, endAngle: 90 },
@@ -326,6 +324,11 @@ describe('RadialGauge utils', () => {
expect(drawRadialArcPath(0, 360, defaultDims)).toEqual(drawRadialArcPath(0, 359.99, defaultDims));
expect(drawRadialArcPath(0, 380, defaultDims)).toEqual(drawRadialArcPath(0, 380, defaultDims));
});
it('should return empty string if inner radius collapses to zero or below', () => {
const smallRadiusDims = { ...defaultDims, radius: 5, barWidth: 20 };
expect(drawRadialArcPath(0, 180, smallRadiusDims)).toBe('');
});
});
});
@@ -338,9 +341,7 @@ describe('RadialGauge utils', () => {
describe('getOptimalSegmentCount', () => {
it('should adjust segment count based on dimensions and spacing', () => {
const dimensions = {
vizHeight: 220,
vizWidth: 220,
const dimensions: RadialGaugeDimensions = {
centerX: 100,
centerY: 100,
radius: 80,
@@ -354,7 +355,7 @@ describe('RadialGauge utils', () => {
scaleLabelsSpacing: 0,
scaleLabelsRadius: 0,
gaugeBottomY: 0,
} satisfies RadialGaugeDimensions;
};
expect(getOptimalSegmentCount(dimensions, 2, 10, 360)).toBe(8);
expect(getOptimalSegmentCount(dimensions, 1, 5, 360)).toBe(5);
@@ -155,8 +155,6 @@ export function calculateDimensions(
}
return {
vizWidth: width,
vizHeight: height,
margin,
gaugeBottomY: centerY + belowCenterY,
radius: innerRadius,
@@ -187,7 +185,7 @@ export function drawRadialArcPath(
dimensions: RadialGaugeDimensions,
roundedBars?: boolean
): string {
const { radius, centerX, centerY } = dimensions;
const { radius, centerX, centerY, barWidth } = dimensions;
// For some reason a 100% full arc cannot be rendered
if (endAngle >= 360) {
@@ -199,12 +197,66 @@ export function drawRadialArcPath(
const largeArc = endAngle > 180 ? 1 : 0;
let x1 = centerX + radius * Math.cos(startRadians);
let y1 = centerY + radius * Math.sin(startRadians);
let x2 = centerX + radius * Math.cos(endRadians);
let y2 = centerY + radius * Math.sin(endRadians);
const outerR = radius + barWidth / 2;
const innerR = Math.max(0, radius - barWidth / 2);
if (innerR <= 0) {
return ''; // cannot draw arc with 0 inner radius
}
return ['M', x1, y1, 'A', radius, radius, 0, largeArc, 1, x2, y2].join(' ');
// get points for both an inner and outer arc. we draw
// the arc entirely with a path's fill instead of using stroke
// so that it can be used as a clip-path.
const ox1 = centerX + outerR * Math.cos(startRadians);
const oy1 = centerY + outerR * Math.sin(startRadians);
const ox2 = centerX + outerR * Math.cos(endRadians);
const oy2 = centerY + outerR * Math.sin(endRadians);
const ix1 = centerX + innerR * Math.cos(startRadians);
const iy1 = centerY + innerR * Math.sin(startRadians);
const ix2 = centerX + innerR * Math.cos(endRadians);
const iy2 = centerY + innerR * Math.sin(endRadians);
// calculate the cap width in case we're drawing rounded bars
const capR = barWidth / 2;
const pathParts = [
// start at outer start
'M',
ox1,
oy1,
// outer arc from start to end (clockwise)
'A',
outerR,
outerR,
0,
largeArc,
1,
ox2,
oy2,
];
if (roundedBars) {
// rounded end cap: small arc connecting outer end to inner end
pathParts.push('A', capR, capR, 0, 0, 1, ix2, iy2);
} else {
// straight line to inner end (square butt)
pathParts.push('L', ix2, iy2);
}
// inner arc from end back to start (counter-clockwise)
pathParts.push('A', innerR, innerR, 0, largeArc, 0, ix1, iy1);
if (roundedBars) {
// rounded start cap: small arc connecting inner start back to outer start
pathParts.push('A', capR, capR, 0, 0, 1, ox1, oy1);
} else {
// straight line back to outer start (square butt)
pathParts.push('L', ox1, oy1);
}
pathParts.push('Z');
return pathParts.join(' ');
}
export function getAngleBetweenSegments(segmentSpacing: number, segmentCount: number, range: number) {
@@ -1108,18 +1108,12 @@ export function parseStyleJson(rawValue: unknown): CSSProperties | void {
}
}
// Safari 26.0 introduced rendering bugs which require us to disable several features of the table.
// The bugs were later fixed in Safari 26.2.
// Safari 26 introduced rendering bugs which require us to disable several features of the table.
export const IS_SAFARI_26 = (() => {
if (navigator == null) {
return false;
}
const userAgent = navigator.userAgent;
const safariVersionMatch = userAgent.match(/Version\/(\d+)\.(\d+)/);
if (!safariVersionMatch) {
return false;
}
const majorVersion = +safariVersionMatch[1];
const minorVersion = +safariVersionMatch[2];
return majorVersion === 26 && minorVersion <= 1;
const safariVersionMatch = userAgent.match(/Version\/(\d+)\./);
return safariVersionMatch && parseInt(safariVersionMatch[1], 10) === 26;
})();
@@ -66,6 +66,6 @@ export interface UserView {
avatarUrl?: string;
};
/** Datetime string when the user was last active */
lastActiveAt?: DateTimeInput;
lastActiveAt: DateTimeInput;
}
```
@@ -10,7 +10,7 @@ import { Tooltip } from '../Tooltip/Tooltip';
import { UserView } from './types';
export interface UserIconProps {
/** An object that contains the user's details and an optional 'lastActiveAt' status */
/** An object that contains the user's details and 'lastActiveAt' status */
userView: UserView;
/** A boolean value that determines whether the tooltip should be shown or not */
showTooltip?: boolean;
@@ -64,8 +64,7 @@ export const UserIcon = ({
showTooltip = true,
}: PropsWithChildren<UserIconProps>) => {
const { user, lastActiveAt } = userView;
const hasActive = lastActiveAt !== undefined && lastActiveAt !== null;
const isActive = hasActive && dateTime(lastActiveAt).diff(dateTime(), 'minutes', true) >= -15;
const isActive = dateTime(lastActiveAt).diff(dateTime(), 'minutes', true) >= -15;
const theme = useTheme2();
const styles = useMemo(() => getStyles(theme, isActive), [theme, isActive]);
const content = (
@@ -89,20 +88,18 @@ export const UserIcon = ({
const tooltip = (
<div className={styles.tooltipContainer}>
<div className={styles.tooltipName}>{user.name}</div>
{hasActive && (
<div className={styles.tooltipDate}>
{isActive ? (
<div className={styles.dotContainer}>
<span>
<Trans i18nKey="grafana-ui.user-icon.active-text">Active last 15m</Trans>
</span>
<span className={styles.dot}></span>
</div>
) : (
formatViewed(lastActiveAt)
)}
</div>
)}
<div className={styles.tooltipDate}>
{isActive ? (
<div className={styles.dotContainer}>
<span>
<Trans i18nKey="grafana-ui.user-icon.active-text">Active last 15m</Trans>
</span>
<span className={styles.dot}></span>
</div>
) : (
formatViewed(lastActiveAt)
)}
</div>
</div>
);
@@ -60,6 +60,6 @@ export interface UserView {
avatarUrl?: string;
};
/** Datetime string when the user was last active */
lastActiveAt?: DateTimeInput;
lastActiveAt: DateTimeInput;
}
```
@@ -9,7 +9,7 @@ import { UserIcon } from './UserIcon';
import { UserView } from './types';
export interface UsersIndicatorProps {
/** An object that contains the user's details and an optional 'lastActiveAt' status */
/** An object that contains the user's details and 'lastActiveAt' status */
users: UserView[];
/** A limit of how many user icons to show before collapsing them and showing a number of users instead */
limit?: number;
@@ -40,7 +40,7 @@ export const UsersIndicator = ({ users, onClick, limit = 4 }: UsersIndicatorProp
aria-label={t('grafana-ui.users-indicator.container-label', 'Users indicator container')}
>
{limitReached && (
<UserIcon onClick={onClick} userView={{ user: { name: 'Extra users' } }} showTooltip={false}>
<UserIcon onClick={onClick} userView={{ user: { name: 'Extra users' }, lastActiveAt: '' }} showTooltip={false}>
{tooManyUsers
? // eslint-disable-next-line @grafana/i18n/no-untranslated-strings
'...'
@@ -8,5 +8,5 @@ export interface UserView {
avatarUrl?: string;
};
/** Datetime string when the user was last active */
lastActiveAt?: DateTimeInput;
lastActiveAt: DateTimeInput;
}
+61 -38
View File
@@ -4,10 +4,10 @@ import (
"context"
"strconv"
"github.com/grafana/authlib/authz"
authlib "github.com/grafana/authlib/types"
iamv0alpha1 "github.com/grafana/grafana/apps/iam/pkg/apis/iam/v0alpha1"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/apimachinery/utils"
legacyiamv0 "github.com/grafana/grafana/pkg/apis/iam/v0alpha1"
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
@@ -54,10 +54,10 @@ type ListResponse[T Resource] struct {
type ListFunc[T Resource] func(ctx context.Context, ns authlib.NamespaceInfo, p Pagination) (*ListResponse[T], error)
// List is a helper function that will perform access check on resources if
// prvovided with a authlib.AccessClient.
// provided with a authlib.AccessClient.
func List[T Resource](
ctx context.Context,
resource utils.ResourceInfo,
resourceInfo utils.ResourceInfo,
ac authlib.AccessClient,
p Pagination,
fn ListFunc[T],
@@ -67,63 +67,86 @@ func List[T Resource](
return nil, err
}
ident, err := identity.GetRequester(ctx)
if err != nil {
return nil, err
}
check := func(_, _ string) bool { return true }
if ac != nil {
var err error
check, _, err = ac.Compile(ctx, ident, authlib.ListRequest{
Resource: resource.GroupResource().Resource,
Group: resource.GroupResource().Group,
Verb: "list",
Namespace: ns.Value,
})
if err != nil {
return nil, err
}
}
res := &ListResponse[T]{Items: make([]T, 0, p.Limit)}
first, err := fn(ctx, ns, p)
if err != nil {
return nil, err
}
for _, item := range first.Items {
if !check(item.AuthID(), "") {
continue
}
res.Items = append(res.Items, item)
}
res.Continue = first.Continue
res.RV = first.RV
// If no access client, skip authorization
if ac == nil {
res.Items = append(res.Items, first.Items...)
for len(res.Items) < int(p.Limit) && res.Continue != 0 {
r, err := fn(ctx, ns, Pagination{Limit: p.Limit - int64(len(res.Items)), Continue: res.Continue})
if err != nil {
return nil, err
}
res.Items = append(res.Items, r.Items...)
res.Continue = r.Continue
}
return res, nil
}
// Use FilterAuthorized to batch authorize items
extractFn := func(item T) authz.BatchCheckItem {
return authz.BatchCheckItem{
Name: item.AuthID(),
Folder: "",
Verb: "list",
Group: resourceInfo.GroupResource().Group,
Resource: resourceInfo.GroupResource().Resource,
Namespace: ns.Value,
}
}
// Convert first batch to iter.Seq and filter
firstCandidates := func(yield func(T) bool) {
for _, item := range first.Items {
if !yield(item) {
return
}
}
}
for item, err := range authz.FilterAuthorized(ctx, ac, firstCandidates, extractFn).Items {
if err != nil {
return nil, err
}
res.Items = append(res.Items, item)
}
outer:
for len(res.Items) < int(p.Limit) && res.Continue != 0 {
// FIXME: it is not optimal to reduce the amout we look for here but it is the easiest way to
// FIXME: it is not optimal to reduce the amount we look for here but it is the easiest way to
// correctly handle pagination and continue tokens
r, err := fn(ctx, ns, Pagination{Limit: p.Limit - int64(len(res.Items)), Continue: res.Continue})
if err != nil {
return nil, err
}
for _, item := range r.Items {
if len(res.Items) == int(p.Limit) {
candidates := func(yield func(T) bool) {
for _, item := range r.Items {
if !yield(item) {
return
}
}
}
for item, authErr := range authz.FilterAuthorized(ctx, ac, candidates, extractFn).Items {
if authErr != nil {
return nil, authErr
}
if len(res.Items) >= int(p.Limit) {
res.Continue = r.Continue
break outer
}
if !check(item.AuthID(), "") {
continue
}
res.Items = append(res.Items, item)
}
res.Continue = r.Continue
}
return res, nil
@@ -6,6 +6,7 @@ import (
"strconv"
"time"
"github.com/grafana/authlib/authz"
claims "github.com/grafana/authlib/types"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/trace"
@@ -297,35 +298,38 @@ func (s *SecureValueService) List(ctx context.Context, namespace xkube.Namespace
s.metrics.SecureValueListDuration.WithLabelValues(strconv.FormatBool(success)).Observe(time.Since(start).Seconds())
}()
user, ok := claims.AuthInfoFrom(ctx)
if !ok {
return nil, fmt.Errorf("missing auth info in context")
}
hasPermissionFor, _, err := s.accessClient.Compile(ctx, user, claims.ListRequest{
Group: secretv1beta1.APIGroup,
Resource: secretv1beta1.SecureValuesResourceInfo.GetName(),
Namespace: namespace.String(),
Verb: utils.VerbGet, // Why not VerbList?
})
if err != nil {
return nil, fmt.Errorf("failed to compile checker: %w", err)
}
secureValuesMetadata, err := s.secureValueMetadataStorage.List(ctx, namespace)
if err != nil {
return nil, fmt.Errorf("fetching secure values from storage: %+w", err)
}
// Convert slice to iter.Seq
candidates := func(yield func(secretv1beta1.SecureValue) bool) {
for _, m := range secureValuesMetadata {
if !yield(m) {
return
}
}
}
extractFn := func(sv secretv1beta1.SecureValue) authz.BatchCheckItem {
return authz.BatchCheckItem{
Name: sv.Name,
Folder: "",
Verb: utils.VerbGet, // Why not VerbList?
Group: secretv1beta1.APIGroup,
Resource: secretv1beta1.SecureValuesResourceInfo.GetName(),
Namespace: namespace.String(),
}
}
out := make([]secretv1beta1.SecureValue, 0)
for _, metadata := range secureValuesMetadata {
// Check whether the user has permission to access this specific SecureValue in the namespace.
if !hasPermissionFor(metadata.Name, "") {
continue
for item, err := range authz.FilterAuthorized(ctx, s.accessClient, candidates, extractFn).Items {
if err != nil {
return nil, fmt.Errorf("failed to check authorization: %w", err)
}
out = append(out, metadata)
out = append(out, item)
}
return &secretv1beta1.SecureValueList{
-10
View File
@@ -248,16 +248,6 @@ func GetComposableKinds() ([]ComposableKind, error) {
CueFile: logsCue,
})
logstableCue, err := loadCueFileWithCommon(root, filepath.Join(root, "./public/app/plugins/panel/logstable/panelcfg.cue"))
if err != nil {
return nil, err
}
kinds = append(kinds, ComposableKind{
Name: "logstable",
Filename: "panelcfg.cue",
CueFile: logstableCue,
})
newsCue, err := loadCueFileWithCommon(root, filepath.Join(root, "./public/app/plugins/panel/news/panelcfg.cue"))
if err != nil {
return nil, err
@@ -1,44 +0,0 @@
package acimpl
import (
"context"
"time"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
const (
ossBasicRoleSeedLockName = "oss-ac-basic-role-seeder"
ossBasicRoleSeedTimeout = 2 * time.Minute
)
// refreshBasicRolePermissionsInDB ensures basic role permissions are fully derived from in-memory registrations
func (s *Service) refreshBasicRolePermissionsInDB(ctx context.Context, rolesSnapshot map[string][]accesscontrol.Permission) error {
if s.sql == nil || s.seeder == nil {
return nil
}
run := func(ctx context.Context) error {
desired := map[accesscontrol.SeedPermission]struct{}{}
for role, permissions := range rolesSnapshot {
for _, permission := range permissions {
desired[accesscontrol.SeedPermission{BuiltInRole: role, Action: permission.Action, Scope: permission.Scope}] = struct{}{}
}
}
s.seeder.SetDesiredPermissions(desired)
return s.seeder.Seed(ctx)
}
if s.serverLock == nil {
return run(ctx)
}
var err error
errLock := s.serverLock.LockExecuteAndRelease(ctx, ossBasicRoleSeedLockName, ossBasicRoleSeedTimeout, func(ctx context.Context) {
err = run(ctx)
})
if errLock != nil {
return errLock
}
return err
}
@@ -1,128 +0,0 @@
package acimpl
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/localcache"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/database"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/resourcepermissions"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/org"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util/testutil"
)
func TestIntegration_OSSBasicRolePermissions_PersistAndRefreshOnRegisterFixedRoles(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
ctx := context.Background()
sql := db.InitTestDB(t)
store := database.ProvideService(sql)
svc := ProvideOSSService(
setting.NewCfg(),
store,
&resourcepermissions.FakeActionSetSvc{},
localcache.ProvideService(),
featuremgmt.WithFeatures(),
tracing.InitializeTracerForTest(),
sql,
permreg.ProvidePermissionRegistry(),
nil,
)
require.NoError(t, svc.DeclareFixedRoles(accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:test:role",
Permissions: []accesscontrol.Permission{
{Action: "test:read", Scope: ""},
},
},
Grants: []string{string(org.RoleViewer)},
}))
require.NoError(t, svc.RegisterFixedRoles(ctx))
// verify permission is persisted to DB for basic:viewer
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
var count int64
count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
require.NoError(t, err)
require.Equal(t, int64(1), count)
return nil
}))
// ensure RegisterFixedRoles refreshes it back to defaults
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
ts := time.Now()
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
_, err = sess.Exec("DELETE FROM permission WHERE role_id = ?", role.ID)
require.NoError(t, err)
p := accesscontrol.Permission{
RoleID: role.ID,
Action: "custom:keep",
Scope: "",
Created: ts,
Updated: ts,
}
p.Kind, p.Attribute, p.Identifier = accesscontrol.SplitScope(p.Scope)
_, err = sess.Table("permission").Insert(&p)
return err
}))
svc2 := ProvideOSSService(
setting.NewCfg(),
store,
&resourcepermissions.FakeActionSetSvc{},
localcache.ProvideService(),
featuremgmt.WithFeatures(),
tracing.InitializeTracerForTest(),
sql,
permreg.ProvidePermissionRegistry(),
nil,
)
require.NoError(t, svc2.DeclareFixedRoles(accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:test:role",
Permissions: []accesscontrol.Permission{
{Action: "test:read", Scope: ""},
},
},
Grants: []string{string(org.RoleViewer)},
}))
require.NoError(t, svc2.RegisterFixedRoles(ctx))
require.NoError(t, sql.WithDbSession(ctx, func(sess *db.Session) error {
var role accesscontrol.Role
ok, err := sess.Table("role").Where("uid = ?", accesscontrol.BasicRoleUIDPrefix+"viewer").Get(&role)
require.NoError(t, err)
require.True(t, ok)
var count int64
count, err = sess.Table("permission").Where("role_id = ? AND action = ? AND scope = ?", role.ID, "test:read", "").Count()
require.NoError(t, err)
require.Equal(t, int64(1), count)
count, err = sess.Table("permission").Where("role_id = ? AND action = ?", role.ID, "custom:keep").Count()
require.NoError(t, err)
require.Equal(t, int64(0), count)
return nil
}))
}
+2 -62
View File
@@ -30,7 +30,6 @@ import (
"github.com/grafana/grafana/pkg/services/accesscontrol/migrator"
"github.com/grafana/grafana/pkg/services/accesscontrol/permreg"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -97,12 +96,6 @@ func ProvideOSSService(
roles: accesscontrol.BuildBasicRoleDefinitions(),
store: store,
permRegistry: permRegistry,
sql: db,
serverLock: lock,
}
if backend, ok := store.(*database.AccessControlStore); ok {
s.seeder = seeding.New(log.New("accesscontrol.seeder"), backend, backend)
}
return s
@@ -119,11 +112,8 @@ type Service struct {
rolesMu sync.RWMutex
roles map[string]*accesscontrol.RoleDTO
store accesscontrol.Store
seeder *seeding.Seeder
permRegistry permreg.PermissionRegistry
isInitialized bool
sql db.DB
serverLock *serverlock.ServerLockService
}
func (s *Service) GetUsageStats(_ context.Context) map[string]any {
@@ -441,54 +431,17 @@ func (s *Service) RegisterFixedRoles(ctx context.Context) error {
defer span.End()
s.rolesMu.Lock()
registrations := s.registrations.Slice()
defer s.rolesMu.Unlock()
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
s.registerRolesLocked(registration)
return true
})
s.isInitialized = true
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.Unlock()
if s.seeder != nil {
if err := s.seeder.SeedRoles(ctx, registrations); err != nil {
return err
}
if err := s.seeder.RemoveAbsentRoles(ctx); err != nil {
return err
}
}
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
return nil
}
// getBasicRolePermissionsSnapshotFromRegistrationsLocked computes the desired basic role permissions from the
// current registration list, using the shared seeding registration logic.
//
// it has to be called while holding the roles lock
func (s *Service) getBasicRolePermissionsLocked() map[string][]accesscontrol.Permission {
desired := map[accesscontrol.SeedPermission]struct{}{}
s.registrations.Range(func(registration accesscontrol.RoleRegistration) bool {
seeding.AppendDesiredPermissions(desired, s.log, &registration.Role, registration.Grants, registration.Exclude)
return true
})
out := make(map[string][]accesscontrol.Permission)
for sp := range desired {
out[sp.BuiltInRole] = append(out[sp.BuiltInRole], accesscontrol.Permission{
Action: sp.Action,
Scope: sp.Scope,
})
}
return out
}
// registerRolesLocked processes a single role registration and adds permissions to basic roles.
// Must be called with s.rolesMu locked.
func (s *Service) registerRolesLocked(registration accesscontrol.RoleRegistration) {
@@ -521,7 +474,6 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
defer span.End()
acRegs := pluginutils.ToRegistrations(ID, name, regs)
updatedBasicRoles := false
for _, r := range acRegs {
if err := pluginutils.ValidatePluginRole(ID, r.Role); err != nil {
return err
@@ -548,23 +500,11 @@ func (s *Service) DeclarePluginRoles(ctx context.Context, ID, name string, regs
if initialized {
s.rolesMu.Lock()
s.registerRolesLocked(r)
updatedBasicRoles = true
s.rolesMu.Unlock()
s.cache.Flush()
}
}
if updatedBasicRoles {
s.rolesMu.RLock()
rolesSnapshot := s.getBasicRolePermissionsLocked()
s.rolesMu.RUnlock()
// plugin roles can be declared after startup - keep DB in sync
if err := s.refreshBasicRolePermissionsInDB(ctx, rolesSnapshot); err != nil {
return err
}
}
return nil
}
+96
View File
@@ -167,3 +167,99 @@ func (c *LegacyAccessClient) Compile(ctx context.Context, id claims.AuthInfo, re
return check(fmt.Sprintf("%s:%s:%s", opts.Resource, opts.Attr, name))
}, claims.NoopZookie{}, nil
}
func (c *LegacyAccessClient) BatchCheck(ctx context.Context, id claims.AuthInfo, req claims.BatchCheckRequest) (claims.BatchCheckResponse, error) {
ident, ok := id.(identity.Requester)
if !ok {
return claims.BatchCheckResponse{}, errors.New("expected identity.Requester for legacy access control")
}
results := make(map[string]claims.BatchCheckResult, len(req.Checks))
// Cache checkers by action to avoid recreating them for each check
checkerCache := make(map[string]func(scopes ...string) bool)
for _, check := range req.Checks {
opts, ok := c.opts[check.Resource]
if !ok {
// For now w fallback to grafana admin if no options are found for resource.
if ident.GetIsGrafanaAdmin() {
results[check.CorrelationID] = claims.BatchCheckResult{Allowed: true}
} else {
results[check.CorrelationID] = claims.BatchCheckResult{Allowed: false}
}
continue
}
// Check if verb should be skipped
if opts.Unchecked[check.Verb] {
results[check.CorrelationID] = claims.BatchCheckResult{Allowed: true}
continue
}
action, ok := opts.Mapping[check.Verb]
if !ok {
results[check.CorrelationID] = claims.BatchCheckResult{
Allowed: false,
Error: fmt.Errorf("missing action for %s %s", check.Verb, check.Resource),
}
continue
}
// Get or create cached checker for this action
checker, ok := checkerCache[action]
if !ok {
checker = Checker(ident, action)
checkerCache[action] = checker
}
// Handle list and create verbs (no specific name)
// TODO: Should we allow list/create without name in a BatchCheck request?
if check.Name == "" {
if check.Verb == utils.VerbList || check.Verb == utils.VerbCreate {
// For list/create without name, check if user has the action at all
// TODO: Is this correct for Create?
results[check.CorrelationID] = claims.BatchCheckResult{
Allowed: len(ident.GetPermissions()[action]) > 0,
}
} else {
results[check.CorrelationID] = claims.BatchCheckResult{
Allowed: false,
Error: fmt.Errorf("unhandled authorization: %s %s", check.Group, check.Verb),
}
}
continue
}
// Check with resolver or direct scope
var allowed bool
if opts.Resolver != nil {
ns, err := claims.ParseNamespace(check.Namespace)
if err != nil {
results[check.CorrelationID] = claims.BatchCheckResult{
Allowed: false,
Error: err,
}
continue
}
scopes, err := opts.Resolver.Resolve(ctx, ns, check.Name)
if err != nil {
results[check.CorrelationID] = claims.BatchCheckResult{
Allowed: false,
Error: err,
}
continue
}
allowed = checker(scopes...)
} else {
allowed = checker(fmt.Sprintf("%s:%s:%s", opts.Resource, opts.Attr, check.Name))
}
results[check.CorrelationID] = claims.BatchCheckResult{Allowed: allowed}
}
return claims.BatchCheckResponse{
Results: results,
Zookie: claims.NoopZookie{},
}, nil
}
@@ -136,6 +136,220 @@ func TestLegacyAccessClient_Check(t *testing.T) {
})
}
// TestLegacyAccessClient_BatchCheck exercises BatchCheck on the legacy access
// client: result keying by CorrelationID, the grafana-admin fallback for
// unknown resources, unchecked verbs, verb->action mapping, direct scope
// checks, list-without-name handling, resolver-based scope resolution, and
// reuse of the per-action checker across checks.
func TestLegacyAccessClient_BatchCheck(t *testing.T) {
	ac := acimpl.ProvideAccessControl(featuremgmt.WithFeatures())
	t.Run("should return empty results for empty checks", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac)
		res, err := a.BatchCheck(context.Background(), &identity.StaticRequester{}, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{},
		})
		assert.NoError(t, err)
		assert.Empty(t, res.Results)
	})
	t.Run("should reject unknown resource for non-admin", func(t *testing.T) {
		// No ResourceAuthorizerOptions registered for "unknown".
		a := accesscontrol.NewLegacyAccessClient(ac)
		res, err := a.BatchCheck(context.Background(), &identity.StaticRequester{}, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "unknown", Name: "1"},
			},
		})
		assert.NoError(t, err)
		assert.False(t, res.Results["check-1"].Allowed)
	})
	t.Run("should allow unknown resource for grafana admin", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac)
		res, err := a.BatchCheck(context.Background(), &identity.StaticRequester{IsGrafanaAdmin: true}, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "unknown", Name: "1"},
			},
		})
		assert.NoError(t, err)
		assert.True(t, res.Results["check-1"].Allowed)
	})
	t.Run("should allow unchecked verbs", func(t *testing.T) {
		// Verbs listed in Unchecked bypass the permission check entirely.
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource:  "dashboards",
			Attr:      "uid",
			Unchecked: map[string]bool{"get": true},
		})
		res, err := a.BatchCheck(context.Background(), &identity.StaticRequester{}, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "dashboards", Name: "1"},
			},
		})
		assert.NoError(t, err)
		assert.True(t, res.Results["check-1"].Allowed)
	})
	t.Run("should return error for missing action mapping", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource: "dashboards",
			Attr:     "uid",
			Mapping:  map[string]string{}, // Empty mapping
		})
		res, err := a.BatchCheck(context.Background(), &identity.StaticRequester{}, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "dashboards", Name: "1"},
			},
		})
		// The batch call itself succeeds; the failure surfaces per-check.
		assert.NoError(t, err)
		assert.False(t, res.Results["check-1"].Allowed)
		assert.Error(t, res.Results["check-1"].Error)
	})
	t.Run("should allow when user has correct scope", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource: "dashboards",
			Attr:     "uid",
			Mapping:  map[string]string{"get": "dashboards:read"},
		})
		ident := newIdent(accesscontrol.Permission{Action: "dashboards:read", Scope: "dashboards:uid:1"})
		res, err := a.BatchCheck(context.Background(), ident, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "dashboards", Name: "1"},
			},
		})
		assert.NoError(t, err)
		assert.True(t, res.Results["check-1"].Allowed)
	})
	t.Run("should reject when user has wrong scope", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource: "dashboards",
			Attr:     "uid",
			Mapping:  map[string]string{"get": "dashboards:read"},
		})
		// Permission is scoped to uid:2; the check targets uid:1.
		ident := newIdent(accesscontrol.Permission{Action: "dashboards:read", Scope: "dashboards:uid:2"})
		res, err := a.BatchCheck(context.Background(), ident, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "dashboards", Name: "1"},
			},
		})
		assert.NoError(t, err)
		assert.False(t, res.Results["check-1"].Allowed)
	})
	t.Run("should handle list without name", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource: "dashboards",
			Attr:     "uid",
			Mapping:  map[string]string{"list": "dashboards:read"},
		})
		ident := newIdent(accesscontrol.Permission{Action: "dashboards:read", Scope: "dashboards:uid:*"})
		res, err := a.BatchCheck(context.Background(), ident, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "list", Resource: "dashboards", Name: ""},
			},
		})
		assert.NoError(t, err)
		assert.True(t, res.Results["check-1"].Allowed)
	})
	t.Run("should handle multiple checks with mixed results", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource: "dashboards",
			Attr:     "uid",
			Mapping:  map[string]string{"get": "dashboards:read"},
		})
		// Scopes for uid:1 and uid:3 only; uid:2 should be denied.
		ident := newIdent(
			accesscontrol.Permission{Action: "dashboards:read", Scope: "dashboards:uid:1"},
			accesscontrol.Permission{Action: "dashboards:read", Scope: "dashboards:uid:3"},
		)
		res, err := a.BatchCheck(context.Background(), ident, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "dashboards", Name: "1"},
				{CorrelationID: "check-2", Verb: "get", Resource: "dashboards", Name: "2"},
				{CorrelationID: "check-3", Verb: "get", Resource: "dashboards", Name: "3"},
			},
		})
		assert.NoError(t, err)
		assert.True(t, res.Results["check-1"].Allowed)
		assert.False(t, res.Results["check-2"].Allowed)
		assert.True(t, res.Results["check-3"].Allowed)
	})
	t.Run("should use resolver when provided", func(t *testing.T) {
		resolver := accesscontrol.ResourceResolverFunc(func(ctx context.Context, ns authlib.NamespaceInfo, name string) ([]string, error) {
			// Resolve dashboard name to folder scope
			return []string{"folders:uid:folder-a"}, nil
		})
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource: "dashboards",
			Attr:     "uid",
			Mapping:  map[string]string{"get": "dashboards:read"},
			Resolver: resolver,
		})
		// The user holds the folder scope the resolver maps the dashboard to.
		ident := newIdent(accesscontrol.Permission{Action: "dashboards:read", Scope: "folders:uid:folder-a"})
		res, err := a.BatchCheck(context.Background(), ident, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "check-1", Verb: "get", Resource: "dashboards", Name: "1", Namespace: "default"},
			},
		})
		assert.NoError(t, err)
		assert.True(t, res.Results["check-1"].Allowed)
	})
	t.Run("should cache checker by action", func(t *testing.T) {
		a := accesscontrol.NewLegacyAccessClient(ac, accesscontrol.ResourceAuthorizerOptions{
			Resource: "dashboards",
			Attr:     "uid",
			Mapping:  map[string]string{"get": "dashboards:read", "update": "dashboards:write"},
		})
		ident := newIdent(
			accesscontrol.Permission{Action: "dashboards:read", Scope: "dashboards:uid:*"},
			accesscontrol.Permission{Action: "dashboards:write", Scope: "dashboards:uid:1"},
		)
		res, err := a.BatchCheck(context.Background(), ident, authlib.BatchCheckRequest{
			Checks: []authlib.BatchCheckItem{
				{CorrelationID: "read-1", Verb: "get", Resource: "dashboards", Name: "1"},
				{CorrelationID: "read-2", Verb: "get", Resource: "dashboards", Name: "2"},
				{CorrelationID: "write-1", Verb: "update", Resource: "dashboards", Name: "1"},
				{CorrelationID: "write-2", Verb: "update", Resource: "dashboards", Name: "2"},
			},
		})
		assert.NoError(t, err)
		// Read with wildcard scope should allow all
		assert.True(t, res.Results["read-1"].Allowed)
		assert.True(t, res.Results["read-2"].Allowed)
		// Write only has scope for uid:1
		assert.True(t, res.Results["write-1"].Allowed)
		assert.False(t, res.Results["write-2"].Allowed)
	})
}
func newIdent(permissions ...accesscontrol.Permission) *identity.StaticRequester {
pmap := map[string][]string{}
for _, p := range permissions {
@@ -1,623 +0,0 @@
package database
import (
"context"
"strings"
"time"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/seeding"
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
"github.com/grafana/grafana/pkg/util/xorm/core"
)
const basicRolePermBatchSize = 500
// LoadRoles returns all fixed and plugin roles (global org) with permissions, indexed by role name.
// Two queries are issued inside one session: one for the role rows, one for all
// of their permissions, which are then attached to the matching RoleDTO.
func (s *AccessControlStore) LoadRoles(ctx context.Context) (map[string]*accesscontrol.RoleDTO, error) {
	out := map[string]*accesscontrol.RoleDTO{}
	err := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
		// Row shape for the role table query below.
		type roleRow struct {
			ID          int64     `xorm:"id"`
			OrgID       int64     `xorm:"org_id"`
			Version     int64     `xorm:"version"`
			UID         string    `xorm:"uid"`
			Name        string    `xorm:"name"`
			DisplayName string    `xorm:"display_name"`
			Description string    `xorm:"description"`
			Group       string    `xorm:"group_name"`
			Hidden      bool      `xorm:"hidden"`
			Updated     time.Time `xorm:"updated"`
			Created     time.Time `xorm:"created"`
		}
		roles := []roleRow{}
		// Only fixed and plugin roles in the global org are loaded.
		if err := sess.Table("role").
			Where("org_id = ?", accesscontrol.GlobalOrgID).
			Where("(name LIKE ? OR name LIKE ?)", accesscontrol.FixedRolePrefix+"%", accesscontrol.PluginRolePrefix+"%").
			Find(&roles); err != nil {
			return err
		}
		if len(roles) == 0 {
			return nil
		}
		// Index DTOs by name (result map) and by ID (to attach permissions below).
		roleIDs := make([]any, 0, len(roles))
		roleByID := make(map[int64]*accesscontrol.RoleDTO, len(roles))
		for _, r := range roles {
			dto := &accesscontrol.RoleDTO{
				ID:          r.ID,
				OrgID:       r.OrgID,
				Version:     r.Version,
				UID:         r.UID,
				Name:        r.Name,
				DisplayName: r.DisplayName,
				Description: r.Description,
				Group:       r.Group,
				Hidden:      r.Hidden,
				Updated:     r.Updated,
				Created:     r.Created,
			}
			out[dto.Name] = dto
			roleByID[dto.ID] = dto
			roleIDs = append(roleIDs, dto.ID)
		}
		// Row shape for the permission table query below.
		type permRow struct {
			RoleID int64  `xorm:"role_id"`
			Action string `xorm:"action"`
			Scope  string `xorm:"scope"`
		}
		perms := []permRow{}
		// Fetch permissions for all loaded roles in a single query.
		if err := sess.Table("permission").In("role_id", roleIDs...).Find(&perms); err != nil {
			return err
		}
		for _, p := range perms {
			dto := roleByID[p.RoleID]
			if dto == nil {
				continue
			}
			dto.Permissions = append(dto.Permissions, accesscontrol.Permission{
				RoleID: p.RoleID,
				Action: p.Action,
				Scope:  p.Scope,
			})
		}
		return nil
	})
	return out, err
}
// SetRole updates the mutable metadata (display name, description, group,
// hidden flag) of an existing global-org role to match wantedRole.
// A nil existingRole is a no-op; permissions are handled by SetPermissions.
func (s *AccessControlStore) SetRole(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
	if existingRole == nil {
		return nil
	}
	return s.sql.WithDbSession(ctx, func(sess *db.Session) error {
		_, err := sess.Table("role").
			Where("id = ? AND org_id = ?", existingRole.ID, accesscontrol.GlobalOrgID).
			Update(map[string]any{
				"display_name": wantedRole.DisplayName,
				"description":  wantedRole.Description,
				"group_name":   wantedRole.Group,
				"hidden":       wantedRole.Hidden,
				"updated":      time.Now(),
			})
		return err
	})
}
// SetPermissions reconciles the stored permissions of existingRole to match
// wantedRole.Permissions. It diffs on (action, scope), then removes obsolete
// tuples and inserts missing ones in a single transaction.
// A nil existingRole is a no-op.
func (s *AccessControlStore) SetPermissions(ctx context.Context, existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) error {
	if existingRole == nil {
		return nil
	}
	// Permissions are identified by the (action, scope) pair.
	type key struct{ Action, Scope string }
	existing := map[key]struct{}{}
	for _, p := range existingRole.Permissions {
		existing[key{p.Action, p.Scope}] = struct{}{}
	}
	desired := map[key]struct{}{}
	for _, p := range wantedRole.Permissions {
		desired[key{p.Action, p.Scope}] = struct{}{}
	}
	toAdd := make([]accesscontrol.Permission, 0)
	toRemove := make([]accesscontrol.SeedPermission, 0)
	now := time.Now()
	// Desired but not existing -> insert.
	for k := range desired {
		if _, ok := existing[k]; ok {
			continue
		}
		perm := accesscontrol.Permission{
			RoleID:  existingRole.ID,
			Action:  k.Action,
			Scope:   k.Scope,
			Created: now,
			Updated: now,
		}
		// Denormalized scope parts stored alongside the raw scope.
		perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
		toAdd = append(toAdd, perm)
	}
	// Existing but not desired -> delete.
	for k := range existing {
		if _, ok := desired[k]; ok {
			continue
		}
		toRemove = append(toRemove, accesscontrol.SeedPermission{Action: k.Action, Scope: k.Scope})
	}
	if len(toAdd) == 0 && len(toRemove) == 0 {
		return nil
	}
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		if len(toRemove) > 0 {
			if err := DeleteRolePermissionTuples(sess, s.sql.GetDBType(), existingRole.ID, toRemove); err != nil {
				return err
			}
		}
		if len(toAdd) > 0 {
			_, err := sess.InsertMulti(toAdd)
			return err
		}
		return nil
	})
}
// CreateRole inserts a new global-org role and its permissions in one
// transaction. Fixed and plugin roles without an explicit UID get a
// deterministic UID derived from the role name. Version defaults to 1.
func (s *AccessControlStore) CreateRole(ctx context.Context, role accesscontrol.RoleDTO) error {
	now := time.Now()
	uid := role.UID
	if uid == "" && (strings.HasPrefix(role.Name, accesscontrol.FixedRolePrefix) || strings.HasPrefix(role.Name, accesscontrol.PluginRolePrefix)) {
		uid = accesscontrol.PrefixedRoleUID(role.Name)
	}
	r := accesscontrol.Role{
		OrgID:       accesscontrol.GlobalOrgID,
		Version:     role.Version,
		UID:         uid,
		Name:        role.Name,
		DisplayName: role.DisplayName,
		Description: role.Description,
		Group:       role.Group,
		Hidden:      role.Hidden,
		Created:     now,
		Updated:     now,
	}
	if r.Version == 0 {
		r.Version = 1
	}
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		if _, err := sess.Insert(&r); err != nil {
			return err
		}
		if len(role.Permissions) == 0 {
			return nil
		}
		// De-duplicate permissions on (action, scope) to avoid unique constraint violations.
		// Some role definitions may accidentally include duplicates.
		type permKey struct{ Action, Scope string }
		seen := make(map[permKey]struct{}, len(role.Permissions))
		perms := make([]accesscontrol.Permission, 0, len(role.Permissions))
		for _, p := range role.Permissions {
			k := permKey{Action: p.Action, Scope: p.Scope}
			if _, ok := seen[k]; ok {
				continue
			}
			seen[k] = struct{}{}
			perm := accesscontrol.Permission{
				RoleID:  r.ID, // populated by the Insert above
				Action:  p.Action,
				Scope:   p.Scope,
				Created: now,
				Updated: now,
			}
			perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
			perms = append(perms, perm)
		}
		_, err := sess.InsertMulti(perms)
		return err
	})
}
// DeleteRoles removes the global-org roles identified by roleUIDs together with
// every row that references them (permissions and user/team/builtin role
// assignments), all in one transaction. Unknown UIDs are ignored; an empty
// slice is a no-op.
func (s *AccessControlStore) DeleteRoles(ctx context.Context, roleUIDs []string) error {
	if len(roleUIDs) == 0 {
		return nil
	}
	uids := make([]any, 0, len(roleUIDs))
	for _, uid := range roleUIDs {
		uids = append(uids, uid)
	}
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		type row struct {
			ID  int64  `xorm:"id"`
			UID string `xorm:"uid"`
		}
		rows := []row{}
		if err := sess.Table("role").
			Where("org_id = ?", accesscontrol.GlobalOrgID).
			In("uid", uids...).
			Find(&rows); err != nil {
			return err
		}
		if len(rows) == 0 {
			// None of the requested UIDs exist; nothing to delete.
			return nil
		}
		roleIDs := make([]any, 0, len(rows))
		for _, r := range rows {
			roleIDs = append(roleIDs, r.ID)
		}
		// Remove permissions and assignments first to avoid FK issues (if enabled).
		// All four dependent tables are filtered by role_id, so the placeholder
		// list can be built once and shared.
		in := "(?" + strings.Repeat(",?", len(roleIDs)-1) + ")"
		for _, table := range []string{"permission", "user_role", "team_role", "builtin_role"} {
			args := append([]any{"DELETE FROM " + table + " WHERE role_id IN " + in}, roleIDs...)
			if _, err := sess.Exec(args...); err != nil {
				return err
			}
		}
		args := append([]any{"DELETE FROM role WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")", accesscontrol.GlobalOrgID}, uids...)
		_, err := sess.Exec(args...)
		return err
	})
}
// OSS basic-role permission refresh uses seeding.Seeder.Seed() with a desired set computed in memory.
// These methods implement the permission seeding part of seeding.SeedingBackend against the current permission table.
// LoadPrevious returns the set of basic-role permissions currently persisted,
// keyed as SeedPermission with Origin cleared.
func (s *AccessControlStore) LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
	var seeded map[accesscontrol.SeedPermission]struct{}
	dbErr := s.sql.WithDbSession(ctx, func(sess *db.Session) error {
		perms, err := LoadBasicRoleSeedPermissions(sess)
		if err != nil {
			return err
		}
		seeded = make(map[accesscontrol.SeedPermission]struct{}, len(perms))
		for _, p := range perms {
			// Origin is not part of the identity used by the seeding diff.
			p.Origin = ""
			seeded[p] = struct{}{}
		}
		return nil
	})
	return seeded, dbErr
}
// Apply persists a basic-role permission diff (added/removed/updated) inside a
// single transaction, first ensuring the built-in basic roles exist, then
// delegating the tuple changes to ossBasicRoleSeedBackend.Apply, and finally
// bumping the version of every basic role that was touched.
func (s *AccessControlStore) Apply(ctx context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
	rolesToUpgrade := seeding.RolesToUpgrade(added, removed)
	// Run the same OSS apply logic as ossBasicRoleSeedBackend.Apply inside a single transaction.
	return s.sql.WithTransactionalDbSession(ctx, func(sess *db.Session) error {
		defs := accesscontrol.BuildBasicRoleDefinitions()
		builtinToRoleID, err := EnsureBasicRolesExist(sess, defs)
		if err != nil {
			return err
		}
		backend := &ossBasicRoleSeedBackend{
			sess:            sess,
			now:             time.Now(),
			builtinToRoleID: builtinToRoleID,
			desired:         nil, // not used by Apply; only LoadDesired reads it
			dbType:          s.sql.GetDBType(),
		}
		if err := backend.Apply(ctx, added, removed, updated); err != nil {
			return err
		}
		return BumpBasicRoleVersions(sess, rolesToUpgrade)
	})
}
// EnsureBasicRolesExist ensures the built-in basic roles exist in the role table and are bound in builtin_role.
// It returns a mapping from builtin role name (for example "Admin") to role ID.
func EnsureBasicRolesExist(sess *db.Session, defs map[string]*accesscontrol.RoleDTO) (map[string]int64, error) {
	// Index definitions by UID so existing role rows can be matched back.
	uidToBuiltin := make(map[string]string, len(defs))
	uids := make([]any, 0, len(defs))
	for builtin, def := range defs {
		uidToBuiltin[def.UID] = builtin
		uids = append(uids, def.UID)
	}
	type roleRow struct {
		ID  int64  `xorm:"id"`
		UID string `xorm:"uid"`
	}
	rows := []roleRow{}
	if err := sess.Table("role").
		Where("org_id = ?", accesscontrol.GlobalOrgID).
		In("uid", uids...).
		Find(&rows); err != nil {
		return nil, err
	}
	ts := time.Now()
	builtinToRoleID := make(map[string]int64, len(defs))
	// Record the IDs of roles that already exist.
	for _, r := range rows {
		br, ok := uidToBuiltin[r.UID]
		if !ok {
			continue
		}
		builtinToRoleID[br] = r.ID
	}
	for builtin, def := range defs {
		roleID, ok := builtinToRoleID[builtin]
		if !ok {
			// Missing role: insert it and capture the generated ID.
			role := accesscontrol.Role{
				OrgID:       def.OrgID,
				Version:     def.Version,
				UID:         def.UID,
				Name:        def.Name,
				DisplayName: def.DisplayName,
				Description: def.Description,
				Group:       def.Group,
				Hidden:      def.Hidden,
				Created:     ts,
				Updated:     ts,
			}
			if _, err := sess.Insert(&role); err != nil {
				return nil, err
			}
			roleID = role.ID
			builtinToRoleID[builtin] = roleID
		}
		// Ensure the builtin_role binding exists for this role.
		has, err := sess.Table("builtin_role").
			Where("role_id = ? AND role = ? AND org_id = ?", roleID, builtin, accesscontrol.GlobalOrgID).
			Exist()
		if err != nil {
			return nil, err
		}
		if !has {
			br := accesscontrol.BuiltinRole{
				RoleID:  roleID,
				OrgID:   accesscontrol.GlobalOrgID,
				Role:    builtin,
				Created: ts,
				Updated: ts,
			}
			if _, err := sess.Table("builtin_role").Insert(&br); err != nil {
				return nil, err
			}
		}
	}
	return builtinToRoleID, nil
}
// DeleteRolePermissionTuples deletes permissions for a single role by (action, scope) pairs.
//
// It uses a row-constructor IN clause where supported (MySQL, Postgres, SQLite) and falls back
// to a WHERE ... OR ... form for MSSQL.
func DeleteRolePermissionTuples(sess *db.Session, dbType core.DbType, roleID int64, perms []accesscontrol.SeedPermission) error {
	if len(perms) == 0 {
		return nil
	}
	if dbType == migrator.MSSQL {
		// MSSQL doesn't support (action, scope) IN ((?,?),(?,?)) row constructors.
		where := make([]string, 0, len(perms))
		args := make([]any, 0, 1+len(perms)*2)
		args = append(args, roleID)
		for _, p := range perms {
			where = append(where, "(action = ? AND scope = ?)")
			args = append(args, p.Action, p.Scope)
		}
		_, err := sess.Exec(
			append([]any{
				"DELETE FROM permission WHERE role_id = ? AND (" + strings.Join(where, " OR ") + ")",
			}, args...)...,
		)
		return err
	}
	// Row-constructor path: role_id first, then alternating action/scope args.
	args := make([]any, 0, 1+len(perms)*2)
	args = append(args, roleID)
	for _, p := range perms {
		args = append(args, p.Action, p.Scope)
	}
	sql := "DELETE FROM permission WHERE role_id = ? AND (action, scope) IN (" +
		strings.Repeat("(?, ?),", len(perms)-1) + "(?, ?))"
	_, err := sess.Exec(append([]any{sql}, args...)...)
	return err
}
// ossBasicRoleSeedBackend applies basic-role permission diffs against the
// permission table using an already-open session (so callers control the
// transaction boundary).
type ossBasicRoleSeedBackend struct {
	sess *db.Session // session/transaction all statements run in
	now  time.Time   // timestamp stamped on inserted permissions
	// builtinToRoleID maps builtin role names (e.g. "Admin") to role IDs.
	builtinToRoleID map[string]int64
	// desired is the in-memory desired set returned by LoadDesired.
	desired map[accesscontrol.SeedPermission]struct{}
	dbType  core.DbType // drives dialect-specific SQL (MSSQL/MySQL branches)
}
// LoadPrevious returns the basic-role permissions currently stored in the
// permission table, keyed as SeedPermission with Origin cleared.
func (b *ossBasicRoleSeedBackend) LoadPrevious(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
	perms, err := LoadBasicRoleSeedPermissions(b.sess)
	if err != nil {
		return nil, err
	}
	previous := make(map[accesscontrol.SeedPermission]struct{}, len(perms))
	for _, p := range perms {
		// Ensure the key matches what OSS seeding uses (Origin is always empty for basic role refresh).
		p.Origin = ""
		previous[p] = struct{}{}
	}
	return previous, nil
}
// LoadDesired returns the in-memory desired permission set supplied at
// construction time; it never touches the database.
func (b *ossBasicRoleSeedBackend) LoadDesired(_ context.Context) (map[accesscontrol.SeedPermission]struct{}, error) {
	return b.desired, nil
}
// Apply writes a permission diff to the permission table: removed tuples are
// deleted per role in bounded batches, then added and updated-target tuples are
// de-duplicated and inserted (with INSERT IGNORE on MySQL for idempotency).
// Tuples whose BuiltInRole has no known role ID are silently skipped.
func (b *ossBasicRoleSeedBackend) Apply(_ context.Context, added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) error {
	// Delete removed permissions (this includes user-defined permissions that aren't in desired).
	if len(removed) > 0 {
		permsByRoleID := map[int64][]accesscontrol.SeedPermission{}
		for _, p := range removed {
			roleID, ok := b.builtinToRoleID[p.BuiltInRole]
			if !ok {
				continue
			}
			permsByRoleID[roleID] = append(permsByRoleID[roleID], p)
		}
		for roleID, perms := range permsByRoleID {
			// Chunk to keep statement sizes and parameter counts bounded.
			if err := batch(len(perms), basicRolePermBatchSize, func(start, end int) error {
				return DeleteRolePermissionTuples(b.sess, b.dbType, roleID, perms[start:end])
			}); err != nil {
				return err
			}
		}
	}
	// Insert added permissions and updated-target permissions.
	toInsertSeed := make([]accesscontrol.SeedPermission, 0, len(added)+len(updated))
	toInsertSeed = append(toInsertSeed, added...)
	for _, v := range updated {
		toInsertSeed = append(toInsertSeed, v)
	}
	if len(toInsertSeed) == 0 {
		return nil
	}
	// De-duplicate on (role_id, action, scope). This avoids unique constraint violations when:
	// - the same permission appears in both added and updated
	// - multiple plugin origins grant the same permission (Origin is not persisted in permission table)
	type permKey struct {
		RoleID int64
		Action string
		Scope  string
	}
	seen := make(map[permKey]struct{}, len(toInsertSeed))
	toInsert := make([]accesscontrol.Permission, 0, len(toInsertSeed))
	for _, p := range toInsertSeed {
		roleID, ok := b.builtinToRoleID[p.BuiltInRole]
		if !ok {
			continue
		}
		k := permKey{RoleID: roleID, Action: p.Action, Scope: p.Scope}
		if _, ok := seen[k]; ok {
			continue
		}
		seen[k] = struct{}{}
		perm := accesscontrol.Permission{
			RoleID:  roleID,
			Action:  p.Action,
			Scope:   p.Scope,
			Created: b.now,
			Updated: b.now,
		}
		perm.Kind, perm.Attribute, perm.Identifier = accesscontrol.SplitScope(perm.Scope)
		toInsert = append(toInsert, perm)
	}
	return batch(len(toInsert), basicRolePermBatchSize, func(start, end int) error {
		// MySQL: ignore conflicts to make seeding idempotent under retries/concurrency.
		// Conflicts can happen if the same permission already exists (unique on role_id, action, scope).
		if b.dbType == migrator.MySQL {
			args := make([]any, 0, (end-start)*8)
			for i := start; i < end; i++ {
				p := toInsert[i]
				args = append(args, p.RoleID, p.Action, p.Scope, p.Kind, p.Attribute, p.Identifier, p.Updated, p.Created)
			}
			sql := append([]any{`INSERT IGNORE INTO permission (role_id, action, scope, kind, attribute, identifier, updated, created) VALUES ` +
				strings.Repeat("(?, ?, ?, ?, ?, ?, ?, ?),", end-start-1) + "(?, ?, ?, ?, ?, ?, ?, ?)"}, args...)
			_, err := b.sess.Exec(sql...)
			return err
		}
		_, err := b.sess.InsertMulti(toInsert[start:end])
		return err
	})
}
func batch(count, size int, eachFn func(start, end int) error) error {
for i := 0; i < count; {
end := i + size
if end > count {
end = count
}
if err := eachFn(i, end); err != nil {
return err
}
i = end
}
return nil
}
// BumpBasicRoleVersions increments the role version for the given builtin basic roles (Viewer/Editor/Admin/Grafana Admin).
// Unknown role names are ignored.
func BumpBasicRoleVersions(sess *db.Session, basicRoles []string) error {
	if len(basicRoles) == 0 {
		return nil
	}
	defs := accesscontrol.BuildBasicRoleDefinitions()
	// Translate builtin role names to their canonical role UIDs.
	uids := make([]any, 0, len(basicRoles))
	for _, br := range basicRoles {
		def, ok := defs[br]
		if !ok {
			continue
		}
		uids = append(uids, def.UID)
	}
	if len(uids) == 0 {
		return nil
	}
	// Single UPDATE with a placeholder per UID.
	sql := "UPDATE role SET version = version + 1 WHERE org_id = ? AND uid IN (?" + strings.Repeat(",?", len(uids)-1) + ")"
	_, err := sess.Exec(append([]any{sql, accesscontrol.GlobalOrgID}, uids...)...)
	return err
}
// LoadBasicRoleSeedPermissions returns the current (builtin_role, action, scope) permissions granted to basic roles.
// It sets Origin to empty.
// The role's display_name is used as the builtin role name in the result.
func LoadBasicRoleSeedPermissions(sess *db.Session) ([]accesscontrol.SeedPermission, error) {
	rows := []accesscontrol.SeedPermission{}
	err := sess.SQL(
		`SELECT role.display_name AS builtin_role, p.action, p.scope, '' AS origin
		FROM role INNER JOIN permission AS p ON p.role_id = role.id
		WHERE role.org_id = ? AND role.name LIKE 'basic:%'`,
		accesscontrol.GlobalOrgID,
	).Find(&rows)
	return rows, err
}
@@ -15,7 +15,6 @@ import (
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/serverlock"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/authz/zanzana"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/services/folder"
@@ -131,9 +130,6 @@ func (r *ZanzanaReconciler) Run(ctx context.Context) error {
// Reconcile schedules as job that will run and reconcile resources between
// legacy access control and zanzana.
func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
// Ensure we don't reconcile an empty/partial RBAC state before OSS has seeded basic role permissions.
// This matters most during startup where fixed-role loading + basic-role permission refresh runs as another background service.
r.waitForBasicRolesSeeded(ctx)
r.reconcile(ctx)
// FIXME:
@@ -149,57 +145,6 @@ func (r *ZanzanaReconciler) Reconcile(ctx context.Context) error {
}
}
// hasBasicRolePermissions reports whether any permission rows exist for
// "basic:" roles in the global org. Database errors are deliberately swallowed
// (best-effort: the caller treats the result as an observation, not a gate).
func (r *ZanzanaReconciler) hasBasicRolePermissions(ctx context.Context) bool {
	var count int64
	// Basic role permissions are stored on "basic:%" roles in the global org (0).
	// In a fresh DB, this will be empty until fixed roles are registered and the basic role permission refresh runs.
	type row struct {
		Count int64 `xorm:"count"`
	}
	_ = r.store.WithDbSession(ctx, func(sess *db.Session) error {
		var rr row
		_, err := sess.SQL(
			`SELECT COUNT(*) AS count
			FROM role INNER JOIN permission AS p ON p.role_id = role.id
			WHERE role.org_id = ? AND role.name LIKE ?`,
			accesscontrol.GlobalOrgID,
			accesscontrol.BasicRolePrefix+"%",
		).Get(&rr)
		if err != nil {
			return err
		}
		count = rr.Count
		return nil
	})
	return count > 0
}
// waitForBasicRolesSeeded polls once per second until basic-role permissions
// are observed in the database, giving up after a fixed deadline, on context
// cancellation, or as soon as a poll succeeds.
func (r *ZanzanaReconciler) waitForBasicRolesSeeded(ctx context.Context) {
	// Best-effort: don't block forever. If we can't observe basic roles, proceed anyway.
	const (
		maxWait  = 15 * time.Second
		interval = 1 * time.Second
	)
	timeout := time.NewTimer(maxWait)
	defer timeout.Stop()
	poll := time.NewTicker(interval)
	defer poll.Stop()
	for !r.hasBasicRolePermissions(ctx) {
		select {
		case <-ctx.Done():
			return
		case <-timeout.C:
			return
		case <-poll.C:
			// Next poll iteration.
		}
	}
}
func (r *ZanzanaReconciler) reconcile(ctx context.Context) {
run := func(ctx context.Context, namespace string) (ok bool) {
now := time.Now()
@@ -1,67 +0,0 @@
package dualwrite
import (
"context"
"testing"
"time"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/services/accesscontrol"
)
// TestZanzanaReconciler_hasBasicRolePermissions verifies that the check is
// false on an empty database and true once a "basic:" role with at least one
// permission row has been inserted.
func TestZanzanaReconciler_hasBasicRolePermissions(t *testing.T) {
	env := setupTestEnv(t)
	r := &ZanzanaReconciler{
		store: env.db,
	}
	ctx := context.Background()
	// Fresh DB: no basic-role permissions yet.
	require.False(t, r.hasBasicRolePermissions(ctx))
	// Insert a basic viewer role plus one permission row directly via SQL.
	err := env.db.WithDbSession(ctx, func(sess *db.Session) error {
		now := time.Now()
		_, err := sess.Exec(
			`INSERT INTO role (org_id, uid, name, display_name, group_name, description, hidden, version, created, updated)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
			accesscontrol.GlobalOrgID,
			"basic_viewer_uid_test",
			accesscontrol.BasicRolePrefix+"viewer",
			"Viewer",
			"Basic",
			"Viewer role",
			false,
			1,
			now,
			now,
		)
		if err != nil {
			return err
		}
		// Look up the generated role ID to attach the permission to it.
		var roleID int64
		if _, err := sess.SQL(`SELECT id FROM role WHERE org_id = ? AND uid = ?`, accesscontrol.GlobalOrgID, "basic_viewer_uid_test").Get(&roleID); err != nil {
			return err
		}
		_, err = sess.Exec(
			`INSERT INTO permission (role_id, action, scope, kind, attribute, identifier, created, updated)
			VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
			roleID,
			"dashboards:read",
			"dashboards:*",
			"",
			"",
			"",
			now,
			now,
		)
		return err
	})
	require.NoError(t, err)
	require.True(t, r.hasBasicRolePermissions(ctx))
}
-16
View File
@@ -1,7 +1,6 @@
package accesscontrol
import (
"context"
"encoding/json"
"errors"
"fmt"
@@ -595,18 +594,3 @@ type QueryWithOrg struct {
OrgId *int64 `json:"orgId"`
Global bool `json:"global"`
}
// SeedPermission identifies a single seeded permission tuple granted to a
// built-in role.
type SeedPermission struct {
	// BuiltInRole is the basic role the permission is granted to (e.g. "Viewer").
	BuiltInRole string `xorm:"builtin_role"`
	Action      string `xorm:"action"`
	Scope       string `xorm:"scope"`
	// Origin appears to identify the registering plugin; empty for core
	// permissions — NOTE(review): confirm against the seeding package's usage.
	Origin string `xorm:"origin"`
}
// RoleStore abstracts persistence of roles and their permissions for seeding.
type RoleStore interface {
	// LoadRoles returns stored roles with permissions, indexed by role name.
	LoadRoles(ctx context.Context) (map[string]*RoleDTO, error)
	// SetRole updates the metadata of existingRole to match wantedRole.
	SetRole(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
	// SetPermissions reconciles existingRole's permissions to wantedRole's.
	SetPermissions(ctx context.Context, existingRole *RoleDTO, wantedRole RoleDTO) error
	// CreateRole inserts a new role together with its permissions.
	CreateRole(ctx context.Context, role RoleDTO) error
	// DeleteRoles removes the roles identified by the given UIDs.
	DeleteRoles(ctx context.Context, roleUIDs []string) error
}
@@ -1,452 +0,0 @@
package seeding
import (
"context"
"fmt"
"regexp"
"slices"
"strings"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/accesscontrol/pluginutils"
"github.com/grafana/grafana/pkg/services/pluginsintegration/pluginaccesscontrol"
)
// Seeder reconciles role definitions and built-in-role permissions against the
// store, tracking which fixed/plugin roles registered during the current run.
type Seeder struct {
	log       log.Logger
	roleStore accesscontrol.RoleStore
	backend   SeedingBackend
	// builtinsPermissions is the desired permission set applied by Seed().
	builtinsPermissions map[accesscontrol.SeedPermission]struct{}
	// seededFixedRoles / seededPluginRoles record role names seen in SeedRoles.
	seededFixedRoles  map[string]bool
	seededPluginRoles map[string]bool
	// seededPlugins records plugin IDs that registered roles this run, so
	// permissions of plugins that failed to load are not removed.
	seededPlugins    map[string]bool
	hasSeededAlready bool
}
// SeedingBackend provides the seed-set specific operations needed to seed.
// Implementations persist the permission diff computed by Seeder.Seed.
type SeedingBackend interface {
	// LoadPrevious returns the currently stored permissions for previously seeded roles.
	LoadPrevious(ctx context.Context) (map[accesscontrol.SeedPermission]struct{}, error)
	// Apply updates the database to match the desired permissions.
	Apply(ctx context.Context,
		added, removed []accesscontrol.SeedPermission,
		updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission,
	) error
}
// New returns a Seeder wired to the given logger, role store, and backend,
// with all tracking state initialized to empty.
func New(log log.Logger, roleStore accesscontrol.RoleStore, backend SeedingBackend) *Seeder {
	s := &Seeder{
		log:       log,
		roleStore: roleStore,
		backend:   backend,
		// hasSeededAlready starts false (zero value).
	}
	s.builtinsPermissions = map[accesscontrol.SeedPermission]struct{}{}
	s.seededFixedRoles = map[string]bool{}
	s.seededPluginRoles = map[string]bool{}
	s.seededPlugins = map[string]bool{}
	return s
}
// SetDesiredPermissions replaces the in-memory desired permission set used by
// Seed(). Passing nil resets it to an empty (non-nil) set.
func (s *Seeder) SetDesiredPermissions(desired map[accesscontrol.SeedPermission]struct{}) {
	if desired != nil {
		s.builtinsPermissions = desired
		return
	}
	s.builtinsPermissions = map[accesscontrol.SeedPermission]struct{}{}
}
// Seed loads current and desired permissions, diffs them (including scope updates), applies changes, and bumps versions.
func (s *Seeder) Seed(ctx context.Context) error {
	previous, err := s.backend.LoadPrevious(ctx)
	if err != nil {
		return err
	}
	// Filter the previous set before diffing:
	// - Do not remove plugin permissions when the plugin didn't register this run (Origin set but not in seededPlugins).
	// - Preserve legacy plugin app access permissions in the persisted seed set (these are granted by default).
	if len(previous) > 0 {
		filtered := make(map[accesscontrol.SeedPermission]struct{}, len(previous))
		for p := range previous {
			// Legacy plugin app access permissions (Origin set) are granted by default and managed outside seeding.
			// Keep them out of the diff so seeding doesn't try to remove or "re-add" them on every run.
			if p.Action == pluginaccesscontrol.ActionAppAccess && p.Origin != "" {
				continue
			}
			// Plugin didn't register this run: leave its permissions untouched.
			if p.Origin != "" && !s.seededPlugins[p.Origin] {
				continue
			}
			filtered[p] = struct{}{}
		}
		previous = filtered
	}
	added, removed, updated := s.permissionDiff(previous, s.builtinsPermissions)
	if err := s.backend.Apply(ctx, added, removed, updated); err != nil {
		return err
	}
	return nil
}
// SeedRoles populates the database with the roles and their assignments
// It will create roles that do not exist and update roles that have changed
// Do not use for provisioning. Validation is not enforced.
func (s *Seeder) SeedRoles(ctx context.Context, registrationList []accesscontrol.RoleRegistration) error {
	roleMap, err := s.roleStore.LoadRoles(ctx)
	if err != nil {
		return err
	}
	missingRoles := make([]accesscontrol.RoleRegistration, 0, len(registrationList))
	// Diff existing roles with the ones we want to seed.
	// If a role is missing, we add it to the missingRoles list
	for _, registration := range registrationList {
		registration := registration // per-iteration copy (pre-Go 1.22 loop semantics)
		role, ok := roleMap[registration.Role.Name]
		// Track which fixed/plugin roles registered during this run.
		switch {
		case registration.Role.IsFixed():
			s.seededFixedRoles[registration.Role.Name] = true
		case registration.Role.IsPlugin():
			s.seededPluginRoles[registration.Role.Name] = true
			// To be resilient to failed plugin loadings, we remember the plugins that have registered,
			// later we'll ignore permissions and roles of other plugins
			s.seededPlugins[pluginutils.PluginIDFromName(registration.Role.Name)] = true
		}
		s.rememberPermissionAssignments(&registration.Role, registration.Grants, registration.Exclude)
		if !ok {
			missingRoles = append(missingRoles, registration)
			continue
		}
		// Existing role: update metadata and/or permissions only when changed.
		if needsRoleUpdate(role, registration.Role) {
			if err := s.roleStore.SetRole(ctx, role, registration.Role); err != nil {
				return err
			}
		}
		if needsPermissionsUpdate(role, registration.Role) {
			if err := s.roleStore.SetPermissions(ctx, role, registration.Role); err != nil {
				return err
			}
		}
	}
	// Create roles that didn't exist yet.
	for _, registration := range missingRoles {
		if err := s.roleStore.CreateRole(ctx, registration.Role); err != nil {
			return err
		}
	}
	return nil
}
// needsPermissionsUpdate reports whether wantedRole's permission set differs
// from existingRole's, comparing permissions by (Action, Scope) only.
// A nil existingRole or a length mismatch always requires an update.
func needsPermissionsUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
	if existingRole == nil {
		return true
	}
	if len(existingRole.Permissions) != len(wantedRole.Permissions) {
		return true
	}
	// Index existing permissions by (action, scope) so the comparison is O(n)
	// instead of the previous O(n²) nested scan. Only Action and Scope matter
	// here, so use a dedicated key type rather than the full Permission struct.
	type actionScope struct {
		action string
		scope  string
	}
	existing := make(map[actionScope]struct{}, len(existingRole.Permissions))
	for _, ep := range existingRole.Permissions {
		existing[actionScope{action: ep.Action, scope: ep.Scope}] = struct{}{}
	}
	for _, p := range wantedRole.Permissions {
		if _, ok := existing[actionScope{action: p.Action, scope: p.Scope}]; !ok {
			return true
		}
	}
	return false
}
// needsRoleUpdate reports whether the stored role's metadata must be refreshed
// to match wantedRole. A nil existingRole always needs creation/update; a name
// mismatch means the roles are unrelated, so no update is performed.
func needsRoleUpdate(existingRole *accesscontrol.RoleDTO, wantedRole accesscontrol.RoleDTO) bool {
	switch {
	case existingRole == nil:
		return true
	case existingRole.Name != wantedRole.Name:
		// Different role entirely; not an update candidate.
		return false
	default:
		// Any metadata difference triggers an update.
		return existingRole.DisplayName != wantedRole.DisplayName ||
			existingRole.Description != wantedRole.Description ||
			existingRole.Group != wantedRole.Group ||
			existingRole.Hidden != wantedRole.Hidden
	}
}
// Deprecated: SeedRole is deprecated and should not be used.
// SeedRoles only does boot up seeding and should not be used for runtime seeding.
func (s *Seeder) SeedRole(ctx context.Context, role accesscontrol.RoleDTO, builtInRoles []string) error {
	// Deduplicate permissions by (action, scope), preserving first-seen order.
	addedPermissions := make(map[string]struct{}, len(role.Permissions))
	permissions := make([]accesscontrol.Permission, 0, len(role.Permissions))
	for _, p := range role.Permissions {
		key := fmt.Sprintf("%s:%s", p.Action, p.Scope)
		if _, ok := addedPermissions[key]; !ok {
			addedPermissions[key] = struct{}{}
			permissions = append(permissions, accesscontrol.Permission{Action: p.Action, Scope: p.Scope})
		}
	}
	// Normalize the incoming role into the global-org form that gets stored.
	wantedRole := accesscontrol.RoleDTO{
		OrgID:       accesscontrol.GlobalOrgID,
		Version:     role.Version,
		UID:         role.UID,
		Name:        role.Name,
		DisplayName: role.DisplayName,
		Description: role.Description,
		Group:       role.Group,
		Permissions: permissions,
		Hidden:      role.Hidden,
	}
	roleMap, err := s.roleStore.LoadRoles(ctx)
	if err != nil {
		return err
	}
	existingRole := roleMap[wantedRole.Name]
	if existingRole == nil {
		if err := s.roleStore.CreateRole(ctx, wantedRole); err != nil {
			return err
		}
	} else {
		// Role already exists: update metadata and/or permissions only when changed.
		if needsRoleUpdate(existingRole, wantedRole) {
			if err := s.roleStore.SetRole(ctx, existingRole, wantedRole); err != nil {
				return err
			}
		}
		if needsPermissionsUpdate(existingRole, wantedRole) {
			if err := s.roleStore.SetPermissions(ctx, existingRole, wantedRole); err != nil {
				return err
			}
		}
	}
	// Remember seeded roles
	if wantedRole.IsFixed() {
		s.seededFixedRoles[wantedRole.Name] = true
	}
	isPluginRole := wantedRole.IsPlugin()
	if isPluginRole {
		s.seededPluginRoles[wantedRole.Name] = true
		// To be resilient to failed plugin loadings, we remember the plugins that have registered,
		// later we'll ignore permissions and roles of other plugins
		s.seededPlugins[pluginutils.PluginIDFromName(role.Name)] = true
	}
	s.rememberPermissionAssignments(&wantedRole, builtInRoles, []string{})
	return nil
}
// rememberPermissionAssignments records the role's permissions as desired
// grants for the given basic roles (honoring exclusions) in the Seeder's
// in-memory set, via AppendDesiredPermissions.
func (s *Seeder) rememberPermissionAssignments(role *accesscontrol.RoleDTO, builtInRoles []string, excludedRoles []string) {
	AppendDesiredPermissions(s.builtinsPermissions, s.log, role, builtInRoles, excludedRoles)
}
// AppendDesiredPermissions accumulates permissions from a role registration onto basic roles (Viewer/Editor/Admin/Grafana Admin).
// - It expands parents via accesscontrol.BuiltInRolesWithParents.
// - It can optionally ignore plugin app access permissions (which are granted by default).
func AppendDesiredPermissions(
	out map[accesscontrol.SeedPermission]struct{},
	logger log.Logger,
	role *accesscontrol.RoleDTO,
	builtInRoles []string,
	excludedRoles []string,
) {
	if out == nil || role == nil {
		return
	}
	// Resolve plugin-related facts once; they do not change per iteration.
	isPlugin := role.IsPlugin()
	origin := ""
	if isPlugin {
		origin = pluginutils.PluginIDFromName(role.Name)
	}
	for builtInRole := range accesscontrol.BuiltInRolesWithParents(builtInRoles) {
		// Skip excluded grants
		if slices.Contains(excludedRoles, builtInRole) {
			continue
		}
		for _, perm := range role.Permissions {
			if isPlugin && perm.Action == pluginaccesscontrol.ActionAppAccess {
				logger.Debug("Role is attempting to grant access permission, but this permission is already granted by default and will be ignored",
					"role", role.Name, "permission", perm.Action, "scope", perm.Scope)
				continue
			}
			out[accesscontrol.SeedPermission{
				BuiltInRole: builtInRole,
				Action:      perm.Action,
				Scope:       perm.Scope,
				Origin:      origin, // empty for non-plugin roles, matching the zero value
			}] = struct{}{}
		}
	}
}
// permissionDiff returns:
// - added: present in desired permissions, not in previous permissions
// - removed: present in previous permissions, not in desired permissions
// - updated: same role + action, but scope changed
//
// NOTE: this method mutates `previous` in place (entries present in both sets
// are deleted from it); callers must not reuse the map afterwards.
func (s *Seeder) permissionDiff(previous, desired map[accesscontrol.SeedPermission]struct{}) (added, removed []accesscontrol.SeedPermission, updated map[accesscontrol.SeedPermission]accesscontrol.SeedPermission) {
	addedSet := make(map[accesscontrol.SeedPermission]struct{}, 0)
	for n := range desired {
		if _, already := previous[n]; !already {
			addedSet[n] = struct{}{}
		} else {
			// Present in both sets: unchanged. Drop it from `previous` so that
			// only removal candidates remain there.
			delete(previous, n)
		}
	}
	// Check if any of the new permissions is actually an old permission with an updated scope
	updated = make(map[accesscontrol.SeedPermission]accesscontrol.SeedPermission, 0)
	for n := range addedSet {
		for p := range previous {
			if n.BuiltInRole == p.BuiltInRole && n.Action == p.Action {
				// Deleting during range is legal in Go; `n` stays bound for this iteration.
				// NOTE(review): `p` is left in `previous` and so will also appear in
				// `removed` below — confirm backend.Apply expects updated entries there too.
				updated[p] = n
				delete(addedSet, n)
			}
		}
	}
	for p := range addedSet {
		added = append(added, p)
	}
	for p := range previous {
		if p.Action == pluginaccesscontrol.ActionAppAccess &&
			p.Scope != pluginaccesscontrol.ScopeProvider.GetResourceAllScope() {
			// Allows backward compatibility with plugins that have been seeded before the grant ignore rule was added
			s.log.Info("This permission already existed so it will not be removed",
				"role", p.BuiltInRole, "permission", p.Action, "scope", p.Scope)
			continue
		}
		removed = append(removed, p)
	}
	return added, removed, updated
}
// ClearBasicRolesPluginPermissions drops every desired permission that belongs
// to the given plugin ID from the in-memory builtins set.
func (s *Seeder) ClearBasicRolesPluginPermissions(ID string) {
	// Deleting entries while ranging over a map is safe in Go, so no
	// intermediate slice is needed.
	for perm := range s.builtinsPermissions {
		if matchPermissionByPluginID(perm, ID) {
			delete(s.builtinsPermissions, perm)
		}
	}
}
// matchPermissionByPluginID reports whether perm belongs to the given plugin:
// its Origin must equal pluginID and either its action contains the plugin ID
// followed by '.' or ':', or its scope ends with ":<pluginID>".
func matchPermissionByPluginID(perm accesscontrol.SeedPermission, pluginID string) bool {
	if perm.Origin != pluginID {
		return false
	}
	// The previous implementation compiled the regexp `<pluginID>[.:]` on every
	// call without escaping pluginID, so IDs containing regexp metacharacters
	// (e.g. '.') could match unintended actions. Literal substring checks are
	// equivalent for literal IDs, avoid the escaping pitfall, and skip the
	// per-call compile.
	if strings.Contains(perm.Action, pluginID+".") || strings.Contains(perm.Action, pluginID+":") {
		return true
	}
	return strings.HasSuffix(perm.Scope, ":"+pluginID)
}
// RolesToUpgrade returns the unique basic roles that should have their version incremented.
func RolesToUpgrade(added, removed []accesscontrol.SeedPermission) []string {
	seen := make(map[string]struct{}, len(added)+len(removed))
	roles := make([]string, 0, len(added)+len(removed))
	// Collect the distinct BuiltInRole values across both change sets.
	for _, list := range [][]accesscontrol.SeedPermission{added, removed} {
		for _, p := range list {
			if _, ok := seen[p.BuiltInRole]; ok {
				continue
			}
			seen[p.BuiltInRole] = struct{}{}
			roles = append(roles, p.BuiltInRole)
		}
	}
	return roles
}
// ClearPluginRoles forgets all seeded plugin roles that belong to the plugin
// with the given ID (role names of the form "<PluginRolePrefix><ID>:...").
func (s *Seeder) ClearPluginRoles(ID string) {
	prefix := accesscontrol.PluginRolePrefix + ID + ":"
	for name := range s.seededPluginRoles {
		if strings.HasPrefix(name, prefix) {
			delete(s.seededPluginRoles, name)
		}
	}
}
// MarkSeededAlready records that a seeding pass has already run in this process.
func (s *Seeder) MarkSeededAlready() {
	s.hasSeededAlready = true
}
// HasSeededAlready reports whether MarkSeededAlready has been called.
func (s *Seeder) HasSeededAlready() bool {
	return s.hasSeededAlready
}
// RemoveAbsentRoles deletes fixed and plugin roles from the store that were
// not registered during this run (as tracked by SeedRoles/SeedRole).
func (s *Seeder) RemoveAbsentRoles(ctx context.Context) error {
	roleMap, errGet := s.roleStore.LoadRoles(ctx)
	if errGet != nil {
		s.log.Error("failed to get fixed roles from store", "err", errGet)
		return errGet
	}
	toRemove := []string{}
	for _, r := range roleMap {
		if r == nil {
			continue
		}
		if r.IsFixed() {
			// Fixed role present in the store but not seeded this run: stale.
			if !s.seededFixedRoles[r.Name] {
				s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
			continue
		}
		if r.IsPlugin() {
			if !s.seededPlugins[pluginutils.PluginIDFromName(r.Name)] {
				// To be resilient to failed plugin loadings
				// ignore stored roles related to plugins that have not registered this time
				s.log.Debug("plugin role has not been registered on this run skipping its removal", "role", r.Name)
				continue
			}
			// The plugin did register this run, so any of its roles missing from
			// the seeded set is genuinely gone.
			if !s.seededPluginRoles[r.Name] {
				s.log.Info("role is not seeded anymore, mark it for deletion", "role", r.Name)
				toRemove = append(toRemove, r.UID)
			}
		}
	}
	// NOTE(review): DeleteRoles is invoked even when toRemove is empty —
	// presumably a no-op in the store; confirm before adding a guard.
	if errDelete := s.roleStore.DeleteRoles(ctx, toRemove); errDelete != nil {
		s.log.Error("failed to delete absent fixed and plugin roles", "err", errDelete)
		return errDelete
	}
	return nil
}
File diff suppressed because it is too large Load Diff
@@ -9,8 +9,6 @@ import "google/protobuf/timestamp.proto";
import "google/protobuf/wrappers.proto";
service AuthzExtentionService {
rpc BatchCheck(BatchCheckRequest) returns (BatchCheckResponse);
rpc Read(ReadRequest) returns (ReadResponse);
rpc Write(WriteRequest) returns (WriteResponse);
@@ -231,29 +229,6 @@ message WriteRequest {
message WriteResponse {}
message BatchCheckRequest {
string subject = 1;
string namespace = 2;
repeated BatchCheckItem items = 3;
}
message BatchCheckItem {
string verb = 1;
string group = 2;
string resource = 3;
string name = 4;
string subresource = 5;
string folder = 6;
}
message BatchCheckResponse {
map<string, BatchCheckGroupResource> groups = 1;
}
message BatchCheckGroupResource {
map<string, bool> items = 1;
}
message QueryRequest {
string namespace = 1;
QueryOperation operation = 2;
@@ -19,18 +19,16 @@ import (
const _ = grpc.SupportPackageIsVersion8
const (
AuthzExtentionService_BatchCheck_FullMethodName = "/authz.extention.v1.AuthzExtentionService/BatchCheck"
AuthzExtentionService_Read_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Read"
AuthzExtentionService_Write_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Write"
AuthzExtentionService_Mutate_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Mutate"
AuthzExtentionService_Query_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Query"
AuthzExtentionService_Read_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Read"
AuthzExtentionService_Write_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Write"
AuthzExtentionService_Mutate_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Mutate"
AuthzExtentionService_Query_FullMethodName = "/authz.extention.v1.AuthzExtentionService/Query"
)
// AuthzExtentionServiceClient is the client API for AuthzExtentionService service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://pkg.go.dev/google.golang.org/grpc/?tab=doc#ClientConn.NewStream.
type AuthzExtentionServiceClient interface {
BatchCheck(ctx context.Context, in *BatchCheckRequest, opts ...grpc.CallOption) (*BatchCheckResponse, error)
Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (*ReadResponse, error)
Write(ctx context.Context, in *WriteRequest, opts ...grpc.CallOption) (*WriteResponse, error)
Mutate(ctx context.Context, in *MutateRequest, opts ...grpc.CallOption) (*MutateResponse, error)
@@ -45,16 +43,6 @@ func NewAuthzExtentionServiceClient(cc grpc.ClientConnInterface) AuthzExtentionS
return &authzExtentionServiceClient{cc}
}
func (c *authzExtentionServiceClient) BatchCheck(ctx context.Context, in *BatchCheckRequest, opts ...grpc.CallOption) (*BatchCheckResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(BatchCheckResponse)
err := c.cc.Invoke(ctx, AuthzExtentionService_BatchCheck_FullMethodName, in, out, cOpts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *authzExtentionServiceClient) Read(ctx context.Context, in *ReadRequest, opts ...grpc.CallOption) (*ReadResponse, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
out := new(ReadResponse)
@@ -99,7 +87,6 @@ func (c *authzExtentionServiceClient) Query(ctx context.Context, in *QueryReques
// All implementations should embed UnimplementedAuthzExtentionServiceServer
// for forward compatibility
type AuthzExtentionServiceServer interface {
BatchCheck(context.Context, *BatchCheckRequest) (*BatchCheckResponse, error)
Read(context.Context, *ReadRequest) (*ReadResponse, error)
Write(context.Context, *WriteRequest) (*WriteResponse, error)
Mutate(context.Context, *MutateRequest) (*MutateResponse, error)
@@ -110,9 +97,6 @@ type AuthzExtentionServiceServer interface {
type UnimplementedAuthzExtentionServiceServer struct {
}
func (UnimplementedAuthzExtentionServiceServer) BatchCheck(context.Context, *BatchCheckRequest) (*BatchCheckResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method BatchCheck not implemented")
}
func (UnimplementedAuthzExtentionServiceServer) Read(context.Context, *ReadRequest) (*ReadResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Read not implemented")
}
@@ -137,24 +121,6 @@ func RegisterAuthzExtentionServiceServer(s grpc.ServiceRegistrar, srv AuthzExten
s.RegisterService(&AuthzExtentionService_ServiceDesc, srv)
}
func _AuthzExtentionService_BatchCheck_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(BatchCheckRequest)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(AuthzExtentionServiceServer).BatchCheck(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: AuthzExtentionService_BatchCheck_FullMethodName,
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(AuthzExtentionServiceServer).BatchCheck(ctx, req.(*BatchCheckRequest))
}
return interceptor(ctx, in, info, handler)
}
func _AuthzExtentionService_Read_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(ReadRequest)
if err := dec(in); err != nil {
@@ -234,10 +200,6 @@ var AuthzExtentionService_ServiceDesc = grpc.ServiceDesc{
ServiceName: "authz.extention.v1.AuthzExtentionService",
HandlerType: (*AuthzExtentionServiceServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "BatchCheck",
Handler: _AuthzExtentionService_BatchCheck_Handler,
},
{
MethodName: "Read",
Handler: _AuthzExtentionService_Read_Handler,
+144
View File
@@ -186,6 +186,150 @@ func (s *Service) Check(ctx context.Context, req *authzv1.CheckRequest) (*authzv
return &authzv1.CheckResponse{Allowed: allowed}, nil
}
// BatchCheck implements authzv1.AuthzServiceServer.BatchCheck
// This performs multiple access checks in a single request with optimized batching.
// 1. Validates the subject once
// 2. Groups checks by (namespace, action) to load permissions once per group
// 3. Reuses the folder tree across checks
//
// Per-item failures (bad namespace, unknown verb, store errors) are reported
// as denied results keyed by correlation ID; the RPC itself only errors when
// auth info is missing from the context.
func (s *Service) BatchCheck(ctx context.Context, req *authzv1.BatchCheckRequest) (*authzv1.BatchCheckResponse, error) {
	ctx, span := s.tracer.Start(ctx, "authz_direct_db.service.BatchCheck")
	defer span.End()
	checks := req.GetChecks()
	span.SetAttributes(attribute.Int("check_count", len(checks)))
	ctxLogger := s.logger.FromContext(ctx).New(
		"subject", req.GetSubject(),
		"check_count", len(checks),
	)
	defer func(start time.Time) {
		ctxLogger.Debug("BatchCheck execution time", "duration", time.Since(start).Milliseconds())
	}(time.Now())
	// Early check for auth info - required for namespace validation
	if _, has := types.AuthInfoFrom(ctx); !has {
		return nil, status.Error(codes.Internal, "could not get auth info from context")
	}
	if len(checks) == 0 {
		return &authzv1.BatchCheckResponse{
			Results: make(map[string]*authzv1.BatchCheckResult),
			Zookie:  &authzv1.Zookie{Timestamp: time.Now().UnixMilli()},
		}, nil
	}
	// Validate subject once for all checks
	userUID, idType, err := s.validateSubject(ctx, req.GetSubject())
	if err != nil {
		ctxLogger.Error("invalid subject", "error", err)
		// Return all checks as denied with the same error
		results := make(map[string]*authzv1.BatchCheckResult, len(checks))
		for _, item := range checks {
			results[item.GetCorrelationId()] = &authzv1.BatchCheckResult{
				Allowed: false,
				Error:   err.Error(),
			}
		}
		return &authzv1.BatchCheckResponse{Results: results, Zookie: &authzv1.Zookie{Timestamp: time.Now().UnixMilli()}}, nil
	}
	results := make(map[string]*authzv1.BatchCheckResult, len(checks))
	// Group checks by (namespace, action) to batch permission lookups
	type checkGroup struct {
		namespace  types.NamespaceInfo
		action     string
		actionSets []string
		items      []*authzv1.BatchCheckItem // original items, index-aligned with checkReqs
		checkReqs  []*checkRequest
	}
	groups := make(map[string]*checkGroup)
	// First pass: validate and group checks
	for _, item := range checks {
		ns, err := validateNamespace(ctx, item.GetNamespace())
		if err != nil {
			results[item.GetCorrelationId()] = &authzv1.BatchCheckResult{Allowed: false, Error: err.Error()}
			continue
		}
		action, actionSets, err := s.validateAction(ctx, item.GetGroup(), item.GetResource(), item.GetVerb())
		if err != nil {
			results[item.GetCorrelationId()] = &authzv1.BatchCheckResult{Allowed: false, Error: err.Error()}
			continue
		}
		// Create the internal check request
		checkReq := &checkRequest{
			Namespace:    ns,
			UserUID:      userUID,
			IdentityType: idType,
			Action:       action,
			ActionSets:   actionSets,
			Group:        item.GetGroup(),
			Resource:     item.GetResource(),
			Verb:         item.GetVerb(),
			Name:         item.GetName(),
			ParentFolder: item.GetFolder(),
		}
		// Group by namespace + action
		groupKey := ns.Value + ":" + action
		if g, ok := groups[groupKey]; ok {
			g.items = append(g.items, item)
			g.checkReqs = append(g.checkReqs, checkReq)
		} else {
			groups[groupKey] = &checkGroup{
				namespace:  ns,
				action:     action,
				actionSets: actionSets,
				items:      []*authzv1.BatchCheckItem{item},
				checkReqs:  []*checkRequest{checkReq},
			}
		}
	}
	// Second pass: process each group with shared permissions
	for _, group := range groups {
		// Set namespace in context for this group (required by store methods)
		groupCtx := request.WithNamespace(ctx, group.namespace.Value)
		// Try to get cached permissions first, then fall back to store
		permissions, err := s.getCachedIdentityPermissions(groupCtx, group.namespace, idType, userUID, group.action)
		if err != nil {
			// Cache miss - fetch from store
			permissions, err = s.getIdentityPermissions(groupCtx, group.namespace, idType, userUID, group.action, group.actionSets)
			if err != nil {
				// Permissions could not be loaded: deny every check in the group.
				ctxLogger.Error("could not get permissions", "namespace", group.namespace.Value, "action", group.action, "error", err)
				for _, item := range group.items {
					results[item.GetCorrelationId()] = &authzv1.BatchCheckResult{Allowed: false, Error: err.Error()}
				}
				continue
			}
		}
		// Check each item in the group using the shared permissions
		for i, item := range group.items {
			checkReq := group.checkReqs[i]
			allowed, err := s.checkPermission(groupCtx, permissions, checkReq)
			if err != nil {
				results[item.GetCorrelationId()] = &authzv1.BatchCheckResult{Allowed: false, Error: err.Error()}
				continue
			}
			results[item.GetCorrelationId()] = &authzv1.BatchCheckResult{Allowed: allowed}
		}
	}
	span.SetAttributes(attribute.Int("groups_processed", len(groups)))
	return &authzv1.BatchCheckResponse{
		Results: results,
		Zookie:  &authzv1.Zookie{Timestamp: time.Now().UnixMilli()},
	}, nil
}
func (s *Service) List(ctx context.Context, req *authzv1.ListRequest) (*authzv1.ListResponse, error) {
ctx, span := s.tracer.Start(ctx, "authz_direct_db.service.List")
defer span.End()
+607
View File
@@ -1829,6 +1829,613 @@ func TestService_CacheList(t *testing.T) {
})
}
func TestService_BatchCheck(t *testing.T) {
callingService := authn.NewAccessTokenAuthInfo(authn.Claims[authn.AccessTokenClaims]{
Claims: jwt.Claims{
Subject: types.NewTypeID(types.TypeAccessPolicy, "some-service"),
Audience: []string{"authzservice"},
},
Rest: authn.AccessTokenClaims{Namespace: "org-12"},
})
t.Run("Require auth info", func(t *testing.T) {
s := setupService()
ctx := context.Background()
_, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "user:test-uid",
Checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
})
require.Error(t, err)
require.Contains(t, err.Error(), "could not get auth info")
})
t.Run("Empty checks returns empty results", func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "user:test-uid",
Checks: []*authzv1.BatchCheckItem{},
})
require.NoError(t, err)
require.NotNil(t, resp)
require.Empty(t, resp.Results)
})
type batchCheckTestCase struct {
name string
checks []*authzv1.BatchCheckItem
permissions []accesscontrol.Permission
folders []store.Folder
expectedResults map[string]bool
expectedErrors map[string]bool // true if error expected for this correlation ID
expectGlobalError bool
}
t.Run("Request validation", func(t *testing.T) {
testCases := []batchCheckTestCase{
{
name: "should return error for invalid namespace",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
expectedResults: map[string]bool{"check1": false},
expectedErrors: map[string]bool{"check1": true},
},
{
name: "should return error for namespace mismatch",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-13",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
expectedResults: map[string]bool{"check1": false},
expectedErrors: map[string]bool{"check1": true},
},
{
name: "should return error for unknown group",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "unknown.grafana.app",
Resource: "unknown",
Verb: "get",
Name: "u1",
CorrelationId: "check1",
},
},
expectedResults: map[string]bool{"check1": false},
expectedErrors: map[string]bool{"check1": true},
},
{
name: "should return error for unknown verb",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "unknown",
Name: "dash1",
CorrelationId: "check1",
},
},
expectedResults: map[string]bool{"check1": false},
expectedErrors: map[string]bool{"check1": true},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
userID := &store.UserIdentifiers{UID: "test-uid", ID: 1}
store := &fakeStore{
userID: userID,
userPermissions: tc.permissions,
}
s.store = store
s.permissionStore = store
s.identityStore = &fakeIdentityStore{}
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "user:test-uid",
Checks: tc.checks,
})
require.NoError(t, err)
require.NotNil(t, resp)
for corrID, expectedAllowed := range tc.expectedResults {
result, ok := resp.Results[corrID]
require.True(t, ok, "result for %s not found", corrID)
require.Equal(t, expectedAllowed, result.Allowed, "unexpected allowed for %s", corrID)
if tc.expectedErrors[corrID] {
require.NotEmpty(t, result.Error, "expected error for %s", corrID)
}
}
})
}
})
t.Run("User permission checks", func(t *testing.T) {
testCases := []batchCheckTestCase{
{
name: "should allow user with permission on single resource",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
permissions: []accesscontrol.Permission{{Action: "dashboards:read", Scope: "dashboards:uid:dash1"}},
expectedResults: map[string]bool{"check1": true},
},
{
name: "should deny user without permission",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
permissions: []accesscontrol.Permission{{Action: "dashboards:read", Scope: "dashboards:uid:dash2"}},
expectedResults: map[string]bool{"check1": false},
},
{
name: "should handle multiple checks with mixed results",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash2",
CorrelationId: "check2",
},
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash3",
CorrelationId: "check3",
},
},
permissions: []accesscontrol.Permission{
{Action: "dashboards:read", Scope: "dashboards:uid:dash1"},
{Action: "dashboards:read", Scope: "dashboards:uid:dash3"},
},
expectedResults: map[string]bool{
"check1": true,
"check2": false,
"check3": true,
},
},
{
name: "should handle wildcard permission",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash2",
CorrelationId: "check2",
},
},
permissions: []accesscontrol.Permission{{Action: "dashboards:read", Scope: "*", Kind: "*"}},
expectedResults: map[string]bool{"check1": true, "check2": true},
},
{
name: "should handle folder inheritance",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
Folder: "child",
CorrelationId: "check1",
},
},
permissions: []accesscontrol.Permission{
{Action: "dashboards:read", Scope: "folders:uid:parent", Kind: "folders", Attribute: "uid", Identifier: "parent"},
},
folders: []store.Folder{
{UID: "parent"},
{UID: "child", ParentUID: strPtr("parent")},
},
expectedResults: map[string]bool{"check1": true},
},
{
name: "should handle action sets",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
permissions: []accesscontrol.Permission{{Action: "dashboards:admin", Scope: "dashboards:uid:dash1"}},
expectedResults: map[string]bool{"check1": true},
},
{
name: "should handle checks across different resources",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
{
Namespace: "org-12",
Group: "folder.grafana.app",
Resource: "folders",
Verb: "get",
Name: "fold1",
CorrelationId: "check2",
},
},
permissions: []accesscontrol.Permission{
{Action: "dashboards:read", Scope: "dashboards:uid:dash1"},
{Action: "folders:read", Scope: "folders:uid:fold1"},
},
expectedResults: map[string]bool{"check1": true, "check2": true},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
userID := &store.UserIdentifiers{UID: "test-uid", ID: 1}
store := &fakeStore{
userID: userID,
userPermissions: tc.permissions,
folders: tc.folders,
}
s.store = store
s.permissionStore = store
s.folderStore = store
s.identityStore = &fakeIdentityStore{}
if tc.folders != nil {
s.folderCache.Set(ctx, folderCacheKey("org-12"), newFolderTree(tc.folders))
}
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "user:test-uid",
Checks: tc.checks,
})
require.NoError(t, err)
require.NotNil(t, resp)
require.Len(t, resp.Results, len(tc.expectedResults))
for corrID, expectedAllowed := range tc.expectedResults {
result, ok := resp.Results[corrID]
require.True(t, ok, "result for %s not found", corrID)
require.Equal(t, expectedAllowed, result.Allowed, "unexpected allowed for %s", corrID)
}
})
}
})
t.Run("Anonymous permission checks", func(t *testing.T) {
testCases := []batchCheckTestCase{
{
name: "should allow anonymous with permission",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
permissions: []accesscontrol.Permission{{Action: "dashboards:read", Scope: "dashboards:uid:dash1"}},
expectedResults: map[string]bool{"check1": true},
},
{
name: "should deny anonymous without permission",
checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
permissions: []accesscontrol.Permission{{Action: "dashboards:read", Scope: "dashboards:uid:dash2"}},
expectedResults: map[string]bool{"check1": false},
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
store := &fakeStore{userPermissions: tc.permissions}
s.store = store
s.permissionStore = store
s.identityStore = &fakeIdentityStore{}
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "anonymous:0",
Checks: tc.checks,
})
require.NoError(t, err)
require.NotNil(t, resp)
for corrID, expectedAllowed := range tc.expectedResults {
result, ok := resp.Results[corrID]
require.True(t, ok, "result for %s not found", corrID)
require.Equal(t, expectedAllowed, result.Allowed, "unexpected allowed for %s", corrID)
}
})
}
})
t.Run("Rendering permission checks", func(t *testing.T) {
t.Run("should allow rendering with permission", func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "render:0",
Checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
})
require.NoError(t, err)
require.NotNil(t, resp)
require.True(t, resp.Results["check1"].Allowed)
})
t.Run("should deny rendering access to another app resources", func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "render:0",
Checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "another.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
},
})
require.NoError(t, err)
require.NotNil(t, resp)
require.False(t, resp.Results["check1"].Allowed)
require.NotEmpty(t, resp.Results["check1"].Error)
})
})
t.Run("Invalid subject returns errors for all checks", func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
store := &fakeStore{}
s.store = store
s.permissionStore = store
s.identityStore = &fakeIdentityStore{}
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "invalid:12",
Checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash2",
CorrelationId: "check2",
},
},
})
require.NoError(t, err)
require.NotNil(t, resp)
require.Len(t, resp.Results, 2)
for _, result := range resp.Results {
require.False(t, result.Allowed)
require.NotEmpty(t, result.Error)
}
})
t.Run("Grouping optimization", func(t *testing.T) {
t.Run("should batch permission lookups for same action", func(t *testing.T) {
s := setupService()
ctx := types.WithAuthInfo(context.Background(), callingService)
userID := &store.UserIdentifiers{UID: "test-uid", ID: 1}
fStore := &fakeStore{
userID: userID,
userPermissions: []accesscontrol.Permission{
{Action: "dashboards:read", Scope: "dashboards:uid:dash1"},
{Action: "dashboards:read", Scope: "dashboards:uid:dash2"},
},
}
s.store = fStore
s.permissionStore = fStore
s.identityStore = &fakeIdentityStore{}
resp, err := s.BatchCheck(ctx, &authzv1.BatchCheckRequest{
Subject: "user:test-uid",
Checks: []*authzv1.BatchCheckItem{
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash1",
CorrelationId: "check1",
},
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash2",
CorrelationId: "check2",
},
{
Namespace: "org-12",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: "get",
Name: "dash3",
CorrelationId: "check3",
},
},
})
require.NoError(t, err)
require.NotNil(t, resp)
require.True(t, resp.Results["check1"].Allowed)
require.True(t, resp.Results["check2"].Allowed)
require.False(t, resp.Results["check3"].Allowed)
// Verify permissions were fetched only once (1 call for userID + 1 call for basicRole + 1 call for permissions)
require.Equal(t, 3, fStore.calls)
})
})
}
// TestService_CacheBatchCheck exercises the caching behavior of BatchCheck:
// a populated permission cache must answer without touching the stores, and
// a cache miss must fall back to the database-backed stores.
func TestService_CacheBatchCheck(t *testing.T) {
	caller := authn.NewAccessTokenAuthInfo(authn.Claims[authn.AccessTokenClaims]{
		Claims: jwt.Claims{
			Subject:  types.NewTypeID(types.TypeAccessPolicy, "some-service"),
			Audience: []string{"authzservice"},
		},
		Rest: authn.AccessTokenClaims{Namespace: "org-12"},
	})
	ctx := types.WithAuthInfo(context.Background(), caller)
	ids := &store.UserIdentifiers{UID: "test-uid", ID: 1}

	// newRequest builds a single-item batch check for one dashboard name,
	// always using correlation ID "check1".
	newRequest := func(name string) *authzv1.BatchCheckRequest {
		return &authzv1.BatchCheckRequest{
			Subject: "user:test-uid",
			Checks: []*authzv1.BatchCheckItem{
				{
					Namespace:     "org-12",
					Group:         "dashboard.grafana.app",
					Resource:      "dashboards",
					Verb:          "get",
					Name:          name,
					CorrelationId: "check1",
				},
			},
		}
	}

	t.Run("Allow based on cached permissions", func(t *testing.T) {
		s := setupService()
		s.idCache.Set(ctx, userIdentifierCacheKey("org-12", "test-uid"), *ids)
		s.permCache.Set(ctx, userPermCacheKey("org-12", "test-uid", "dashboards:read"), map[string]bool{"dashboards:uid:dash1": true})

		resp, err := s.BatchCheck(ctx, newRequest("dash1"))
		require.NoError(t, err)
		require.True(t, resp.Results["check1"].Allowed)
	})

	t.Run("Fallback to database on cache miss", func(t *testing.T) {
		s := setupService()
		// Populate the stores but leave the permission cache empty, forcing
		// the service to read permissions from the database.
		fStore := &fakeStore{
			userID:          ids,
			userPermissions: []accesscontrol.Permission{{Action: "dashboards:read", Scope: "dashboards:uid:dash2"}},
		}
		s.store = fStore
		s.permissionStore = fStore
		s.identityStore = &fakeIdentityStore{}
		s.idCache.Set(ctx, userIdentifierCacheKey("org-12", "test-uid"), *ids)

		resp, err := s.BatchCheck(ctx, newRequest("dash2"))
		require.NoError(t, err)
		require.True(t, resp.Results["check1"].Allowed)
	})
}
func setupService() *Service {
cache := cache.NewLocalCache(cache.Config{Expiry: 5 * time.Minute, CleanupInterval: 5 * time.Minute})
logger := log.New("authz-rbac-service")
-1
View File
@@ -13,7 +13,6 @@ type Client interface {
authlib.AccessClient
Read(ctx context.Context, req *authzextv1.ReadRequest) (*authzextv1.ReadResponse, error)
Write(ctx context.Context, req *authzextv1.WriteRequest) error
BatchCheck(ctx context.Context, req *authzextv1.BatchCheckRequest) (*authzextv1.BatchCheckResponse, error)
Mutate(ctx context.Context, req *authzextv1.MutateRequest) error
Query(ctx context.Context, req *authzextv1.QueryRequest) (*authzextv1.QueryResponse, error)
+3 -3
View File
@@ -68,11 +68,11 @@ func (c *Client) Write(ctx context.Context, req *authzextv1.WriteRequest) error
return err
}
func (c *Client) BatchCheck(ctx context.Context, req *authzextv1.BatchCheckRequest) (*authzextv1.BatchCheckResponse, error) {
ctx, span := tracer.Start(ctx, "authlib.zanzana.client.Check")
func (c *Client) BatchCheck(ctx context.Context, id authlib.AuthInfo, req authlib.BatchCheckRequest) (authlib.BatchCheckResponse, error) {
ctx, span := tracer.Start(ctx, "authlib.zanzana.client.BatchCheck")
defer span.End()
return c.authzext.BatchCheck(ctx, req)
return c.authzlibclient.BatchCheck(ctx, id, req)
}
func (c *Client) WriteNew(ctx context.Context, req *authzextv1.WriteRequest) error {
+5 -2
View File
@@ -34,8 +34,11 @@ func (nc NoopClient) Write(ctx context.Context, req *authzextv1.WriteRequest) er
return nil
}
func (nc NoopClient) BatchCheck(ctx context.Context, req *authzextv1.BatchCheckRequest) (*authzextv1.BatchCheckResponse, error) {
return nil, nil
// BatchCheck on the noop client performs no authorization checks: it returns
// an empty result set and a no-op zookie, and never errors.
func (nc NoopClient) BatchCheck(ctx context.Context, id authlib.AuthInfo, req authlib.BatchCheckRequest) (authlib.BatchCheckResponse, error) {
	resp := authlib.BatchCheckResponse{
		Results: make(map[string]authlib.BatchCheckResult),
		Zookie:  authlib.NoopZookie{},
	}
	return resp, nil
}
func (nc NoopClient) Mutate(ctx context.Context, req *authzextv1.MutateRequest) error {
@@ -132,3 +132,54 @@ func (c *ShadowClient) Compile(ctx context.Context, id authlib.AuthInfo, req aut
return shadowItemChecker, authlib.NoopZookie{}, err
}
// BatchCheck runs the batch check against the primary RBAC access client and,
// in a background goroutine, shadows the same request to zanzana, comparing
// the two result sets and recording match/mismatch metrics per correlation ID.
//
// The RBAC response is always the one returned to the caller; zanzana errors
// and mismatches are only logged and counted.
func (c *ShadowClient) BatchCheck(ctx context.Context, id authlib.AuthInfo, req authlib.BatchCheckRequest) (authzlib.BatchCheckResponse, error) {
	// Buffered (size 1) so the send at the bottom never blocks, even when the
	// shadow goroutine exits early (zanzana not configured, or it errored).
	acResChan := make(chan authlib.BatchCheckResponse, 1)
	acErrChan := make(chan error, 1)
	go func() {
		if c.zanzanaClient == nil {
			return
		}
		// Detach from the caller's cancellation so the shadow comparison can
		// finish after the request returns, but bound it with its own timeout.
		zanzanaCtx := context.WithoutCancel(ctx)
		zanzanaCtxTimeout, cancel := context.WithTimeout(zanzanaCtx, zanzanaTimeout)
		defer cancel()
		timer := prometheus.NewTimer(c.metrics.evaluationsSeconds.WithLabelValues("zanzana"))
		res, err := c.zanzanaClient.BatchCheck(zanzanaCtxTimeout, id, req)
		timer.ObserveDuration()
		if err != nil {
			// BUG FIX: without a zanzana response there is nothing to compare.
			// Previously we fell through and compared against the zero-value
			// response, emitting a spurious "missing result" warning and an
			// "error" metric for every single check in the batch.
			c.logger.Error("Failed to run zanzana batch check", "error", err)
			return
		}
		acRes := <-acResChan
		acErr := <-acErrChan
		if acErr == nil {
			// Compare results for each correlation ID
			for corrID, acResult := range acRes.Results {
				zanzanaResult, exists := res.Results[corrID]
				if !exists {
					c.metrics.evaluationStatusTotal.WithLabelValues("error").Inc()
					c.logger.Warn("Zanzana batch check missing result", "correlationId", corrID, "user", id.GetUID())
					continue
				}
				if zanzanaResult.Allowed != acResult.Allowed {
					c.metrics.evaluationStatusTotal.WithLabelValues("error").Inc()
					c.logger.Warn("Zanzana batch check result does not match", "expected", acResult.Allowed, "actual", zanzanaResult.Allowed, "correlationId", corrID, "user", id.GetUID())
				} else {
					c.metrics.evaluationStatusTotal.WithLabelValues("success").Inc()
				}
			}
		}
	}()
	timer := prometheus.NewTimer(c.metrics.evaluationsSeconds.WithLabelValues("rbac"))
	res, err := c.accessClient.BatchCheck(ctx, id, req)
	timer.ObserveDuration()
	acResChan <- res
	acErrChan <- err
	return res, err
}
+1 -2
View File
@@ -10,7 +10,6 @@ import (
iamv0alpha1 "github.com/grafana/grafana/apps/iam/pkg/apis/iam/v0alpha1"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/services/accesscontrol"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
)
type typeInfo struct {
@@ -73,7 +72,7 @@ func NewResourceInfoFromCheck(r *authzv1.CheckRequest) ResourceInfo {
return resource
}
func NewResourceInfoFromBatchItem(i *authzextv1.BatchCheckItem) ResourceInfo {
func NewResourceInfoFromBatchItem(i *authzv1.BatchCheckItem) ResourceInfo {
typ, relations := getTypeAndRelations(i.GetGroup(), i.GetResource())
return newResource(
typ,
@@ -2,97 +2,463 @@ package server
import (
"context"
"fmt"
"time"
authzv1 "github.com/grafana/authlib/authz/proto/v1"
openfgav1 "github.com/openfga/api/proto/openfga/v1"
"go.opentelemetry.io/otel/attribute"
"go.opentelemetry.io/otel/codes"
"google.golang.org/protobuf/types/known/structpb"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
)
func (s *Server) BatchCheck(ctx context.Context, r *authzextv1.BatchCheckRequest) (*authzextv1.BatchCheckResponse, error) {
// checkKey represents a unique check to be performed: one (relation, object)
// pair evaluated for the request subject. It is used both to deduplicate
// checks before sending them to OpenFGA and to map OpenFGA correlation IDs
// back to the originating check.
type checkKey struct {
	relation string // OpenFGA relation being checked (e.g. the verb-mapped relation)
	object   string // OpenFGA object identifier the relation is checked against
}
// batchCheckBuilder encapsulates state for building OpenFGA batch checks.
// It accumulates deduplicated checks for a single subject, assigns each a
// unique correlation ID ("c0", "c1", ...) and records the mapping from
// correlation ID back to the originating checkKey so results can be resolved.
type batchCheckBuilder struct {
	subject      string                         // subject every check is evaluated for
	contextuals  *openfgav1.ContextualTupleKeys // contextual tuples attached to each check
	checks       []*openfgav1.BatchCheckItem    // accumulated, deduplicated checks
	checksSeen   map[checkKey]bool              // dedup set of (relation, object) pairs already added
	checkMapping map[string]checkKey            // correlation ID -> originating check
	counter      int                            // monotonically increasing counter for correlation IDs
}
// newBatchCheckBuilder returns an empty builder that will create checks for
// the given subject, attaching the provided contextual tuples to every check.
func newBatchCheckBuilder(subject string, contextuals *openfgav1.ContextualTupleKeys) *batchCheckBuilder {
	b := &batchCheckBuilder{
		subject:      subject,
		contextuals:  contextuals,
		checksSeen:   make(map[checkKey]bool),
		checkMapping: make(map[string]checkKey),
	}
	// counter starts at its zero value; checks starts empty.
	b.checks = make([]*openfgav1.BatchCheckItem, 0)
	return b
}
// addCheck queues one OpenFGA check for the given relation/object pair with
// the given per-check context. Empty objects are ignored, and each unique
// (relation, object) pair is added at most once.
func (b *batchCheckBuilder) addCheck(relation, object string, context *structpb.Struct) {
	if object == "" {
		return
	}

	key := checkKey{relation: relation, object: object}
	if b.checksSeen[key] {
		return // already queued; a single result will serve all callers
	}
	b.checksSeen[key] = true

	id := fmt.Sprintf("c%d", b.counter)
	b.counter++

	item := &openfgav1.BatchCheckItem{
		TupleKey: &openfgav1.CheckRequestTupleKey{
			User:     b.subject,
			Relation: relation,
			Object:   object,
		},
		ContextualTuples: b.contextuals,
		Context:          context,
		CorrelationId:    id,
	}
	b.checks = append(b.checks, item)
	b.checkMapping[id] = key
}
// BatchCheck implements authzv1.AuthzServiceServer.BatchCheck
// This performs multiple access checks in a single request using OpenFGA's native BatchCheck API.
func (s *Server) BatchCheck(ctx context.Context, r *authzv1.BatchCheckRequest) (*authzv1.BatchCheckResponse, error) {
ctx, span := s.tracer.Start(ctx, "server.BatchCheck")
defer span.End()
if err := authorize(ctx, r.GetNamespace(), s.cfg); err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
return nil, err
}
span.SetAttributes(attribute.Int("check_count", len(r.GetChecks())))
batchRes := &authzextv1.BatchCheckResponse{
Groups: make(map[string]*authzextv1.BatchCheckGroupResource),
}
defer func(t time.Time) {
s.metrics.requestDurationSeconds.WithLabelValues("server.BatchCheck", "").Observe(time.Since(t).Seconds())
}(time.Now())
store, err := s.getStoreInfo(ctx, r.GetNamespace())
res, err := s.batchCheck(ctx, r)
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
return nil, err
s.logger.Error("failed to perform batch check request", "error", err)
return nil, fmt.Errorf("failed to perform batch check request: %w", err)
}
return res, nil
}
func (s *Server) batchCheck(ctx context.Context, r *authzv1.BatchCheckRequest) (*authzv1.BatchCheckResponse, error) {
items := r.GetChecks()
if len(items) == 0 {
return &authzv1.BatchCheckResponse{
Results: make(map[string]*authzv1.BatchCheckResult),
}, nil
}
// Group items by namespace
itemsByNamespace := make(map[string][]*authzv1.BatchCheckItem)
for _, item := range items {
ns := item.GetNamespace()
itemsByNamespace[ns] = append(itemsByNamespace[ns], item)
}
// Authorize and get store info for each namespace
stores := make(map[string]*storeInfo)
for namespace := range itemsByNamespace {
if err := authorize(ctx, namespace, s.cfg); err != nil {
return nil, err
}
store, err := s.getStoreInfo(ctx, namespace)
if err != nil {
return nil, err
}
stores[namespace] = store
}
contextuals, err := s.getContextuals(r.GetSubject())
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
return nil, err
}
groupResourceAccess := make(map[string]bool)
results := make(map[string]*authzv1.BatchCheckResult, len(items))
subject := r.GetSubject()
for _, item := range r.GetItems() {
res, err := s.batchCheckItem(ctx, r, item, contextuals, store, groupResourceAccess)
if err != nil {
span.RecordError(err)
span.SetStatus(codes.Error, err.Error())
return nil, err
// Process each namespace separately
for namespace, nsItems := range itemsByNamespace {
store := stores[namespace]
// Phase 1: Check GroupResource access (broadest permissions)
// Example: user has "get" on "dashboards" group_resource → all dashboards allowed
s.runGroupResourcePhase(ctx, store, subject, nsItems, contextuals, results)
// Phase 2: Check folder permission inheritance (can_get, can_create, etc. on parent folder)
// Example: user has "can_get" on folder-A → all dashboards in folder-A allowed
s.runFolderPermissionPhase(ctx, store, subject, nsItems, contextuals, results)
// Phase 3: Check folder subresource access (folder_get, folder_create, etc.)
// Example: user has "folder_get" on folder-A → dashboards in folder-A allowed via subresource
s.runFolderSubresourcePhase(ctx, store, subject, nsItems, contextuals, results)
// Phase 4: Check direct resource access
// Example: user has "get" directly on dashboard-123
s.runDirectResourcePhase(ctx, store, subject, nsItems, contextuals, results)
}
// Mark any remaining unresolved items as denied
for _, item := range items {
if _, resolved := results[item.GetCorrelationId()]; !resolved {
results[item.GetCorrelationId()] = &authzv1.BatchCheckResult{Allowed: false}
}
}
return s.buildResponse(results), nil
}
// buildResponse wraps the per-correlation-ID results into a BatchCheckResponse,
// stamping it with a zookie carrying the current time in milliseconds.
func (s *Server) buildResponse(results map[string]*authzv1.BatchCheckResult) *authzv1.BatchCheckResponse {
	resp := &authzv1.BatchCheckResponse{Results: results}
	resp.Zookie = &authzv1.Zookie{Timestamp: time.Now().UnixMilli()}
	return resp
}
// runGroupResourcePhase checks if the user has GroupResource-level access.
// This is the broadest permission - if allowed, all items in that group are allowed.
func (s *Server) runGroupResourcePhase(
ctx context.Context,
store *storeInfo,
subject string,
items []*authzv1.BatchCheckItem,
contextuals *openfgav1.ContextualTupleKeys,
results map[string]*authzv1.BatchCheckResult,
) {
// Group items by their GroupResource
type grInfo struct {
relation string
grIdent string
items []string // correlation IDs
}
groupedItems := make(map[string]*grInfo) // groupResource -> info
for _, item := range items {
relation := common.VerbMapping[item.GetVerb()]
if !common.IsGroupResourceRelation(relation) {
continue
}
groupResource := common.FormatGroupResource(item.GetGroup(), item.GetResource(), item.GetSubresource())
if _, ok := batchRes.Groups[groupResource]; !ok {
batchRes.Groups[groupResource] = &authzextv1.BatchCheckGroupResource{
Items: make(map[string]bool),
resource := common.NewResourceInfoFromBatchItem(item)
gr := resource.GroupResource()
if _, exists := groupedItems[gr]; !exists {
groupedItems[gr] = &grInfo{
relation: relation,
grIdent: resource.GroupResourceIdent(),
items: make([]string, 0),
}
}
batchRes.Groups[groupResource].Items[item.GetName()] = res.GetAllowed()
groupedItems[gr].items = append(groupedItems[gr].items, item.GetCorrelationId())
}
return batchRes, nil
if len(groupedItems) == 0 {
return
}
// Build batch check for unique GroupResources
builder := newBatchCheckBuilder(subject, contextuals)
grCheckMapping := make(map[string]string) // OpenFGA correlationID -> groupResource
for gr, info := range groupedItems {
correlationID := fmt.Sprintf("gr%d", builder.counter)
builder.counter++
builder.checks = append(builder.checks, &openfgav1.BatchCheckItem{
TupleKey: &openfgav1.CheckRequestTupleKey{
User: subject,
Relation: info.relation,
Object: info.grIdent,
},
ContextualTuples: contextuals,
CorrelationId: correlationID,
})
grCheckMapping[correlationID] = gr
}
openfgaRes, err := s.openfgaClient.BatchCheck(ctx, &openfgav1.BatchCheckRequest{
StoreId: store.ID,
AuthorizationModelId: store.ModelID,
Checks: builder.checks,
})
if err != nil {
s.logger.Warn("Failed to check group resource access", "error", err)
return
}
// Mark all items in allowed GroupResources
for correlationID, result := range openfgaRes.GetResult() {
gr := grCheckMapping[correlationID]
if allowed, ok := result.GetCheckResult().(*openfgav1.BatchCheckSingleResult_Allowed); ok && allowed.Allowed {
for _, itemCorrelationID := range groupedItems[gr].items {
results[itemCorrelationID] = &authzv1.BatchCheckResult{Allowed: true}
}
}
}
}
func (s *Server) batchCheckItem(
// runFolderPermissionPhase checks folder permission inheritance (can_get, can_create, etc.).
// This applies to folder-based resources like dashboards, panels, etc.
func (s *Server) runFolderPermissionPhase(
ctx context.Context,
r *authzextv1.BatchCheckRequest,
item *authzextv1.BatchCheckItem,
contextuals *openfgav1.ContextualTupleKeys,
store *storeInfo,
groupResourceAccess map[string]bool,
) (*authzv1.CheckResponse, error) {
var (
relation = common.VerbMapping[item.GetVerb()]
resource = common.NewResourceInfoFromBatchItem(item)
groupResource = resource.GroupResource()
)
subject string,
items []*authzv1.BatchCheckItem,
contextuals *openfgav1.ContextualTupleKeys,
results map[string]*authzv1.BatchCheckResult,
) {
builder := newBatchCheckBuilder(subject, contextuals)
checkToItems := make(map[checkKey][]string) // checkKey -> correlation IDs
allowed, ok := groupResourceAccess[groupResource]
if !ok {
res, err := s.checkGroupResource(ctx, r.GetSubject(), relation, resource, contextuals, store)
if err != nil {
return nil, err
for _, item := range items {
if _, resolved := results[item.GetCorrelationId()]; resolved {
continue
}
allowed = res.GetAllowed()
groupResourceAccess[groupResource] = res.GetAllowed()
resource := common.NewResourceInfoFromBatchItem(item)
folderIdent := resource.FolderIdent()
// Only folder-based generic resources use folder permission inheritance
if !resource.IsGeneric() || folderIdent == "" || !isFolderPermissionBasedResource(resource.GroupResource()) {
continue
}
relation := common.VerbMapping[item.GetVerb()]
rel := common.FolderPermissionRelation(relation)
key := checkKey{relation: rel, object: folderIdent}
checkToItems[key] = append(checkToItems[key], item.GetCorrelationId())
builder.addCheck(rel, folderIdent, resource.Context())
}
if allowed {
return &authzv1.CheckResponse{Allowed: true}, nil
if len(builder.checks) == 0 {
return
}
if resource.IsGeneric() {
return s.checkGeneric(ctx, r.GetSubject(), relation, resource, contextuals, store)
checkResults, err := s.executeOpenFGABatchChecks(ctx, store, builder)
if err != nil {
s.logger.Warn("Failed folder permission phase", "error", err)
return
}
return s.checkTyped(ctx, r.GetSubject(), relation, resource, contextuals, store)
// Mark items allowed by folder permissions
for key, allowed := range checkResults {
if allowed {
for _, correlationID := range checkToItems[key] {
results[correlationID] = &authzv1.BatchCheckResult{Allowed: true}
}
}
}
}
// runFolderSubresourcePhase checks folder subresource access (folder_get, folder_create, etc.).
//
// Items already resolved by an earlier phase are skipped. For generic
// (folder-contained) resources the subresource relation is checked on the
// item's parent folder; for typed resources it is checked on the resource
// itself. Only positive ("allowed") outcomes are written into results —
// unresolved items are left for later phases / the final deny pass.
func (s *Server) runFolderSubresourcePhase(
	ctx context.Context,
	store *storeInfo,
	subject string,
	items []*authzv1.BatchCheckItem,
	contextuals *openfgav1.ContextualTupleKeys,
	results map[string]*authzv1.BatchCheckResult,
) {
	builder := newBatchCheckBuilder(subject, contextuals)
	// checkToItems fans one deduplicated OpenFGA check back out to every
	// item (correlation ID) that depends on it.
	checkToItems := make(map[checkKey][]string)
	for _, item := range items {
		// Skip items already decided by a previous phase.
		if _, resolved := results[item.GetCorrelationId()]; resolved {
			continue
		}
		resource := common.NewResourceInfoFromBatchItem(item)
		relation := common.VerbMapping[item.GetVerb()]
		var objectIdent string
		var subresRel string
		if resource.IsGeneric() {
			// Generic resources: check subresource on folder
			folderIdent := resource.FolderIdent()
			if folderIdent == "" {
				// No parent folder — nothing to inherit from in this phase.
				continue
			}
			subresRel = common.SubresourceRelation(relation)
			if !common.IsSubresourceRelation(subresRel) {
				// The verb has no subresource counterpart; skip.
				continue
			}
			objectIdent = folderIdent
		} else {
			// Typed resources: check subresource on the resource itself
			if !resource.HasSubresource() || !resource.IsValidRelation(relation) {
				continue
			}
			objectIdent = resource.ResourceIdent()
			if objectIdent == "" {
				continue
			}
			subresRel = common.SubresourceRelation(relation)
		}
		key := checkKey{relation: subresRel, object: objectIdent}
		checkToItems[key] = append(checkToItems[key], item.GetCorrelationId())
		builder.addCheck(subresRel, objectIdent, resource.Context())
	}
	if len(builder.checks) == 0 {
		return
	}
	checkResults, err := s.executeOpenFGABatchChecks(ctx, store, builder)
	if err != nil {
		// Best-effort phase: on failure, leave items unresolved rather than
		// failing the whole batch.
		s.logger.Warn("Failed folder subresource phase", "error", err)
		return
	}
	// Mark every item whose underlying check came back allowed.
	for key, allowed := range checkResults {
		if allowed {
			for _, correlationID := range checkToItems[key] {
				results[correlationID] = &authzv1.BatchCheckResult{Allowed: true}
			}
		}
	}
}
// runDirectResourcePhase checks direct resource access.
//
// This is the last positive phase: for each item not yet resolved by the
// broader phases, it checks the verb-mapped relation directly on the
// resource identifier (for folders, the computed folder permission
// relation). Only "allowed" outcomes are written into results.
func (s *Server) runDirectResourcePhase(
	ctx context.Context,
	store *storeInfo,
	subject string,
	items []*authzv1.BatchCheckItem,
	contextuals *openfgav1.ContextualTupleKeys,
	results map[string]*authzv1.BatchCheckResult,
) {
	builder := newBatchCheckBuilder(subject, contextuals)
	// checkToItems fans one deduplicated OpenFGA check back out to every
	// item (correlation ID) that depends on it.
	checkToItems := make(map[checkKey][]string)
	for _, item := range items {
		// Skip items already decided by a previous phase.
		if _, resolved := results[item.GetCorrelationId()]; resolved {
			continue
		}
		resource := common.NewResourceInfoFromBatchItem(item)
		relation := common.VerbMapping[item.GetVerb()]
		if !resource.IsValidRelation(relation) {
			continue
		}
		resourceIdent := resource.ResourceIdent()
		if resourceIdent == "" {
			// No concrete resource to check (e.g. no name); leave unresolved.
			continue
		}
		// For folders, use the computed permission relation
		checkRelation := relation
		if resource.Type() == common.TypeFolder {
			checkRelation = common.FolderPermissionRelation(relation)
		}
		key := checkKey{relation: checkRelation, object: resourceIdent}
		checkToItems[key] = append(checkToItems[key], item.GetCorrelationId())
		builder.addCheck(checkRelation, resourceIdent, resource.Context())
	}
	if len(builder.checks) == 0 {
		return
	}
	checkResults, err := s.executeOpenFGABatchChecks(ctx, store, builder)
	if err != nil {
		// Best-effort phase: on failure, leave items unresolved rather than
		// failing the whole batch.
		s.logger.Warn("Failed direct resource phase", "error", err)
		return
	}
	// Mark every item whose underlying check came back allowed.
	for key, allowed := range checkResults {
		if allowed {
			for _, correlationID := range checkToItems[key] {
				results[correlationID] = &authzv1.BatchCheckResult{Allowed: true}
			}
		}
	}
}
// executeOpenFGABatchChecks sends the builder's accumulated checks to OpenFGA
// in fixed-size chunks and collapses the per-correlation-ID answers into a
// map keyed by the originating checkKey. Correlation IDs not known to the
// builder are ignored, as are result variants other than "allowed".
func (s *Server) executeOpenFGABatchChecks(ctx context.Context, store *storeInfo, builder *batchCheckBuilder) (map[checkKey]bool, error) {
	const maxChecksPerBatch = 50

	out := make(map[checkKey]bool)
	remaining := builder.checks
	for len(remaining) > 0 {
		// Carve off the next chunk of at most maxChecksPerBatch checks.
		n := maxChecksPerBatch
		if n > len(remaining) {
			n = len(remaining)
		}
		chunk := remaining[:n]
		remaining = remaining[n:]

		resp, err := s.openfgaClient.BatchCheck(ctx, &openfgav1.BatchCheckRequest{
			StoreId:              store.ID,
			AuthorizationModelId: store.ModelID,
			Checks:               chunk,
		})
		if err != nil {
			return nil, fmt.Errorf("failed to perform OpenFGA batch check: %w", err)
		}

		// Translate correlation IDs back to check keys.
		for corrID, result := range resp.GetResult() {
			key, known := builder.checkMapping[corrID]
			if !known {
				continue
			}
			if allowed, ok := result.GetCheckResult().(*openfgav1.BatchCheckSingleResult_Allowed); ok {
				out[key] = allowed.Allowed
			}
		}
	}
	return out, nil
}
@@ -1,193 +1,302 @@
package server
import (
"fmt"
"testing"
authzv1 "github.com/grafana/authlib/authz/proto/v1"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/apimachinery/utils"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
)
func testBatchCheck(t *testing.T, server *Server) {
newReq := func(subject, verb, group, resource, subresource string, items []*authzextv1.BatchCheckItem) *authzextv1.BatchCheckRequest {
for i, item := range items {
items[i] = &authzextv1.BatchCheckItem{
Verb: verb,
Group: group,
Resource: resource,
Subresource: subresource,
Name: item.GetName(),
Folder: item.GetFolder(),
}
// Helper to create a batch check request
newReq := func(subject string, items []*authzv1.BatchCheckItem) *authzv1.BatchCheckRequest {
return &authzv1.BatchCheckRequest{
Subject: subject,
Checks: items,
}
}
return &authzextv1.BatchCheckRequest{
Namespace: namespace,
Subject: subject,
Items: items,
// Helper to create a batch check item with correlation ID (uses default namespace)
newItem := func(verb, group, resource, subresource, folder, name string) *authzv1.BatchCheckItem {
correlationID := fmt.Sprintf("%s-%s-%s-%s", group, resource, folder, name)
return &authzv1.BatchCheckItem{
Namespace: namespace,
Verb: verb,
Group: group,
Resource: resource,
Subresource: subresource,
Name: name,
Folder: folder,
CorrelationId: correlationID,
}
}
t.Run("user:1 should only be able to read resource:dashboard.grafana.app/dashboards/1", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:1", utils.VerbGet, dashboardGroup, dashboardResource, "", []*authzextv1.BatchCheckItem{
{Name: "1", Folder: "1"},
{Name: "2", Folder: "2"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:1", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "2"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 2)
require.Len(t, res.Results, 2)
assert.True(t, res.Groups[groupResource].Items["1"])
assert.False(t, res.Groups[groupResource].Items["2"])
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "2", "2")].Allowed)
})
t.Run("user:2 should be able to read resource:dashboard.grafana.app/dashboards/{1,2} through group_resource", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:2", utils.VerbGet, dashboardGroup, dashboardResource, "", []*authzextv1.BatchCheckItem{
{Name: "1", Folder: "1"},
{Name: "2", Folder: "2"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:2", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "2"),
}))
require.NoError(t, err)
assert.Len(t, res.Groups[groupResource].Items, 2)
require.Len(t, res.Results, 2)
// user:2 has group_resource access, so both should be allowed
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "2", "2")].Allowed)
})
t.Run("user:3 should be able to read resource:dashboard.grafana.app/dashboards/1 with set relation", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:3", utils.VerbGet, dashboardGroup, dashboardResource, "", []*authzextv1.BatchCheckItem{
{Name: "1", Folder: "1"},
{Name: "2", Folder: "2"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:3", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "2"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 2)
require.Len(t, res.Results, 2)
assert.True(t, res.Groups[groupResource].Items["1"])
assert.False(t, res.Groups[groupResource].Items["2"])
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "2", "2")].Allowed)
})
t.Run("user:4 should be able to read all dashboard.grafana.app/dashboards in folder 1 and 3", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:4", utils.VerbGet, dashboardGroup, dashboardResource, "", []*authzextv1.BatchCheckItem{
{Name: "1", Folder: "1"},
{Name: "2", Folder: "3"},
{Name: "3", Folder: "2"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:4", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "3", "2"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "3"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 3)
require.Len(t, res.Results, 3)
assert.True(t, res.Groups[groupResource].Items["1"])
assert.True(t, res.Groups[groupResource].Items["2"])
assert.False(t, res.Groups[groupResource].Items["3"])
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "3", "2")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "2", "3")].Allowed)
})
t.Run("user:5 should be able to read resource:dashboard.grafana.app/dashboards/1 through folder with set relation", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:5", utils.VerbGet, dashboardGroup, dashboardResource, "", []*authzextv1.BatchCheckItem{
{Name: "1", Folder: "1"},
{Name: "2", Folder: "2"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:5", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "2"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 2)
require.Len(t, res.Results, 2)
assert.True(t, res.Groups[groupResource].Items["1"])
assert.False(t, res.Groups[groupResource].Items["2"])
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "2", "2")].Allowed)
})
t.Run("user:6 should be able to read folder 1", func(t *testing.T) {
groupResource := common.FormatGroupResource(folderGroup, folderResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:6", utils.VerbGet, folderGroup, folderResource, "", []*authzextv1.BatchCheckItem{
{Name: "1"},
{Name: "2"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:6", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, folderGroup, folderResource, "", "", "1"),
newItem(utils.VerbGet, folderGroup, folderResource, "", "", "2"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 2)
require.Len(t, res.Results, 2)
assert.True(t, res.Groups[groupResource].Items["1"])
assert.False(t, res.Groups[groupResource].Items["2"])
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", folderGroup, folderResource, "", "1")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", folderGroup, folderResource, "", "2")].Allowed)
})
t.Run("user:7 should be able to read folder {1,2} through group_resource access", func(t *testing.T) {
groupResource := common.FormatGroupResource(folderGroup, folderResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:7", utils.VerbGet, folderGroup, folderResource, "", []*authzextv1.BatchCheckItem{
{Name: "1"},
{Name: "2"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:7", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, folderGroup, folderResource, "", "", "1"),
newItem(utils.VerbGet, folderGroup, folderResource, "", "", "2"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 2)
require.True(t, res.Groups[groupResource].Items["1"])
require.True(t, res.Groups[groupResource].Items["2"])
require.Len(t, res.Results, 2)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", folderGroup, folderResource, "", "1")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", folderGroup, folderResource, "", "2")].Allowed)
})
t.Run("user:8 should be able to read all resoruce:dashboard.grafana.app/dashboards in folder 6 through folder 5", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:8", utils.VerbGet, dashboardGroup, dashboardResource, "", []*authzextv1.BatchCheckItem{
{Name: "10", Folder: "6"},
{Name: "20", Folder: "6"},
t.Run("user:8 should be able to read all resource:dashboard.grafana.app/dashboards in folder 6 through folder 5", func(t *testing.T) {
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:8", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "6", "10"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "6", "20"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 2)
require.True(t, res.Groups[groupResource].Items["10"])
require.True(t, res.Groups[groupResource].Items["20"])
require.Len(t, res.Results, 2)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "10")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "20")].Allowed)
})
t.Run("user:9 should be able to create dashboards in folder 6 through folder 5", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, "")
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:9", utils.VerbCreate, dashboardGroup, dashboardResource, "", []*authzextv1.BatchCheckItem{
{Name: "10", Folder: "6"},
{Name: "20", Folder: "6"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:9", []*authzv1.BatchCheckItem{
newItem(utils.VerbCreate, dashboardGroup, dashboardResource, "", "6", "10"),
newItem(utils.VerbCreate, dashboardGroup, dashboardResource, "", "6", "20"),
}))
require.NoError(t, err)
t.Log(res.Groups)
require.Len(t, res.Groups[groupResource].Items, 2)
require.True(t, res.Groups[groupResource].Items["10"])
require.True(t, res.Groups[groupResource].Items["20"])
require.Len(t, res.Results, 2)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "10")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "20")].Allowed)
})
t.Run("user:10 should be able to get dashboard status for 10 and 11", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, statusSubresource)
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:10", utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, []*authzextv1.BatchCheckItem{
{Name: "10", Folder: "6"},
{Name: "11", Folder: "6"},
{Name: "12", Folder: "6"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:10", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "10"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "11"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "12"),
}))
require.NoError(t, err)
t.Log(res.Groups)
require.Len(t, res.Groups[groupResource].Items, 3)
require.True(t, res.Groups[groupResource].Items["10"])
require.True(t, res.Groups[groupResource].Items["11"])
require.False(t, res.Groups[groupResource].Items["12"])
require.Len(t, res.Results, 3)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "10")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "11")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "12")].Allowed)
})
t.Run("user:11 should be able to get dashboard status for 10, 11 and 12 through group_resource", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, statusSubresource)
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:11", utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, []*authzextv1.BatchCheckItem{
{Name: "10", Folder: "6"},
{Name: "11", Folder: "6"},
{Name: "12", Folder: "6"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:11", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "10"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "11"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "12"),
}))
require.NoError(t, err)
t.Log(res.Groups)
require.Len(t, res.Groups[groupResource].Items, 3)
require.True(t, res.Groups[groupResource].Items["10"])
require.True(t, res.Groups[groupResource].Items["11"])
require.True(t, res.Groups[groupResource].Items["12"])
require.Len(t, res.Results, 3)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "10")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "11")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "12")].Allowed)
})
t.Run("user:12 should be able to get dashboard status in folder 5 and 6", func(t *testing.T) {
groupResource := common.FormatGroupResource(dashboardGroup, dashboardResource, statusSubresource)
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:12", utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, []*authzextv1.BatchCheckItem{
{Name: "10", Folder: "5"},
{Name: "11", Folder: "6"},
{Name: "12", Folder: "6"},
{Name: "13", Folder: "1"},
res, err := server.BatchCheck(newContextWithNamespace(), newReq("user:12", []*authzv1.BatchCheckItem{
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "5", "10"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "11"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "6", "12"),
newItem(utils.VerbGet, dashboardGroup, dashboardResource, statusSubresource, "1", "13"),
}))
require.NoError(t, err)
require.Len(t, res.Groups[groupResource].Items, 4)
require.True(t, res.Groups[groupResource].Items["10"])
require.True(t, res.Groups[groupResource].Items["11"])
require.True(t, res.Groups[groupResource].Items["12"])
require.False(t, res.Groups[groupResource].Items["13"])
require.Len(t, res.Results, 4)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "5", "10")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "11")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "6", "12")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "1", "13")].Allowed)
})
// Cross-namespace tests
t.Run("cross-namespace: items with explicit namespace should be authorized against their own namespace", func(t *testing.T) {
// Helper to create item with explicit namespace
newItemWithNamespace := func(ns, verb, group, resource, subresource, folder, name string) *authzv1.BatchCheckItem {
correlationID := fmt.Sprintf("%s-%s-%s-%s-%s", ns, group, resource, folder, name)
return &authzv1.BatchCheckItem{
Namespace: ns,
Verb: verb,
Group: group,
Resource: resource,
Subresource: subresource,
Name: name,
Folder: folder,
CorrelationId: correlationID,
}
}
// user:1 has access to dashboard 1 in folder 1 in "default" namespace
// Both items use explicit namespace
res, err := server.BatchCheck(newContextWithNamespace(), &authzv1.BatchCheckRequest{
Subject: "user:1",
Checks: []*authzv1.BatchCheckItem{
// Item in default namespace (should be allowed - user:1 has access)
newItemWithNamespace(namespace, utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
// Another item in default namespace with different correlation ID
newItem(utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
},
})
require.NoError(t, err)
require.Len(t, res.Results, 2)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", namespace, dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s", dashboardGroup, dashboardResource, "1", "1")].Allowed)
})
t.Run("cross-namespace: items from different namespaces in same batch", func(t *testing.T) {
newItemWithNamespace := func(ns, verb, group, resource, subresource, folder, name string) *authzv1.BatchCheckItem {
correlationID := fmt.Sprintf("%s-%s-%s-%s-%s", ns, group, resource, folder, name)
return &authzv1.BatchCheckItem{
Namespace: ns,
Verb: verb,
Group: group,
Resource: resource,
Subresource: subresource,
Name: name,
Folder: folder,
CorrelationId: correlationID,
}
}
// user:2 has group_resource access in "default" namespace
// They should have access in default but not in other-namespace (no tuples there)
res, err := server.BatchCheck(newContextWithNamespace(), &authzv1.BatchCheckRequest{
Subject: "user:2",
Checks: []*authzv1.BatchCheckItem{
// Items in default namespace (should be allowed - user:2 has group_resource access)
newItemWithNamespace(namespace, utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
newItemWithNamespace(namespace, utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "2"),
// Items in other-namespace (should be denied - no tuples in other-namespace)
newItemWithNamespace("other-namespace", utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
newItemWithNamespace("other-namespace", utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "2"),
},
})
require.NoError(t, err)
require.Len(t, res.Results, 4)
// Default namespace items should be allowed
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", namespace, dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", namespace, dashboardGroup, dashboardResource, "2", "2")].Allowed)
// Other namespace items should be denied (no permissions in that namespace)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", "other-namespace", dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", "other-namespace", dashboardGroup, dashboardResource, "2", "2")].Allowed)
})
t.Run("cross-namespace: mixed results across multiple namespaces", func(t *testing.T) {
newItemWithNamespace := func(ns, verb, group, resource, subresource, folder, name string) *authzv1.BatchCheckItem {
correlationID := fmt.Sprintf("%s-%s-%s-%s-%s", ns, group, resource, folder, name)
return &authzv1.BatchCheckItem{
Namespace: ns,
Verb: verb,
Group: group,
Resource: resource,
Subresource: subresource,
Name: name,
Folder: folder,
CorrelationId: correlationID,
}
}
// user:1 has specific access to dashboard 1 in folder 1
// user:2 would have broader access, but we're testing user:1
res, err := server.BatchCheck(newContextWithNamespace(), &authzv1.BatchCheckRequest{
Subject: "user:1",
Checks: []*authzv1.BatchCheckItem{
// Allowed in default namespace
newItemWithNamespace(namespace, utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
// Denied in default namespace (user:1 doesn't have access to dashboard 2)
newItemWithNamespace(namespace, utils.VerbGet, dashboardGroup, dashboardResource, "", "2", "2"),
// Denied in other-namespace (no tuples)
newItemWithNamespace("other-namespace", utils.VerbGet, dashboardGroup, dashboardResource, "", "1", "1"),
},
})
require.NoError(t, err)
require.Len(t, res.Results, 3)
assert.True(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", namespace, dashboardGroup, dashboardResource, "1", "1")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", namespace, dashboardGroup, dashboardResource, "2", "2")].Allowed)
assert.False(t, res.Results[fmt.Sprintf("%s-%s-%s-%s-%s", "other-namespace", dashboardGroup, dashboardResource, "1", "1")].Allowed)
})
}
@@ -15,7 +15,6 @@ import (
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/tracing"
authzextv1 "github.com/grafana/grafana/pkg/services/authz/proto/v1"
"github.com/grafana/grafana/pkg/services/authz/zanzana/common"
"github.com/grafana/grafana/pkg/services/authz/zanzana/store"
"github.com/grafana/grafana/pkg/services/sqlstore"
@@ -37,14 +36,14 @@ const (
// Timeout for List operations
listTimeout = 30 * time.Second
// BenchmarkBatchCheck measures the performance of BatchCheck requests with 50 items per batch.
batchCheckSize = 50
// Resource type constants for benchmarks
benchDashboardGroup = "dashboard.grafana.app"
benchDashboardResource = "dashboards"
benchFolderGroup = "folder.grafana.app"
benchFolderResource = "folders"
// BenchmarkBatchCheck measures the performance of BatchCheck requests with 50 items per batch.
batchCheckSize = 50
)
// benchmarkData holds all the generated test data for benchmarks
@@ -338,6 +337,14 @@ func setupBenchmarkServer(b *testing.B) (*Server, *benchmarkData) {
}
cfg := setting.NewCfg()
cfg.ZanzanaServer.CacheSettings.CheckCacheLimit = 100000 // Cache check results
cfg.ZanzanaServer.CacheSettings.CheckQueryCacheEnabled = true // Cache check subproblems
cfg.ZanzanaServer.CacheSettings.CheckIteratorCacheEnabled = true // Cache DB iterators for checks
cfg.ZanzanaServer.CacheSettings.CheckIteratorCacheMaxResults = 10000 // Max results per iterator
cfg.ZanzanaServer.CacheSettings.SharedIteratorEnabled = true // Share iterators across concurrent checks
cfg.ZanzanaServer.CacheSettings.SharedIteratorLimit = 10000 // Max shared iterators
testStore := sqlstore.NewTestStore(b, sqlstore.WithCfg(cfg))
openFGAStore, err := store.NewEmbeddedStore(cfg, testStore, log.NewNopLogger())
@@ -573,58 +580,64 @@ func BenchmarkCheck(b *testing.B) {
})
}
// BenchmarkBatchCheck measures the performance of BatchCheck requests
func BenchmarkBatchCheck(b *testing.B) {
srv, data := setupBenchmarkServer(b)
ctx := newContextWithNamespace()
// Helper to create batch check requests
newBatchCheckReq := func(subject string, items []*authzextv1.BatchCheckItem) *authzextv1.BatchCheckRequest {
return &authzextv1.BatchCheckRequest{
Namespace: benchNamespace,
Subject: subject,
Items: items,
// Helper to create batch check requests using the new authzv1 API
newBatchCheckReq := func(subject string, items []*authzv1.BatchCheckItem) *authzv1.BatchCheckRequest {
return &authzv1.BatchCheckRequest{
Subject: subject,
Checks: items,
}
}
// Helper to create batch items for resources in folders
createBatchItems := func(resources []string, resourceFolders map[string]string) []*authzextv1.BatchCheckItem {
items := make([]*authzextv1.BatchCheckItem, 0, batchCheckSize)
createBatchItems := func(resources []string, resourceFolders map[string]string) []*authzv1.BatchCheckItem {
items := make([]*authzv1.BatchCheckItem, 0, batchCheckSize)
for i := 0; i < batchCheckSize && i < len(resources); i++ {
resource := resources[i]
items = append(items, &authzextv1.BatchCheckItem{
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: resource,
Folder: resourceFolders[resource],
items = append(items, &authzv1.BatchCheckItem{
Namespace: benchNamespace,
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: resource,
Folder: resourceFolders[resource],
CorrelationId: fmt.Sprintf("item-%d", i),
})
}
return items
}
// Helper to create batch items for folders at a specific depth
createFolderBatchItems := func(folders []string, depth int, folderDepths map[string]int) []*authzextv1.BatchCheckItem {
items := make([]*authzextv1.BatchCheckItem, 0, batchCheckSize)
createFolderBatchItems := func(folders []string, depth int, folderDepths map[string]int) []*authzv1.BatchCheckItem {
items := make([]*authzv1.BatchCheckItem, 0, batchCheckSize)
for _, folder := range folders {
if folderDepths[folder] == depth && len(items) < batchCheckSize {
items = append(items, &authzextv1.BatchCheckItem{
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: fmt.Sprintf("resource-in-%s", folder),
Folder: folder,
items = append(items, &authzv1.BatchCheckItem{
Namespace: benchNamespace,
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: fmt.Sprintf("resource-in-%s", folder),
Folder: folder,
CorrelationId: fmt.Sprintf("item-%d", len(items)),
})
}
}
// Fill remaining slots if needed
for len(items) < batchCheckSize && len(folders) > 0 {
folder := folders[len(items)%len(folders)]
items = append(items, &authzextv1.BatchCheckItem{
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: fmt.Sprintf("resource-%d", len(items)),
Folder: folder,
items = append(items, &authzv1.BatchCheckItem{
Namespace: benchNamespace,
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: fmt.Sprintf("resource-%d", len(items)),
Folder: folder,
CorrelationId: fmt.Sprintf("item-%d", len(items)),
})
}
return items
@@ -636,6 +649,7 @@ func BenchmarkBatchCheck(b *testing.B) {
// User with group_resource permission - should have access to everything
user := data.users[0]
items := createBatchItems(data.resources, data.resourceFolders)
b.Logf("Testing BatchCheck with %d items, user has group_resource permission (all access)", len(items))
b.ResetTimer()
for i := 0; i < b.N; i++ {
@@ -643,7 +657,7 @@ func BenchmarkBatchCheck(b *testing.B) {
if err != nil {
b.Fatal(err)
}
_ = res.Groups
_ = res.Results
}
})
@@ -651,6 +665,7 @@ func BenchmarkBatchCheck(b *testing.B) {
// User with folder permission on shallow folder
user := data.users[usersPerPattern]
items := createFolderBatchItems(data.folders, 1, data.folderDepths)
b.Logf("Testing BatchCheck with %d items at depth 1", len(items))
b.ResetTimer()
for i := 0; i < b.N; i++ {
@@ -658,7 +673,7 @@ func BenchmarkBatchCheck(b *testing.B) {
if err != nil {
b.Fatal(err)
}
_ = res.Groups
_ = res.Results
}
})
@@ -666,6 +681,7 @@ func BenchmarkBatchCheck(b *testing.B) {
// User with folder permission on mid-depth folder
user := data.users[2*usersPerPattern]
items := createFolderBatchItems(data.folders, 4, data.folderDepths)
b.Logf("Testing BatchCheck with %d items at depth 4", len(items))
b.ResetTimer()
for i := 0; i < b.N; i++ {
@@ -673,22 +689,7 @@ func BenchmarkBatchCheck(b *testing.B) {
if err != nil {
b.Fatal(err)
}
_ = res.Groups
}
})
b.Run("FolderInheritance/Depth7", func(b *testing.B) {
// Check access on deepest folders (worst case for inheritance traversal)
user := data.users[usersPerPattern]
items := createFolderBatchItems(data.folders, data.maxDepth, data.folderDepths)
b.ResetTimer()
for i := 0; i < b.N; i++ {
res, err := srv.BatchCheck(ctx, newBatchCheckReq(user, items))
if err != nil {
b.Fatal(err)
}
_ = res.Groups
_ = res.Results
}
})
@@ -696,6 +697,7 @@ func BenchmarkBatchCheck(b *testing.B) {
// User with direct resource permission
user := data.users[4*usersPerPattern]
items := createBatchItems(data.resources, data.resourceFolders)
b.Logf("Testing BatchCheck with %d items, user has direct resource permission", len(items))
b.ResetTimer()
for i := 0; i < b.N; i++ {
@@ -703,22 +705,7 @@ func BenchmarkBatchCheck(b *testing.B) {
if err != nil {
b.Fatal(err)
}
_ = res.Groups
}
})
b.Run("TeamMembership", func(b *testing.B) {
// User who is a team member, team has folder permission
user := data.users[5*usersPerPattern]
items := createBatchItems(data.resources, data.resourceFolders)
b.ResetTimer()
for i := 0; i < b.N; i++ {
res, err := srv.BatchCheck(ctx, newBatchCheckReq(user, items))
if err != nil {
b.Fatal(err)
}
_ = res.Groups
_ = res.Results
}
})
@@ -726,6 +713,7 @@ func BenchmarkBatchCheck(b *testing.B) {
// User with no permissions - tests denial path
user := data.users[len(data.users)-1]
items := createBatchItems(data.resources, data.resourceFolders)
b.Logf("Testing BatchCheck with %d items, user has NO permissions (denial case)", len(items))
b.ResetTimer()
for i := 0; i < b.N; i++ {
@@ -733,24 +721,29 @@ func BenchmarkBatchCheck(b *testing.B) {
if err != nil {
b.Fatal(err)
}
_ = res.Groups
_ = res.Results
}
})
b.Run("MixedFolders", func(b *testing.B) {
// Batch of items across different folder depths
user := data.users[usersPerPattern]
items := make([]*authzextv1.BatchCheckItem, 0, batchCheckSize)
b.Run("MixedAccess", func(b *testing.B) {
// Create items from different folders - user has access to some but not all
user := data.users[3*usersPerPattern] // folder-scoped resource permission
items := make([]*authzv1.BatchCheckItem, 0, batchCheckSize)
// Mix of accessible and inaccessible resources
for i := 0; i < batchCheckSize; i++ {
folder := data.folders[i%len(data.folders)]
items = append(items, &authzextv1.BatchCheckItem{
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: fmt.Sprintf("resource-%d", i),
Folder: folder,
items = append(items, &authzv1.BatchCheckItem{
Namespace: benchNamespace,
Verb: utils.VerbGet,
Group: benchDashboardGroup,
Resource: benchDashboardResource,
Name: fmt.Sprintf("resource-%d", i),
Folder: folder,
CorrelationId: fmt.Sprintf("item-%d", i),
})
}
b.Logf("Testing BatchCheck with %d items, user has mixed access (some allowed, some denied)", len(items))
b.ResetTimer()
for i := 0; i < b.N; i++ {
@@ -758,9 +751,31 @@ func BenchmarkBatchCheck(b *testing.B) {
if err != nil {
b.Fatal(err)
}
_ = res.Groups
_ = res.Results
}
})
// Test BatchCheck at various folder depths
for depth := 0; depth <= data.maxDepth; depth++ {
depth := depth // capture for closure
if len(data.foldersByDepth[depth]) == 0 {
continue
}
b.Run(fmt.Sprintf("ByDepth/Depth%d", depth), func(b *testing.B) {
user := fmt.Sprintf("user:depth-%d-access", depth)
items := createFolderBatchItems(data.folders, depth, data.folderDepths)
b.Logf("Testing BatchCheck with %d items at depth %d", len(items), depth)
b.ResetTimer()
for i := 0; i < b.N; i++ {
res, err := srv.BatchCheck(ctx, newBatchCheckReq(user, items))
if err != nil {
b.Fatal(err)
}
_ = res.Results
}
})
}
}
// BenchmarkList measures the performance of List requests (Compile equivalent)
+7 -69
View File
@@ -647,6 +647,12 @@
},
"BacktestConfig": {
"properties": {
"annotations": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"condition": {
"type": "string"
},
@@ -656,16 +662,8 @@
},
"type": "array"
},
"exec_err_state": {
"enum": [
"OK",
"Alerting",
"Error"
],
"type": "string"
},
"for": {
"type": "string"
"$ref": "#/definitions/Duration"
},
"from": {
"format": "date-time",
@@ -674,22 +672,12 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"keep_firing_for": {
"type": "string"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"missing_series_evals_to_resolve": {
"format": "int64",
"type": "integer"
},
"namespace_uid": {
"type": "string"
},
"no_data_state": {
"enum": [
"Alerting",
@@ -698,18 +686,12 @@
],
"type": "string"
},
"rule_group": {
"type": "string"
},
"title": {
"type": "string"
},
"to": {
"format": "date-time",
"type": "string"
},
"uid": {
"type": "string"
}
},
"type": "object"
@@ -1831,12 +1813,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -1847,12 +1823,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/definitions/GettableExtendedRuleNode"
@@ -3172,12 +3142,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -3188,12 +3152,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/definitions/PostableExtendedRuleNode"
@@ -3859,14 +3817,6 @@
},
"type": "object"
},
"RemoteWriteConfig": {
"properties": {
"url": {
"type": "string"
}
},
"type": "object"
},
"ResponseDetails": {
"properties": {
"msg": {
@@ -4143,12 +4093,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -4159,12 +4103,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/definitions/GettableExtendedRuleNode"
@@ -284,20 +284,11 @@ type PostableRuleGroupConfig struct {
// fields below are used by Mimir/Loki rulers
SourceTenants []string `yaml:"source_tenants,omitempty" json:"source_tenants,omitempty"`
EvaluationDelay *model.Duration `yaml:"evaluation_delay,omitempty" json:"evaluation_delay,omitempty"`
QueryOffset *model.Duration `yaml:"query_offset,omitempty" json:"query_offset,omitempty"`
AlignEvaluationTimeOnInterval bool `yaml:"align_evaluation_time_on_interval,omitempty" json:"align_evaluation_time_on_interval,omitempty"`
Limit int `yaml:"limit,omitempty" json:"limit,omitempty"`
Labels map[string]string `yaml:"labels,omitempty" json:"labels,omitempty"`
// GEM Ruler.
RWConfigs []RemoteWriteConfig `yaml:"remote_write,omitempty" json:"remote_write,omitempty"`
}
type RemoteWriteConfig struct {
URL string `yaml:"url,omitempty" json:"url,omitempty"`
SourceTenants []string `yaml:"source_tenants,omitempty" json:"source_tenants,omitempty"`
EvaluationDelay *model.Duration `yaml:"evaluation_delay,omitempty" json:"evaluation_delay,omitempty"`
QueryOffset *model.Duration `yaml:"query_offset,omitempty" json:"query_offset,omitempty"`
AlignEvaluationTimeOnInterval bool `yaml:"align_evaluation_time_on_interval,omitempty" json:"align_evaluation_time_on_interval,omitempty"`
Limit int `yaml:"limit,omitempty" json:"limit,omitempty"`
}
func (c *PostableRuleGroupConfig) UnmarshalJSON(b []byte) error {
@@ -337,8 +328,8 @@ func (c *PostableRuleGroupConfig) validate() error {
return fmt.Errorf("cannot mix Grafana & Prometheus style rules")
}
if hasGrafRules && (len(c.SourceTenants) > 0 || c.EvaluationDelay != nil || c.QueryOffset != nil || c.AlignEvaluationTimeOnInterval || c.Limit > 0 || len(c.Labels) > 0 || len(c.RWConfigs) > 0) {
return fmt.Errorf("fields source_tenants, evaluation_delay, query_offset, align_evaluation_time_on_interval, limit, labels, and remote_write are not supported for Grafana rules")
if hasGrafRules && (len(c.SourceTenants) > 0 || c.EvaluationDelay != nil || c.QueryOffset != nil || c.AlignEvaluationTimeOnInterval || c.Limit > 0) {
return fmt.Errorf("fields source_tenants, evaluation_delay, query_offset, align_evaluation_time_on_interval and limit are not supported for Grafana rules")
}
return nil
}
@@ -354,16 +345,11 @@ type GettableRuleGroupConfig struct {
// fields below are used by Mimir/Loki rulers
SourceTenants []string `yaml:"source_tenants,omitempty" json:"source_tenants,omitempty"`
EvaluationDelay *model.Duration `yaml:"evaluation_delay,omitempty" json:"evaluation_delay,omitempty"`
QueryOffset *model.Duration `yaml:"query_offset,omitempty" json:"query_offset,omitempty"`
AlignEvaluationTimeOnInterval bool `yaml:"align_evaluation_time_on_interval,omitempty" json:"align_evaluation_time_on_interval,omitempty"`
Limit int `yaml:"limit,omitempty" json:"limit,omitempty"`
Labels map[string]string `yaml:"labels,omitempty" json:"labels,omitempty"`
// GEM Ruler.
RWConfigs []RemoteWriteConfig `yaml:"remote_write,omitempty" json:"remote_write,omitempty"`
SourceTenants []string `yaml:"source_tenants,omitempty" json:"source_tenants,omitempty"`
EvaluationDelay *model.Duration `yaml:"evaluation_delay,omitempty" json:"evaluation_delay,omitempty"`
QueryOffset *model.Duration `yaml:"query_offset,omitempty" json:"query_offset,omitempty"`
AlignEvaluationTimeOnInterval bool `yaml:"align_evaluation_time_on_interval,omitempty" json:"align_evaluation_time_on_interval,omitempty"`
Limit int `yaml:"limit,omitempty" json:"limit,omitempty"`
}
func (c *GettableRuleGroupConfig) UnmarshalJSON(b []byte) error {
+7 -69
View File
@@ -647,6 +647,12 @@
},
"BacktestConfig": {
"properties": {
"annotations": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"condition": {
"type": "string"
},
@@ -656,16 +662,8 @@
},
"type": "array"
},
"exec_err_state": {
"enum": [
"OK",
"Alerting",
"Error"
],
"type": "string"
},
"for": {
"type": "string"
"$ref": "#/definitions/Duration"
},
"from": {
"format": "date-time",
@@ -674,22 +672,12 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"keep_firing_for": {
"type": "string"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"missing_series_evals_to_resolve": {
"format": "int64",
"type": "integer"
},
"namespace_uid": {
"type": "string"
},
"no_data_state": {
"enum": [
"Alerting",
@@ -698,18 +686,12 @@
],
"type": "string"
},
"rule_group": {
"type": "string"
},
"title": {
"type": "string"
},
"to": {
"format": "date-time",
"type": "string"
},
"uid": {
"type": "string"
}
},
"type": "object"
@@ -1831,12 +1813,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -1847,12 +1823,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/definitions/GettableExtendedRuleNode"
@@ -3172,12 +3142,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -3188,12 +3152,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/definitions/PostableExtendedRuleNode"
@@ -3859,14 +3817,6 @@
},
"type": "object"
},
"RemoteWriteConfig": {
"properties": {
"url": {
"type": "string"
}
},
"type": "object"
},
"ResponseDetails": {
"properties": {
"msg": {
@@ -4143,12 +4093,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -4159,12 +4103,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/definitions/GettableExtendedRuleNode"
+7 -69
View File
@@ -5072,6 +5072,12 @@
"BacktestConfig": {
"type": "object",
"properties": {
"annotations": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"condition": {
"type": "string"
},
@@ -5081,16 +5087,8 @@
"$ref": "#/definitions/AlertQuery"
}
},
"exec_err_state": {
"type": "string",
"enum": [
"OK",
"Alerting",
"Error"
]
},
"for": {
"type": "string"
"$ref": "#/definitions/Duration"
},
"from": {
"type": "string",
@@ -5099,22 +5097,12 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"keep_firing_for": {
"type": "string"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"missing_series_evals_to_resolve": {
"type": "integer",
"format": "int64"
},
"namespace_uid": {
"type": "string"
},
"no_data_state": {
"type": "string",
"enum": [
@@ -5123,18 +5111,12 @@
"OK"
]
},
"rule_group": {
"type": "string"
},
"title": {
"type": "string"
},
"to": {
"type": "string",
"format": "date-time"
},
"uid": {
"type": "string"
}
}
},
@@ -6257,12 +6239,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"limit": {
"type": "integer",
"format": "int64"
@@ -6273,12 +6249,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"type": "array",
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
}
},
"rules": {
"type": "array",
"items": {
@@ -7599,12 +7569,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"limit": {
"type": "integer",
"format": "int64"
@@ -7615,12 +7579,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"type": "array",
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
}
},
"rules": {
"type": "array",
"items": {
@@ -8285,14 +8243,6 @@
}
}
},
"RemoteWriteConfig": {
"type": "object",
"properties": {
"url": {
"type": "string"
}
}
},
"ResponseDetails": {
"type": "object",
"properties": {
@@ -8570,12 +8520,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"limit": {
"type": "integer",
"format": "int64"
@@ -8586,12 +8530,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"type": "array",
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
}
},
"rules": {
"type": "array",
"items": {
+61
View File
@@ -152,6 +152,67 @@ func (c authzLimitedClient) Check(ctx context.Context, id claims.AuthInfo, req c
return resp, nil
}
// BatchCheck implements claims.AccessClient.
func (c authzLimitedClient) BatchCheck(ctx context.Context, id claims.AuthInfo, req claims.BatchCheckRequest) (claims.BatchCheckResponse, error) {
ctx, span := tracer.Start(ctx, "resource.authzLimitedClient.BatchCheck", trace.WithAttributes(
attribute.Int("num_checks", len(req.Checks)),
attribute.Bool("fallback_used", FallbackUsed(ctx)),
))
defer span.End()
if FallbackUsed(ctx) {
span.SetStatus(codes.Error, "BatchCheck not supported with fallback")
return claims.BatchCheckResponse{}, fmt.Errorf("BatchCheck not supported when fallback is used")
}
// Filter checks to only those that require RBAC and validate namespace
rbacChecks := make([]claims.BatchCheckItem, 0, len(req.Checks))
allowedByDefault := make(map[string]bool, len(req.Checks))
for _, check := range req.Checks {
if !claims.NamespaceMatches(id.GetNamespace(), check.Namespace) {
span.SetStatus(codes.Error, "Namespace mismatch")
span.RecordError(claims.ErrNamespaceMismatch)
return claims.BatchCheckResponse{}, claims.ErrNamespaceMismatch
}
if c.IsCompatibleWithRBAC(check.Group, check.Resource) {
rbacChecks = append(rbacChecks, check)
} else {
allowedByDefault[check.CorrelationID] = true
}
}
// If all checks are allowed by default, return early
if len(rbacChecks) == 0 {
results := make(map[string]claims.BatchCheckResult, len(req.Checks))
for _, check := range req.Checks {
results[check.CorrelationID] = claims.BatchCheckResult{
Allowed: true,
}
}
return claims.BatchCheckResponse{Results: results}, nil
}
// Call the underlying client with RBAC checks
resp, err := c.client.BatchCheck(ctx, id, claims.BatchCheckRequest{Checks: rbacChecks})
if err != nil {
c.logger.FromContext(ctx).Error("BatchCheck failed", "error", err, "num_checks", len(rbacChecks))
span.SetStatus(codes.Error, fmt.Sprintf("batch check failed: %v", err))
span.RecordError(err)
return resp, err
}
// Merge results with allowed-by-default checks
for correlationID := range allowedByDefault {
resp.Results[correlationID] = claims.BatchCheckResult{
Allowed: true,
}
}
return resp, nil
}
// Compile implements claims.AccessClient.
func (c authzLimitedClient) Compile(ctx context.Context, id claims.AuthInfo, req claims.ListRequest) (claims.ItemChecker, claims.Zookie, error) {
t := time.Now()
@@ -159,6 +159,97 @@ func TestNamespaceMatching(t *testing.T) {
}
}
func TestAuthzLimitedClient_BatchCheck(t *testing.T) {
mockClient := authlib.FixedAccessClient(true)
client := NewAuthzLimitedClient(mockClient, AuthzOptions{})
t.Run("returns error when fallback is used", func(t *testing.T) {
ctx := WithFallback(context.Background())
req := authlib.BatchCheckRequest{
Checks: []authlib.BatchCheckItem{
{
CorrelationID: "0",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: utils.VerbGet,
Namespace: "stacks-1",
Name: "test-dashboard",
},
},
}
_, err := client.BatchCheck(ctx, &identity.StaticRequester{Namespace: "stacks-1"}, req)
require.Error(t, err)
assert.Contains(t, err.Error(), "fallback")
})
t.Run("works normally without fallback", func(t *testing.T) {
ctx := context.Background()
req := authlib.BatchCheckRequest{
Checks: []authlib.BatchCheckItem{
{
CorrelationID: "0",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: utils.VerbGet,
Namespace: "stacks-1",
Name: "test-dashboard",
},
},
}
resp, err := client.BatchCheck(ctx, &identity.StaticRequester{Namespace: "stacks-1"}, req)
require.NoError(t, err)
require.Len(t, resp.Results, 1)
assert.True(t, resp.Results["0"].Allowed)
})
t.Run("returns error on namespace mismatch", func(t *testing.T) {
ctx := context.Background()
req := authlib.BatchCheckRequest{
Checks: []authlib.BatchCheckItem{
{
CorrelationID: "0",
Group: "dashboard.grafana.app",
Resource: "dashboards",
Verb: utils.VerbGet,
Namespace: "stacks-2", // Different namespace
Name: "test-dashboard",
},
},
}
_, err := client.BatchCheck(ctx, &identity.StaticRequester{Namespace: "stacks-1"}, req)
require.Error(t, err)
assert.ErrorIs(t, err, authlib.ErrNamespaceMismatch)
})
t.Run("allows non-RBAC resources by default", func(t *testing.T) {
// Use a client that would deny if checked
denyClient := authlib.FixedAccessClient(false)
client := NewAuthzLimitedClient(denyClient, AuthzOptions{})
ctx := context.Background()
req := authlib.BatchCheckRequest{
Checks: []authlib.BatchCheckItem{
{
CorrelationID: "0",
Group: "unknown.group",
Resource: "unknown.resource",
Verb: utils.VerbGet,
Namespace: "stacks-1",
Name: "test",
},
},
}
resp, err := client.BatchCheck(ctx, &identity.StaticRequester{Namespace: "stacks-1"}, req)
require.NoError(t, err)
require.Len(t, resp.Results, 1)
assert.True(t, resp.Results["0"].Allowed, "non-RBAC resources should be allowed by default")
})
}
// TestNamespaceMatchingFallback tests namespace matching in Check and Compile methods when fallback is used
func TestNamespaceMatchingFallback(t *testing.T) {
// Create a mock client that always returns allowed=true
+253 -108
View File
@@ -20,6 +20,7 @@ import (
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"github.com/grafana/authlib/authz"
claims "github.com/grafana/authlib/types"
"github.com/grafana/dskit/backoff"
@@ -1051,78 +1052,93 @@ func (s *server) List(ctx context.Context, req *resourcepb.ListRequest) (*resour
rsp := &resourcepb.ListResponse{}
key := req.Options.Key
checker, _, err := s.access.Compile(ctx, user, claims.ListRequest{
Group: key.Group,
Resource: key.Resource,
Namespace: key.Namespace,
Verb: utils.VerbGet,
})
var trashChecker claims.ItemChecker // only for trash
// Determine verb for authorization
verb := utils.VerbGet
if req.Source == resourcepb.ListRequest_TRASH {
trashChecker, _, err = s.access.Compile(ctx, user, claims.ListRequest{
Group: key.Group,
Resource: key.Resource,
Namespace: key.Namespace,
Verb: utils.VerbSetPermissions, // Basically Admin
})
if err != nil {
return &resourcepb.ListResponse{Error: AsErrorResult(err)}, nil
}
}
if err != nil {
return &resourcepb.ListResponse{Error: AsErrorResult(err)}, nil
}
if checker == nil {
return &resourcepb.ListResponse{Error: &resourcepb.ErrorResult{
Code: http.StatusForbidden,
}}, nil
verb = utils.VerbSetPermissions // Basically Admin for trash
}
// Candidate item for batch authorization
type candidateItem struct {
name string
folder string
resourceVersion int64
value []byte
continueToken string
}
var nextToken string
var iterErr error
// Process items in batches within the iterator
iterFunc := func(iter ListIterator) error {
for iter.Next() {
if err := iter.Error(); err != nil {
// Convert ListIterator to iter.Seq
candidates := func(yield func(candidateItem) bool) {
for iter.Next() {
if !yield(candidateItem{
name: iter.Name(),
folder: iter.Folder(),
resourceVersion: iter.ResourceVersion(),
value: iter.Value(),
continueToken: iter.ContinueToken(),
}) {
return
}
}
}
extractFn := func(c candidateItem) authz.BatchCheckItem {
return authz.BatchCheckItem{
Name: c.name,
Folder: c.folder,
Verb: verb,
Group: key.Group,
Resource: key.Resource,
Namespace: key.Namespace,
}
}
for item, err := range authz.FilterAuthorized(ctx, s.access, candidates, extractFn).Items {
if err != nil {
return err
}
// Trash is only accessible to admins or the user who deleted the object
// For trash items, also check if user is the one who deleted it
if req.Source == resourcepb.ListRequest_TRASH {
if !s.isTrashItemAuthorized(ctx, iter, trashChecker) {
if !s.isTrashItemAuthorizedByValue(ctx, item.value, true) {
continue
}
} else if !checker(iter.Name(), iter.Folder()) {
continue
}
item := &resourcepb.ResourceWrapper{
ResourceVersion: iter.ResourceVersion(),
Value: iter.Value(),
}
rsp.Items = append(rsp.Items, &resourcepb.ResourceWrapper{
ResourceVersion: item.resourceVersion,
Value: item.value,
})
pageBytes += len(item.value)
pageBytes += len(item.Value)
rsp.Items = append(rsp.Items, item)
// Check if we've reached the page limit
if (req.Limit > 0 && len(rsp.Items) >= int(req.Limit)) || pageBytes >= maxPageBytes {
t := iter.ContinueToken()
if iter.Next() {
rsp.NextPageToken = t
}
return iter.Error()
nextToken = item.continueToken
break
}
}
return iter.Error()
}
var rv int64
switch req.Source {
case resourcepb.ListRequest_STORE:
rv, err = s.backend.ListIterator(ctx, req, iterFunc)
rv, iterErr = s.backend.ListIterator(ctx, req, iterFunc)
case resourcepb.ListRequest_HISTORY, resourcepb.ListRequest_TRASH:
rv, err = s.backend.ListHistory(ctx, req, iterFunc)
rv, iterErr = s.backend.ListHistory(ctx, req, iterFunc)
default:
return nil, apierrors.NewBadRequest(fmt.Sprintf("invalid list source: %v", req.Source))
}
if err != nil {
rsp.Error = AsErrorResult(err)
if iterErr != nil {
rsp.Error = AsErrorResult(iterErr)
return rsp, nil
}
@@ -1134,18 +1150,21 @@ func (s *server) List(ctx context.Context, req *resourcepb.ListRequest) (*resour
return rsp, nil
}
rsp.ResourceVersion = rv
return rsp, err
rsp.NextPageToken = nextToken
return rsp, nil
}
// isTrashItemAuthorized checks if the user has access to the trash item.
func (s *server) isTrashItemAuthorized(ctx context.Context, iter ListIterator, trashChecker claims.ItemChecker) bool {
// isTrashItemAuthorizedByValue checks if the user has access to the trash item using the raw value.
// hasAdminPermission indicates whether the user has admin permission (from BatchCheck).
func (s *server) isTrashItemAuthorizedByValue(ctx context.Context, value []byte, hasAdminPermission bool) bool {
user, ok := claims.AuthInfoFrom(ctx)
if !ok || user == nil {
return false
}
partial := &metav1.PartialObjectMetadata{}
err := json.Unmarshal(iter.Value(), partial)
err := json.Unmarshal(value, partial)
if err != nil {
return false
}
@@ -1161,7 +1180,7 @@ func (s *server) isTrashItemAuthorized(ctx context.Context, iter ListIterator, t
}
// Trash is only accessible to admins or the user who deleted the object
return obj.GetUpdatedBy() == user.GetUID() || trashChecker(iter.Name(), iter.Folder())
return obj.GetUpdatedBy() == user.GetUID() || hasAdminPermission
}
func (s *server) initWatcher() error {
@@ -1202,18 +1221,6 @@ func (s *server) Watch(req *resourcepb.WatchRequest, srv resourcepb.ResourceStor
}
key := req.Options.Key
checker, _, err := s.access.Compile(ctx, user, claims.ListRequest{
Group: key.Group,
Resource: key.Resource,
Namespace: key.Namespace,
Verb: utils.VerbGet,
})
if err != nil {
return err
}
if checker == nil {
return apierrors.NewUnauthorized("not allowed to list anything") // ?? or a single error?
}
// Start listening -- this will buffer any changes that happen while we backfill.
// If events are generated faster than we can process them, then some events will be dropped.
@@ -1267,22 +1274,56 @@ func (s *server) Watch(req *resourcepb.WatchRequest, srv resourcepb.ResourceStor
var initialEventsRV int64 // resource version coming from the initial events
if req.SendInitialEvents {
// Backfill the stream by adding every existing entities.
// Backfill the stream by adding every existing entities with batch authorization
type candidateEvent struct {
name string
folder string
value []byte
version int64
}
initialEventsRV, err = s.backend.ListIterator(ctx, &resourcepb.ListRequest{Options: req.Options}, func(iter ListIterator) error {
for iter.Next() {
if err := iter.Error(); err != nil {
// Convert ListIterator to iter.Seq
candidates := func(yield func(candidateEvent) bool) {
for iter.Next() {
if !yield(candidateEvent{
name: iter.Name(),
folder: iter.Folder(),
value: iter.Value(),
version: iter.ResourceVersion(),
}) {
return
}
}
}
extractFn := func(c candidateEvent) authz.BatchCheckItem {
return authz.BatchCheckItem{
Name: c.name,
Folder: c.folder,
Verb: utils.VerbGet,
Group: key.Group,
Resource: key.Resource,
Namespace: key.Namespace,
}
}
for item, err := range authz.FilterAuthorized(ctx, s.access, candidates, extractFn).Items {
if err != nil {
return err
}
if err := srv.Send(&resourcepb.WatchEvent{
Type: resourcepb.WatchEvent_ADDED,
Resource: &resourcepb.WatchEvent_Resource{
Value: iter.Value(),
Version: iter.ResourceVersion(),
Value: item.value,
Version: item.version,
},
}); err != nil {
return err
}
}
return iter.Error()
})
if err != nil {
@@ -1309,6 +1350,127 @@ func (s *server) Watch(req *resourcepb.WatchRequest, srv resourcepb.ResourceStor
default:
since = req.Since
}
// Type to hold candidate events for batch authorization
type candidateWatchEvent struct {
event *WrittenEvent
}
// Type to hold authorized event with its fetched previous object
type authorizedEvent struct {
event *WrittenEvent
previous *resourcepb.ReadResponse // nil if no previous or fetch failed
}
const maxBatchSize = 100
// processEventBatch authorizes and sends a batch of events.
// Errors are logged but never returned to keep the watch running.
processEventBatch := func(batch []*WrittenEvent) {
if len(batch) == 0 {
return
}
// Convert batch to iter.Seq for FilterAuthorized
candidates := func(yield func(candidateWatchEvent) bool) {
for _, event := range batch {
if !yield(candidateWatchEvent{event: event}) {
return
}
}
}
extractFn := func(c candidateWatchEvent) authz.BatchCheckItem {
return authz.BatchCheckItem{
Name: c.event.Key.Name,
Folder: c.event.Folder,
Verb: utils.VerbGet,
Group: key.Group,
Resource: key.Resource,
Namespace: key.Namespace,
}
}
// Step 1: Collect all authorized events
var authorizedEvents []authorizedEvent
for item, err := range authz.FilterAuthorized(ctx, s.access, candidates, extractFn).Items {
if err != nil {
s.log.Error("error during batch authorization", "error", err)
continue
}
authorizedEvents = append(authorizedEvents, authorizedEvent{event: item.event})
}
if len(authorizedEvents) == 0 {
return
}
// Step 2: Fetch previous objects concurrently for events that need them
var wg sync.WaitGroup
for i := range authorizedEvents {
if authorizedEvents[i].event.PreviousRV > 0 {
wg.Add(1)
go func(idx int) {
defer wg.Done()
event := authorizedEvents[idx].event
prevObj, readErr := s.Read(ctx, &resourcepb.ReadRequest{Key: event.Key, ResourceVersion: event.PreviousRV})
if readErr != nil {
s.log.Error("error reading previous object", "key", event.Key, "resource_version", event.PreviousRV, "error", readErr)
return
}
if prevObj.Error != nil {
s.log.Error("error reading previous object", "key", event.Key, "resource_version", event.PreviousRV, "error", prevObj.Error)
return
}
if prevObj.ResourceVersion != event.PreviousRV {
s.log.Error("resource version mismatch", "key", event.Key, "resource_version", event.PreviousRV, "actual", prevObj.ResourceVersion)
return
}
authorizedEvents[idx].previous = prevObj
}(i)
}
}
wg.Wait()
// Step 3: Send all events in order
for _, authEvent := range authorizedEvents {
event := authEvent.event
value := event.Value
// remove the delete marker stored in the value for deleted objects
if event.Type == resourcepb.WatchEvent_DELETED {
value = []byte{}
}
resp := &resourcepb.WatchEvent{
Timestamp: event.Timestamp,
Type: event.Type,
Resource: &resourcepb.WatchEvent_Resource{
Value: value,
Version: event.ResourceVersion,
},
}
if authEvent.previous != nil {
resp.Previous = &resourcepb.WatchEvent_Resource{
Value: authEvent.previous.Value,
Version: authEvent.previous.ResourceVersion,
}
}
if err := srv.Send(resp); err != nil {
s.log.Error("error sending watch event", "key", event.Key, "error", err)
continue
}
if s.storageMetrics != nil {
// record latency - resource version is a unix timestamp in microseconds so we convert to seconds
latencySeconds := float64(time.Now().UnixMicro()-event.ResourceVersion) / 1e6
if latencySeconds > 0 {
s.storageMetrics.WatchEventLatency.WithLabelValues(event.Key.Resource).Observe(latencySeconds)
}
}
}
}
// Main event loop with batching
var batch []*WrittenEvent
for {
select {
case <-ctx.Done():
@@ -1316,57 +1478,40 @@ func (s *server) Watch(req *resourcepb.WatchRequest, srv resourcepb.ResourceStor
case event, ok := <-stream:
if !ok {
// Process any remaining events in the batch before closing
processEventBatch(batch)
s.log.Debug("watch events closed")
return nil
}
s.log.Debug("Server Broadcasting", "type", event.Type, "rv", event.ResourceVersion, "previousRV", event.PreviousRV, "group", event.Key.Group, "namespace", event.Key.Namespace, "resource", event.Key.Resource, "name", event.Key.Name)
if event.ResourceVersion > since && matchesQueryKey(req.Options.Key, event.Key) {
if !checker(event.Key.Name, event.Folder) {
continue
}
batch = append(batch, event)
}
value := event.Value
// remove the delete marker stored in the value for deleted objects
if event.Type == resourcepb.WatchEvent_DELETED {
value = []byte{}
}
resp := &resourcepb.WatchEvent{
Timestamp: event.Timestamp,
Type: event.Type,
Resource: &resourcepb.WatchEvent_Resource{
Value: value,
Version: event.ResourceVersion,
},
}
if event.PreviousRV > 0 {
prevObj, err := s.Read(ctx, &resourcepb.ReadRequest{Key: event.Key, ResourceVersion: event.PreviousRV})
if err != nil {
// This scenario should never happen, but if it does, we should log it and continue
// sending the event without the previous object. The client will decide what to do.
s.log.Error("error reading previous object", "key", event.Key, "resource_version", event.PreviousRV, "error", prevObj.Error)
} else {
if prevObj.ResourceVersion != event.PreviousRV {
s.log.Error("resource version mismatch", "key", event.Key, "resource_version", event.PreviousRV, "actual", prevObj.ResourceVersion)
return fmt.Errorf("resource version mismatch")
}
resp.Previous = &resourcepb.WatchEvent_Resource{
Value: prevObj.Value,
Version: prevObj.ResourceVersion,
}
// Drain any additional events that are already available (non-blocking)
// Stop draining when we reach maxBatchSize to bound memory and latency
draining := true
for draining && len(batch) < maxBatchSize {
select {
case event, ok := <-stream:
if !ok {
// Process the batch before closing
processEventBatch(batch)
s.log.Debug("watch events closed")
return nil
}
}
if err := srv.Send(resp); err != nil {
return err
}
if s.storageMetrics != nil {
// record latency - resource version is a unix timestamp in microseconds so we convert to seconds
latencySeconds := float64(time.Now().UnixMicro()-event.ResourceVersion) / 1e6
if latencySeconds > 0 {
s.storageMetrics.WatchEventLatency.WithLabelValues(event.Key.Resource).Observe(latencySeconds)
s.log.Debug("Server Broadcasting", "type", event.Type, "rv", event.ResourceVersion, "previousRV", event.PreviousRV, "group", event.Key.Group, "namespace", event.Key.Namespace, "resource", event.Key.Resource, "name", event.Key.Name)
if event.ResourceVersion > since && matchesQueryKey(req.Options.Key, event.Key) {
batch = append(batch, event)
}
default:
draining = false
}
}
// Process the collected batch
processEventBatch(batch)
batch = batch[:0] // Reset batch for reuse
}
}
}
+227 -74
View File
@@ -6,6 +6,7 @@ import (
"encoding/json"
"errors"
"fmt"
"iter"
"math"
"os"
"path/filepath"
@@ -22,7 +23,6 @@ import (
"github.com/blevesearch/bleve/v2/mapping"
"github.com/blevesearch/bleve/v2/search"
"github.com/blevesearch/bleve/v2/search/query"
bleveSearch "github.com/blevesearch/bleve/v2/search/searcher"
index "github.com/blevesearch/bleve_index_api"
"github.com/prometheus/client_golang/prometheus"
bolterrors "go.etcd.io/bbolt/errors"
@@ -35,6 +35,7 @@ import (
"github.com/grafana/grafana/pkg/storage/unified/resourcepb"
"github.com/grafana/grafana/pkg/storage/unified/search/builders"
"github.com/grafana/authlib/authz"
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana/pkg/apimachinery/utils"
@@ -1300,43 +1301,27 @@ func (b *bleveIndex) toBleveSearchRequest(ctx context.Context, req *resourcepb.R
}
if access != nil {
auth, ok := authlib.AuthInfoFrom(ctx)
if !ok {
return nil, resource.AsErrorResult(fmt.Errorf("missing auth info"))
}
verb := utils.VerbList
if req.Permission == int64(dashboardaccess.PERMISSION_EDIT) {
verb = utils.VerbPatch
}
checker, _, err := access.Compile(ctx, auth, authlib.ListRequest{
Namespace: b.key.Namespace,
Group: b.key.Group,
Resource: b.key.Resource,
Verb: verb,
})
if err != nil {
return nil, resource.AsErrorResult(err)
}
checkers := map[string]authlib.ItemChecker{
b.key.Resource: checker,
// Build resource -> verb mapping for batch authorization
resources := map[string]string{
b.key.Resource: verb,
}
// handle federation
// Handle federation
for _, federated := range req.Federated {
checker, _, err := access.Compile(ctx, auth, authlib.ListRequest{
Namespace: federated.Namespace,
Group: federated.Group,
Resource: federated.Resource,
Verb: utils.VerbList,
})
if err != nil {
return nil, resource.AsErrorResult(err)
}
checkers[federated.Resource] = checker
resources[federated.Resource] = utils.VerbList
}
searchrequest.Query = newPermissionScopedQuery(searchrequest.Query, checkers)
searchrequest.Query = newPermissionScopedQuery(searchrequest.Query, permissionScopedQueryConfig{
access: access,
namespace: b.key.Namespace,
group: b.key.Group,
resources: resources,
})
}
for k, v := range req.Facet {
@@ -1866,71 +1851,239 @@ func newResponseFacet(v *search.FacetResult) *resourcepb.ResourceSearchResponse_
type permissionScopedQuery struct {
query.Query
checkers map[string]authlib.ItemChecker // one checker per resource
log log.Logger
access authlib.AccessClient
namespace string
group string
resources map[string]string // resource -> verb mapping
log log.Logger
}
func newPermissionScopedQuery(q query.Query, checkers map[string]authlib.ItemChecker) *permissionScopedQuery {
type permissionScopedQueryConfig struct {
access authlib.AccessClient
namespace string
group string
resources map[string]string // resource -> verb mapping
}
func newPermissionScopedQuery(q query.Query, cfg permissionScopedQueryConfig) *permissionScopedQuery {
return &permissionScopedQuery{
Query: q,
checkers: checkers,
log: log.New("search_permissions"),
Query: q,
access: cfg.access,
namespace: cfg.namespace,
group: cfg.group,
resources: cfg.resources,
log: log.New("search_permissions"),
}
}
func (q *permissionScopedQuery) Searcher(ctx context.Context, i index.IndexReader, m mapping.IndexMapping, options search.SearcherOptions) (search.Searcher, error) {
// Get a new logger from context, to pass traceIDs etc.
logger := q.log.FromContext(ctx)
searcher, err := q.Query.Searcher(ctx, i, m, options)
if err != nil {
return nil, err
}
dvReader, err := i.DocValueReader([]string{"folder"})
if err != nil {
return nil, err
}
filteringSearcher := bleveSearch.NewFilteringSearcher(ctx, searcher, func(d *search.DocumentMatch) bool {
// The doc ID has the format: <namespace>/<group>/<resourceType>/<name>
// IndexInternalID will be the same as the doc ID when using an in-memory index, but when using a file-based
// index it becomes a binary encoded number that has some other internal meaning. Using ExternalID() will get the
// correct doc ID regardless of the index type.
d.ID, err = i.ExternalID(d.IndexInternalID)
if err != nil {
logger.Debug("Error getting external ID", "error", err)
return false
}
parts := strings.Split(d.ID, "/")
// Exclude doc if id isn't expected format
if len(parts) != 4 {
logger.Debug("Unexpected document ID format", "id", d.ID)
return false
}
ns := parts[0]
resource := parts[2]
name := parts[3]
folder := ""
err = dvReader.VisitDocValues(d.IndexInternalID, func(field string, value []byte) {
if field == "folder" {
folder = string(value)
return newBatchAuthzSearcher(ctx, searcher, i, dvReader, q.access, q.namespace, q.group, q.resources, logger), nil
}
// docInfo holds document information for authorization
type docInfo struct {
doc *search.DocumentMatch
resourceType string
name string
folder string
verb string
}
// batchAuthzSearcher implements a batch-aware authorization filtering searcher
// using FilterAuthorized with iter.Pull2 for efficient batched authorization
type batchAuthzSearcher struct {
ctx context.Context
searcher search.Searcher
indexReader index.IndexReader
dvReader index.DocValueReader
access authlib.AccessClient
namespace string
group string
resources map[string]string // resource -> verb mapping
log log.Logger
// Pull iterator state (lazily initialized)
searchCtx *search.SearchContext
next func() (docInfo, error, bool)
stop func()
}
func newBatchAuthzSearcher(
ctx context.Context,
searcher search.Searcher,
indexReader index.IndexReader,
dvReader index.DocValueReader,
access authlib.AccessClient,
namespace string,
group string,
resources map[string]string,
logger log.Logger,
) *batchAuthzSearcher {
return &batchAuthzSearcher{
ctx: ctx,
searcher: searcher,
indexReader: indexReader,
dvReader: dvReader,
access: access,
namespace: namespace,
group: group,
resources: resources,
log: logger,
}
}
func (s *batchAuthzSearcher) Next(searchCtx *search.SearchContext) (*search.DocumentMatch, error) {
// Lazy initialization of pull iterator
if s.next == nil {
s.searchCtx = searchCtx
s.initPullIterator()
}
info, err, ok := s.next()
if !ok {
return nil, nil // No more documents
}
if err != nil {
return nil, err
}
return info.doc, nil
}
// initPullIterator sets up the FilterAuthorized iterator as a pull iterator
func (s *batchAuthzSearcher) initPullIterator() {
// Create iter.Seq that pulls documents from searcher and parses them
candidates := func(yield func(docInfo) bool) {
for {
doc, err := s.searcher.Next(s.searchCtx)
if err != nil {
s.log.Debug("Error getting next document", "error", err)
return
}
if doc == nil {
return // No more documents
}
})
if err != nil {
logger.Debug("Error reading doc values", "error", err)
return false
}
if _, ok := q.checkers[resource]; !ok {
logger.Debug("No resource checker found", "resource", resource)
return false
}
allowed := q.checkers[resource](name, folder)
if !allowed {
logger.Debug("Denying access", "ns", ns, "name", name, "folder", folder)
}
return allowed
})
return filteringSearcher, nil
info, ok := s.parseDocInfo(doc)
if !ok {
continue // Skip invalid documents
}
if !yield(info) {
return
}
}
}
extractFn := func(info docInfo) authz.BatchCheckItem {
return authz.BatchCheckItem{
Name: info.name,
Folder: info.folder,
Verb: info.verb,
Group: s.group,
Resource: info.resourceType,
Namespace: s.namespace,
}
}
// FilterAuthorized extracts auth from context and batches internally
authzIter := authz.FilterAuthorized(s.ctx, s.access, candidates, extractFn).Items
// Convert push iterator to pull iterator
s.next, s.stop = iter.Pull2(authzIter)
}
// parseDocInfo extracts document information needed for authorization
func (s *batchAuthzSearcher) parseDocInfo(doc *search.DocumentMatch) (docInfo, bool) {
// Get external ID
externalID, err := s.indexReader.ExternalID(doc.IndexInternalID)
if err != nil {
s.log.Debug("Error getting external ID", "error", err)
return docInfo{}, false
}
doc.ID = externalID
// Parse doc ID: <namespace>/<group>/<resourceType>/<name>
parts := strings.Split(doc.ID, "/")
if len(parts) != 4 {
s.log.Debug("Unexpected document ID format", "id", doc.ID)
return docInfo{}, false
}
resourceType := parts[2]
name := parts[3]
// Get folder from doc values
folder := ""
err = s.dvReader.VisitDocValues(doc.IndexInternalID, func(field string, value []byte) {
if field == "folder" {
folder = string(value)
}
})
if err != nil {
s.log.Debug("Error reading doc values", "error", err)
return docInfo{}, false
}
// Check if we have a verb for this resource type
verb, ok := s.resources[resourceType]
if !ok {
s.log.Debug("No verb found for resource", "resource", resourceType)
return docInfo{}, false
}
return docInfo{
doc: doc,
resourceType: resourceType,
name: name,
folder: folder,
verb: verb,
}, true
}
func (s *batchAuthzSearcher) Advance(searchCtx *search.SearchContext, ID index.IndexInternalID) (*search.DocumentMatch, error) {
return s.searcher.Advance(searchCtx, ID)
}
func (s *batchAuthzSearcher) Close() error {
if s.stop != nil {
s.stop()
}
return s.searcher.Close()
}
func (s *batchAuthzSearcher) Size() int {
return s.searcher.Size()
}
func (s *batchAuthzSearcher) DocumentMatchPoolSize() int {
return s.searcher.DocumentMatchPoolSize()
}
func (s *batchAuthzSearcher) Min() int {
return s.searcher.Min()
}
func (s *batchAuthzSearcher) Count() uint64 {
return s.searcher.Count()
}
func (s *batchAuthzSearcher) SetQueryNorm(qnorm float64) {
s.searcher.SetQueryNorm(qnorm)
}
func (s *batchAuthzSearcher) Weight() float64 {
return s.searcher.Weight()
}
// hasTerms - any value that will be split into multiple tokens
+6 -2
View File
@@ -653,8 +653,12 @@ func (nc StubAccessClient) Write(ctx context.Context, req *authzextv1.WriteReque
return nil
}
func (nc StubAccessClient) BatchCheck(ctx context.Context, req *authzextv1.BatchCheckRequest) (*authzextv1.BatchCheckResponse, error) {
return nil, nil
func (nc StubAccessClient) BatchCheck(ctx context.Context, user authlib.AuthInfo, req authlib.BatchCheckRequest) (authlib.BatchCheckResponse, error) {
results := make(map[string]authlib.BatchCheckResult, len(req.Checks))
for _, item := range req.Checks {
results[item.CorrelationID] = authlib.BatchCheckResult{Allowed: nc.resourceResponses[item.Resource]}
}
return authlib.BatchCheckResponse{Results: results}, nil
}
func TestSafeInt64ToInt(t *testing.T) {
+22
View File
@@ -517,6 +517,28 @@ func (m *mockAccessClient) Check(ctx context.Context, user types.AuthInfo, req t
return types.CheckResponse{Allowed: m.allowed}, nil
}
func (m *mockAccessClient) BatchCheck(ctx context.Context, user types.AuthInfo, req types.BatchCheckRequest) (types.BatchCheckResponse, error) {
results := make(map[string]types.BatchCheckResult, len(req.Checks))
for _, check := range req.Checks {
allowed := m.allowed
// Check specific folder:verb mappings if provided
if m.allowedMap != nil {
key := fmt.Sprintf("%s:%s", check.Folder, check.Verb)
if a, exists := m.allowedMap[key]; exists {
allowed = a
}
}
results[check.CorrelationID] = types.BatchCheckResult{
Allowed: allowed,
}
}
return types.BatchCheckResponse{Results: results}, nil
}
func (m *mockAccessClient) Compile(ctx context.Context, user types.AuthInfo, req types.ListRequest) (types.ItemChecker, types.Zookie, error) {
if m.compileFn != nil {
return m.compileFn(user, req), types.NoopZookie{}, nil
@@ -33,6 +33,8 @@ import (
)
func TestIntegrationFolderTreeZanzana(t *testing.T) {
// TODO: Add back OSS seeding and enable this test
t.Skip("Skipping folder tree test with Zanzana")
testutil.SkipIntegrationTestInShortMode(t)
runIntegrationFolderTree(t, testinfra.GrafanaOpts{
+7 -69
View File
@@ -13829,6 +13829,12 @@
"BacktestConfig": {
"type": "object",
"properties": {
"annotations": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"condition": {
"type": "string"
},
@@ -13838,16 +13844,8 @@
"$ref": "#/definitions/AlertQuery"
}
},
"exec_err_state": {
"type": "string",
"enum": [
"OK",
"Alerting",
"Error"
]
},
"for": {
"type": "string"
"$ref": "#/definitions/Duration"
},
"from": {
"type": "string",
@@ -13856,22 +13854,12 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"keep_firing_for": {
"type": "string"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"missing_series_evals_to_resolve": {
"type": "integer",
"format": "int64"
},
"namespace_uid": {
"type": "string"
},
"no_data_state": {
"type": "string",
"enum": [
@@ -13880,18 +13868,12 @@
"OK"
]
},
"rule_group": {
"type": "string"
},
"title": {
"type": "string"
},
"to": {
"type": "string",
"format": "date-time"
},
"uid": {
"type": "string"
}
}
},
@@ -16796,12 +16778,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"limit": {
"type": "integer",
"format": "int64"
@@ -16812,12 +16788,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"type": "array",
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
}
},
"rules": {
"type": "array",
"items": {
@@ -19293,12 +19263,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"limit": {
"type": "integer",
"format": "int64"
@@ -19309,12 +19273,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"type": "array",
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
}
},
"rules": {
"type": "array",
"items": {
@@ -20352,14 +20310,6 @@
}
}
},
"RemoteWriteConfig": {
"type": "object",
"properties": {
"url": {
"type": "string"
}
}
},
"Report": {
"type": "object",
"properties": {
@@ -21059,12 +21009,6 @@
"interval": {
"$ref": "#/definitions/Duration"
},
"labels": {
"type": "object",
"additionalProperties": {
"type": "string"
}
},
"limit": {
"type": "integer",
"format": "int64"
@@ -21075,12 +21019,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"type": "array",
"items": {
"$ref": "#/definitions/RemoteWriteConfig"
}
},
"rules": {
"type": "array",
"items": {
@@ -70,8 +70,8 @@ export class PanelTimeRange extends SceneTimeRangeTransformerBase<PanelTimeRange
// set initial values on activate
this.setState({
value: timeRange,
from: typeof timeRange.raw.from === 'string' ? timeRange.raw.from : timeRange.raw.from.toISOString(),
to: typeof timeRange.raw.to === 'string' ? timeRange.raw.to : timeRange.raw.to.toISOString(),
from: timeRange.raw.from.toString(),
to: timeRange.raw.to.toString(),
});
}
+44 -93
View File
@@ -1,40 +1,38 @@
import { css, cx } from '@emotion/css';
import { capitalize, groupBy } from 'lodash';
import { useCallback, useEffect, useState, useRef, useMemo } from 'react';
import * as React from 'react';
import { useCallback, useEffect, useMemo, useRef, useState } from 'react';
import { usePrevious, useUnmount } from 'react-use';
import {
AbsoluteTimeRange,
CoreApp,
SplitOpen,
LogRowModel,
LogsMetaItem,
DataFrame,
DataHoverClearEvent,
DataHoverEvent,
DataQueryResponse,
EventBus,
ExploreLogsPanelState,
ExplorePanelsState,
FieldConfigSource,
AbsoluteTimeRange,
GrafanaTheme2,
LoadingState,
LogLevel,
LogRowContextOptions,
LogRowModel,
LogsDedupDescription,
LogsDedupStrategy,
LogsMetaItem,
LogsSortOrder,
PanelData,
rangeUtil,
RawTimeRange,
serializeStateToUrlParam,
shallowCompare,
SplitOpen,
TimeRange,
TimeZone,
RawTimeRange,
DataQueryResponse,
LogRowContextOptions,
EventBus,
ExplorePanelsState,
TimeRange,
LogsDedupStrategy,
LogsSortOrder,
CoreApp,
LogsDedupDescription,
rangeUtil,
ExploreLogsPanelState,
DataHoverClearEvent,
DataHoverEvent,
serializeStateToUrlParam,
urlUtil,
LogLevel,
shallowCompare,
} from '@grafana/data';
import { t, Trans } from '@grafana/i18n';
import { Trans, t } from '@grafana/i18n';
import { config, reportInteraction } from '@grafana/runtime';
import { DataQuery, DataTopic } from '@grafana/schema';
import {
@@ -49,7 +47,6 @@ import {
Themeable2,
withTheme2,
} from '@grafana/ui';
import { replaceVariables } from '@grafana-plugins/loki/querybuilder/parsingUtils';
import store from 'app/core/store';
import { createAndCopyShortLink, getLogsPermalinkRange } from 'app/core/utils/shortLinks';
import { ControlledLogRows } from 'app/features/logs/components/ControlledLogRows';
@@ -59,17 +56,15 @@ import { LogRowContextModal } from 'app/features/logs/components/log-context/Log
import { LogLineContext } from 'app/features/logs/components/panel/LogLineContext';
import { LogList, LogListOptions } from 'app/features/logs/components/panel/LogList';
import { isDedupStrategy, isLogsSortOrder } from 'app/features/logs/components/panel/LogListContext';
import { dedupLogRows, LogLevelColor } from 'app/features/logs/logsModel';
import { LogLevelColor, dedupLogRows } from 'app/features/logs/logsModel';
import { getLogLevelFromKey, getLogLevelInfo } from 'app/features/logs/utils';
import { LokiQueryDirection } from 'app/plugins/datasource/loki/dataquery.gen';
import { isLokiQuery } from 'app/plugins/datasource/loki/queryUtils';
import { GetFieldLinksFn } from 'app/plugins/panel/logs/types';
import { Options } from 'app/plugins/panel/logstable/panelcfg.gen';
import { getState } from 'app/store/store';
import { ExploreItemState } from 'app/types/explore';
import { useDispatch } from 'app/types/store';
import { LogsTable } from '../../../plugins/panel/logstable/LogsTable';
import {
contentOutlineTrackPinAdded,
contentOutlineTrackPinClicked,
@@ -85,7 +80,7 @@ import { changeQueries, runQueries } from '../state/query';
import { LogsFeedback } from './LogsFeedback';
import { LogsMetaRow } from './LogsMetaRow';
import LogsNavigation from './LogsNavigation';
import { getLogsTableHeight, LogsTableWrap } from './LogsTableWrap';
import { LogsTableWrap, getLogsTableHeight } from './LogsTableWrap';
import { LogsVolumePanelList } from './LogsVolumePanelList';
import { SETTING_KEY_ROOT, SETTINGS_KEYS, visualisationTypeKey } from './utils/logs';
import { getExploreBaseUrl } from './utils/url';
@@ -132,9 +127,8 @@ interface Props extends Themeable2 {
range: TimeRange;
onClickFilterString?: (value: string, refId?: string) => void;
onClickFilterOutString?: (value: string, refId?: string) => void;
onPinLineCallback?: () => void;
loadMoreLogs?(range: AbsoluteTimeRange): void;
onPinLineCallback?: () => void;
}
export type LogsVisualisationType = 'table' | 'logs';
@@ -770,14 +764,6 @@ const UnthemedLogs: React.FunctionComponent<Props> = (props: Props) => {
setFilterLevels(levels.map((level) => getLogLevelFromKey(level)));
}, []);
// @todo ff
const enableNewLogsTable = true;
const panelData: PanelData = {
state: loading ? LoadingState.Loading : LoadingState.Done,
series: props.logsFrames ?? [],
timeRange: props.range,
};
return (
<>
{(!config.featureToggles.newLogsPanel || !config.featureToggles.newLogContext) && getRowContext && contextRow && (
@@ -1003,55 +989,24 @@ const UnthemedLogs: React.FunctionComponent<Props> = (props: Props) => {
<div className={cx(styles.logsSection, visualisationType === 'table' ? styles.logsTable : undefined)}>
{!config.featureToggles.logsPanelControls && visualisationType === 'table' && hasData && (
<div className={styles.logRows} data-testid="logRowsTable">
{/* @todo add flag*/}
{enableNewLogsTable && (
<LogsTable
id={0}
data={panelData}
timeRange={props.range}
timeZone={timeZone}
options={{}}
transparent={false}
width={width - 80}
height={tableHeight}
fieldConfig={{
defaults: {},
overrides: [],
}}
renderCounter={0}
title={''}
eventBus={props.eventBus}
onOptionsChange={function (options: Options): void {
console.log('onOptionsChange NOT IMP:', options);
// throw new Error('Function not implemented.');
}}
onFieldConfigChange={function (config: FieldConfigSource): void {
console.log('onFieldConfigChange NOT IMP:', config);
}}
replaceVariables={replaceVariables}
onChangeTimeRange={onChangeTime}
/>
)}
{!enableNewLogsTable && (
<LogsTableWrap
logsSortOrder={logsSortOrder}
range={props.range}
splitOpen={splitOpen}
timeZone={timeZone}
width={width - 80}
logsFrames={props.logsFrames ?? []}
onClickFilterLabel={onClickFilterLabel}
onClickFilterOutLabel={onClickFilterOutLabel}
panelState={panelState?.logs}
updatePanelState={updatePanelState}
datasourceType={props.datasourceType}
displayedFields={displayedFields}
exploreId={props.exploreId}
absoluteRange={props.absoluteRange}
logRows={props.logRows}
/>
)}
{/* Width should be full width minus logs navigation and padding */}
<LogsTableWrap
logsSortOrder={logsSortOrder}
range={props.range}
splitOpen={splitOpen}
timeZone={timeZone}
width={width - 80}
logsFrames={props.logsFrames ?? []}
onClickFilterLabel={onClickFilterLabel}
onClickFilterOutLabel={onClickFilterOutLabel}
panelState={panelState?.logs}
updatePanelState={updatePanelState}
datasourceType={props.datasourceType}
displayedFields={displayedFields}
exploreId={props.exploreId}
absoluteRange={props.absoluteRange}
logRows={props.logRows}
/>
</div>
)}
{(!config.featureToggles.newLogsPanel || visualisationType === 'table') &&
@@ -1059,10 +1014,6 @@ const UnthemedLogs: React.FunctionComponent<Props> = (props: Props) => {
hasData && (
<div className={styles.controlledLogRowsWrapper} data-testid="logRows">
<ControlledLogRows
fieldConfig={{
defaults: {},
overrides: [],
}}
ref={logsContainerRef}
logsTableFrames={props.logsFrames}
width={width}
@@ -367,7 +367,8 @@ const isFieldFilterable = (field: Field, bodyName: string, timeName: string) =>
return true;
};
// @todo move to logsFrame
// TODO: explore if `logsFrame.ts` can help us with getting the right fields
// TODO Why is typeInfo not defined on the Field interface?
export function getLogsExtractFields(dataFrame: DataFrame) {
return dataFrame.fields
.filter((field: Field & { typeInfo?: { frame: string } }) => {
@@ -28,11 +28,6 @@ interface Props extends CustomCellRendererProps {
index?: number;
}
/**
* @deprecated
* @param props
* @constructor
*/
export function LogsTableActionButtons(props: Props) {
const { exploreId, absoluteRange, logRows, rowIndex, panelState, displayedFields, logsFrame, frame } = props;
@@ -1,5 +1,5 @@
import { css } from '@emotion/css';
import { forwardRef, useCallback, useEffect, useImperativeHandle, useMemo, useRef } from 'react';
import { useEffect, useMemo, useRef, forwardRef, useImperativeHandle, useCallback } from 'react';
import {
AbsoluteTimeRange,
@@ -7,23 +7,17 @@ import {
DataFrame,
EventBusSrv,
ExploreLogsPanelState,
FieldConfigSource,
LoadingState,
LogLevel,
LogRowModel,
LogsMetaItem,
LogsSortOrder,
PanelData,
SplitOpen,
TimeRange,
} from '@grafana/data';
import { getAppEvents, getTemplateSrv } from '@grafana/runtime';
import { PanelContextProvider } from '@grafana/ui';
import { Options } from 'app/plugins/panel/logstable/panelcfg.gen';
import { LogsTable } from '../../../plugins/panel/logstable/LogsTable';
import { LogsVisualisationType } from '../../explore/Logs/Logs';
import { ControlledLogsTable } from './ControlledLogsTable';
import { InfiniteScroll } from './InfiniteScroll';
import { LogRows, Props } from './LogRows';
import { LogListOptions } from './panel/LogList';
@@ -31,12 +25,6 @@ import { LogListContextProvider, useLogListContext } from './panel/LogListContex
import { LogListControls } from './panel/LogListControls';
import { ScrollToLogsEvent } from './panel/virtualization';
// @todo
export const FILTER_FOR_OPERATOR = '=';
export const FILTER_OUT_OPERATOR = '!=';
export type AdHocFilterOperator = typeof FILTER_FOR_OPERATOR | typeof FILTER_OUT_OPERATOR;
export type AdHocFilterItem = { key: string; value: string; operator: AdHocFilterOperator };
export interface ControlledLogRowsProps extends Omit<Props, 'scrollElement'> {
loading: boolean;
logsMeta?: LogsMetaItem[];
@@ -45,7 +33,6 @@ export interface ControlledLogRowsProps extends Omit<Props, 'scrollElement'> {
onLogOptionsChange?: (option: LogListOptions, value: string | boolean | string[]) => void;
range: TimeRange;
filterLevels?: LogLevel[];
fieldConfig: FieldConfigSource;
/** Props added for Table **/
visualisationType: LogsVisualisationType;
@@ -87,35 +74,10 @@ export const ControlledLogRows = forwardRef<HTMLDivElement | null, ControlledLog
prettifyLogMessage,
onLogOptionsChange,
wrapLogMessage,
fieldConfig,
...rest
}: ControlledLogRowsProps,
ref
) => {
const dataFrames = rest.logsTableFrames ?? [];
const panelData: PanelData = {
state: rest.loading ? LoadingState.Loading : LoadingState.Done,
series: dataFrames,
timeRange: rest.timeRange,
};
const eventBus = getAppEvents();
const onCellFilterAdded = (filter: AdHocFilterItem) => {
const { value, key, operator } = filter;
const { onClickFilterLabel, onClickFilterOutLabel } = rest;
if (!onClickFilterLabel || !onClickFilterOutLabel) {
return;
}
if (operator === FILTER_FOR_OPERATOR) {
onClickFilterLabel(key, value, dataFrames[0]);
}
if (operator === FILTER_OUT_OPERATOR) {
onClickFilterOutLabel(key, value, dataFrames[0]);
}
};
return (
<LogListContextProvider
app={rest.app || CoreApp.Unknown}
@@ -135,46 +97,9 @@ export const ControlledLogRows = forwardRef<HTMLDivElement | null, ControlledLog
wrapLogMessage={wrapLogMessage}
>
{rest.visualisationType === 'logs' && (
<LogRowsComponent
ref={ref}
{...rest}
deduplicatedRows={deduplicatedRows}
fieldConfig={{ defaults: {}, overrides: [] }}
/>
)}
{rest.visualisationType === 'table' && rest.updatePanelState && (
<PanelContextProvider
value={{
eventsScope: 'explore',
eventBus: eventBus ?? new EventBusSrv(),
onAddAdHocFilter: onCellFilterAdded,
}}
>
<LogsTable
id={0}
width={rest.width ?? 0}
data={panelData}
options={{}}
transparent={false}
height={800}
fieldConfig={fieldConfig}
renderCounter={0}
title={''}
eventBus={eventBus}
onOptionsChange={function (options: Options): void {
console.log('onOptionsChange not implemented');
}}
onFieldConfigChange={function (config: FieldConfigSource): void {
console.log('onFieldConfigChange not implemented');
}}
replaceVariables={getTemplateSrv().replace.bind(getTemplateSrv())}
onChangeTimeRange={function (timeRange: AbsoluteTimeRange): void {
console.log('onChangeTimeRange not implemented');
}}
{...rest}
/>
</PanelContextProvider>
<LogRowsComponent ref={ref} {...rest} deduplicatedRows={deduplicatedRows} />
)}
{rest.visualisationType === 'table' && rest.updatePanelState && <ControlledLogsTable {...rest} />}
</LogListContextProvider>
);
}
@@ -10,7 +10,6 @@ import {
DataFrame,
LogRowContextOptions,
TimeRange,
FieldConfigSource,
} from '@grafana/data';
import { Trans, t } from '@grafana/i18n';
import { DataQuery } from '@grafana/schema';
@@ -74,7 +73,6 @@ export interface Props {
logRowMenuIconsAfter?: ReactNode[];
scrollElement: HTMLDivElement | null;
renderPreview?: boolean;
fieldConfig?: FieldConfigSource;
}
export type PopoverStateType = {
+10 -10
View File
@@ -29,11 +29,11 @@ function getField(cache: FieldCache, name: string, fieldType: FieldType): FieldW
return field.type === fieldType ? field : undefined;
}
export const LOGS_DATAPLANE_TIMESTAMP_NAME = 'timestamp';
export const LOGS_DATAPLANE_BODY_NAME = 'body';
const LOGS_DATAPLANE_SEVERITY_NAME = 'severity';
const LOGS_DATAPLANE_ID_NAME = 'id';
const LOGS_DATAPLANE_LABELS_NAME = 'labels';
const DATAPLANE_TIMESTAMP_NAME = 'timestamp';
const DATAPLANE_BODY_NAME = 'body';
const DATAPLANE_SEVERITY_NAME = 'severity';
const DATAPLANE_ID_NAME = 'id';
const DATAPLANE_LABELS_NAME = 'labels';
// NOTE: this is a hot fn, we need to avoid allocating new objects here
export function logFrameLabelsToLabels(logFrameLabels: LogFrameLabels): Labels {
@@ -66,17 +66,17 @@ export function logFrameLabelsToLabels(logFrameLabels: LogFrameLabels): Labels {
export function parseDataplaneLogsFrame(frame: DataFrame): LogsFrame | null {
const cache = new FieldCache(frame);
const timestampField = getField(cache, LOGS_DATAPLANE_TIMESTAMP_NAME, FieldType.time);
const bodyField = getField(cache, LOGS_DATAPLANE_BODY_NAME, FieldType.string);
const timestampField = getField(cache, DATAPLANE_TIMESTAMP_NAME, FieldType.time);
const bodyField = getField(cache, DATAPLANE_BODY_NAME, FieldType.string);
// these two are mandatory
if (timestampField === undefined || bodyField === undefined) {
return null;
}
const severityField = getField(cache, LOGS_DATAPLANE_SEVERITY_NAME, FieldType.string) ?? null;
const idField = getField(cache, LOGS_DATAPLANE_ID_NAME, FieldType.string) ?? null;
const labelsField = getField(cache, LOGS_DATAPLANE_LABELS_NAME, FieldType.other) ?? null;
const severityField = getField(cache, DATAPLANE_SEVERITY_NAME, FieldType.string) ?? null;
const idField = getField(cache, DATAPLANE_ID_NAME, FieldType.string) ?? null;
const labelsField = getField(cache, DATAPLANE_LABELS_NAME, FieldType.other) ?? null;
const labels = labelsField === null ? null : labelsField.values;
@@ -17,5 +17,5 @@ export const usePluginConfig = (plugin?: CatalogPlugin) => {
return loadPlugin(plugin.id);
}
return null;
}, [plugin?.id, plugin?.isInstalled, plugin?.isDisabled, plugin?.isFullyInstalled]);
}, [plugin?.id, plugin?.isInstalled, plugin?.isDisabled]);
};
@@ -42,8 +42,6 @@ const histogramPanel = async () =>
await import(/* webpackChunkName: "histogramPanel" */ 'app/plugins/panel/histogram/module');
const livePanel = async () => await import(/* webpackChunkName: "livePanel" */ 'app/plugins/panel/live/module');
const logsPanel = async () => await import(/* webpackChunkName: "logsPanel" */ 'app/plugins/panel/logs/module');
const logsTablePanel = async () =>
await import(/* webpackChunkName: "logsPanel" */ 'app/plugins/panel/logstable/module');
const newsPanel = async () => await import(/* webpackChunkName: "newsPanel" */ 'app/plugins/panel/news/module');
const pieChartPanel = async () =>
await import(/* webpackChunkName: "pieChartPanel" */ 'app/plugins/panel/piechart/module');
@@ -110,7 +108,6 @@ const builtInPlugins: Record<string, System.Module | (() => Promise<System.Modul
'core:plugin/bargauge': barGaugePanel,
'core:plugin/barchart': barChartPanel,
'core:plugin/logs': logsPanel,
'core:plugin/logstable': logsTablePanel,
'core:plugin/traces': tracesPanel,
'core:plugin/welcome': welcomeBanner,
'core:plugin/nodeGraph': nodeGraph,
@@ -653,7 +653,6 @@ export const LogsPanel = ({
sortOrder={sortOrder}
>
<LogRows
fieldConfig={fieldConfig}
scrollElement={scrollElement}
scrollIntoView={scrollIntoView}
permalinkedRowId={getLogsPanelState()?.logs?.id ?? undefined}
@@ -706,7 +705,6 @@ export const LogsPanel = ({
ref={(scrollElement: HTMLDivElement | null) => {
setScrollElement(scrollElement);
}}
fieldConfig={fieldConfig}
visualisationType="logs"
loading={infiniteScrolling}
loadMoreLogs={enableInfiniteScrolling ? loadMoreLogs : undefined}
@@ -1,41 +0,0 @@
import { css } from '@emotion/css';
import { GrafanaTheme2 } from '@grafana/data';
import { CustomCellRendererProps, useStyles2 } from '@grafana/ui';
import { LogsFrame } from '../../../features/logs/logsFrame';
import { LogsNGTableRowActionButtons } from './LogsNGTableRowActionButtons';
import { BuildLinkToLogLine } from './types';
export function LogsTableCustomCellRenderer(props: {
cellProps: CustomCellRendererProps;
logsFrame: LogsFrame;
buildLinkToLog?: BuildLinkToLogLine;
}) {
const styles = useStyles2(getStyles);
return (
<>
<LogsNGTableRowActionButtons
{...props.cellProps}
buildLinkToLog={props.buildLinkToLog ?? buildLinkToLog}
logsFrame={props.logsFrame}
/>
<span className={styles.firstColumnCell}>
{props.cellProps.field.display?.(props.cellProps.value).text ?? String(props.cellProps.value)}
</span>
</>
);
}
const buildLinkToLog: BuildLinkToLogLine = (logsFrame, rowIndex, field) => {
return '@todo';
};
const getStyles = (theme: GrafanaTheme2) => {
return {
firstColumnCell: css({
paddingLeft: theme.spacing(7),
}),
};
};
@@ -1,132 +0,0 @@
import { css } from '@emotion/css';
import { memoize } from 'lodash';
import { useState } from 'react';
import { DataFrame, GrafanaTheme2 } from '@grafana/data';
import { t } from '@grafana/i18n';
import { ClipboardButton, CustomCellRendererProps, IconButton, Modal, useTheme2 } from '@grafana/ui';
import { LogsFrame } from 'app/features/logs/logsFrame';
import { BuildLinkToLogLine } from './types';
interface Props extends CustomCellRendererProps {
logsFrame: LogsFrame;
buildLinkToLog?: BuildLinkToLogLine;
}
/**
* Logs row actions buttons
* @todo use new inspector and default to code mode
* @param props
* @constructor
*/
export function LogsNGTableRowActionButtons(props: Props) {
const { rowIndex, logsFrame, field, frame, buildLinkToLog } = props;
const theme = useTheme2();
const [isInspecting, setIsInspecting] = useState(false);
const styles = getStyles(theme);
const handleViewClick = () => {
setIsInspecting(true);
};
return (
<>
<div className={styles.container}>
<div className={styles.buttonWrapper}>
<IconButton
className={styles.inspectButton}
tooltip={t('explore.logs-table.action-buttons.view-log-line', 'View log line')}
variant="secondary"
aria-label={t('explore.logs-table.action-buttons.view-log-line', 'View log line')}
tooltipPlacement="top"
size="md"
name="eye"
onClick={handleViewClick}
tabIndex={0}
/>
</div>
{buildLinkToLog && (
<div className={styles.buttonWrapper}>
<ClipboardButton
className={styles.clipboardButton}
icon="share-alt"
variant="secondary"
fill="text"
size="md"
tooltip={t('explore.logs-table.action-buttons.copy-link', 'Copy link to log line')}
tooltipPlacement="top"
tabIndex={0}
aria-label={t('explore.logs-table.action-buttons.copy-link', 'Copy link to log line')}
getText={() => buildLinkToLog(logsFrame, rowIndex, field)}
/>
</div>
)}
</div>
{isInspecting && (
<Modal
onDismiss={() => setIsInspecting(false)}
isOpen={true}
title={t('explore.logs-table.action-buttons.inspect-value', 'Inspect value')}
>
<pre>{getLineValue(logsFrame, frame, rowIndex)}</pre>
<Modal.ButtonRow>
<ClipboardButton icon="copy" getText={() => getLineValue(logsFrame, frame, rowIndex)}>
{t('explore.logs-table.action-buttons.copy-to-clipboard', 'Copy to Clipboard')}
</ClipboardButton>
</Modal.ButtonRow>
</Modal>
)}
</>
);
}
const getLineValue = memoize((logsFrame: LogsFrame, frame: DataFrame, rowIndex: number) => {
const bodyFieldName = logsFrame?.bodyField?.name;
const bodyField = bodyFieldName
? frame.fields.find((field) => field.name === bodyFieldName)
: frame.fields.find((field) => field.type === 'string');
return bodyField?.values[rowIndex];
});
export const getStyles = (theme: GrafanaTheme2) => ({
container: css({
background: theme.colors.background.secondary,
boxShadow: theme.shadows.z2,
display: 'flex',
flexDirection: 'row',
height: '100%',
left: 0,
top: 0,
padding: `0 ${theme.spacing(0.5)}`,
position: 'absolute',
zIndex: 1,
}),
buttonWrapper: css({
height: '100%',
'& button svg': {
marginRight: 'auto',
},
'&:hover': {
color: theme.colors.text.link,
},
padding: theme.spacing(0, 1),
display: 'flex',
alignItems: 'center',
}),
inspectButton: css({
borderRadius: theme.shape.radius.default,
display: 'inline-flex',
margin: 0,
overflow: 'hidden',
verticalAlign: 'middle',
cursor: 'pointer',
}),
clipboardButton: css({
height: 30,
lineHeight: '1',
padding: 0,
width: '20px',
cursor: 'pointer',
}),
});
@@ -1,323 +0,0 @@
import { css } from '@emotion/css';
import { useCallback, useEffect, useMemo, useState } from 'react';
import { lastValueFrom } from 'rxjs';
import {
applyFieldOverrides,
DataFrame,
Field,
FieldConfigSource,
GrafanaTheme2,
PanelProps,
transformDataFrame,
useDataLinksContext,
} from '@grafana/data';
import { getTemplateSrv } from '@grafana/runtime';
import { CustomCellRendererProps, TableCellDisplayMode, useStyles2 } from '@grafana/ui';
import { config } from '../../../core/config';
import { getLogsExtractFields } from '../../../features/explore/Logs/LogsTable';
import { FieldNameMetaStore } from '../../../features/explore/Logs/LogsTableWrap';
import { LogsTableFieldSelector } from '../../../features/logs/components/fieldSelector/FieldSelector';
import {
LOGS_DATAPLANE_BODY_NAME,
LOGS_DATAPLANE_TIMESTAMP_NAME,
LogsFrame,
parseLogsFrame,
} from '../../../features/logs/logsFrame';
import { isSetDisplayedFields } from '../logs/types';
import { TablePanel } from '../table/TablePanel';
import type { Options as TableOptions } from '../table/panelcfg.gen';
import { LogsTableCustomCellRenderer } from './CustomCellRenderer';
import type { Options as LogsTableOptions } from './panelcfg.gen';
import { isBuildLinkToLogLine, isOnLogsTableOptionsChange, OnLogsTableOptionsChange } from './types';
interface LogsTablePanelProps extends PanelProps<LogsTableOptions> {
frameIndex?: number;
showHeader?: boolean;
}
// Defaults
const DEFAULT_SIDEBAR_WIDTH = 200;
export const LogsTable = ({
data,
width,
height,
timeRange,
fieldConfig,
options,
eventBus,
frameIndex = 0,
showHeader = true, // @todo not pulling from panel settings
onOptionsChange,
onFieldConfigChange,
replaceVariables,
onChangeTimeRange,
title,
transparent,
timeZone,
id,
renderCounter,
}: LogsTablePanelProps) => {
// Variables
const unTransformedDataFrame = data.series[frameIndex];
// Hooks
const logsFrame: LogsFrame | null = useMemo(() => parseLogsFrame(unTransformedDataFrame), [unTransformedDataFrame]);
const defaultDisplayedFields = useMemo(
() => [
logsFrame?.timeField.name ?? LOGS_DATAPLANE_TIMESTAMP_NAME,
logsFrame?.bodyField.name ?? LOGS_DATAPLANE_BODY_NAME,
],
[logsFrame?.timeField.name, logsFrame?.bodyField.name]
);
// State
const [extractedFrame, setExtractedFrame] = useState<DataFrame[] | null>(null);
const [organizedFrame, setOrganizedFrame] = useState<DataFrame[] | null>(null);
const [displayedFields, setDisplayedFields] = useState<string[]>(options.displayedFields ?? defaultDisplayedFields);
const styles = useStyles2(getStyles, DEFAULT_SIDEBAR_WIDTH, height, width);
const dataLinksContext = useDataLinksContext();
const dataLinkPostProcessor = dataLinksContext.dataLinkPostProcessor;
// Methods
const onLogsTableOptionsChange: OnLogsTableOptionsChange | undefined = isOnLogsTableOptionsChange(onOptionsChange)
? onOptionsChange
: undefined;
const setDisplayedFieldsFn = isSetDisplayedFields(options.setDisplayedFields)
? options.setDisplayedFields
: setDisplayedFields;
// Callbacks
const onTableOptionsChange = useCallback(
(options: TableOptions) => {
onLogsTableOptionsChange?.(options);
},
[onLogsTableOptionsChange]
);
const handleLogsTableOptionsChange = useCallback(
(options: LogsTableOptions) => {
onOptionsChange(options);
},
[onOptionsChange]
);
const handleSetDisplayedFields = useCallback(
(displayedFields: string[]) => {
setDisplayedFieldsFn(displayedFields);
handleLogsTableOptionsChange({ ...options, displayedFields: displayedFields });
},
[handleLogsTableOptionsChange, options, setDisplayedFieldsFn]
);
const handleTableOnFieldConfigChange = useCallback(
(fieldConfig: FieldConfigSource) => {
onFieldConfigChange(fieldConfig);
},
[onFieldConfigChange]
);
/**
* Extract fields transform
*/
useEffect(() => {
const extractFields = async () => {
return await lastValueFrom(
transformDataFrame(getLogsExtractFields(unTransformedDataFrame), [unTransformedDataFrame])
);
};
extractFields().then((data) => {
const extractedFrames = applyFieldOverrides({
data,
fieldConfig,
replaceVariables: replaceVariables ?? getTemplateSrv().replace.bind(getTemplateSrv()),
theme: config.theme2,
timeZone: timeZone,
dataLinkPostProcessor,
});
setExtractedFrame(extractedFrames);
});
}, [dataLinkPostProcessor, fieldConfig, replaceVariables, timeZone, unTransformedDataFrame]);
/**
* Organize fields transform
*/
useEffect(() => {
const organizeFields = async (displayedFields: string[]) => {
if (!extractedFrame) {
return Promise.resolve(null);
}
let indexByName: Record<string, number> = {};
let includeByName: Record<string, boolean> = {};
if (displayedFields) {
for (const [idx, field] of displayedFields.entries()) {
indexByName[field] = idx;
includeByName[field] = true;
}
}
const organizedFrame = await lastValueFrom(
transformDataFrame(
[
{
id: 'organize',
options: {
indexByName,
includeByName,
},
},
],
extractedFrame
)
);
if (!logsFrame) {
throw new Error('missing logsFrame');
}
for (let frameIndex = 0; frameIndex < organizedFrame.length; frameIndex++) {
const frame = organizedFrame[frameIndex];
for (const [fieldIndex, field] of frame.fields.entries()) {
const isFirstField = fieldIndex === 0;
field.config = {
...field.config,
filterable: field.config?.filterable ?? doesFieldSupportAdHocFiltering(field, logsFrame),
custom: {
...field.config.custom,
inspect: field.config?.custom?.inspect ?? doesFieldSupportInspector(field, logsFrame),
// @todo add row actions panel option
cellOptions:
isFirstField && logsFrame
? {
type: TableCellDisplayMode.Custom,
cellComponent: (cellProps: CustomCellRendererProps) => (
<LogsTableCustomCellRenderer
cellProps={cellProps}
logsFrame={logsFrame}
buildLinkToLog={
isBuildLinkToLogLine(options.buildLinkToLogLine) ? options.buildLinkToLogLine : undefined
}
/>
),
}
: field.config.custom?.cellOptions,
},
};
}
}
return organizedFrame;
};
organizeFields(displayedFields).then((frame) => {
if (frame) {
setOrganizedFrame(frame);
}
});
}, [extractedFrame, displayedFields, logsFrame, options.buildLinkToLogLine]);
if (extractedFrame === null || organizedFrame === null || logsFrame === null) {
return;
}
console.log('render::LogsTable', { extractedFrame, organizedFrame });
return (
<div className={styles.wrapper}>
<div className={styles.sidebarWrapper}>
<LogsTableFieldSelector
clear={() => {}}
columnsWithMeta={displayedFieldsToColumns(displayedFields, logsFrame)}
dataFrames={extractedFrame}
logs={[]}
reorder={(columns: string[]) => {}}
setSidebarWidth={(width) => {}}
sidebarWidth={DEFAULT_SIDEBAR_WIDTH}
toggle={(key: string) => {
if (displayedFields.includes(key)) {
handleSetDisplayedFields(displayedFields.filter((f) => f !== key));
} else {
handleSetDisplayedFields([...displayedFields, key]);
}
}}
/>
</div>
<div className={styles.tableWrapper}>
<TablePanel
data={{ ...data, series: organizedFrame }}
width={width - DEFAULT_SIDEBAR_WIDTH}
height={height}
id={id}
timeRange={timeRange}
timeZone={timeZone}
options={{ ...options, frameIndex, showHeader }}
transparent={transparent}
fieldConfig={fieldConfig}
renderCounter={renderCounter}
title={title}
eventBus={eventBus}
onOptionsChange={onTableOptionsChange}
onFieldConfigChange={handleTableOnFieldConfigChange}
replaceVariables={replaceVariables}
onChangeTimeRange={onChangeTimeRange}
/>
</div>
</div>
);
};
function displayedFieldsToColumns(displayedFields: string[], logsFrame: LogsFrame): FieldNameMetaStore {
const columns: FieldNameMetaStore = {};
for (const [idx, field] of displayedFields.entries()) {
columns[field] = {
percentOfLinesWithLabel: 0,
type:
field === logsFrame.bodyField.name
? 'BODY_FIELD'
: field === logsFrame.timeField.name
? 'TIME_FIELD'
: undefined,
active: true,
index: idx,
};
}
return columns;
}
function doesFieldSupportInspector(field: Field, logsFrame: LogsFrame) {
// const unsupportedFields = [logsFrame.bodyField.name]
// return !unsupportedFields.includes(field.name);
return false;
}
function doesFieldSupportAdHocFiltering(field: Field, logsFrame: LogsFrame): boolean {
const unsupportedFields = [logsFrame.timeField.name, logsFrame.bodyField.name];
return !unsupportedFields.includes(field.name);
}
const getStyles = (theme: GrafanaTheme2, sidebarWidth: number, height: number, width: number) => {
return {
tableWrapper: css({
paddingLeft: sidebarWidth,
height,
width,
}),
sidebarWrapper: css({
position: 'absolute',
height: height,
width: sidebarWidth,
}),
wrapper: css({
height,
width,
}),
};
};
@@ -1,234 +0,0 @@
import { FieldConfigProperty, identityOverrideProcessor, PanelPlugin, standardEditorsRegistry } from '@grafana/data';
import { t } from '@grafana/i18n';
import {
TableCellDisplayMode,
TableCellHeight,
TableCellOptions,
TableCellTooltipPlacement,
} from '@grafana/schema/dist/esm/common/common.gen';
import { defaultTableFieldOptions } from '@grafana/schema/dist/esm/veneer/common.types';
import { PaginationEditor } from '../table/PaginationEditor';
import { TableCellOptionEditor } from '../table/TableCellOptionEditor';
import { tablePanelChangedHandler } from '../table/migrations';
import { defaultOptions, FieldConfig as TableFieldConfig } from '../table/panelcfg.gen';
import { tableSuggestionsSupplier } from '../table/suggestions';
import { LogsTable } from './LogsTable';
import { Options } from './panelcfg.gen';
// @todo can we pull stuff from table module instead of manually adding?
/**
 * Logs Table panel plugin definition.
 *
 * Wires the LogsTable component into Grafana's panel-plugin machinery and
 * registers its per-field (column) config and panel-level options. Most of
 * these mirror the core table panel's options (widths, alignment, cell
 * options, footer, pagination) — see the @todo on reusing the table module.
 */
export const plugin = new PanelPlugin<Options, TableFieldConfig>(LogsTable)
  // Migrates options when a panel is switched to this type (shared with the core table panel).
  .setPanelChangeHandler(tablePanelChangedHandler)
  .useFieldConfig({
    standardOptions: {
      [FieldConfigProperty.Actions]: {
        hideFromDefaults: false,
      },
    },
    // Per-field (column) configuration under the "Logs Table" and "Cell options" categories.
    useCustomConfig: (builder) => {
      const category = [t('table.category-logs-table', 'Logs Table')];
      const cellCategory = [t('table.category-cell-options', 'Cell options')];
      builder
        .addNumberInput({
          path: 'minWidth',
          name: t('table.name-min-column-width', 'Minimum column width'),
          category,
          description: t('table.description-min-column-width', 'The minimum width for column auto resizing'),
          settings: {
            placeholder: '150',
            min: 50,
            max: 500,
          },
          shouldApply: () => true,
          defaultValue: defaultTableFieldOptions.minWidth,
        })
        .addNumberInput({
          path: 'width',
          name: t('table.name-column-width', 'Column width'),
          category,
          settings: {
            placeholder: t('table.placeholder-column-width', 'auto'),
            min: 20,
          },
          shouldApply: () => true,
          defaultValue: defaultTableFieldOptions.width,
        })
        .addRadio({
          path: 'align',
          name: t('table.name-column-alignment', 'Column alignment'),
          category,
          settings: {
            options: [
              { label: t('table.column-alignment-options.label-auto', 'Auto'), value: 'auto' },
              { label: t('table.column-alignment-options.label-left', 'Left'), value: 'left' },
              { label: t('table.column-alignment-options.label-center', 'Center'), value: 'center' },
              { label: t('table.column-alignment-options.label-right', 'Right'), value: 'right' },
            ],
          },
          defaultValue: defaultTableFieldOptions.align,
        })
        .addBooleanSwitch({
          path: 'filterable',
          name: t('table.name-column-filter', 'Column filter'),
          category,
          description: t('table.description-column-filter', 'Enables/disables field filters in table'),
          defaultValue: defaultTableFieldOptions.filterable,
        })
        .addBooleanSwitch({
          path: 'wrapText',
          name: t('table.name-wrap-text', 'Wrap text'),
          category,
        })
        .addBooleanSwitch({
          path: 'wrapHeaderText',
          name: t('table.name-wrap-header-text', 'Wrap header text'),
          category,
        })
        .addBooleanSwitch({
          path: 'hideFrom.viz',
          name: t('table.name-hide-in-table', 'Hide in table'),
          category,
          defaultValue: undefined,
          hideFromDefaults: true,
        })
        // Footer calculations reuse the standard stats-picker editor for both
        // defaults and per-field overrides.
        .addCustomEditor({
          id: 'footer.reducers',
          category: [t('table.category-table-footer', 'Table footer')],
          path: 'footer.reducers',
          name: t('table.name-calculation', 'Calculation'),
          description: t('table.description-calculation', 'Choose a reducer function / calculation'),
          editor: standardEditorsRegistry.get('stats-picker').editor,
          override: standardEditorsRegistry.get('stats-picker').editor,
          defaultValue: [],
          process: identityOverrideProcessor,
          shouldApply: () => true,
          settings: {
            allowMultiple: true,
          },
        })
        .addCustomEditor<void, TableCellOptions>({
          id: 'cellOptions',
          path: 'cellOptions',
          name: t('table.name-cell-type', 'Cell type'),
          editor: TableCellOptionEditor,
          override: TableCellOptionEditor,
          defaultValue: defaultTableFieldOptions.cellOptions,
          process: identityOverrideProcessor,
          category: cellCategory,
          shouldApply: () => true,
        })
        .addBooleanSwitch({
          path: 'inspect',
          name: t('table.name-cell-value-inspect', 'Cell value inspect'),
          description: t('table.description-cell-value-inspect', 'Enable cell value inspection in a modal window'),
          defaultValue: false,
          category: cellCategory,
          // Only shown for the text-like cell display modes listed below.
          showIf: (cfg) => {
            return (
              cfg.cellOptions.type === TableCellDisplayMode.Auto ||
              cfg.cellOptions.type === TableCellDisplayMode.JSONView ||
              cfg.cellOptions.type === TableCellDisplayMode.ColorText ||
              cfg.cellOptions.type === TableCellDisplayMode.ColorBackground
            );
          },
        })
        .addFieldNamePicker({
          path: 'tooltip.field',
          name: t('table.name-tooltip-from-field', 'Tooltip from field'),
          description: t(
            'table.description-tooltip-from-field',
            'Render a cell from a field (hidden or visible) in a tooltip'
          ),
          category: cellCategory,
        })
        .addSelect({
          path: 'tooltip.placement',
          name: t('table.name-tooltip-placement', 'Tooltip placement'),
          category: cellCategory,
          settings: {
            options: [
              {
                label: t('table.tooltip-placement-options.label-auto', 'Auto'),
                value: TableCellTooltipPlacement.Auto,
              },
              {
                label: t('table.tooltip-placement-options.label-top', 'Top'),
                value: TableCellTooltipPlacement.Top,
              },
              {
                label: t('table.tooltip-placement-options.label-right', 'Right'),
                value: TableCellTooltipPlacement.Right,
              },
              {
                label: t('table.tooltip-placement-options.label-bottom', 'Bottom'),
                value: TableCellTooltipPlacement.Bottom,
              },
              {
                label: t('table.tooltip-placement-options.label-left', 'Left'),
                value: TableCellTooltipPlacement.Left,
              },
            ],
          },
          // Placement is irrelevant until a tooltip source field has been chosen.
          showIf: (cfg) => cfg.tooltip?.field !== undefined,
        })
        .addFieldNamePicker({
          path: 'styleField',
          name: t('table.name-styling-from-field', 'Styling from field'),
          description: t('table.description-styling-from-field', 'A field containing JSON objects with CSS properties'),
          category: cellCategory,
        });
    },
  })
  // Panel-level (non-field) options under the "Table" category.
  .setPanelOptions((builder) => {
    const category = [t('table.category-table', 'Table')];
    builder
      .addBooleanSwitch({
        path: 'showHeader',
        name: t('table.name-show-table-header', 'Show table header'),
        category,
        defaultValue: defaultOptions.showHeader,
      })
      .addNumberInput({
        path: 'frozenColumns.left',
        name: t('table.name-frozen-columns', 'Frozen columns'),
        description: t('table.description-frozen-columns', 'Columns are frozen from the left side of the table'),
        settings: {
          placeholder: 'none',
        },
        category,
      })
      .addRadio({
        path: 'cellHeight',
        name: t('table.name-cell-height', 'Cell height'),
        category,
        defaultValue: defaultOptions.cellHeight,
        settings: {
          options: [
            { value: TableCellHeight.Sm, label: t('table.cell-height-options.label-small', 'Small') },
            { value: TableCellHeight.Md, label: t('table.cell-height-options.label-medium', 'Medium') },
            { value: TableCellHeight.Lg, label: t('table.cell-height-options.label-large', 'Large') },
          ],
        },
      })
      .addNumberInput({
        path: 'maxRowHeight',
        name: t('table.name-max-height', 'Max row height'),
        category,
        settings: {
          placeholder: t('table.placeholder-max-height', 'none'),
          min: 0,
        },
      })
      .addCustomEditor({
        id: 'enablePagination',
        path: 'enablePagination',
        name: t('table.name-enable-pagination', 'Enable pagination'),
        category,
        editor: PaginationEditor,
        defaultValue: defaultOptions?.enablePagination,
      });
  })
  // @todo
  //@ts-expect-error
  .setSuggestionsSupplier(tableSuggestionsSupplier);
@@ -1,69 +0,0 @@
// Copyright 2026 Grafana Labs
//
// Licensed under the Apache License, Version 2.0 (the "License")
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package grafanaplugin
//import (
// "github.com/grafana/grafana/packages/grafana-schema/src/common"
//)
// PanelCfg declares the Logs Table panel's options schema.
// Most option fields are still commented out while the panel is experimental;
// only the uncommented fields below are generated into panelcfg.gen.ts.
composableKinds: PanelCfg: {
	maturity: "experimental"
	lineage: {
		schemas: [{
			version: [0, 0]
			schema: {
				Options: {
					// Candidate options carried over from the logs panel, not yet enabled:
					// showLabels: bool
					// showCommonLabels: bool
					// showFieldSelector?: bool
					// showTime: bool
					// showLogContextToggle: bool
					showControls?: bool
					controlsStorageKey?: string
					// wrapLogMessage: bool
					// prettifyLogMessage: bool
					// enableLogDetails: bool
					// syntaxHighlighting?: bool
					// sortOrder: common.LogsSortOrder
					// dedupStrategy: common.LogsDedupStrategy
					// enableInfiniteScrolling?: bool
					// noInteractions?: bool
					// showLogAttributes?: bool
					// fontSize?: "default" | "small" @cuetsy(kind="enum", memberNames="default|small")
					// detailsMode?: "inline" | "sidebar" @cuetsy(kind="enum", memberNames="inline|sidebar")
					// timestampResolution?: "ms" | "ns" @cuetsy(kind="enum", memberNames="ms|ns")
					// @todo filter methods no longer needed as props since these are defined by context?
					// onClickFilterLabel?: _
					// onClickFilterOutLabel?: _
					// isFilterLabelActive?: _
					// onClickFilterString?: _
					// onClickFilterOutString?: _
					// onClickShowField?: _
					// onClickHideField?: _
					// onLogOptionsChange?: _
					// logRowMenuIconsBefore?: _
					// logRowMenuIconsAfter?: _
					// logLineMenuCustomItems?: _
					// onNewLogsReceived?: _
					displayedFields?: [...string]
					// Callbacks typed as top (_) because CUE cannot express function types.
					setDisplayedFields?: _
					buildLinkToLogLine?: _
				} @cuetsy(kind="interface")
			}
		}]
		lenses: []
	}
}
-57
View File
@@ -1,57 +0,0 @@
// Code generated - EDITING IS FUTILE. DO NOT EDIT.
//
// Generated by:
// public/app/plugins/gen.go
// Using jennies:
// TSTypesJenny
// PluginTsTypesJenny
//
// Run 'make gen-cue' from repository root to regenerate.
// NOTE(review): this file is code-generated from panelcfg.cue ("EDITING IS
// FUTILE"). The doc comments inside the interface are generation artifacts —
// they reproduce commented-out CUE fields and are attached to unrelated
// properties. Fix the comments in panelcfg.cue and run 'make gen-cue' instead
// of editing here.
export interface Options {
  buildLinkToLogLine?: unknown;
  controlsStorageKey?: string;
  /**
   * isFilterLabelActive?: _
   * onClickFilterString?: _
   * onClickFilterOutString?: _
   * onClickShowField?: _
   * onClickHideField?: _
   * onLogOptionsChange?: _
   * logRowMenuIconsBefore?: _
   * logRowMenuIconsAfter?: _
   * logLineMenuCustomItems?: _
   * onNewLogsReceived?: _
   */
  displayedFields?: Array<string>;
  /**
   * wrapLogMessage: bool
   * prettifyLogMessage: bool
   * enableLogDetails: bool
   * syntaxHighlighting?: bool
   * sortOrder: common.LogsSortOrder
   * dedupStrategy: common.LogsDedupStrategy
   * enableInfiniteScrolling?: bool
   * noInteractions?: bool
   * showLogAttributes?: bool
   * fontSize?: "default" | "small" @cuetsy(kind="enum", memberNames="default|small")
   * detailsMode?: "inline" | "sidebar" @cuetsy(kind="enum", memberNames="inline|sidebar")
   * timestampResolution?: "ms" | "ns" @cuetsy(kind="enum", memberNames="ms|ns")
   * TODO: figure out how to define callbacks
   */
  onClickFilterLabel?: unknown;
  onClickFilterOutLabel?: unknown;
  setDisplayedFields?: unknown;
  /**
   * showLabels: bool
   * showCommonLabels: bool
   * showFieldSelector?: bool
   * showTime: bool
   * showLogContextToggle: bool
   */
  showControls?: boolean;
}
// Generated defaults for the Logs Table panel options (source: panelcfg.cue).
// NOTE(review): generated file — change the CUE schema and run 'make gen-cue'
// rather than editing this value.
export const defaultOptions: Partial<Options> = {
  displayedFields: [],
};
@@ -1,23 +0,0 @@
{
"type": "panel",
"name": "Logs Table",
"id": "logs_table",
"state": "alpha",
"info": {
"author": {
"name": "Grafana Labs",
"url": "https://grafana.com"
},
"logos": {
"small": "img/icn-logs-panel.svg",
"large": "img/icn-logs-panel.svg"
},
"links": [
{ "name": "Raise issue", "url": "https://github.com/grafana/grafana/issues/new" },
{
"name": "Documentation",
"url": "https://grafana.com/docs/grafana/latest/panels-visualizations/visualizations/@todo/"
}
]
}
}
@@ -1,16 +0,0 @@
import { Field } from '@grafana/data';
import { LogsFrame } from '../../../features/logs/logsFrame';
import type { Options as TableOptions } from '../table/panelcfg.gen';
import type { Options as LogsTableOptions } from './panelcfg.gen';
/** Callback invoked when the logs-table options change; receives the merged logs-table + table options object. */
export type OnLogsTableOptionsChange = (option: LogsTableOptions & TableOptions) => void;
/** Builds a URL linking to a specific log line, identified by its frame, row index, and field. */
export type BuildLinkToLogLine = (logsFrame: LogsFrame, rowIndex: number, field: Field) => string;
/**
 * Type guard narrowing an unknown value to OnLogsTableOptionsChange.
 * Only verifies that the value is callable; the signature cannot be
 * checked at runtime.
 */
export function isOnLogsTableOptionsChange(callback: unknown): callback is OnLogsTableOptionsChange {
  const isCallable = typeof callback === 'function';
  return isCallable;
}
/**
 * Type guard narrowing an unknown value to BuildLinkToLogLine.
 * Only verifies that the value is callable; the signature cannot be
 * checked at runtime.
 */
export function isBuildLinkToLogLine(callback: unknown): callback is BuildLinkToLogLine {
  const isCallable = typeof callback === 'function';
  return isCallable;
}
+7 -69
View File
@@ -3364,6 +3364,12 @@
},
"BacktestConfig": {
"properties": {
"annotations": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"condition": {
"type": "string"
},
@@ -3373,16 +3379,8 @@
},
"type": "array"
},
"exec_err_state": {
"enum": [
"OK",
"Alerting",
"Error"
],
"type": "string"
},
"for": {
"type": "string"
"$ref": "#/components/schemas/Duration"
},
"from": {
"format": "date-time",
@@ -3391,22 +3389,12 @@
"interval": {
"$ref": "#/components/schemas/Duration"
},
"keep_firing_for": {
"type": "string"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"missing_series_evals_to_resolve": {
"format": "int64",
"type": "integer"
},
"namespace_uid": {
"type": "string"
},
"no_data_state": {
"enum": [
"Alerting",
@@ -3415,18 +3403,12 @@
],
"type": "string"
},
"rule_group": {
"type": "string"
},
"title": {
"type": "string"
},
"to": {
"format": "date-time",
"type": "string"
},
"uid": {
"type": "string"
}
},
"type": "object"
@@ -6331,12 +6313,6 @@
"interval": {
"$ref": "#/components/schemas/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -6347,12 +6323,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/components/schemas/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/components/schemas/GettableExtendedRuleNode"
@@ -8828,12 +8798,6 @@
"interval": {
"$ref": "#/components/schemas/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -8844,12 +8808,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/components/schemas/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/components/schemas/PostableExtendedRuleNode"
@@ -9888,14 +9846,6 @@
},
"type": "object"
},
"RemoteWriteConfig": {
"properties": {
"url": {
"type": "string"
}
},
"type": "object"
},
"Report": {
"properties": {
"created": {
@@ -10594,12 +10544,6 @@
"interval": {
"$ref": "#/components/schemas/Duration"
},
"labels": {
"additionalProperties": {
"type": "string"
},
"type": "object"
},
"limit": {
"format": "int64",
"type": "integer"
@@ -10610,12 +10554,6 @@
"query_offset": {
"type": "string"
},
"remote_write": {
"items": {
"$ref": "#/components/schemas/RemoteWriteConfig"
},
"type": "array"
},
"rules": {
"items": {
"$ref": "#/components/schemas/GettableExtendedRuleNode"