Compare commits

..

55 Commits

Author SHA1 Message Date
idastambuk
8c877b081e Cleanup dashboards 2025-12-29 16:12:52 +01:00
Alexander Akhmetov
e38f007d30 Alerting: Fetch alert rule provenances for a page of rules only (#115643)
* Alerting: Fetch alert rule provenances for a page of rules only

* error when failed to fetch provenance
2025-12-24 13:41:46 +01:00
Alexander Akhmetov
c38e515dec Alerting: Fix export of imported Prometheus-style recording rules to terraform (#115661)
Alerting: Fix export of imported Prometheus-style recording rules to terraform
2025-12-24 09:49:38 +01:00
Mustafa Sencer Özcan
4f57ebe4ad fix: bump default facet search limit for unified search (#115690)
* fix: bump limit

* feat: add facetLimit query parameter to search API

* fix: set to 500

* fix: update snapshot

* fix: yarn generate-apis
2025-12-24 09:33:24 +01:00
alerting-team[bot]
3f5f0f783b Alerting: Update alerting module to 926c7491019668286c423cad9d2a65f419b14944 (#115704)
[create-pull-request] automated change

Co-authored-by: alexander-akhmetov <1875873+alexander-akhmetov@users.noreply.github.com>
2025-12-24 08:48:06 +01:00
grafana-pr-automation[bot]
5e4e6c1172 I18n: Download translations from Crowdin (#115705)
New Crowdin translations by GitHub Action

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-12-24 00:42:01 +00:00
Paul Marbach
f5218b5eb8 Sparkline: Add point annotations for some common calcs (#115595) 2025-12-23 16:39:30 -05:00
alerting-team[bot]
a1389bc173 Alerting: Update alerting module to 77a1e2f35be87bebc41a0bf634f336282f0b9b53 (#115498)
* [create-pull-request] automated change

* Remove IsProtectedField and temp structure

* Fix alerting historian

* make update-workspace

---------

Co-authored-by: yuri-tceretian <25988953+yuri-tceretian@users.noreply.github.com>
Co-authored-by: Yuri Tseretyan <yuriy.tseretyan@grafana.com>
Co-authored-by: Alexander Akhmetov <me@alx.cx>
2025-12-23 14:46:44 -05:00
Sergej-Vlasov
0a0f92e85e InspectJsonTab: Force render the layout after change to reflect new gridPos (#115688)
force render the layout after inspect panel change to account for gridPos change
2025-12-23 10:52:50 -05:00
Alexander Akhmetov
45f665d203 Alerting: Config option to set default datasource in Prometheus rule import (#115665)
What is this feature?

Add a config option to set the data source for imported rules when X-Grafana-Alerting-Datasource-UID is not present.

Why do we need this feature?

Currently mimirtool requires passing --extra-headers 'X-Grafana-Alerting-Datasource-UID: {uid}' when used with Grafana. This config option allows specifying a default, which is used when the header is missing, making it easier to use and more similar to the case when it's used with Mimir.
2025-12-23 14:24:53 +00:00
Alex Khomenko
47436a3eeb Provisioning: Fix settings error loop (#115677) 2025-12-23 16:16:48 +02:00
Yunwen Zheng
359505a8aa RecentlyViewedDashboards: Retry when error (#115649) 2025-12-23 09:14:00 -05:00
Alexander Akhmetov
ac866b0114 Alerting: Prevent convert API from deleting non-imported rule groups (#115667) 2025-12-23 14:51:45 +01:00
Kristina Demeshchik
521cc11994 Dashboard Outline: Differentiate hover styles between edit and view modes (#115646) 2025-12-23 11:41:49 +01:00
Alexander Akhmetov
84120fb210 Alerting: Fix file import/export of recording rules with target datasource uid (#115663)
Alerting: Fix export of recording rules with target datasource uid
2025-12-23 11:26:15 +01:00
Alexander Akhmetov
096208202e Alerting: Fix a race condition panic in ResetStateByRuleUID (#115662) 2025-12-23 11:23:16 +01:00
Alexander Akhmetov
dd1edf7f16 Alerting: Fix database-based filtering by labels when rules have no labels (#115657)
Alerting: Fix database-based filtering by labels when rules have no labels at all
2025-12-22 21:23:59 +01:00
Haris Rozajac
15b5dcda80 Dashboard V1->V2 Conversion: Default multi to true in GroupBy when multi is not defined in v1 (#115656)
default multi to true when multi is not defined in v1
2025-12-22 11:00:37 +00:00
grafana-pr-automation[bot]
5585595c16 I18n: Download translations from Crowdin (#115604)
New Crowdin translations by GitHub Action

Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
2025-12-20 00:41:06 +00:00
Sean Griffin
6daa7ff729 Clean up Schema Inspector feature code (#115514)
Co-authored-by: Alex Spencer <52186778+alexjonspencer1@users.noreply.github.com>
2025-12-19 16:05:46 -05:00
Paul Marbach
8cfac85b48 Gauge: Add guide dots for rounded bars to help with accuracy, update color logic for more consistent gradients (#115285)
* Gauge: Fit-and-finish tweaks to glows, text position, and sparkline size

* adjust text height and positions a little more

* cohesive no data handling

* more tweaks

* fix migration test

* Fix JSON formatting by adding missing newline

* remove new line

* Gauge: Add guide dots for rounded bars to help with accuracy

* 30% width

* remove spotlight, starting to make gradients a bit more predictable

* fix segmented

* update rotation of gauge color

* update i18n and migration tests

* fix spacing

* more fixture updates

* wip: using clip-path and CSS for drawing the gauge

* wip: overhaul color in gauge

* wip: progress on everything

* refactoring defs into utils

* its all working

* fixme comment

* fix backend migration tests

* remove any other mentions of spotlights

* one more tweak

* update gdev

* add lots of tests and reorganize the code a bit

* fix dev dashboard fixture

* more cleanup, optimization

* fix a couple of bugs

* fix bad import

* disable storybook test due to false positive

* a more sweeping disable of the color-contrast

* update backend tests

* update gradient for fixed color

* test all dark/light theme variants

* set opacity to 0.5 for dots

* move min degrees for start dot render to a const

* change endpoint marks to be configurable

* update gdev and fixtures

* i18n

* shore up testing a bit

* remove period for consistency

* hide glow at small angles

* more testing and cleanup

* addressing PR comments

* Update packages/grafana-ui/src/components/RadialGauge/colors.ts

Co-authored-by: Jesse David Peterson <jesdavpet@users.noreply.github.com>

* Update packages/grafana-ui/src/components/RadialGauge/colors.ts

Co-authored-by: Jesse David Peterson <jesdavpet@users.noreply.github.com>

* break out binary search stuff and write tests

* fix lint issues

---------

Co-authored-by: Jesse David Peterson <jesdavpet@users.noreply.github.com>
2025-12-19 20:41:57 +00:00
Renato Costa
0284d1e669 unified-storage: add UnixTimestamp support to the sqlkv implementation (#115651)
* unified-storage: add `UnixTimestamp` support to sqlkv implementation

* unified-storage: improve tests and enable all of them on sqlkv
2025-12-19 15:35:22 -05:00
Adela Almasan
3522efdf32 VizSuggestions: Error handling (#115428)
* error handling

* retry fetching suggestions

* add translation

* useAsyncRetry

* hasError test

* update error handling

* clean up the text panel stuff for the current version

* cleanup for loop

* some more tests for some failure cases

* fix lint issue

---------

Co-authored-by: Paul Marbach <paul.marbach@grafana.com>
2025-12-19 20:22:26 +00:00
Stephanie Hingtgen
2fbe2f77e3 Folders: Add max depth check with descendant to /apis (#115305) 2025-12-19 20:17:39 +00:00
Will Assis
4164239f56 unified-storage: implement sqlkv Save method (#115458)
* unified-storage: sqlkv save method
2025-12-19 14:27:06 -05:00
Liza Detrick
14c595f206 Logs: Cell format value on inspect should use Code view for arrays, objects, and JSON strings (#115037) 2025-12-19 19:52:02 +01:00
Isabel Matwawana
471d6f5236 Docs: Add suggested dashboards (#114729) 2025-12-19 13:27:39 -05:00
Jesse David Peterson
f91efcfe2c TimeSeries: Fix truncated label text in legend table mode (#115647)
* fix(legend-table): remove arbitrary 600px max width for full width cells

* test(legend-table): backfill test coverage for viz legend table

* test(legend-table): backfill test coverage for viz legend table item

* refactor(legend-table): use derived theme spacing, not hard-coded values
2025-12-19 14:12:01 -04:00
Adela Almasan
49032ae3d7 VizSuggestions: Update selected suggestion styling (#115581)
* update selected suggestion style

* update highlight styles for light theme, add inert to div

* remove commented-out original idea

---------

Co-authored-by: Paul Marbach <paul.marbach@grafana.com>
2025-12-19 12:23:27 -05:00
Rodrigo Vasconcelos de Barros
8b316cca25 Alerting: Add tests for AlertRuleMenu component (#115473)
* Alerting: Add tests for AlertRuleMenu component

* Refactor test mocks according to TESTING.md

* Remove duplicate mock functions

* Replace snapshot test with more readable assertion

* Remove SETUP_ALERTING_DEV.md file

* Refactor feature flags usage in tests
2025-12-19 11:39:48 -05:00
Collin Fingar
fa73caf6c8 Snapshots: Fix V2 Snapshot data coupling (#115278)
* Snapshots: Potential fix for rendering V2 snaps

* removing comments

* Added unit test
2025-12-19 11:30:24 -05:00
Alex Khomenko
0e4b1c7b1e Provisioning: Fix error loop in synchronise step (#115570)
* Refactor requiresMigration

* Remove InlineSecureValueWarning

* Prevent error loop

* Fix error loop

* Cleanup

* i18n
2025-12-19 17:57:45 +02:00
Kristina Demeshchik
aa69d97f1e Variables: Show variable reference instead of interpolated datasource in query variable editor (#115624)
show variable ref
2025-12-19 10:38:22 -05:00
Alex Khomenko
7812f783bb Provisioning: Enable editing dashboard via JSON model (#115420)
* Provisioning: Enable save for json model changes

* Do not pass props

* Simplify logic and fix warnings

* add tests

* Show diff for json changes

* Add try/catch
2025-12-19 17:36:46 +02:00
Galen Kistler
b2d6bb7a05 logsdrilldowndefaultcolumns: require plugins:write for non GET operations (#115639)
chore: require plugins:write
2025-12-19 15:26:36 +00:00
Anna Urbiztondo
56dd1ca867 Docs: Note for File Provisioning (#115630)
Note
2025-12-19 14:40:33 +00:00
Deyan Halachliyski
62b2a202de Alerting: Add saved searches feature for alert rules page (#115001)
* Alerting: Add saved searches feature for alert rules page

Add ability to save, rename, delete, and apply search queries on the
Alert Rules page. Includes auto-apply default search on navigation
and UserStorage persistence.

Behind feature toggle `alertingSavedSearches` (disabled by default).

* Alerting: Add i18n translations for saved searches

* Alerting: Remove unused imports in saved searches

* Alerting: Add CODEOWNERS for e2e-playwright/alerting-suite

* Alerting: Add useSavedSearches mock to RulesFilter.v2 tests

* Alerting: Fix failing unit tests for saved searches

- Fix Jest mock hoisting issue in useSavedSearches.test.ts by configuring
  UserStorage mock implementation after imports instead of inline
- Update SavedSearches.test.tsx to use findBy* queries for async popup content
- Fix tests to click apply button instead of text for applying searches
- Update maxLength test to verify attribute instead of trying to exceed it

* Alerting: Fix saved searches test mocking and assertions

- Fix UserStorage mock in useSavedSearches.test.ts by creating mock with
  default Promise-returning functions inside jest.mock() factory, then
  accessing the instance via getMockUserStorageInstance() helper
- Fix SavedSearches.test.tsx apply button tests to use correct accessible
  name "Apply this search" (from tooltip) instead of dynamic aria-label
- Fix disabled button assertion to check native disabled attribute instead
  of relying on aria-disabled which is set inconsistently by Button component
- Use findAllByRole for async popup content queries

* Alerting: Fix test query for disabled save button

Use findByText + closest instead of findByRole to find the disabled
"Save current search" button. The Grafana Button component renders
with conflicting accessibility attributes (disabled="" + aria-disabled="false")
which breaks role-based queries in React Testing Library.

* fix(alerting): preserve UserStorage mock reference before clearAllMocks

* fix(alerting): add missing test mocks for crypto and console

- Mock crypto.randomUUID for Node.js test environment
- Add console.error spy to tests expecting storage/parse errors
- Add console.warn spy to test expecting validation warnings

Fixes jest-fail-on-console failures and crypto.randomUUID TypeError.

* fix(alerting): add console.error spy to save failure test

* fix(alerting): address PR review feedback for saved searches

- Register alertingSavedSearches feature toggle in backend
- Extract shared types to SavedSearches.types.ts to fix circular dependencies
- Extract sub-components: InlineSaveInput, InlineRenameInput, SavedSearchItem
- Remove unused imports (IconButton, Input) and styles from SavedSearches.tsx
- Add try/catch for auto-apply default search error handling
- Remove maxLength validation and corresponding test

* fix(alerting): fix validation error display in saved searches

- Fix useEffect dependency array that was immediately clearing validation errors
- Remove error from deps so errors only clear when user types, not when set
- Run i18n-extract to remove unused error-name-too-long translation key

* fix(alerting): address PR review feedback for saved searches

- Replace toHaveBeenCalled assertions with UI verification using AppNotificationList
- Rename useSavedSearches.test.ts to .tsx for JSX support
- Update README documentation to reflect current test patterns
- Add test cleanup between E2E tests to prevent data leakage

* fix(alerting): remove unused import and fix test wrapper

- Remove unused locationService import from RulesFilter.v2.tsx
- Add missing bootData spread in useSavedSearches.test.tsx mock
- Add createWrapper to renderHook call for user-specific storage key test

* fix(alerting): add Redux wrapper to all useSavedSearches hook tests

All renderHook calls for useSavedSearches now include the createWrapper()
which provides the Redux Provider context required by useAppNotification.

* fix(alerting): use regex patterns in MSW handlers for UserStorage tests

MSW handlers now use regex patterns to match any namespace and user UID,
since UserStorage reads config values from internal imports that aren't
affected by jest.mock of @grafana/runtime.

* fix(alerting): mock UserStorage directly instead of using MSW

Replace MSW HTTP handlers with a direct mock of the UserStorage class.
The MSW approach failed because UserStorage evaluates config.namespace
at module load time, before jest.mock takes effect, causing the regex
patterns to not match the actual request URLs.

This follows the same pattern used in useFavoriteDatasources.test.ts.

* refactor(alerting): use react-hook-form and Dropdown for saved searches

- Migrate InlineRenameInput and InlineSaveInput to react-hook-form
- Replace custom PopupCard with Grafana Dropdown component
- Use useReducer for centralized dropdown state management
- Add stopPropagation handlers to prevent dropdown closing during form interactions
- Update tests to use real useSavedSearches hook with mocked UserStorage
- Consolidate and simplify saved searches test suite

* fix: resolve CI failures in SavedSearches component

- Fix TypeScript TS2540 errors by using MutableRefObject type for refs
- Fix form submission by using onClick instead of type="submit" on IconButton
  (IconButton doesn't forward the type prop to the underlying button)
- Fix action menu tests by stopping click propagation on ActionMenu wrapper
- Fix Escape key handling by focusing the dialog element instead of the
  potentially-disabled save button

* fix(alerting): add navTree to runtime mock in useSavedSearches tests

Add empty navTree array to the @grafana/runtime config mock to prevent
store initialization crash when buildInitialState() calls .find() on
undefined navTree.

* fix(alerting): add error handling for auto-apply default search

Wrap handleApplySearch call in try-catch to prevent unhandled exceptions
when auto-applying the default saved search on navigation.

* fix(alerting): prevent saved searches dropdown from closing when clicking action menu

The nested Dropdown components caused the outer SavedSearches dropdown to close
when clicking on action menu items (Set as default, Rename, Delete). This happened
because @floating-ui/react's useDismiss hook detected clicks on the inner Menu
(rendered via Portal) as "outside" clicks.

Fix: Replace the outer Dropdown with PopupCard and add custom click-outside
handling that explicitly excludes portal elements ([role="menu"] and
[data-popper-placement]). This matches the pattern used before the Dropdown
refactor.

Changes:
- SavedSearches.tsx: Use PopupCard instead of Dropdown, add click-outside handler
- SavedSearchItem.tsx: Add menuPortalRoot prop for action menu positioning
- RulesFilter.v2.tsx: Fix double analytics tracking on auto-apply

* fix(alerting): auto-apply default saved search on page navigation

The default saved search was not being applied when navigating to the
Alert rules page. This was caused by a race condition where `isLoading`
was `false` on initial render (status was 'not-executed'), causing the
auto-apply effect to run before saved searches were loaded.

Fix: Include the uninitialized state in the loading check so the effect
waits until data is actually loaded before attempting to auto-apply.

Also adds tests for the auto-apply functionality.

* fix(alerting): align action menu icon and improve saved search tests

- Fix vertical alignment of three-dot menu icon in saved search items
  by adding flex centering to the wrapper div
- Add feature toggle setup/teardown in saved searches test suite
- Fix location mocking in test for URL search parameter handling

* refactor(alerting): improve saved searches validation and organization

- Rename SavedSearches.types.ts to savedSearchesSchema.ts
- Use react-hook-form's built-in validation instead of manual setError
- Change error handling to throw ValidationError instead of returning it
- Add type guard isValidationError for safe error checking
- Add alphabetical sorting for saved searches (default first)
- Replace console.warn/error with logWarning/logError for analytics
- Extract helper functions: sortSavedSearches, loadSavedSearchesFromStorage, hasUrlSearchQuery

* refactor(alerting): address PR review comments for saved searches (steps 9-12)

- Add comprehensive comment explaining useEffect double-render limitation
  and potential future improvements for default search auto-apply (step 9)
- Add test documenting expected behavior when navigating back to alert list
  after leaving the page - default filter is re-applied (step 10)
- Update RulesFilter.v2.test.tsx to use testWithFeatureToggles helper and
  add MSW UserStorage handlers for future use (step 11)
- Update SavedSearches.test.tsx to use render from test/test-utils and
  byRole selectors for menu items (step 12)

* test(alerting): update saved searches tests for refactored API

- Update mockSavedSearches order to match sorted output (default first, then alphabetically)
- Change validation error tests to use rejects pattern (saveSearch/renameSearch now throw)
- Add hasPermission mock to contextSrv for module-level permission check

* fix(alerting): fix CI failures for saved searches

- Update onRenameComplete type to match throw-based API (Promise<void>)
- Run i18n-extract to add missing translation keys

* fix(alerting): salvage valid entries when saved searches validation fails

Instead of returning an empty array when array validation fails,
iterate through each item and keep only the valid entries.
This prevents losing all saved searches if a single entry is corrupted.

* test(alerting): update test to expect valid entries to be preserved

Update the test assertion to match the new behavior where valid saved
search entries are preserved when some entries fail validation, rather
than discarding all entries.

* fix(alerting): eliminate double API request on saved search auto-apply

Move saved searches loading and auto-apply logic from RulesFilterV2 to
RuleListPage. This ensures the default search filter is applied BEFORE
FilterView mounts, preventing double API requests on initial page load.

- Load saved searches at RuleListPage level
- Gate RuleList rendering until saved searches are loaded
- Pass savedSearchesResult as prop to avoid duplicate hook calls
- Remove auto-apply tests from RulesFilter.v2.test.tsx (behavior moved)

* fix(alerting): mock useSavedSearches in RuleList.v2 tests

The useSavedSearches hook triggers async state updates that complete
after tests finish, causing React act() warnings. Mock the hook to
prevent async operations during tests.

* refactor(alerting): migrate saved searches tests to use MSW

Address code review feedback by migrating UserStorage tests from
jest.mock to MSW-based mocking:

- Add MSW helper functions (setAlertingStorageItem, getAlertingStorageItem)
  to simplify test setup for UserStorage
- Migrate useSavedSearches.test.tsx to use MSW handlers instead of
  jest.mock('@grafana/runtime/internal')
- Migrate RulesFilter.v2.test.tsx to use MSW handlers
- Update README documentation to accurately reflect how tests use MSW
- Add tests for default search auto-apply behavior in RuleListPage
- Simplify comments to be concise and accurate

* fix(alerting): mock UserStorage directly in useSavedSearches tests

The UserStorage class caches its storage spec at the instance level,
and the useSavedSearches hook creates the instance at module level.
This caused test isolation issues where cached state leaked between
tests, making all tests that depended on loading data fail.

Fix by mocking UserStorage class directly instead of relying on MSW
handlers. This gives each test explicit control over what getItem
and setItem return, ensuring proper isolation.

Also update persistence assertions to verify mock.setItem calls
instead of reading from MSW storage (which the mock bypasses).

* refactor(alerting): remove setup helper in SavedSearches tests

Replace the `setup()` helper function with direct `render()` calls
as suggested in PR review. This makes tests more explicit about
what component is being rendered and with what props.

* refactor(alerting): extract default search auto-apply into dedicated hook

Moves the default saved search auto-apply logic from useSavedSearches into
a new useApplyDefaultSearch hook. This improves separation of concerns by
keeping useSavedSearches focused on CRUD operations while the new hook
handles the page-level auto-apply behavior.

Key changes:
- Created useApplyDefaultSearch hook with session-based visit tracking
- Removed getAutoApplySearch method and user-specific session keys from useSavedSearches
- Exported loadDefaultSavedSearch utility for independent default search loading
- Simplified test mocks to use loadDefaultSavedSearch instead of full hook mocking
- Removed unused savedSearchesResult prop passing through component tree

* fix(alerting): improve default search auto-apply timing and test reliability

Replace react-use's auto-executing useAsync with internal useAsync hook
for better control over when default search is loaded. This prevents
race conditions and ensures the async operation only executes when needed.

Test improvements:
- Add proper session storage cleanup in beforeEach
- Use waitFor to handle async operations correctly
- Prevent visited flag from affecting subsequent tests
- Clear mock call history between tests

The internal useAsync hook doesn't auto-execute on mount, allowing us to
control exactly when the default search loads based on conditions rather
than relying on dependency array triggers.

---------

Co-authored-by: Konrad Lalik <konradlalik@gmail.com>
2025-12-19 15:32:27 +01:00
Matheus Macabu
133865182e CI: Add e2e-playwright folder to e2e test detection changes (#115623) 2025-12-19 15:21:22 +01:00
Renato Costa
338ae95ef5 unified-storage: add BatchDelete support to sqlkv implementation (#115573) 2025-12-19 09:15:23 -05:00
Anna Urbiztondo
19c9f21cc4 Docs: Corrections for full instance sync (#115615)
* Corrections for full instance sync

* Edits

* Feedback

* Migration checkbox

* Edit

* Update docs/sources/as-code/observability-as-code/provision-resources/git-sync-setup.md

Co-authored-by: Roberto Jiménez Sánchez <roberto.jimenez@grafana.com>

* Mention to export

* Prettier

---------

Co-authored-by: Roberto Jiménez Sánchez <roberto.jimenez@grafana.com>
2025-12-19 15:13:35 +01:00
Roberto Jiménez Sánchez
9760eef62f Provisioning: fix multi-tenant and single-tenant authorization (#115435)
* feat(auth): add ExtraAudience option to RoundTripper

Add ExtraAudience option to RoundTripper to allow operators to include
additional audiences (e.g., provisioning group) when connecting to the
multitenant aggregator. This ensures tokens include both the target API
server's audience and the provisioning group audience, which is required
to pass the enforceManagerProperties check.

- Add ExtraAudience RoundTripperOption
- Improve documentation and comments
- Add comprehensive test coverage

* fix(operators): add ExtraAudience for dashboards/folders API servers

Operators connecting to dashboards and folders API servers need to include
the provisioning group audience in addition to the target API server's
audience to pass the enforceManagerProperties check.

* provisioning: fix settings/stats authorization for AccessPolicy identities

The settings and stats endpoints were returning 403 for users accessing via
ST->MT because the AccessPolicy identity was routed to the access checker,
which doesn't know about these resources.

This fix handles 'settings' and 'stats' resources before the access checker
path, routing them to the role-based authorization that allows:
- settings: Viewer role (read-only, needed by frontend)
- stats: Admin role (can leak information)

* fix: update BootstrapStep component to remove legacy storage handling and adjust resource counting logic

- Removed legacy storage flag from useResourceStats hook in BootstrapStep.
- Updated BootstrapStepResourceCounting to simplify rendering logic and removed target prop.
- Adjusted tests to reflect changes in resource counting and rendering behavior.

* Revert "fix: update BootstrapStep component to remove legacy storage handling and adjust resource counting logic"

This reverts commit 148802cbb5.

* provisioning: allow any authenticated user for settings/stats endpoints

These are read-only endpoints needed by the frontend:
- settings: returns available repository types and configuration for the wizard
- stats: returns resource counts

Authentication is verified before reaching authorization, so any user who
reaches these endpoints is already authenticated. Requiring specific org
roles failed for AccessPolicy tokens which don't carry traditional roles.

* provisioning: remove redundant admin role check from listFolderFiles

The admin role check in listFolderFiles was redundant (route-level auth already
handles access) and broken for AccessPolicy identities which don't have org roles.

File access is controlled by the AccessClient as documented in the route-level
authorization comment.

* provisioning: add isAdminOrAccessPolicy helper for auth checks

Consolidates authorization logic for provisioning endpoints:
- Adds isAdminOrAccessPolicy() helper that allows admin users OR AccessPolicy identities
- AccessPolicy identities (ST->MT flow) are trusted internal callers without org roles
- Regular users must have admin role (matching frontend navtree restriction)

Used in: authorizeSettings, authorizeStats, authorizeJobs, listFolderFiles

* provisioning: consolidate auth helpers into allowForAdminsOrAccessPolicy

Simplifies authorization by:
- Adding isAccessPolicy() helper for AccessPolicy identity check
- Adding allowForAdminsOrAccessPolicy() that returns Decision directly
- Consolidating stats/settings/jobs into single switch case
- Using consistent pattern in files.go

* provisioning: require admin for files subresource at route level

Aligns route-level authorization with handler-level check in listFolderFiles.
Both now require admin role OR AccessPolicy identity for consistency.

* provisioning: restructure authorization with role-based helpers

Reorganizes authorization code for clarity:

Role-based helpers (all support AccessPolicy for ST->MT flow):
- allowForAdminsOrAccessPolicy: admin role required
- allowForEditorsOrAccessPolicy: editor role required
- allowForViewersOrAccessPolicy: viewer role required

Repository subresources by role:
- Admin: repository CRUD, test, files
- Editor: jobs, resources, sync, history
- Viewer: refs, status (GET only)

Connection subresources by role:
- Admin: connection CRUD
- Viewer: status (GET only)

* provisioning: move refs to admin-only

refs subresource now requires admin role (or AccessPolicy).
Updated documentation comments to reflect current permissions.

* provisioning: add fine-grained permissions for connections

Adds connection permissions following the same pattern as repositories:
- provisioning.connections:create
- provisioning.connections:read
- provisioning.connections:write
- provisioning.connections:delete

Roles:
- fixed:provisioning.connections:reader (granted to Admin)
- fixed:provisioning.connections:writer (granted to Admin)

* provisioning: remove non-existent sync subresource from auth

The sync subresource doesn't exist - syncing is done via the jobs endpoint.
Removed dead code from authorization switch case.

* provisioning: use access checker for fine-grained permissions

Refactors authorization to use b.access.Check() with verb-based checks:

Repository subresources:
- CRUD: uses actual verb (get/create/update/delete)
- test: uses 'update' (write permission)
- files/refs/resources/history/status: uses 'get' (read permission)
- jobs: uses actual verb for jobs resource

Connection subresources:
- CRUD: uses actual verb
- status: uses 'get' (read permission)

The access checker maps verbs to actions defined in accesscontrol.go.
Falls back to admin role for backwards compatibility.

Also removes redundant admin check from listFolderFiles since
authorization is now properly handled at route level.

* provisioning: use verb constants instead of string literals

Uses apiutils.VerbGet, apiutils.VerbUpdate instead of "get", "update".

* provisioning: use access checker for jobs and historicjobs resources

Jobs resource: uses actual verb (create/read/write/delete)
HistoricJobs resource: read-only (historicjobs:read)

* provisioning: allow viewers to access settings endpoint

Settings is read-only and needed by multiple UI pages (not just admin pages).
Stats remains admin-only.

* provisioning: consolidate role-based resource authorization

Extract isRoleBasedResource() and authorizeRoleBasedResource() helpers
to avoid duplicating settings/stats resource checks in multiple places.

* provisioning: use resource name constants instead of hardcoded strings

Replace 'repositories', 'connections', 'jobs', 'historicjobs' with
their corresponding ResourceInfo.GetName() constants.

* provisioning: delegate file authorization to connector

Route level: allow any authenticated user for files subresource
Connector: check repositories:read only for directory listing
Individual file CRUD: handled by DualReadWriter based on actual resource

* provisioning: enhance authorization for files and jobs resources

Updated file authorization to fall back to admin role for listing files. Introduced checkAccessForJobs function to manage job permissions, allowing editors to create and manage jobs while maintaining admin-only access for historic jobs. Improved error messaging for permission denials.

* provisioning: refactor authorization with fine-grained permissions

Authorization changes:
- Use access checker with role-based fallback for backwards compatibility
- Repositories/Connections: admin role fallback
- Jobs: editor role fallback (editors can manage jobs)
- HistoricJobs: admin role fallback (read-only)
- Settings: viewer role (needed by multiple UI pages)
- Stats: admin role

Files subresource:
- Route level allows any authenticated user
- Directory listing checks repositories:read in connector
- Individual file CRUD delegated to DualReadWriter

Refactored checkAccessWithFallback to accept fallback role parameter.

* provisioning: refactor access checker integration for improved authorization

Updated the authorization logic to utilize the new access checker across various resources, including files and jobs. This change simplifies the permission checks by removing redundant identity retrieval and enhances error handling. The access checker now supports role-based fallbacks for admin and editor roles, ensuring backward compatibility while streamlining the authorization process for repository and connection subresources.

* provisioning: remove legacy access checker tests and refactor access checker implementation

Deleted the access_checker_test.go file to streamline the codebase and focus on the updated access checker implementation. Refactored the access checker to enhance clarity and maintainability, ensuring it supports role-based fallback behavior. Updated the access checker integration in the API builder to utilize the new fallback role configuration, improving authorization logic across resources.

* refactor: split AccessChecker into TokenAccessChecker and SessionAccessChecker

- Renamed NewMultiTenantAccessChecker -> NewTokenAccessChecker (uses AuthInfoFrom)
- Renamed NewSingleTenantAccessChecker -> NewSessionAccessChecker (uses GetRequester)
- Split into separate files with their own tests
- Added mockery-generated mock for AccessChecker interface
- Names now reflect identity source rather than deployment mode

* fix: correct error message case and use accessWithAdmin for filesConnector

- Fixed error message to use lowercase 'admin role is required'
- Fixed filesConnector to use accessWithAdmin for proper role fallback
- Formatted code

* refactor: reduce cyclomatic complexity in filesConnector.Connect

Split the Connect handler into smaller focused functions:
- handleRequest: main request processing
- createDualReadWriter: setup dependencies
- parseRequestOptions: extract request options
- handleDirectoryListing: GET directory requests
- handleMethodRequest: route to method handlers
- handleGet/handlePost/handlePut/handleDelete: method-specific logic
- handleMove: move operation logic

* security: remove blind TypeAccessPolicy bypass from access checkers

Removed the code that bypassed authorization for TypeAccessPolicy identities.
All identities now go through proper permission verification via the inner
access checker, which will validate permissions from ServiceIdentityClaims.

This addresses the security concern where TypeAccessPolicy was being trusted
blindly without verifying whether the identity came from the wire or in-process.

* feat: allow editors to access repository refs subresource

Change refs authorization from admin to editor fallback so editors can
view repository branches when pushing changes to dashboards/folders.

- Split refs from other read-only subresources (resources, history, status)
- refs now uses accessWithEditor instead of accessWithAdmin
- Updated documentation comment to reflect authorization levels
- Added integration test TestIntegrationProvisioning_RefsPermissions
  verifying editor access and viewer denial

* tests: add authorization tests for missing provisioning API endpoints

Add comprehensive authorization tests for:
- Repository subresources (test, resources, history, status)
- Connection status subresource
- HistoricJobs resource
- Settings and Stats resources

All authorization paths are now covered by integration tests.

* test: fix RefsPermissions test to use GitHub repository

Use github-readonly.json.tmpl template instead of local folder,
since refs endpoint requires a versioned repository that supports
git operations.

* chore: format test files

* fix: make settings/stats authorization work in MT mode

Update authorizeRoleBasedResource to check authlib.AuthInfoFrom(ctx)
for AccessPolicy identity type in addition to identity.GetRequester(ctx).
This ensures AccessPolicy identities are recognized in MT mode where
identity.GetRequester may not set the identity type correctly.

* fix: remove unused authorization helper functions

Remove allowForAdminsOrAccessPolicy and allowForViewersOrAccessPolicy
as they are no longer used after refactoring to use authorizeRoleBasedResource.

* Fix AccessPolicy identity detection in ST authorizer

- Add check for AccessPolicy identities via GetAuthID() in authorizeRoleBasedResource
- Extended JWT may set identity type to TypeUser but AuthID is 'access-policy:...'
- Forward user ID token in X-Grafana-Id header in RoundTripper for aggregator forwarding

* Revert "Fix AccessPolicy identity detection in ST authorizer"

This reverts commit 0f4885e503.

* Add fine-grained permissions for settings and stats endpoints

- Add provisioning.settings:read action (granted to Viewer role)
- Add provisioning.stats:read action (granted to Admin role)
- Add accessWithViewer to APIBuilder for Viewer role fallback
- Use access checker for settings/stats authorization
- Remove role-based authorization functions (isRoleBasedResource, authorizeRoleBasedResource)

This makes settings and stats consistent with other provisioning resources
and works properly in both ST and MT modes via the access checker.

* Remove AUTHORIZATION_COVERAGE.md

* Add provisioning resources to RBAC mapper

- Add connections, settings, stats to provisioning.grafana.app mappings
- Required for authz service to translate K8s verbs to legacy actions
- Fixes 403 errors for settings/stats in MT mode

* refactor: merge access checkers with original fallthrough behavior

Merge tokenAccessChecker and sessionAccessChecker into a unified
access checker that implements the original fallthrough behavior:

1. First try to get identity from access token (authlib.AuthInfoFrom)
2. If token exists AND (is TypeAccessPolicy OR useExclusivelyAccessCheckerForAuthz),
   use the access checker with token identity
3. If no token or conditions not met, fall back to session identity
   (identity.GetRequester) with optional role-based fallback

This fixes the issue where settings/stats/connections endpoints were
failing in MT mode because the tokenAccessChecker was returning an error
when there was no auth info in context, instead of falling through to
session-based authorization.

The unified checker now properly handles:
- MT mode: tries token first, falls back to session if no token
- ST mode: only uses token for AccessPolicy identities, otherwise session
- Role fallback: applies when configured and access checker denies

* Revert "refactor: merge access checkers with original fallthrough behavior"

This reverts commit 96451f948b.

* Grant settings view role to all

* fix: use actual request verb for settings/stats authorization

Use a.GetVerb() instead of hardcoded VerbGet for settings and stats
authorization. When listing resources (hitting collection endpoint),
the verb is 'list' not 'get', and this mismatch could cause issues
with the RBAC service.

* debug: add logging to access checkers for authorization debugging

Add klog debug logs (V4 level) to token and session access checkers
to help diagnose why settings/stats authorization is failing while
connections works.

* debug: improve access checker logging with grafana-app-sdk logger

- Use grafana-app-sdk logging.FromContext instead of klog
- Add error wrapping with resource.group format for better context
- Log more details including folder, group, and allowed status
- Log error.Error() for better error message visibility

* chore: use generic log messages in access checkers

* Revert "Grant settings view role to all"

This reverts commit 3f5758cf36.

* fix: use request verb for historicjobs authorization

The original role-based check allowed any verb for admins. To preserve
this behavior with the access checker, we should pass the actual verb
from the request instead of hardcoding VerbGet.

---------

Co-authored-by: Charandas Batra <charandas.batra@grafana.com>
2025-12-19 15:11:35 +01:00
Marcus Andersson
ece38641ca Dashboards: Make sure to render dashboard links even if they are marked as "in controls menu" (#115381)
links with type dashboard will now be visible.
2025-12-19 13:48:53 +01:00
Yulia Shanyrova
e9a2828f66 Plugins: Add PluginInsights UI (#115616)
* Add getInsights endpoint, add new component PluginInsights

* fix linting and add styles

* add version option to insights request

* Add plugininsights tests, remove console.logs

* fix the insight items types

* Add getting insights to all the mocks to fix the tests

* remove deprecated lint package

* Add theme colors, added tests to PluginDetailsPanel

* Fix eslint error for plugin details page

* Add pluginInsights feature toggle

* change getInsights with version API call, resolve conflicts with main

* fix typecheck and translation

* updated UI

* update registry go

* fix translation

* light css changes

* remove duplicated feature toggle

* fix the build

* update plugin insights tests

* fix typecheck

* rudderstack added, feedback form added

* fix translation

* Remove isPluginTabId function
2025-12-19 13:40:41 +01:00
Sonia Aguilar
c2275f6ee4 Alerting: Add Cursor frontmatter to CLAUDE.md for auto-loading (#115613)
add Cursor frontmatter to CLAUDE.md for auto-loading
2025-12-19 12:03:45 +00:00
Yulia Shanyrova
b4eb02a6f0 Plugins: Change pageId parameter type in usePluginDetailsTabs (#115612)
* change usePluginDetailsTabs pageId parameter type

* add eslint suppressions
2025-12-19 12:45:15 +01:00
Roberto Jiménez Sánchez
a0751b6e71 Provisioning: Default to folder sync only and block new instance sync repositories (#115569)
* Default to folder sync only and block new instance sync repositories

- Change default allowed_targets to folder-only in backend configuration
- Modify validation to only enforce allowedTargets on CREATE operations
- Add deprecation warning for existing instance sync repositories
- Update frontend defaults and tests to reflect new behavior

Fixes #619

* Update warning message: change 'deprecated' to 'not fully supported'

* Fix health check: don't validate allowedTargets for existing repositories

Health checks for existing repositories should treat them as UPDATE operations,
not CREATE operations, so they don't fail validation for instance sync target.

* Fix tests and update i18n translations

- Update BootstrapStep tests to reflect folder-only default behavior
- Run i18n-extract to update translation file structure

* Fix integration tests

* Fix tests

* Fix provisioning test wizard

* Fix frontend test
2025-12-19 11:44:15 +00:00
Alexander Akhmetov
b5793a5f73 Alerting: Fix receiver_name and has_prometheus_definition filters with compact=true (#115582) 2025-12-19 11:43:46 +01:00
Misi
285f2b1d32 Auth: Allow service accounts to authenticate to ST Grafana (#115536)
* Allow SAs to authn ext_jwt

* Address feedback
2025-12-19 09:28:20 +00:00
Tania
7360194ab9 Chore: Remove unifiedReqeustLog feature flag (#115559)
Chore: Remove unifiedReqeustLog feature flag
2025-12-19 09:55:47 +01:00
Will Assis
99f5f14de7 unified-storage: move rvmanager into its own package (#115445)
* unified-storage: move rvmanager into its own package so it can be reused with sqlkv later
2025-12-18 18:35:32 -05:00
Collin Fingar
606a59584a Saved Queries: Pass editor ref for dynamic dropdown display (#114321)
* Saved Queries: Pass editor ref for dynamic dropdown display

* Updated docs per feedback

* Update docs/sources/visualizations/dashboards/build-dashboards/annotate-visualizations/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/dashboards/build-dashboards/annotate-visualizations/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/dashboards/build-dashboards/create-dashboard/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/dashboards/build-dashboards/create-dashboard/index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/explore/get-started-with-explore.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

* Update docs/sources/visualizations/panels-visualizations/query-transform-data/_index.md

Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>

---------

Co-authored-by: Nathan Marrs <nathanielmarrs@gmail.com>
Co-authored-by: Isabel Matwawana <76437239+imatwawana@users.noreply.github.com>
2025-12-18 18:18:24 -05:00
Nathan Marrs
0ec716a433 Embedded Dashboard Panels: Add Grafana Branding (#115198)
* feat: add Grafana logo to embedded panels

- Add Grafana logo watermark to solo panel view (embedded panels)
- Logo appears in top-right corner with subtle background container
- Logo hides on hover to avoid interfering with panel content
- Uses React state to track hover for reliable behavior across nested elements

* minor formatting

* update changes to match public dashboards styling

* match styles of public dashboards

* feat: add responsive Grafana branding to embedded panels

- Add 'Powered by Grafana' branding with text logo to solo panel view
- Implement responsive scaling based on panel dimensions (0.6x to 1.0x)
- Logo and text scale proportionally with panel size
- Branding hides on hover to avoid interfering with panel content
- Matches public dashboard branding pattern for consistency
- Uses ResizeObserver for efficient responsive updates

* feat: add Grafana branding to embedded solo panels

- Add 'Powered by Grafana' branding with text logo to embedded panels
- Create SoloPanelPageLogo component for reusable branding
- Implement responsive scaling based on panel dimensions
- Add hover-to-hide functionality to avoid content overlap
- Logo scales between 0.6x and 1.0x based on panel size

* refactor: move scale calculation into SoloPanelPageLogo component

- Move responsive scale calculation logic from SoloPanelRenderer to SoloPanelPageLogo
- Logo component now manages its own scaling based on container dimensions
- Improves separation of concerns and component encapsulation

* feat: add hideLogo query parameter to disable embedded panel branding

- Add hideLogo query parameter support to SoloPanelPage
- Logo can be hidden via ?hideLogo, ?hideLogo=true, or ?hideLogo=1
- Useful for customers who want to disable branding and for image rendering scenarios
- Update Props interface to include hideLogo in queryParams type

* feat: hide logo in panel image renderer URLs

- Add hideLogo=true parameter to image renderer URLs in ShareLinkTab
- Ensures logo is hidden when generating panel images through share feature
- Update test to expect hideLogo=true in render URL

* feat: hide logo in old dashboard sharing panel image URLs

- Add hideLogo=true parameter to buildImageUrl in ShareModal utils
- Ensures logo is hidden when generating panel images through old share modal
- Update all ShareLink tests to expect hideLogo=true in render URLs

* test: add comprehensive tests for SoloPanelPage and SoloPanelPageLogo

- Add SoloPanelPageLogo tests covering rendering, hover behavior, theme selection, and scaling
- Add SoloPanelPage tests covering logo visibility based on hideLogo prop
- Test logo hiding functionality (most important behavior)
- Test responsive scaling based on container dimensions
- Test ResizeObserver integration
- All 14 tests passing

* refactor: centralize hideLogo handling in SoloPanelPageLogo

Move hideLogo parsing and decision-making into SoloPanelPageLogo so SoloPanelPage/SoloPanelRenderer only pass through the raw query param value.

* chore: clean up solo logo test and share link params

Remove a duplicate SVG mock in SoloPanelPageLogo.test, and simplify ShareLinkTab image URL building without changing behavior.

* chore: revert ShareLinkTab image query refactor

Restore the previous image URL query-param mutation logic in ShareLinkTab to reduce risk.

* chore: set hideLogo once for ShareLinkTab image URLs

Avoid passing hideLogo twice when building the rendered image URL.

* fix: handle boolean hideLogo query param in SoloPanelPageLogo

Handle query params that are represented as booleans (e.g., ?hideLogo) and arrays, and avoid calling trim() on non-strings.

* fix i18n

* fix(dashboard-scene): address SoloPanelPageLogo review feedback

Avoid double-scaling logo margin, clarify scaling comments, and extend tests for null/array values and ResizeObserver cleanup.

* update margin left on logo to better match text spacing
2025-12-18 15:01:16 -08:00
Leon Sorokin
72e1f1e546 Heatmap: Support for linear y axis (#113337)
* wip

* boop

* Base factor on data

* Add some basic option control

* Remove old comments

* Add feature flag

* Apply feature flag to axis options

* Turn factor calculation into exported function

* Simplify bucket factor function

* Clarify comments

* Fix cell sizing of pre-bucketed heatmaps with log

* Remove unnecessary category change

* Consolidate editor for calculate from data no

* Update bucket function sanity checks

* Wire up scale config from yBucketScale

* Hide bucket controls for heatmap cells

* Fix splits

* Add test coverage

* Fix failing test

* Add basic util test coverage

* Fix tooltip for legacy in linear

* Fix y bucket option width to be consistent

* Hide tick alignment for explicit scale modes

* Clarify comment

* Make sure units are passed properly for linear

* Remove null assertion operator

* Clean up nested ternary

* Add type protection to scaleLog

* Remove repeated code for ySize calcs

* Remove ternary for scaleDistribution

* Add test coverage for YBucketScaleEditor

* Add isHeatmapSparse function to tooltip utils

* Create calculateYSizeDivisor util function

* Fix y axis min and max options and extend to log

* Add toLogBase test coverage

* Create applyExplicitMinMax function

* Add additional test coverage for scale editor

* Run i18n-extract

* Update eslint suppressions

---------

Co-authored-by: Drew Slobodnjak <60050885+drew08t@users.noreply.github.com>
2025-12-18 14:45:00 -08:00
Haris Rozajac
37c1e3fb02 Dashboard Schema v1beta1 to v2alpha1: Preserve string template variable datasource references in query variables (#115516)
* Dashboard migration: preserve legacy string datasource references

Fix v1beta1 → v2alpha1 conversion to handle legacy string datasource
references in QueryVariable, AdhocVariable, and GroupByVariable.

Previously, string datasource references (both template variables like
"$datasource" and direct names/UIDs like "prometheus") were being
dropped during conversion, causing variable chaining to break.

The frontend's DatasourceSrv.getInstanceSettings() already handles
string references by trying uid → name → id lookup at runtime, so we
preserve the string in the uid field and let the frontend resolve it.

* trigger frontend ci tests when dashboard migration code changes

* v1: if string convert to DS ref

* Update migration testdata to fix template variable datasource references

* update
2025-12-18 15:11:09 -07:00
Denis Vodopianov
39c562a911 Revert: chore: a drop-in replacement for FeatureToggles.IsEnabledGlobally in app settings (#115593)
* Revert "chore: a drop-in replacement for FeatureToggles.IsEnabledGlobally in app settings (#113449)"

This reverts commit 26ce2c09d7.

* Change FeatureToggles.IsEnabledGlobally deprecation message
2025-12-18 16:46:32 -05:00
312 changed files with 17095 additions and 3332 deletions

1
.github/CODEOWNERS vendored
View File

@@ -425,6 +425,7 @@ i18next.config.ts @grafana/grafana-frontend-platform
/public/locales/enterprise/i18next.config.ts @grafana/grafana-frontend-platform
/public/app/core/internationalization/ @grafana/grafana-frontend-platform
/e2e/ @grafana/grafana-frontend-platform
/e2e-playwright/alerting-suite/ @grafana/alerting-frontend
/e2e-playwright/cloud-plugins-suite/ @grafana/partner-datasources
/e2e-playwright/dashboard-new-layouts/ @grafana/dashboards-squad
/e2e-playwright/dashboard-cujs/ @grafana/dashboards-squad

View File

@@ -95,9 +95,11 @@ runs:
- 'nx.json'
- 'tsconfig.json'
- '.yarn/**'
- 'apps/dashboard/pkg/migration/**'
- '${{ inputs.self }}'
e2e:
- 'e2e/**'
- 'e2e-playwright/**'
- '.github/actions/setup-enterprise/**'
- '.github/actions/checkout/**'
- 'emails/**'

View File

@@ -157,7 +157,7 @@ require (
github.com/google/go-querystring v1.1.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/google/wire v0.7.0 // indirect
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 // indirect

View File

@@ -619,8 +619,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 h1:ZzG/gCclEit9w0QUfQt9GURcOycAIGcsQAhY1u0AEX0=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -4,7 +4,7 @@ go 1.25.5
require (
github.com/go-kit/log v0.2.1
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4
github.com/grafana/grafana-app-sdk v0.48.7
github.com/grafana/grafana-app-sdk/logging v0.48.7

View File

@@ -243,8 +243,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 h1:ZzG/gCclEit9w0QUfQt9GURcOycAIGcsQAhY1u0AEX0=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 h1:jSojuc7njleS3UOz223WDlXOinmuLAIPI0z2vtq8EgI=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4/go.mod h1:VahT+GtfQIM+o8ht2StR6J9g+Ef+C2Vokh5uuSmOD/4=
github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfUHB32u2ZMo=

View File

@@ -31,6 +31,10 @@ const (
maxLimit = 1000
Namespace = "grafana"
Subsystem = "alerting"
// LogQL field path for alert rule UID after JSON parsing.
// Loki flattens nested JSON fields with underscores: alert.labels.__alert_rule_uid__ -> alert_labels___alert_rule_uid__
lokiAlertRuleUIDField = "alert_labels___alert_rule_uid__"
)
var (
@@ -111,13 +115,13 @@ func buildQuery(query Query) (string, error) {
fmt.Sprintf(`%s=%q`, historian.LabelFrom, historian.LabelFromValue),
}
if query.RuleUID != nil {
selectors = append(selectors,
fmt.Sprintf(`%s=%q`, historian.LabelRuleUID, *query.RuleUID))
}
logql := fmt.Sprintf(`{%s} | json`, strings.Join(selectors, `,`))
// Add ruleUID filter as JSON line filter if specified.
if query.RuleUID != nil && *query.RuleUID != "" {
logql += fmt.Sprintf(` | %s = %q`, lokiAlertRuleUIDField, *query.RuleUID)
}
// Add receiver filter if specified.
if query.Receiver != nil && *query.Receiver != "" {
logql += fmt.Sprintf(` | receiver = %q`, *query.Receiver)
@@ -211,16 +215,13 @@ func parseLokiEntry(s lokiclient.Sample) (Entry, error) {
groupLabels = make(map[string]string)
}
alerts := make([]EntryAlert, len(lokiEntry.Alerts))
for i, a := range lokiEntry.Alerts {
alerts[i] = EntryAlert{
Status: a.Status,
Labels: a.Labels,
Annotations: a.Annotations,
StartsAt: a.StartsAt,
EndsAt: a.EndsAt,
}
}
alerts := []EntryAlert{{
Status: lokiEntry.Alert.Status,
Labels: lokiEntry.Alert.Labels,
Annotations: lokiEntry.Alert.Annotations,
StartsAt: lokiEntry.Alert.StartsAt,
EndsAt: lokiEntry.Alert.EndsAt,
}}
return Entry{
Timestamp: s.T,

View File

@@ -7,6 +7,7 @@ import (
"testing"
"time"
"github.com/grafana/alerting/models"
"github.com/grafana/alerting/notify/historian"
"github.com/grafana/alerting/notify/historian/lokiclient"
"github.com/grafana/grafana-app-sdk/logging"
@@ -133,9 +134,8 @@ func TestBuildQuery(t *testing.T) {
query: Query{
RuleUID: stringPtr("test-rule-uid"),
},
expected: fmt.Sprintf(`{%s=%q,%s=%q} | json`,
historian.LabelFrom, historian.LabelFromValue,
historian.LabelRuleUID, "test-rule-uid"),
expected: fmt.Sprintf(`{%s=%q} | json | alert_labels___alert_rule_uid__ = "test-rule-uid"`,
historian.LabelFrom, historian.LabelFromValue),
},
{
name: "query with receiver filter",
@@ -143,9 +143,8 @@ func TestBuildQuery(t *testing.T) {
RuleUID: stringPtr("test-rule-uid"),
Receiver: stringPtr("email-receiver"),
},
expected: fmt.Sprintf(`{%s=%q,%s=%q} | json | receiver = "email-receiver"`,
historian.LabelFrom, historian.LabelFromValue,
historian.LabelRuleUID, "test-rule-uid"),
expected: fmt.Sprintf(`{%s=%q} | json | alert_labels___alert_rule_uid__ = "test-rule-uid" | receiver = "email-receiver"`,
historian.LabelFrom, historian.LabelFromValue),
},
{
name: "query with status filter",
@@ -153,9 +152,8 @@ func TestBuildQuery(t *testing.T) {
RuleUID: stringPtr("test-rule-uid"),
Status: createStatusPtr(v0alpha1.CreateNotificationqueryRequestNotificationStatusFiring),
},
expected: fmt.Sprintf(`{%s=%q,%s=%q} | json | status = "firing"`,
historian.LabelFrom, historian.LabelFromValue,
historian.LabelRuleUID, "test-rule-uid"),
expected: fmt.Sprintf(`{%s=%q} | json | alert_labels___alert_rule_uid__ = "test-rule-uid" | status = "firing"`,
historian.LabelFrom, historian.LabelFromValue),
},
{
name: "query with success outcome filter",
@@ -163,9 +161,8 @@ func TestBuildQuery(t *testing.T) {
RuleUID: stringPtr("test-rule-uid"),
Outcome: outcomePtr(v0alpha1.CreateNotificationqueryRequestNotificationOutcomeSuccess),
},
expected: fmt.Sprintf(`{%s=%q,%s=%q} | json | error = ""`,
historian.LabelFrom, historian.LabelFromValue,
historian.LabelRuleUID, "test-rule-uid"),
expected: fmt.Sprintf(`{%s=%q} | json | alert_labels___alert_rule_uid__ = "test-rule-uid" | error = ""`,
historian.LabelFrom, historian.LabelFromValue),
},
{
name: "query with error outcome filter",
@@ -173,9 +170,8 @@ func TestBuildQuery(t *testing.T) {
RuleUID: stringPtr("test-rule-uid"),
Outcome: outcomePtr(v0alpha1.CreateNotificationqueryRequestNotificationOutcomeError),
},
expected: fmt.Sprintf(`{%s=%q,%s=%q} | json | error != ""`,
historian.LabelFrom, historian.LabelFromValue,
historian.LabelRuleUID, "test-rule-uid"),
expected: fmt.Sprintf(`{%s=%q} | json | alert_labels___alert_rule_uid__ = "test-rule-uid" | error != ""`,
historian.LabelFrom, historian.LabelFromValue),
},
{
name: "query with many filters",
@@ -185,9 +181,8 @@ func TestBuildQuery(t *testing.T) {
Status: createStatusPtr(v0alpha1.CreateNotificationqueryRequestNotificationStatusResolved),
Outcome: outcomePtr(v0alpha1.CreateNotificationqueryRequestNotificationOutcomeSuccess),
},
expected: fmt.Sprintf(`{%s=%q,%s=%q} | json | receiver = "email-receiver" | status = "resolved" | error = ""`,
historian.LabelFrom, historian.LabelFromValue,
historian.LabelRuleUID, "test-rule-uid"),
expected: fmt.Sprintf(`{%s=%q} | json | alert_labels___alert_rule_uid__ = "test-rule-uid" | receiver = "email-receiver" | status = "resolved" | error = ""`,
historian.LabelFrom, historian.LabelFromValue),
},
{
name: "query with group label matcher",
@@ -277,19 +272,19 @@ func TestParseLokiEntry(t *testing.T) {
GroupLabels: map[string]string{
"alertname": "test-alert",
},
Alerts: []historian.NotificationHistoryLokiEntryAlert{
{
Status: "firing",
Labels: map[string]string{
"severity": "critical",
},
Annotations: map[string]string{
"summary": "Test alert",
},
StartsAt: now,
EndsAt: now.Add(1 * time.Hour),
Alert: historian.NotificationHistoryLokiEntryAlert{
Status: "firing",
Labels: map[string]string{
"severity": "critical",
},
Annotations: map[string]string{
"summary": "Test alert",
},
StartsAt: now,
EndsAt: now.Add(1 * time.Hour),
},
AlertIndex: 0,
AlertCount: 1,
Retry: false,
Duration: 100,
PipelineTime: now,
@@ -335,7 +330,9 @@ func TestParseLokiEntry(t *testing.T) {
Error: "notification failed",
GroupKey: "key:thing",
GroupLabels: map[string]string{},
Alerts: []historian.NotificationHistoryLokiEntryAlert{},
Alert: historian.NotificationHistoryLokiEntryAlert{},
AlertIndex: 0,
AlertCount: 1,
PipelineTime: now,
}),
},
@@ -347,7 +344,7 @@ func TestParseLokiEntry(t *testing.T) {
Outcome: OutcomeError,
GroupKey: "key:thing",
GroupLabels: map[string]string{},
Alerts: []EntryAlert{},
Alerts: []EntryAlert{{}},
Error: stringPtr("notification failed"),
PipelineTime: now,
},
@@ -365,7 +362,7 @@ func TestParseLokiEntry(t *testing.T) {
Status: Status("firing"),
Outcome: OutcomeSuccess,
GroupLabels: map[string]string{},
Alerts: []EntryAlert{},
Alerts: []EntryAlert{{}},
PipelineTime: now,
},
},
@@ -448,7 +445,9 @@ func TestLokiReader_RunQuery(t *testing.T) {
Receiver: "receiver-1",
Status: "firing",
GroupLabels: map[string]string{},
Alerts: []historian.NotificationHistoryLokiEntryAlert{},
Alert: historian.NotificationHistoryLokiEntryAlert{},
AlertIndex: 0,
AlertCount: 1,
PipelineTime: now,
}),
},
@@ -459,7 +458,9 @@ func TestLokiReader_RunQuery(t *testing.T) {
Receiver: "receiver-3",
Status: "firing",
GroupLabels: map[string]string{},
Alerts: []historian.NotificationHistoryLokiEntryAlert{},
Alert: historian.NotificationHistoryLokiEntryAlert{},
AlertIndex: 0,
AlertCount: 1,
PipelineTime: now,
}),
},
@@ -474,7 +475,9 @@ func TestLokiReader_RunQuery(t *testing.T) {
Receiver: "receiver-2",
Status: "firing",
GroupLabels: map[string]string{},
Alerts: []historian.NotificationHistoryLokiEntryAlert{},
Alert: historian.NotificationHistoryLokiEntryAlert{},
AlertIndex: 0,
AlertCount: 1,
PipelineTime: now,
}),
},
@@ -546,19 +549,19 @@ func createMockLokiResponse(timestamp time.Time) lokiclient.QueryRes {
GroupLabels: map[string]string{
"alertname": "test-alert",
},
Alerts: []historian.NotificationHistoryLokiEntryAlert{
{
Status: "firing",
Labels: map[string]string{
"severity": "critical",
},
Annotations: map[string]string{
"summary": "Test alert",
},
StartsAt: timestamp,
EndsAt: timestamp.Add(1 * time.Hour),
Alert: historian.NotificationHistoryLokiEntryAlert{
Status: "firing",
Labels: map[string]string{
"severity": "critical",
},
Annotations: map[string]string{
"summary": "Test alert",
},
StartsAt: timestamp,
EndsAt: timestamp.Add(1 * time.Hour),
},
AlertIndex: 0,
AlertCount: 1,
Retry: false,
Duration: 100,
PipelineTime: timestamp,
@@ -587,10 +590,19 @@ func createLokiEntryJSONWithNilLabels(t *testing.T, timestamp time.Time) string
"status": "firing",
"error": "",
"groupLabels": null,
"alerts": [],
"alert": {},
"alertIndex": 0,
"alertCount": 1,
"retry": false,
"duration": 0,
"pipelineTime": "%s"
}`, timestamp.Format(time.RFC3339Nano))
return jsonStr
}
func TestRuleUIDLabelConstant(t *testing.T) {
// Verify that models.RuleUIDLabel has the expected value.
// If this changes in the alerting module, our LogQL field path constant will be incorrect
// and filtering for a single alert rule by its UID will break.
assert.Equal(t, "__alert_rule_uid__", models.RuleUIDLabel)
}

View File

@@ -0,0 +1,166 @@
{
"kind": "DashboardWithAccessInfo",
"apiVersion": "dashboard.grafana.app/v1beta1",
"metadata": {
"name": "groupby-test"
},
"spec": {
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"links": [],
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"barWidthFactor": 0.6,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"showValues": false,
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": 0
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 0
},
"id": 2,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"hideZeros": false,
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "12.4.0-pre",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"editorMode": "code",
"expr": "sum(counters_requests)",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "works with group by var",
"type": "timeseries"
}
],
"preload": false,
"schemaVersion": 42,
"tags": [],
"templating": {
"list": [
{
"current": {
"text": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
],
"value": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
]
},
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"name": "Group by",
"type": "groupby"
}
]
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "browser",
"title": "groupby test",
"weekStart": ""
}
}

View File

@@ -743,7 +743,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": true,
"label": "cluster",
@@ -764,7 +766,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": false,
"label": "namespace",

View File

@@ -961,8 +961,12 @@
"hide": "dontHide",
"refresh": "onDashboardLoad",
"skipUrlSync": false,
"datasource": {
"type": "",
"uid": "$datasource"
},
"query": {
"kind": "prometheus",
"kind": "",
"spec": {
"__legacyStringValue": "label_values(up, job)"
}
@@ -988,8 +992,12 @@
"hide": "dontHide",
"refresh": "onDashboardLoad",
"skipUrlSync": false,
"datasource": {
"type": "",
"uid": "$datasource"
},
"query": {
"kind": "prometheus",
"kind": "",
"spec": {
"__legacyStringValue": "label_values(up{job=~\"$cluster\"}, instance)"
}

View File

@@ -978,8 +978,11 @@
"skipUrlSync": false,
"query": {
"kind": "DataQuery",
"group": "prometheus",
"group": "",
"version": "v0",
"datasource": {
"name": "$datasource"
},
"spec": {
"__legacyStringValue": "label_values(up, job)"
}
@@ -1007,8 +1010,11 @@
"skipUrlSync": false,
"query": {
"kind": "DataQuery",
"group": "prometheus",
"group": "",
"version": "v0",
"datasource": {
"name": "$datasource"
},
"spec": {
"__legacyStringValue": "label_values(up{job=~\"$cluster\"}, instance)"
}

View File

@@ -0,0 +1,172 @@
{
"kind": "DashboardWithAccessInfo",
"apiVersion": "dashboard.grafana.app/v0alpha1",
"metadata": {
"name": "groupby-test"
},
"spec": {
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"links": [],
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"barWidthFactor": 0.6,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"showValues": false,
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": 0
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 8,
"w": 12,
"x": 12,
"y": 0
},
"id": 2,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"hideZeros": false,
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "12.4.0-pre",
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"editorMode": "code",
"expr": "sum(counters_requests)",
"legendFormat": "__auto",
"range": true,
"refId": "A"
}
],
"title": "works with group by var",
"type": "timeseries"
}
],
"preload": false,
"schemaVersion": 42,
"tags": [],
"templating": {
"list": [
{
"current": {
"text": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
],
"value": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
]
},
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"name": "Group by",
"type": "groupby"
}
]
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "browser",
"title": "groupby test",
"weekStart": ""
},
"status": {
"conversion": {
"failed": false,
"storedVersion": "v1beta1"
}
}
}

View File

@@ -0,0 +1,229 @@
{
"kind": "DashboardWithAccessInfo",
"apiVersion": "dashboard.grafana.app/v2alpha1",
"metadata": {
"name": "groupby-test"
},
"spec": {
"annotations": [
{
"kind": "AnnotationQuery",
"spec": {
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"query": {
"kind": "grafana",
"spec": {}
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"builtIn": true,
"legacyOptions": {
"type": "dashboard"
}
}
}
],
"cursorSync": "Off",
"editable": true,
"elements": {
"panel-2": {
"kind": "Panel",
"spec": {
"id": 2,
"title": "works with group by var",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "prometheus",
"spec": {
"editorMode": "code",
"expr": "sum(counters_requests)",
"legendFormat": "__auto",
"range": true
}
},
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"refId": "A",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "timeseries",
"spec": {
"pluginVersion": "12.4.0-pre",
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"hideZeros": false,
"mode": "single",
"sort": "none"
}
},
"fieldConfig": {
"defaults": {
"thresholds": {
"mode": "absolute",
"steps": [
{
"value": 0,
"color": "green"
},
{
"value": 80,
"color": "red"
}
]
},
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"barWidthFactor": 0.6,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"showValues": false,
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
}
},
"overrides": []
}
}
}
}
}
},
"layout": {
"kind": "GridLayout",
"spec": {
"items": [
{
"kind": "GridLayoutItem",
"spec": {
"x": 12,
"y": 0,
"width": 12,
"height": 8,
"element": {
"kind": "ElementReference",
"name": "panel-2"
}
}
}
]
}
},
"links": [],
"liveNow": false,
"preload": false,
"tags": [],
"timeSettings": {
"timezone": "browser",
"from": "now-6h",
"to": "now",
"autoRefresh": "",
"autoRefreshIntervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"hideTimepicker": false,
"fiscalYearStartMonth": 0
},
"title": "groupby test",
"variables": [
{
"kind": "GroupByVariable",
"spec": {
"name": "Group by",
"datasource": {
"type": "prometheus",
"uid": "test-uid"
},
"current": {
"text": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
],
"value": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
]
},
"options": [],
"multi": true,
"hide": "dontHide",
"skipUrlSync": false
}
}
]
},
"status": {
"conversion": {
"failed": false,
"storedVersion": "v1beta1"
}
}
}

View File

@@ -0,0 +1,232 @@
{
"kind": "DashboardWithAccessInfo",
"apiVersion": "dashboard.grafana.app/v2beta1",
"metadata": {
"name": "groupby-test"
},
"spec": {
"annotations": [
{
"kind": "AnnotationQuery",
"spec": {
"query": {
"kind": "DataQuery",
"group": "grafana",
"version": "v0",
"datasource": {
"name": "-- Grafana --"
},
"spec": {}
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations \u0026 Alerts",
"builtIn": true,
"legacyOptions": {
"type": "dashboard"
}
}
}
],
"cursorSync": "Off",
"editable": true,
"elements": {
"panel-2": {
"kind": "Panel",
"spec": {
"id": 2,
"title": "works with group by var",
"description": "",
"links": [],
"data": {
"kind": "QueryGroup",
"spec": {
"queries": [
{
"kind": "PanelQuery",
"spec": {
"query": {
"kind": "DataQuery",
"group": "prometheus",
"version": "v0",
"datasource": {
"name": "test-uid"
},
"spec": {
"editorMode": "code",
"expr": "sum(counters_requests)",
"legendFormat": "__auto",
"range": true
}
},
"refId": "A",
"hidden": false
}
}
],
"transformations": [],
"queryOptions": {}
}
},
"vizConfig": {
"kind": "VizConfig",
"group": "timeseries",
"version": "12.4.0-pre",
"spec": {
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"hideZeros": false,
"mode": "single",
"sort": "none"
}
},
"fieldConfig": {
"defaults": {
"thresholds": {
"mode": "absolute",
"steps": [
{
"value": 0,
"color": "green"
},
{
"value": 80,
"color": "red"
}
]
},
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"barWidthFactor": 0.6,
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "auto",
"showValues": false,
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
}
},
"overrides": []
}
}
}
}
}
},
"layout": {
"kind": "GridLayout",
"spec": {
"items": [
{
"kind": "GridLayoutItem",
"spec": {
"x": 12,
"y": 0,
"width": 12,
"height": 8,
"element": {
"kind": "ElementReference",
"name": "panel-2"
}
}
}
]
}
},
"links": [],
"liveNow": false,
"preload": false,
"tags": [],
"timeSettings": {
"timezone": "browser",
"from": "now-6h",
"to": "now",
"autoRefresh": "",
"autoRefreshIntervals": [
"5s",
"10s",
"30s",
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
],
"hideTimepicker": false,
"fiscalYearStartMonth": 0
},
"title": "groupby test",
"variables": [
{
"kind": "GroupByVariable",
"group": "prometheus",
"datasource": {
"name": "test-uid"
},
"spec": {
"name": "Group by",
"current": {
"text": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
],
"value": [
"a_legacy_label",
"app",
"exported_instance",
"exported_job"
]
},
"options": [],
"multi": true,
"hide": "dontHide",
"skipUrlSync": false
}
}
]
},
"status": {
"conversion": {
"failed": false,
"storedVersion": "v1beta1"
}
}
}

View File

@@ -2,6 +2,7 @@ package conversion
import (
"context"
"strings"
"k8s.io/apimachinery/pkg/conversion"
"k8s.io/apiserver/pkg/endpoints/request"
@@ -79,5 +80,57 @@ func ConvertDashboard_V0_to_V1beta1(in *dashv0.Dashboard, out *dashv1.Dashboard,
return schemaversion.NewMigrationError(err.Error(), schemaversion.GetSchemaVersion(in.Spec.Object), schemaversion.LATEST_VERSION, "Convert_V0_to_V1")
}
// Normalize template variable datasources from string to object format
// This handles legacy dashboards where query variables have datasource: "$datasource" (string)
// instead of datasource: { uid: "$datasource" } (object)
// our migration pipeline in v36 doesn't address because this was not addressed historically
// in DashboardMigrator - see public/app/features/dashboard/state/DashboardMigrator.ts#L607
// Which means that we have schemaVersion: 42 dashboards where datasource variable references are still strings
normalizeTemplateVariableDatasources(out.Spec.Object)
return nil
}
// normalizeTemplateVariableDatasources converts template variable string datasources to object format.
// Legacy dashboards may have query variables with datasource: "$datasource" (string).
// This normalizes them to datasource: { uid: "$datasource" } for consistent V1→V2 conversion.
func normalizeTemplateVariableDatasources(dashboard map[string]interface{}) {
templating, ok := dashboard["templating"].(map[string]interface{})
if !ok {
return
}
list, ok := templating["list"].([]interface{})
if !ok {
return
}
for _, variable := range list {
varMap, ok := variable.(map[string]interface{})
if !ok {
continue
}
varType, _ := varMap["type"].(string)
if varType != "query" {
continue
}
ds := varMap["datasource"]
if dsStr, ok := ds.(string); ok && isTemplateVariableRef(dsStr) {
// Convert string template variable reference to object format
varMap["datasource"] = map[string]interface{}{
"uid": dsStr,
}
}
}
}
// isTemplateVariableRef checks if a string is a Grafana template variable reference.
// Template variables can be in the form: $varname or ${varname}
func isTemplateVariableRef(s string) bool {
if s == "" {
return false
}
return strings.HasPrefix(s, "$") || strings.HasPrefix(s, "${")
}

View File

@@ -1185,6 +1185,10 @@ func buildQueryVariable(ctx context.Context, varMap map[string]interface{}, comm
// If no UID and no type, use default
datasourceType = getDefaultDatasourceType(ctx, dsIndexProvider)
}
} else if dsStr, ok := datasource.(string); ok && isTemplateVariable(dsStr) {
// Handle datasource variable reference (e.g., "$datasource")
// Only process template variables - other string values are not supported in V2 format
datasourceUID = dsStr
} else {
datasourceType = getDefaultDatasourceType(ctx, dsIndexProvider)
}
@@ -1532,6 +1536,10 @@ func buildAdhocVariable(ctx context.Context, varMap map[string]interface{}, comm
// If no UID and no type, use default
datasourceType = getDefaultDatasourceType(ctx, dsIndexProvider)
}
} else if dsStr, ok := datasource.(string); ok && isTemplateVariable(dsStr) {
// Handle datasource variable reference (e.g., "$datasource")
// Only process template variables - other string values are not supported in V2 format
datasourceUID = dsStr
} else {
datasourceType = getDefaultDatasourceType(ctx, dsIndexProvider)
}
@@ -1709,6 +1717,10 @@ func buildGroupByVariable(ctx context.Context, varMap map[string]interface{}, co
// Resolve Grafana datasource UID when type is "datasource" and UID is empty
datasourceUID = resolveGrafanaDatasourceUID(datasourceType, datasourceUID)
} else if dsStr, ok := datasource.(string); ok && isTemplateVariable(dsStr) {
// Handle datasource variable reference (e.g., "$datasource")
// Only process template variables - other string values are not supported in V2 format
datasourceUID = dsStr
} else {
datasourceType = getDefaultDatasourceType(ctx, dsIndexProvider)
}
@@ -1722,7 +1734,9 @@ func buildGroupByVariable(ctx context.Context, varMap map[string]interface{}, co
Hide: commonProps.Hide,
SkipUrlSync: commonProps.SkipUrlSync,
Current: buildVariableCurrent(varMap["current"]),
Multi: getBoolField(varMap, "multi", false),
// We set it to true by default because GroupByVariable
// constructor defaults to multi: true
Multi: getBoolField(varMap, "multi", true),
},
}

View File

@@ -1760,22 +1760,6 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active gateways",
"type": "radialbar"
},
@@ -1859,22 +1843,6 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active pods",
"type": "radialbar"
},

View File

@@ -654,7 +654,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": true,
"label": "cluster",
@@ -677,7 +679,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": false,
"label": "namespace",

View File

@@ -737,7 +737,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": true,
"label": "cluster",
@@ -758,7 +760,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": false,
"label": "namespace",

View File

@@ -717,7 +717,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": true,
"label": "cluster",
@@ -739,7 +741,9 @@
"text": "prod",
"value": "prod"
},
"datasource": "$datasource",
"datasource": {
"uid": "$datasource"
},
"hide": 0,
"includeAll": false,
"label": "namespace",

View File

@@ -223,7 +223,7 @@ require (
github.com/googleapis/enterprise-certificate-proxy v0.3.6 // indirect
github.com/googleapis/gax-go/v2 v2.15.0 // indirect
github.com/gorilla/mux v1.8.1 // indirect
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect

View File

@@ -827,8 +827,8 @@ github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 h1:ZzG/gCclEit9w0QUfQt9GURcOycAIGcsQAhY1u0AEX0=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -90,7 +90,7 @@ require (
github.com/google/gnostic-models v0.7.1 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 // indirect
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect

View File

@@ -213,8 +213,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 h1:ZzG/gCclEit9w0QUfQt9GURcOycAIGcsQAhY1u0AEX0=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -0,0 +1,22 @@
package auth
import (
"context"
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana/pkg/apimachinery/identity"
)
//go:generate mockery --name AccessChecker --structname MockAccessChecker --inpackage --filename access_checker_mock.go --with-expecter
// AccessChecker provides access control checks with optional role-based fallback.
type AccessChecker interface {
// Check performs an access check and returns nil if allowed, or an appropriate
// API error if denied. If req.Namespace is empty, it will be filled from the
// identity's namespace.
Check(ctx context.Context, req authlib.CheckRequest, folder string) error
// WithFallbackRole returns an AccessChecker configured with the specified fallback role.
// Whether the fallback is actually applied depends on the implementation.
WithFallbackRole(role identity.RoleType) AccessChecker
}

View File

@@ -0,0 +1,135 @@
// Code generated by mockery v2.53.4. DO NOT EDIT.
package auth
import (
context "context"
identity "github.com/grafana/grafana/pkg/apimachinery/identity"
mock "github.com/stretchr/testify/mock"
types "github.com/grafana/authlib/types"
)
// MockAccessChecker is an autogenerated mock type for the AccessChecker type
type MockAccessChecker struct {
mock.Mock
}
type MockAccessChecker_Expecter struct {
mock *mock.Mock
}
func (_m *MockAccessChecker) EXPECT() *MockAccessChecker_Expecter {
return &MockAccessChecker_Expecter{mock: &_m.Mock}
}
// Check provides a mock function with given fields: ctx, req, folder
func (_m *MockAccessChecker) Check(ctx context.Context, req types.CheckRequest, folder string) error {
ret := _m.Called(ctx, req, folder)
if len(ret) == 0 {
panic("no return value specified for Check")
}
var r0 error
if rf, ok := ret.Get(0).(func(context.Context, types.CheckRequest, string) error); ok {
r0 = rf(ctx, req, folder)
} else {
r0 = ret.Error(0)
}
return r0
}
// MockAccessChecker_Check_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Check'
type MockAccessChecker_Check_Call struct {
*mock.Call
}
// Check is a helper method to define mock.On call
// - ctx context.Context
// - req types.CheckRequest
// - folder string
func (_e *MockAccessChecker_Expecter) Check(ctx interface{}, req interface{}, folder interface{}) *MockAccessChecker_Check_Call {
return &MockAccessChecker_Check_Call{Call: _e.mock.On("Check", ctx, req, folder)}
}
func (_c *MockAccessChecker_Check_Call) Run(run func(ctx context.Context, req types.CheckRequest, folder string)) *MockAccessChecker_Check_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(context.Context), args[1].(types.CheckRequest), args[2].(string))
})
return _c
}
func (_c *MockAccessChecker_Check_Call) Return(_a0 error) *MockAccessChecker_Check_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockAccessChecker_Check_Call) RunAndReturn(run func(context.Context, types.CheckRequest, string) error) *MockAccessChecker_Check_Call {
_c.Call.Return(run)
return _c
}
// WithFallbackRole provides a mock function with given fields: role
func (_m *MockAccessChecker) WithFallbackRole(role identity.RoleType) AccessChecker {
ret := _m.Called(role)
if len(ret) == 0 {
panic("no return value specified for WithFallbackRole")
}
var r0 AccessChecker
if rf, ok := ret.Get(0).(func(identity.RoleType) AccessChecker); ok {
r0 = rf(role)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(AccessChecker)
}
}
return r0
}
// MockAccessChecker_WithFallbackRole_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'WithFallbackRole'
type MockAccessChecker_WithFallbackRole_Call struct {
*mock.Call
}
// WithFallbackRole is a helper method to define mock.On call
// - role identity.RoleType
func (_e *MockAccessChecker_Expecter) WithFallbackRole(role interface{}) *MockAccessChecker_WithFallbackRole_Call {
return &MockAccessChecker_WithFallbackRole_Call{Call: _e.mock.On("WithFallbackRole", role)}
}
func (_c *MockAccessChecker_WithFallbackRole_Call) Run(run func(role identity.RoleType)) *MockAccessChecker_WithFallbackRole_Call {
_c.Call.Run(func(args mock.Arguments) {
run(args[0].(identity.RoleType))
})
return _c
}
func (_c *MockAccessChecker_WithFallbackRole_Call) Return(_a0 AccessChecker) *MockAccessChecker_WithFallbackRole_Call {
_c.Call.Return(_a0)
return _c
}
func (_c *MockAccessChecker_WithFallbackRole_Call) RunAndReturn(run func(identity.RoleType) AccessChecker) *MockAccessChecker_WithFallbackRole_Call {
_c.Call.Return(run)
return _c
}
// NewMockAccessChecker creates a new instance of MockAccessChecker. It also registers a testing interface on the mock and a cleanup function to assert the mocks expectations.
// The first argument is typically a *testing.T value.
func NewMockAccessChecker(t interface {
mock.TestingT
Cleanup(func())
}) *MockAccessChecker {
mock := &MockAccessChecker{}
mock.Mock.Test(t)
t.Cleanup(func() { mock.AssertExpectations(t) })
return mock
}

View File

@@ -1,3 +1,4 @@
// Package auth provides authentication utilities for the provisioning API.
package auth
import (
@@ -6,7 +7,6 @@ import (
"net/http"
"github.com/grafana/authlib/authn"
"github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
utilnet "k8s.io/apimachinery/pkg/util/net"
)
@@ -15,29 +15,61 @@ type tokenExchanger interface {
Exchange(ctx context.Context, req authn.TokenExchangeRequest) (*authn.TokenExchangeResponse, error)
}
// RoundTripper injects an exchanged access token for the provisioning API into outgoing requests.
type RoundTripper struct {
client tokenExchanger
transport http.RoundTripper
audience string
// RoundTripperOption configures optional behavior for the RoundTripper.
type RoundTripperOption func(*RoundTripper)
// ExtraAudience appends an additional audience to the token exchange request.
//
// This is primarily used by operators connecting to the multitenant aggregator,
// where the token must include both the target API server's audience (e.g., dashboards,
// folders) and the provisioning group audience. The provisioning group audience is
// required so that the token passes the enforceManagerProperties check, which prevents
// unauthorized updates to provisioned resources.
//
// Example:
//
// authrt.NewRoundTripper(client, rt, "dashboards.grafana.app", authrt.ExtraAudience("provisioning.grafana.app"))
func ExtraAudience(audience string) RoundTripperOption {
return func(rt *RoundTripper) {
rt.extraAudience = audience
}
}
// NewRoundTripper constructs a RoundTripper that exchanges the provided token per request
// and forwards the request to the provided base transport.
func NewRoundTripper(tokenExchangeClient tokenExchanger, base http.RoundTripper, audience string) *RoundTripper {
return &RoundTripper{
// RoundTripper is an http.RoundTripper that performs token exchange before each request.
// It exchanges the service's credentials for an access token scoped to the configured
// audience(s), then injects that token into the outgoing request's X-Access-Token header.
type RoundTripper struct {
client tokenExchanger
transport http.RoundTripper
audience string
extraAudience string
}
// NewRoundTripper creates a RoundTripper that exchanges tokens for each outgoing request.
//
// Parameters:
// - tokenExchangeClient: the client used to exchange credentials for access tokens
// - base: the underlying transport to delegate requests to after token injection
// - audience: the primary audience for the token (typically the target API server's group)
// - opts: optional configuration (e.g., ExtraAudience to include additional audiences)
func NewRoundTripper(tokenExchangeClient tokenExchanger, base http.RoundTripper, audience string, opts ...RoundTripperOption) *RoundTripper {
rt := &RoundTripper{
client: tokenExchangeClient,
transport: base,
audience: audience,
}
for _, opt := range opts {
opt(rt)
}
return rt
}
// RoundTrip exchanges credentials for an access token and injects it into the request.
// The token is scoped to all configured audiences and the wildcard namespace ("*").
func (t *RoundTripper) RoundTrip(req *http.Request) (*http.Response, error) {
// when we want to write resources with the provisioning API, the audience needs to include provisioning
// so that it passes the check in enforceManagerProperties, which prevents others from updating provisioned resources
audiences := []string{t.audience}
if t.audience != v0alpha1.GROUP {
audiences = append(audiences, v0alpha1.GROUP)
if t.extraAudience != "" && t.extraAudience != t.audience {
audiences = append(audiences, t.extraAudience)
}
tokenResponse, err := t.client.Exchange(req.Context(), authn.TokenExchangeRequest{

View File

@@ -71,16 +71,29 @@ func TestRoundTripper_AudiencesAndNamespace(t *testing.T) {
tests := []struct {
name string
audience string
extraAudience string
wantAudiences []string
}{
{
name: "adds group when custom audience",
name: "uses only provided audience by default",
audience: "example-audience",
wantAudiences: []string{"example-audience"},
},
{
name: "uses only group audience by default",
audience: v0alpha1.GROUP,
wantAudiences: []string{v0alpha1.GROUP},
},
{
name: "extra audience adds provisioning group",
audience: "example-audience",
extraAudience: v0alpha1.GROUP,
wantAudiences: []string{"example-audience", v0alpha1.GROUP},
},
{
name: "no duplicate when group audience",
name: "extra audience no duplicate when same as primary",
audience: v0alpha1.GROUP,
extraAudience: v0alpha1.GROUP,
wantAudiences: []string{v0alpha1.GROUP},
},
}
@@ -88,11 +101,15 @@ func TestRoundTripper_AudiencesAndNamespace(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
fx := &fakeExchanger{resp: &authn.TokenExchangeResponse{Token: "abc123"}}
var opts []RoundTripperOption
if tt.extraAudience != "" {
opts = append(opts, ExtraAudience(tt.extraAudience))
}
tr := NewRoundTripper(fx, roundTripperFunc(func(_ *http.Request) (*http.Response, error) {
rr := httptest.NewRecorder()
rr.WriteHeader(http.StatusOK)
return rr.Result(), nil
}), tt.audience)
}), tt.audience, opts...)
req, _ := http.NewRequestWithContext(context.Background(), http.MethodGet, "http://example", nil)
resp, err := tr.RoundTrip(req)

View File

@@ -0,0 +1,153 @@
package auth
import (
"context"
"fmt"
"strings"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime/schema"
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana-app-sdk/logging"
"github.com/grafana/grafana/pkg/apimachinery/identity"
)
// sessionAccessChecker implements AccessChecker using Grafana session identity.
type sessionAccessChecker struct {
	inner        authlib.AccessChecker // delegate that performs the actual access check
	fallbackRole identity.RoleType     // org role that may grant access when the inner check denies or errors; empty disables the fallback
}
// NewSessionAccessChecker returns an AccessChecker whose identity is taken from
// the Grafana session in the request context (identity.GetRequester). No role
// fallback is configured; attach one afterwards with WithFallbackRole if needed
// for backwards compatibility.
func NewSessionAccessChecker(inner authlib.AccessChecker) AccessChecker {
	// fallbackRole intentionally stays at its zero value (""), meaning "no fallback".
	return &sessionAccessChecker{inner: inner}
}
// WithFallbackRole returns a new AccessChecker identical to this one except that
// the given role is used as the fallback. The receiver itself is not modified.
func (c *sessionAccessChecker) WithFallbackRole(role identity.RoleType) AccessChecker {
	clone := *c
	clone.fallbackRole = role
	return &clone
}
// Check performs an access check with optional role-based fallback.
// Returns nil if access is allowed, or an appropriate API error if denied:
//   - Unauthorized when no requester can be extracted from the context
//   - Forbidden when the inner checker denies (or errors) and the configured
//     fallback role, if any, is not included in the requester's org role
//
// Note: when a fallback role is configured, an error from the inner checker is
// treated like a denial — the role fallback may still grant access.
func (c *sessionAccessChecker) Check(ctx context.Context, req authlib.CheckRequest, folder string) error {
	logger := logging.FromContext(ctx).With("logger", "sessionAccessChecker")
	// Get identity from Grafana session
	requester, err := identity.GetRequester(ctx)
	if err != nil {
		logger.Debug("failed to get requester",
			"resource", req.Resource,
			"verb", req.Verb,
			"error", err.Error(),
		)
		return apierrors.NewUnauthorized(fmt.Sprintf("failed to get requester: %v", err))
	}
	logger.Debug("checking access",
		"identityType", requester.GetIdentityType(),
		"orgRole", requester.GetOrgRole(),
		"namespace", requester.GetNamespace(),
		"resource", req.Resource,
		"verb", req.Verb,
		"group", req.Group,
		"name", req.Name,
		"folder", folder,
		"fallbackRole", c.fallbackRole,
	)
	// Fill in namespace from identity if not provided by the caller, so the
	// inner checker always sees a concrete namespace.
	if req.Namespace == "" {
		req.Namespace = requester.GetNamespace()
	}
	// Perform the access check
	rsp, err := c.inner.Check(ctx, requester, req, folder)
	// Build the GroupResource for error messages
	gr := schema.GroupResource{Group: req.Group, Resource: req.Resource}
	// No fallback configured, return result directly: errors and denials both
	// surface as Forbidden, with the inner error wrapped when present.
	if c.fallbackRole == "" {
		if err != nil {
			logger.Debug("access check error (no fallback)",
				"resource", req.Resource,
				"verb", req.Verb,
				"error", err.Error(),
			)
			return apierrors.NewForbidden(gr, req.Name, fmt.Errorf("%s.%s is forbidden: %w", req.Resource, req.Group, err))
		}
		if !rsp.Allowed {
			logger.Debug("access check denied (no fallback)",
				"resource", req.Resource,
				"verb", req.Verb,
				"group", req.Group,
				"allowed", rsp.Allowed,
			)
			return apierrors.NewForbidden(gr, req.Name, fmt.Errorf("permission denied"))
		}
		logger.Debug("access allowed",
			"resource", req.Resource,
			"verb", req.Verb,
		)
		return nil
	}
	// Fallback is configured - apply fallback logic. An error from the inner
	// checker does not immediately deny: a sufficient org role still allows.
	if err != nil {
		if requester.GetOrgRole().Includes(c.fallbackRole) {
			logger.Debug("access allowed via role fallback (after error)",
				"resource", req.Resource,
				"verb", req.Verb,
				"fallbackRole", c.fallbackRole,
				"orgRole", requester.GetOrgRole(),
			)
			return nil // Fallback succeeded
		}
		logger.Debug("access check error (fallback failed)",
			"resource", req.Resource,
			"verb", req.Verb,
			"error", err.Error(),
			"fallbackRole", c.fallbackRole,
			"orgRole", requester.GetOrgRole(),
		)
		return apierrors.NewForbidden(gr, req.Name, fmt.Errorf("%s.%s is forbidden: %w", req.Resource, req.Group, err))
	}
	if rsp.Allowed {
		logger.Debug("access allowed",
			"resource", req.Resource,
			"verb", req.Verb,
		)
		return nil
	}
	// Denied by the checker: fall back to role for backwards compatibility.
	if requester.GetOrgRole().Includes(c.fallbackRole) {
		logger.Debug("access allowed via role fallback",
			"resource", req.Resource,
			"verb", req.Verb,
			"fallbackRole", c.fallbackRole,
			"orgRole", requester.GetOrgRole(),
		)
		return nil // Fallback succeeded
	}
	logger.Debug("access denied (fallback role not met)",
		"resource", req.Resource,
		"verb", req.Verb,
		"group", req.Group,
		"fallbackRole", c.fallbackRole,
		"orgRole", requester.GetOrgRole(),
	)
	return apierrors.NewForbidden(gr, req.Name, fmt.Errorf("%s role is required", strings.ToLower(string(c.fallbackRole))))
}

View File

@@ -0,0 +1,244 @@
package auth
import (
"context"
"errors"
"testing"
apierrors "k8s.io/apimachinery/pkg/api/errors"
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/services/user"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// mockRequester implements identity.Requester for testing.
// It embeds identity.Requester so only the methods these tests exercise need
// explicit implementations; any other method call would hit the nil embed.
type mockRequester struct {
	identity.Requester
	orgRole      identity.RoleType    // returned by GetOrgRole
	identityType authlib.IdentityType // returned by GetIdentityType
	namespace    string               // returned by GetNamespace
}

// GetOrgRole returns the configured org role.
func (m *mockRequester) GetOrgRole() identity.RoleType {
	return m.orgRole
}

// GetIdentityType returns the configured identity type.
func (m *mockRequester) GetIdentityType() authlib.IdentityType {
	return m.identityType
}

// GetNamespace returns the configured namespace.
func (m *mockRequester) GetNamespace() string {
	return m.namespace
}
// TestSessionAccessChecker_Check covers the allow/deny matrix of the session
// checker: inner-checker outcomes (allow, deny, error) crossed with fallback
// role configuration and the requester's org role.
func TestSessionAccessChecker_Check(t *testing.T) {
	ctx := context.Background()
	// One representative request reused by every case.
	req := authlib.CheckRequest{
		Verb:      "get",
		Group:     "provisioning.grafana.app",
		Resource:  "repositories",
		Name:      "test-repo",
		Namespace: "default",
	}
	tests := []struct {
		name          string                // subtest name
		fallbackRole  identity.RoleType     // fallback role to configure; empty means no fallback
		innerResponse authlib.CheckResponse // canned response from the inner checker
		innerErr      error                 // canned error from the inner checker
		requester     *mockRequester        // session identity placed in the context
		expectAllow   bool                  // whether Check should return nil
	}{
		{
			name:          "allowed by checker",
			fallbackRole:  identity.RoleAdmin,
			innerResponse: authlib.CheckResponse{Allowed: true},
			requester:     &mockRequester{orgRole: identity.RoleViewer, identityType: authlib.TypeUser},
			expectAllow:   true,
		},
		{
			name:          "denied by checker, fallback to admin role succeeds",
			fallbackRole:  identity.RoleAdmin,
			innerResponse: authlib.CheckResponse{Allowed: false},
			requester:     &mockRequester{orgRole: identity.RoleAdmin, identityType: authlib.TypeUser},
			expectAllow:   true,
		},
		{
			name:          "denied by checker, fallback to admin role fails for viewer",
			fallbackRole:  identity.RoleAdmin,
			innerResponse: authlib.CheckResponse{Allowed: false},
			requester:     &mockRequester{orgRole: identity.RoleViewer, identityType: authlib.TypeUser},
			expectAllow:   false,
		},
		{
			name:         "error from checker, fallback to admin role succeeds",
			fallbackRole: identity.RoleAdmin,
			innerErr:     errors.New("access check failed"),
			requester:    &mockRequester{orgRole: identity.RoleAdmin, identityType: authlib.TypeUser},
			expectAllow:  true,
		},
		{
			name:         "error from checker, fallback fails for viewer",
			fallbackRole: identity.RoleAdmin,
			innerErr:     errors.New("access check failed"),
			requester:    &mockRequester{orgRole: identity.RoleViewer, identityType: authlib.TypeUser},
			expectAllow:  false,
		},
		{
			name:          "denied, editor fallback succeeds for editor",
			fallbackRole:  identity.RoleEditor,
			innerResponse: authlib.CheckResponse{Allowed: false},
			requester:     &mockRequester{orgRole: identity.RoleEditor, identityType: authlib.TypeUser},
			expectAllow:   true,
		},
		{
			name:          "denied, editor fallback fails for viewer",
			fallbackRole:  identity.RoleEditor,
			innerResponse: authlib.CheckResponse{Allowed: false},
			requester:     &mockRequester{orgRole: identity.RoleViewer, identityType: authlib.TypeUser},
			expectAllow:   false,
		},
		{
			name:          "no fallback configured, denied stays denied",
			fallbackRole:  "", // no fallback
			innerResponse: authlib.CheckResponse{Allowed: false},
			requester:     &mockRequester{orgRole: identity.RoleAdmin, identityType: authlib.TypeUser},
			expectAllow:   false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mock := &mockInnerAccessChecker{
				response: tt.innerResponse,
				err:      tt.innerErr,
			}
			checker := NewSessionAccessChecker(mock)
			if tt.fallbackRole != "" {
				checker = checker.WithFallbackRole(tt.fallbackRole)
			}
			// Add requester to context
			testCtx := identity.WithRequester(ctx, tt.requester)
			err := checker.Check(testCtx, req, "")
			if tt.expectAllow {
				require.NoError(t, err)
			} else {
				// All denials (including inner-checker errors) surface as Forbidden.
				require.Error(t, err)
				assert.True(t, apierrors.IsForbidden(err), "expected Forbidden error, got: %v", err)
			}
		})
	}
}
// TestSessionAccessChecker_NoRequester verifies that a context without a session
// identity yields an Unauthorized error even when the inner checker would allow.
func TestSessionAccessChecker_NoRequester(t *testing.T) {
	inner := &mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: true},
	}
	err := NewSessionAccessChecker(inner).Check(context.Background(), authlib.CheckRequest{}, "")
	require.Error(t, err)
	assert.True(t, apierrors.IsUnauthorized(err), "expected Unauthorized error")
}
// TestSessionAccessChecker_WithFallbackRole_ImmutableOriginal ensures that
// WithFallbackRole produces independent checkers and never mutates its receiver.
func TestSessionAccessChecker_WithFallbackRole_ImmutableOriginal(t *testing.T) {
	denyAll := &mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: false},
	}
	base := NewSessionAccessChecker(denyAll)
	adminFallback := base.WithFallbackRole(identity.RoleAdmin)
	editorFallback := base.WithFallbackRole(identity.RoleEditor)

	editorCtx := identity.WithRequester(context.Background(), &mockRequester{
		orgRole:      identity.RoleEditor,
		identityType: authlib.TypeUser,
	})
	req := authlib.CheckRequest{}

	// The base checker has no fallback, so the editor is denied.
	require.Error(t, base.Check(editorCtx, req, ""), "original should deny without fallback")
	// The admin-fallback copy still denies an editor.
	require.Error(t, adminFallback.Check(editorCtx, req, ""), "admin fallback should deny for editor")
	// The editor-fallback copy allows an editor.
	require.NoError(t, editorFallback.Check(editorCtx, req, ""), "editor fallback should allow for editor")
}
// TestSessionAccessChecker_WithFallbackRole_ChainedCalls checks that when
// WithFallbackRole is applied repeatedly, the most recent role wins.
func TestSessionAccessChecker_WithFallbackRole_ChainedCalls(t *testing.T) {
	inner := &mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: false},
	}

	// Admin is set first, then replaced by editor.
	checker := NewSessionAccessChecker(inner)
	checker = checker.WithFallbackRole(identity.RoleAdmin)
	checker = checker.WithFallbackRole(identity.RoleEditor)

	editor := &mockRequester{
		orgRole:      identity.RoleEditor,
		identityType: authlib.TypeUser,
	}
	ctx := identity.WithRequester(context.Background(), editor)

	err := checker.Check(ctx, authlib.CheckRequest{}, "")
	require.NoError(t, err, "last fallback (editor) should be used")
}
// TestSessionAccessChecker_RealSignedInUser exercises the fallback path with a
// concrete user.SignedInUser instead of the mock requester.
func TestSessionAccessChecker_RealSignedInUser(t *testing.T) {
	inner := &mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: false},
	}
	checker := NewSessionAccessChecker(inner).WithFallbackRole(identity.RoleAdmin)

	ctx := identity.WithRequester(context.Background(), &user.SignedInUser{
		UserID:  1,
		OrgID:   1,
		OrgRole: identity.RoleAdmin,
	})

	err := checker.Check(ctx, authlib.CheckRequest{}, "")
	require.NoError(t, err, "admin user should be allowed via fallback")
}
// TestSessionAccessChecker_FillsNamespace verifies that an empty request
// namespace is populated from the session identity before the inner check runs.
func TestSessionAccessChecker_FillsNamespace(t *testing.T) {
	checker := NewSessionAccessChecker(&mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: true},
	})

	requester := &mockRequester{
		orgRole:      identity.RoleAdmin,
		identityType: authlib.TypeUser,
		namespace:    "org-123",
	}
	ctx := identity.WithRequester(context.Background(), requester)

	// Namespace is deliberately left empty so the checker must fill it in.
	req := authlib.CheckRequest{
		Verb:     "get",
		Group:    "provisioning.grafana.app",
		Resource: "repositories",
		Name:     "test-repo",
	}

	require.NoError(t, checker.Check(ctx, req, ""))
}

View File

@@ -0,0 +1,92 @@
package auth
import (
"context"
"fmt"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/runtime/schema"
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana-app-sdk/logging"
"github.com/grafana/grafana/pkg/apimachinery/identity"
)
// tokenAccessChecker implements AccessChecker using access tokens from context.
type tokenAccessChecker struct {
	inner authlib.AccessChecker // delegate that performs the actual access check
}
// NewTokenAccessChecker returns an AccessChecker whose identity is taken from
// the access token carried in the context (authlib.AuthInfoFrom). Role-based
// fallback is not supported for token identities.
func NewTokenAccessChecker(inner authlib.AccessChecker) AccessChecker {
	checker := tokenAccessChecker{inner: inner}
	return &checker
}
// WithFallbackRole returns the same checker since fallback is not supported.
// The role argument is ignored; the method presumably exists only to satisfy
// the AccessChecker interface — confirm against the interface definition.
func (c *tokenAccessChecker) WithFallbackRole(_ identity.RoleType) AccessChecker {
	return c
}
// Check performs an access check using the AuthInfo found in the context.
// It returns nil when access is granted, an Unauthorized error when the context
// carries no auth info, and a Forbidden error when the inner checker errors or
// denies the request.
func (c *tokenAccessChecker) Check(ctx context.Context, req authlib.CheckRequest, folder string) error {
	log := logging.FromContext(ctx).With("logger", "tokenAccessChecker")

	// Identity comes from the access token stored in the context.
	info, ok := authlib.AuthInfoFrom(ctx)
	if !ok {
		log.Debug("no auth info in context",
			"resource", req.Resource,
			"verb", req.Verb,
			"namespace", req.Namespace,
		)
		return apierrors.NewUnauthorized("no auth info in context")
	}

	log.Debug("checking access",
		"identityType", info.GetIdentityType(),
		"namespace", info.GetNamespace(),
		"resource", req.Resource,
		"verb", req.Verb,
		"group", req.Group,
		"name", req.Name,
		"folder", folder,
	)

	// Default the namespace to the identity's namespace when the caller left it empty.
	if req.Namespace == "" {
		req.Namespace = info.GetNamespace()
	}

	// GroupResource used in the Forbidden error messages below.
	gr := schema.GroupResource{Group: req.Group, Resource: req.Resource}

	res, err := c.inner.Check(ctx, info, req, folder)
	if err != nil {
		log.Debug("access check error",
			"resource", req.Resource,
			"verb", req.Verb,
			"error", err.Error(),
		)
		return apierrors.NewForbidden(gr, req.Name, fmt.Errorf("%s.%s is forbidden: %w", req.Resource, req.Group, err))
	}
	if !res.Allowed {
		log.Debug("access check denied",
			"resource", req.Resource,
			"verb", req.Verb,
			"group", req.Group,
			"identityType", info.GetIdentityType(),
			"allowed", res.Allowed,
		)
		return apierrors.NewForbidden(gr, req.Name, fmt.Errorf("permission denied"))
	}

	log.Debug("access allowed",
		"resource", req.Resource,
		"verb", req.Verb,
	)
	return nil
}

View File

@@ -0,0 +1,137 @@
package auth
import (
"context"
"errors"
"testing"
apierrors "k8s.io/apimachinery/pkg/api/errors"
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestTokenAccessChecker_Check covers the three inner-checker outcomes for the
// token checker: allow, deny, and error. Deny and error both surface as Forbidden.
func TestTokenAccessChecker_Check(t *testing.T) {
	// One representative request reused by every case.
	req := authlib.CheckRequest{
		Verb:      "get",
		Group:     "provisioning.grafana.app",
		Resource:  "repositories",
		Name:      "test-repo",
		Namespace: "default",
	}
	tests := []struct {
		name          string                    // subtest name
		innerResponse authlib.CheckResponse     // canned response from the inner checker
		innerErr      error                     // canned error from the inner checker
		authInfo      *identity.StaticRequester // token identity placed in the context
		expectAllow   bool                      // whether Check should return nil
	}{
		{
			name:          "allowed by checker",
			innerResponse: authlib.CheckResponse{Allowed: true},
			authInfo:      &identity.StaticRequester{Type: authlib.TypeUser},
			expectAllow:   true,
		},
		{
			name:          "denied by checker",
			innerResponse: authlib.CheckResponse{Allowed: false},
			authInfo:      &identity.StaticRequester{Type: authlib.TypeUser},
			expectAllow:   false,
		},
		{
			name:        "error from checker",
			innerErr:    errors.New("access check failed"),
			authInfo:    &identity.StaticRequester{Type: authlib.TypeUser},
			expectAllow: false,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			mock := &mockInnerAccessChecker{
				response: tt.innerResponse,
				err:      tt.innerErr,
			}
			checker := NewTokenAccessChecker(mock)
			// Add auth info to context
			testCtx := authlib.WithAuthInfo(context.Background(), tt.authInfo)
			err := checker.Check(testCtx, req, "")
			if tt.expectAllow {
				require.NoError(t, err)
			} else {
				// Both denials and inner-checker errors surface as Forbidden.
				require.Error(t, err)
				assert.True(t, apierrors.IsForbidden(err), "expected Forbidden error, got: %v", err)
			}
		})
	}
}
// TestTokenAccessChecker_NoAuthInfo verifies that a context without auth info
// produces an Unauthorized error even when the inner checker would allow.
func TestTokenAccessChecker_NoAuthInfo(t *testing.T) {
	inner := &mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: true},
	}
	err := NewTokenAccessChecker(inner).Check(context.Background(), authlib.CheckRequest{}, "")
	require.Error(t, err)
	assert.True(t, apierrors.IsUnauthorized(err), "expected Unauthorized error")
}
// TestTokenAccessChecker_WithFallbackRole_IsNoOp confirms that WithFallbackRole
// on the token checker returns the identical instance (fallback is unsupported).
func TestTokenAccessChecker_WithFallbackRole_IsNoOp(t *testing.T) {
	checker := NewTokenAccessChecker(&mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: false},
	})
	assert.Same(t, checker, checker.WithFallbackRole(identity.RoleAdmin), "WithFallbackRole should return same instance")
}
// TestTokenAccessChecker_FillsNamespace verifies that an empty request namespace
// is populated from the token identity before the inner check runs.
func TestTokenAccessChecker_FillsNamespace(t *testing.T) {
	checker := NewTokenAccessChecker(&mockInnerAccessChecker{
		response: authlib.CheckResponse{Allowed: true},
	})

	ctx := authlib.WithAuthInfo(context.Background(), &identity.StaticRequester{
		Type:      authlib.TypeUser,
		Namespace: "org-123",
	})

	// Namespace is deliberately left empty so the checker must fill it in.
	req := authlib.CheckRequest{
		Verb:     "get",
		Group:    "provisioning.grafana.app",
		Resource: "repositories",
		Name:     "test-repo",
	}

	require.NoError(t, checker.Check(ctx, req, ""))
}
// mockInnerAccessChecker implements authlib.AccessChecker for testing.
// Every Check call returns the same canned response/error pair.
type mockInnerAccessChecker struct {
	response authlib.CheckResponse // returned by every Check call
	err      error                 // returned by every Check call
}

// Check ignores its arguments and returns the canned response and error.
func (m *mockInnerAccessChecker) Check(_ context.Context, _ authlib.AuthInfo, _ authlib.CheckRequest, _ string) (authlib.CheckResponse, error) {
	return m.response, m.err
}

// Compile is not exercised by these tests; it returns all-nil values.
func (m *mockInnerAccessChecker) Compile(_ context.Context, _ authlib.AuthInfo, _ authlib.ListRequest) (authlib.ItemChecker, authlib.Zookie, error) {
	return nil, nil, nil
}

View File

@@ -23,7 +23,13 @@ func NewSimpleRepositoryTester(validator RepositoryValidator) SimpleRepositoryTe
// TestRepository validates the repository and then runs a health check
func (t *SimpleRepositoryTester) TestRepository(ctx context.Context, repo Repository) (*provisioning.TestResults, error) {
errors := t.validator.ValidateRepository(repo)
// Determine if this is a CREATE or UPDATE operation
// If the repository has been observed by the controller (ObservedGeneration > 0),
// it's an existing repository and we should treat it as UPDATE
cfg := repo.Config()
isCreate := cfg.Status.ObservedGeneration == 0
errors := t.validator.ValidateRepository(repo, isCreate)
if len(errors) > 0 {
rsp := &provisioning.TestResults{
Code: http.StatusUnprocessableEntity, // Invalid

View File

@@ -32,7 +32,9 @@ func NewValidator(minSyncInterval time.Duration, allowedTargets []provisioning.S
}
// ValidateRepository solely does configuration checks on the repository object. It does not run a health check or compare against existing repositories.
func (v *RepositoryValidator) ValidateRepository(repo Repository) field.ErrorList {
// isCreate indicates whether this is a CREATE operation (true) or UPDATE operation (false).
// When isCreate is false, allowedTargets validation is skipped to allow existing repositories to continue working.
func (v *RepositoryValidator) ValidateRepository(repo Repository, isCreate bool) field.ErrorList {
list := repo.Validate()
cfg := repo.Config()
@@ -44,7 +46,7 @@ func (v *RepositoryValidator) ValidateRepository(repo Repository) field.ErrorLis
if cfg.Spec.Sync.Target == "" {
list = append(list, field.Required(field.NewPath("spec", "sync", "target"),
"The target type is required when sync is enabled"))
} else if !slices.Contains(v.allowedTargets, cfg.Spec.Sync.Target) {
} else if isCreate && !slices.Contains(v.allowedTargets, cfg.Spec.Sync.Target) {
list = append(list,
field.Invalid(
field.NewPath("spec", "target"),

View File

@@ -303,7 +303,8 @@ func TestValidateRepository(t *testing.T) {
validator := NewValidator(10*time.Second, []provisioning.SyncTargetType{provisioning.SyncTargetTypeFolder}, false)
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
errors := validator.ValidateRepository(tt.repository)
// Tests validate new configurations, so always pass isCreate=true
errors := validator.ValidateRepository(tt.repository, true)
require.Len(t, errors, tt.expectedErrs)
if tt.validateError != nil {
tt.validateError(t, errors)

View File

@@ -1653,6 +1653,11 @@ loki_basic_auth_password =
# Accepts duration formats like: 30s, 1m, 1h.
rule_query_offset = 1m
# Default data source UID to use for query execution when importing Prometheus rules.
# This default is used when the X-Grafana-Alerting-Datasource-UID header is not provided.
# If not set, the header becomes required.
default_datasource_uid =
[recording_rules]
# Enable recording rules.
enabled = true
@@ -2264,7 +2269,7 @@ fail_tests_on_console = true
# List of targets that can be controlled by a repository, separated by |.
# Instance means the whole grafana instance will be controlled by a repository.
# Folder limits it to a folder within the grafana instance.
allowed_targets = instance|folder
allowed_targets = folder
# Whether image rendering is allowed for dashboard previews.
# Requires image rendering service to be configured.

View File

@@ -1615,6 +1615,11 @@ max_annotations_to_keep =
# Accepts duration formats like: 30s, 1m, 1h.
rule_query_offset = 1m
# Default data source UID to use for query execution when importing Prometheus rules.
# This default is used when the X-Grafana-Alerting-Datasource-UID header is not provided.
# If not set, the header becomes required.
default_datasource_uid =
#################################### Recording Rules #####################
[recording_rules]
# Enable recording rules.

View File

@@ -1718,22 +1718,6 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active gateways",
"type": "radialbar"
},
@@ -1815,22 +1799,6 @@
"startValue": 0
}
],
"transformations": [
{
"id": "calculateField",
"options": {
"mode": "unary",
"reduce": {
"reducer": "sum"
},
"replaceFields": true,
"unary": {
"operator": "round",
"fieldName": "A-series"
}
}
}
],
"title": "Active pods",
"type": "radialbar"
},

View File

@@ -242,6 +242,8 @@ Set to `true` to import recording rules in paused state.
The UID of the data source to use for alert rule queries.
If not specified in the header, Grafana uses the configured default from `unified_alerting.prometheus_conversion.default_datasource_uid`. If neither the header nor the configuration option is provided, the request fails.
#### `X-Grafana-Alerting-Target-Datasource-UID`
The UID of the target data source for recording rules. If not specified, the value from `X-Grafana-Alerting-Datasource-UID` is used.

View File

@@ -128,35 +128,48 @@ The set up process verifies the path and provides an error message if a problem
#### Synchronization limitations
Full instance sync is not available in Grafana Cloud.
{{< admonition type="caution" >}}
In Grafana OSS/Enterprise:
Full instance sync is not available in Grafana Cloud and is experimental and unsupported in Grafana OSS/Enterprise.
{{< /admonition >}}
To have access to full instance sync you must explicitly enable the option.
The following applies:
- If you try to perform a full instance sync with resources that contain alerts or panels, the connection will be blocked.
- You won't be able to create new alerts or library panels after setup is completed.
- If you opted for full instance sync and want to use alerts and library panels, you'll have to delete the provisioned repository and connect again with folder sync.
#### Set up synchronization
Choose to either sync your entire organization resources with external storage, or to sync certain resources to a new Grafana folder (with up to 10 connections).
You can sync external resources into a new folder without affecting the rest of your instance.
- Choose **Sync all resources with external storage** if you want to sync and manage your entire Grafana instance through external storage. With this option, all of your dashboards are synced to that one repository. You can only have one provisioned connection with this selection, and you won't have the option of setting up additional repositories to connect to.
To set up synchronization:
- Choose **Sync external storage to new Grafana folder** to sync external resources into a new folder without affecting the rest of your instance. You can repeat this process for up to 10 connections.
1. Select which resources you want to sync.
Next, enter a **Display name** for the repository connection. Resources stored in this connection appear under the chosen display name in the Grafana UI.
1. Enter a **Display name** for the repository connection. Resources stored in this connection appear under the chosen display name in the Grafana UI.
Click **Synchronize** to continue.
1. Click **Synchronize** to continue.
1. You can repeat this process for up to 10 connections.
{{< admonition type="note" >}}
Optionally, you can export any unmanaged resources into the provisioned folder. See how in [Synchronize with external storage](#synchronize-with-external-storage).
{{< /admonition >}}
### Synchronize with external storage
After this one time step, all future updates are automatically saved to the local file path and provisioned back to the instance.
In this step you proceed to synchronize the resources selected in the previous step. Optionally, you can check the **Migrate existing resources** box to migrate your unmanaged dashboards to the provisioned folder.
During the initial synchronization, your dashboards will be temporarily unavailable. No data or configurations will be lost.
Select **Begin synchronization** to start the process. After this one time step, all future updates are automatically saved to the local file path and provisioned back to the instance.
Note that during the initial synchronization, your dashboards will be temporarily unavailable. No data or configurations will be lost.
How long the process takes depends upon the number of resources involved.
Select **Begin synchronization** to start the process.
### Choose additional settings
If you wish, you can make any synchronized files **Read only** so no changes can be made to the resources through Grafana.

View File

@@ -132,17 +132,35 @@ To connect your GitHub repository:
### Choose what to synchronize
In this step, you can decide which elements to synchronize. The available options depend on the status of your Grafana instance:
- If the instance contains resources in an incompatible data format, you'll have to migrate all the data using instance sync. Folder sync won't be supported.
- If there's already another connection using folder sync, instance sync won't be offered.
You can sync external resources into a new folder without affecting the rest of your instance.
To set up synchronization:
- Choose **Sync all resources with external storage** if you want to sync and manage your entire Grafana instance through external storage. With this option, all of your dashboards are synced to that one repository. You can only have one provisioned connection with this selection, and you won't have the option of setting up additional repositories to connect to.
- Choose **Sync external storage to new Grafana folder** to sync external resources into a new folder without affecting the rest of your instance. You can repeat this process for up to 10 connections.
1. Select which resources you want to sync.
Next, enter a **Display name** for the repository connection. Resources stored in this connection appear under the chosen display name in the Grafana UI. Click **Synchronize** to continue.
1. Enter a **Display name** for the repository connection. Resources stored in this connection appear under the chosen display name in the Grafana UI.
1. Click **Synchronize** to continue.
1. You can repeat this process for up to 10 connections.
{{< admonition type="note" >}}
Optionally, you can export any unmanaged resources into the provisioned folder. See how in [Synchronize with external storage](#synchronize-with-external-storage).
{{< /admonition >}}
#### Full instance sync
Full instance sync is not available in Grafana Cloud and is experimental and unsupported in Grafana OSS/Enterprise.
To have access to this option, you must explicitly enable experimental instance sync.
### Synchronize with external storage
After this one time step, all future updates are automatically saved to the Git repository and provisioned back to the instance.
Check the **Migrate existing resources** box to migrate your unmanaged dashboards to the provisioned folder.
### Choose additional settings

View File

@@ -47,7 +47,7 @@ Using Git Sync, you can:
{{< admonition type="caution" >}}
Git Sync only works with specific folders for the moment. Full-instance sync is not currently supported.
Full instance sync is not available in Grafana Cloud and is experimental and unsupported in Grafana OSS/Enterprise.
{{< /admonition >}}
@@ -84,7 +84,7 @@ Refer to [Requirements](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/obser
- You can only sync dashboards and folders. Refer to [Supported resources](#supported-resources) for more information.
- If you're using Git Sync in Grafana OSS and Grafana Enterprise, some resources might be in an incompatible data format and won't be synced.
- Full-instance sync is not available in Grafana Cloud and has limitations in Grafana OSS and Grafana Enterprise. Refer to [Choose what to synchronize](../git-sync-setup/#choose-what-to-synchronize) for more details.
- Full-instance sync is not available in Grafana Cloud and is experimental in Grafana OSS and Grafana Enterprise. Refer to [Choose what to synchronize](../git-sync-setup/#choose-what-to-synchronize) for more details.
- When migrating to full instance sync, during the synchronization process your resources will be temporarily unavailable. No one will be able to create, edit, or delete resources during this process.
- If you want to manage existing resources with Git Sync, you need to save them as JSON files and commit them to the synced repository. Open a PR to import, copy, move, or save a dashboard.
- Restoring resources from the UI is currently not possible. As an alternative, you can restore dashboards directly in your GitHub repository by raising a PR, and they will be updated in Grafana.

View File

@@ -112,6 +112,12 @@ For example, this video demonstrates the visual Prometheus query builder:
For general information about querying in Grafana, and common options and user interface elements across all query editors, refer to [Query and transform data](ref:query-transform-data).
## Build a dashboard from the data source
After you've configured a data source, you can start creating a dashboard directly from it, by clicking the **Build a dashboard** button.
For more information, refer to [Begin dashboard creation from data source configuration](https://grafana.com/docs/grafana/<GRAFANA_VERSION>/visualizations/dashboards/build-dashboards/create-dashboard/#begin-dashboard-creation-from-connections).
## Special data sources
Grafana includes three special data sources:

View File

@@ -2052,6 +2052,10 @@ This section applies only to rules imported as Grafana-managed rules. For more i
Set the query offset to imported Grafana-managed rules when `query_offset` is not defined in the original rule group configuration. The default value is `1m`.
#### `default_datasource_uid`
Set the default data source UID to use for query execution when importing Prometheus rules. Grafana uses this default when the `X-Grafana-Alerting-Datasource-UID` header isn't provided during import. If this option isn't set, the header becomes required. The default value is empty.
<hr>
### `[annotations]`

View File

@@ -31,7 +31,6 @@ Most [generally available](https://grafana.com/docs/release-life-cycle/#general-
| `logsContextDatasourceUi` | Allow datasource to provide custom UI for context view | Yes |
| `lokiQuerySplitting` | Split large interval queries into subqueries with smaller time intervals | Yes |
| `influxdbBackendMigration` | Query InfluxDB InfluxQL without the proxy | Yes |
| `unifiedRequestLog` | Writes error logs to the request logger | Yes |
| `logsExploreTableVisualisation` | A table visualisation for logs in Explore | Yes |
| `awsDatasourcesTempCredentials` | Support temporary security credentials in AWS plugins for Grafana Cloud customers | Yes |
| `awsAsyncQueryCaching` | Enable caching for async queries for Redshift and Athena. Requires that the datasource has caching and async query support enabled | Yes |

View File

@@ -163,9 +163,9 @@ To add a new annotation query to a dashboard, follow these steps:
1. To create a query, do one of the following:
- Write or construct a query in the query language of your data source. The annotation query options are different for each data source. For information about annotations in a specific data source, refer to the specific [data source](ref:data-source) topic.
- Click **Replace with saved query** to reuse a [saved query](ref:saved-queries).
- Open the **Saved queries** drop-down menu and click **Replace query** to reuse a [saved query](ref:saved-queries).
1. (Optional) To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
1. (Optional) To [save the query](ref:save-query) for reuse, open the **Saved queries** drop-down menu and click the **Save query** option.
1. (Optional) Click **Test annotation query** to ensure that the query is working properly.
1. (Optional) To add subsequent queries, click **+ Add query** or **+ Add from saved queries**, and test them as many times as needed.

View File

@@ -99,7 +99,7 @@ Dashboards and panels allow you to show your data in visual form. Each panel nee
- Understand the query language of the target data source.
- Ensure that the data source for which you are writing a query has been added. For more information about adding a data source, refer to [Add a data source](ref:add-a-data-source) if you need instructions.
**To create a dashboard**:
To create a dashboard, follow these steps:
{{< shared id="create-dashboard" >}}
@@ -125,9 +125,9 @@ Dashboards and panels allow you to show your data in visual form. Each panel nee
1. To create a query, do one of the following:
- Write or construct a query in the query language of your data source.
- Click **Replace with saved query** to reuse a [saved query](ref:saved-queries).
- Open the **Saved queries** drop-down menu and click **Replace query** to reuse a [saved query](ref:saved-queries).
1. (Optional) To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
1. (Optional) To [save the query](ref:save-query) for reuse, open the **Saved queries** drop-down menu and click the **Save query** option.
1. Click **Refresh** to query the data source.
1. (Optional) To add subsequent queries, click **+ Add query** or **+ Add from saved queries**, and refresh the data source as many times as needed.
@@ -171,6 +171,28 @@ Dashboards and panels allow you to show your data in visual form. Each panel nee
Now, when you want to make more changes to the saved dashboard, click **Edit** in the top-right corner.
### Begin dashboard creation from data source configuration
You can start the process of creating a dashboard directly from a data source rather than from the **Dashboards** page.
To begin building a dashboard directly from a data source, follow these steps:
1. Navigate to **Connections > Data sources**.
1. On the row of the data source for which you want to build a dashboard, click **Build a dashboard**.
The empty dashboard page opens.
1. Do one of the following:
- Click **+Add visualization** to configure all the elements of the new dashboard.
- Select one of the suggested dashboards by clicking its **Use dashboard** button. This can be helpful when you're not sure how to most effectively visualize your data.
The suggested dashboards are specific to your data source type (for example, Prometheus, Loki, or Elasticsearch). If there are more than three dashboard suggestions, you can click **View all** to see the rest of them.
![Empty dashboard with add visualization and suggested dashboard options](/media/docs/grafana/dashboards/screenshot-suggested-dashboards-v12.3.png)
{{< docs/public-preview product="Suggested dashboards" >}}
1. Complete the rest of the dashboard configuration. For more detailed steps, refer to [Create a dashboard](#create-a-dashboard), beginning at step five.
## Copy a dashboard
To copy a dashboard, follow these steps:

View File

@@ -71,8 +71,9 @@ Explore consists of a toolbar, outline, query editor, the ability to add multipl
- **Run query** - Click to run your query.
- **Query editor** - Interface where you construct the query for a specific data source. Query editor elements differ based on data source. In order to run queries across multiple data sources you need to select **Mixed** from the data source picker.
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace with saved query** - Reuse a saved query.
- **Saved queries**:
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace query** - Reuse a saved query.
- **+ Add query** - Add an additional query.
- **+ Add from saved queries** - Add an additional query by reusing a saved query.

View File

@@ -88,8 +88,9 @@ The data section contains tabs where you enter queries, transform your data, and
- **Queries**
- Select your data source. You can also set or update the data source in existing dashboards using the drop-down menu in the **Queries** tab.
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace with saved query** - Reuse a saved query.
- **Saved queries**:
- **Save query** - To [save the query](ref:save-query) for reuse, click the **Save query** button (or icon).
- **Replace query** - Reuse a saved query.
- **+ Add query** - Add an additional query.
- **+ Add from saved queries** - Add an additional query by reusing a saved query.

View File

@@ -156,11 +156,11 @@ In the **Saved queries** drawer, you can:
- Edit a query title, description, tags, or the availability of the query to other users in your organization. By default, saved queries are locked for editing.
- When you access the **Saved queries** drawer from Explore, you can use the **Edit in Explore** option to edit the body of a query.
To access your saved queries, click **+ Add from saved queries** or **Replace with saved query** in the query editor:
To access your saved queries, click **+ Add from saved queries** or open the **Saved queries** drop-down menu and click **Replace query** in the query editor:
{{< figure src="/media/docs/grafana/dashboards/screenshot-use-saved-queries-v12.3.png" max-width="750px" alt="Access saved queries" >}}
Clicking **+ Add from saved queries** adds an additional query, while clicking **Replace with saved query** updates your existing query.
Clicking **+ Add from saved queries** adds an additional query, while clicking **Replace query** in the **Saved queries** drop-down menu updates your existing query.
{{< admonition type="note" >}}
Users with Admin and Editor roles can create and save queries for reuse.
@@ -172,7 +172,7 @@ Viewers can only reuse queries.
To save a query you've created:
1. From the query editor, click the **Save query** icon:
1. From the query editor, open the **Saved queries** drop-down menu and click the **Save query** option:
{{< figure src="/media/docs/grafana/panels-visualizations/screenshot-save-query-v12.2.png" max-width="750px" alt="Save a query" >}}
@@ -227,7 +227,7 @@ To add a query, follow these steps:
1. To create a query, do one of the following:
- Write or construct a query in the query language of your data source.
- Click **Replace with saved query** to reuse a saved query.
- Open the **Saved queries** drop-down menu and click **Replace query** to reuse a saved query.
{{< admonition type="note" >}}
[Saved queries](#saved-queries) is currently in [public preview](https://grafana.com/docs/release-life-cycle/). Grafana Labs offers limited support, and breaking changes might occur prior to the feature being made generally available.
@@ -235,7 +235,7 @@ To add a query, follow these steps:
This feature is only available on Grafana Enterprise and Grafana Cloud.
{{< /admonition >}}
1. (Optional) To [save the query](#save-a-query) for reuse, click the **Save query** button (or icon).
1. (Optional) To [save the query](#save-a-query) for reuse, click the **Save query** option in the **Saved queries** drop-down menu.
1. (Optional) Click **+ Add query** or **Add from saved queries** to add more queries as needed.
1. Click **Run queries**.

View File

@@ -0,0 +1,271 @@
import { Page } from '@playwright/test';
import { test, expect } from '@grafana/plugin-e2e';
/**
 * UI selectors for Saved Searches e2e tests.
 * Each selector is a function that takes the page and returns a locator.
 */
const ui = {
  // Main elements
  savedSearchesButton: (page: Page) => page.getByRole('button', { name: /saved searches/i }),
  dropdown: (page: Page) => page.getByRole('dialog', { name: /saved searches/i }),
  searchInput: (page: Page) => page.getByTestId('search-query-input'),
  // Save functionality
  saveButton: (page: Page) => page.getByRole('button', { name: /save current search/i }),
  // Anchored (^save$) so this matches the confirm button and not "Save current search".
  saveConfirmButton: (page: Page) => page.getByRole('button', { name: /^save$/i }),
  saveNameInput: (page: Page) => page.getByPlaceholder(/enter a name/i),
  // Action menu
  actionsButton: (page: Page) => page.getByRole('button', { name: /actions/i }),
  renameMenuItem: (page: Page) => page.getByText(/rename/i),
  // Anchored (^delete$) to avoid matching other text that merely contains "delete".
  deleteMenuItem: (page: Page) => page.getByText(/^delete$/i),
  setAsDefaultMenuItem: (page: Page) => page.getByText(/set as default/i),
  deleteConfirmButton: (page: Page) => page.getByRole('button', { name: /^delete$/i }),
  // Indicators
  emptyState: (page: Page) => page.getByText(/no saved searches/i),
  // The default search is marked by an element with title="Default search".
  defaultIcon: (page: Page) => page.locator('[title="Default search"]'),
  duplicateError: (page: Page) => page.getByText(/already exists/i),
};
/**
 * Helper to clear saved searches storage.
 * UserStorage uses localStorage as fallback, so we clear both potential keys.
 */
async function clearSavedSearches(page: Page) {
  await page.evaluate(() => {
    // Remove localStorage entries that might contain saved searches.
    // UserStorage stores under the 'grafana.userstorage.alerting' pattern.
    for (const key of Object.keys(localStorage)) {
      if (key.includes('alerting') && (key.includes('savedSearches') || key.includes('userstorage'))) {
        localStorage.removeItem(key);
      }
    }
    // Also drop the alerting "visited" flags kept in sessionStorage.
    for (const key of Object.keys(sessionStorage)) {
      if (key.includes('alerting')) {
        sessionStorage.removeItem(key);
      }
    }
  });
}
// E2e coverage for the alert-rule list "Saved searches" dropdown:
// saving, applying, renaming, deleting, defaulting, and keyboard handling.
test.describe(
  'Alert Rules - Saved Searches',
  {
    tag: ['@alerting'],
  },
  () => {
    test.beforeEach(async ({ page }) => {
      // Clear any saved searches from previous tests before navigating.
      // Reload afterwards so the page re-reads the now-empty storage.
      await page.goto('/alerting/list');
      await clearSavedSearches(page);
      await page.reload();
    });
    test.afterEach(async ({ page }) => {
      // Clean up saved searches after each test
      await clearSavedSearches(page);
    });
    test('should display Saved searches button', async ({ page }) => {
      await expect(ui.savedSearchesButton(page)).toBeVisible();
    });
    test('should open dropdown when clicking Saved searches button', async ({ page }) => {
      await ui.savedSearchesButton(page).click();
      await expect(ui.dropdown(page)).toBeVisible();
    });
    test('should show empty state when no saved searches exist', async ({ page }) => {
      // Storage is cleared in beforeEach, so we should see empty state
      await ui.savedSearchesButton(page).click();
      await expect(ui.emptyState(page)).toBeVisible();
    });
    test('should enable Save current search button when search query is entered', async ({ page }) => {
      // Enter a search query and submit it with Enter
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      // Open saved searches
      await ui.savedSearchesButton(page).click();
      await expect(ui.saveButton(page)).toBeEnabled();
    });
    test('should disable Save current search button when search query is empty', async ({ page }) => {
      await ui.savedSearchesButton(page).click();
      await expect(ui.saveButton(page)).toBeDisabled();
    });
    test('should save a new search', async ({ page }) => {
      // Enter a search query
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      // Open saved searches
      await ui.savedSearchesButton(page).click();
      // Click save button
      await ui.saveButton(page).click();
      // Enter name and save
      await ui.saveNameInput(page).fill('My Firing Rules');
      await ui.saveConfirmButton(page).click();
      // Verify the saved search appears in the list
      await expect(page.getByText('My Firing Rules')).toBeVisible();
    });
    test('should show validation error for duplicate name', async ({ page }) => {
      // First save a search
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      await ui.savedSearchesButton(page).click();
      await ui.saveButton(page).click();
      await ui.saveNameInput(page).fill('Duplicate Test');
      await ui.saveConfirmButton(page).click();
      // Try to save another with the same name
      await ui.saveButton(page).click();
      await ui.saveNameInput(page).fill('Duplicate Test');
      await ui.saveConfirmButton(page).click();
      // Verify validation error ("already exists") is shown
      await expect(ui.duplicateError(page)).toBeVisible();
    });
    test('should apply a saved search', async ({ page }) => {
      // Create a saved search first
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      await ui.savedSearchesButton(page).click();
      await ui.saveButton(page).click();
      await ui.saveNameInput(page).fill('Apply Test');
      await ui.saveConfirmButton(page).click();
      // Clear the search so applying the saved one has a visible effect
      await ui.searchInput(page).clear();
      await ui.searchInput(page).press('Enter');
      // Apply the saved search
      await ui.savedSearchesButton(page).click();
      await page.getByRole('button', { name: /apply search.*apply test/i }).click();
      // Verify the search input is updated with the saved query
      await expect(ui.searchInput(page)).toHaveValue('state:firing');
    });
    test('should rename a saved search', async ({ page }) => {
      // Create a saved search
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      await ui.savedSearchesButton(page).click();
      await ui.saveButton(page).click();
      await ui.saveNameInput(page).fill('Original Name');
      await ui.saveConfirmButton(page).click();
      // Open action menu and click rename
      await ui.actionsButton(page).click();
      await ui.renameMenuItem(page).click();
      // Enter new name (rename input is pre-filled with the current name)
      const renameInput = page.getByDisplayValue('Original Name');
      await renameInput.clear();
      await renameInput.fill('Renamed Search');
      await page.keyboard.press('Enter');
      // Verify the name was updated and the old name is gone
      await expect(page.getByText('Renamed Search')).toBeVisible();
      await expect(page.getByText('Original Name')).not.toBeVisible();
    });
    test('should delete a saved search', async ({ page }) => {
      // Create a saved search
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      await ui.savedSearchesButton(page).click();
      await ui.saveButton(page).click();
      await ui.saveNameInput(page).fill('To Delete');
      await ui.saveConfirmButton(page).click();
      // Verify it was saved
      await expect(page.getByText('To Delete')).toBeVisible();
      // Open action menu and click delete
      await ui.actionsButton(page).click();
      await ui.deleteMenuItem(page).click();
      // Confirm delete
      await ui.deleteConfirmButton(page).click();
      // Verify it was deleted
      await expect(page.getByText('To Delete')).not.toBeVisible();
    });
    test('should set a search as default', async ({ page }) => {
      // Create a saved search
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      await ui.savedSearchesButton(page).click();
      await ui.saveButton(page).click();
      await ui.saveNameInput(page).fill('Default Test');
      await ui.saveConfirmButton(page).click();
      // Set as default
      await ui.actionsButton(page).click();
      await ui.setAsDefaultMenuItem(page).click();
      // Verify the star icon appears (indicating default)
      await expect(ui.defaultIcon(page)).toBeVisible();
    });
    test('should close dropdown when pressing Escape', async ({ page }) => {
      await ui.savedSearchesButton(page).click();
      await expect(ui.dropdown(page)).toBeVisible();
      await page.keyboard.press('Escape');
      await expect(ui.dropdown(page)).not.toBeVisible();
    });
    test('should cancel save mode when pressing Escape', async ({ page }) => {
      // Enter a search query
      await ui.searchInput(page).fill('state:firing');
      await ui.searchInput(page).press('Enter');
      await ui.savedSearchesButton(page).click();
      // Start save mode
      await ui.saveButton(page).click();
      await expect(ui.saveNameInput(page)).toBeVisible();
      // Press Escape to cancel
      await page.keyboard.press('Escape');
      // Verify we're back to list mode (name input gone, save button back)
      await expect(ui.saveNameInput(page)).not.toBeVisible();
      await expect(ui.saveButton(page)).toBeVisible();
    });
  }
);

View File

@@ -1,14 +1,12 @@
import { test, expect } from '@grafana/plugin-e2e';
import testV2DashWithRepeats from '../dashboards/V2DashWithRepeats.json';
import { test, expect } from './fixtures';
import {
checkRepeatedPanelTitles,
verifyChanges,
movePanel,
getPanelPosition,
saveDashboard,
importTestDashboard,
goToEmbeddedPanel,
} from './utils';
@@ -34,8 +32,8 @@ test.describe(
tag: ['@dashboards'],
},
() => {
test('can enable repeats', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(page, selectors, 'Custom grid repeats - add repeats');
test('can enable repeats', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Custom grid repeats - add repeats');
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
@@ -62,13 +60,8 @@ test.describe(
await checkRepeatedPanelTitles(dashboardPage, selectors, repeatTitleBase, repeatOptions);
});
test('can update repeats with variable change', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Custom grid repeats - update on variable change',
JSON.stringify(testV2DashWithRepeats)
);
test('can update repeats with variable change', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Custom grid repeats - update on variable change', JSON.stringify(testV2DashWithRepeats));
await dashboardPage
.getByGrafanaSelector(
@@ -94,13 +87,8 @@ test.describe(
)
).toBeHidden();
});
test('can update repeats in edit pane', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Custom grid repeats - update through edit pane',
JSON.stringify(testV2DashWithRepeats)
);
test('can update repeats in edit pane', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Custom grid repeats - update through edit pane', JSON.stringify(testV2DashWithRepeats));
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
@@ -122,13 +110,8 @@ test.describe(
await checkRepeatedPanelTitles(dashboardPage, selectors, newTitleBase, repeatOptions);
});
test('can update repeats in panel editor', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Custom grid repeats - update through panel editor',
JSON.stringify(testV2DashWithRepeats)
);
test('can update repeats in panel editor', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Custom grid repeats - update through panel editor', JSON.stringify(testV2DashWithRepeats));
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
@@ -181,10 +164,13 @@ test.describe(
await checkRepeatedPanelTitles(dashboardPage, selectors, newTitleBase, repeatOptions);
});
test('can update repeats in panel editor when loaded directly', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
test('can update repeats in panel editor when loaded directly', async ({
dashboardPage,
selectors,
page,
importDashboard,
}) => {
await importDashboard(
'Custom grid repeats - update through directly loaded panel editor',
JSON.stringify(testV2DashWithRepeats)
);
@@ -232,13 +218,8 @@ test.describe(
await checkRepeatedPanelTitles(dashboardPage, selectors, newTitleBase, repeatOptions);
});
test('can move repeated panels', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Custom grid repeats - move repeated panels',
JSON.stringify(testV2DashWithRepeats)
);
test('can move repeated panels', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Custom grid repeats - move repeated panels', JSON.stringify(testV2DashWithRepeats));
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
@@ -276,13 +257,8 @@ test.describe(
`${repeatTitleBase}${repeatOptions.at(-1)}`
);
});
test('can view repeated panel', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Custom grid repeats - move repeated panels',
JSON.stringify(testV2DashWithRepeats)
);
test('can view repeated panel', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Custom grid repeats - move repeated panels', JSON.stringify(testV2DashWithRepeats));
await dashboardPage
.getByGrafanaSelector(selectors.components.Panels.Panel.title(`${repeatTitleBase}${repeatOptions.at(-1)}`))
@@ -332,10 +308,8 @@ test.describe(
).toBeVisible();
});
test('can view embedded repeated panel', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
test('can view embedded repeated panel', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard(
'Custom grid repeats - view embedded repeated panel',
JSON.stringify(testV2DashWithRepeats)
);
@@ -353,13 +327,8 @@ test.describe(
)
).toBeVisible();
});
test('can remove repeats', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Custom grid repeats - remove repeats',
JSON.stringify(testV2DashWithRepeats)
);
test('can remove repeats', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Custom grid repeats - remove repeats', JSON.stringify(testV2DashWithRepeats));
// verify 6 panels are present (4 repeats and 2 normal)
expect(

View File

@@ -1,11 +1,9 @@
import { test, expect } from '@grafana/plugin-e2e';
import V2DashWithTabRepeats from '../dashboards/V2DashWithTabRepeats.json';
import { test, expect } from './fixtures';
import {
verifyChanges,
saveDashboard,
importTestDashboard,
goToEmbeddedPanel,
checkRepeatedTabTitles,
groupIntoTab,
@@ -35,8 +33,8 @@ test.describe(
tag: ['@dashboards'],
},
() => {
test('can enable tab repeats', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(page, selectors, 'Tabs layout repeats - add repeats');
test('can enable tab repeats', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Tabs layout repeats - add repeats');
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
@@ -69,13 +67,8 @@ test.describe(
await checkRepeatedTabTitles(dashboardPage, selectors, repeatTitleBase, repeatOptions);
});
test('can update tab repeats with variable change', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Tabs layout repeats - update on variable change',
JSON.stringify(V2DashWithTabRepeats)
);
test('can update tab repeats with variable change', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Tabs layout repeats - update on variable change', JSON.stringify(V2DashWithTabRepeats));
const c1Var = dashboardPage.getByGrafanaSelector(selectors.pages.Dashboard.SubMenu.submenuItemLabels('c1'));
await c1Var
@@ -97,13 +90,8 @@ test.describe(
dashboardPage.getByGrafanaSelector(selectors.components.Tab.title(`${repeatTitleBase}${repeatOptions.at(-1)}`))
).toBeHidden();
});
test('can update repeats in edit pane', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Tabs layout repeats - update through edit pane',
JSON.stringify(V2DashWithTabRepeats)
);
test('can update repeats in edit pane', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Tabs layout repeats - update through edit pane', JSON.stringify(V2DashWithTabRepeats));
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
// select first/original repeat tab to activate edit pane
@@ -125,10 +113,8 @@ test.describe(
await checkRepeatedTabTitles(dashboardPage, selectors, newTitleBase, repeatOptions);
});
test('can update repeats after panel change', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
test('can update repeats after panel change', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard(
'Tabs layout repeats - update repeats after panel change',
JSON.stringify(V2DashWithTabRepeats)
);
@@ -165,10 +151,13 @@ test.describe(
).toBeVisible();
});
test('can update repeats after panel change in editor', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
test('can update repeats after panel change in editor', async ({
dashboardPage,
selectors,
page,
importDashboard,
}) => {
await importDashboard(
'Tabs layout repeats - update repeats after panel change in editor',
JSON.stringify(V2DashWithTabRepeats)
);
@@ -225,10 +214,13 @@ test.describe(
).toBeVisible();
});
test('can hide canvas grid add row action in repeats', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
test('can hide canvas grid add row action in repeats', async ({
dashboardPage,
selectors,
page,
importDashboard,
}) => {
await importDashboard(
'Tabs layout repeats - hide canvas add action in repeats',
JSON.stringify(V2DashWithTabRepeats)
);
@@ -244,13 +236,8 @@ test.describe(
await expect(dashboardPage.getByGrafanaSelector(selectors.components.CanvasGridAddActions.addRow)).toBeHidden();
});
test('can move repeated tabs', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Tabs layout repeats - move repeated tabs',
JSON.stringify(V2DashWithTabRepeats)
);
test('can move repeated tabs', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Tabs layout repeats - move repeated tabs', JSON.stringify(V2DashWithTabRepeats));
await dashboardPage.getByGrafanaSelector(selectors.components.NavToolbar.editDashboard.editButton).click();
await moveTab(dashboardPage, page, selectors, `${repeatTitleBase}${repeatOptions.at(0)}`, 'New tab');
@@ -269,13 +256,8 @@ test.describe(
expect(normalTab2?.x).toBeLessThan(repeatedTab2?.x || 0);
});
test('can load into repeated tab', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Tabs layout repeats - can load into repeated tab',
JSON.stringify(V2DashWithTabRepeats)
);
test('can load into repeated tab', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Tabs layout repeats - can load into repeated tab', JSON.stringify(V2DashWithTabRepeats));
await dashboardPage
.getByGrafanaSelector(selectors.components.Tab.title(`${repeatTitleBase}${repeatOptions.at(2)}`))
@@ -292,13 +274,8 @@ test.describe(
).toBe('true');
});
test('can view panels in repeated tab', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Tabs layout repeats - view panels in repeated tabs',
JSON.stringify(V2DashWithTabRepeats)
);
test('can view panels in repeated tab', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Tabs layout repeats - view panels in repeated tabs', JSON.stringify(V2DashWithTabRepeats));
// non repeated panel in repeated tab
await dashboardPage.getByGrafanaSelector(selectors.components.Panels.Panel.title('New panel')).first().hover();
@@ -367,10 +344,8 @@ test.describe(
).toBeVisible();
});
test('can view embedded panels in repeated tab', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
test('can view embedded panels in repeated tab', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard(
'Tabs layout repeats - view embedded panels in repeated tabs',
JSON.stringify(V2DashWithTabRepeats)
);
@@ -417,13 +392,8 @@ test.describe(
).toBeVisible();
});
test('can remove repeats', async ({ dashboardPage, selectors, page }) => {
await importTestDashboard(
page,
selectors,
'Tabs layout repeats - remove repeats',
JSON.stringify(V2DashWithTabRepeats)
);
test('can remove repeats', async ({ dashboardPage, selectors, page, importDashboard }) => {
await importDashboard('Tabs layout repeats - remove repeats', JSON.stringify(V2DashWithTabRepeats));
// verify 5 tabs are present (4 repeats and 1 normal)
await checkRepeatedTabTitles(dashboardPage, selectors, repeatTitleBase, repeatOptions);

View File

@@ -0,0 +1,31 @@
import { test as base } from '@grafana/plugin-e2e';
import { importTestDashboard } from './utils';

type ImportDashboardFn = (title: string, dashJSON?: string) => Promise<string>;

/**
 * Extended test fixtures for dashboard-new-layouts tests.
 * Provides `importDashboard` - a wrapped version of `importTestDashboard` that
 * automatically cleans up dashboards after each test.
 */
export const test = base.extend<{ importDashboard: ImportDashboardFn }>({
  // imports dashboard and cleans it up after the test
  importDashboard: async ({ page, selectors, request }, use) => {
    const importedUIDs: string[] = [];
    const importDashboard: ImportDashboardFn = async (title, dashJSON) => {
      const uid = await importTestDashboard(page, selectors, title, dashJSON);
      // importTestDashboard returns '' when it cannot determine a UID; only
      // record real UIDs so teardown never calls DELETE /api/dashboards/uid/
      // with an empty path segment.
      if (uid) {
        importedUIDs.push(uid);
      }
      return uid;
    };
    await use(importDashboard);
    // Best-effort teardown: one failed delete must not abort cleanup of the
    // remaining dashboards (or surface as a fixture error after the test).
    for (const uid of importedUIDs) {
      try {
        await request.delete(`/api/dashboards/uid/${uid}`);
      } catch {
        // ignore cleanup failures; the test outcome is already decided
      }
    }
  },
});

export { expect } from '@grafana/plugin-e2e';

View File

@@ -160,7 +160,12 @@ export async function verifyChanges(
await dashboardPage.getByGrafanaSelector(selectors.components.Drawer.General.close).click();
}
export async function importTestDashboard(page: Page, selectors: E2ESelectorGroups, title: string, dashInput?: string) {
export async function importTestDashboard(
page: Page,
selectors: E2ESelectorGroups,
title: string,
dashInput?: string
): Promise<string> {
await page.goto(selectors.pages.ImportDashboard.url);
await page
.getByTestId(selectors.components.DashboardImportPage.textarea)
@@ -177,6 +182,15 @@ export async function importTestDashboard(page: Page, selectors: E2ESelectorGrou
}
await expect(page.locator('[data-testid="uplot-main-div"]').first()).toBeVisible();
if (testV2Dashboard.metadata.uid) {
return testV2Dashboard.metadata.uid;
}
// else extract from url
const url = new URL(page.url());
const pathParts = url.pathname.split('/');
const dIndex = pathParts.indexOf('d');
return dIndex !== -1 ? pathParts[dIndex + 1] : '';
}
export async function goToEmbeddedPanel(page: Page) {

View File

@@ -1911,11 +1911,6 @@
"count": 1
}
},
"public/app/features/dashboard-scene/settings/JsonModelEditView.tsx": {
"react/no-unescaped-entities": {
"count": 2
}
},
"public/app/features/dashboard-scene/settings/variables/VariableEditableElement.tsx": {
"react-hooks/rules-of-hooks": {
"count": 4
@@ -2868,11 +2863,6 @@
"count": 1
}
},
"public/app/features/plugins/admin/components/PluginDetailsPage.tsx": {
"@typescript-eslint/consistent-type-assertions": {
"count": 1
}
},
"public/app/features/plugins/admin/helpers.ts": {
"no-restricted-syntax": {
"count": 2
@@ -4339,7 +4329,7 @@
},
"public/app/plugins/panel/heatmap/utils.ts": {
"@typescript-eslint/consistent-type-assertions": {
"count": 16
"count": 14
}
},
"public/app/plugins/panel/histogram/Histogram.tsx": {

2
go.mod
View File

@@ -87,7 +87,7 @@ require (
github.com/googleapis/gax-go/v2 v2.15.0 // @grafana/grafana-backend-group
github.com/gorilla/mux v1.8.1 // @grafana/grafana-backend-group
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // @grafana/grafana-app-platform-squad
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 // @grafana/alerting-backend
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 // @grafana/alerting-backend
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // @grafana/identity-access-team
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // @grafana/identity-access-team
github.com/grafana/dataplane/examples v0.0.1 // @grafana/observability-metrics

4
go.sum
View File

@@ -1622,8 +1622,8 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7 h1:ZzG/gCclEit9w0QUfQt9GURcOycAIGcsQAhY1u0AEX0=
github.com/grafana/alerting v0.0.0-20251212143239-491433b332b7/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196 h1:A9UJtyBBUE7PkRsAITKU05iz+HpHO9SaVjfdo2Df3UQ=
github.com/grafana/alerting v0.0.0-20251223160021-926c74910196/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=

View File

@@ -793,7 +793,15 @@ github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5
github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk=
github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4=
github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14=
github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ=
github.com/go-openapi/swag v0.23.1/go.mod h1:STZs8TbRvEQQKUA+JZNAm3EWlgaOBGpyFDqQnDHMef0=
github.com/go-openapi/swag/conv v0.25.1/go.mod h1:Z1mFEGPfyIKPu0806khI3zF+/EUXde+fdeksUl2NiDs=
github.com/go-openapi/swag/fileutils v0.25.1/go.mod h1:+NXtt5xNZZqmpIpjqcujqojGFek9/w55b3ecmOdtg8M=
github.com/go-openapi/swag/jsonutils v0.25.1/go.mod h1:JpEkAjxQXpiaHmRO04N1zE4qbUEg3b7Udll7AMGTNOo=
github.com/go-openapi/swag/loading v0.25.1/go.mod h1:xoIe2EG32NOYYbqxvXgPzne989bWvSNoWoyQVWEZicc=
github.com/go-openapi/swag/mangling v0.25.1/go.mod h1:CdiMQ6pnfAgyQGSOIYnZkXvqhnnwOn997uXZMAd/7mQ=
github.com/go-openapi/swag/stringutils v0.25.1/go.mod h1:JLdSAq5169HaiDUbTvArA2yQxmgn4D6h4A+4HqVvAYg=
github.com/go-openapi/swag/typeutils v0.25.1/go.mod h1:9McMC/oCdS4BKwk2shEB7x17P6HmMmA6dQRtAkSnNb8=
github.com/go-openapi/swag/yamlutils v0.25.1/go.mod h1:cm9ywbzncy3y6uPm/97ysW8+wZ09qsks+9RS8fLWKqg=
github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ=
github.com/go-pdf/fpdf v0.6.0 h1:MlgtGIfsdMEEQJr2le6b/HNr1ZlQwxyWr77r2aj2U/8=
github.com/go-playground/assert/v2 v2.0.1 h1:MsBgLAaY856+nPRTKrp3/OZK38U/wa0CcBYNjji3q3A=
@@ -982,7 +990,6 @@ github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0/go.mod h1:YN5jB8ie0yfIUg6VvR9K
github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1/go.mod h1:5SN9VR2LTsRFsrEC6FHgRbTWrTHu6tqPeKxEQv15giM=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.26.3/go.mod h1:ndYquD05frm2vACXE1nsccT4oJzjhw2arTS2cpUD1PI=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.1/go.mod h1:Zanoh4+gvIgluNqcfMVTJueD4wSS5hT7zTt4Mrutd90=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.27.2/go.mod h1:pkJQ2tZHJ0aFOVEEot6oZmaVEZcRme73eIFmhiVuRWs=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645 h1:MJG/KsmcqMwFAkh8mTnAwhyKoB+sTAnY4CACC110tbU=
github.com/grpc-ecosystem/grpc-opentracing v0.0.0-20180507213350-8e809c8a8645/go.mod h1:6iZfnjpejD4L/4DwD7NryNaJyCQdzwWwH2MWhCA90Kw=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=
@@ -1404,7 +1411,6 @@ github.com/richardartoul/molecule v1.0.0/go.mod h1:uvX/8buq8uVeiZiFht+0lqSLBHF+u
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
@@ -1623,7 +1629,6 @@ go.mongodb.org/mongo-driver v1.11.4/go.mod h1:PTSz5yu21bkT/wXpkS7WR5f0ddqw5queth
go.mongodb.org/mongo-driver v1.14.0/go.mod h1:Vzb0Mk/pa7e6cWw85R4F/endUC3u0U9jGcNU603k65c=
go.mongodb.org/mongo-driver v1.17.3/go.mod h1:Hy04i7O2kC4RS06ZrhPRqj/u4DTYkFDAAccj+rVKqgQ=
go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A=
go.opentelemetry.io/collector v0.121.0/go.mod h1:M4TlnmkjIgishm2DNCk9K3hMKTmAsY9w8cNFsp9EchM=
go.opentelemetry.io/collector v0.124.0/go.mod h1:QzERYfmHUedawjr8Ph/CBEEkVqWS8IlxRLAZt+KHlCg=
go.opentelemetry.io/collector/client v1.29.0/go.mod h1:LCUoEV2KCTKA1i+/txZaGsSPVWUcqeOV6wCfNsAippE=
@@ -1839,6 +1844,7 @@ go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.58.0/go.mod h1:
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.60.0/go.mod h1:69uWxva0WgAA/4bu2Yy70SLDBwZXuQ6PbBpbsa5iZrQ=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.61.0/go.mod h1:UHB22Z8QsdRDrnAtX4PntOl36ajSxcdUMt1sF7Y6E7Q=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.62.0/go.mod h1:NfchwuyNoMcZ5MLHwPrODwUF1HWCXWrL31s8gSAdIKY=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.63.0/go.mod h1:h06DGIukJOevXaj/xrNjhi/2098RZzcLTbc0jDAUbsg=
go.opentelemetry.io/contrib/otelconf v0.15.0 h1:BLNiIUsrNcqhSKpsa6CnhE6LdrpY1A8X0szMVsu99eo=
go.opentelemetry.io/contrib/otelconf v0.15.0/go.mod h1:OPH1seO5z9dp1P26gnLtoM9ht7JDvh3Ws6XRHuXqImY=
go.opentelemetry.io/contrib/propagators/aws v1.37.0 h1:cp8AFiM/qjBm10C/ATIRnEDXpD5MBknrA0ANw4T2/ss=
@@ -1910,7 +1916,6 @@ go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v8
go.opentelemetry.io/proto/otlp v1.5.0/go.mod h1:keN8WnHxOy8PG0rQZjJJ5A2ebUoafqWp0eVQ4yIXvJ4=
go.opentelemetry.io/proto/otlp v1.6.0/go.mod h1:cicgGehlFuNdgZkcALOCh3VE6K/u2tAjzlRhDwmVpZc=
go.opentelemetry.io/proto/otlp v1.7.0/go.mod h1:fSKjH6YJ7HDlwzltzyMj036AJ3ejJLCgCSHGj4efDDo=
go.opentelemetry.io/proto/otlp v1.7.1/go.mod h1:b2rVh6rfI/s2pHWNlB7ILJcRALpcNDzKhACevjI+ZnE=
go.uber.org/atomic v1.10.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0=
go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs=
go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8=
@@ -2118,8 +2123,8 @@ google.golang.org/genproto/googleapis/api v0.0.0-20250728155136-f173205681a0/go.
google.golang.org/genproto/googleapis/api v0.0.0-20250804133106-a7a43d27e69b/go.mod h1:oDOGiMSXHL4sDTJvFvIB9nRQCGdLP1o/iVaqQK8zB+M=
google.golang.org/genproto/googleapis/api v0.0.0-20250818200422-3122310a409c/go.mod h1:ea2MjsO70ssTfCjiwHgI0ZFqcw45Ksuk2ckf9G468GA=
google.golang.org/genproto/googleapis/api v0.0.0-20250825161204-c5933d9347a5/go.mod h1:j3QtIyytwqGr1JUDtYXwtMXWPKsEa5LtzIFN1Wn5WvE=
google.golang.org/genproto/googleapis/api v0.0.0-20250908214217-97024824d090/go.mod h1:U8EXRNSd8sUYyDfs/It7KVWodQr+Hf9xtxyxWudSwEw=
google.golang.org/genproto/googleapis/api v0.0.0-20250929231259-57b25ae835d4/go.mod h1:NnuHhy+bxcg30o7FnVAZbXsPHUDQ9qKWAQKCD7VxFtk=
google.golang.org/genproto/googleapis/api v0.0.0-20251111163417-95abcf5c77ba/go.mod h1:G5IanEx8/PgI9w6CFcYQf7jMtHQhZruvfM1i3qOqk5U=
google.golang.org/genproto/googleapis/api v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:+rXWjjaukWZun3mLfjmVnQi18E1AsFbDN9QdJ5YXLto=
google.golang.org/genproto/googleapis/bytestream v0.0.0-20250603155806-513f23925822 h1:zWFRixYR5QlotL+Uv3YfsPRENIrQFXiGs+iwqel6fOQ=
google.golang.org/genproto/googleapis/bytestream v0.0.0-20250603155806-513f23925822/go.mod h1:h6yxum/C2qRb4txaZRLDHK8RyS0H/o2oEDeKY4onY/Y=
@@ -2150,10 +2155,9 @@ google.golang.org/genproto/googleapis/rpc v0.0.0-20250825161204-c5933d9347a5/go.
google.golang.org/genproto/googleapis/rpc v0.0.0-20250826171959-ef028d996bc1/go.mod h1:GmFNa4BdJZ2a8G+wCe9Bg3wwThLrJun751XstdJt5Og=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250908214217-97024824d090/go.mod h1:GmFNa4BdJZ2a8G+wCe9Bg3wwThLrJun751XstdJt5Og=
google.golang.org/genproto/googleapis/rpc v0.0.0-20250929231259-57b25ae835d4/go.mod h1:HSkG/KdJWusxU1F6CNrwNDjBMgisKxGnc5dAZfT0mjQ=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251002232023-7c0ddcbb5797/go.mod h1:HSkG/KdJWusxU1F6CNrwNDjBMgisKxGnc5dAZfT0mjQ=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251014184007-4626949a642f/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251022142026-3a174f9686a8/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251103181224-f26f9409b101/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251111163417-95abcf5c77ba/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251124214823-79d6a2a48846/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/genproto/googleapis/rpc v0.0.0-20251202230838-ff82c1b0f217/go.mod h1:7i2o+ce6H/6BluujYR+kqX3GKH+dChPTQU19wjRPiGk=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
@@ -2177,7 +2181,6 @@ google.golang.org/grpc v1.73.0/go.mod h1:50sbHOUqWoCQGI8V2HQLJM0B+LMlIUjNSZmow7E
google.golang.org/grpc v1.74.2/go.mod h1:CtQ+BGjaAIXHs/5YS3i473GqwBBa1zGQNevxdeBEXrM=
google.golang.org/grpc v1.75.0/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ=
google.golang.org/grpc v1.75.1/go.mod h1:JtPAzKiq4v1xcAB2hydNlWI2RnF85XXcV0mhKXr2ecQ=
google.golang.org/grpc v1.76.0/go.mod h1:Ju12QI8M6iQJtbcsV+awF5a4hfJMLi4X0JLo94ULZ6c=
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE=
google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20 h1:MLBCGN1O7GzIx+cBiwfYPwtmZ41U3Mn/cotLJciaArI=
google.golang.org/grpc/examples v0.0.0-20230224211313-3775f633ce20/go.mod h1:Nr5H8+MlGWr5+xX/STzdoEqJrO+YteqFbMyCsrb6mH0=
@@ -2299,7 +2302,6 @@ sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ih
sigs.k8s.io/structured-merge-diff/v4 v4.5.0 h1:nbCitCK2hfnhyiKo6uf2HxUPTCodY6Qaf85SbDIaMBk=
sigs.k8s.io/structured-merge-diff/v4 v4.5.0/go.mod h1:N8f93tFZh9U6vpxwRArLiikrE5/2tiu1w1AGfACIGE4=
sigs.k8s.io/structured-merge-diff/v6 v6.2.0/go.mod h1:M3W8sfWvn2HhQDIbGWj3S099YozAsymCo/wrT5ohRUE=
sigs.k8s.io/structured-merge-diff/v6 v6.3.0/go.mod h1:M3W8sfWvn2HhQDIbGWj3S099YozAsymCo/wrT5ohRUE=
sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY=
sigs.k8s.io/yaml v1.5.0/go.mod h1:wZs27Rbxoai4C0f8/9urLZtZtF3avA3gKvGyPdDqTO4=

View File

@@ -243,6 +243,7 @@ const injectedRtkApi = api
type: queryArg['type'],
folder: queryArg.folder,
facet: queryArg.facet,
facetLimit: queryArg.facetLimit,
tags: queryArg.tags,
libraryPanel: queryArg.libraryPanel,
permission: queryArg.permission,
@@ -663,6 +664,8 @@ export type SearchDashboardsAndFoldersApiArg = {
folder?: string;
/** count distinct terms for selected fields */
facet?: string[];
/** maximum number of terms to return per facet (default 50, max 1000) */
facetLimit?: number;
/** tag query filter */
tags?: string[];
/** find dashboards that reference a given libraryPanel */

View File

@@ -3,11 +3,18 @@ import { merge } from 'lodash';
import { toDataFrame } from '../dataframe/processDataFrame';
import { createTheme } from '../themes/createTheme';
import { ReducerID } from '../transformations/fieldReducer';
import { FieldType } from '../types/dataFrame';
import { FieldConfigPropertyItem } from '../types/fieldOverrides';
import { MappingType, SpecialValueMatch, ValueMapping } from '../types/valueMapping';
import { getDisplayProcessor } from './displayProcessor';
import { fixCellTemplateExpressions, getFieldDisplayValues, GetFieldDisplayValuesOptions } from './fieldDisplay';
import {
FieldSparkline,
fixCellTemplateExpressions,
getFieldDisplayValues,
GetFieldDisplayValuesOptions,
getSparklineHighlight,
} from './fieldDisplay';
import { standardFieldConfigEditorRegistry } from './standardFieldConfigEditorRegistry';
describe('FieldDisplay', () => {
@@ -556,3 +563,71 @@ describe('fixCellTemplateExpressions', () => {
);
});
});
describe('getSparklineHighlight', () => {
const sparkline: FieldSparkline = {
y: { name: 'A', type: FieldType.number, values: [null, 2, 3, 4, 10, 8, 8, 8, 9, null], config: {} },
};
it.each([
{
calc: ReducerID.last,
expected: {
type: 'point',
xIdx: 9,
},
},
{
calc: ReducerID.max,
expected: {
type: 'point',
xIdx: 4,
},
},
{
calc: ReducerID.min,
expected: {
type: 'point',
xIdx: 1,
},
},
{
calc: ReducerID.first,
expected: {
type: 'point',
xIdx: 0,
},
},
{
calc: ReducerID.firstNotNull,
expected: {
type: 'point',
xIdx: 1,
},
},
{
calc: ReducerID.lastNotNull,
expected: {
type: 'point',
xIdx: 8,
},
},
{
calc: ReducerID.mean,
expected: {
type: 'line',
y: 6.5,
},
},
{
calc: ReducerID.median,
expected: {
type: 'line',
y: 8,
},
},
])('it calculates the correct highlight for the $calc', ({ calc, expected }) => {
const result = getSparklineHighlight(sparkline, calc);
expect(result).toEqual(expected);
});
});

View File

@@ -126,11 +126,6 @@ export interface FeatureToggles {
*/
disableSSEDataplane?: boolean;
/**
* Writes error logs to the request logger
* @default true
*/
unifiedRequestLog?: boolean;
/**
* Uses JWT-based auth for rendering instead of relying on remote cache
*/
renderAuthJWT?: boolean;
@@ -540,6 +535,10 @@ export interface FeatureToggles {
*/
alertingListViewV2?: boolean;
/**
* Enables saved searches for alert rules list
*/
alertingSavedSearches?: boolean;
/**
* Disables the ability to send alerts to an external Alertmanager datasource.
*/
alertingDisableSendAlertsExternal?: boolean;
@@ -1169,6 +1168,11 @@ export interface FeatureToggles {
*/
externalVizSuggestions?: boolean;
/**
* Enable Y-axis scale configuration options for pre-bucketed heatmap data (heatmap-rows)
* @default false
*/
heatmapRowsAxisOptions?: boolean;
/**
* Restrict PanelChrome contents with overflow: hidden;
* @default true
*/
@@ -1193,6 +1197,11 @@ export interface FeatureToggles {
*/
onlyStoreActionSets?: boolean;
/**
* Show insights for plugins in the plugin details page
* @default false
*/
pluginInsights?: boolean;
/**
* Enables a new panel time settings drawer
*/
panelTimeSettings?: boolean;

View File

@@ -185,6 +185,10 @@ export interface RowsHeatmapOptions {
* Sets the name of the cell when not calculating from data
*/
value?: string;
/**
* Controls the scale distribution of the y-axis buckets
*/
yBucketScale?: ui.ScaleDistributionConfig;
}
export interface Options {

View File

@@ -9,6 +9,7 @@ import {
getBarEndcapColors,
getEndpointMarkerColors,
getGradientCss,
getGradientStopsForPercent,
} from './colors';
export type DeepPartial<T> = {
@@ -254,4 +255,52 @@ describe('RadialGauge color utils', () => {
expect(endDotColor).toBe('#111217');
});
});
describe('getGradientStopsForPercent', () => {
it('should return the correct gradient stops for a given percent', () => {
const gradient = [
{ color: '#ff0000', percent: 0 },
{ color: '#00ff00', percent: 0.5 },
{ color: '#0000ff', percent: 1 },
];
const [left, right] = getGradientStopsForPercent(gradient, 0.25);
expect(left).toEqual({ color: '#ff0000', percent: 0 });
expect(right).toEqual({ color: '#00ff00', percent: 0.5 });
});
it('should handle edge cases where percent is at the boundaries', () => {
const gradient = [
{ color: '#ff0000', percent: 0 },
{ color: '#00ff00', percent: 0.5 },
{ color: '#0000ff', percent: 1 },
];
let [left, right] = getGradientStopsForPercent(gradient, 0);
expect(left).toEqual({ color: '#ff0000', percent: 0 });
expect(right).toEqual({ color: '#ff0000', percent: 0 });
[left, right] = getGradientStopsForPercent(gradient, 1);
expect(left).toEqual({ color: '#0000ff', percent: 1 });
expect(right).toEqual({ color: '#0000ff', percent: 1 });
});
it('should return the same stop if there is one that is equal to the percentage', () => {
const gradient = [
{ color: '#ff0000', percent: 0 },
{ color: '#00ff00', percent: 0.5 },
{ color: '#0000ff', percent: 1 },
];
let [left, right] = getGradientStopsForPercent(gradient, 0);
expect(left).toEqual({ color: '#ff0000', percent: 0 });
expect(right).toEqual({ color: '#ff0000', percent: 0 });
[left, right] = getGradientStopsForPercent(gradient, 0.5);
expect(left).toEqual({ color: '#00ff00', percent: 0.5 });
expect(right).toEqual({ color: '#00ff00', percent: 0.5 });
[left, right] = getGradientStopsForPercent(gradient, 1);
expect(left).toEqual({ color: '#0000ff', percent: 1 });
expect(right).toEqual({ color: '#0000ff', percent: 1 });
});
});
});

View File

@@ -92,6 +92,44 @@ export function buildGradientColors(
];
}
/**
* get the relevant gradient stops surrounding a given percentage. could be same stop if the
* percent matches a stop exactly.
*
* @param sortedGradientStops - gradient stops sorted by percent
* @param percent - percentage 0..1
* @returns {[GradientStop, GradientStop]} - the two gradient stops surrounding the given percentage
*/
export function getGradientStopsForPercent(
sortedGradientStops: GradientStop[],
percent: number
): [GradientStop, GradientStop] {
if (percent <= 0) {
return [sortedGradientStops[0], sortedGradientStops[0]];
}
if (percent >= 1) {
const last = sortedGradientStops.length - 1;
return [sortedGradientStops[last], sortedGradientStops[last]];
}
// find surrounding stops using binary search
let lo = 0;
let hi = sortedGradientStops.length - 1;
while (lo + 1 < hi) {
const mid = (lo + hi) >> 1;
if (percent === sortedGradientStops[mid].percent) {
return [sortedGradientStops[mid], sortedGradientStops[mid]];
}
if (percent < sortedGradientStops[mid].percent) {
hi = mid;
} else {
lo = mid;
}
}
return [sortedGradientStops[lo], sortedGradientStops[hi]];
}
/**
* @alpha - perhaps this should go in colorManipulator.ts
* Given color stops (each with a color and percentage 0..1) returns the color at a given percentage.
@@ -105,34 +143,11 @@ export function colorAtGradientPercent(stops: GradientStop[], percent: number):
throw new Error('colorAtGradientPercent requires at least two color stops');
}
// normalize and sort stops by percent. TODO: is this necessary? is gradientstops always sorted?
const sorted = stops
.map((s) => ({ color: s.color, percent: Math.min(Math.max(0, s.percent), 1) }))
.sort((a, b) => a.percent - b.percent);
// percent outside range
if (percent <= sorted[0].percent) {
return tinycolor(sorted[0].color);
}
if (percent >= sorted[sorted.length - 1].percent) {
return tinycolor(sorted[sorted.length - 1].color);
}
// find surrounding stops using binary search
let lo = 0;
let hi = sorted.length - 1;
while (lo + 1 < hi) {
const mid = (lo + hi) >> 1;
if (percent <= sorted[mid].percent) {
hi = mid;
} else {
lo = mid;
}
}
const left = sorted[lo];
const right = sorted[hi];
.map((s: GradientStop): GradientStop => ({ color: s.color, percent: Math.min(Math.max(0, s.percent), 1) }))
.sort((a: GradientStop, b: GradientStop) => a.percent - b.percent);
const [left, right] = getGradientStopsForPercent(sorted, percent);
const range = right.percent - left.percent;
const t = range === 0 ? 0 : (percent - left.percent) / range; // 0..1
return tinycolor.mix(left.color, right.color, t * 100);
@@ -157,9 +172,10 @@ export function getBarEndcapColors(gradientStops: GradientStop[], percent = 1):
export function getGradientCss(gradientStops: GradientStop[], shape: RadialShape): string {
const colorStrings = gradientStops.map((stop) => `${stop.color} ${(stop.percent * 100).toFixed(2)}%`);
return shape === 'circle'
? `conic-gradient(from 0deg, ${colorStrings.join(', ')})`
: `linear-gradient(90deg, ${colorStrings.join(', ')})`;
if (shape === 'circle') {
return `conic-gradient(from 0deg, ${colorStrings.join(', ')})`;
}
return `linear-gradient(90deg, ${colorStrings.join(', ')})`;
}
// the theme does not make the full palette available to us, and we

View File

@@ -17,7 +17,7 @@ export interface SparklineProps extends Themeable2 {
showHighlights?: boolean;
}
export const Sparkline: React.FC<SparklineProps> = memo((props) => {
export const SparklineFn: React.FC<SparklineProps> = memo((props) => {
const { sparkline, config: fieldConfig, theme, width, height, showHighlights } = props;
const { frame: alignedDataFrame, warning } = prepareSeries(sparkline, theme, fieldConfig, showHighlights);
if (warning) {
@@ -30,4 +30,14 @@ export const Sparkline: React.FC<SparklineProps> = memo((props) => {
return <UPlotChart data={data} config={configBuilder} width={width} height={height} />;
});
Sparkline.displayName = 'Sparkline';
SparklineFn.displayName = 'Sparkline';
// we converted to function component above, but some apps extend Sparkline, so we need
// to keep exporting a class component until those apps are all rolled out.
// see https://github.com/grafana/app-observability-plugin/pull/2079
// eslint-disable-next-line react-prefer-function-component/react-prefer-function-component
export class Sparkline extends React.PureComponent<SparklineProps> {
render() {
return <SparklineFn {...this.props} />;
}
}

View File

@@ -1,6 +1,6 @@
import { Field, FieldSparkline, FieldType } from '@grafana/data';
import { createTheme, Field, FieldSparkline, FieldType, toDataFrame } from '@grafana/data';
import { getYRange, preparePlotFrame } from './utils';
import { getYRange, prepareConfig, preparePlotFrame } from './utils';
describe('Prepare Sparkline plot frame', () => {
it('should return sorted array if x-axis numeric', () => {
@@ -201,3 +201,134 @@ describe('Get y range', () => {
expect(actual[0]).toBeLessThan(actual[1]!);
});
});
describe('prepareConfig', () => {
it('should not throw an error if there are multiple values', () => {
const sparkline: FieldSparkline = {
x: {
name: 'x',
values: [1679839200000, 1680444000000, 1681048800000, 1681653600000, 1682258400000],
type: FieldType.time,
config: {},
},
y: {
name: 'y',
values: [1, 2, 3, 4, 5],
type: FieldType.number,
config: {},
},
};
const dataFrame = toDataFrame({
fields: [sparkline.x, sparkline.y],
});
const config = prepareConfig(sparkline, dataFrame, createTheme());
expect(config.series.length).toBe(1);
});
it('should not throw an error if there is a single value', () => {
const sparkline: FieldSparkline = {
x: {
name: 'x',
values: [1679839200000],
type: FieldType.time,
config: {},
},
y: {
name: 'y',
values: [1],
type: FieldType.number,
config: {},
},
};
const dataFrame = toDataFrame({
fields: [sparkline.x, sparkline.y],
});
const config = prepareConfig(sparkline, dataFrame, createTheme());
expect(config.series.length).toBe(1);
});
it('should not throw an error if there are no values', () => {
const sparkline: FieldSparkline = {
x: {
name: 'x',
values: [],
type: FieldType.time,
config: {},
},
y: {
name: 'y',
values: [],
type: FieldType.number,
config: {},
},
};
const dataFrame = toDataFrame({
fields: [sparkline.x, sparkline.y],
});
const config = prepareConfig(sparkline, dataFrame, createTheme());
expect(config.series.length).toBe(1);
});
it('should set up highlight series if showHighlights is true and highlightIdx exists', () => {
const sparkline: FieldSparkline = {
x: {
name: 'x',
values: [1679839200000, 1680444000000, 1681048800000, 1681653600000, 1682258400000],
type: FieldType.time,
config: {},
},
y: {
name: 'y',
values: [1, 2, 3, 4, 5],
type: FieldType.number,
config: {},
},
highlightIndex: 2,
};
const dataFrame = toDataFrame({
fields: [sparkline.x, sparkline.y],
});
const config = prepareConfig(sparkline, dataFrame, createTheme(), true);
expect(config.series.length).toBe(1);
expect(config.series[0].getConfig().points).toEqual(
expect.objectContaining({
show: true,
filter: [2],
})
);
});
it('should not set up highlight series if showHighlights is false even if highlightIdx exists', () => {
const sparkline: FieldSparkline = {
x: {
name: 'x',
values: [1679839200000, 1680444000000, 1681048800000, 1681653600000, 1682258400000],
type: FieldType.time,
config: {},
},
y: {
name: 'y',
values: [1, 2, 3, 4, 5],
type: FieldType.number,
config: {},
},
highlightIndex: 2,
};
const dataFrame = toDataFrame({
fields: [sparkline.x, sparkline.y],
});
const config = prepareConfig(sparkline, dataFrame, createTheme(), false);
expect(config.series.length).toBe(1);
expect(config.series[0].getConfig().points?.show).not.toBe(true);
});
});

View File

@@ -1,3 +1,4 @@
import { isPlainObject } from 'lodash';
import { useCallback } from 'react';
import * as React from 'react';
@@ -63,7 +64,18 @@ export function CellActions({
tooltip={t('grafana-ui.table.cell-inspect', 'Inspect value')}
onClick={() => {
if (setInspectCell) {
setInspectCell({ value: cell.value, mode: previewMode });
let mode = TableCellInspectorMode.text;
let inspectValue = cell.value;
try {
const parsed = typeof inspectValue === 'string' ? JSON.parse(inspectValue) : inspectValue;
if (Array.isArray(parsed) || isPlainObject(parsed)) {
inspectValue = JSON.stringify(parsed, null, 2);
mode = TableCellInspectorMode.code;
}
} catch {
// do nothing
}
setInspectCell({ value: inspectValue, mode });
}
}}
{...commonButtonProps}

View File

@@ -0,0 +1,78 @@
import { render, screen } from '@testing-library/react';
import { VizLegendTable } from './VizLegendTable';
import { VizLegendItem } from './types';
describe('VizLegendTable', () => {
const mockItems: VizLegendItem[] = [
{ label: 'Series 1', color: 'red', yAxis: 1 },
{ label: 'Series 2', color: 'blue', yAxis: 1 },
{ label: 'Series 3', color: 'green', yAxis: 1 },
];
it('renders without crashing', () => {
const { container } = render(<VizLegendTable items={mockItems} placement="bottom" />);
expect(container.querySelector('table')).toBeInTheDocument();
});
it('renders all items', () => {
render(<VizLegendTable items={mockItems} placement="bottom" />);
expect(screen.getByText('Series 1')).toBeInTheDocument();
expect(screen.getByText('Series 2')).toBeInTheDocument();
expect(screen.getByText('Series 3')).toBeInTheDocument();
});
it('renders table headers when items have display values', () => {
const itemsWithStats: VizLegendItem[] = [
{
label: 'Series 1',
color: 'red',
yAxis: 1,
getDisplayValues: () => [
{ numeric: 100, text: '100', title: 'Max' },
{ numeric: 50, text: '50', title: 'Min' },
],
},
];
render(<VizLegendTable items={itemsWithStats} placement="bottom" />);
expect(screen.getByText('Max')).toBeInTheDocument();
expect(screen.getByText('Min')).toBeInTheDocument();
});
it('renders sort icon when sorted', () => {
const { container } = render(
<VizLegendTable items={mockItems} placement="bottom" sortBy="Name" sortDesc={false} />
);
expect(container.querySelector('svg')).toBeInTheDocument();
});
it('calls onToggleSort when header is clicked', () => {
const onToggleSort = jest.fn();
render(<VizLegendTable items={mockItems} placement="bottom" onToggleSort={onToggleSort} isSortable={true} />);
const header = screen.getByText('Name');
header.click();
expect(onToggleSort).toHaveBeenCalledWith('Name');
});
it('does not call onToggleSort when not sortable', () => {
const onToggleSort = jest.fn();
render(<VizLegendTable items={mockItems} placement="bottom" onToggleSort={onToggleSort} isSortable={false} />);
const header = screen.getByText('Name');
header.click();
expect(onToggleSort).not.toHaveBeenCalled();
});
it('renders with long labels', () => {
const itemsWithLongLabels: VizLegendItem[] = [
{
label: 'This is a very long series name that should be scrollable within its table cell',
color: 'red',
yAxis: 1,
},
];
render(<VizLegendTable items={itemsWithLongLabels} placement="bottom" />);
expect(
screen.getByText('This is a very long series name that should be scrollable within its table cell')
).toBeInTheDocument();
});
});

View File

@@ -119,7 +119,6 @@ const getStyles = (theme: GrafanaTheme2) => ({
table: css({
width: '100%',
'th:first-child': {
width: '100%',
borderBottom: `1px solid ${theme.colors.border.weak}`,
},
}),

View File

@@ -0,0 +1,112 @@
import { render, screen } from '@testing-library/react';
import { LegendTableItem } from './VizLegendTableItem';
import { VizLegendItem } from './types';
// Unit tests for LegendTableItem, the row renderer used by the table-style
// legend. Every test mounts the component inside a real <table><tbody>
// wrapper because the component renders a <tr>, which is only valid markup
// inside a table body.
describe('LegendTableItem', () => {
// Minimal valid legend item; individual tests extend it via spread.
const mockItem: VizLegendItem = {
label: 'Series 1',
color: 'red',
yAxis: 1,
};
it('renders without crashing', () => {
const { container } = render(
<table>
<tbody>
<LegendTableItem item={mockItem} />
</tbody>
</table>
);
// The component's root element is a table row.
expect(container.querySelector('tr')).toBeInTheDocument();
});
it('renders label text', () => {
render(
<table>
<tbody>
<LegendTableItem item={mockItem} />
</tbody>
</table>
);
expect(screen.getByText('Series 1')).toBeInTheDocument();
});
// Long labels must render in full; the cell scrolls rather than truncating.
it('renders with long label text', () => {
const longLabelItem: VizLegendItem = {
...mockItem,
label: 'This is a very long series name that should be scrollable in the table cell',
};
render(
<table>
<tbody>
<LegendTableItem item={longLabelItem} />
</tbody>
</table>
);
expect(
screen.getByText('This is a very long series name that should be scrollable in the table cell')
).toBeInTheDocument();
});
// getDisplayValues supplies the per-stat cells (e.g. Max/Min columns).
it('renders stat values when provided', () => {
const itemWithStats: VizLegendItem = {
...mockItem,
getDisplayValues: () => [
{ numeric: 100, text: '100', title: 'Max' },
{ numeric: 50, text: '50', title: 'Min' },
],
};
render(
<table>
<tbody>
<LegendTableItem item={itemWithStats} />
</tbody>
</table>
);
expect(screen.getByText('100')).toBeInTheDocument();
expect(screen.getByText('50')).toBeInTheDocument();
});
// yAxis === 2 marks a series plotted on the right axis; the component appends
// a "(right y-axis)" suffix to the label in that case.
it('renders right y-axis indicator when yAxis is 2', () => {
const rightAxisItem: VizLegendItem = {
...mockItem,
yAxis: 2,
};
render(
<table>
<tbody>
<LegendTableItem item={rightAxisItem} />
</tbody>
</table>
);
expect(screen.getByText('(right y-axis)')).toBeInTheDocument();
});
it('calls onLabelClick when label is clicked', () => {
const onLabelClick = jest.fn();
render(
<table>
<tbody>
<LegendTableItem item={mockItem} onLabelClick={onLabelClick} />
</tbody>
</table>
);
const button = screen.getByRole('button');
button.click();
// The handler receives the clicked item plus the mouse event object.
expect(onLabelClick).toHaveBeenCalledWith(mockItem, expect.any(Object));
});
// readonly renders the label button disabled, so clicks cannot fire at all.
it('does not call onClick when readonly', () => {
const onLabelClick = jest.fn();
render(
<table>
<tbody>
<LegendTableItem item={mockItem} onLabelClick={onLabelClick} readonly={true} />
</tbody>
</table>
);
const button = screen.getByRole('button');
expect(button).toBeDisabled();
});
});

View File

@@ -69,7 +69,7 @@ export const LegendTableItem = ({
return (
<tr className={cx(styles.row, className)}>
<td>
<td className={styles.labelCell}>
<span className={styles.itemWrapper}>
<VizLegendSeriesIcon
color={item.color}
@@ -77,24 +77,26 @@ export const LegendTableItem = ({
readonly={readonly}
lineStyle={item.lineStyle}
/>
<button
disabled={readonly}
type="button"
title={item.label}
onBlur={onMouseOut}
onFocus={onMouseOver}
onMouseOver={onMouseOver}
onMouseOut={onMouseOut}
onClick={!readonly ? onClick : undefined}
className={cx(styles.label, item.disabled && styles.labelDisabled)}
>
{item.label}{' '}
{item.yAxis === 2 && (
<span className={styles.yAxisLabel}>
<Trans i18nKey="grafana-ui.viz-legend.right-axis-indicator">(right y-axis)</Trans>
</span>
)}
</button>
<div className={styles.labelCellInner}>
<button
disabled={readonly}
type="button"
title={item.label}
onBlur={onMouseOut}
onFocus={onMouseOver}
onMouseOver={onMouseOver}
onMouseOut={onMouseOut}
onClick={!readonly ? onClick : undefined}
className={cx(styles.label, item.disabled && styles.labelDisabled)}
>
{item.label}{' '}
{item.yAxis === 2 && (
<span className={styles.yAxisLabel}>
<Trans i18nKey="grafana-ui.viz-legend.right-axis-indicator">(right y-axis)</Trans>
</span>
)}
</button>
</div>
</span>
</td>
{item.getDisplayValues &&
@@ -128,6 +130,27 @@ const getStyles = (theme: GrafanaTheme2) => {
background: rowHoverBg,
},
}),
labelCell: css({
label: 'LegendLabelCell',
maxWidth: 0,
width: '100%',
}),
labelCellInner: css({
label: 'LegendLabelCellInner',
display: 'block',
flex: 1,
minWidth: 0,
overflowX: 'auto',
overflowY: 'hidden',
paddingRight: theme.spacing(3),
scrollbarWidth: 'none',
msOverflowStyle: 'none',
maskImage: `linear-gradient(to right, black calc(100% - ${theme.spacing(3)}), transparent 100%)`,
WebkitMaskImage: `linear-gradient(to right, black calc(100% - ${theme.spacing(3)}), transparent 100%)`,
'&::-webkit-scrollbar': {
display: 'none',
},
}),
label: css({
label: 'LegendLabel',
whiteSpace: 'nowrap',
@@ -135,9 +158,6 @@ const getStyles = (theme: GrafanaTheme2) => {
border: 'none',
fontSize: 'inherit',
padding: 0,
maxWidth: '600px',
textOverflow: 'ellipsis',
overflow: 'hidden',
userSelect: 'text',
}),
labelDisabled: css({

View File

@@ -10,7 +10,6 @@ import (
"github.com/grafana/grafana/pkg/api/response"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
"github.com/grafana/grafana/pkg/services/ngalert/models"
)
func (hs *HTTPServer) GetAlertNotifiers() func(*contextmodel.ReqContext) response.Response {
@@ -24,13 +23,13 @@ func (hs *HTTPServer) GetAlertNotifiers() func(*contextmodel.ReqContext) respons
}
type NotifierPlugin struct {
Type string `json:"type"`
TypeAlias string `json:"typeAlias,omitempty"`
Name string `json:"name"`
Heading string `json:"heading"`
Description string `json:"description"`
Info string `json:"info"`
Options []Field `json:"options"`
Type string `json:"type"`
TypeAlias string `json:"typeAlias,omitempty"`
Name string `json:"name"`
Heading string `json:"heading"`
Description string `json:"description"`
Info string `json:"info"`
Options []schema.Field `json:"options"`
}
result := make([]*NotifierPlugin, 0, len(v2))
@@ -45,56 +44,9 @@ func (hs *HTTPServer) GetAlertNotifiers() func(*contextmodel.ReqContext) respons
Description: s.Description,
Heading: s.Heading,
Info: s.Info,
Options: schemaFieldsToFields(s.Type, nil, v1.Options),
Options: v1.Options,
})
}
return response.JSON(http.StatusOK, result)
}
}
type Field struct {
Element schema.ElementType `json:"element"`
InputType schema.InputType `json:"inputType"`
Label string `json:"label"`
Description string `json:"description"`
Placeholder string `json:"placeholder"`
PropertyName string `json:"propertyName"`
SelectOptions []schema.SelectOption `json:"selectOptions"`
ShowWhen schema.ShowWhen `json:"showWhen"`
Required bool `json:"required"`
Protected bool `json:"protected,omitempty"`
ValidationRule string `json:"validationRule"`
Secure bool `json:"secure"`
DependsOn string `json:"dependsOn"`
SubformOptions []Field `json:"subformOptions"`
}
// schemaFieldsToFields converts a slice of notifier schema fields into the
// API-facing Field representation. A nil input yields nil rather than an
// empty slice, preserving the caller-visible distinction between the two.
func schemaFieldsToFields(iType schema.IntegrationType, parent schema.IntegrationFieldPath, fields []schema.Field) []Field {
	if fields == nil {
		return nil
	}
	out := make([]Field, len(fields))
	for i := range fields {
		out[i] = schemaFieldToField(iType, parent, fields[i])
	}
	return out
}
// schemaFieldToField maps a single notifier schema.Field onto the local API
// Field type, recursing into SubformOptions and marking the field as
// protected when the integration/field path matches a protected entry.
// NOTE(review): append(parent, f.PropertyName) is evaluated twice; both calls
// produce the same path value and neither result is retained after use, so
// the usual Go append-aliasing hazard does not bite here — confirm if the
// recursion ever starts retaining the path.
func schemaFieldToField(iType schema.IntegrationType, parent schema.IntegrationFieldPath, f schema.Field) Field {
return Field{
Element: f.Element,
InputType: f.InputType,
Label: f.Label,
Description: f.Description,
Placeholder: f.Placeholder,
PropertyName: f.PropertyName,
SelectOptions: f.SelectOptions,
ShowWhen: f.ShowWhen,
Required: f.Required,
ValidationRule: f.ValidationRule,
Secure: f.Secure,
DependsOn: f.DependsOn,
// Child fields receive the extended path so nested protection lookups work.
SubformOptions: schemaFieldsToFields(iType, append(parent, f.PropertyName), f.SubformOptions),
Protected: models.IsProtectedField(iType, append(parent, f.PropertyName)),
}
}

View File

@@ -29,7 +29,8 @@ func ToFolderErrorResponse(err error) response.Response {
errors.Is(err, dashboards.ErrDashboardTypeMismatch) ||
errors.Is(err, dashboards.ErrDashboardInvalidUid) ||
errors.Is(err, dashboards.ErrDashboardUidTooLong) ||
errors.Is(err, folder.ErrFolderCannotBeParentOfItself) {
errors.Is(err, folder.ErrFolderCannotBeParentOfItself) ||
errors.Is(err, folder.ErrMaximumDepthReached) {
return response.Error(http.StatusBadRequest, err.Error(), nil)
}

View File

@@ -30,7 +30,7 @@ func TestToFolderErrorResponse(t *testing.T) {
{
name: "maximum depth reached",
input: folder.ErrMaximumDepthReached.Errorf("Maximum nested folder depth reached"),
want: response.Err(folder.ErrMaximumDepthReached.Errorf("Maximum nested folder depth reached")),
want: response.Error(http.StatusBadRequest, "[folder.maximum-depth-reached] Maximum nested folder depth reached", nil),
},
{
name: "bad request errors",

View File

@@ -214,7 +214,7 @@ func (hs *HTTPServer) MoveFolder(c *contextmodel.ReqContext) response.Response {
cmd.SignedInUser = c.SignedInUser
theFolder, err := hs.folderService.Move(c.Req.Context(), &cmd)
if err != nil {
return response.ErrOrFallback(http.StatusInternalServerError, "move folder failed", err)
return apierrors.ToFolderErrorResponse(err)
}
folderDTO, err := hs.newToFolderDto(c, theFolder)

View File

@@ -64,10 +64,7 @@ func (l *loggerImpl) Middleware() web.Middleware {
// put the start time on context so we can measure it later.
r = r.WithContext(log.InitstartTime(r.Context(), time.Now()))
//nolint:staticcheck // not yet migrated to OpenFeature
if l.flags.IsEnabled(r.Context(), featuremgmt.FlagUnifiedRequestLog) {
r = r.WithContext(errutil.SetUnifiedLogging(r.Context()))
}
r = r.WithContext(errutil.SetUnifiedLogging(r.Context()))
rw := web.Rw(w, r)
next.ServeHTTP(rw, r)

View File

@@ -178,7 +178,7 @@ func setupFromConfig(cfg *setting.Cfg, registry prometheus.Registerer) (controll
APIPath: "/apis",
Host: url,
WrapTransport: transport.WrapperFunc(func(rt http.RoundTripper) http.RoundTripper {
return authrt.NewRoundTripper(tokenExchangeClient, rt, group)
return authrt.NewRoundTripper(tokenExchangeClient, rt, group, authrt.ExtraAudience(provisioning.GROUP))
}),
Transport: &http.Transport{
MaxConnsPerHost: 100,

View File

@@ -115,6 +115,15 @@ func (s *SearchHandler) GetAPIRoutes(defs map[string]common.OpenAPIDefinition) *
Schema: spec.ArrayProperty(spec.StringProperty()),
},
},
{
ParameterProps: spec3.ParameterProps{
Name: "facetLimit",
In: "query",
Description: "maximum number of terms to return per facet (default 50, max 1000)",
Required: false,
Schema: spec.Int64Property(),
},
},
{
ParameterProps: spec3.ParameterProps{
Name: "tags",
@@ -340,6 +349,7 @@ func (s *SearchHandler) DoSearch(w http.ResponseWriter, r *http.Request) {
func convertHttpSearchRequestToResourceSearchRequest(queryParams url.Values, user identity.Requester, getDashboardsUIDsSharedWithUser func() ([]string, error)) (*resourcepb.ResourceSearchRequest, error) {
// get limit and offset from query params
limit := 50
facetLimit := 50
offset := 0
page := 1
if queryParams.Has("limit") {
@@ -422,11 +432,19 @@ func convertHttpSearchRequestToResourceSearchRequest(queryParams url.Values, use
// The facet term fields
if facets, ok := queryParams["facet"]; ok {
if queryParams.Has("facetLimit") {
if parsed, err := strconv.Atoi(queryParams.Get("facetLimit")); err == nil && parsed > 0 {
facetLimit = parsed
if facetLimit > 1000 {
facetLimit = 1000
}
}
}
searchRequest.Facet = make(map[string]*resourcepb.ResourceSearchRequest_Facet)
for _, v := range facets {
searchRequest.Facet[v] = &resourcepb.ResourceSearchRequest_Facet{
Field: v,
Limit: 50,
Limit: int64(facetLimit),
}
}
}

View File

@@ -818,6 +818,38 @@ func TestConvertHttpSearchRequestToResourceSearchRequest(t *testing.T) {
Federated: []*resourcepb.ResourceKey{folderKey},
},
},
"facet fields with custom limit": {
queryString: "facet=tags&facetLimit=500",
expected: &resourcepb.ResourceSearchRequest{
Options: &resourcepb.ListOptions{Key: dashboardKey},
Query: "",
Limit: 50,
Offset: 0,
Page: 1,
Explain: false,
Fields: defaultFields,
Facet: map[string]*resourcepb.ResourceSearchRequest_Facet{
"tags": {Field: "tags", Limit: 500},
},
Federated: []*resourcepb.ResourceKey{folderKey},
},
},
"facet fields with limit exceeding max": {
queryString: "facet=tags&facetLimit=5000",
expected: &resourcepb.ResourceSearchRequest{
Options: &resourcepb.ListOptions{Key: dashboardKey},
Query: "",
Limit: 50,
Offset: 0,
Page: 1,
Explain: false,
Fields: defaultFields,
Facet: map[string]*resourcepb.ResourceSearchRequest_Facet{
"tags": {Field: "tags", Limit: 1000},
},
Federated: []*resourcepb.ResourceKey{folderKey},
},
},
"tag filter": {
queryString: "tag=tag1&tag=tag2",
expected: &resourcepb.ResourceSearchRequest{

View File

@@ -356,7 +356,7 @@ func (b *FolderAPIBuilder) Validate(ctx context.Context, a admission.Attributes,
if !ok {
return fmt.Errorf("obj is not folders.Folder")
}
return validateOnUpdate(ctx, f, old, b.storage, b.parents, folder.MaxNestedFolderDepth)
return validateOnUpdate(ctx, f, old, b.storage, b.parents, b.searcher, folder.MaxNestedFolderDepth)
default:
return nil
}

View File

@@ -376,6 +376,10 @@ func TestFolderAPIBuilder_Validate_Update(t *testing.T) {
m.On("Get", mock.Anything, "new-parent", mock.Anything).Return(
&folders.Folder{},
nil).Once()
// also retrieves old parent for depth difference calculation
m.On("Get", mock.Anything, "valid-parent", mock.Anything).Return(
&folders.Folder{},
nil).Once()
},
},
{

View File

@@ -6,6 +6,7 @@ import (
"slices"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/selection"
"k8s.io/apiserver/pkg/registry/rest"
folders "github.com/grafana/grafana/apps/folder/pkg/apis/folder/v1beta1"
@@ -13,6 +14,7 @@ import (
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/dashboards"
"github.com/grafana/grafana/pkg/services/folder"
"github.com/grafana/grafana/pkg/storage/unified/resource"
"github.com/grafana/grafana/pkg/storage/unified/resourcepb"
"github.com/grafana/grafana/pkg/util"
)
@@ -73,6 +75,7 @@ func validateOnUpdate(ctx context.Context,
old *folders.Folder,
getter rest.Getter,
parents parentsGetter,
searcher resourcepb.ResourceIndexClient,
maxDepth int,
) error {
folderObj, err := utils.MetaAccessor(obj)
@@ -95,7 +98,10 @@ func validateOnUpdate(ctx context.Context,
// Validate the move operation
newParent := folderObj.GetFolder()
// If we move to root, we don't need to validate the depth.
// If we move to root, we don't need to validate the depth, because the folder already existed
// before and wasn't too deep. This move will make it more shallow.
//
// We also don't need to validate circular references because the root folder cannot have a parent.
if newParent == folder.RootFolderUID {
return nil
}
@@ -113,9 +119,6 @@ func validateOnUpdate(ctx context.Context,
if !ok {
return fmt.Errorf("expected folder, found %T", parentObj)
}
//FIXME: until we have a way to represent the tree, we can only
// look at folder parents to check how deep the new folder tree will be
info, err := parents(ctx, parent)
if err != nil {
return err
@@ -129,13 +132,162 @@ func validateOnUpdate(ctx context.Context,
}
}
// if by moving a folder we exceed the max depth, return an error
// if by moving a folder we exceed the max depth just from its parents + itself, return an error
if len(info.Items) > maxDepth+1 {
return folder.ErrMaximumDepthReached.Errorf("maximum folder depth reached")
}
// To try to save some computation, get the parents of the old parent (this is typically cheaper
// than looking at the children of the folder). If the old parent has more parents or the same
// number of parents as the new parent, we can return early, because we know the folder had to be
// safe from the creation validation. If we cannot access the older parent, we will continue to check the children.
if canSkipChildrenCheck(ctx, oldFolder, getter, parents, len(info.Items)) {
return nil
}
// Now comes the more expensive part: we need to check if moving this folder will cause
// any descendant folders to exceed the max depth.
//
// Calculate the maximum allowed subtree depth after the move.
allowedDepth := (maxDepth + 1) - len(info.Items)
if allowedDepth <= 0 {
return nil
}
return checkSubtreeDepth(ctx, searcher, obj.Namespace, obj.Name, allowedDepth, maxDepth)
}
// canSkipChildrenCheck reports whether the expensive descendant-depth check can
// be skipped. When the folder's previous parent sat at least as deep as the new
// parent, the move cannot push any descendant past the maximum depth, because
// the subtree was already valid at its old (equal or deeper) position. Any
// failure to load the old parent or its ancestry conservatively returns false
// so the full children check still runs.
func canSkipChildrenCheck(ctx context.Context, oldFolder utils.GrafanaMetaAccessor, getter rest.Getter, parents parentsGetter, newParentDepth int) bool {
	previousParentUID := oldFolder.GetFolder()
	if previousParentUID == folder.RootFolderUID {
		return false
	}

	raw, err := getter.Get(ctx, previousParentUID, &metav1.GetOptions{})
	if err != nil {
		return false
	}
	previousParent, ok := raw.(*folders.Folder)
	if !ok {
		return false
	}

	ancestry, err := parents(ctx, previousParent)
	if err != nil {
		return false
	}

	// Depth did not increase => the moved subtree still fits.
	return newParentDepth <= len(ancestry.Items)
}
// checkSubtreeDepth uses a hybrid DFS+batching approach:
// 1. fetches one page of children for the current folder(s)
// 2. batches all those children into one request to get their children
// 3. continues depth-first (batching still) until max depth or violation
// 4. only fetches more siblings after fully exploring current batch
//
// remainingDepth is the number of levels (the moved folder itself plus its
// descendants) that may still exist below the new parent; maxDepth is passed
// along only for the error message. Returns folder.ErrMaximumDepthReached
// when the subtree rooted at folderUID is deeper than remainingDepth allows.
func checkSubtreeDepth(ctx context.Context, searcher resourcepb.ResourceIndexClient, namespace string, folderUID string, remainingDepth int, maxDepth int) error {
// A non-positive budget means nothing below this point can be validated here.
if remainingDepth <= 0 {
return nil
}
// Start with the folder being moved
return checkSubtreeDepthBatched(ctx, searcher, namespace, []string{folderUID}, remainingDepth, maxDepth)
}
// checkSubtreeDepthBatched checks depth for a batch of folders at the same level
//
// It pages through the combined children of parentUIDs (pageSize rows at a
// time) and recurses one level deeper per batch, decrementing remainingDepth.
// Returns nil when the whole subtree fits; folder.ErrMaximumDepthReached as
// soon as a child is found at the last allowed level.
func checkSubtreeDepthBatched(ctx context.Context, searcher resourcepb.ResourceIndexClient, namespace string, parentUIDs []string, remainingDepth int, maxDepth int) error {
if remainingDepth <= 0 || len(parentUIDs) == 0 {
return nil
}
const pageSize int64 = 1000
var offset int64
totalPages := 0
hasMore := true
// Using an upper limit to ensure no infinite loops can happen
// NOTE(review): if the 1000-page cap is ever hit, the loop exits and returns
// nil, i.e. the move is silently allowed — confirm that is the intended
// trade-off for pathological trees (up to 1,000,000 children per level).
for hasMore && totalPages < 1000 {
totalPages++
var err error
var children []string
children, hasMore, err = getChildrenBatch(ctx, searcher, namespace, parentUIDs, pageSize, offset)
if err != nil {
return fmt.Errorf("failed to get children: %w", err)
}
// No children on this page: nothing deeper to validate.
if len(children) == 0 {
return nil
}
// if we are at the last allowed depth and children exist, we will hit the max
if remainingDepth == 1 {
return folder.ErrMaximumDepthReached.Errorf("maximum folder depth %d would be exceeded after move", maxDepth)
}
// Depth-first: fully validate this batch's subtree before the next page.
if err := checkSubtreeDepthBatched(ctx, searcher, namespace, children, remainingDepth-1, maxDepth); err != nil {
return err
}
if !hasMore {
return nil
}
offset += pageSize
}
return nil
}
// getChildrenBatch returns one page of child-folder UIDs whose parent is any
// member of parentUIDs, together with a flag indicating whether further pages
// remain (derived from the search response's NextPageToken).
func getChildrenBatch(ctx context.Context, searcher resourcepb.ResourceIndexClient, namespace string, parentUIDs []string, limit int64, offset int64) ([]string, bool, error) {
	if len(parentUIDs) == 0 {
		return nil, false, nil
	}

	gvr := folders.FolderResourceInfo.GroupVersionResource()
	req := &resourcepb.ResourceSearchRequest{
		Options: &resourcepb.ListOptions{
			Key: &resourcepb.ResourceKey{
				Namespace: namespace,
				Group:     gvr.Group,
				Resource:  gvr.Resource,
			},
			// Match folders whose parent field is IN the given set of UIDs.
			Fields: []*resourcepb.Requirement{{
				Key:      resource.SEARCH_FIELD_FOLDER,
				Operator: string(selection.In),
				Values:   parentUIDs,
			}},
		},
		Limit:  limit,
		Offset: offset,
	}

	resp, err := searcher.Search(ctx, req)
	if err != nil {
		return nil, false, fmt.Errorf("failed to search folders: %w", err)
	}
	if resp.Error != nil {
		return nil, false, fmt.Errorf("search error: %s", resp.Error.Message)
	}
	if resp.Results == nil || len(resp.Results.Rows) == 0 {
		return nil, false, nil
	}

	uids := make([]string, 0, len(resp.Results.Rows))
	for _, row := range resp.Results.Rows {
		if row.Key != nil {
			uids = append(uids, row.Key.Name)
		}
	}
	return uids, resp.Results.NextPageToken != "", nil
}
func validateOnDelete(ctx context.Context,
f *folders.Folder,
searcher resourcepb.ResourceIndexClient,

View File

@@ -282,6 +282,7 @@ func TestValidateUpdate(t *testing.T) {
old *folders.Folder
parents *folders.FolderInfoList
parentsError error
allFolders []folders.Folder
expectedErr string
maxDepth int // defaults to 5 unless set
}{
@@ -454,6 +455,74 @@ func TestValidateUpdate(t *testing.T) {
},
expectedErr: "cannot move folder under its own descendant",
},
{
name: "error when moving folder from root to level2 with children exceeds max depth",
folder: &folders.Folder{
ObjectMeta: metav1.ObjectMeta{
Name: "folderWithChildren",
Annotations: map[string]string{
utils.AnnoKeyFolder: "level2",
},
},
Spec: folders.FolderSpec{
Title: "folder with children",
},
},
old: &folders.Folder{
ObjectMeta: metav1.ObjectMeta{
Name: "folderWithChildren",
},
Spec: folders.FolderSpec{
Title: "folder with children",
},
},
parents: &folders.FolderInfoList{
Items: []folders.FolderInfo{
{Name: "level2", Parent: "level1"},
{Name: "level1", Parent: folder.GeneralFolderUID},
{Name: folder.GeneralFolderUID},
},
},
allFolders: []folders.Folder{
{ObjectMeta: metav1.ObjectMeta{Name: "child1", Annotations: map[string]string{utils.AnnoKeyFolder: "folderWithChildren"}}},
{ObjectMeta: metav1.ObjectMeta{Name: "grandchild1", Annotations: map[string]string{utils.AnnoKeyFolder: "child1"}}},
},
maxDepth: 4,
expectedErr: "[folder.maximum-depth-reached]",
},
{
name: "can move folder from root level to level1 with children when within max depth",
folder: &folders.Folder{
ObjectMeta: metav1.ObjectMeta{
Name: "folderWithChildren",
Annotations: map[string]string{
utils.AnnoKeyFolder: "level1",
},
},
Spec: folders.FolderSpec{
Title: "folder with children",
},
},
old: &folders.Folder{
ObjectMeta: metav1.ObjectMeta{
Name: "folderWithChildren",
},
Spec: folders.FolderSpec{
Title: "folder with children",
},
},
parents: &folders.FolderInfoList{
Items: []folders.FolderInfo{
{Name: "level1", Parent: folder.GeneralFolderUID},
{Name: folder.GeneralFolderUID},
},
},
allFolders: []folders.Folder{
{ObjectMeta: metav1.ObjectMeta{Name: "child1", Annotations: map[string]string{utils.AnnoKeyFolder: "folderWithChildren"}}},
{ObjectMeta: metav1.ObjectMeta{Name: "grandchild1", Annotations: map[string]string{utils.AnnoKeyFolder: "child1"}}},
},
maxDepth: 4,
},
}
for _, tt := range tests {
@@ -474,11 +543,17 @@ func TestValidateUpdate(t *testing.T) {
}, nil).Maybe()
}
}
for i := range tt.allFolders {
f := tt.allFolders[i]
m.On("Get", context.Background(), f.Name, &metav1.GetOptions{}).Return(&f, nil).Maybe()
}
err := validateOnUpdate(context.Background(), tt.folder, tt.old, m,
func(ctx context.Context, folder *folders.Folder) (*folders.FolderInfoList, error) {
return tt.parents, tt.parentsError
}, maxDepth)
},
&mockSearchClient{folders: tt.allFolders},
maxDepth)
if tt.expectedErr == "" {
require.NoError(t, err)
@@ -693,8 +768,7 @@ type mockSearchClient struct {
stats *resourcepb.ResourceStatsResponse
statsErr error
search *resourcepb.ResourceSearchResponse
searchErr error
folders []folders.Folder
}
// GetStats implements resourcepb.ResourceIndexClient.
@@ -703,8 +777,37 @@ func (m *mockSearchClient) GetStats(ctx context.Context, in *resourcepb.Resource
}
// Search implements resourcepb.ResourceIndexClient.
func (m *mockSearchClient) Search(ctx context.Context, in *resourcepb.ResourceSearchRequest, opts ...grpc.CallOption) (*resourcepb.ResourceSearchResponse, error) {
return m.search, m.searchErr
// Search is a test double that emulates the folder-children query: it extracts
// the "folder in (...)" requirement from the request and returns every folder
// in m.folders whose parent annotation matches one of those UIDs.
// NOTE(review): NextPageToken is never set, so callers always see hasMore ==
// false — pagination paths are not exercised by this mock.
func (m *mockSearchClient) Search(ctx context.Context, req *resourcepb.ResourceSearchRequest, opts ...grpc.CallOption) (*resourcepb.ResourceSearchResponse, error) {
// get the list of parents from the search request
parentSet := make(map[string]bool)
if req.Options != nil && req.Options.Fields != nil {
for _, field := range req.Options.Fields {
if field.Key == "folder" && field.Operator == "in" {
for _, v := range field.Values {
parentSet[v] = true
}
}
}
}
// find children that match the parent filter
var rows []*resourcepb.ResourceTableRow
for i := range m.folders {
meta, err := utils.MetaAccessor(&m.folders[i])
if err != nil {
// Skip fixtures that cannot produce a meta accessor.
continue
}
parentUID := meta.GetFolder()
if parentSet[parentUID] {
rows = append(rows, &resourcepb.ResourceTableRow{
Key: &resourcepb.ResourceKey{Name: m.folders[i].Name},
})
}
}
return &resourcepb.ResourceSearchResponse{
Results: &resourcepb.ResourceTable{Rows: rows},
}, nil
}
// RebuildIndexes implements resourcepb.ResourceIndexClient.

View File

@@ -12,6 +12,12 @@ const (
ActionProvisioningRepositoriesRead = "provisioning.repositories:read" // GET + LIST.
ActionProvisioningRepositoriesDelete = "provisioning.repositories:delete" // DELETE.
// Connections
ActionProvisioningConnectionsCreate = "provisioning.connections:create" // CREATE.
ActionProvisioningConnectionsWrite = "provisioning.connections:write" // UPDATE.
ActionProvisioningConnectionsRead = "provisioning.connections:read" // GET + LIST.
ActionProvisioningConnectionsDelete = "provisioning.connections:delete" // DELETE.
// Jobs
ActionProvisioningJobsCreate = "provisioning.jobs:create" // CREATE.
ActionProvisioningJobsWrite = "provisioning.jobs:write" // UPDATE.
@@ -20,6 +26,12 @@ const (
// Historic Jobs
ActionProvisioningHistoricJobsRead = "provisioning.historicjobs:read" // GET + LIST.
// Settings (read-only, needed by multiple UI pages)
ActionProvisioningSettingsRead = "provisioning.settings:read" // GET + LIST.
// Stats (read-only, admin-only)
ActionProvisioningStatsRead = "provisioning.stats:read" // GET + LIST.
)
func registerAccessControlRoles(service accesscontrol.Service) error {
@@ -63,6 +75,46 @@ func registerAccessControlRoles(service accesscontrol.Service) error {
Grants: []string{string(org.RoleAdmin)},
}
// Connections
connectionsReader := accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:provisioning.connections:reader",
DisplayName: "Connections Reader",
Description: "Read and list provisioning connections.",
Group: "Provisioning",
Permissions: []accesscontrol.Permission{
{
Action: ActionProvisioningConnectionsRead,
},
},
},
Grants: []string{string(org.RoleAdmin)},
}
connectionsWriter := accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:provisioning.connections:writer",
DisplayName: "Connections Writer",
Description: "Create, update and delete provisioning connections.",
Group: "Provisioning",
Permissions: []accesscontrol.Permission{
{
Action: ActionProvisioningConnectionsCreate,
},
{
Action: ActionProvisioningConnectionsRead,
},
{
Action: ActionProvisioningConnectionsWrite,
},
{
Action: ActionProvisioningConnectionsDelete,
},
},
},
Grants: []string{string(org.RoleAdmin)},
}
// Jobs
jobsReader := accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
@@ -119,11 +171,47 @@ func registerAccessControlRoles(service accesscontrol.Service) error {
Grants: []string{string(org.RoleAdmin)},
}
// Settings - granted to Viewer (accessible by all logged-in users)
settingsReader := accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:provisioning.settings:reader",
DisplayName: "Settings Reader",
Description: "Read provisioning settings.",
Group: "Provisioning",
Permissions: []accesscontrol.Permission{
{
Action: ActionProvisioningSettingsRead,
},
},
},
Grants: []string{string(org.RoleViewer)},
}
// Stats - granted to Admin only
statsReader := accesscontrol.RoleRegistration{
Role: accesscontrol.RoleDTO{
Name: "fixed:provisioning.stats:reader",
DisplayName: "Stats Reader",
Description: "Read provisioning stats.",
Group: "Provisioning",
Permissions: []accesscontrol.Permission{
{
Action: ActionProvisioningStatsRead,
},
},
},
Grants: []string{string(org.RoleAdmin)},
}
return service.DeclareFixedRoles(
repositoriesReader,
repositoriesWriter,
connectionsReader,
connectionsWriter,
jobsReader,
jobsWriter,
historicJobsReader,
settingsReader,
statsReader,
)
}

View File

@@ -13,9 +13,10 @@ import (
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana-app-sdk/logging"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/auth"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
@@ -26,12 +27,12 @@ const (
type filesConnector struct {
getter RepoGetter
access authlib.AccessChecker
access auth.AccessChecker
parsers resources.ParserFactory
clients resources.ClientFactory
}
func NewFilesConnector(getter RepoGetter, parsers resources.ParserFactory, clients resources.ClientFactory, access authlib.AccessChecker) *filesConnector {
func NewFilesConnector(getter RepoGetter, parsers resources.ParserFactory, clients resources.ClientFactory, access auth.AccessChecker) *filesConnector {
return &filesConnector{getter: getter, parsers: parsers, clients: clients, access: access}
}
@@ -74,179 +75,233 @@ func (c *filesConnector) Connect(ctx context.Context, name string, opts runtime.
ctx = logging.Context(ctx, logger)
return WithTimeout(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
repo, err := c.getRepo(ctx, r.Method, name)
if err != nil {
logger.Debug("failed to find repository", "error", err)
responder.Error(err)
return
}
readWriter, ok := repo.(repository.ReaderWriter)
if !ok {
responder.Error(apierrors.NewBadRequest("repository does not support read-writing"))
return
}
parser, err := c.parsers.GetParser(ctx, readWriter)
if err != nil {
responder.Error(fmt.Errorf("failed to get parser: %w", err))
return
}
clients, err := c.clients.Clients(ctx, repo.Config().Namespace)
if err != nil {
responder.Error(fmt.Errorf("failed to get clients: %w", err))
return
}
folderClient, err := clients.Folder(ctx)
if err != nil {
responder.Error(fmt.Errorf("failed to get folder client: %w", err))
return
}
folders := resources.NewFolderManager(readWriter, folderClient, resources.NewEmptyFolderTree())
dualReadWriter := resources.NewDualReadWriter(readWriter, parser, folders, c.access)
query := r.URL.Query()
opts := resources.DualWriteOptions{
Ref: query.Get("ref"),
Message: query.Get("message"),
SkipDryRun: query.Get("skipDryRun") == "true",
OriginalPath: query.Get("originalPath"),
Branch: repo.Config().Branch(),
}
logger := logger.With("url", r.URL.Path, "ref", opts.Ref, "message", opts.Message)
ctx := logging.Context(r.Context(), logger)
opts.Path, err = pathAfterPrefix(r.URL.Path, fmt.Sprintf("/%s/files", name))
if err != nil {
responder.Error(apierrors.NewBadRequest(err.Error()))
return
}
if err := resources.IsPathSupported(opts.Path); err != nil {
responder.Error(apierrors.NewBadRequest(err.Error()))
return
}
isDir := safepath.IsDir(opts.Path)
if r.Method == http.MethodGet && isDir {
files, err := c.listFolderFiles(ctx, opts.Path, opts.Ref, readWriter)
if err != nil {
responder.Error(err)
return
}
responder.Object(http.StatusOK, files)
return
}
if opts.Path == "" {
responder.Error(apierrors.NewBadRequest("missing request path"))
return
}
var obj *provisioning.ResourceWrapper
code := http.StatusOK
switch r.Method {
case http.MethodGet:
resource, err := dualReadWriter.Read(ctx, opts.Path, opts.Ref)
if err != nil {
respondWithError(responder, err)
return
}
obj = resource.AsResourceWrapper()
case http.MethodPost:
// Check if this is a move operation first (originalPath query parameter is present)
if opts.OriginalPath != "" {
// For move operations, only read body for file moves (not directory moves)
if !isDir {
opts.Data, err = readBody(r, filesMaxBodySize)
if err != nil {
responder.Error(err)
return
}
}
resource, err := dualReadWriter.MoveResource(ctx, opts)
if err != nil {
respondWithError(responder, err)
return
}
obj = resource.AsResourceWrapper()
} else if isDir {
obj, err = dualReadWriter.CreateFolder(ctx, opts)
} else {
opts.Data, err = readBody(r, filesMaxBodySize)
if err != nil {
responder.Error(err)
return
}
var resource *resources.ParsedResource
resource, err = dualReadWriter.CreateResource(ctx, opts)
if err != nil {
respondWithError(responder, err)
return
}
obj = resource.AsResourceWrapper()
}
case http.MethodPut:
// TODO: document in API specification
if isDir {
err = apierrors.NewMethodNotSupported(provisioning.RepositoryResourceInfo.GroupResource(), r.Method)
} else {
opts.Data, err = readBody(r, filesMaxBodySize)
if err != nil {
responder.Error(err)
return
}
resource, err := dualReadWriter.UpdateResource(ctx, opts)
if err != nil {
respondWithError(responder, err)
return
}
obj = resource.AsResourceWrapper()
}
case http.MethodDelete:
resource, err := dualReadWriter.Delete(ctx, opts)
if err != nil {
respondWithError(responder, err)
return
}
obj = resource.AsResourceWrapper()
default:
err = apierrors.NewMethodNotSupported(provisioning.RepositoryResourceInfo.GroupResource(), r.Method)
}
if err != nil {
logger.Debug("got an error after processing request", "error", err)
responder.Error(err)
return
}
if len(obj.Errors) > 0 {
code = http.StatusPartialContent
}
logger.Debug("request resulted in valid object", "object", obj)
responder.Object(code, obj)
c.handleRequest(ctx, name, r, responder, logger)
}), 30*time.Second), nil
}
// listFolderFiles returns a list of files in a folder
func (c *filesConnector) listFolderFiles(ctx context.Context, filePath string, ref string, readWriter repository.ReaderWriter) (*provisioning.FileList, error) {
id, err := identity.GetRequester(ctx)
// handleRequest processes the HTTP request for files operations.
func (c *filesConnector) handleRequest(ctx context.Context, name string, r *http.Request, responder rest.Responder, logger logging.Logger) {
repo, err := c.getRepo(ctx, r.Method, name)
if err != nil {
return nil, fmt.Errorf("missing auth info in context")
logger.Debug("failed to find repository", "error", err)
responder.Error(err)
return
}
// TODO: replace with access check on the repo itself
if !id.GetOrgRole().Includes(identity.RoleAdmin) {
return nil, apierrors.NewForbidden(resources.DashboardResource.GroupResource(), "",
fmt.Errorf("requires admin role"))
readWriter, ok := repo.(repository.ReaderWriter)
if !ok {
responder.Error(apierrors.NewBadRequest("repository does not support read-writing"))
return
}
dualReadWriter, err := c.createDualReadWriter(ctx, repo, readWriter)
if err != nil {
responder.Error(err)
return
}
opts, err := c.parseRequestOptions(r, name, repo)
if err != nil {
responder.Error(apierrors.NewBadRequest(err.Error()))
return
}
logger = logger.With("url", r.URL.Path, "ref", opts.Ref, "message", opts.Message)
ctx = logging.Context(r.Context(), logger)
// Handle directory listing separately
isDir := safepath.IsDir(opts.Path)
if r.Method == http.MethodGet && isDir {
c.handleDirectoryListing(ctx, name, opts, readWriter, responder)
return
}
if opts.Path == "" {
responder.Error(apierrors.NewBadRequest("missing request path"))
return
}
obj, err := c.handleMethodRequest(ctx, r, opts, isDir, dualReadWriter)
if err != nil {
logger.Debug("got an error after processing request", "error", err)
respondWithError(responder, err)
return
}
code := http.StatusOK
if len(obj.Errors) > 0 {
code = http.StatusPartialContent
}
logger.Debug("request resulted in valid object", "object", obj)
responder.Object(code, obj)
}
// createDualReadWriter wires up a DualReadWriter with the parser, folder
// manager, and access checker required to serve file operations for the
// given repository.
func (c *filesConnector) createDualReadWriter(ctx context.Context, repo repository.Repository, readWriter repository.ReaderWriter) (*resources.DualReadWriter, error) {
	parser, err := c.parsers.GetParser(ctx, readWriter)
	if err != nil {
		return nil, fmt.Errorf("failed to get parser: %w", err)
	}

	clients, err := c.clients.Clients(ctx, repo.Config().Namespace)
	if err != nil {
		return nil, fmt.Errorf("failed to get clients: %w", err)
	}

	folderClient, err := clients.Folder(ctx)
	if err != nil {
		return nil, fmt.Errorf("failed to get folder client: %w", err)
	}

	folderManager := resources.NewFolderManager(readWriter, folderClient, resources.NewEmptyFolderTree())
	return resources.NewDualReadWriter(readWriter, parser, folderManager, c.access), nil
}
// parseRequestOptions builds the DualWriteOptions for this request from the
// URL query string and the portion of the path after "/{name}/files".
func (c *filesConnector) parseRequestOptions(r *http.Request, name string, repo repository.Repository) (resources.DualWriteOptions, error) {
	q := r.URL.Query()
	opts := resources.DualWriteOptions{
		Ref:          q.Get("ref"),
		Message:      q.Get("message"),
		SkipDryRun:   q.Get("skipDryRun") == "true",
		OriginalPath: q.Get("originalPath"),
		Branch:       repo.Config().Branch(),
	}

	filePath, err := pathAfterPrefix(r.URL.Path, fmt.Sprintf("/%s/files", name))
	if err != nil {
		return opts, err
	}
	opts.Path = filePath

	// Reject unsupported paths up front so handlers can assume validity.
	if err := resources.IsPathSupported(opts.Path); err != nil {
		return opts, err
	}
	return opts, nil
}
// handleDirectoryListing serves GET requests that target a directory by
// authorizing the caller and then responding with the folder's file list.
func (c *filesConnector) handleDirectoryListing(ctx context.Context, name string, opts resources.DualWriteOptions, readWriter repository.ReaderWriter, responder rest.Responder) {
	if err := c.authorizeListFiles(ctx, name); err != nil {
		responder.Error(err)
		return
	}

	list, err := c.listFolderFiles(ctx, opts.Path, opts.Ref, readWriter)
	if err != nil {
		responder.Error(err)
		return
	}

	responder.Object(http.StatusOK, list)
}
// handleMethodRequest dispatches the request to the handler matching its
// HTTP method; unsupported methods yield a MethodNotSupported error.
func (c *filesConnector) handleMethodRequest(ctx context.Context, r *http.Request, opts resources.DualWriteOptions, isDir bool, dualReadWriter *resources.DualReadWriter) (*provisioning.ResourceWrapper, error) {
	switch method := r.Method; method {
	case http.MethodGet:
		return c.handleGet(ctx, opts, dualReadWriter)
	case http.MethodPost:
		return c.handlePost(ctx, r, opts, isDir, dualReadWriter)
	case http.MethodPut:
		return c.handlePut(ctx, r, opts, isDir, dualReadWriter)
	case http.MethodDelete:
		return c.handleDelete(ctx, opts, dualReadWriter)
	default:
		return nil, apierrors.NewMethodNotSupported(provisioning.RepositoryResourceInfo.GroupResource(), method)
	}
}
// handleGet reads a single file at opts.Path (optionally at opts.Ref) and
// returns it wrapped as a provisioning resource.
func (c *filesConnector) handleGet(ctx context.Context, opts resources.DualWriteOptions, dualReadWriter *resources.DualReadWriter) (*provisioning.ResourceWrapper, error) {
	res, err := dualReadWriter.Read(ctx, opts.Path, opts.Ref)
	if err != nil {
		return nil, err
	}
	return res.AsResourceWrapper(), nil
}
// handlePost creates a folder or file resource. When the originalPath query
// parameter is present, the request is treated as a move instead of a create.
func (c *filesConnector) handlePost(ctx context.Context, r *http.Request, opts resources.DualWriteOptions, isDir bool, dualReadWriter *resources.DualReadWriter) (*provisioning.ResourceWrapper, error) {
	// A non-empty originalPath marks this POST as a move operation.
	if opts.OriginalPath != "" {
		return c.handleMove(ctx, r, opts, isDir, dualReadWriter)
	}

	// Directories carry no body; create the folder directly.
	if isDir {
		return dualReadWriter.CreateFolder(ctx, opts)
	}

	body, err := readBody(r, filesMaxBodySize)
	if err != nil {
		return nil, err
	}
	opts.Data = body

	created, err := dualReadWriter.CreateResource(ctx, opts)
	if err != nil {
		return nil, err
	}
	return created.AsResourceWrapper(), nil
}
// handleMove relocates a resource from opts.OriginalPath to opts.Path.
// File moves may carry a new body in the request; directory moves never do.
func (c *filesConnector) handleMove(ctx context.Context, r *http.Request, opts resources.DualWriteOptions, isDir bool, dualReadWriter *resources.DualReadWriter) (*provisioning.ResourceWrapper, error) {
	// Only file moves read the request body; directory moves are metadata-only.
	if !isDir {
		body, err := readBody(r, filesMaxBodySize)
		if err != nil {
			return nil, err
		}
		opts.Data = body
	}

	moved, err := dualReadWriter.MoveResource(ctx, opts)
	if err != nil {
		return nil, err
	}
	return moved.AsResourceWrapper(), nil
}
// handlePut updates an existing file with the request body. Directories
// cannot be updated, so PUT on a directory path is method-not-supported.
func (c *filesConnector) handlePut(ctx context.Context, r *http.Request, opts resources.DualWriteOptions, isDir bool, dualReadWriter *resources.DualReadWriter) (*provisioning.ResourceWrapper, error) {
	if isDir {
		return nil, apierrors.NewMethodNotSupported(provisioning.RepositoryResourceInfo.GroupResource(), r.Method)
	}

	body, err := readBody(r, filesMaxBodySize)
	if err != nil {
		return nil, err
	}
	opts.Data = body

	updated, err := dualReadWriter.UpdateResource(ctx, opts)
	if err != nil {
		return nil, err
	}
	return updated.AsResourceWrapper(), nil
}
// handleDelete removes the resource at opts.Path and returns its wrapper.
func (c *filesConnector) handleDelete(ctx context.Context, opts resources.DualWriteOptions, dualReadWriter *resources.DualReadWriter) (*provisioning.ResourceWrapper, error) {
	deleted, err := dualReadWriter.Delete(ctx, opts)
	if err != nil {
		return nil, err
	}
	return deleted.AsResourceWrapper(), nil
}
// authorizeListFiles checks repositories:read permission before a directory
// listing. The access checker resolves AccessPolicy identities, namespace
// resolution, and role-based fallback internally.
func (c *filesConnector) authorizeListFiles(ctx context.Context, repoName string) error {
	req := authlib.CheckRequest{
		Verb:     utils.VerbGet,
		Group:    provisioning.GROUP,
		Resource: provisioning.RepositoryResourceInfo.GetName(),
		Name:     repoName,
	}
	return c.access.Check(ctx, req, "")
}
// listFolderFiles returns a list of files in a folder.
// Authorization is checked via authorizeListFiles before calling this function.
func (c *filesConnector) listFolderFiles(ctx context.Context, filePath string, ref string, readWriter repository.ReaderWriter) (*provisioning.FileList, error) {
// TODO: Implement folder navigation
if len(filePath) > 0 {
return nil, apierrors.NewBadRequest("folder navigation not yet supported")

View File

@@ -29,6 +29,7 @@ import (
"github.com/grafana/grafana-app-sdk/logging"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/auth"
connectionvalidation "github.com/grafana/grafana/apps/provisioning/pkg/connection"
appcontroller "github.com/grafana/grafana/apps/provisioning/pkg/controller"
clientset "github.com/grafana/grafana/apps/provisioning/pkg/generated/clientset/versioned"
@@ -111,7 +112,10 @@ type APIBuilder struct {
unified resource.ResourceClient
repoFactory repository.Factory
client client.ProvisioningV0alpha1Interface
access authlib.AccessChecker
access auth.AccessChecker
accessWithAdmin auth.AccessChecker
accessWithEditor auth.AccessChecker
accessWithViewer auth.AccessChecker
statusPatcher *appcontroller.RepositoryStatusPatcher
healthChecker *controller.HealthChecker
validator repository.RepositoryValidator
@@ -158,6 +162,14 @@ func NewAPIBuilder(
parsers := resources.NewParserFactory(clients)
resourceLister := resources.NewResourceListerForMigrations(unified)
// Create access checker based on mode
var accessChecker auth.AccessChecker
if useExclusivelyAccessCheckerForAuthz {
accessChecker = auth.NewTokenAccessChecker(access)
} else {
accessChecker = auth.NewSessionAccessChecker(access)
}
b := &APIBuilder{
onlyApiServer: onlyApiServer,
tracer: tracer,
@@ -170,7 +182,10 @@ func NewAPIBuilder(
resourceLister: resourceLister,
dashboardAccess: dashboardAccess,
unified: unified,
access: access,
access: accessChecker,
accessWithAdmin: accessChecker.WithFallbackRole(identity.RoleAdmin),
accessWithEditor: accessChecker.WithFallbackRole(identity.RoleEditor),
accessWithViewer: accessChecker.WithFallbackRole(identity.RoleViewer),
jobHistoryConfig: jobHistoryConfig,
extraWorkers: extraWorkers,
restConfigGetter: restConfigGetter,
@@ -298,161 +313,142 @@ func (b *APIBuilder) GetAuthorizer() authorizer.Authorizer {
}
}
info, ok := authlib.AuthInfoFrom(ctx)
// when running as standalone API server, the identity type may not always match TypeAccessPolicy
// so we allow it to use the access checker if there is any auth info available
if ok && (authlib.IsIdentityType(info.GetIdentityType(), authlib.TypeAccessPolicy) || b.useExclusivelyAccessCheckerForAuthz) {
res, err := b.access.Check(ctx, info, authlib.CheckRequest{
Verb: a.GetVerb(),
Group: a.GetAPIGroup(),
Resource: a.GetResource(),
Name: a.GetName(),
Namespace: a.GetNamespace(),
Subresource: a.GetSubresource(),
Path: a.GetPath(),
}, "")
if err != nil {
return authorizer.DecisionDeny, "failed to perform authorization", err
}
if !res.Allowed {
return authorizer.DecisionDeny, "permission denied", nil
}
return authorizer.DecisionAllow, "", nil
}
id, err := identity.GetRequester(ctx)
if err != nil {
return authorizer.DecisionDeny, "failed to find requester", err
}
return b.authorizeResource(ctx, a, id)
return b.authorizeResource(ctx, a)
})
}
// authorizeResource handles authorization for different resources.
// Different routes may need different permissions.
// * Reading and modifying a repository's configuration requires administrator privileges.
// * Reading a repository's limited configuration (/stats & /settings) requires viewer privileges.
// * Reading a repository's files requires viewer privileges.
// * Reading a repository's refs requires viewer privileges.
// * Editing a repository's files requires editor privileges.
// * Syncing a repository requires editor privileges.
// * Exporting a repository requires administrator privileges.
// * Migrating a repository requires administrator privileges.
// * Testing a repository configuration requires administrator privileges.
// * Viewing a repository's history requires editor privileges.
func (b *APIBuilder) authorizeResource(ctx context.Context, a authorizer.Attributes, id identity.Requester) (authorizer.Decision, string, error) {
// Uses fine-grained permissions defined in accesscontrol.go:
//
// Repositories:
// - CRUD: repositories:create/read/write/delete
// - Subresources: files (any auth), refs (editor), resources/history/status (admin)
// - Test: repositories:write
// - Jobs subresource: jobs:create/read
//
// Connections:
// - CRUD: connections:create/read/write/delete
// - Status: connections:read
//
// Jobs:
// - CRUD: jobs:create/read/write/delete
//
// Historic Jobs:
// - Read-only: historicjobs:read
//
// Settings:
// - settings:read - granted to Viewer (all logged-in users)
//
// Stats:
// - stats:read - granted to Admin only
func (b *APIBuilder) authorizeResource(ctx context.Context, a authorizer.Attributes) (authorizer.Decision, string, error) {
switch a.GetResource() {
case provisioning.RepositoryResourceInfo.GetName():
return b.authorizeRepositorySubresource(a, id)
case "stats":
return b.authorizeStats(id)
case "settings":
return b.authorizeSettings(id)
case provisioning.JobResourceInfo.GetName(), provisioning.HistoricJobResourceInfo.GetName():
return b.authorizeJobs(id)
return b.authorizeRepositorySubresource(ctx, a)
case provisioning.ConnectionResourceInfo.GetName():
return b.authorizeConnectionSubresource(a, id)
return b.authorizeConnectionSubresource(ctx, a)
case provisioning.JobResourceInfo.GetName():
return toAuthorizerDecision(b.accessWithEditor.Check(ctx, authlib.CheckRequest{
Verb: a.GetVerb(),
Group: provisioning.GROUP,
Resource: provisioning.JobResourceInfo.GetName(),
Name: a.GetName(),
Namespace: a.GetNamespace(),
}, ""))
case provisioning.HistoricJobResourceInfo.GetName():
// Historic jobs are read-only and admin-only (not editor)
return toAuthorizerDecision(b.accessWithAdmin.Check(ctx, authlib.CheckRequest{
Verb: a.GetVerb(),
Group: provisioning.GROUP,
Resource: provisioning.HistoricJobResourceInfo.GetName(),
Name: a.GetName(),
Namespace: a.GetNamespace(),
}, ""))
case "settings":
// Settings are read-only and accessible by all logged-in users (Viewer role)
return toAuthorizerDecision(b.accessWithViewer.Check(ctx, authlib.CheckRequest{
Verb: a.GetVerb(),
Group: provisioning.GROUP,
Resource: "settings",
Namespace: a.GetNamespace(),
}, ""))
case "stats":
// Stats are read-only and admin-only
return toAuthorizerDecision(b.accessWithAdmin.Check(ctx, authlib.CheckRequest{
Verb: a.GetVerb(),
Group: provisioning.GROUP,
Resource: "stats",
Namespace: a.GetNamespace(),
}, ""))
default:
return b.authorizeDefault(id)
return b.authorizeDefault(ctx)
}
}
// authorizeRepositorySubresource handles authorization for repository subresources.
// Each subresource maps to a minimum basic role; deny reasons are surfaced to
// the caller of the authorizer.
// TODO: Support more fine-grained permissions than the basic roles. Especially on Enterprise.
func (b *APIBuilder) authorizeRepositorySubresource(a authorizer.Attributes, id identity.Requester) (authorizer.Decision, string, error) {
	role := id.GetOrgRole()
	switch a.GetSubresource() {
	case "", "test":
		// Operating on the repository configuration itself requires admin.
		if !role.Includes(identity.RoleAdmin) {
			return authorizer.DecisionDeny, "admin role is required", nil
		}
		return authorizer.DecisionAllow, "", nil
	case "jobs":
		// Posting jobs (e.g. for syncing) requires at least editor privileges.
		if role.Includes(identity.RoleAdmin) || role.Includes(identity.RoleEditor) {
			return authorizer.DecisionAllow, "", nil
		}
		return authorizer.DecisionDeny, "editor role is required", nil
	case "refs":
		// Strictly a read operation; handy on the frontend for viewers.
		if !role.Includes(identity.RoleViewer) {
			return authorizer.DecisionDeny, "viewer role is required", nil
		}
		return authorizer.DecisionAllow, "", nil
	case "files":
		// Access to files is controlled by the AccessClient.
		return authorizer.DecisionAllow, "", nil
	case "resources", "sync", "history":
		// Mostly read operations. Sync can be somewhat destructive, but it is
		// expected to be fine for editors to import changes.
		if !role.Includes(identity.RoleEditor) {
			return authorizer.DecisionDeny, "editor role is required", nil
		}
		return authorizer.DecisionAllow, "", nil
	case "status":
		// Viewers may read the status; nobody may update it through this API.
		if role.Includes(identity.RoleViewer) && a.GetVerb() == apiutils.VerbGet {
			return authorizer.DecisionAllow, "", nil
		}
		return authorizer.DecisionDeny, "users cannot update the status of a repository", nil
	default:
		if id.GetIsGrafanaAdmin() {
			return authorizer.DecisionAllow, "", nil
		}
		return authorizer.DecisionDeny, "unmapped subresource defaults to no access", nil
	}
}
// authorizeStats handles authorization for the stats resource. Stats can
// leak information one shouldn't necessarily have access to, so access is
// limited to admins.
func (b *APIBuilder) authorizeStats(id identity.Requester) (authorizer.Decision, string, error) {
	if !id.GetOrgRole().Includes(identity.RoleAdmin) {
		return authorizer.DecisionDeny, "admin role is required", nil
	}
	return authorizer.DecisionAllow, "", nil
}
// authorizeSettings handles authorization for the settings resource. It is
// strictly a read operation and is handy on the frontend for viewers.
func (b *APIBuilder) authorizeSettings(id identity.Requester) (authorizer.Decision, string, error) {
	if !id.GetOrgRole().Includes(identity.RoleViewer) {
		return authorizer.DecisionDeny, "viewer role is required", nil
	}
	return authorizer.DecisionAllow, "", nil
}
// authorizeJobs handles authorization for job resources. Jobs are shown on
// the configuration page, so access is limited to admins.
func (b *APIBuilder) authorizeJobs(id identity.Requester) (authorizer.Decision, string, error) {
	if !id.GetOrgRole().Includes(identity.RoleAdmin) {
		return authorizer.DecisionDeny, "admin role is required", nil
	}
	return authorizer.DecisionAllow, "", nil
}
// authorizeConnectionSubresource handles authorization for connections subresources.
func (b *APIBuilder) authorizeConnectionSubresource(a authorizer.Attributes, id identity.Requester) (authorizer.Decision, string, error) {
// Uses the access checker with verb-based authorization.
func (b *APIBuilder) authorizeRepositorySubresource(ctx context.Context, a authorizer.Attributes) (authorizer.Decision, string, error) {
switch a.GetSubresource() {
// Repository CRUD - use access checker with the actual verb
case "":
// Doing something with the connection itself.
if id.GetOrgRole().Includes(identity.RoleAdmin) {
return authorizer.DecisionAllow, "", nil
}
return authorizer.DecisionDeny, "admin role is required", nil
case "status":
if id.GetOrgRole().Includes(identity.RoleViewer) && a.GetVerb() == apiutils.VerbGet {
return authorizer.DecisionAllow, "", nil
}
return authorizer.DecisionDeny, "users cannot update the status of a connection", nil
return toAuthorizerDecision(b.accessWithAdmin.Check(ctx, authlib.CheckRequest{
Verb: a.GetVerb(),
Group: provisioning.GROUP,
Resource: provisioning.RepositoryResourceInfo.GetName(),
Name: a.GetName(),
Namespace: a.GetNamespace(),
}, ""))
// Test requires write permission (testing before save)
case "test":
return toAuthorizerDecision(b.accessWithAdmin.Check(ctx, authlib.CheckRequest{
Verb: apiutils.VerbUpdate,
Group: provisioning.GROUP,
Resource: provisioning.RepositoryResourceInfo.GetName(),
Name: a.GetName(),
Namespace: a.GetNamespace(),
}, ""))
// Files subresource: allow any authenticated user at route level.
// Directory listing checks repositories:read in the connector.
// Individual file operations are authorized by DualReadWriter based on the actual resource.
case "files":
return authorizer.DecisionAllow, "", nil
// refs subresource - editors need to see branches to push changes
case "refs":
return toAuthorizerDecision(b.accessWithEditor.Check(ctx, authlib.CheckRequest{
Verb: apiutils.VerbGet,
Group: provisioning.GROUP,
Resource: provisioning.RepositoryResourceInfo.GetName(),
Name: a.GetName(),
Namespace: a.GetNamespace(),
}, ""))
// Read-only subresources: resources, history, status (admin only)
case "resources", "history", "status":
return toAuthorizerDecision(b.accessWithAdmin.Check(ctx, authlib.CheckRequest{
Verb: apiutils.VerbGet,
Group: provisioning.GROUP,
Resource: provisioning.RepositoryResourceInfo.GetName(),
Name: a.GetName(),
Namespace: a.GetNamespace(),
}, ""))
// Jobs subresource - check jobs permissions with the verb (editors can manage jobs)
case "jobs":
return toAuthorizerDecision(b.accessWithEditor.Check(ctx, authlib.CheckRequest{
Verb: a.GetVerb(),
Group: provisioning.GROUP,
Resource: provisioning.JobResourceInfo.GetName(),
Namespace: a.GetNamespace(),
}, ""))
default:
id, err := identity.GetRequester(ctx)
if err != nil {
return authorizer.DecisionDeny, "failed to find requester", err
}
if id.GetIsGrafanaAdmin() {
return authorizer.DecisionAllow, "", nil
}
@@ -460,8 +456,60 @@ func (b *APIBuilder) authorizeConnectionSubresource(a authorizer.Attributes, id
}
}
// authorizeConnectionSubresource handles authorization for connection
// subresources using the access checker with verb-based authorization.
func (b *APIBuilder) authorizeConnectionSubresource(ctx context.Context, a authorizer.Attributes) (authorizer.Decision, string, error) {
	switch a.GetSubresource() {
	case "":
		// Connection CRUD: check with the actual verb from the request.
		req := authlib.CheckRequest{
			Verb:      a.GetVerb(),
			Group:     provisioning.GROUP,
			Resource:  provisioning.ConnectionResourceInfo.GetName(),
			Name:      a.GetName(),
			Namespace: a.GetNamespace(),
		}
		return toAuthorizerDecision(b.accessWithAdmin.Check(ctx, req, ""))
	case "status":
		// Status is read-only, so the check always uses the get verb.
		req := authlib.CheckRequest{
			Verb:      apiutils.VerbGet,
			Group:     provisioning.GROUP,
			Resource:  provisioning.ConnectionResourceInfo.GetName(),
			Name:      a.GetName(),
			Namespace: a.GetNamespace(),
		}
		return toAuthorizerDecision(b.accessWithAdmin.Check(ctx, req, ""))
	default:
		id, err := identity.GetRequester(ctx)
		if err != nil {
			return authorizer.DecisionDeny, "failed to find requester", err
		}
		if id.GetIsGrafanaAdmin() {
			return authorizer.DecisionAllow, "", nil
		}
		return authorizer.DecisionDeny, "unmapped subresource defaults to no access", nil
	}
}
// ----------------------------------------------------------------------------
// Authorization helpers
// ----------------------------------------------------------------------------

// toAuthorizerDecision converts an access-check error into an authorizer
// decision tuple: deny (with the error text as the reason) on error, allow
// otherwise. The error itself is intentionally not propagated.
func toAuthorizerDecision(err error) (authorizer.Decision, string, error) {
	if err == nil {
		return authorizer.DecisionAllow, "", nil
	}
	return authorizer.DecisionDeny, err.Error(), nil
}
// authorizeDefault handles authorization for unmapped resources.
func (b *APIBuilder) authorizeDefault(id identity.Requester) (authorizer.Decision, string, error) {
func (b *APIBuilder) authorizeDefault(ctx context.Context) (authorizer.Decision, string, error) {
id, err := identity.GetRequester(ctx)
if err != nil {
return authorizer.DecisionDeny, "failed to find requester", err
}
// We haven't bothered with this kind yet.
if id.GetIsGrafanaAdmin() {
return authorizer.DecisionAllow, "", nil
@@ -558,7 +606,7 @@ func (b *APIBuilder) UpdateAPIGroupInfo(apiGroupInfo *genericapiserver.APIGroupI
// TODO: Add some logic so that the connectors can registered themselves and we don't have logic all over the place
storage[provisioning.RepositoryResourceInfo.StoragePath("test")] = NewTestConnector(b, repository.NewRepositoryTesterWithExistingChecker(repository.NewSimpleRepositoryTester(b.validator), b.VerifyAgainstExistingRepositories))
storage[provisioning.RepositoryResourceInfo.StoragePath("files")] = NewFilesConnector(b, b.parsers, b.clients, b.access)
storage[provisioning.RepositoryResourceInfo.StoragePath("files")] = NewFilesConnector(b, b.parsers, b.clients, b.accessWithAdmin)
storage[provisioning.RepositoryResourceInfo.StoragePath("refs")] = NewRefsConnector(b)
storage[provisioning.RepositoryResourceInfo.StoragePath("resources")] = &listConnector{
getter: b,
@@ -673,7 +721,8 @@ func (b *APIBuilder) Validate(ctx context.Context, a admission.Attributes, o adm
//
// the only time to add configuration checks here is if you need to compare
// the incoming change to the current configuration
list := b.validator.ValidateRepository(repo)
isCreate := a.GetOperation() == admission.Create
list := b.validator.ValidateRepository(repo, isCreate)
cfg := repo.Config()
if a.GetOperation() == admission.Update {

View File

@@ -12,6 +12,7 @@ import (
authlib "github.com/grafana/authlib/types"
"github.com/grafana/grafana-app-sdk/logging"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/auth"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/apimachinery/apis/common/v0alpha1"
@@ -32,7 +33,7 @@ type DualReadWriter struct {
repo repository.ReaderWriter
parser Parser
folders *FolderManager
access authlib.AccessChecker
access auth.AccessChecker
}
type DualWriteOptions struct {
@@ -48,7 +49,7 @@ type DualWriteOptions struct {
Branch string // Configured default branch
}
func NewDualReadWriter(repo repository.ReaderWriter, parser Parser, folders *FolderManager, access authlib.AccessChecker) *DualReadWriter {
func NewDualReadWriter(repo repository.ReaderWriter, parser Parser, folders *FolderManager, access auth.AccessChecker) *DualReadWriter {
return &DualReadWriter{repo: repo, parser: parser, folders: folders, access: access}
}
@@ -492,11 +493,6 @@ func (r *DualReadWriter) moveFile(ctx context.Context, opts DualWriteOptions) (*
}
func (r *DualReadWriter) authorize(ctx context.Context, parsed *ParsedResource, verb string) error {
id, err := identity.GetRequester(ctx)
if err != nil {
return apierrors.NewUnauthorized(err.Error())
}
var name string
if parsed.Existing != nil {
name = parsed.Existing.GetName()
@@ -504,27 +500,15 @@ func (r *DualReadWriter) authorize(ctx context.Context, parsed *ParsedResource,
name = parsed.Obj.GetName()
}
rsp, err := r.access.Check(ctx, id, authlib.CheckRequest{
Group: parsed.GVR.Group,
Resource: parsed.GVR.Resource,
Namespace: id.GetNamespace(),
Name: name,
Verb: verb,
return r.access.Check(ctx, authlib.CheckRequest{
Group: parsed.GVR.Group,
Resource: parsed.GVR.Resource,
Name: name,
Verb: verb,
}, parsed.Meta.GetFolder())
if err != nil || !rsp.Allowed {
return apierrors.NewForbidden(parsed.GVR.GroupResource(), parsed.Obj.GetName(),
fmt.Errorf("no access to perform %s on the resource", verb))
}
return nil
}
func (r *DualReadWriter) authorizeCreateFolder(ctx context.Context, path string) error {
id, err := identity.GetRequester(ctx)
if err != nil {
return apierrors.NewUnauthorized(err.Error())
}
// Determine parent folder from path
parentFolder := ""
if path != "" {
@@ -537,19 +521,12 @@ func (r *DualReadWriter) authorizeCreateFolder(ctx context.Context, path string)
}
// For folder create operations, use empty name to check parent folder permissions
rsp, err := r.access.Check(ctx, id, authlib.CheckRequest{
Group: FolderResource.Group,
Resource: FolderResource.Resource,
Namespace: id.GetNamespace(),
Name: "", // Empty name for create operations
Verb: utils.VerbCreate,
return r.access.Check(ctx, authlib.CheckRequest{
Group: FolderResource.Group,
Resource: FolderResource.Resource,
Name: "", // Empty name for create operations
Verb: utils.VerbCreate,
}, parentFolder)
if err != nil || !rsp.Allowed {
return apierrors.NewForbidden(FolderResource.GroupResource(), path,
fmt.Errorf("no access to create folder in parent folder '%s'", parentFolder))
}
return nil
}
func (r *DualReadWriter) deleteFolder(ctx context.Context, opts DualWriteOptions) (*ParsedResource, error) {

View File

@@ -76,7 +76,7 @@ func ProvideAppInstallers(
if features.IsEnabledGlobally(featuremgmt.FlagKubernetesLogsDrilldown) {
installers = append(installers, logsdrilldownAppInstaller)
}
//nolint:staticcheck // not yet migrated to OpenFeature
//nolint:staticcheck
if features.IsEnabledGlobally(featuremgmt.FlagKubernetesAnnotations) {
installers = append(installers, annotationAppInstaller)
}

View File

@@ -24,26 +24,38 @@ func GetAuthorizer() authorizer.Authorizer {
return authorizer.DecisionDeny, "valid user is required", err
}
// check if is admin
if u.GetIsGrafanaAdmin() {
return authorizer.DecisionAllow, "isGrafanaAdmin", nil
}
// Auth handling for LogsDrilldownDefaults resource
if attr.GetResource() == "logsdrilldowndefaults" {
// Allow list and get for everyone
if attr.GetVerb() == "list" || attr.GetVerb() == "get" {
return authorizer.DecisionAllow, "", nil
}
// Only allow admins to update (create, update, patch, delete)
if u.GetIsGrafanaAdmin() {
return authorizer.DecisionAllow, "", nil
}
// Deny all other operations for non-admins
return authorizer.DecisionDeny, "admin access required", nil
}
// check if is admin
if u.GetIsGrafanaAdmin() {
return authorizer.DecisionAllow, "", nil
p := u.GetPermissions()
// Auth handling for Logs Drilldown default columns
if attr.GetResource() == "logsdrilldowndefaultcolumns" {
// Allow get for all users
if attr.GetVerb() == "get" {
return authorizer.DecisionAllow, "", nil
}
// require plugins:write permissions for other operations
_, ok := p[accesscontrol.PluginRolePrefix+"write"]
if ok {
return authorizer.DecisionAllow, "user has plugins:write", nil
} else {
return authorizer.DecisionDeny, "user missing plugins:write", nil
}
}
p := u.GetPermissions()
if len(p) == 0 {
return authorizer.DecisionDeny, "no permissions", nil
}

View File

@@ -131,7 +131,8 @@ func (s *ExtendedJWT) authenticateAsUser(
return nil, errExtJWTInvalid.Errorf("failed to parse id token subject: %w", err)
}
if !claims.IsIdentityType(t, claims.TypeUser) {
// TODO: How to support other identity types like render and anonymous here?
if !claims.IsIdentityType(t, claims.TypeUser, claims.TypeServiceAccount) {
return nil, errExtJWTInvalidSubject.Errorf("unexpected identity: %s", idTokenClaims.Subject)
}

View File

@@ -53,6 +53,17 @@ var (
Namespace: "default", // org ID of 1 is special and translates to default
},
}
validIDTokenClaimsWithServiceAccount = idTokenClaims{
Claims: jwt.Claims{
Subject: "service-account:3",
Expiry: jwt.NewNumericDate(time.Date(2023, 5, 3, 0, 0, 0, 0, time.UTC)),
IssuedAt: jwt.NewNumericDate(time.Date(2023, 5, 2, 0, 0, 0, 0, time.UTC)),
},
Rest: authnlib.IDTokenClaims{
AuthenticatedBy: "extended_jwt",
Namespace: "default", // org ID of 1 is special and translates to default
},
}
validIDTokenClaimsWithStackSet = idTokenClaims{
Claims: jwt.Claims{
Subject: "user:2",
@@ -118,7 +129,7 @@ var (
}
invalidSubjectIDTokenClaims = idTokenClaims{
Claims: jwt.Claims{
Subject: "service-account:2",
Subject: "anonymous:2",
Expiry: jwt.NewNumericDate(time.Date(2023, 5, 3, 0, 0, 0, 0, time.UTC)),
IssuedAt: jwt.NewNumericDate(time.Date(2023, 5, 2, 0, 0, 0, 0, time.UTC)),
},
@@ -286,6 +297,29 @@ func TestExtendedJWT_Authenticate(t *testing.T) {
},
},
},
{
name: "should authenticate as service account",
accessToken: &validAccessTokenClaims,
idToken: &validIDTokenClaimsWithServiceAccount,
orgID: 1,
want: &authn.Identity{
ID: "3",
Type: claims.TypeServiceAccount,
OrgID: 1,
AccessTokenClaims: &validAccessTokenClaims,
IDTokenClaims: &validIDTokenClaimsWithServiceAccount,
Namespace: "default",
AuthenticatedBy: "extendedjwt",
AuthID: "access-policy:this-uid",
ClientParams: authn.ClientParams{
FetchSyncedUser: true,
SyncPermissions: true,
FetchPermissionsParams: authn.FetchPermissionsParams{
RestrictedActions: []string{"dashboards:create", "folders:read", "datasources:explore", "datasources.insights:read"},
},
},
},
},
{
name: "should authenticate as user in the user namespace",
accessToken: &validAccessTokenClaimsWildcard,

View File

@@ -279,8 +279,11 @@ func NewMapperRegistry() MapperRegistry {
},
"provisioning.grafana.app": {
"repositories": newResourceTranslation("provisioning.repositories", "uid", false, skipScopeOnAllVerbs),
"connections": newResourceTranslation("provisioning.connections", "uid", false, skipScopeOnAllVerbs),
"jobs": newResourceTranslation("provisioning.jobs", "uid", false, skipScopeOnAllVerbs),
"historicjobs": newResourceTranslation("provisioning.historicjobs", "uid", false, skipScopeOnAllVerbs),
"settings": newResourceTranslation("provisioning.settings", "", false, skipScopeOnAllVerbs),
"stats": newResourceTranslation("provisioning.stats", "", false, skipScopeOnAllVerbs),
},
"secret.grafana.app": {
"securevalues": newResourceTranslation("secret.securevalues", "uid", false, nil),

View File

@@ -23,9 +23,7 @@ type FeatureToggles interface {
// a full server restart for a change to take place.
//
// Deprecated: FeatureToggles.IsEnabledGlobally is deprecated and will be removed in a future release.
// Toggles that must be reliably evaluated at the service startup should be
// changed to settings (see setting.StartupSettings), and/or removed entirely.
// For app registration please use `grafana-apiserver.runtime_config` in settings.ini
// Toggles that must be reliably evaluated at the service startup should be changed to settings and/or removed entirely.
IsEnabledGlobally(flag string) bool
// Get the enabled flags -- this *may* also include disabled flags (with value false)

View File

@@ -185,13 +185,6 @@ var (
Stage: FeatureStageExperimental,
Owner: grafanaDatasourcesCoreServicesSquad,
},
{
Name: "unifiedRequestLog",
Description: "Writes error logs to the request logger",
Stage: FeatureStageGeneralAvailability,
Owner: grafanaBackendGroup,
Expression: "true",
},
{
Name: "renderAuthJWT",
Description: "Uses JWT-based auth for rendering instead of relying on remote cache",
@@ -892,6 +885,13 @@ var (
Owner: grafanaAlertingSquad,
FrontendOnly: true,
},
{
Name: "alertingSavedSearches",
Description: "Enables saved searches for alert rules list",
Stage: FeatureStageExperimental,
Owner: grafanaAlertingSquad,
FrontendOnly: true,
},
{
Name: "alertingDisableSendAlertsExternal",
Description: "Disables the ability to send alerts to an external Alertmanager datasource.",
@@ -1928,6 +1928,14 @@ var (
Owner: grafanaDatavizSquad,
Expression: "false",
},
{
Name: "heatmapRowsAxisOptions",
Description: "Enable Y-axis scale configuration options for pre-bucketed heatmap data (heatmap-rows)",
Stage: FeatureStageExperimental,
FrontendOnly: true,
Owner: grafanaDatavizSquad,
Expression: "false",
},
{
Name: "preventPanelChromeOverflow",
Description: "Restrict PanelChrome contents with overflow: hidden;",
@@ -1967,6 +1975,14 @@ var (
Owner: identityAccessTeam,
Expression: "true",
},
{
Name: "pluginInsights",
Description: "Show insights for plugins in the plugin details page",
Stage: FeatureStageExperimental,
FrontendOnly: true,
Owner: grafanaPluginsPlatformSquad,
Expression: "false",
},
{
Name: "panelTimeSettings",
Description: "Enables a new panel time settings drawer",

View File

@@ -24,7 +24,6 @@ influxqlStreamingParser,experimental,@grafana/partner-datasources,false,false,fa
influxdbRunQueriesInParallel,privatePreview,@grafana/partner-datasources,false,false,false
lokiLogsDataplane,experimental,@grafana/observability-logs,false,false,false
disableSSEDataplane,experimental,@grafana/grafana-datasources-core-services,false,false,false
unifiedRequestLog,GA,@grafana/grafana-backend-group,false,false,false
renderAuthJWT,preview,@grafana/grafana-operator-experience-squad,false,false,false
refactorVariablesTimeRange,preview,@grafana/dashboards-squad,false,false,false
faroDatasourceSelector,preview,@grafana/app-o11y,false,false,true
@@ -123,6 +122,7 @@ suggestedDashboards,experimental,@grafana/sharing-squad,false,false,false
dashboardTemplates,preview,@grafana/sharing-squad,false,false,false
logsExploreTableDefaultVisualization,experimental,@grafana/observability-logs,false,false,true
alertingListViewV2,privatePreview,@grafana/alerting-squad,false,false,true
alertingSavedSearches,experimental,@grafana/alerting-squad,false,false,true
alertingDisableSendAlertsExternal,experimental,@grafana/alerting-squad,false,false,false
preserveDashboardStateWhenNavigating,experimental,@grafana/dashboards-squad,false,false,false
alertingCentralAlertHistory,experimental,@grafana/alerting-squad,false,false,false
@@ -262,11 +262,13 @@ pluginInstallAPISync,experimental,@grafana/plugins-platform-backend,false,false,
newGauge,experimental,@grafana/dataviz-squad,false,false,true
newVizSuggestions,preview,@grafana/dataviz-squad,false,false,true
externalVizSuggestions,experimental,@grafana/dataviz-squad,false,false,true
heatmapRowsAxisOptions,experimental,@grafana/dataviz-squad,false,false,true
preventPanelChromeOverflow,preview,@grafana/grafana-frontend-platform,false,false,true
jaegerEnableGrpcEndpoint,experimental,@grafana/oss-big-tent,false,false,false
pluginStoreServiceLoading,experimental,@grafana/plugins-platform-backend,false,false,false
newPanelPadding,preview,@grafana/dashboards-squad,false,false,true
onlyStoreActionSets,GA,@grafana/identity-access-team,false,false,false
pluginInsights,experimental,@grafana/plugins-platform-backend,false,false,true
panelTimeSettings,experimental,@grafana/dashboards-squad,false,false,false
elasticsearchRawDSLQuery,experimental,@grafana/partner-datasources,false,false,false
kubernetesAnnotations,experimental,@grafana/grafana-backend-services-squad,false,false,false
1 Name Stage Owner requiresDevMode RequiresRestart FrontendOnly
24 influxdbRunQueriesInParallel privatePreview @grafana/partner-datasources false false false
25 lokiLogsDataplane experimental @grafana/observability-logs false false false
26 disableSSEDataplane experimental @grafana/grafana-datasources-core-services false false false
unifiedRequestLog GA @grafana/grafana-backend-group false false false
27 renderAuthJWT preview @grafana/grafana-operator-experience-squad false false false
28 refactorVariablesTimeRange preview @grafana/dashboards-squad false false false
29 faroDatasourceSelector preview @grafana/app-o11y false false true
122 dashboardTemplates preview @grafana/sharing-squad false false false
123 logsExploreTableDefaultVisualization experimental @grafana/observability-logs false false true
124 alertingListViewV2 privatePreview @grafana/alerting-squad false false true
125 alertingSavedSearches experimental @grafana/alerting-squad false false true
126 alertingDisableSendAlertsExternal experimental @grafana/alerting-squad false false false
127 preserveDashboardStateWhenNavigating experimental @grafana/dashboards-squad false false false
128 alertingCentralAlertHistory experimental @grafana/alerting-squad false false false
262 newGauge experimental @grafana/dataviz-squad false false true
263 newVizSuggestions preview @grafana/dataviz-squad false false true
264 externalVizSuggestions experimental @grafana/dataviz-squad false false true
265 heatmapRowsAxisOptions experimental @grafana/dataviz-squad false false true
266 preventPanelChromeOverflow preview @grafana/grafana-frontend-platform false false true
267 jaegerEnableGrpcEndpoint experimental @grafana/oss-big-tent false false false
268 pluginStoreServiceLoading experimental @grafana/plugins-platform-backend false false false
269 newPanelPadding preview @grafana/dashboards-squad false false true
270 onlyStoreActionSets GA @grafana/identity-access-team false false false
271 pluginInsights experimental @grafana/plugins-platform-backend false false true
272 panelTimeSettings experimental @grafana/dashboards-squad false false false
273 elasticsearchRawDSLQuery experimental @grafana/partner-datasources false false false
274 kubernetesAnnotations experimental @grafana/grafana-backend-services-squad false false false

View File

@@ -79,10 +79,6 @@ const (
// Disables dataplane specific processing in server side expressions.
FlagDisableSSEDataplane = "disableSSEDataplane"
// FlagUnifiedRequestLog
// Writes error logs to the request logger
FlagUnifiedRequestLog = "unifiedRequestLog"
// FlagRenderAuthJWT
// Uses JWT-based auth for rendering instead of relying on remote cache
FlagRenderAuthJWT = "renderAuthJWT"

View File

@@ -498,6 +498,19 @@
"codeowner": "@grafana/alerting-squad"
}
},
{
"metadata": {
"name": "alertingSavedSearches",
"resourceVersion": "1765453147546",
"creationTimestamp": "2025-12-11T11:39:07Z"
},
"spec": {
"description": "Enables saved searches for alert rules list",
"stage": "experimental",
"codeowner": "@grafana/alerting-squad",
"frontend": true
}
},
{
"metadata": {
"name": "alertingTriage",
@@ -1647,6 +1660,20 @@
"codeowner": "@grafana/search-and-storage"
}
},
{
"metadata": {
"name": "heatmapRowsAxisOptions",
"resourceVersion": "1765353244400",
"creationTimestamp": "2025-12-10T07:54:04Z"
},
"spec": {
"description": "Enable Y-axis scale configuration options for pre-bucketed heatmap data (heatmap-rows)",
"stage": "experimental",
"codeowner": "@grafana/dataviz-squad",
"frontend": true,
"expression": "false"
}
},
{
"metadata": {
"name": "improvedExternalSessionHandling",
@@ -2706,6 +2733,20 @@
"expression": "false"
}
},
{
"metadata": {
"name": "pluginInsights",
"resourceVersion": "1761300628147",
"creationTimestamp": "2025-10-24T10:10:28Z"
},
"spec": {
"description": "Show insights for plugins in the plugin details page",
"stage": "experimental",
"codeowner": "@grafana/plugins-platform-backend",
"frontend": true,
"expression": "false"
}
},
{
"metadata": {
"name": "pluginInstallAPISync",
@@ -3501,7 +3542,8 @@
"metadata": {
"name": "unifiedRequestLog",
"resourceVersion": "1764664939750",
"creationTimestamp": "2023-03-31T13:38:09Z"
"creationTimestamp": "2023-03-31T13:38:09Z",
"deletionTimestamp": "2025-12-18T14:21:02Z"
},
"spec": {
"description": "Writes error logs to the request logger",

View File

@@ -726,19 +726,6 @@ func (s *Service) moveOnApiServer(ctx context.Context, cmd *folder.MoveFolderCom
return nil, folder.ErrBadRequest.Errorf("k6 project may not be moved")
}
f, err := s.unifiedStore.Get(ctx, folder.GetFolderQuery{
UID: &cmd.UID,
OrgID: cmd.OrgID,
SignedInUser: cmd.SignedInUser,
})
if err != nil {
return nil, err
}
if f != nil && f.ParentUID == accesscontrol.K6FolderUID {
return nil, folder.ErrBadRequest.Errorf("k6 project may not be moved")
}
// Check that the user is allowed to move the folder to the destination folder
hasAccess, evalErr := s.canMoveViaApiServer(ctx, cmd)
if evalErr != nil {
@@ -748,30 +735,7 @@ func (s *Service) moveOnApiServer(ctx context.Context, cmd *folder.MoveFolderCom
return nil, dashboards.ErrFolderAccessDenied
}
// here we get the folder, we need to get the height of current folder
// and the depth of the new parent folder, the sum can't bypass 8
folderHeight, err := s.unifiedStore.GetHeight(ctx, cmd.UID, cmd.OrgID, &cmd.NewParentUID)
if err != nil {
return nil, err
}
parents, err := s.unifiedStore.GetParents(ctx, folder.GetParentsQuery{UID: cmd.NewParentUID, OrgID: cmd.OrgID})
if err != nil {
return nil, err
}
// height of the folder that is being moved + this current folder itself + depth of the NewParent folder should be less than or equal MaxNestedFolderDepth
if folderHeight+len(parents)+1 > folder.MaxNestedFolderDepth {
return nil, folder.ErrMaximumDepthReached.Errorf("failed to move folder")
}
for _, parent := range parents {
// if the current folder is already a parent of newparent, we should return error
if parent.UID == cmd.UID {
return nil, folder.ErrCircularReference.Errorf("failed to move folder")
}
}
f, err = s.unifiedStore.Update(ctx, folder.UpdateFolderCommand{
f, err := s.unifiedStore.Update(ctx, folder.UpdateFolderCommand{
UID: cmd.UID,
OrgID: cmd.OrgID,
NewParentUID: &cmd.NewParentUID,

Some files were not shown because too many files have changed in this diff Show More