Compare commits

...

6 Commits

Author SHA1 Message Date
Rafael Paulovic 37a1a45194 Merge remote-tracking branch 'origin/main' into rm-dualwriter-datasyncer 2026-01-13 13:35:12 +01:00
alerting-team[bot] 5dbbe8164b Alerting: Update alerting module to 98a49ed9557fd9b5f33ecb77cbaa0748f13dc568 (#116197)
* [create-pull-request] automated change

* update prometheus-alertmanager

---------

Co-authored-by: titolins <8942194+titolins@users.noreply.github.com>
Co-authored-by: Tito Lins <tito.linsesilva@grafana.com>
2026-01-13 12:27:35 +00:00
Tobias Skarhed d1064da4cd Scopes: Add RTK Query API client for caching (#115494)
* Scopes API client

* Initial RTK query commit

* Copy API client from generated enterprise folder

* Mock ScopesApiClient for integration tests

* Update e2e tests

* Handle group expansion for dashboard navigation

* Extract integration test mocks

* Move mock to only be for integration tests

* Update path for enterprise sync script

* Re-export mockData

* Disregard caching for search

* Leave name parameters empty

* Disable subscriptions for client requests

* Add functionality to reset cache between mocked requests

* Use grafana-test-utils for scopes integration tests

* Rollback mock setup

* Remove store from window object

* Remove cache helper

* Restore scopenode search functionality

* Improve request error handling

* Clean up subscription in case subscription: false lies

* Fix logging security risk

* Rewrite tests to cover RTK query usage and improve error catching

* Update USE_LIVE_DATA to be consistent

* Remove unused timeout parameter

* Fix error handling

* Make dashboard-navigation test pass
2026-01-13 13:09:08 +01:00
Rafael Paulovic 8cb33642ba Merge remote-tracking branch 'origin/main' into rm-dualwriter-datasyncer 2026-01-13 11:25:42 +01:00
Rafael Paulovic 283ad15e84 Merge remote-tracking branch 'origin/main' into rm-dualwriter-datasyncer 2026-01-13 10:56:27 +01:00
Rafael Paulovic 070dc2288e chore(unified): remove DualWriter data syncer
- migrations are now handled using unified storage migrator framework
2026-01-12 18:09:29 +01:00
57 changed files with 3637 additions and 2051 deletions
+1 -1
View File
@@ -4,7 +4,7 @@ go 1.25.5
require (
github.com/go-kit/log v0.2.1
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4
github.com/grafana/grafana-app-sdk v0.48.7
github.com/grafana/grafana-app-sdk/logging v0.48.7
+2 -2
View File
@@ -243,8 +243,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f h1:3bXOyht68qkfvD6Y8z8XoenFbytSSOIkr/s+AqRzj0o=
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f/go.mod h1:Ji0SfJChcwjgq8ljy6Y5CcYfHfAYKXjKYeysOoDS/6s=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4 h1:jSojuc7njleS3UOz223WDlXOinmuLAIPI0z2vtq8EgI=
github.com/grafana/dskit v0.0.0-20250908063411-6b6da59b5cc4/go.mod h1:VahT+GtfQIM+o8ht2StR6J9g+Ef+C2Vokh5uuSmOD/4=
github.com/grafana/grafana-app-sdk v0.48.7 h1:9mF7nqkqP0QUYYDlznoOt+GIyjzj45wGfUHB32u2ZMo=
+1 -1
View File
@@ -97,7 +97,7 @@ require (
github.com/google/gnostic-models v0.7.1 // indirect
github.com/google/go-cmp v0.7.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // indirect
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f // indirect
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // indirect
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // indirect
github.com/grafana/dataplane/sdata v0.0.9 // indirect
+2 -2
View File
@@ -215,8 +215,8 @@ github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f h1:3bXOyht68qkfvD6Y8z8XoenFbytSSOIkr/s+AqRzj0o=
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f/go.mod h1:Ji0SfJChcwjgq8ljy6Y5CcYfHfAYKXjKYeysOoDS/6s=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=
@@ -1,6 +1,7 @@
import { test, expect } from '@grafana/plugin-e2e';
import { setScopes } from '../utils/scope-helpers';
import { setScopes, setupScopeRoutes } from '../utils/scope-helpers';
import { testScopes } from '../utils/scopes';
import {
getAdHocFilterOptionValues,
@@ -13,6 +14,7 @@ import {
} from './cuj-selectors';
import { prepareAPIMocks } from './utils';
const USE_LIVE_DATA = Boolean(process.env.API_CONFIG_PATH);
const DASHBOARD_UNDER_TEST = 'cuj-dashboard-1';
test.use({
@@ -34,6 +36,11 @@ test.describe(
const adHocFilterPills = getAdHocFilterPills(page);
const scopesSelectorInput = getScopesSelectorInput(page);
// Set up routes before any navigation (only for mocked mode)
if (!USE_LIVE_DATA) {
await setupScopeRoutes(page, testScopes());
}
await test.step('1.Apply filtering to a whole dashboard', async () => {
const dashboardPage = await gotoDashboardPage({ uid: DASHBOARD_UNDER_TEST });
@@ -66,6 +66,17 @@ export function getScopesDashboards(page: Page) {
return page.locator('[data-testid^="scopes-dashboards-"][role="treeitem"]');
}
/**
* Clicks the first available dashboard in the scopes dashboard list.
*/
export async function clickFirstScopesDashboard(page: Page) {
const dashboards = getScopesDashboards(page);
// Wait for at least one dashboard to be visible
await expect(dashboards.first()).toBeVisible({ timeout: 10000 });
// Click - Playwright will automatically wait for the element to be actionable
await dashboards.first().click();
}
export function getScopesDashboardsSearchInput(page: Page) {
return page.getByTestId('scopes-dashboards-search');
}
@@ -1,8 +1,10 @@
import { test, expect } from '@grafana/plugin-e2e';
import { setScopes } from '../utils/scope-helpers';
import { setScopes, setupScopeRoutes } from '../utils/scope-helpers';
import { testScopes } from '../utils/scopes';
import {
clickFirstScopesDashboard,
getAdHocFilterPills,
getGroupByInput,
getGroupByValues,
@@ -21,6 +23,7 @@ test.use({
},
});
const USE_LIVE_DATA = Boolean(process.env.API_CONFIG_PATH);
const DASHBOARD_UNDER_TEST = 'cuj-dashboard-1';
const DASHBOARD_UNDER_TEST_2 = 'cuj-dashboard-2';
const NAVIGATE_TO = 'cuj-dashboard-3';
@@ -38,6 +41,11 @@ test.describe(
const adhocFilterPills = getAdHocFilterPills(page);
const groupByValues = getGroupByValues(page);
// Set up routes before any navigation (only for mocked mode)
if (!USE_LIVE_DATA) {
await setupScopeRoutes(page, testScopes());
}
await test.step('1.Search dashboard', async () => {
await gotoDashboardPage({ uid: DASHBOARD_UNDER_TEST });
@@ -74,7 +82,7 @@ test.describe(
await expect(markdownContent).toContainText(`now-12h`);
await scopesDashboards.first().click();
await clickFirstScopesDashboard(page);
await page.waitForURL('**/d/**');
await expect(markdownContent).toBeVisible();
@@ -117,10 +125,10 @@ test.describe(
await groupByVariable.press('Enter');
await groupByVariable.press('Escape');
await expect(scopesDashboards.first()).toBeVisible();
const { getRequests, waitForExpectedRequests } = await trackDashboardReloadRequests(page);
await scopesDashboards.first().click();
await clickFirstScopesDashboard(page);
await page.waitForURL('**/d/**');
await waitForExpectedRequests();
await page.waitForLoadState('networkidle');
@@ -158,8 +166,7 @@ test.describe(
const oldFilters = `GroupByVar: ${selectedValues}\n\nAdHocVar: ${processedPills}`;
await expect(markdownContent).toContainText(oldFilters);
await expect(scopesDashboards.first()).toBeVisible();
await scopesDashboards.first().click();
await clickFirstScopesDashboard(page);
await page.waitForURL('**/d/**');
const newPillCount = await adhocFilterPills.count();
@@ -165,9 +165,8 @@ test.describe(
await refreshBtn.click();
await page.waitForLoadState('networkidle');
expect(await panelContent.textContent()).not.toBe(panelContents);
// Wait for the panel content to change (not just for network to complete)
await expect(panelContent).not.toHaveText(panelContents!, { timeout: 10000 });
});
await test.step('6.Turn off refresh', async () => {
@@ -9,6 +9,7 @@ import {
openScopesSelector,
searchScopes,
selectScope,
setupScopeRoutes,
} from '../utils/scope-helpers';
import { testScopes } from '../utils/scopes';
@@ -36,32 +37,37 @@ test.describe(
const scopesSelector = getScopesSelectorInput(page);
const recentScopesSelector = getRecentScopesSelector(page);
const scopeTreeCheckboxes = getScopeTreeCheckboxes(page);
const scopes = testScopes();
// Set up routes once before any navigation (only for mocked mode)
if (!USE_LIVE_DATA) {
await setupScopeRoutes(page, scopes);
}
await test.step('1.View and select any scope', async () => {
await gotoDashboardPage({ uid: DASHBOARD_UNDER_TEST });
expect.soft(scopesSelector).toHaveAttribute('data-value', '');
const scopes = testScopes();
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes); //used only in mocked scopes version
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes);
let scopeName = await getScopeTreeName(page, 0);
const firstLevelScopes = scopes[0].children!; //used only in mocked scopes version
const firstLevelScopes = scopes[0].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : firstLevelScopes);
scopeName = await getScopeTreeName(page, 1);
const secondLevelScopes = firstLevelScopes[0].children!; //used only in mocked scopes version
const secondLevelScopes = firstLevelScopes[0].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : secondLevelScopes);
const selectedScopes = [secondLevelScopes[0]]; //used only in mocked scopes version
const selectedScopes = [secondLevelScopes[0]];
scopeName = await getScopeLeafName(page, 0);
let scopeTitle = await getScopeLeafTitle(page, 0);
await selectScope(page, scopeName, USE_LIVE_DATA ? undefined : selectedScopes[0]);
await applyScopes(page, USE_LIVE_DATA ? undefined : selectedScopes); //used only in mocked scopes version
await applyScopes(page, USE_LIVE_DATA ? undefined : selectedScopes);
expect.soft(scopesSelector).toHaveAttribute('data-value', scopeTitle);
});
@@ -70,28 +76,27 @@ test.describe(
await gotoDashboardPage({ uid: DASHBOARD_UNDER_TEST });
expect.soft(scopesSelector).toHaveAttribute('data-value', '');
const scopes = testScopes();
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes); //used only in mocked scopes version
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes);
let scopeName = await getScopeTreeName(page, 0);
const firstLevelScopes = scopes[0].children!; //used only in mocked scopes version
const firstLevelScopes = scopes[0].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : firstLevelScopes);
scopeName = await getScopeTreeName(page, 1);
const secondLevelScopes = firstLevelScopes[0].children!; //used only in mocked scopes version
const secondLevelScopes = firstLevelScopes[0].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : secondLevelScopes);
const scopeTitles: string[] = [];
const selectedScopes = [secondLevelScopes[0], secondLevelScopes[1]]; //used only in mocked scopes version
const selectedScopes = [secondLevelScopes[0], secondLevelScopes[1]];
for (let i = 0; i < selectedScopes.length; i++) {
scopeName = await getScopeLeafName(page, i);
scopeTitles.push(await getScopeLeafTitle(page, i));
await selectScope(page, scopeName, USE_LIVE_DATA ? undefined : selectedScopes[i]); //used only in mocked scopes version
await selectScope(page, scopeName, USE_LIVE_DATA ? undefined : selectedScopes[i]);
}
await applyScopes(page, USE_LIVE_DATA ? undefined : selectedScopes); //used only in mocked scopes version
await applyScopes(page, USE_LIVE_DATA ? undefined : selectedScopes);
await expect.soft(scopesSelector).toHaveAttribute('data-value', scopeTitles.join(' + '));
});
@@ -102,8 +107,7 @@ test.describe(
expect.soft(scopesSelector).toHaveAttribute('data-value', '');
const scopes = testScopes();
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes); //used only in mocked scopes version
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes);
await recentScopesSelector.click();
@@ -121,26 +125,25 @@ test.describe(
expect.soft(scopesSelector).toHaveAttribute('data-value', '');
const scopes = testScopes();
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes);
let scopeName = await getScopeTreeName(page, 1);
const firstLevelScopes = scopes[2].children!; //used only in mocked scopes version
const firstLevelScopes = scopes[2].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : firstLevelScopes);
scopeName = await getScopeTreeName(page, 1);
const secondLevelScopes = firstLevelScopes[0].children!; //used only in mocked scopes version
const secondLevelScopes = firstLevelScopes[0].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : secondLevelScopes);
const selectedScopes = [secondLevelScopes[0]]; //used only in mocked scopes version
const selectedScopes = [secondLevelScopes[0]];
scopeName = await getScopeLeafName(page, 0);
let scopeTitle = await getScopeLeafTitle(page, 0);
await selectScope(page, scopeName, USE_LIVE_DATA ? undefined : selectedScopes[0]);
await applyScopes(page, USE_LIVE_DATA ? undefined : []); //used only in mocked scopes version
await applyScopes(page, USE_LIVE_DATA ? undefined : []);
expect.soft(scopesSelector).toHaveAttribute('data-value', new RegExp(`^${scopeTitle}`));
});
@@ -148,17 +151,16 @@ test.describe(
await test.step('5.View pre-completed production entity values as I type', async () => {
await gotoDashboardPage({ uid: DASHBOARD_UNDER_TEST });
const scopes = testScopes();
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes); //used only in mocked scopes version
await openScopesSelector(page, USE_LIVE_DATA ? undefined : scopes);
let scopeName = await getScopeTreeName(page, 0);
const firstLevelScopes = scopes[0].children!; //used only in mocked scopes version
const firstLevelScopes = scopes[0].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : firstLevelScopes);
scopeName = await getScopeTreeName(page, 1);
const secondLevelScopes = firstLevelScopes[0].children!; //used only in mocked scopes version
const secondLevelScopes = firstLevelScopes[0].children!;
await expandScopesSelection(page, scopeName, USE_LIVE_DATA ? undefined : secondLevelScopes);
const scopeSearchOne = await getScopeLeafTitle(page, 0);
@@ -1,6 +1,6 @@
import { test, expect } from '@grafana/plugin-e2e';
import { applyScopes, openScopesSelector, selectScope } from '../utils/scope-helpers';
import { applyScopes, openScopesSelector, selectScope, setupScopeRoutes } from '../utils/scope-helpers';
import { testScopesWithRedirect } from '../utils/scopes';
test.use({
@@ -16,8 +16,13 @@ test.describe('Scope Redirect Functionality', () => {
test('should redirect to custom URL when scope has redirectUrl', async ({ page, gotoDashboardPage }) => {
const scopes = testScopesWithRedirect();
await test.step('Navigate to dashboard and open scopes selector', async () => {
await test.step('Set up routes and navigate to dashboard', async () => {
// Set up routes BEFORE navigation to ensure all requests are mocked
await setupScopeRoutes(page, scopes);
await gotoDashboardPage({ uid: 'cuj-dashboard-1' });
});
await test.step('Open scopes selector', async () => {
await openScopesSelector(page, scopes);
});
@@ -40,8 +45,12 @@ test.describe('Scope Redirect Functionality', () => {
test('should prioritize redirectUrl over scope navigation fallback', async ({ page, gotoDashboardPage }) => {
const scopes = testScopesWithRedirect();
await test.step('Navigate to dashboard and open scopes selector', async () => {
await test.step('Set up routes and navigate to dashboard', async () => {
await setupScopeRoutes(page, scopes);
await gotoDashboardPage({ uid: 'cuj-dashboard-1' });
});
await test.step('Open scopes selector', async () => {
await openScopesSelector(page, scopes);
});
@@ -68,8 +77,12 @@ test.describe('Scope Redirect Functionality', () => {
}) => {
const scopes = testScopesWithRedirect();
await test.step('Navigate to dashboard and select scope', async () => {
await test.step('Set up routes and navigate to dashboard', async () => {
await setupScopeRoutes(page, scopes);
await gotoDashboardPage({ uid: 'cuj-dashboard-1' });
});
await test.step('Select and apply scope', async () => {
await openScopesSelector(page, scopes);
await selectScope(page, 'sn-redirect-fallback', scopes[1]);
await applyScopes(page, [scopes[1]]);
@@ -112,8 +125,12 @@ test.describe('Scope Redirect Functionality', () => {
}) => {
const scopes = testScopesWithRedirect();
await test.step('Navigate to dashboard and select scope', async () => {
await test.step('Set up routes and navigate to dashboard', async () => {
await setupScopeRoutes(page, scopes);
await gotoDashboardPage({ uid: 'cuj-dashboard-1' });
});
await test.step('Select and apply scope', async () => {
await openScopesSelector(page, scopes);
await selectScope(page, 'sn-redirect-fallback', scopes[1]);
await applyScopes(page, [scopes[1]]);
@@ -151,9 +168,13 @@ test.describe('Scope Redirect Functionality', () => {
test('should not redirect to redirectPath when on active scope navigation', async ({ page, gotoDashboardPage }) => {
const scopes = testScopesWithRedirect();
await test.step('Set up routes and navigate to dashboard', async () => {
await setupScopeRoutes(page, scopes);
await gotoDashboardPage({ uid: 'cuj-dashboard-1' });
});
await test.step('Set up scope navigation to dashboard-1', async () => {
// First, apply a scope that creates scope navigation to dashboard-1 (without redirectPath)
await gotoDashboardPage({ uid: 'cuj-dashboard-1' });
await openScopesSelector(page, scopes);
await selectScope(page, 'sn-redirect-setup', scopes[2]);
await applyScopes(page, [scopes[2]]);
+183 -15
View File
@@ -6,7 +6,150 @@ import { Resource } from '../../public/app/features/apiserver/types';
import { testScopes } from './scopes';
const USE_LIVE_DATA = Boolean(process.env.API_CALLS_CONFIG_PATH);
const USE_LIVE_DATA = Boolean(process.env.API_CONFIG_PATH);
/**
* Sets up all scope-related API routes before navigation.
* This ensures that ALL scope API requests (including those made during initial page load)
* are intercepted by the mocks, preventing RTK Query from caching real API responses.
*
* Call this BEFORE navigating to a page (e.g., before gotoDashboardPage).
*/
export async function setupScopeRoutes(page: Page, scopes: TestScope[]): Promise<void> {
// Route for scope node children (tree structure)
await page.route(`**/apis/scope.grafana.app/v0alpha1/namespaces/*/find/scope_node_children*`, async (route) => {
const url = new URL(route.request().url());
const parentParam = url.searchParams.get('parent');
const queryParam = url.searchParams.get('query');
// Find the appropriate scopes based on parent
let scopesToReturn = scopes;
if (parentParam) {
// Find nested scopes based on parent name
const findChildren = (items: TestScope[]): TestScope[] => {
for (const item of items) {
if (item.name === parentParam && item.children) {
return item.children;
}
if (item.children) {
const found = findChildren(item.children);
if (found.length > 0) {
return found;
}
}
}
return [];
};
scopesToReturn = findChildren(scopes);
if (scopesToReturn.length === 0) {
scopesToReturn = scopes; // Fallback to root scopes
}
}
// Filter by search query if provided
if (queryParam) {
const query = queryParam.toLowerCase();
const filterByQuery = (items: TestScope[]): TestScope[] => {
const results: TestScope[] = [];
for (const item of items) {
// Exact match on name or title containing the query
if (item.name.toLowerCase() === query || item.title.toLowerCase() === query) {
results.push(item);
} else if (item.name.toLowerCase().includes(query) || item.title.toLowerCase().includes(query)) {
results.push(item);
}
// Also search in children
if (item.children) {
results.push(...filterByQuery(item.children));
}
}
return results;
};
scopesToReturn = filterByQuery(scopesToReturn);
}
await route.fulfill({
status: 200,
contentType: 'application/json',
body: JSON.stringify({
apiVersion: 'scope.grafana.app/v0alpha1',
kind: 'FindScopeNodeChildrenResults',
metadata: {},
items: scopesToReturn.map((scope) => ({
kind: 'ScopeNode',
apiVersion: 'scope.grafana.app/v0alpha1',
metadata: {
name: scope.name,
namespace: 'default',
},
spec: {
title: scope.title,
description: scope.title,
disableMultiSelect: scope.disableMultiSelect ?? false,
nodeType: scope.children ? 'container' : 'leaf',
...(parentParam && { parentName: parentParam }),
...((scope.addLinks || scope.children) && {
linkType: 'scope',
linkId: `scope-${scope.name}`,
}),
...(scope.redirectPath && { redirectPath: scope.redirectPath }),
},
})),
}),
});
});
// Route for individual scope fetching
await page.route(`**/apis/scope.grafana.app/v0alpha1/namespaces/*/scopes/*`, async (route) => {
const url = route.request().url();
const scopeName = url.split('/scopes/')[1]?.split('?')[0];
// Find the scope in the test data
const findScope = (items: TestScope[]): TestScope | undefined => {
for (const item of items) {
if (`scope-${item.name}` === scopeName) {
return item;
}
if (item.children) {
const found = findScope(item.children);
if (found) {
return found;
}
}
}
return undefined;
};
const scope = findScope(scopes);
if (scope) {
await route.fulfill({
status: 200,
contentType: 'application/json',
body: JSON.stringify({
kind: 'Scope',
apiVersion: 'scope.grafana.app/v0alpha1',
metadata: {
name: `scope-${scope.name}`,
namespace: 'default',
},
spec: {
title: scope.title,
description: '',
filters: scope.filters,
category: scope.category,
type: scope.type,
},
}),
});
} else {
await route.fulfill({ status: 404 });
}
});
// Note: Dashboard bindings and navigations routes are set up dynamically in applyScopes()
// with scope-specific URL patterns to avoid cache issues. They are not set up here.
}
export type TestScope = {
name: string;
@@ -24,6 +167,9 @@ export type TestScope = {
type ScopeDashboardBinding = Resource<ScopeDashboardBindingSpec, ScopeDashboardBindingStatus, 'ScopeDashboardBinding'>;
/**
* Sets up a route for scope node children requests and waits for the response.
*/
export async function scopeNodeChildrenRequest(
page: Page,
scopes: TestScope[],
@@ -68,10 +214,13 @@ export async function scopeNodeChildrenRequest(
return page.waitForResponse((response) => response.url().includes(`/find/scope_node_children`));
}
/**
* Opens the scopes selector dropdown and waits for the tree to load.
*/
export async function openScopesSelector(page: Page, scopes?: TestScope[]) {
const click = async () => await page.getByTestId('scopes-selector-input').click();
if (!scopes) {
if (!scopes || USE_LIVE_DATA) {
await click();
return;
}
@@ -82,10 +231,13 @@ export async function openScopesSelector(page: Page, scopes?: TestScope[]) {
await responsePromise;
}
/**
* Expands a scope tree node and waits for children to load.
*/
export async function expandScopesSelection(page: Page, parentScope: string, scopes?: TestScope[]) {
const click = async () => await page.getByTestId(`scopes-tree-${parentScope}-expand`).click();
if (!scopes) {
if (!scopes || USE_LIVE_DATA) {
await click();
return;
}
@@ -96,6 +248,9 @@ export async function expandScopesSelection(page: Page, parentScope: string, sco
await responsePromise;
}
/**
* Sets up a route for individual scope requests and waits for the response.
*/
export async function scopeSelectRequest(page: Page, selectedScope: TestScope): Promise<Response> {
await page.route(
`**/apis/scope.grafana.app/v0alpha1/namespaces/*/scopes/scope-${selectedScope.name}`,
@@ -125,6 +280,9 @@ export async function scopeSelectRequest(page: Page, selectedScope: TestScope):
return page.waitForResponse((response) => response.url().includes(`/scopes/scope-${selectedScope.name}`));
}
/**
* Selects a scope in the tree.
*/
export async function selectScope(page: Page, scopeName: string, selectedScope?: TestScope) {
const click = async () => {
const element = page.locator(
@@ -134,7 +292,7 @@ export async function selectScope(page: Page, scopeName: string, selectedScope?:
await element.click({ force: true });
};
if (!selectedScope) {
if (!selectedScope || USE_LIVE_DATA) {
await click();
return;
}
@@ -145,14 +303,22 @@ export async function selectScope(page: Page, scopeName: string, selectedScope?:
await responsePromise;
}
/**
* Applies the selected scopes and waits for the selector to close and page to settle.
* Sets up routes dynamically with scope-specific URL patterns to avoid cache issues.
*/
export async function applyScopes(page: Page, scopes?: TestScope[]) {
const click = async () => {
await page.getByTestId('scopes-selector-apply').scrollIntoViewIfNeeded();
await page.getByTestId('scopes-selector-apply').click({ force: true });
};
if (!scopes) {
if (!scopes || USE_LIVE_DATA) {
await click();
// Wait for the apply button to disappear (selector closed)
await page.waitForSelector('[data-testid="scopes-selector-apply"]', { state: 'hidden', timeout: 5000 });
// Wait for any resulting API calls (dashboard bindings, etc.) to complete
await page.waitForLoadState('networkidle');
return;
}
@@ -166,7 +332,7 @@ export async function applyScopes(page: Page, scopes?: TestScope[]) {
const groups: string[] = ['Most relevant', 'Dashboards', 'Something else', ''];
// Mock scope_dashboard_bindings endpoint
// Mock scope_dashboard_bindings endpoint with scope-specific URL pattern
await page.route(dashboardBindingsUrl, async (route) => {
await route.fulfill({
status: 200,
@@ -220,7 +386,7 @@ export async function applyScopes(page: Page, scopes?: TestScope[]) {
});
});
// Mock scope_navigations endpoint
// Mock scope_navigations endpoint with scope-specific URL pattern
await page.route(scopeNavigationsUrl, async (route) => {
await route.fulfill({
status: 200,
@@ -266,21 +432,23 @@ export async function applyScopes(page: Page, scopes?: TestScope[]) {
(response) =>
response.url().includes(`/find/scope_dashboard_bindings`) || response.url().includes(`/find/scope_navigations`)
);
const scopeRequestPromises: Array<Promise<Response>> = [];
for (const scope of scopes) {
scopeRequestPromises.push(scopeSelectRequest(page, scope));
}
await click();
await responsePromise;
await Promise.all(scopeRequestPromises);
// Wait for the apply button to disappear (selector closed)
await page.waitForSelector('[data-testid="scopes-selector-apply"]', { state: 'hidden', timeout: 5000 });
// Wait for any resulting API calls to complete
await page.waitForLoadState('networkidle');
}
export async function searchScopes(page: Page, value: string, resultScopes: TestScope[]) {
/**
* Searches for scopes in the tree and waits for results.
* Sets up a route dynamically with filtered results to return only matching scopes.
*/
export async function searchScopes(page: Page, value: string, resultScopes?: TestScope[]) {
const click = async () => await page.getByTestId('scopes-tree-search').fill(value);
if (!resultScopes) {
if (!resultScopes || USE_LIVE_DATA) {
await click();
return;
}
+2 -2
View File
@@ -89,7 +89,7 @@ require (
github.com/googleapis/gax-go/v2 v2.15.0 // @grafana/grafana-backend-group
github.com/gorilla/mux v1.8.1 // @grafana/grafana-backend-group
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 // @grafana/grafana-app-platform-squad
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f // @grafana/alerting-backend
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f // @grafana/alerting-backend
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f // @grafana/identity-access-team
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 // @grafana/identity-access-team
github.com/grafana/dataplane/examples v0.0.1 // @grafana/observability-metrics
@@ -704,7 +704,7 @@ require (
replace github.com/crewjam/saml => github.com/grafana/saml v0.4.15-0.20240917091248-ae3bbdad8a56
// Use our fork of the upstream Alertmanager.
replace github.com/prometheus/alertmanager => github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604
replace github.com/prometheus/alertmanager => github.com/grafana/prometheus-alertmanager v0.25.1-0.20260112162805-d29cc9cf7f0f
exclude github.com/mattn/go-sqlite3 v2.0.3+incompatible
+4 -4
View File
@@ -1627,8 +1627,8 @@ github.com/gorilla/sessions v1.2.1 h1:DHd3rPN5lE3Ts3D8rKkQ8x/0kqfeNmBAaiSi+o7Fsg
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674 h1:JeSE6pjso5THxAzdVpqr6/geYxZytqFMBCOtn/ujyeo=
github.com/gorilla/websocket v1.5.4-0.20250319132907-e064f32e3674/go.mod h1:r4w70xmWCQKmi1ONH4KIaBptdivuRPyosB9RmPlGEwA=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f h1:Br4SaUL3dnVopKKNhDavCLgehw60jdtl/sIxdfzmVts=
github.com/grafana/alerting v0.0.0-20251231150637-b7821017d69f/go.mod h1:l7v67cgP7x72ajB9UPZlumdrHqNztpKoqQ52cU8T3LU=
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f h1:3bXOyht68qkfvD6Y8z8XoenFbytSSOIkr/s+AqRzj0o=
github.com/grafana/alerting v0.0.0-20260112172717-98a49ed9557f/go.mod h1:Ji0SfJChcwjgq8ljy6Y5CcYfHfAYKXjKYeysOoDS/6s=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f h1:Cbm6OKkOcJ+7CSZsGsEJzktC/SIa5bxVeYKQLuYK86o=
github.com/grafana/authlib v0.0.0-20250930082137-a40e2c2b094f/go.mod h1:axY0cdOg3q0TZHwpHnIz5x16xZ8ZBxJHShsSHHXcHQg=
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4 h1:Muoy+FMGrHj3GdFbvsMzUT7eusgii9PKf9L1ZaXDDbY=
@@ -1681,8 +1681,8 @@ github.com/grafana/nanogit v0.3.0 h1:XNEef+4Vi+465ZITJs/g/xgnDRJbWhhJ7iQrAnWZ0oQ
github.com/grafana/nanogit v0.3.0/go.mod h1:6s6CCTpyMOHPpcUZaLGI+rgBEKdmxVbhqSGgCK13j7Y=
github.com/grafana/otel-profiling-go v0.5.1 h1:stVPKAFZSa7eGiqbYuG25VcqYksR6iWvF3YH66t4qL8=
github.com/grafana/otel-profiling-go v0.5.1/go.mod h1:ftN/t5A/4gQI19/8MoWurBEtC6gFw8Dns1sJZ9W4Tls=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604 h1:aXfUhVN/Ewfpbko2CCtL65cIiGgwStOo4lWH2b6gw2U=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604/go.mod h1:O/QP1BCm0HHIzbKvgMzqb5sSyH88rzkFk84F4TfJjBU=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20260112162805-d29cc9cf7f0f h1:9tRhudagkQO2s61SLFLSziIdCm7XlkfypVKDxpcHokg=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20260112162805-d29cc9cf7f0f/go.mod h1:AsVdCBeDFN9QbgpJg+8voDAcgsW0RmNvBd70ecMMdC0=
github.com/grafana/pyroscope-go/godeltaprof v0.1.9 h1:c1Us8i6eSmkW+Ez05d3co8kasnuOY813tbMN8i/a3Og=
github.com/grafana/pyroscope-go/godeltaprof v0.1.9/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
github.com/grafana/pyroscope/api v1.2.1-0.20251118081820-ace37f973a0f h1:fTlIj5n4x5dU63XHItug7GLjtnaeJdPqBlqg4zlABq0=
+1 -1
View File
@@ -38,6 +38,6 @@ use (
./pkg/semconv
)
replace github.com/prometheus/alertmanager => github.com/grafana/prometheus-alertmanager v0.25.1-0.20250911094103-5456b6e45604
replace github.com/prometheus/alertmanager => github.com/grafana/prometheus-alertmanager v0.25.1-0.20260112162805-d29cc9cf7f0f
replace github.com/crewjam/saml => github.com/grafana/saml v0.4.15-0.20240917091248-ae3bbdad8a56
+2
View File
@@ -997,6 +997,8 @@ github.com/grafana/prometheus-alertmanager v0.25.1-0.20250331083058-4563aec7a975
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250331083058-4563aec7a975/go.mod h1:FGdGvhI40Dq+CTQaSzK9evuve774cgOUdGfVO04OXkw=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250604130045-92c8f6389b36 h1:AjZ58JRw1ZieFH/SdsddF5BXtsDKt5kSrKNPWrzYz3Y=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20250604130045-92c8f6389b36/go.mod h1:O/QP1BCm0HHIzbKvgMzqb5sSyH88rzkFk84F4TfJjBU=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20260112162805-d29cc9cf7f0f h1:9tRhudagkQO2s61SLFLSziIdCm7XlkfypVKDxpcHokg=
github.com/grafana/prometheus-alertmanager v0.25.1-0.20260112162805-d29cc9cf7f0f/go.mod h1:AsVdCBeDFN9QbgpJg+8voDAcgsW0RmNvBd70ecMMdC0=
github.com/grafana/pyroscope-go/godeltaprof v0.1.8/go.mod h1:2+l7K7twW49Ct4wFluZD3tZ6e0SjanjcUUBPVD/UuGU=
github.com/grafana/sqlds/v4 v4.2.4/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
github.com/grafana/sqlds/v4 v4.2.7/go.mod h1:BQRjUG8rOqrBI4NAaeoWrIMuoNgfi8bdhCJ+5cgEfLU=
@@ -34,6 +34,8 @@ export function createBaseQuery({ baseURL }: CreateBaseQueryOptions): BaseQueryF
getBackendSrv().fetch({
...requestOptions,
url: baseURL + requestOptions.url,
// Default to GET so backend_srv correctly skips success alerts for queries
method: requestOptions.method ?? 'GET',
showErrorAlert: requestOptions.showErrorAlert ?? false,
data: requestOptions.body,
headers,
@@ -0,0 +1,500 @@
/**
 * Types for Scopes API - matching @grafana/data types
 */

/** A single label-matcher applied when a scope is selected. */
export interface ScopeFilter {
  /** Label key the filter matches against. */
  key: string;
  /** Label value (a regex when operator is one of the regex variants). */
  value: string;
  /** Comparison semantics applied between key and value. */
  operator: 'equals' | 'not-equals' | 'regex-match' | 'regex-not-match';
}

/** User-facing definition of a scope: display title plus its filters. */
export interface ScopeSpec {
  title: string;
  filters: ScopeFilter[];
}

/** A scope resource: unique metadata.name plus its spec. */
export interface Scope {
  metadata: {
    name: string;
  };
  spec: ScopeSpec;
}

/** Definition of a node in the scope selector tree. */
export interface ScopeNodeSpec {
  /** 'container' nodes hold children; 'leaf' nodes are terminal. */
  nodeType: 'container' | 'leaf';
  title: string;
  description?: string;
  /** When true, only one descendant may be selected at a time. */
  disableMultiSelect?: boolean;
  /** Present when selecting this node applies a scope. */
  linkType?: 'scope';
  /** Name of the Scope this node links to (paired with linkType). */
  linkId?: string;
  /** metadata.name of the parent node; '' marks a root node. */
  parentName: string;
}

/** A node resource in the scope tree. */
export interface ScopeNode {
  metadata: {
    name: string;
  };
  spec: ScopeNodeSpec;
}

/** Links a dashboard to a scope. */
export interface ScopeDashboardBindingSpec {
  dashboard: string;
  scope: string;
}

/** Server-populated display data for a dashboard binding. */
export interface ScopeDashboardBindingStatus {
  dashboardTitle: string;
  /** Grouping headers the dashboard appears under; may repeat per group. */
  groups?: string[];
}

/** A binding resource tying one dashboard to one scope. */
export interface ScopeDashboardBinding {
  metadata: {
    name: string;
  };
  spec: ScopeDashboardBindingSpec;
  status: ScopeDashboardBindingStatus;
}

/** A navigation entry surfaced when its scope is selected. */
export interface ScopeNavigation {
  metadata: {
    name: string;
  };
  spec: {
    url: string;
    scope: string;
    subScope?: string;
    preLoadSubScopeChildren?: boolean;
    expandOnLoad?: boolean;
    disableSubScopeSelection?: boolean;
  };
  status: {
    title: string;
    groups?: string[];
  };
}
export const MOCK_SCOPES: Scope[] = [
{
metadata: { name: 'cloud' },
spec: {
title: 'Cloud',
filters: [{ key: 'cloud', value: '.*', operator: 'regex-match' }],
},
},
{
metadata: { name: 'dev' },
spec: {
title: 'Dev',
filters: [{ key: 'cloud', value: 'dev', operator: 'equals' }],
},
},
{
metadata: { name: 'ops' },
spec: {
title: 'Ops',
filters: [{ key: 'cloud', value: 'ops', operator: 'equals' }],
},
},
{
metadata: { name: 'prod' },
spec: {
title: 'Prod',
filters: [{ key: 'cloud', value: 'prod', operator: 'equals' }],
},
},
{
metadata: { name: 'grafana' },
spec: {
title: 'Grafana',
filters: [{ key: 'app', value: 'grafana', operator: 'equals' }],
},
},
{
metadata: { name: 'mimir' },
spec: {
title: 'Mimir',
filters: [{ key: 'app', value: 'mimir', operator: 'equals' }],
},
},
{
metadata: { name: 'loki' },
spec: {
title: 'Loki',
filters: [{ key: 'app', value: 'loki', operator: 'equals' }],
},
},
{
metadata: { name: 'tempo' },
spec: {
title: 'Tempo',
filters: [{ key: 'app', value: 'tempo', operator: 'equals' }],
},
},
{
metadata: { name: 'dev-env' },
spec: {
title: 'Development',
filters: [{ key: 'environment', value: 'dev', operator: 'equals' }],
},
},
{
metadata: { name: 'prod-env' },
spec: {
title: 'Production',
filters: [{ key: 'environment', value: 'prod', operator: 'equals' }],
},
},
];
/** Slugifies a display string the way binding names are built: lowercase, spaces and slashes to dashes. */
const toSlug = (value: string) => value.toLowerCase().replaceAll(' ', '-').replaceAll('/', '-');

/**
 * Expands the cross product of scope titles and dashboard definitions into
 * ScopeDashboardBinding fixtures. The binding's dashboard id is prefixed with
 * the slug of its single group, nothing for no/empty group, or `multiple{idx}-`
 * when the dashboard belongs to several groups.
 */
const dashboardBindingsGenerator = (
  scopes: string[],
  dashboards: Array<{ dashboardTitle: string; dashboardKey?: string; groups?: string[] }>
): ScopeDashboardBinding[] =>
  scopes.flatMap((scopeTitle) => {
    const scope = toSlug(scopeTitle);
    return dashboards.map(({ dashboardTitle, groups, dashboardKey }, idx) => {
      const key = dashboardKey ?? toSlug(dashboardTitle);
      let groupPrefix: string;
      if (!groups) {
        groupPrefix = '';
      } else if (groups.length === 1) {
        groupPrefix = groups[0] === '' ? '' : `${toSlug(groups[0])}-`;
      } else {
        groupPrefix = `multiple${idx}-`;
      }
      const dashboard = `${groupPrefix}${key}`;
      return {
        metadata: { name: `${scope}-${dashboard}` },
        spec: {
          dashboard,
          scope,
        },
        status: {
          dashboardTitle,
          groups,
        },
      };
    });
  });
// Pre-generated dashboard-binding fixtures:
// - 'grafana' gets a mix of single-group, empty-group ([''] -> no prefix) and
//   ungrouped dashboards, including duplicate titles in different groups;
// - 'loki'/'tempo'/'mimir' share component dashboards that each live in two
//   groups (exercising the `multiple{idx}-` naming);
// - 'dev'/'ops'/'prod' share cardinality/usage-insight dashboards.
export const MOCK_SCOPE_DASHBOARD_BINDINGS: ScopeDashboardBinding[] = [
  ...dashboardBindingsGenerator(
    ['Grafana'],
    [
      { dashboardTitle: 'Data Sources', groups: ['General'] },
      { dashboardTitle: 'Usage', groups: ['General'] },
      { dashboardTitle: 'Frontend Errors', groups: ['Observability'] },
      { dashboardTitle: 'Frontend Logs', groups: ['Observability'] },
      { dashboardTitle: 'Backend Errors', groups: ['Observability'] },
      { dashboardTitle: 'Backend Logs', groups: ['Observability'] },
      { dashboardTitle: 'Usage Overview', groups: ['Usage'] },
      { dashboardTitle: 'Data Sources', groups: ['Usage'] },
      { dashboardTitle: 'Stats', groups: ['Usage'] },
      { dashboardTitle: 'Overview', groups: [''] },
      { dashboardTitle: 'Frontend' },
      { dashboardTitle: 'Stats' },
    ]
  ),
  ...dashboardBindingsGenerator(
    ['Loki', 'Tempo', 'Mimir'],
    [
      { dashboardTitle: 'Ingester', groups: ['Components', 'Investigations'] },
      { dashboardTitle: 'Distributor', groups: ['Components', 'Investigations'] },
      { dashboardTitle: 'Compacter', groups: ['Components', 'Investigations'] },
      { dashboardTitle: 'Datasource Errors', groups: ['Observability', 'Investigations'] },
      { dashboardTitle: 'Datasource Logs', groups: ['Observability', 'Investigations'] },
      { dashboardTitle: 'Overview' },
      // 'another-stats' key avoids colliding with the grafana 'stats' binding.
      { dashboardTitle: 'Stats', dashboardKey: 'another-stats' },
    ]
  ),
  ...dashboardBindingsGenerator(
    ['Dev', 'Ops', 'Prod'],
    [
      { dashboardTitle: 'Overview', groups: ['Cardinality Management'] },
      { dashboardTitle: 'Metrics', groups: ['Cardinality Management'] },
      { dashboardTitle: 'Labels', groups: ['Cardinality Management'] },
      { dashboardTitle: 'Overview', groups: ['Usage Insights'] },
      { dashboardTitle: 'Data Sources', groups: ['Usage Insights'] },
      { dashboardTitle: 'Query Errors', groups: ['Usage Insights'] },
      { dashboardTitle: 'Alertmanager', groups: ['Usage Insights'] },
      { dashboardTitle: 'Metrics Ingestion', groups: ['Usage Insights'] },
      { dashboardTitle: 'Billing/Usage' },
    ]
  ),
];
/** Builds a leaf node whose description mirrors its title and which links to a scope. */
const leafNode = (name: string, title: string, parentName: string, linkId: string): ScopeNode => ({
  metadata: { name },
  spec: { nodeType: 'leaf', title, description: title, linkType: 'scope', linkId, parentName },
});

/**
 * Mock scope-node tree: two mirrored roots ('applications' and 'cloud', each
 * containing the other as a sub-container) plus an 'environments' root that
 * links to the *-env scopes.
 */
export const MOCK_NODES: ScopeNode[] = [
  {
    metadata: { name: 'applications' },
    spec: { nodeType: 'container', title: 'Applications', description: 'Application Scopes', parentName: '' },
  },
  {
    metadata: { name: 'cloud' },
    spec: {
      nodeType: 'container',
      title: 'Cloud',
      description: 'Cloud Scopes',
      disableMultiSelect: true,
      linkType: 'scope',
      linkId: 'cloud',
      parentName: '',
    },
  },
  leafNode('applications-grafana', 'Grafana', 'applications', 'grafana'),
  leafNode('applications-mimir', 'Mimir', 'applications', 'mimir'),
  leafNode('applications-loki', 'Loki', 'applications', 'loki'),
  leafNode('applications-tempo', 'Tempo', 'applications', 'tempo'),
  {
    metadata: { name: 'applications-cloud' },
    spec: {
      nodeType: 'container',
      title: 'Cloud',
      description: 'Application/Cloud Scopes',
      linkType: 'scope',
      linkId: 'cloud',
      parentName: 'applications',
    },
  },
  leafNode('applications-cloud-dev', 'Dev', 'applications-cloud', 'dev'),
  leafNode('applications-cloud-ops', 'Ops', 'applications-cloud', 'ops'),
  leafNode('applications-cloud-prod', 'Prod', 'applications-cloud', 'prod'),
  leafNode('cloud-dev', 'Dev', 'cloud', 'dev'),
  leafNode('cloud-ops', 'Ops', 'cloud', 'ops'),
  leafNode('cloud-prod', 'Prod', 'cloud', 'prod'),
  {
    metadata: { name: 'cloud-applications' },
    spec: {
      nodeType: 'container',
      title: 'Applications',
      description: 'Cloud/Application Scopes',
      parentName: 'cloud',
    },
  },
  leafNode('cloud-applications-grafana', 'Grafana', 'cloud-applications', 'grafana'),
  leafNode('cloud-applications-mimir', 'Mimir', 'cloud-applications', 'mimir'),
  leafNode('cloud-applications-loki', 'Loki', 'cloud-applications', 'loki'),
  leafNode('cloud-applications-tempo', 'Tempo', 'cloud-applications', 'tempo'),
  {
    metadata: { name: 'environments' },
    spec: {
      nodeType: 'container',
      title: 'Environments',
      description: 'Environment Scopes',
      disableMultiSelect: true,
      parentName: '',
    },
  },
  {
    metadata: { name: 'environments-dev' },
    spec: {
      nodeType: 'container',
      title: 'Development',
      description: 'Development Environment',
      linkType: 'scope',
      linkId: 'dev-env',
      parentName: 'environments',
    },
  },
  {
    metadata: { name: 'environments-prod' },
    spec: {
      nodeType: 'container',
      title: 'Production',
      description: 'Production Environment',
      linkType: 'scope',
      linkId: 'prod-env',
      parentName: 'environments',
    },
  },
];
/** Builds the nth mimir navigation fixture; item 1 is grouped under 'General', item 2 under 'Observability'. */
const mimirNavigation = (idx: number, group: string): ScopeNavigation => ({
  metadata: { name: `mimir-item-${idx}` },
  spec: {
    scope: 'mimir',
    url: `/d/mimir-dashboard-${idx}`,
  },
  status: {
    title: `Mimir Dashboard ${idx}`,
    groups: [group],
  },
});

/** Navigation entries returned when the 'mimir' scope is selected. */
export const MOCK_SUB_SCOPE_MIMIR_ITEMS: ScopeNavigation[] = [
  mimirNavigation(1, 'General'),
  mimirNavigation(2, 'Observability'),
];
/** Navigation entries returned when the 'loki' scope is selected. */
export const MOCK_SUB_SCOPE_LOKI_ITEMS: ScopeNavigation[] = [
  {
    metadata: { name: 'loki-item-1' },
    spec: { scope: 'loki', url: '/d/loki-dashboard-1' },
    status: { title: 'Loki Dashboard 1', groups: ['General'] },
  },
];
@@ -12,6 +12,7 @@ import appPlatformDashboardv0alpha1Handlers from './apis/dashboard.grafana.app/v
import appPlatformDashboardv1beta1Handlers from './apis/dashboard.grafana.app/v1beta1/handlers';
import appPlatformFolderv1beta1Handlers from './apis/folder.grafana.app/v1beta1/handlers';
import appPlatformIamv0alpha1Handlers from './apis/iam.grafana.app/v0alpha1/handlers';
import appPlatformScopev0alpha1Handlers from './apis/scope.grafana.app/v0alpha1/handlers';
const allHandlers: HttpHandler[] = [
// Legacy handlers
@@ -29,6 +30,7 @@ const allHandlers: HttpHandler[] = [
...appPlatformFolderv1beta1Handlers,
...appPlatformIamv0alpha1Handlers,
...appPlatformCollectionsv1alpha1Handlers,
...appPlatformScopev0alpha1Handlers,
];
export default allHandlers;
@@ -0,0 +1,131 @@
import { HttpResponse, http } from 'msw';
import {
MOCK_NODES,
MOCK_SCOPES,
MOCK_SCOPE_DASHBOARD_BINDINGS,
MOCK_SUB_SCOPE_LOKI_ITEMS,
MOCK_SUB_SCOPE_MIMIR_ITEMS,
ScopeNavigation,
} from '../../../../fixtures/scopes';
import { getErrorResponse } from '../../../helpers';
const API_BASE = '/apis/scope.grafana.app/v0alpha1/namespaces/:namespace';

/**
 * GET /apis/scope.grafana.app/v0alpha1/namespaces/:namespace/scopes/:name
 *
 * Fetches a single scope by name; 404s with a k8s-style error body when the
 * name is not in MOCK_SCOPES.
 */
const getScopeHandler = () =>
  http.get<{ namespace: string; name: string }>(`${API_BASE}/scopes/:name`, ({ params }) => {
    const match = MOCK_SCOPES.find((candidate) => candidate.metadata.name === params.name);
    if (match) {
      return HttpResponse.json(match);
    }
    return HttpResponse.json(getErrorResponse(`scopes.scope.grafana.app "${params.name}" not found`, 404), {
      status: 404,
    });
  });
/**
 * GET /apis/scope.grafana.app/v0alpha1/namespaces/:namespace/scopenodes/:name
 *
 * Fetches a single scope node by name; 404s with a k8s-style error body when
 * the name is not in MOCK_NODES.
 */
const getScopeNodeHandler = () =>
  http.get<{ namespace: string; name: string }>(`${API_BASE}/scopenodes/:name`, ({ params }) => {
    const match = MOCK_NODES.find((candidate) => candidate.metadata.name === params.name);
    if (match) {
      return HttpResponse.json(match);
    }
    return HttpResponse.json(getErrorResponse(`scopenodes.scope.grafana.app "${params.name}" not found`, 404), {
      status: 404,
    });
  });
/**
 * GET /apis/scope.grafana.app/v0alpha1/namespaces/:namespace/find/scope_node_children
 *
 * Finds scope node children based on parent and query filters. An explicit
 * `names` parameter overrides the parent/query filtering entirely; `limit`
 * caps the number of results either way.
 */
const findScopeNodeChildrenHandler = () =>
  http.get(`${API_BASE}/find/scope_node_children`, ({ request }) => {
    const searchParams = new URL(request.url).searchParams;
    const parent = searchParams.get('parent') ?? '';
    const query = (searchParams.get('query') ?? '').toLowerCase();
    const limitParam = searchParams.get('limit');
    const names = searchParams.getAll('names');

    let items =
      names.length > 0
        ? MOCK_NODES.filter((node) => names.includes(node.metadata.name))
        : MOCK_NODES.filter(
            (node) => node.spec.parentName === parent && node.spec.title.toLowerCase().includes(query)
          );

    if (limitParam) {
      items = items.slice(0, parseInt(limitParam, 10));
    }
    return HttpResponse.json({ items });
  });
/**
 * GET /apis/scope.grafana.app/v0alpha1/namespaces/:namespace/find/scope_dashboard_bindings
 *
 * Finds scope dashboard bindings for the given `scope` query parameters.
 */
const findScopeDashboardBindingsHandler = () =>
  http.get(`${API_BASE}/find/scope_dashboard_bindings`, ({ request }) => {
    const requested = new Set(new URL(request.url).searchParams.getAll('scope'));
    const items = MOCK_SCOPE_DASHBOARD_BINDINGS.filter((binding) => requested.has(binding.spec.scope));
    return HttpResponse.json({ items });
  });
/**
 * GET /apis/scope.grafana.app/v0alpha1/namespaces/:namespace/find/scope_navigations
 *
 * Finds scope navigations for the given `scope` query parameters. Only the
 * 'mimir' and 'loki' scopes have navigation fixtures; mimir items always
 * precede loki items in the response.
 */
const findScopeNavigationsHandler = () =>
  http.get(`${API_BASE}/find/scope_navigations`, ({ request }) => {
    const scopeNames = new URL(request.url).searchParams.getAll('scope');
    const navigationsByScope: Array<[string, ScopeNavigation[]]> = [
      ['mimir', MOCK_SUB_SCOPE_MIMIR_ITEMS],
      ['loki', MOCK_SUB_SCOPE_LOKI_ITEMS],
    ];
    const items = navigationsByScope.flatMap(([scope, navigations]) =>
      scopeNames.includes(scope) ? navigations : []
    );
    return HttpResponse.json({ items });
  });
// All scope.grafana.app/v0alpha1 MSW handlers, ready to register with a server.
export default [
  getScopeHandler(),
  getScopeNodeHandler(),
  findScopeNodeChildrenHandler(),
  findScopeDashboardBindingsHandler(),
  findScopeNavigationsHandler(),
];
@@ -2,3 +2,12 @@ import { wellFormedTree } from './fixtures/folders';
export const getFolderFixtures = wellFormedTree;
export { MOCK_TEAMS, MOCK_TEAM_GROUPS } from './fixtures/teams';
export {
MOCK_SCOPES,
MOCK_NODES,
MOCK_SCOPE_DASHBOARD_BINDINGS,
MOCK_SUB_SCOPE_MIMIR_ITEMS,
MOCK_SUB_SCOPE_LOKI_ITEMS,
} from './fixtures/scopes';
export { default as allHandlers } from './handlers/all-handlers';
export { default as scopeHandlers } from './handlers/apis/scope.grafana.app/v0alpha1/handlers';
+1 -1
View File
@@ -7,7 +7,6 @@ require (
github.com/grafana/authlib/types v0.0.0-20251119142549-be091cf2f4d4
github.com/grafana/grafana-app-sdk/logging v0.48.7
github.com/grafana/grafana/pkg/apimachinery v0.0.0-20250514132646-acbc7b54ed9e
github.com/prometheus/client_golang v1.23.2
github.com/stretchr/testify v1.11.1
go.opentelemetry.io/contrib/propagators/jaeger v1.38.0
go.opentelemetry.io/otel v1.39.0
@@ -69,6 +68,7 @@ require (
github.com/onsi/gomega v1.36.2 // indirect
github.com/patrickmn/go-cache v2.1.0+incompatible // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
github.com/prometheus/client_golang v1.23.2 // indirect
github.com/prometheus/client_model v0.6.2 // indirect
github.com/prometheus/common v0.67.4 // indirect
github.com/prometheus/procfs v0.19.2 // indirect
-103
View File
@@ -1,15 +1,9 @@
package rest
import (
"context"
"errors"
"fmt"
"time"
apiequality "k8s.io/apimachinery/pkg/api/equality"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/klog/v2"
"github.com/grafana/grafana/pkg/apimachinery/utils"
)
@@ -66,103 +60,6 @@ const (
Mode5
)
// NamespacedKVStore is the key/value store used to persist the active dual
// writing mode per resource kind. Get returns the stored value plus a flag
// indicating whether the key existed.
type NamespacedKVStore interface {
	Get(ctx context.Context, key string) (string, bool, error)
	Set(ctx context.Context, key, value string) error
}

// ServerLockService serializes work across Grafana instances: fn is only
// executed by the instance that acquires the named lock.
type ServerLockService interface {
	LockExecuteAndRelease(ctx context.Context, actionName string, maxInterval time.Duration, fn func(ctx context.Context)) error
}
// SetDualWritingMode determines and persists the dual-writing mode for
// cfg.Kind, stored in kvs keyed by the kind name, and returns the mode that
// ends up active.
//
// Flow:
//   - the currently persisted mode is read from kvs, defaulting to Mode1 when
//     missing or unparseable;
//   - any change that is not an upgrade from a legacy-read mode (< Mode3) to a
//     unified-read mode (>= Mode3) is applied immediately;
//   - such an upgrade first requires one successful data-sync run in the
//     current mode (skipped when cfg.SkipDataSync is set);
//   - if the sync fails or is incomplete, the current (old) mode is kept and
//     no error is returned — the upgrade is simply not performed this round.
func SetDualWritingMode(
	ctx context.Context,
	kvs NamespacedKVStore,
	cfg *SyncerConfig,
	metrics *DualWriterMetrics,
) (DualWriterMode, error) {
	if cfg == nil {
		return Mode0, errors.New("syncer config is nil")
	}
	// Mode0 means no DualWriter
	if cfg.Mode == Mode0 {
		return Mode0, nil
	}

	toMode := map[string]DualWriterMode{
		// It is not possible to initialize a mode 0 dual writer. Mode 0 represents
		// writing to legacy storage without Unified Storage enabled.
		"1": Mode1,
		"2": Mode2,
		"3": Mode3,
		"4": Mode4,
		"5": Mode5,
	}
	errDualWriterSetCurrentMode := errors.New("failed to set current dual writing mode")

	// Use entity name as key
	kvMode, ok, err := kvs.Get(ctx, cfg.Kind)
	if err != nil {
		return Mode0, errors.New("failed to fetch current dual writing mode")
	}

	currentMode, exists := toMode[kvMode]

	// If the mode does not exist in our mapping, we log an error.
	if !exists && ok {
		// Only log if "ok" because initially all instances will have mode unset for playlists.
		klog.Infof("invalid dual writing mode for %s mode: %v", cfg.Kind, kvMode)
	}

	// If the mode does not exist in our mapping, and we also didn't find an entry for this kind, fallback.
	if !exists || !ok {
		// Default to mode 1
		currentMode = Mode1

		if err := kvs.Set(ctx, cfg.Kind, fmt.Sprint(currentMode)); err != nil {
			return Mode0, errDualWriterSetCurrentMode
		}
	}

	isUpgradeToReadUnifiedMode := currentMode < Mode3 && cfg.Mode >= Mode3
	if !isUpgradeToReadUnifiedMode {
		// Downgrades and non-boundary changes take effect immediately.
		if err := kvs.Set(ctx, cfg.Kind, fmt.Sprint(cfg.Mode)); err != nil {
			return Mode0, errDualWriterSetCurrentMode
		}
		return cfg.Mode, nil
	}

	// If SkipDataSync is enabled, we can set the mode directly without running the syncer.
	if cfg.SkipDataSync {
		if err := kvs.Set(ctx, cfg.Kind, fmt.Sprint(cfg.Mode)); err != nil {
			return Mode0, errDualWriterSetCurrentMode
		}
		return cfg.Mode, nil
	}

	// Transitioning to Mode3 or higher from Mode0, Mode1, or Mode2.
	// We need to run the syncer in the current mode before we can upgrade to Mode3 or higher.
	cfgModeTmp := cfg.Mode
	// Before running the sync, set the syncer config to the current mode, as we have to run the syncer
	// once in the current active mode before we can upgrade.
	cfg.Mode = currentMode
	syncOk, err := runDataSyncer(ctx, cfg, metrics)
	// Once we are done with running the syncer, we can change the mode back on the config to the desired one.
	cfg.Mode = cfgModeTmp
	if err != nil {
		klog.Error("data syncer failed for mode:", kvMode, "err", err)
		return currentMode, nil
	}
	if !syncOk {
		klog.Info("data syncer not ok for mode:", kvMode)
		return currentMode, nil
	}

	// If sync is successful, update the mode to the desired one.
	if err := kvs.Set(ctx, cfg.Kind, fmt.Sprint(cfg.Mode)); err != nil {
		return Mode0, errDualWriterSetCurrentMode
	}
	return cfg.Mode, nil
}
// Compare asserts on the equality of objects returned from both stores (object storage and legacy storage)
func Compare(objA, objB runtime.Object) bool {
if objA == nil || objB == nil {
-328
View File
@@ -1,328 +0,0 @@
package rest
import (
"context"
"fmt"
"math/rand"
"time"
apierrors "k8s.io/apimachinery/pkg/api/errors"
"k8s.io/apimachinery/pkg/api/meta"
metainternalversion "k8s.io/apimachinery/pkg/apis/meta/internalversion"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/rest"
"k8s.io/klog/v2"
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/apimachinery/utils"
)
// syncItem pairs the legacy-storage and unified-storage copies of one object
// (matched by name) so the syncer can diff them; either side may be nil when
// the object exists in only one store.
type syncItem struct {
	name            string                    // object name shared by both stores
	objStorage      runtime.Object            // copy from unified storage, nil if absent there
	objLegacy       runtime.Object            // copy from legacy storage, nil if absent there
	accessorStorage utils.GrafanaMetaAccessor // meta accessor for objStorage
	accessorLegacy  utils.GrafanaMetaAccessor // meta accessor for objLegacy
}
// SyncerConfig holds everything needed to run the legacy -> unified storage
// data syncer for one resource kind. Zero values for the two DataSyncer*
// fields are replaced with defaults by Validate.
type SyncerConfig struct {
	Kind        string               // resource kind being synced; also the KV-store key for the mode
	RequestInfo *request.RequestInfo // request info injected into the syncer's context

	Mode          DualWriterMode // dual-writing mode the syncer runs under
	LegacyStorage Storage        // legacy store, read as the source during sync
	Storage       Storage        // unified store, written/deleted during sync

	ServerLockService ServerLockService // ensures only one instance syncs at a time
	SkipDataSync      bool              // when true, mode upgrades skip the data sync entirely

	DataSyncerInterval     time.Duration // period between syncer runs (default 1h, see Validate)
	DataSyncerRecordsLimit int           // max records per store before sync aborts (default 1000, see Validate)
}
// Validate checks that all required fields are set and fills in defaults for
// DataSyncerInterval (1h) and DataSyncerRecordsLimit (1000). Note that it
// mutates the receiver when applying those defaults.
func (s *SyncerConfig) Validate() error {
	// The cases are ordered so the nil-receiver check runs before any field
	// access; switch stops at the first matching case.
	switch {
	case s == nil:
		return fmt.Errorf("syncer config is nil")
	case s.Kind == "":
		return fmt.Errorf("kind must be specified")
	case s.RequestInfo == nil:
		return fmt.Errorf("requestInfo must be specified")
	case s.ServerLockService == nil:
		return fmt.Errorf("serverLockService must be specified")
	case s.Storage == nil:
		return fmt.Errorf("storage must be specified")
	case s.LegacyStorage == nil:
		return fmt.Errorf("legacy storage must be specified")
	}

	if s.DataSyncerInterval == 0 {
		s.DataSyncerInterval = time.Hour
	}
	if s.DataSyncerRecordsLimit == 0 {
		s.DataSyncerRecordsLimit = 1000
	}
	return nil
}
// StartPeriodicDataSyncer starts a background job that will execute the DataSyncer, syncing the data
// from the hosted grafana backend into the unified storage backend. This is run in the grafana instance.
//
// The first run is jittered by up to 10 minutes so instances started together
// do not all contend for the server lock at once; subsequent runs fire every
// cfg.DataSyncerInterval until ctx is cancelled. Returns an error only when
// cfg fails validation; sync failures are logged, not returned.
func StartPeriodicDataSyncer(ctx context.Context, cfg *SyncerConfig, metrics *DualWriterMetrics) error {
	if err := cfg.Validate(); err != nil {
		return fmt.Errorf("invalid syncer config: %w", err)
	}

	log := klog.NewKlogr().WithName("legacyToUnifiedStorageDataSyncer").WithValues("mode", cfg.Mode, "resource", cfg.Kind)
	log.Info("Starting periodic data syncer")

	// run in background
	go func() {
		r := rand.New(rand.NewSource(time.Now().UnixNano()))
		timeWindow := 600 // 600 seconds (10 minutes)
		jitterSeconds := r.Int63n(int64(timeWindow))
		log.Info("data syncer scheduled", "starting time", time.Now().Add(time.Second*time.Duration(jitterSeconds)))
		time.Sleep(time.Second * time.Duration(jitterSeconds))

		// run it immediately
		syncOK, err := runDataSyncer(ctx, cfg, metrics)
		log.Info("data syncer finished", "syncOK", syncOK, "error", err)

		ticker := time.NewTicker(cfg.DataSyncerInterval)
		for {
			select {
			case <-ticker.C:
				syncOK, err = runDataSyncer(ctx, cfg, metrics)
				// Fixed: the structured-log key was ", error" (with a stray
				// comma/space), producing a malformed key inconsistent with
				// the identical call above.
				log.Info("data syncer finished", "syncOK", syncOK, "error", err)
			case <-ctx.Done():
				return
			}
		}
	}()

	return nil
}
// runDataSyncer will ensure that data between legacy storage and unified storage are in sync.
// The sync implementation depends on the DualWriter mode
//
// Returns (true, nil) only when everything out of sync was reconciled; modes
// other than Mode1/Mode2 are a no-op returning (false, nil).
func runDataSyncer(ctx context.Context, cfg *SyncerConfig, metrics *DualWriterMetrics) (bool, error) {
	if err := cfg.Validate(); err != nil {
		return false, fmt.Errorf("invalid syncer config: %w", err)
	}
	// ensure that execution takes no longer than necessary
	// NOTE(review): if DataSyncerInterval <= 1 minute this timeout is zero or
	// negative, producing an already-expired context — confirm callers always
	// rely on the 1h default or a larger interval.
	timeout := cfg.DataSyncerInterval - time.Minute
	ctx, cancelFn := context.WithTimeout(ctx, timeout)
	defer cancelFn()

	// implementation depends on the current DualWriter mode
	switch cfg.Mode {
	case Mode1, Mode2:
		// Only the modes where legacy storage is still authoritative get synced.
		return legacyToUnifiedStorageDataSyncer(ctx, cfg, metrics)
	default:
		klog.Info("data syncer not implemented for mode:", cfg.Mode)
		return false, nil
	}
}
// legacyToUnifiedStorageDataSyncer reconciles unified storage against legacy
// storage (legacy wins): it upserts objects that are missing or different in
// unified storage and deletes objects that exist only there. The whole pass
// runs under a server-wide lock so only one instance syncs at a time.
//
// Returns true only when every out-of-sync item was reconciled successfully.
// If either store holds DataSyncerRecordsLimit or more records the sync is
// aborted with an error.
func legacyToUnifiedStorageDataSyncer(ctx context.Context, cfg *SyncerConfig, metrics *DualWriterMetrics) (bool, error) {
	if err := cfg.Validate(); err != nil {
		return false, fmt.Errorf("invalid syncer config: %w", err)
	}

	log := klog.NewKlogr().WithName("legacyToUnifiedStorageDataSyncer").WithValues("mode", cfg.Mode, "resource", cfg.Kind)

	everythingSynced := false
	outOfSync := 0
	syncSuccess := 0
	syncErr := 0

	maxInterval := cfg.DataSyncerInterval + 5*time.Minute

	var errSync error
	// LockExecuteAndRelease ensures that just a single Grafana server acquires a lock at a time
	// The parameter 'maxInterval' is a timeout safeguard, if the LastExecution in the
	// database is older than maxInterval, we will assume the lock as timeouted. The 'maxInterval' parameter should be so long
	// that is impossible for 2 processes to run at the same time.
	err := cfg.ServerLockService.LockExecuteAndRelease(ctx, fmt.Sprintf("legacyToUnifiedStorageDataSyncer-%d-%s", cfg.Mode, cfg.Kind), maxInterval, func(context.Context) {
		log.Info("starting legacyToUnifiedStorageDataSyncer")
		startSync := time.Now()

		// Run with the service identity and the configured request info so the
		// storage calls below are authorized and namespaced correctly.
		ctx = klog.NewContext(ctx, log)
		ctx, _ = identity.WithServiceIdentity(ctx, 0)
		ctx = request.WithNamespace(ctx, cfg.RequestInfo.Namespace)
		ctx = request.WithRequestInfo(ctx, cfg.RequestInfo)

		storageList, err := getList(ctx, cfg.Storage, &metainternalversion.ListOptions{
			Limit: int64(cfg.DataSyncerRecordsLimit),
		})
		if err != nil {
			log.Error(err, "unable to extract list from storage")
			return
		}

		if len(storageList) >= cfg.DataSyncerRecordsLimit {
			errSync = fmt.Errorf("unified storage has more than %d records. Aborting sync", cfg.DataSyncerRecordsLimit)
			log.Error(errSync, "Unified storage has more records to be synced than allowed")
			return
		}

		log.Info("got items from unified storage", "items", len(storageList))

		legacyList, err := getList(ctx, cfg.LegacyStorage, &metainternalversion.ListOptions{
			Limit: int64(cfg.DataSyncerRecordsLimit),
		})
		if err != nil {
			log.Error(err, "unable to extract list from legacy storage")
			return
		}
		log.Info("got items from legacy storage", "items", len(legacyList))

		// Index both stores by object name; each syncItem ends up with the
		// legacy and/or storage copy of the same object.
		itemsByName := map[string]syncItem{}
		for _, obj := range legacyList {
			accessor, err := utils.MetaAccessor(obj)
			if err != nil {
				log.Error(err, "error retrieving accessor data for object from legacy storage")
				continue
			}
			name := accessor.GetName()

			item := itemsByName[name]
			item.name = name
			item.objLegacy = obj
			item.accessorLegacy = accessor
			itemsByName[name] = item
		}

		for _, obj := range storageList {
			accessor, err := utils.MetaAccessor(obj)
			if err != nil {
				log.Error(err, "error retrieving accessor data for object from storage")
				continue
			}
			name := accessor.GetName()

			item := itemsByName[name]
			item.name = name
			item.objStorage = obj
			item.accessorStorage = accessor
			itemsByName[name] = item
		}
		log.Info("got list of items to be synced", "items", len(itemsByName))

		for name, item := range itemsByName {
			// upsert if:
			// - existing in both legacy and storage, but objects are different, or
			// - if it's missing from storage
			if item.objLegacy != nil &&
				(item.objStorage == nil || !Compare(item.objLegacy, item.objStorage)) {
				outOfSync++

				if item.objStorage != nil {
					// Update path: carry over storage's RV/UID so the write targets
					// the existing object.
					item.accessorLegacy.SetResourceVersion(item.accessorStorage.GetResourceVersion())
					item.accessorLegacy.SetUID(item.accessorStorage.GetUID())

					log.Info("updating item on unified storage", "name", name)
				} else {
					// Insert path: clear RV/UID so storage assigns fresh ones.
					item.accessorLegacy.SetResourceVersion("")
					item.accessorLegacy.SetUID("")

					log.Info("inserting item on unified storage", "name", name)
				}

				objInfo := rest.DefaultUpdatedObjectInfo(item.objLegacy, []rest.TransformFunc{}...)
				res, _, err := cfg.Storage.Update(ctx,
					name,
					objInfo,
					func(ctx context.Context, obj runtime.Object) error { return nil },
					func(ctx context.Context, obj, old runtime.Object) error { return nil },
					true, // force creation
					&metav1.UpdateOptions{},
				)
				if err != nil {
					log.WithValues("object", res).Error(err, "could not update in storage")
					syncErr++
				} else {
					syncSuccess++
				}
			}

			// delete if object does not exists on legacy but exists on storage
			if item.objLegacy == nil && item.objStorage != nil {
				outOfSync++

				ctx = request.WithRequestInfo(ctx, &request.RequestInfo{
					APIGroup:  cfg.RequestInfo.APIGroup,
					Resource:  cfg.RequestInfo.Resource,
					Name:      name,
					Namespace: cfg.RequestInfo.Namespace,
				})

				log.Info("deleting item from unified storage", "name", name)

				deletedS, _, err := cfg.Storage.Delete(ctx, name, func(ctx context.Context, obj runtime.Object) error { return nil }, &metav1.DeleteOptions{})
				if err != nil && !apierrors.IsNotFound(err) {
					log.WithValues("objectList", deletedS).Error(err, "could not delete from storage")
					syncErr++
					continue
				}

				syncSuccess++
			}
		}

		// Success means every detected out-of-sync item was reconciled.
		everythingSynced = outOfSync == syncSuccess

		metrics.recordDataSyncerOutcome(cfg.Mode, cfg.Kind, everythingSynced)
		metrics.recordDataSyncerDuration(err != nil, cfg.Mode, cfg.Kind, startSync)

		log.Info("finished syncing items", "items", len(itemsByName), "updated", syncSuccess, "failed", syncErr, "outcome", everythingSynced)
	})

	// A record-limit abort inside the locked section takes precedence as the
	// returned error.
	if errSync != nil {
		err = errSync
	}

	return everythingSynced, err
}
// getList pages through obj.List using the continue token until exhausted and
// returns all items as one flat slice.
//
// listOptions.Limit acts as a hard cap: reaching it while pages remain aborts
// with an error instead of truncating. NOTE(review): a Limit of 0 trips the
// cap check before the first page is fetched — callers are expected to pass a
// positive limit (SyncerConfig.Validate defaults it to 1000).
func getList(ctx context.Context, obj rest.Lister, listOptions *metainternalversion.ListOptions) ([]runtime.Object, error) {
	var allItems []runtime.Object

	for {
		// Abort rather than silently truncate once the cap is reached.
		if int64(len(allItems)) >= listOptions.Limit {
			return nil, fmt.Errorf("list has more than %d records. Aborting sync", listOptions.Limit)
		}

		ll, err := obj.List(ctx, listOptions)
		if err != nil {
			return nil, err
		}

		items, err := meta.ExtractList(ll)
		if err != nil {
			return nil, err
		}

		allItems = append(allItems, items...)

		// Get continue token from the list metadata.
		listMeta, err := meta.ListAccessor(ll)
		if err != nil {
			return nil, err
		}

		// If no continue token, we're done paginating.
		if listMeta.GetContinue() == "" {
			break
		}

		// Set continue token for next page.
		listOptions.Continue = listMeta.GetContinue()
	}

	return allItems, nil
}
@@ -1,248 +0,0 @@
package rest
import (
"context"
"errors"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/mock"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apiserver/pkg/apis/example"
"k8s.io/apiserver/pkg/endpoints/request"
)
// Pod fixtures foo1..foo4 representing the legacy-storage side of a sync.
// legacyObj2WithHostname differs from legacyObj2 only in Spec.Hostname, so
// Compare reports the pair as out of sync.
var legacyObj1 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo1", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var legacyObj2 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var legacyObj3 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo3", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var legacyObj4 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo4", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}

var legacyObj2WithHostname = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{Hostname: "hostname"}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}

// Matching fixtures for the unified-storage side; field-for-field identical to
// the legacy foo1..foo4 objects (StartTime aside, which Compare ignores via
// meta cleanup — NOTE(review): verify against Compare's implementation).
var storageObj1 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo1", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var storageObj2 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo2", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var storageObj3 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo3", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}
var storageObj4 = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo4", ResourceVersion: "1", CreationTimestamp: metav1.Time{}}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Now()}}}

// List fixtures combining the objects above into the shapes the syncer reads.
var legacyListWith3items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*legacyObj1,
		*legacyObj2,
		*legacyObj3,
	}}

var legacyListWith4items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*legacyObj1,
		*legacyObj2,
		*legacyObj3,
		*legacyObj4,
	}}

// Same three names as legacyListWith3items, but foo2 carries a hostname so it
// is out of sync with storage.
var legacyListWith3itemsObj2IsDifferent = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*legacyObj1,
		*legacyObj2WithHostname,
		*legacyObj3,
	}}

var storageListWith3items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*storageObj1,
		*storageObj2,
		*storageObj3,
	}}

var storageListWith4items = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*storageObj1,
		*storageObj2,
		*storageObj3,
		*storageObj4,
	}}

// Storage holds foo1/foo3/foo4 but not foo2 — exercises the insert path.
var storageListWith3itemsMissingFoo2 = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{},
	Items: []example.Pod{
		*storageObj1,
		*storageObj3,
		*storageObj4,
	}}
// TestLegacyToUnifiedStorage_DataSyncer exercises legacyToUnifiedStorageDataSyncer
// against mocked legacy and unified stores: the in-sync case, a missing entry
// (upsert), a changed entry (update), an extra entry (delete), and the failure
// paths for list, upsert and delete. Every case is run under both Mode1 and
// Mode2, which previously required two verbatim copies of the runner loop; the
// duplicated loops are now folded into a single table of modes.
func TestLegacyToUnifiedStorage_DataSyncer(t *testing.T) {
	type testCase struct {
		setupLegacyFn   func(m *MockStorage) // expectations for the legacy (source-of-truth) store
		setupStorageFn  func(m *MockStorage) // expectations for the unified store being reconciled
		name            string
		expectedOutcome bool // whether the syncer reports the stores ended up in sync
		wantErr         bool
	}
	tests :=
		[]testCase{
			{
				name: "both stores are in sync",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
				},
				expectedOutcome: true,
			},
			{
				name: "both stores are in sync - fail to list from legacy",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, errors.New("error"))
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
				},
				expectedOutcome: false,
			},
			{
				name: "both stores are in sync - fail to list from storage",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil).Maybe()
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, errors.New("error"))
				},
				expectedOutcome: false,
			},
			{
				name: "storage is missing 1 entry (foo4)",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith4items, nil)
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
					m.On("Update", mock.Anything, "foo4", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
				},
				expectedOutcome: true,
			},
			{
				name: "storage needs to be update (foo2 is different)",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3itemsObj2IsDifferent, nil)
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
					m.On("Update", mock.Anything, "foo2", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
				},
				expectedOutcome: true,
			},
			{
				name: "storage is missing 1 entry (foo4) - fail to upsert",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith4items, nil)
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith3items, nil)
					m.On("Update", mock.Anything, "foo4", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, errors.New("error"))
				},
				expectedOutcome: false,
			},
			{
				name: "storage has an extra 1 entry (foo4)",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith4items, nil)
					m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, nil)
				},
				expectedOutcome: true,
			},
			{
				name: "storage has an extra 1 entry (foo4) - fail to delete",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith4items, nil)
					m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, errors.New("error"))
				},
				expectedOutcome: false,
			},
			{
				name: "storage is missing 1 entry (foo3) and has an extra 1 entry (foo4)",
				setupLegacyFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(legacyListWith3items, nil)
				},
				setupStorageFn: func(m *MockStorage) {
					m.On("List", mock.Anything, mock.Anything).Return(storageListWith3itemsMissingFoo2, nil)
					m.On("Update", mock.Anything, "foo2", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil)
					m.On("Delete", mock.Anything, "foo4", mock.Anything, mock.Anything).Return(exampleObj, false, nil)
				},
				expectedOutcome: true,
			},
		}
	// The syncer behaves the same way in modes 1 and 2; run every case under both.
	modes := []struct {
		prefix string
		mode   DualWriterMode
	}{
		{prefix: "Mode-1-", mode: Mode1},
		{prefix: "Mode-2-", mode: Mode2},
	}
	for _, md := range modes {
		for _, tt := range tests {
			t.Run(md.prefix+tt.name, func(t *testing.T) {
				ls := NewMockStorage(t)
				us := NewMockStorage(t)
				if tt.setupLegacyFn != nil {
					tt.setupLegacyFn(ls)
				}
				if tt.setupStorageFn != nil {
					tt.setupStorageFn(us)
				}
				outcome, err := legacyToUnifiedStorageDataSyncer(context.Background(), &SyncerConfig{
					Mode:                   md.mode,
					LegacyStorage:          ls,
					Storage:                us,
					Kind:                   "test.kind",
					ServerLockService:      &fakeServerLock{},
					RequestInfo:            &request.RequestInfo{},
					DataSyncerRecordsLimit: 1000,
					DataSyncerInterval:     time.Hour,
				}, NewDualWriterMetrics(nil))
				if tt.wantErr {
					assert.Error(t, err)
					return
				}
				assert.NoError(t, err)
				assert.Equal(t, tt.expectedOutcome, outcome)
			})
		}
	}
}
-133
View File
@@ -1,124 +1,18 @@
package rest
import (
"context"
"fmt"
"testing"
"time"
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apiserver/pkg/apis/example"
"k8s.io/apiserver/pkg/endpoints/request"
)
// now anchors the fixtures' StartTime so both objects share one timestamp per run.
var now = time.Now()
// exampleObj and anotherObj are generic pod fixtures; exampleList/anotherList
// wrap them as single-item List responses for the mocked stores.
var exampleObj = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "foo", ResourceVersion: "1", CreationTimestamp: metav1.Time{}, GenerateName: "foo"}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: now}}}
var anotherObj = &example.Pod{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ObjectMeta: metav1.ObjectMeta{Name: "bar", ResourceVersion: "2", GenerateName: "foo"}, Spec: example.PodSpec{}, Status: example.PodStatus{StartTime: &metav1.Time{Time: now}}}
var exampleList = &example.PodList{TypeMeta: metav1.TypeMeta{Kind: "foo"}, ListMeta: metav1.ListMeta{}, Items: []example.Pod{*exampleObj}}
var anotherList = &example.PodList{Items: []example.Pod{*anotherObj}}
// TestSetDualWritingMode verifies how SetDualWritingMode resolves the effective
// dual-writing mode from the desired mode and the mode persisted in the KV
// store, including the server-lock-failure and skip-data-sync paths.
//
// Previously all cases ran in one bare loop, so the first require.* failure
// aborted every remaining case and the output could not name the failing case;
// each case now runs as its own subtest via t.Run.
func TestSetDualWritingMode(t *testing.T) {
	type testCase struct {
		name            string
		kvStore         *fakeNamespacedKV
		desiredMode     DualWriterMode
		expectedMode    DualWriterMode
		expectedKVMode  string // mode string expected to be persisted in the KV store afterwards
		skipDataSync    bool
		serverLockError error
	}
	tests :=
		[]testCase{
			{
				name:           "should return a mode 2 dual writer when mode 2 is set as the desired mode",
				kvStore:        &fakeNamespacedKV{data: map[string]string{"playlist.grafana.app/playlists": "2"}, namespace: "storage.dualwriting"},
				desiredMode:    Mode2,
				expectedMode:   Mode2,
				expectedKVMode: "2",
			},
			{
				name:           "should return a mode 1 dual writer when mode 1 is set as the desired mode",
				kvStore:        &fakeNamespacedKV{data: map[string]string{"playlist.grafana.app/playlists": "2"}, namespace: "storage.dualwriting"},
				desiredMode:    Mode1,
				expectedMode:   Mode1,
				expectedKVMode: "1",
			},
			{
				name:           "should return mode 3 as desired mode when current mode is > 3",
				kvStore:        &fakeNamespacedKV{data: map[string]string{"playlist.grafana.app/playlists": "5"}, namespace: "storage.dualwriting"},
				desiredMode:    Mode3,
				expectedMode:   Mode3,
				expectedKVMode: "3",
			},
			{
				name:           "should return mode 3 as desired mode when current mode is 2",
				kvStore:        &fakeNamespacedKV{data: map[string]string{"playlist.grafana.app/playlists": "2"}, namespace: "storage.dualwriting"},
				desiredMode:    Mode3,
				expectedMode:   Mode3,
				expectedKVMode: "3",
			},
			{
				name:           "should default to mode 0 if there is no desired mode",
				kvStore:        &fakeNamespacedKV{data: map[string]string{}, namespace: "storage.dualwriting"},
				desiredMode:    Mode0,
				expectedMode:   Mode0,
				expectedKVMode: "",
			},
			{
				name:            "should keep mode2 when trying to go from mode2 to mode3 and the server lock service returns an error",
				kvStore:         &fakeNamespacedKV{data: map[string]string{"playlist.grafana.app/playlists": "2"}, namespace: "storage.dualwriting"},
				desiredMode:     Mode3,
				expectedMode:    Mode2,
				expectedKVMode:  "2",
				serverLockError: fmt.Errorf("lock already exists"),
			},
			{
				// NOTE(review): the name says "keep mode2" but the expectations are
				// Mode3 — skipping the data sync apparently allows the transition to
				// proceed; confirm the intent and rename the case if so.
				name:           "should keep mode2 when trying to go from mode2 to mode3 and migration is disabled",
				kvStore:        &fakeNamespacedKV{data: map[string]string{"playlist.grafana.app/playlists": "2"}, namespace: "storage.dualwriting"},
				desiredMode:    Mode3,
				expectedMode:   Mode3,
				expectedKVMode: "3",
				skipDataSync:   true,
			},
		}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// The unified store tolerates (Maybe) list/update/delete calls issued by
			// the data syncer; the legacy store only ever serves List.
			us := NewMockStorage(t)
			us.On("List", mock.Anything, mock.Anything).Return(anotherList, nil).Maybe()
			us.On("Update", mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil).Maybe()
			us.On("Delete", mock.Anything, mock.Anything, mock.Anything, mock.Anything).Return(exampleObj, false, nil).Maybe()
			ls := NewMockStorage(t)
			ls.On("List", mock.Anything, mock.Anything).Return(exampleList, nil).Maybe()
			serverLockSvc := &fakeServerLock{
				err: tt.serverLockError,
			}
			dwMode, err := SetDualWritingMode(context.Background(), tt.kvStore, &SyncerConfig{
				LegacyStorage:          ls,
				Storage:                us,
				Kind:                   "playlist.grafana.app/playlists",
				Mode:                   tt.desiredMode,
				SkipDataSync:           tt.skipDataSync,
				ServerLockService:      serverLockSvc,
				RequestInfo:            &request.RequestInfo{},
				DataSyncerRecordsLimit: 1000,
				DataSyncerInterval:     time.Hour,
			}, NewDualWriterMetrics(nil))
			require.NoError(t, err)
			require.Equal(t, tt.expectedMode, dwMode)
			// The resolved mode must also have been written back to the KV store.
			kvMode, _, err := tt.kvStore.Get(context.Background(), "playlist.grafana.app/playlists")
			require.NoError(t, err)
			require.Equal(t, tt.expectedKVMode, kvMode, "expected mode for playlist.grafana.app/playlists")
		})
	}
}
func TestCompare(t *testing.T) {
var exampleObjGen1 = &example.Pod{ObjectMeta: metav1.ObjectMeta{Generation: 1}, Spec: example.PodSpec{Hostname: "one"}, Status: example.PodStatus{StartTime: &metav1.Time{Time: time.Unix(0, 0)}}}
@@ -163,30 +57,3 @@ func TestCompare(t *testing.T) {
})
}
}
type fakeNamespacedKV struct {
namespace string
data map[string]string
}
func (f *fakeNamespacedKV) Get(ctx context.Context, key string) (string, bool, error) {
val, ok := f.data[key]
return val, ok, nil
}
func (f *fakeNamespacedKV) Set(ctx context.Context, key, value string) error {
f.data[key] = value
return nil
}
type fakeServerLock struct {
err error
}
func (f *fakeServerLock) LockExecuteAndRelease(ctx context.Context, actionName string, duration time.Duration, fn func(ctx context.Context)) error {
if f.err != nil {
return f.err
}
fn(ctx)
return nil
}
-48
View File
@@ -1,48 +0,0 @@
package rest
import (
"fmt"
"strconv"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
)
// DualWriterMetrics bundles the Prometheus instruments emitted by the dual
// writer's data syncer.
type DualWriterMetrics struct {
	// syncer is a histogram of data-syncer run duration, labeled by
	// is_error, mode and resource.
	syncer *prometheus.HistogramVec
	// syncerOutcome is a histogram recording whether a sync run left the two
	// stores in sync (0) or not (1), labeled by mode and resource.
	syncerOutcome *prometheus.HistogramVec
}
// NewDualWriterMetrics builds and registers (via reg) the data-syncer
// histograms. Passing a nil registerer yields working but unregistered metrics.
func NewDualWriterMetrics(reg prometheus.Registerer) *DualWriterMetrics {
	factory := promauto.With(reg)
	durations := factory.NewHistogramVec(prometheus.HistogramOpts{
		Name:                        "dual_writer_data_syncer_duration_seconds",
		Help:                        "Histogram for the runtime of dual writer data syncer duration per mode",
		Namespace:                   "grafana",
		NativeHistogramBucketFactor: 1.1,
	}, []string{"is_error", "mode", "resource"})
	outcomes := factory.NewHistogramVec(prometheus.HistogramOpts{
		Name:                        "dual_writer_data_syncer_outcome",
		Help:                        "Histogram for the runtime of dual writer data syncer outcome comparison between the 2 stores per mode",
		Namespace:                   "grafana",
		NativeHistogramBucketFactor: 1.1,
	}, []string{"mode", "resource"})
	return &DualWriterMetrics{
		syncer:        durations,
		syncerOutcome: outcomes,
	}
}
// recordDataSyncerDuration observes the elapsed time of a data-syncer run,
// labeled with its error outcome, dual-writer mode and resource.
func (m *DualWriterMetrics) recordDataSyncerDuration(isError bool, mode DualWriterMode, resource string, startFrom time.Time) {
	elapsed := time.Since(startFrom).Seconds()
	labels := []string{strconv.FormatBool(isError), fmt.Sprintf("%d", mode), resource}
	m.syncer.WithLabelValues(labels...).Observe(elapsed)
}
// recordDataSyncerOutcome records whether a sync run left both stores in sync:
// 0 observed when synced, 1 when out of sync.
func (m *DualWriterMetrics) recordDataSyncerOutcome(mode DualWriterMode, resource string, synced bool) {
	value := float64(0)
	if !synced {
		value = 1
	}
	m.syncerOutcome.WithLabelValues(fmt.Sprintf("%d", mode), resource).Observe(value)
}
+2 -2
View File
@@ -835,7 +835,7 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
builderMetrics := builder.ProvideBuilderMetrics(registerer)
backend := auditing.ProvideNoopBackend()
policyRuleProvider := auditing.ProvideNoopPolicyRuleProvider()
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, serverLockService, sqlStore, kvStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics, backend, policyRuleProvider)
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, sqlStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics, backend, policyRuleProvider)
if err != nil {
return nil, err
}
@@ -1503,7 +1503,7 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
builderMetrics := builder.ProvideBuilderMetrics(registerer)
backend := auditing.ProvideNoopBackend()
policyRuleProvider := auditing.ProvideNoopPolicyRuleProvider()
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, serverLockService, sqlStore, kvStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics, backend, policyRuleProvider)
apiserverService, err := apiserver.ProvideService(cfg, featureToggles, routeRegisterImpl, tracingService, sqlStore, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, pluginstoreService, dualwriteService, resourceClient, inlineSecureValueSupport, eventualRestConfigProvider, v, eventualRestConfigProvider, registerer, aggregatorRunner, v2, builderMetrics, backend, policyRuleProvider)
if err != nil {
return nil, err
}
-1
View File
@@ -67,7 +67,6 @@ kubernetesPlaylists = true
[unified_storage.playlists.playlist.grafana.app]
dualWriterMode = 2
dualWriterPeriodicDataSyncJobEnabled = true
```
This will create a development kubeconfig and start a parallel ssl listener. It can be registered by
@@ -5,7 +5,6 @@ import (
"errors"
"fmt"
"maps"
"time"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
@@ -21,13 +20,10 @@ import (
"github.com/grafana/grafana-app-sdk/logging"
grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
"github.com/grafana/grafana/pkg/services/apiserver/builder"
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
grafanaapiserveroptions "github.com/grafana/grafana/pkg/services/apiserver/options"
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
)
type LegacyStorageGetterFunc func(schema.GroupVersionResource) grafanarest.Storage
type LegacyStorageProvider interface {
GetLegacyStorage(schema.GroupVersionResource) grafanarest.Storage
}
@@ -47,11 +43,6 @@ type AppInstallerConfig struct {
AllowedV0Alpha1Resources []string
}
// serverLock interface defines a lock mechanism for executing actions with a timeout
type serverLock interface {
LockExecuteAndRelease(ctx context.Context, actionName string, maxInterval time.Duration, fn func(ctx context.Context)) error
}
// AddToScheme adds app installer schemas to the runtime scheme
func AddToScheme(
appInstallers []appsdkapiserver.AppInstaller,
@@ -139,11 +130,7 @@ func InstallAPIs(
server *genericapiserver.GenericAPIServer,
restOpsGetter generic.RESTOptionsGetter,
storageOpts *grafanaapiserveroptions.StorageOptions,
kvStore grafanarest.NamespacedKVStore,
lock serverLock,
namespaceMapper request.NamespaceMapper,
dualWriteService dualwrite.Service,
dualWriterMetrics *grafanarest.DualWriterMetrics,
builderMetrics *builder.BuilderMetrics,
apiResourceConfig *serverstore.ResourceConfig,
) error {
@@ -156,11 +143,7 @@ func InstallAPIs(
installer: installer,
storageOpts: storageOpts,
restOptionsGetter: restOpsGetter,
kvStore: kvStore,
lock: lock,
namespaceMapper: namespaceMapper,
dualWriteService: dualWriteService,
dualWriterMetrics: dualWriterMetrics,
builderMetrics: builderMetrics,
apiResourceConfig: apiResourceConfig,
}
@@ -18,7 +18,6 @@ import (
grafanaregistry "github.com/grafana/grafana/pkg/apiserver/registry/generic"
grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
"github.com/grafana/grafana/pkg/services/apiserver/builder"
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
grafanaapiserveroptions "github.com/grafana/grafana/pkg/services/apiserver/options"
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
)
@@ -31,11 +30,7 @@ type serverWrapper struct {
installer appsdkapiserver.AppInstaller
restOptionsGetter generic.RESTOptionsGetter
storageOpts *grafanaapiserveroptions.StorageOptions
kvStore grafanarest.NamespacedKVStore
lock serverLock
namespaceMapper request.NamespaceMapper
dualWriteService dualwrite.Service
dualWriterMetrics *grafanarest.DualWriterMetrics
builderMetrics *builder.BuilderMetrics
apiResourceConfig *serverstorage.ResourceConfig
}
@@ -64,16 +59,11 @@ func (s *serverWrapper) InstallAPIGroup(apiGroupInfo *genericapiserver.APIGroupI
if unifiedStorage, ok := storage.(grafanarest.Storage); ok {
log.Debug("Configuring dual writer for storage", "resource", gr.String(), "version", v, "storagePath", storagePath)
storage, err = NewDualWriter(
s.ctx,
gr,
s.storageOpts,
legacyProvider.GetLegacyStorage(gr.WithVersion(v)),
unifiedStorage,
s.kvStore,
s.lock,
s.namespaceMapper,
s.dualWriteService,
s.dualWriterMetrics,
s.builderMetrics,
)
if err != nil {
+3 -75
View File
@@ -1,32 +1,21 @@
package appinstaller
import (
"context"
"time"
"k8s.io/apimachinery/pkg/runtime/schema"
k8srequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/klog/v2"
grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
"github.com/grafana/grafana/pkg/services/apiserver/builder"
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
"github.com/grafana/grafana/pkg/services/apiserver/options"
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
)
// NewDualWriter creates a dual writer for the given group resource using the provided configuration
func NewDualWriter(
_ context.Context,
gr schema.GroupResource,
storageOpts *options.StorageOptions,
legacy grafanarest.Storage,
storage grafanarest.Storage,
kvStore grafanarest.NamespacedKVStore,
lock serverLock,
namespaceMapper request.NamespaceMapper,
dualWriteService dualwrite.Service,
dualWriterMetrics *grafanarest.DualWriterMetrics,
builderMetrics *builder.BuilderMetrics,
) (grafanarest.Storage, error) {
// Dashboards + Folders may be managed (depends on feature toggles and database state)
@@ -40,54 +29,14 @@ func NewDualWriter(
// when missing this will default to mode zero (legacy only)
var mode = grafanarest.DualWriterMode(0)
var (
dualWriterPeriodicDataSyncJobEnabled bool
dualWriterMigrationDataSyncDisabled bool
dataSyncerInterval = time.Hour
dataSyncerRecordsLimit = 1000
)
resourceConfig, resourceExists := storageOpts.UnifiedStorageConfig[key]
if resourceExists {
mode = resourceConfig.DualWriterMode
dualWriterPeriodicDataSyncJobEnabled = resourceConfig.DualWriterPeriodicDataSyncJobEnabled
dualWriterMigrationDataSyncDisabled = resourceConfig.DualWriterMigrationDataSyncDisabled
dataSyncerInterval = resourceConfig.DataSyncerInterval
dataSyncerRecordsLimit = resourceConfig.DataSyncerRecordsLimit
}
// Force using storage only -- regardless of internal synchronization state
if mode == grafanarest.Mode5 {
builderMetrics.RecordDualWriterModes(gr.Resource, gr.Group, mode, grafanarest.Mode5)
return storage, nil
}
builderMetrics.RecordDualWriterModes(gr.Resource, gr.Group, mode)
// Moving from one version to the next can only happen after the previous step has
// successfully synchronized.
requestInfo := getRequestInfo(gr, namespaceMapper)
syncerCfg := &grafanarest.SyncerConfig{
Kind: key,
RequestInfo: requestInfo,
Mode: mode,
SkipDataSync: dualWriterMigrationDataSyncDisabled,
LegacyStorage: legacy,
Storage: storage,
ServerLockService: lock,
DataSyncerInterval: dataSyncerInterval,
DataSyncerRecordsLimit: dataSyncerRecordsLimit,
}
ctx := context.Background()
// This also sets the currentMode on the syncer config.
currentMode, err := grafanarest.SetDualWritingMode(ctx, kvStore, syncerCfg, dualWriterMetrics)
if err != nil {
return nil, err
}
builderMetrics.RecordDualWriterModes(gr.Resource, gr.Group, mode, currentMode)
switch currentMode {
switch mode {
case grafanarest.Mode0:
return legacy, nil
case grafanarest.Mode4, grafanarest.Mode5:
@@ -95,26 +44,5 @@ func NewDualWriter(
default:
}
if dualWriterPeriodicDataSyncJobEnabled {
// The mode might have changed in SetDualWritingMode, so apply current mode first.
syncerCfg.Mode = currentMode
if err := grafanarest.StartPeriodicDataSyncer(ctx, syncerCfg, dualWriterMetrics); err != nil {
return nil, err
}
}
// when unable to use
if currentMode != mode {
klog.Warningf("Requested DualWrite mode: %d, but using %d for %+v", mode, currentMode, gr)
}
return dualwrite.NewStaticStorage(gr, currentMode, legacy, storage)
}
func getRequestInfo(gr schema.GroupResource, namespaceMapper request.NamespaceMapper) *k8srequest.RequestInfo {
return &k8srequest.RequestInfo{
APIGroup: gr.Group,
Resource: gr.Resource,
Name: "",
Namespace: namespaceMapper(int64(1)),
}
return dualwrite.NewStaticStorage(gr, mode, legacy, storage)
}
+3 -83
View File
@@ -1,7 +1,6 @@
package builder
import (
"context"
"encoding/csv"
"encoding/json"
"errors"
@@ -10,7 +9,6 @@ import (
"os"
"regexp"
"strings"
"time"
"github.com/prometheus/client_golang/prometheus"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
@@ -19,7 +17,6 @@ import (
"k8s.io/apimachinery/pkg/runtime/serializer"
utilruntime "k8s.io/apimachinery/pkg/util/runtime"
openapinamer "k8s.io/apiserver/pkg/endpoints/openapi"
k8srequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/generic"
genericapiserver "k8s.io/apiserver/pkg/server"
serverstorage "k8s.io/apiserver/pkg/server/storage"
@@ -32,7 +29,6 @@ import (
"github.com/grafana/grafana/pkg/apiserver/auditing"
"github.com/grafana/grafana/pkg/apiserver/endpoints/filters"
grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
"github.com/grafana/grafana/pkg/services/apiserver/options"
"github.com/grafana/grafana/pkg/services/featuremgmt"
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
@@ -253,19 +249,6 @@ func SetupConfig(
return nil
}
type ServerLockService interface {
LockExecuteAndRelease(ctx context.Context, actionName string, maxInterval time.Duration, fn func(ctx context.Context)) error
}
func getRequestInfo(gr schema.GroupResource, namespaceMapper request.NamespaceMapper) *k8srequest.RequestInfo {
return &k8srequest.RequestInfo{
APIGroup: gr.Group,
Resource: gr.Resource,
Name: "",
Namespace: namespaceMapper(int64(1)),
}
}
func InstallAPIs(
scheme *runtime.Scheme,
codecs serializer.CodecFactory,
@@ -274,13 +257,9 @@ func InstallAPIs(
builders []APIGroupBuilder,
storageOpts *options.StorageOptions,
reg prometheus.Registerer,
namespaceMapper request.NamespaceMapper,
kvStore grafanarest.NamespacedKVStore,
serverLock ServerLockService,
dualWriteService dualwrite.Service,
optsregister apistore.StorageOptionsRegister,
features featuremgmt.FeatureToggles,
dualWriterMetrics *grafanarest.DualWriterMetrics,
builderMetrics *BuilderMetrics,
apiResourceConfig *serverstorage.ResourceConfig,
) error {
@@ -303,79 +282,20 @@ func InstallAPIs(
// when missing this will default to mode zero (legacy only)
var mode = grafanarest.DualWriterMode(0)
var (
err error
dualWriterPeriodicDataSyncJobEnabled bool
dualWriterMigrationDataSyncDisabled bool
dataSyncerInterval = time.Hour
dataSyncerRecordsLimit = 1000
)
resourceConfig, resourceExists := storageOpts.UnifiedStorageConfig[key]
if resourceExists {
mode = resourceConfig.DualWriterMode
dualWriterPeriodicDataSyncJobEnabled = resourceConfig.DualWriterPeriodicDataSyncJobEnabled
dualWriterMigrationDataSyncDisabled = resourceConfig.DualWriterMigrationDataSyncDisabled
dataSyncerInterval = resourceConfig.DataSyncerInterval
dataSyncerRecordsLimit = resourceConfig.DataSyncerRecordsLimit
}
// Force using storage only -- regardless of internal synchronization state
if mode == grafanarest.Mode5 {
builderMetrics.RecordDualWriterModes(gr.Resource, gr.Group, mode, grafanarest.Mode5)
return storage, nil
}
builderMetrics.RecordDualWriterModes(gr.Resource, gr.Group, mode)
currentMode := mode
if !dualWriterMigrationDataSyncDisabled || dualWriterPeriodicDataSyncJobEnabled {
// TODO: inherited context from main Grafana process
ctx := context.Background()
// Moving from one version to the next can only happen after the previous step has
// successfully synchronized.
requestInfo := getRequestInfo(gr, namespaceMapper)
syncerCfg := &grafanarest.SyncerConfig{
Kind: key,
RequestInfo: requestInfo,
Mode: mode,
SkipDataSync: dualWriterMigrationDataSyncDisabled,
LegacyStorage: legacy,
Storage: storage,
ServerLockService: serverLock,
DataSyncerInterval: dataSyncerInterval,
DataSyncerRecordsLimit: dataSyncerRecordsLimit,
}
// This also sets the currentMode on the syncer config.
currentMode, err = grafanarest.SetDualWritingMode(ctx, kvStore, syncerCfg, dualWriterMetrics)
if err != nil {
return nil, err
}
// when unable to use
if currentMode != mode {
klog.Warningf("Requested DualWrite mode: %d, but using %d for %+v", mode, currentMode, gr)
}
if dualWriterPeriodicDataSyncJobEnabled && (currentMode >= grafanarest.Mode1 && currentMode <= grafanarest.Mode3) {
// The mode might have changed in SetDualWritingMode, so apply current mode first.
syncerCfg.Mode = currentMode
if err := grafanarest.StartPeriodicDataSyncer(ctx, syncerCfg, dualWriterMetrics); err != nil {
return nil, err
}
}
}
builderMetrics.RecordDualWriterModes(gr.Resource, gr.Group, mode, currentMode)
switch currentMode {
switch mode {
case grafanarest.Mode0:
return legacy, nil
case grafanarest.Mode4, grafanarest.Mode5:
return storage, nil
default:
return dualwrite.NewStaticStorage(gr, currentMode, legacy, storage)
return dualwrite.NewStaticStorage(gr, mode, legacy, storage)
}
}
}
+3 -7
View File
@@ -24,11 +24,7 @@ func ProvideBuilderMetrics(reg prometheus.Registerer) *BuilderMetrics {
}
}
func (m *BuilderMetrics) RecordDualWriterModes(resource, group string, targetMode, currentMode grafanarest.DualWriterMode) {
m.dualWriterTargetMode.WithLabelValues(resource, group).Set(float64(targetMode))
m.dualWriterCurrentMode.WithLabelValues(resource, group).Set(float64(currentMode))
}
func ProvideDualWriterMetrics(reg prometheus.Registerer) *grafanarest.DualWriterMetrics {
return grafanarest.NewDualWriterMetrics(reg)
func (m *BuilderMetrics) RecordDualWriterModes(resource, group string, mode grafanarest.DualWriterMode) {
m.dualWriterTargetMode.WithLabelValues(resource, group).Set(float64(mode))
m.dualWriterCurrentMode.WithLabelValues(resource, group).Set(float64(mode))
}
+1 -2
View File
@@ -135,8 +135,7 @@ func (v *unifiedStorageConfigValue) Set(val string) error {
}
(*v.config)[key] = setting.UnifiedStorageConfig{
DualWriterMode: apiserverrest.DualWriterMode(mode),
DualWriterMigrationDataSyncDisabled: true,
DualWriterMode: apiserverrest.DualWriterMode(mode),
}
}
+2 -22
View File
@@ -30,11 +30,8 @@ import (
"github.com/grafana/grafana/pkg/apimachinery/identity"
"github.com/grafana/grafana/pkg/apiserver/auditing"
grafanaresponsewriter "github.com/grafana/grafana/pkg/apiserver/endpoints/responsewriter"
grafanarest "github.com/grafana/grafana/pkg/apiserver/rest"
"github.com/grafana/grafana/pkg/infra/db"
"github.com/grafana/grafana/pkg/infra/kvstore"
"github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/infra/serverlock"
"github.com/grafana/grafana/pkg/infra/tracing"
"github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/modules"
@@ -47,7 +44,6 @@ import (
"github.com/grafana/grafana/pkg/services/apiserver/auth/authenticator"
"github.com/grafana/grafana/pkg/services/apiserver/auth/authorizer"
"github.com/grafana/grafana/pkg/services/apiserver/builder"
"github.com/grafana/grafana/pkg/services/apiserver/endpoints/request"
grafanaapiserveroptions "github.com/grafana/grafana/pkg/services/apiserver/options"
"github.com/grafana/grafana/pkg/services/apiserver/utils"
contextmodel "github.com/grafana/grafana/pkg/services/contexthandler/model"
@@ -97,10 +93,8 @@ type service struct {
tracing *tracing.TracingService
metrics prometheus.Registerer
authorizer *authorizer.GrafanaAuthorizer
serverLockService builder.ServerLockService
dualWriter dualwrite.Service
kvStore kvstore.KVStore
authorizer *authorizer.GrafanaAuthorizer
dualWriter dualwrite.Service
pluginClient plugins.Client
datasources datasource.ScopedPluginDatasourceProvider
@@ -114,7 +108,6 @@ type service struct {
aggregatorRunner aggregatorrunner.AggregatorRunner
appInstallers []appsdkapiserver.AppInstaller
builderMetrics *builder.BuilderMetrics
dualWriterMetrics *grafanarest.DualWriterMetrics
auditBackend audit.Backend
auditPolicyRuleProvider auditing.PolicyRuleProvider
@@ -125,9 +118,7 @@ func ProvideService(
features featuremgmt.FeatureToggles,
rr routing.RouteRegister,
tracing *tracing.TracingService,
serverLockService *serverlock.ServerLockService,
db db.DB,
kvStore kvstore.KVStore,
pluginClient plugins.Client,
datasources datasource.ScopedPluginDatasourceProvider,
contextProvider datasource.PluginContextWrapper,
@@ -159,12 +150,10 @@ func ProvideService(
tracing: tracing,
db: db, // For Unified storage
metrics: reg,
kvStore: kvStore,
pluginClient: pluginClient,
datasources: datasources,
contextProvider: contextProvider,
pluginStore: pluginStore,
serverLockService: serverLockService,
dualWriter: dualWriter,
unified: unified,
secrets: secrets,
@@ -173,7 +162,6 @@ func ProvideService(
aggregatorRunner: aggregatorRunner,
appInstallers: appInstallers,
builderMetrics: builderMetrics,
dualWriterMetrics: grafanarest.NewDualWriterMetrics(reg),
auditBackend: auditBackend,
auditPolicyRuleProvider: auditPolicyRuleProvider,
}
@@ -412,13 +400,9 @@ func (s *service) start(ctx context.Context) error {
builders,
o.StorageOptions,
s.metrics,
request.GetNamespaceMapper(s.cfg),
kvstore.WithNamespace(s.kvStore, 0, "storage.dualwriting"),
s.serverLockService,
s.dualWriter,
optsregister,
s.features,
s.dualWriterMetrics,
s.builderMetrics,
apiResourceConfig,
)
@@ -432,11 +416,7 @@ func (s *service) start(ctx context.Context) error {
server,
serverConfig.RESTOptionsGetter,
o.StorageOptions,
kvstore.WithNamespace(s.kvStore, 0, "storage.dualwriting"),
s.serverLockService,
request.GetNamespaceMapper(s.cfg),
s.dualWriter,
s.dualWriterMetrics,
s.builderMetrics,
serverConfig.MergedResourceConfig,
); err != nil {
-1
View File
@@ -7,7 +7,6 @@ import (
)
var WireSet = wire.NewSet(
builder.ProvideDualWriterMetrics,
builder.ProvideBuilderMetrics,
ProvideEventualRestConfigProvider,
wire.Bind(new(RestConfigProvider), new(*eventualRestConfigProvider)),
+1 -7
View File
@@ -629,13 +629,7 @@ type Cfg struct {
}
type UnifiedStorageConfig struct {
DualWriterMode rest.DualWriterMode
DualWriterPeriodicDataSyncJobEnabled bool
DualWriterMigrationDataSyncDisabled bool
// DataSyncerInterval defines how often the data syncer should run for a resource on the grafana instance.
DataSyncerInterval time.Duration
// DataSyncerRecordsLimit defines how many records will be processed at max during a sync invocation.
DataSyncerRecordsLimit int
DualWriterMode rest.DualWriterMode
// EnableMigration indicates whether migration is enabled for the resource.
// If not set, will use the default from MigratedUnifiedResources.
EnableMigration bool
+6 -24
View File
@@ -52,18 +52,6 @@ func (cfg *Cfg) setUnifiedStorageConfig() {
// parse dualWriter modes from the section
dualWriterMode := section.Key("dualWriterMode").MustInt(0)
// parse dualWriter periodic data syncer config
dualWriterPeriodicDataSyncJobEnabled := section.Key("dualWriterPeriodicDataSyncJobEnabled").MustBool(false)
// parse dualWriter migration data sync disabled from resource section
dualWriterMigrationDataSyncDisabled := section.Key("dualWriterMigrationDataSyncDisabled").MustBool(false)
// parse dataSyncerRecordsLimit from resource section
dataSyncerRecordsLimit := section.Key("dataSyncerRecordsLimit").MustInt(1000)
// parse dataSyncerInterval from resource section
dataSyncerInterval := section.Key("dataSyncerInterval").MustDuration(time.Hour)
// parse EnableMigration from resource section
enableMigration := MigratedUnifiedResources[resourceName]
if section.HasKey("enableMigration") {
@@ -78,13 +66,9 @@ func (cfg *Cfg) setUnifiedStorageConfig() {
}
storageConfig[resourceName] = UnifiedStorageConfig{
DualWriterMode: rest.DualWriterMode(dualWriterMode),
DualWriterPeriodicDataSyncJobEnabled: dualWriterPeriodicDataSyncJobEnabled,
DualWriterMigrationDataSyncDisabled: dualWriterMigrationDataSyncDisabled,
DataSyncerRecordsLimit: dataSyncerRecordsLimit,
DataSyncerInterval: dataSyncerInterval,
EnableMigration: enableMigration,
AutoMigrationThreshold: autoMigrationThreshold,
DualWriterMode: rest.DualWriterMode(dualWriterMode),
EnableMigration: enableMigration,
AutoMigrationThreshold: autoMigrationThreshold,
}
}
cfg.UnifiedStorage = storageConfig
@@ -163,10 +147,9 @@ func (cfg *Cfg) enforceMigrationToUnifiedConfigs() {
}
cfg.Logger.Info("Enforcing mode 5 for resource in unified storage", "resource", resource)
cfg.UnifiedStorage[resource] = UnifiedStorageConfig{
DualWriterMode: 5,
DualWriterMigrationDataSyncDisabled: true,
EnableMigration: true,
AutoMigrationThreshold: resourceCfg.AutoMigrationThreshold,
DualWriterMode: 5,
EnableMigration: true,
AutoMigrationThreshold: resourceCfg.AutoMigrationThreshold,
}
}
}
@@ -204,7 +187,6 @@ func (cfg *Cfg) EnableMode5(resource string) {
}
config := cfg.UnifiedStorage[resource]
config.DualWriterMode = rest.Mode5
config.DualWriterMigrationDataSyncDisabled = true
config.EnableMigration = true
cfg.UnifiedStorage[resource] = config
}
+5 -13
View File
@@ -2,7 +2,6 @@ package setting
import (
"testing"
"time"
"github.com/grafana/grafana/pkg/apiserver/rest"
"github.com/stretchr/testify/assert"
@@ -49,10 +48,9 @@ func TestCfg_setUnifiedStorageConfig(t *testing.T) {
}
assert.Equal(t, UnifiedStorageConfig{
DualWriterMode: 5,
DualWriterMigrationDataSyncDisabled: true,
EnableMigration: isEnabled,
AutoMigrationThreshold: expectedThreshold,
DualWriterMode: 5,
EnableMigration: isEnabled,
AutoMigrationThreshold: expectedThreshold,
}, resourceCfg, migratedResource)
}
}
@@ -60,9 +58,6 @@ func TestCfg_setUnifiedStorageConfig(t *testing.T) {
setMigratedResourceKey("dualWriterMode", "1") // migrated resources enabled by default will change to 5 in setUnifiedStorageConfig
setSectionKey("unified_storage.resource.not_migrated.grafana.app", "dualWriterMode", "2")
setSectionKey("unified_storage.resource.not_migrated.grafana.app", "dualWriterPeriodicDataSyncJobEnabled", "true")
setSectionKey("unified_storage.resource.not_migrated.grafana.app", "dataSyncerRecordsLimit", "1001")
setSectionKey("unified_storage.resource.not_migrated.grafana.app", "dataSyncerInterval", "10m")
// Add unified_storage section for index settings
setSectionKey("unified_storage", "index_min_count", "5")
@@ -73,11 +68,8 @@ func TestCfg_setUnifiedStorageConfig(t *testing.T) {
assert.Equal(t, exists, true)
assert.Equal(t, value, UnifiedStorageConfig{
DualWriterMode: 2,
DualWriterPeriodicDataSyncJobEnabled: true,
DataSyncerRecordsLimit: 1001,
DataSyncerInterval: time.Minute * 10,
AutoMigrationThreshold: 0,
DualWriterMode: 2,
AutoMigrationThreshold: 0,
})
validateMigratedResources(false)
@@ -212,7 +212,6 @@ func TestResourceMigration_AutoMigrateEnablesMode5(t *testing.T) {
if tt.wantMode5Enabled {
require.Equal(t, 5, int(config.DualWriterMode), "%s: %s", tt.description, resourceName)
require.True(t, config.EnableMigration, "%s: EnableMigration should be true for %s", tt.description, resourceName)
require.True(t, config.DualWriterMigrationDataSyncDisabled, "%s: DualWriterMigrationDataSyncDisabled should be true for %s", tt.description, resourceName)
} else {
require.Equal(t, 0, int(config.DualWriterMode), "%s: mode should be 0 for %s", tt.description, resourceName)
}
@@ -0,0 +1,16 @@
import { createApi } from '@reduxjs/toolkit/query/react';
import { getAPIBaseURL } from '@grafana/api-clients';
import { createBaseQuery } from '@grafana/api-clients/rtkq';
// Kubernetes-style API group and version for the Grafana Scopes app platform resource.
export const API_GROUP = 'scope.grafana.app' as const;
export const API_VERSION = 'v0alpha1' as const;
// Base URL for all scope endpoints, e.g. /apis/scope.grafana.app/v0alpha1.
export const BASE_URL = getAPIBaseURL(API_GROUP, API_VERSION);
// Empty base RTK Query API slice for the scope client; concrete endpoints are
// injected elsewhere (see the generated endpoints module). The reducerPath must
// match the key this slice is registered under in the root reducer.
export const api = createApi({
  reducerPath: 'scopeAPIv0alpha1',
  baseQuery: createBaseQuery({
    baseURL: BASE_URL,
  }),
  // No endpoints defined here on purpose — they are added via injectEndpoints.
  endpoints: () => ({}),
});
File diff suppressed because it is too large Load Diff
@@ -0,0 +1,3 @@
import { generatedAPI } from './endpoints.gen';
// Public entry point for the scope v0alpha1 API client: re-export the generated
// RTK Query API under a versioned name so callers don't import the .gen file directly.
export const scopeAPIv0alpha1 = generatedAPI;
@@ -0,0 +1,43 @@
#!/bin/bash
# Syncs the scope API client from Enterprise to OSS.
#
# This script:
# 1. Regenerates the Enterprise API client from the OpenAPI spec
# 2. Copies the generated endpoints.gen.ts to OSS
#
# Prerequisites:
# - The OpenAPI spec must exist at data/openapi/scope.grafana.app-v0alpha1.json
#   (generated by running TestIntegrationOpenAPIs in pkg/extensions/apiserver/tests/)
#
# Usage: ./sync-from-enterprise.sh

# -e: abort on any command failure; -u: treat unset variables as errors
# (guards the cd/cp paths below against typos); -o pipefail: a failure in any
# stage of a pipeline fails the whole pipeline instead of being masked.
set -euo pipefail

# Resolve the directory this script lives in, regardless of the caller's cwd.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Repo root is five levels up from this script's directory.
GRAFANA_ROOT="$(cd "$SCRIPT_DIR/../../../../.." && pwd)"

# Source and destination directories for the generated API client
ENTERPRISE_SCOPE_API_DIR="$GRAFANA_ROOT/public/app/extensions/api/clients/scope/v0alpha1"
OSS_SCOPE_API_DIR="$SCRIPT_DIR"

cd "$GRAFANA_ROOT"

# Check if OpenAPI spec exists
if [ ! -f "data/openapi/scope.grafana.app-v0alpha1.json" ]; then
  echo "Error: OpenAPI spec not found at data/openapi/scope.grafana.app-v0alpha1.json"
  echo "Run TestIntegrationOpenAPIs in pkg/extensions/apiserver/tests/ to generate it."
  exit 1
fi

echo "Step 1: Generating Enterprise API client from OpenAPI spec..."
yarn workspace @grafana/api-clients process-specs && npx rtk-query-codegen-openapi ./local/generate-enterprise-apis.ts

# Sanity-check that codegen actually produced the file before copying.
if [ ! -f "$ENTERPRISE_SCOPE_API_DIR/endpoints.gen.ts" ]; then
  echo "Error: Enterprise endpoints.gen.ts not found after generation"
  exit 1
fi

echo "Step 2: Copying endpoints.gen.ts from Enterprise to OSS..."
cp "$ENTERPRISE_SCOPE_API_DIR/endpoints.gen.ts" "$OSS_SCOPE_API_DIR/endpoints.gen.ts"

echo "Done! Scope API client synced from Enterprise."
+2
View File
@@ -3,6 +3,7 @@ import { AnyAction, combineReducers } from 'redux';
import { allReducers as allApiClientReducers } from '@grafana/api-clients/rtkq';
import { generatedAPI as legacyAPI } from '@grafana/api-clients/rtkq/legacy';
import { scopeAPIv0alpha1 } from 'app/api/clients/scope/v0alpha1';
import sharedReducers from 'app/core/reducers';
import ldapReducers from 'app/features/admin/state/reducers';
import alertingReducers from 'app/features/alerting/state/reducers';
@@ -52,6 +53,7 @@ const rootReducers = {
[alertingApi.reducerPath]: alertingApi.reducer,
[publicDashboardApi.reducerPath]: publicDashboardApi.reducer,
[browseDashboardsAPI.reducerPath]: browseDashboardsAPI.reducer,
[scopeAPIv0alpha1.reducerPath]: scopeAPIv0alpha1.reducer,
...allApiClientReducers,
};
+528 -203
View File
@@ -1,74 +1,194 @@
import { getBackendSrv, config } from '@grafana/runtime';
import { config } from '@grafana/runtime';
import { MOCK_NODES, MOCK_SCOPES } from '@grafana/test-utils/unstable';
import { scopeAPIv0alpha1 } from 'app/api/clients/scope/v0alpha1';
import { ScopesApiClient } from './ScopesApiClient';
// Mock the runtime dependencies
jest.mock('@grafana/runtime', () => ({
getBackendSrv: jest.fn(),
config: {
featureToggles: {
useMultipleScopeNodesEndpoint: true,
useScopeSingleNodeEndpoint: true,
// Helper to create a mock subscription with unsubscribe method
const createMockSubscription = <T>(data: T): Promise<T> & { unsubscribe: jest.Mock } => {
const subscription = Promise.resolve(data) as Promise<T> & { unsubscribe: jest.Mock };
subscription.unsubscribe = jest.fn();
return subscription;
};
// Mock the RTK Query API and dispatch
jest.mock('app/api/clients/scope/v0alpha1', () => ({
scopeAPIv0alpha1: {
endpoints: {
getScope: {
initiate: jest.fn(),
},
getScopeNode: {
initiate: jest.fn(),
},
getFindScopeNodeChildrenResults: {
initiate: jest.fn(),
},
getFindScopeDashboardBindingsResults: {
initiate: jest.fn(),
},
getFindScopeNavigationsResults: {
initiate: jest.fn(),
},
},
},
}));
jest.mock('@grafana/api-clients', () => ({
getAPIBaseURL: jest.fn().mockReturnValue('/apis/scope.grafana.app/v0alpha1'),
jest.mock('app/store/store', () => ({
dispatch: jest.fn((action) => action),
}));
describe('ScopesApiClient', () => {
let apiClient: ScopesApiClient;
let mockBackendSrv: jest.Mocked<{ get: jest.Mock }>;
beforeEach(() => {
mockBackendSrv = {
get: jest.fn(),
};
(getBackendSrv as jest.Mock).mockReturnValue(mockBackendSrv);
apiClient = new ScopesApiClient();
config.featureToggles.useMultipleScopeNodesEndpoint = true;
config.featureToggles.useScopeSingleNodeEndpoint = true;
jest.clearAllMocks();
});
afterEach(() => {
jest.clearAllMocks();
});
describe('fetchScope', () => {
it('should fetch a scope by name', async () => {
// Expected: MOCK_SCOPES contains a scope with name 'grafana'
const expectedScope = MOCK_SCOPES.find((s) => s.metadata.name === 'grafana');
expect(expectedScope).toBeDefined();
const mockSubscription = createMockSubscription({ data: expectedScope });
(scopeAPIv0alpha1.endpoints.getScope.initiate as jest.Mock).mockReturnValue(mockSubscription);
const result = await apiClient.fetchScope('grafana');
// Validate: result matches the expected scope from MOCK_SCOPES
expect(result).toEqual(expectedScope);
expect(scopeAPIv0alpha1.endpoints.getScope.initiate).toHaveBeenCalledWith(
{ name: 'grafana' },
{ subscribe: false }
);
});
it('should return undefined when scope is not found', async () => {
// Expected: No scope with this name exists in MOCK_SCOPES
const nonExistentScopeName = 'non-existent-scope';
const errorResponse = {
kind: 'Status',
apiVersion: 'v1',
status: 'Failure',
message: `scopes.scope.grafana.app "${nonExistentScopeName}" not found`,
code: 404,
};
const mockSubscription = createMockSubscription({ data: errorResponse });
(scopeAPIv0alpha1.endpoints.getScope.initiate as jest.Mock).mockReturnValue(mockSubscription);
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const result = await apiClient.fetchScope(nonExistentScopeName);
// Validate: returns undefined for non-existent scope
expect(result).toBeUndefined();
expect(consoleErrorSpy).toHaveBeenCalled();
consoleErrorSpy.mockRestore();
});
});
describe('fetchMultipleScopes', () => {
it('should fetch multiple scopes in parallel', async () => {
// Expected: Both 'grafana' and 'mimir' exist in MOCK_SCOPES
const scopeNames = ['grafana', 'mimir'];
const expectedScopes = MOCK_SCOPES.filter((s) => scopeNames.includes(s.metadata.name));
const mockSubscriptions = expectedScopes.map((scope) => createMockSubscription({ data: scope }));
(scopeAPIv0alpha1.endpoints.getScope.initiate as jest.Mock)
.mockReturnValueOnce(mockSubscriptions[0])
.mockReturnValueOnce(mockSubscriptions[1]);
const result = await apiClient.fetchMultipleScopes(scopeNames);
// Validate: returns both scopes from MOCK_SCOPES
expect(result).toHaveLength(2);
expect(result.map((s) => s.metadata.name)).toContain('grafana');
expect(result.map((s) => s.metadata.name)).toContain('mimir');
expect(result).toEqual(expect.arrayContaining(expectedScopes));
});
it('should filter out undefined scopes when some fail', async () => {
// Expected: 'grafana' exists in MOCK_SCOPES, 'non-existent' does not
const scopeNames = ['grafana', 'non-existent'];
const expectedScope = MOCK_SCOPES.find((s) => s.metadata.name === 'grafana');
const errorResponse = {
kind: 'Status',
apiVersion: 'v1',
status: 'Failure',
message: 'scopes.scope.grafana.app "non-existent" not found',
code: 404,
};
const mockSubscriptions = [
createMockSubscription({ data: expectedScope }),
createMockSubscription({ data: errorResponse }),
];
(scopeAPIv0alpha1.endpoints.getScope.initiate as jest.Mock)
.mockReturnValueOnce(mockSubscriptions[0])
.mockReturnValueOnce(mockSubscriptions[1]);
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const consoleWarnSpy = jest.spyOn(console, 'warn').mockImplementation();
const result = await apiClient.fetchMultipleScopes(scopeNames);
// Validate: only returns the existing scope from MOCK_SCOPES, filters out the non-existent one
expect(result).toHaveLength(1);
expect(result[0]).toEqual(expectedScope);
expect(result[0].metadata.name).toBe('grafana');
// Validate: console.warn is called when some scopes fail
expect(consoleWarnSpy).toHaveBeenCalled();
consoleErrorSpy.mockRestore();
consoleWarnSpy.mockRestore();
});
it('should return empty array when no scopes provided', async () => {
const result = await apiClient.fetchMultipleScopes([]);
// Validate: empty input returns empty array
expect(result).toEqual([]);
});
});
describe('fetchMultipleScopeNodes', () => {
it('should fetch multiple nodes by names', async () => {
const mockNodes = [
{
metadata: { name: 'node-1' },
spec: { nodeType: 'container', title: 'Node 1', parentName: '' },
},
{
metadata: { name: 'node-2' },
spec: { nodeType: 'leaf', title: 'Node 2', parentName: 'node-1' },
},
];
// Expected: Both nodes exist in MOCK_NODES
const nodeNames = ['applications-grafana', 'applications-mimir'];
const expectedNodes = MOCK_NODES.filter((n) => nodeNames.includes(n.metadata.name));
mockBackendSrv.get.mockResolvedValue({ items: mockNodes });
const result = await apiClient.fetchMultipleScopeNodes(['node-1', 'node-2']);
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/find/scope_node_children', {
names: ['node-1', 'node-2'],
const mockSubscription = createMockSubscription({
data: { items: expectedNodes },
});
expect(result).toEqual(mockNodes);
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchMultipleScopeNodes(nodeNames);
// Validate: returns the expected nodes from MOCK_NODES
expect(result).toHaveLength(2);
expect(result.map((n) => n.metadata.name)).toContain('applications-grafana');
expect(result.map((n) => n.metadata.name)).toContain('applications-mimir');
expect(result).toEqual(expect.arrayContaining(expectedNodes));
});
it('should return empty array when names array is empty', async () => {
const result = await apiClient.fetchMultipleScopeNodes([]);
expect(mockBackendSrv.get).not.toHaveBeenCalled();
expect(result).toEqual([]);
});
it('should return empty array when feature toggle is disabled', async () => {
config.featureToggles.useMultipleScopeNodesEndpoint = false;
const result = await apiClient.fetchMultipleScopeNodes(['node-1']);
const result = await apiClient.fetchMultipleScopeNodes(['applications-grafana']);
expect(mockBackendSrv.get).not.toHaveBeenCalled();
expect(result).toEqual([]);
// Restore feature toggle
@@ -76,79 +196,94 @@ describe('ScopesApiClient', () => {
});
it('should handle API errors gracefully', async () => {
mockBackendSrv.get.mockRejectedValue(new Error('Network error'));
// Expected: No node with this name exists in MOCK_NODES
const nonExistentNodeName = 'non-existent-node';
const mockSubscription = createMockSubscription({ data: { items: [] } });
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const result = await apiClient.fetchMultipleScopeNodes(['node-1']);
const result = await apiClient.fetchMultipleScopeNodes([nonExistentNodeName]);
// Validate: returns empty array when no matches
expect(result).toEqual([]);
consoleErrorSpy.mockRestore();
});
it('should handle response with no items field', async () => {
mockBackendSrv.get.mockResolvedValue({});
// Expected: Node exists in MOCK_NODES
const nodeName = 'applications-grafana';
const mockSubscription = createMockSubscription({ data: {} });
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchMultipleScopeNodes(['node-1']);
expect(result).toEqual([]);
});
it('should handle response with null items', async () => {
mockBackendSrv.get.mockResolvedValue({ items: null });
const result = await apiClient.fetchMultipleScopeNodes(['node-1']);
const result = await apiClient.fetchMultipleScopeNodes([nodeName]);
// Validate: returns empty array when items field is missing
expect(result).toEqual([]);
});
it('should handle large arrays of node names', async () => {
const names = Array.from({ length: 100 }, (_, i) => `node-${i}`);
const mockNodes = names.map((name) => ({
metadata: { name },
spec: { nodeType: 'leaf', title: name, parentName: '' },
}));
// Expected: None of these node names exist in MOCK_NODES
const nonExistentNodeNames = Array.from({ length: 10 }, (_, i) => `node-${i}`);
const mockSubscription = createMockSubscription({ data: { items: [] } });
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
mockBackendSrv.get.mockResolvedValue({ items: mockNodes });
const result = await apiClient.fetchMultipleScopeNodes(nonExistentNodeNames);
const result = await apiClient.fetchMultipleScopeNodes(names);
expect(result).toEqual(mockNodes);
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/find/scope_node_children', {
names,
});
// Validate: returns empty array when no matches
expect(Array.isArray(result)).toBe(true);
expect(result).toEqual([]);
});
it('should pass through node names exactly as provided', async () => {
const names = ['node-with-special-chars_123', 'node.with.dots', 'node-with-dashes'];
mockBackendSrv.get.mockResolvedValue({ items: [] });
// Expected: Both nodes exist in MOCK_NODES
const nodeNames = ['applications-grafana', 'applications-mimir'];
const expectedNodes = MOCK_NODES.filter((n) => nodeNames.includes(n.metadata.name));
const mockSubscription = createMockSubscription({
data: { items: expectedNodes },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
await apiClient.fetchMultipleScopeNodes(names);
const result = await apiClient.fetchMultipleScopeNodes(nodeNames);
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/find/scope_node_children', {
names,
// Validate: returns nodes matching the provided names
const resultNames = result.map((n) => n.metadata.name);
expect(resultNames).toEqual(expect.arrayContaining(nodeNames));
// Verify we got the expected nodes from MOCK_NODES
expectedNodes.forEach((expectedNode) => {
expect(result).toContainEqual(expectedNode);
});
});
});
describe('fetchScopeNode', () => {
it('should fetch a single scope node by ID', async () => {
const mockNode = {
metadata: { name: 'test-node' },
spec: { nodeType: 'leaf', title: 'Test Node', parentName: 'parent' },
};
// Expected: Node exists in MOCK_NODES
const nodeName = 'applications-grafana';
const expectedNode = MOCK_NODES.find((n) => n.metadata.name === nodeName);
expect(expectedNode).toBeDefined();
mockBackendSrv.get.mockResolvedValue(mockNode);
const mockSubscription = createMockSubscription({ data: expectedNode });
(scopeAPIv0alpha1.endpoints.getScopeNode.initiate as jest.Mock).mockReturnValue(mockSubscription);
const result = await apiClient.fetchScopeNode('test-node');
const result = await apiClient.fetchScopeNode(nodeName);
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/scopenodes/test-node');
expect(result).toEqual(mockNode);
// Validate: result matches the expected node from MOCK_NODES
expect(result).toEqual(expectedNode);
});
it('should return undefined when feature toggle is disabled', async () => {
config.featureToggles.useScopeSingleNodeEndpoint = false;
const result = await apiClient.fetchScopeNode('test-node');
const result = await apiClient.fetchScopeNode('applications-grafana');
expect(mockBackendSrv.get).not.toHaveBeenCalled();
expect(result).toBeUndefined();
// Restore feature toggle
@@ -156,65 +291,95 @@ describe('ScopesApiClient', () => {
});
it('should return undefined on API error', async () => {
mockBackendSrv.get.mockRejectedValue(new Error('Not found'));
// Expected: No node with this name exists in MOCK_NODES
const nonExistentNodeName = 'non-existent-node';
const errorResponse = {
kind: 'Status',
apiVersion: 'v1',
status: 'Failure',
message: `scopenodes.scope.grafana.app "${nonExistentNodeName}" not found`,
code: 404,
};
const mockSubscription = createMockSubscription({ data: errorResponse });
(scopeAPIv0alpha1.endpoints.getScopeNode.initiate as jest.Mock).mockReturnValue(mockSubscription);
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const result = await apiClient.fetchScopeNode('non-existent');
const result = await apiClient.fetchScopeNode(nonExistentNodeName);
// Validate: returns undefined for non-existent node
expect(result).toBeUndefined();
consoleErrorSpy.mockRestore();
});
});
describe('fetchNodes', () => {
it('should fetch nodes with parent filter', async () => {
const mockNodes = [
{
metadata: { name: 'child-1' },
spec: { nodeType: 'leaf', title: 'Child 1', parentName: 'parent' },
},
];
// Expected: MOCK_NODES contains nodes with parentName 'applications'
const parentName = 'applications';
const expectedNodes = MOCK_NODES.filter((n) => n.spec.parentName === parentName);
mockBackendSrv.get.mockResolvedValue({ items: mockNodes });
const result = await apiClient.fetchNodes({ parent: 'parent' });
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/find/scope_node_children', {
parent: 'parent',
query: undefined,
limit: 1000,
const mockSubscription = createMockSubscription({
data: { items: expectedNodes },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchNodes({ parent: parentName });
// Validate: returns nodes with matching parentName from MOCK_NODES
expect(Array.isArray(result)).toBe(true);
expect(result.length).toBeGreaterThan(0);
result.forEach((node) => {
expect(node.spec.parentName).toBe(parentName);
});
// Verify all returned nodes are from the expected set
result.forEach((node) => {
expect(expectedNodes).toContainEqual(node);
});
expect(result).toEqual(mockNodes);
});
it('should fetch nodes with query filter', async () => {
const mockNodes = [
{
metadata: { name: 'matching-node' },
spec: { nodeType: 'leaf', title: 'Matching Node', parentName: '' },
},
];
// Expected: MOCK_NODES contains nodes with 'Grafana' in title (case-insensitive)
// When query is provided without parent, the API returns nodes matching the query
// In MOCK_NODES, nodes with 'Grafana' in title have parentName 'applications' or 'cloud-applications'
const query = 'Grafana';
const expectedNodes = MOCK_NODES.filter((n) => n.spec.title.toLowerCase().includes(query.toLowerCase()));
mockBackendSrv.get.mockResolvedValue({ items: mockNodes });
const result = await apiClient.fetchNodes({ query: 'matching' });
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/find/scope_node_children', {
parent: undefined,
query: 'matching',
limit: 1000,
const mockSubscription = createMockSubscription({
data: { items: expectedNodes },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchNodes({ query });
// Validate: returns nodes matching the query from MOCK_NODES
expect(Array.isArray(result)).toBe(true);
expect(result.length).toBeGreaterThan(0);
result.forEach((node) => {
expect(node.spec.title.toLowerCase()).toContain('grafana');
});
// Verify all returned nodes are from the expected set
result.forEach((node) => {
expect(expectedNodes).toContainEqual(node);
});
expect(result).toEqual(mockNodes);
});
it('should respect custom limit', async () => {
mockBackendSrv.get.mockResolvedValue({ items: [] });
await apiClient.fetchNodes({ limit: 50 });
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/find/scope_node_children', {
parent: undefined,
query: undefined,
limit: 50,
const limit = 5;
const mockNodes = MOCK_NODES.slice(0, limit);
const mockSubscription = createMockSubscription({
data: { items: mockNodes },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchNodes({ limit });
expect(result.length).toBeLessThanOrEqual(limit);
});
it('should throw error for invalid limit (too small)', async () => {
@@ -226,137 +391,297 @@ describe('ScopesApiClient', () => {
});
it('should use default limit of 1000 when not specified', async () => {
mockBackendSrv.get.mockResolvedValue({ items: [] });
await apiClient.fetchNodes({});
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/find/scope_node_children', {
parent: undefined,
query: undefined,
limit: 1000,
const mockNodes = MOCK_NODES.slice(0, 1000);
const mockSubscription = createMockSubscription({
data: { items: mockNodes },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchNodes({});
expect(Array.isArray(result)).toBe(true);
// Default limit is 1000, so result should not exceed that
expect(result.length).toBeLessThanOrEqual(1000);
});
it('should return empty array on API error', async () => {
mockBackendSrv.get.mockRejectedValue(new Error('API Error'));
const result = await apiClient.fetchNodes({ parent: 'test' });
expect(result).toEqual([]);
});
});
describe('fetchScope', () => {
it('should fetch a scope by name', async () => {
const mockScope = {
metadata: { name: 'test-scope' },
spec: {
title: 'Test Scope',
filters: [],
},
};
mockBackendSrv.get.mockResolvedValue(mockScope);
const result = await apiClient.fetchScope('test-scope');
expect(mockBackendSrv.get).toHaveBeenCalledWith('/apis/scope.grafana.app/v0alpha1/scopes/test-scope');
expect(result).toEqual(mockScope);
});
it('should return undefined on error', async () => {
const mockSubscription = createMockSubscription({ data: { items: [] } });
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
mockBackendSrv.get.mockRejectedValue(new Error('Not found'));
const result = await apiClient.fetchScope('non-existent');
const result = await apiClient.fetchNodes({ parent: 'non-existent-parent' });
expect(result).toBeUndefined();
expect(Array.isArray(result)).toBe(true);
consoleErrorSpy.mockRestore();
});
it('should log error to console', async () => {
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const error = new Error('Not found');
mockBackendSrv.get.mockRejectedValue(error);
it('should combine parent and query filters', async () => {
// Expected: MOCK_NODES contains nodes with parentName 'applications' and 'Grafana' in title
const parentName = 'applications';
const query = 'Grafana';
const expectedNodes = MOCK_NODES.filter(
(n) => n.spec.parentName === parentName && n.spec.title.toLowerCase().includes(query.toLowerCase())
);
await apiClient.fetchScope('non-existent');
const mockSubscription = createMockSubscription({
data: { items: expectedNodes },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
expect(consoleErrorSpy).toHaveBeenCalledWith(error);
consoleErrorSpy.mockRestore();
const result = await apiClient.fetchNodes({ parent: parentName, query });
// Validate: returns nodes matching both filters from MOCK_NODES
expect(Array.isArray(result)).toBe(true);
result.forEach((node) => {
expect(node.spec.parentName).toBe(parentName);
expect(node.spec.title.toLowerCase()).toContain('grafana');
});
// Verify all returned nodes are from the expected set
result.forEach((node) => {
expect(expectedNodes).toContainEqual(node);
});
});
});
describe('fetchMultipleScopes', () => {
it('should fetch multiple scopes in parallel', async () => {
const mockScopes = [
describe('fetchDashboards', () => {
it('should fetch dashboards for scopes', async () => {
// Expected: MOCK_SCOPE_DASHBOARD_BINDINGS contains bindings for 'grafana' scope
const scopeNames = ['grafana'];
const mockBindings = [
{
metadata: { name: 'scope-1' },
spec: { title: 'Scope 1', filters: [] },
},
{
metadata: { name: 'scope-2' },
spec: { title: 'Scope 2', filters: [] },
metadata: { name: 'grafana-binding-1' },
spec: { dashboard: 'dashboard-1', scope: 'grafana' },
status: { dashboardTitle: 'Dashboard 1' },
},
];
const mockSubscription = createMockSubscription({
data: { items: mockBindings },
});
(scopeAPIv0alpha1.endpoints.getFindScopeDashboardBindingsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
mockBackendSrv.get.mockResolvedValueOnce(mockScopes[0]).mockResolvedValueOnce(mockScopes[1]);
const result = await apiClient.fetchDashboards(scopeNames);
const result = await apiClient.fetchMultipleScopes(['scope-1', 'scope-2']);
expect(mockBackendSrv.get).toHaveBeenCalledTimes(2);
expect(result).toEqual(mockScopes);
// Validate: returns bindings for the requested scope
expect(Array.isArray(result)).toBe(true);
result.forEach((binding) => {
expect(binding.spec.scope).toBe('grafana');
});
});
it('should filter out undefined scopes', async () => {
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const mockScope = {
metadata: { name: 'scope-1' },
spec: { title: 'Scope 1', filters: [] },
};
it('should fetch dashboards for multiple scopes', async () => {
// Expected: MOCK_SCOPE_DASHBOARD_BINDINGS contains bindings for 'grafana' and 'mimir' scopes
const scopeNames = ['grafana', 'mimir'];
const mockBindings = [
{
metadata: { name: 'grafana-binding-1' },
spec: { dashboard: 'dashboard-1', scope: 'grafana' },
status: { dashboardTitle: 'Dashboard 1' },
},
{
metadata: { name: 'mimir-binding-1' },
spec: { dashboard: 'dashboard-2', scope: 'mimir' },
status: { dashboardTitle: 'Dashboard 2' },
},
];
const mockSubscription = createMockSubscription({
data: { items: mockBindings },
});
(scopeAPIv0alpha1.endpoints.getFindScopeDashboardBindingsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
mockBackendSrv.get.mockResolvedValueOnce(mockScope).mockRejectedValueOnce(new Error('Not found'));
const result = await apiClient.fetchDashboards(scopeNames);
const result = await apiClient.fetchMultipleScopes(['scope-1', 'non-existent']);
expect(result).toEqual([mockScope]);
consoleErrorSpy.mockRestore();
// Validate: returns bindings for either scope
expect(Array.isArray(result)).toBe(true);
result.forEach((binding) => {
expect(scopeNames).toContain(binding.spec.scope);
});
});
it('should return empty array when no scopes provided', async () => {
const result = await apiClient.fetchMultipleScopes([]);
it('should return empty array when no dashboards found', async () => {
const mockSubscription = createMockSubscription({
data: { items: [] },
});
(scopeAPIv0alpha1.endpoints.getFindScopeDashboardBindingsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchDashboards(['non-existent-scope']);
expect(result).toEqual([]);
expect(mockBackendSrv.get).not.toHaveBeenCalled();
});
it('should handle API errors gracefully', async () => {
const mockSubscription = createMockSubscription({
data: { items: [] },
});
(scopeAPIv0alpha1.endpoints.getFindScopeDashboardBindingsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const result = await apiClient.fetchDashboards(['grafana']);
expect(Array.isArray(result)).toBe(true);
consoleErrorSpy.mockRestore();
});
});
describe('fetchScopeNavigations', () => {
it('should fetch navigations for scopes', async () => {
// Expected: MSW handler returns MOCK_SUB_SCOPE_MIMIR_ITEMS for 'mimir' scope
const scopeName = 'mimir';
const mockNavigations = [
{
metadata: { name: 'mimir-item-1' },
spec: { scope: 'mimir', url: '/d/mimir-dashboard-1' },
status: { title: 'Mimir Dashboard 1' },
},
];
const mockSubscription = createMockSubscription({
data: { items: mockNavigations },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNavigationsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchScopeNavigations([scopeName]);
// Validate: returns navigations for the requested scope
expect(Array.isArray(result)).toBe(true);
result.forEach((nav) => {
expect(nav.spec.scope).toBe('mimir');
});
});
it('should fetch navigations for multiple scopes', async () => {
// Expected: Returns navigations for both 'mimir' and 'loki'
const scopeNames = ['mimir', 'loki'];
const mockNavigations = [
{
metadata: { name: 'mimir-item-1' },
spec: { scope: 'mimir', url: '/d/mimir-dashboard-1' },
status: { title: 'Mimir Dashboard 1' },
},
{
metadata: { name: 'loki-item-1' },
spec: { scope: 'loki', url: '/d/loki-dashboard-1' },
status: { title: 'Loki Dashboard 1' },
},
];
const mockSubscription = createMockSubscription({
data: { items: mockNavigations },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNavigationsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchScopeNavigations(scopeNames);
// Validate: returns navigations for both scopes
expect(Array.isArray(result)).toBe(true);
const resultScopeNames = result.map((nav) => nav.spec.scope);
expect(resultScopeNames.length).toBeGreaterThan(0);
result.forEach((nav) => {
expect(scopeNames).toContain(nav.spec.scope);
});
});
it('should return empty array when no navigations found', async () => {
const mockSubscription = createMockSubscription({
data: { items: [] },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNavigationsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const result = await apiClient.fetchScopeNavigations(['grafana']);
expect(Array.isArray(result)).toBe(true);
});
it('should handle API errors gracefully', async () => {
const mockSubscription = createMockSubscription({
data: { items: [] },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNavigationsResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
const consoleErrorSpy = jest.spyOn(console, 'error').mockImplementation();
const result = await apiClient.fetchScopeNavigations(['mimir']);
expect(Array.isArray(result)).toBe(true);
consoleErrorSpy.mockRestore();
});
});
describe('performance considerations', () => {
it('should make single batched request with fetchMultipleScopeNodes', async () => {
mockBackendSrv.get.mockResolvedValue({ items: [] });
// This test verifies that the method uses the batched endpoint
const nodeNames = [
'applications-grafana',
'applications-mimir',
'applications-loki',
'applications-tempo',
'applications-cloud',
];
const expectedNodes = MOCK_NODES.filter((n) => nodeNames.includes(n.metadata.name));
const mockSubscription = createMockSubscription({
data: { items: expectedNodes },
});
(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate as jest.Mock).mockReturnValue(
mockSubscription
);
await apiClient.fetchMultipleScopeNodes(['node-1', 'node-2', 'node-3', 'node-4', 'node-5']);
const result = await apiClient.fetchMultipleScopeNodes(nodeNames);
// Should make exactly 1 API call
expect(mockBackendSrv.get).toHaveBeenCalledTimes(1);
expect(Array.isArray(result)).toBe(true);
// Verify it was called once with all names
expect(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate).toHaveBeenCalledTimes(1);
expect(scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate).toHaveBeenCalledWith(
{ names: nodeNames },
{ subscribe: false }
);
});
it('should make N sequential requests with fetchScopeNode (old pattern)', async () => {
mockBackendSrv.get.mockResolvedValue({
metadata: { name: 'test' },
spec: { nodeType: 'leaf', title: 'Test', parentName: '' },
// This test demonstrates the old pattern of fetching nodes one by one
// Each call makes a separate API request
const nodeNames = [
'applications-grafana',
'applications-mimir',
'applications-loki',
'applications-tempo',
'applications-cloud',
];
const mockNodes = nodeNames.map((name) => MOCK_NODES.find((n) => n.metadata.name === name)).filter(Boolean);
const mockSubscriptions = mockNodes.map((node) => createMockSubscription({ data: node }));
mockSubscriptions.forEach((sub) => {
(scopeAPIv0alpha1.endpoints.getScopeNode.initiate as jest.Mock).mockReturnValueOnce(sub);
});
// Simulate old pattern of fetching nodes one by one
await Promise.all([
apiClient.fetchScopeNode('node-1'),
apiClient.fetchScopeNode('node-2'),
apiClient.fetchScopeNode('node-3'),
apiClient.fetchScopeNode('node-4'),
apiClient.fetchScopeNode('node-5'),
const results = await Promise.all([
apiClient.fetchScopeNode('applications-grafana'),
apiClient.fetchScopeNode('applications-mimir'),
apiClient.fetchScopeNode('applications-loki'),
apiClient.fetchScopeNode('applications-tempo'),
apiClient.fetchScopeNode('applications-cloud'),
]);
// Should make 5 separate API calls
expect(mockBackendSrv.get).toHaveBeenCalledTimes(5);
expect(results).toHaveLength(5);
expect(results.every((r) => r !== undefined)).toBe(true);
// Verify it was called 5 times (once per node)
expect(scopeAPIv0alpha1.endpoints.getScopeNode.initiate).toHaveBeenCalledTimes(5);
});
});
});
+218 -38
View File
@@ -1,25 +1,95 @@
import { getAPIBaseURL } from '@grafana/api-clients';
import { Scope, ScopeDashboardBinding, ScopeNode } from '@grafana/data';
import { getBackendSrv, config } from '@grafana/runtime';
import { config } from '@grafana/runtime';
import { scopeAPIv0alpha1 } from 'app/api/clients/scope/v0alpha1';
import { getMessageFromError } from 'app/core/utils/errors';
import { dispatch } from 'app/store/store';
import { ScopeNavigation } from './dashboards/types';
const apiUrl = getAPIBaseURL('scope.grafana.app', 'v0alpha1');
export class ScopesApiClient {
/**
* Checks if the data is a Kubernetes Status error response.
* @param data The data to check
* @returns true if the data is a Status error, false otherwise
*/
private isStatusError(data: unknown): data is { kind: 'Status'; status: 'Failure'; message?: string; code?: number } {
return (
data !== null &&
typeof data === 'object' &&
'kind' in data &&
data.kind === 'Status' &&
'status' in data &&
data.status === 'Failure'
);
}
/**
* Extracts and validates data from an RTK Query result, checking for error responses.
* @param result The RTK Query result
* @param context Context for error logging (e.g., resource name)
* @returns The data if valid, undefined if it's an error response
*/
private extractDataOrHandleError<T>(result: { data?: T; error?: unknown }, context: string): T | undefined {
if ('data' in result && result.data) {
// Check if the data is actually an error response (Kubernetes Status object)
if (this.isStatusError(result.data)) {
const errorMessage = getMessageFromError(result.data);
console.error(`Failed to fetch %s:`, context, errorMessage);
return undefined;
}
return result.data;
}
if ('error' in result) {
const errorMessage = getMessageFromError(result.error);
console.error(`Failed to fetch %s:`, context, errorMessage);
}
return undefined;
}
async fetchScope(name: string): Promise<Scope | undefined> {
const subscription = dispatch(scopeAPIv0alpha1.endpoints.getScope.initiate({ name }, { subscribe: false }));
try {
return await getBackendSrv().get<Scope>(apiUrl + `/scopes/${name}`);
const result = await subscription;
return this.extractDataOrHandleError(result, `scope: ${name}`);
} catch (err) {
// TODO: maybe some better error handling
console.error(err);
const errorMessage = getMessageFromError(err);
console.error('Failed to fetch scope:', name, errorMessage);
return undefined;
} finally {
// Unsubscribe for extra safety, even though with subscribe: false and awaiting,
// the request completes before return, so this is mostly a no-op
subscription.unsubscribe();
}
}
async fetchMultipleScopes(scopesIds: string[]): Promise<Scope[]> {
const scopes = await Promise.all(scopesIds.map((id) => this.fetchScope(id)));
return scopes.filter((scope) => scope !== undefined);
if (scopesIds.length === 0) {
return [];
}
try {
const scopes = await Promise.all(scopesIds.map((id) => this.fetchScope(id)));
const successfulScopes = scopes.filter((scope) => scope !== undefined);
if (successfulScopes.length < scopesIds.length) {
const failedCount = scopesIds.length - successfulScopes.length;
console.warn(
'Failed to fetch',
failedCount,
'of',
scopesIds.length,
'scope(s). Requested IDs:',
scopesIds.join(', ')
);
}
return successfulScopes;
} catch (err) {
const errorMessage = getMessageFromError(err);
console.error('Failed to fetch multiple scopes:', scopesIds, errorMessage);
return [];
}
}
async fetchMultipleScopeNodes(names: string[]): Promise<ScopeNode[]> {
@@ -27,13 +97,31 @@ export class ScopesApiClient {
return Promise.resolve([]);
}
const subscription = dispatch(
scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate({ names }, { subscribe: false })
);
try {
const res = await getBackendSrv().get<{ items: ScopeNode[] }>(apiUrl + `/find/scope_node_children`, {
names: names,
});
return res?.items ?? [];
} catch (err) {
const result = await subscription;
if ('data' in result && result.data) {
// The generated API returns items compatible with @grafana/data ScopeNode
return result.data.items ?? [];
}
if ('error' in result) {
const errorMessage = getMessageFromError(result.error);
console.error('Failed to fetch multiple scope nodes:', names, errorMessage);
}
return [];
} catch (err) {
const errorMessage = getMessageFromError(err);
console.error('Failed to fetch multiple scope nodes:', names, errorMessage);
return [];
} finally {
// Unsubscribe for extra safety, even though with subscribe: false and awaiting,
// the request completes before return, so this is mostly a no-op
subscription.unsubscribe();
}
}
@@ -53,46 +141,128 @@ export class ScopesApiClient {
throw new Error('Limit must be between 1 and 10000');
}
const subscription = dispatch(
scopeAPIv0alpha1.endpoints.getFindScopeNodeChildrenResults.initiate(
{
parent: options.parent,
query: options.query,
limit,
},
{ subscribe: false, forceRefetch: true } // Froce refetch for search. Revisit this when necessary
)
);
try {
const nodes =
(
await getBackendSrv().get<{ items: ScopeNode[] }>(apiUrl + `/find/scope_node_children`, {
parent: options.parent,
query: options.query,
limit,
})
)?.items ?? [];
const result = await subscription;
if ('data' in result && result.data) {
// The generated API returns items compatible with @grafana/data ScopeNode
return result.data.items ?? [];
}
if ('error' in result) {
const errorMessage = getMessageFromError(result.error);
const contextParts: string[] = [];
if (options.parent) {
contextParts.push('parent="' + options.parent + '"');
}
if (options.query) {
contextParts.push('query="' + options.query + '"');
}
contextParts.push('limit=' + limit);
const context = contextParts.join(', ');
console.error('Failed to fetch scope nodes:', context, errorMessage);
}
return nodes;
} catch (err) {
return [];
} catch (err) {
const errorMessage = getMessageFromError(err);
const contextParts: string[] = [];
if (options.parent) {
contextParts.push('parent="' + options.parent + '"');
}
if (options.query) {
contextParts.push('query="' + options.query + '"');
}
contextParts.push('limit=' + limit);
const context = contextParts.join(', ');
console.error('Failed to fetch scope nodes:', context, errorMessage);
return [];
} finally {
// Unsubscribe for extra safety, even though with subscribe: false and awaiting,
// the request completes before return, so this is mostly a no-op
subscription.unsubscribe();
}
}
public fetchDashboards = async (scopeNames: string[]): Promise<ScopeDashboardBinding[]> => {
try {
const response = await getBackendSrv().get<{ items: ScopeDashboardBinding[] }>(
apiUrl + `/find/scope_dashboard_bindings`,
const subscription = dispatch(
// Note: `name` is required by generated types but ignored by the query builder (codegen bug)
scopeAPIv0alpha1.endpoints.getFindScopeDashboardBindingsResults.initiate(
{
name: '',
scope: scopeNames,
}
);
},
{ subscribe: false }
)
);
try {
const result = await subscription;
if ('data' in result && result.data) {
// The generated API returns items compatible with @grafana/data ScopeDashboardBinding
return result.data.items ?? [];
}
if ('error' in result) {
const errorMessage = getMessageFromError(result.error);
console.error('Failed to fetch dashboards for scopes:', scopeNames, errorMessage);
}
return response?.items ?? [];
} catch (err) {
return [];
} catch (err) {
const errorMessage = getMessageFromError(err);
console.error('Failed to fetch dashboards for scopes:', scopeNames, errorMessage);
return [];
} finally {
// Unsubscribe for extra safety, even though with subscribe: false and awaiting,
// the request completes before return, so this is mostly a no-op
subscription.unsubscribe();
}
};
public fetchScopeNavigations = async (scopeNames: string[]): Promise<ScopeNavigation[]> => {
const subscription = dispatch(
// Note: `name` is required by generated types but ignored by the query builder (codegen bug)
scopeAPIv0alpha1.endpoints.getFindScopeNavigationsResults.initiate(
{
name: '',
scope: scopeNames,
},
{ subscribe: false }
)
);
try {
const response = await getBackendSrv().get<{ items: ScopeNavigation[] }>(apiUrl + `/find/scope_navigations`, {
scope: scopeNames,
});
const result = await subscription;
if ('data' in result && result.data) {
// The generated API returns items compatible with ScopeNavigation
return result.data.items ?? [];
}
if ('error' in result) {
const errorMessage = getMessageFromError(result.error);
console.error('Failed to fetch scope navigations for scopes:', scopeNames, errorMessage);
}
return response?.items ?? [];
} catch (err) {
return [];
} catch (err) {
const errorMessage = getMessageFromError(err);
console.error('Failed to fetch scope navigations for scopes:', scopeNames, errorMessage);
return [];
} finally {
// Unsubscribe for extra safety, even though with subscribe: false and awaiting,
// the request completes before return, so this is mostly a no-op
subscription.unsubscribe();
}
};
@@ -100,11 +270,21 @@ export class ScopesApiClient {
if (!config.featureToggles.useScopeSingleNodeEndpoint) {
return Promise.resolve(undefined);
}
const subscription = dispatch(
scopeAPIv0alpha1.endpoints.getScopeNode.initiate({ name: scopeNodeId }, { subscribe: false })
);
try {
const response = await getBackendSrv().get<ScopeNode>(apiUrl + `/scopenodes/${scopeNodeId}`);
return response;
const result = await subscription;
return this.extractDataOrHandleError(result, `scope node: ${scopeNodeId}`);
} catch (err) {
const errorMessage = getMessageFromError(err);
console.error('Failed to fetch scope node:', scopeNodeId, errorMessage);
return undefined;
} finally {
// Unsubscribe for extra safety, even though with subscribe: false and awaiting,
// the request completes before return, so this is mostly a no-op
subscription.unsubscribe();
}
};
}
@@ -5,7 +5,11 @@ import { config, locationService } from '@grafana/runtime';
import { ScopesApiClient } from '../ScopesApiClient';
// Import mock data for subScope tests
import { navigationWithSubScope, navigationWithSubScope2, navigationWithSubScopeAndGroups } from '../tests/utils/mocks';
import {
navigationWithSubScope,
navigationWithSubScope2,
navigationWithSubScopeAndGroups,
} from '../tests/utils/mockData';
import { ScopesDashboardsService, filterItemsWithSubScopesInPath } from './ScopesDashboardsService';
import { ScopeNavigation } from './types';
@@ -1,20 +1,24 @@
import { config } from '@grafana/runtime';
import { config, setBackendSrv } from '@grafana/runtime';
import { setupMockServer } from '@grafana/test-utils/server';
import { backendSrv } from 'app/core/services/backend_srv';
import { setDashboardAPI } from 'app/features/dashboard/api/dashboard_api';
import { getDashboardScenePageStateManager } from 'app/features/dashboard-scene/pages/DashboardScenePageStateManager';
import { enterEditMode, updateMyVar, updateScopes, updateTimeRange } from './utils/actions';
import { getDatasource, getInstanceSettings, getMock } from './utils/mocks';
import { getDatasource, getInstanceSettings } from './utils/mocks';
import { renderDashboard, resetScenes } from './utils/render';
jest.mock('@grafana/runtime', () => ({
__esModule: true,
...jest.requireActual('@grafana/runtime'),
useChromeHeaderHeight: jest.fn(),
getBackendSrv: () => ({ get: getMock }),
getDataSourceSrv: () => ({ get: getDatasource, getInstanceSettings }),
usePluginLinks: jest.fn().mockReturnValue({ links: [] }),
}));
setBackendSrv(backendSrv);
setupMockServer();
describe('Dashboard reload', () => {
let dashboardReloadSpy: jest.SpyInstance;
beforeEach(() => {
@@ -1,6 +1,9 @@
import { screen, waitFor } from '@testing-library/react';
import { config, locationService } from '@grafana/runtime';
import { config, locationService, setBackendSrv } from '@grafana/runtime';
import { setupMockServer } from '@grafana/test-utils/server';
import { MOCK_SUB_SCOPE_MIMIR_ITEMS } from '@grafana/test-utils/unstable';
import { backendSrv } from 'app/core/services/backend_srv';
import { ScopesApiClient } from '../ScopesApiClient';
import { ScopesService } from '../ScopesService';
@@ -35,26 +38,25 @@ import {
dashboardWithRootFolder,
dashboardWithRootFolderAndOtherFolder,
dashboardWithTwoFolders,
getDatasource,
getInstanceSettings,
getMock,
navigationWithSubScope,
navigationWithSubScope2,
navigationWithSubScopeDifferent,
navigationWithSubScopeAndGroups,
subScopeMimirItems,
} from './utils/mocks';
} from './utils/mockData';
import { getDatasource, getInstanceSettings } from './utils/mocks';
import { renderDashboard, resetScenes } from './utils/render';
jest.mock('@grafana/runtime', () => ({
__esModule: true,
...jest.requireActual('@grafana/runtime'),
useChromeHeaderHeight: jest.fn(),
getBackendSrv: () => ({ get: getMock }),
getDataSourceSrv: () => ({ get: getDatasource, getInstanceSettings }),
usePluginLinks: jest.fn().mockReturnValue({ links: [] }),
}));
setBackendSrv(backendSrv);
setupMockServer();
describe('Dashboards list', () => {
let fetchDashboardsSpy: jest.SpyInstance;
let fetchScopeNavigationsSpy: jest.SpyInstance;
@@ -539,7 +541,7 @@ describe('Dashboards list', () => {
it('Loads subScope items when folder is expanded', async () => {
const mockNavigations = [navigationWithSubScope];
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(subScopeMimirItems);
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(MOCK_SUB_SCOPE_MIMIR_ITEMS);
await toggleDashboards();
await updateScopes(scopesService, ['grafana']);
@@ -571,7 +573,7 @@ describe('Dashboards list', () => {
it('Shows loading state while fetching subScope items', async () => {
const mockNavigations = [navigationWithSubScope];
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(subScopeMimirItems);
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(MOCK_SUB_SCOPE_MIMIR_ITEMS);
await toggleDashboards();
await updateScopes(scopesService, ['grafana']);
@@ -591,7 +593,7 @@ describe('Dashboards list', () => {
it('Multiple subScope folders with same subScope load same content', async () => {
const mockNavigations = [navigationWithSubScope, navigationWithSubScope2];
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValue(subScopeMimirItems);
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValue(MOCK_SUB_SCOPE_MIMIR_ITEMS);
await toggleDashboards();
await updateScopes(scopesService, ['grafana']);
@@ -676,7 +678,7 @@ describe('Dashboards list', () => {
it('Filters search works with loaded subScope content', async () => {
const mockNavigations = [navigationWithSubScope];
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(subScopeMimirItems);
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(MOCK_SUB_SCOPE_MIMIR_ITEMS);
await toggleDashboards();
await updateScopes(scopesService, ['grafana']);
@@ -715,7 +717,7 @@ describe('Dashboards list', () => {
it('Does not fetch subScope items if folder is already loaded', async () => {
const mockNavigations = [navigationWithSubScope];
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(subScopeMimirItems);
fetchScopeNavigationsSpy.mockResolvedValueOnce(mockNavigations).mockResolvedValueOnce(MOCK_SUB_SCOPE_MIMIR_ITEMS);
await toggleDashboards();
await updateScopes(scopesService, ['grafana']);
@@ -1,4 +1,7 @@
import { config, locationService } from '@grafana/runtime';
import { config, locationService, setBackendSrv } from '@grafana/runtime';
import { setupMockServer } from '@grafana/test-utils/server';
import { MOCK_SCOPES } from '@grafana/test-utils/unstable';
import { backendSrv } from 'app/core/services/backend_srv';
import { getDashboardScenePageStateManager } from '../../dashboard-scene/pages/DashboardScenePageStateManager';
import { ScopesService } from '../ScopesService';
@@ -25,7 +28,7 @@ import {
expectResultApplicationsGrafanaSelected,
expectScopesSelectorValue,
} from './utils/assertions';
import { getDatasource, getInstanceSettings, getMock, mocksScopes } from './utils/mocks';
import { getDatasource, getInstanceSettings } from './utils/mocks';
import { renderDashboard, resetScenes } from './utils/render';
import { getListOfScopes } from './utils/selectors';
@@ -33,11 +36,13 @@ jest.mock('@grafana/runtime', () => ({
__esModule: true,
...jest.requireActual('@grafana/runtime'),
useChromeHeaderHeight: jest.fn(),
getBackendSrv: () => ({ get: getMock }),
getDataSourceSrv: () => ({ get: getDatasource, getInstanceSettings }),
usePluginLinks: jest.fn().mockReturnValue({ links: [] }),
}));
setBackendSrv(backendSrv);
setupMockServer();
describe('Selector', () => {
let fetchSelectedScopesSpy: jest.SpyInstance;
let dashboardReloadSpy: jest.SpyInstance;
@@ -67,7 +72,7 @@ describe('Selector', () => {
await selectResultCloud();
await applyScopes();
expect(fetchSelectedScopesSpy).toHaveBeenCalled();
expect(getListOfScopes(scopesService)).toEqual(mocksScopes.filter(({ metadata: { name } }) => name === 'cloud'));
expect(getListOfScopes(scopesService)).toEqual(MOCK_SCOPES.filter(({ metadata: { name } }) => name === 'cloud'));
});
it('Does not save the scopes on close', async () => {
@@ -1,7 +1,9 @@
import { screen } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import { config, locationService } from '@grafana/runtime';
import { config, locationService, setBackendSrv } from '@grafana/runtime';
import { setupMockServer } from '@grafana/test-utils/server';
import { backendSrv } from 'app/core/services/backend_srv';
import { ScopesService } from '../ScopesService';
@@ -43,18 +45,20 @@ import {
expectScopesHeadline,
expectScopesSelectorValue,
} from './utils/assertions';
import { getDatasource, getInstanceSettings, getMock } from './utils/mocks';
import { getDatasource, getInstanceSettings } from './utils/mocks';
import { renderDashboard, resetScenes } from './utils/render';
jest.mock('@grafana/runtime', () => ({
__esModule: true,
...jest.requireActual('@grafana/runtime'),
useChromeHeaderHeight: jest.fn(),
getBackendSrv: () => ({ get: getMock }),
getDataSourceSrv: () => ({ get: getDatasource, getInstanceSettings }),
usePluginLinks: jest.fn().mockReturnValue({ links: [] }),
}));
setBackendSrv(backendSrv);
setupMockServer();
describe('Tree', () => {
let fetchNodesSpy: jest.SpyInstance;
let fetchScopeSpy: jest.SpyInstance;
@@ -0,0 +1,92 @@
import { ScopeDashboardBinding } from '@grafana/data';
import { ScopeNavigation } from '../../dashboards/types';
// Mock subScope navigation items (specific to these tests)
export const navigationWithSubScope: ScopeNavigation = {
metadata: { name: 'subscope-nav-1' },
spec: {
scope: 'grafana',
subScope: 'mimir',
url: '/d/subscope-dashboard-1',
},
status: {
title: 'Mimir Dashboards',
groups: [], // subScope items ignore groups
},
};
export const navigationWithSubScope2: ScopeNavigation = {
metadata: { name: 'subscope-nav-2' },
spec: {
scope: 'grafana',
subScope: 'mimir',
url: '/d/subscope-dashboard-2',
},
status: {
title: 'Mimir Overview',
groups: [],
},
};
export const navigationWithSubScopeDifferent: ScopeNavigation = {
metadata: { name: 'subscope-nav-3' },
spec: {
scope: 'grafana',
subScope: 'loki',
url: '/d/subscope-dashboard-3',
},
status: {
title: 'Loki Dashboards',
groups: [],
},
};
export const navigationWithSubScopeAndGroups: ScopeNavigation = {
metadata: { name: 'subscope-nav-groups' },
spec: {
scope: 'grafana',
subScope: 'mimir',
url: '/d/subscope-dashboard-groups',
},
status: {
title: 'Mimir with Groups',
groups: ['Group1', 'Group2'], // Should be ignored for subScope items
},
};
const generateScopeDashboardBinding = (dashboardTitle: string, groups?: string[], dashboardId?: string) => ({
metadata: { name: `${dashboardTitle}-name` },
spec: {
dashboard: `${dashboardId ?? dashboardTitle}-dashboard`,
scope: `${dashboardTitle}-scope`,
},
status: {
dashboardTitle,
groups,
},
});
export const dashboardWithoutFolder: ScopeDashboardBinding = generateScopeDashboardBinding('Without Folder');
export const dashboardWithOneFolder: ScopeDashboardBinding = generateScopeDashboardBinding('With one folder', [
'Folder 1',
]);
export const dashboardWithTwoFolders: ScopeDashboardBinding = generateScopeDashboardBinding('With two folders', [
'Folder 1',
'Folder 2',
]);
export const alternativeDashboardWithTwoFolders: ScopeDashboardBinding = generateScopeDashboardBinding(
'Alternative with two folders',
['Folder 1', 'Folder 2'],
'With two folders'
);
export const dashboardWithRootFolder: ScopeDashboardBinding = generateScopeDashboardBinding('With root folder', ['']);
export const alternativeDashboardWithRootFolder: ScopeDashboardBinding = generateScopeDashboardBinding(
'Alternative With root folder',
[''],
'With root folder'
);
export const dashboardWithRootFolderAndOtherFolder: ScopeDashboardBinding = generateScopeDashboardBinding(
'With root folder and other folder',
['', 'Folder 3']
);
@@ -1,594 +1,8 @@
import { Scope, ScopeDashboardBinding, ScopeNode } from '@grafana/data';
import { DataSourceRef } from '@grafana/schema/dist/esm/common/common.gen';
import { getDashboardScenePageStateManager } from 'app/features/dashboard-scene/pages/DashboardScenePageStateManager';
import { ScopeNavigation } from '../../dashboards/types';
export const mocksScopes: Scope[] = [
{
metadata: { name: 'cloud' },
spec: {
title: 'Cloud',
filters: [{ key: 'cloud', value: '.*', operator: 'regex-match' }],
},
},
{
metadata: { name: 'dev' },
spec: {
title: 'Dev',
filters: [{ key: 'cloud', value: 'dev', operator: 'equals' }],
},
},
{
metadata: { name: 'ops' },
spec: {
title: 'Ops',
filters: [{ key: 'cloud', value: 'ops', operator: 'equals' }],
},
},
{
metadata: { name: 'prod' },
spec: {
title: 'Prod',
filters: [{ key: 'cloud', value: 'prod', operator: 'equals' }],
},
},
{
metadata: { name: 'grafana' },
spec: {
title: 'Grafana',
filters: [{ key: 'app', value: 'grafana', operator: 'equals' }],
},
},
{
metadata: { name: 'mimir' },
spec: {
title: 'Mimir',
filters: [{ key: 'app', value: 'mimir', operator: 'equals' }],
},
},
{
metadata: { name: 'loki' },
spec: {
title: 'Loki',
filters: [{ key: 'app', value: 'loki', operator: 'equals' }],
},
},
{
metadata: { name: 'tempo' },
spec: {
title: 'Tempo',
filters: [{ key: 'app', value: 'tempo', operator: 'equals' }],
},
},
{
metadata: { name: 'dev-env' },
spec: {
title: 'Development',
filters: [{ key: 'environment', value: 'dev', operator: 'equals' }],
},
},
{
metadata: { name: 'prod-env' },
spec: {
title: 'Production',
filters: [{ key: 'environment', value: 'prod', operator: 'equals' }],
},
},
] as const;
const dashboardBindingsGenerator = (
scopes: string[],
dashboards: Array<{ dashboardTitle: string; dashboardKey?: string; groups?: string[] }>
) =>
scopes.reduce<ScopeDashboardBinding[]>((scopeAcc, scopeTitle) => {
const scope = scopeTitle.toLowerCase().replaceAll(' ', '-').replaceAll('/', '-');
return [
...scopeAcc,
...dashboards.reduce<ScopeDashboardBinding[]>((acc, { dashboardTitle, groups, dashboardKey }, idx) => {
dashboardKey = dashboardKey ?? dashboardTitle.toLowerCase().replaceAll(' ', '-').replaceAll('/', '-');
const group = !groups
? ''
: groups.length === 1
? groups[0] === ''
? ''
: `${groups[0].toLowerCase().replaceAll(' ', '-').replaceAll('/', '-')}-`
: `multiple${idx}-`;
const dashboard = `${group}${dashboardKey}`;
return [
...acc,
{
metadata: { name: `${scope}-${dashboard}` },
spec: {
dashboard,
scope,
},
status: {
dashboardTitle,
groups,
},
},
];
}, []),
];
}, []);
// Scope -> dashboard binding fixtures, produced by dashboardBindingsGenerator,
// so binding names follow its `<scope>-[<group>-]<dashboard>` slug convention.
export const mocksScopeDashboardBindings: ScopeDashboardBinding[] = [
  // Dashboards bound only to the Grafana scope, spread across several groups
  // plus ungrouped entries.
  ...dashboardBindingsGenerator(
    ['Grafana'],
    [
      { dashboardTitle: 'Data Sources', groups: ['General'] },
      { dashboardTitle: 'Usage', groups: ['General'] },
      { dashboardTitle: 'Frontend Errors', groups: ['Observability'] },
      { dashboardTitle: 'Frontend Logs', groups: ['Observability'] },
      { dashboardTitle: 'Backend Errors', groups: ['Observability'] },
      { dashboardTitle: 'Backend Logs', groups: ['Observability'] },
      { dashboardTitle: 'Usage Overview', groups: ['Usage'] },
      { dashboardTitle: 'Data Sources', groups: ['Usage'] },
      { dashboardTitle: 'Stats', groups: ['Usage'] },
      { dashboardTitle: 'Overview', groups: [''] }, // single empty group -> treated as ungrouped by the generator
      { dashboardTitle: 'Frontend' },
      { dashboardTitle: 'Stats' },
    ]
  ),
  // Dashboards shared by the Loki, Tempo and Mimir application scopes; entries
  // with two groups get the `multiple<idx>-` id prefix.
  ...dashboardBindingsGenerator(
    ['Loki', 'Tempo', 'Mimir'],
    [
      { dashboardTitle: 'Ingester', groups: ['Components', 'Investigations'] },
      { dashboardTitle: 'Distributor', groups: ['Components', 'Investigations'] },
      { dashboardTitle: 'Compacter', groups: ['Components', 'Investigations'] },
      { dashboardTitle: 'Datasource Errors', groups: ['Observability', 'Investigations'] },
      { dashboardTitle: 'Datasource Logs', groups: ['Observability', 'Investigations'] },
      { dashboardTitle: 'Overview' },
      { dashboardTitle: 'Stats', dashboardKey: 'another-stats' }, // explicit key overrides the title-derived slug
    ]
  ),
  // Dashboards shared by the Dev, Ops and Prod cloud scopes.
  ...dashboardBindingsGenerator(
    ['Dev', 'Ops', 'Prod'],
    [
      { dashboardTitle: 'Overview', groups: ['Cardinality Management'] },
      { dashboardTitle: 'Metrics', groups: ['Cardinality Management'] },
      { dashboardTitle: 'Labels', groups: ['Cardinality Management'] },
      { dashboardTitle: 'Overview', groups: ['Usage Insights'] },
      { dashboardTitle: 'Data Sources', groups: ['Usage Insights'] },
      { dashboardTitle: 'Query Errors', groups: ['Usage Insights'] },
      { dashboardTitle: 'Alertmanager', groups: ['Usage Insights'] },
      { dashboardTitle: 'Metrics Ingestion', groups: ['Usage Insights'] },
      { dashboardTitle: 'Billing/Usage' },
    ]
  ),
] as const;
// Scope-node tree fixtures. Nodes reference their parent via `spec.parentName`
// ('' = root); `linkType: 'scope'` + `linkId` link a node to a scope by name.
export const mocksNodes: ScopeNode[] = [
  // Root containers.
  {
    metadata: { name: 'applications' },
    spec: {
      nodeType: 'container',
      title: 'Applications',
      description: 'Application Scopes',
      parentName: '',
    },
  },
  {
    metadata: { name: 'cloud' },
    spec: {
      nodeType: 'container',
      title: 'Cloud',
      description: 'Cloud Scopes',
      // Only one child of this container may be selected at a time.
      disableMultiSelect: true,
      linkType: 'scope',
      linkId: 'cloud',
      parentName: '',
    },
  },
  // Application leaves under `applications`.
  {
    metadata: { name: 'applications-grafana' },
    spec: {
      nodeType: 'leaf',
      title: 'Grafana',
      description: 'Grafana',
      linkType: 'scope',
      linkId: 'grafana',
      parentName: 'applications',
    },
  },
  {
    metadata: { name: 'applications-mimir' },
    spec: {
      nodeType: 'leaf',
      title: 'Mimir',
      description: 'Mimir',
      linkType: 'scope',
      linkId: 'mimir',
      parentName: 'applications',
    },
  },
  {
    metadata: { name: 'applications-loki' },
    spec: {
      nodeType: 'leaf',
      title: 'Loki',
      description: 'Loki',
      linkType: 'scope',
      linkId: 'loki',
      parentName: 'applications',
    },
  },
  {
    metadata: { name: 'applications-tempo' },
    spec: {
      nodeType: 'leaf',
      title: 'Tempo',
      description: 'Tempo',
      linkType: 'scope',
      linkId: 'tempo',
      parentName: 'applications',
    },
  },
  // Nested Cloud container under Applications, with dev/ops/prod leaves.
  {
    metadata: { name: 'applications-cloud' },
    spec: {
      nodeType: 'container',
      title: 'Cloud',
      description: 'Application/Cloud Scopes',
      linkType: 'scope',
      linkId: 'cloud',
      parentName: 'applications',
    },
  },
  {
    metadata: { name: 'applications-cloud-dev' },
    spec: {
      nodeType: 'leaf',
      title: 'Dev',
      description: 'Dev',
      linkType: 'scope',
      linkId: 'dev',
      parentName: 'applications-cloud',
    },
  },
  {
    metadata: { name: 'applications-cloud-ops' },
    spec: {
      nodeType: 'leaf',
      title: 'Ops',
      description: 'Ops',
      linkType: 'scope',
      linkId: 'ops',
      parentName: 'applications-cloud',
    },
  },
  {
    metadata: { name: 'applications-cloud-prod' },
    spec: {
      nodeType: 'leaf',
      title: 'Prod',
      description: 'Prod',
      linkType: 'scope',
      linkId: 'prod',
      parentName: 'applications-cloud',
    },
  },
  // Environment leaves under the root `cloud` container.
  {
    metadata: { name: 'cloud-dev' },
    spec: {
      nodeType: 'leaf',
      title: 'Dev',
      description: 'Dev',
      linkType: 'scope',
      linkId: 'dev',
      parentName: 'cloud',
    },
  },
  {
    metadata: { name: 'cloud-ops' },
    spec: {
      nodeType: 'leaf',
      title: 'Ops',
      description: 'Ops',
      linkType: 'scope',
      linkId: 'ops',
      parentName: 'cloud',
    },
  },
  {
    metadata: { name: 'cloud-prod' },
    spec: {
      nodeType: 'leaf',
      title: 'Prod',
      description: 'Prod',
      linkType: 'scope',
      linkId: 'prod',
      parentName: 'cloud',
    },
  },
  // Nested Applications container under Cloud (mirror of applications-cloud).
  {
    metadata: { name: 'cloud-applications' },
    spec: {
      nodeType: 'container',
      title: 'Applications',
      description: 'Cloud/Application Scopes',
      parentName: 'cloud',
    },
  },
  {
    metadata: { name: 'cloud-applications-grafana' },
    spec: {
      nodeType: 'leaf',
      title: 'Grafana',
      description: 'Grafana',
      linkType: 'scope',
      linkId: 'grafana',
      parentName: 'cloud-applications',
    },
  },
  {
    metadata: { name: 'cloud-applications-mimir' },
    spec: {
      nodeType: 'leaf',
      title: 'Mimir',
      description: 'Mimir',
      linkType: 'scope',
      linkId: 'mimir',
      parentName: 'cloud-applications',
    },
  },
  {
    metadata: { name: 'cloud-applications-loki' },
    spec: {
      nodeType: 'leaf',
      title: 'Loki',
      description: 'Loki',
      linkType: 'scope',
      linkId: 'loki',
      parentName: 'cloud-applications',
    },
  },
  {
    metadata: { name: 'cloud-applications-tempo' },
    spec: {
      nodeType: 'leaf',
      title: 'Tempo',
      description: 'Tempo',
      linkType: 'scope',
      linkId: 'tempo',
      parentName: 'cloud-applications',
    },
  },
  // Environments subtree: containers that themselves link to scopes.
  {
    metadata: { name: 'environments' },
    spec: {
      nodeType: 'container',
      title: 'Environments',
      description: 'Environment Scopes',
      disableMultiSelect: true,
      parentName: '',
    },
  },
  {
    metadata: { name: 'environments-dev' },
    spec: {
      nodeType: 'container',
      title: 'Development',
      description: 'Development Environment',
      linkType: 'scope',
      linkId: 'dev-env',
      parentName: 'environments',
    },
  },
  {
    metadata: { name: 'environments-prod' },
    spec: {
      nodeType: 'container',
      title: 'Production',
      description: 'Production Environment',
      linkType: 'scope',
      linkId: 'prod-env',
      parentName: 'environments',
    },
  },
] as const;
// Spy on the dashboard scene page state manager's reloadDashboard so tests can
// assert that scope changes trigger a dashboard reload.
export const dashboardReloadSpy = jest.spyOn(getDashboardScenePageStateManager(), 'reloadDashboard');
// Base path of the scope API group; shared by the route matchers below.
const scopeApiBase = '/apis/scope.grafana.app/v0alpha1/namespaces/default';

/**
 * Mock `backendSrv.get` implementation that routes scope-related API calls to
 * the in-memory fixtures defined in this file. Unknown URLs resolve to `{}`.
 */
export const getMock = jest
  .fn()
  .mockImplementation(
    (url: string, params: { parent: string; scope: string[]; query?: string } & Record<string, string | string[]>) => {
      // Children of a scope node, optionally filtered by a search query.
      if (url.startsWith(`${scopeApiBase}/find/scope_node_children`)) {
        return {
          items: mocksNodes.filter(
            ({ spec: { title, parentName } }) =>
              parentName === params.parent && title.toLowerCase().includes((params.query ?? '').toLowerCase())
          ),
        };
      }
      // Single scope lookup by (case-insensitive) name.
      if (url.startsWith(`${scopeApiBase}/scopes/`)) {
        const name = url.replace(`${scopeApiBase}/scopes/`, '');
        return mocksScopes.find((scope) => scope.metadata.name.toLowerCase() === name.toLowerCase()) ?? {};
      }
      // Single scope node lookup by exact name.
      if (url.startsWith(`${scopeApiBase}/scopenodes/`)) {
        const name = url.replace(`${scopeApiBase}/scopenodes/`, '');
        return mocksNodes.find((node) => node.metadata.name === name);
      }
      // Dashboard bindings for the selected scopes. Guard against a missing
      // `scope` param (the original threw a TypeError here while the
      // scope_navigations branch below was guarded).
      if (url.startsWith(`${scopeApiBase}/find/scope_dashboard_bindings`)) {
        return {
          items: mocksScopeDashboardBindings.filter(({ spec: { scope: bindingScope } }) =>
            (params.scope ?? []).includes(bindingScope)
          ),
        };
      }
      if (url.startsWith(`${scopeApiBase}/find/scope_navigations`)) {
        // Handle subScope fetch requests
        if (params.scope?.includes('mimir')) {
          return {
            items: subScopeMimirItems,
          };
        }
        if (params.scope?.includes('loki')) {
          return {
            items: subScopeLokiItems,
          };
        }
        // Return empty for other scopes
        return {
          items: [],
        };
      }
      // Legacy dashboard endpoint only needs a truthy payload.
      if (url.startsWith('/api/dashboards/uid/')) {
        return {};
      }
      // App-platform dashboard endpoint: minimal metadata-only payload.
      if (url.startsWith('/apis/dashboard.grafana.app/v0alpha1/namespaces/default/dashboards/')) {
        return {
          metadata: {
            name: '1',
          },
        };
      }
      return {};
    }
  );
/**
 * Builds a ScopeDashboardBinding fixture whose identifiers are derived from the
 * dashboard title; `dashboardId`, when given, overrides the base used for the
 * `spec.dashboard` value (so two bindings can share one dashboard id).
 */
const generateScopeDashboardBinding = (dashboardTitle: string, groups?: string[], dashboardId?: string) => {
  const dashboard = `${dashboardId ?? dashboardTitle}-dashboard`;
  return {
    metadata: { name: `${dashboardTitle}-name` },
    spec: {
      dashboard,
      scope: `${dashboardTitle}-scope`,
    },
    status: { dashboardTitle, groups },
  };
};
// Individual binding fixtures covering the folder/group combinations the
// dashboard-list tests exercise: no groups, one, two, root ('') and mixed.
export const dashboardWithoutFolder: ScopeDashboardBinding = generateScopeDashboardBinding('Without Folder');
export const dashboardWithOneFolder: ScopeDashboardBinding = generateScopeDashboardBinding('With one folder', [
  'Folder 1',
]);
export const dashboardWithTwoFolders: ScopeDashboardBinding = generateScopeDashboardBinding('With two folders', [
  'Folder 1',
  'Folder 2',
]);
// Shares the 'With two folders' dashboard id with the binding above.
export const alternativeDashboardWithTwoFolders: ScopeDashboardBinding = generateScopeDashboardBinding(
  'Alternative with two folders',
  ['Folder 1', 'Folder 2'],
  'With two folders'
);
export const dashboardWithRootFolder: ScopeDashboardBinding = generateScopeDashboardBinding('With root folder', ['']);
// Shares the 'With root folder' dashboard id with the binding above.
export const alternativeDashboardWithRootFolder: ScopeDashboardBinding = generateScopeDashboardBinding(
  'Alternative With root folder',
  [''],
  'With root folder'
);
export const dashboardWithRootFolderAndOtherFolder: ScopeDashboardBinding = generateScopeDashboardBinding(
  'With root folder and other folder',
  ['', 'Folder 3']
);
// Mock subScope navigation items: navigations on the 'grafana' scope whose
// `subScope` points at another scope ('mimir' or 'loki').
export const navigationWithSubScope: ScopeNavigation = {
  metadata: { name: 'subscope-nav-1' },
  spec: {
    scope: 'grafana',
    subScope: 'mimir',
    url: '/d/subscope-dashboard-1',
  },
  status: {
    title: 'Mimir Dashboards',
    groups: [], // subScope items ignore groups
  },
};
// Second navigation targeting the same 'mimir' subScope.
export const navigationWithSubScope2: ScopeNavigation = {
  metadata: { name: 'subscope-nav-2' },
  spec: {
    scope: 'grafana',
    subScope: 'mimir',
    url: '/d/subscope-dashboard-2',
  },
  status: {
    title: 'Mimir Overview',
    groups: [],
  },
};
// Navigation targeting a different subScope ('loki').
export const navigationWithSubScopeDifferent: ScopeNavigation = {
  metadata: { name: 'subscope-nav-3' },
  spec: {
    scope: 'grafana',
    subScope: 'loki',
    url: '/d/subscope-dashboard-3',
  },
  status: {
    title: 'Loki Dashboards',
    groups: [],
  },
};
// subScope navigation that also carries groups.
export const navigationWithSubScopeAndGroups: ScopeNavigation = {
  metadata: { name: 'subscope-nav-groups' },
  spec: {
    scope: 'grafana',
    subScope: 'mimir',
    url: '/d/subscope-dashboard-groups',
  },
  status: {
    title: 'Mimir with Groups',
    groups: ['Group1', 'Group2'], // Should be ignored for subScope items
  },
};
// Mock items that will be loaded when subScope folder is expanded
// (served by getMock for `scope_navigations` requests with scope 'mimir').
export const subScopeMimirItems: ScopeNavigation[] = [
  {
    metadata: { name: 'mimir-item-1' },
    spec: {
      scope: 'mimir',
      url: '/d/mimir-dashboard-1',
    },
    status: {
      title: 'Mimir Dashboard 1',
      groups: ['General'],
    },
  },
  {
    metadata: { name: 'mimir-item-2' },
    spec: {
      scope: 'mimir',
      url: '/d/mimir-dashboard-2',
    },
    status: {
      title: 'Mimir Dashboard 2',
      groups: ['Observability'],
    },
  },
];
// Served by getMock for `scope_navigations` requests with scope 'loki'.
export const subScopeLokiItems: ScopeNavigation[] = [
  {
    metadata: { name: 'loki-item-1' },
    spec: {
      scope: 'loki',
      url: '/d/loki-dashboard-1',
    },
    status: {
      title: 'Loki Dashboard 1',
      groups: ['General'],
    },
  },
];
export const getDatasource = async (ref: DataSourceRef) => {
if (ref.uid === '-- Grafana --') {
return {
@@ -12,8 +12,6 @@ import { DashboardDataDTO, DashboardDTO, DashboardMeta } from 'app/types/dashboa
import { defaultScopesServices, ScopesContextProvider } from '../../ScopesContextProvider';
import { getMock } from './mocks';
const getDashboardDTO: (
overrideDashboard: Partial<DashboardDataDTO>,
overrideMeta: Partial<DashboardMeta>
@@ -208,7 +206,6 @@ export async function renderDashboard(
export async function resetScenes(spies: jest.SpyInstance[] = []) {
await jest.runOnlyPendingTimersAsync();
jest.useRealTimers();
getMock.mockClear();
spies.forEach((spy) => spy.mockClear());
cleanup();
}
@@ -1,4 +1,6 @@
import { config } from '@grafana/runtime';
import { config, setBackendSrv } from '@grafana/runtime';
import { setupMockServer } from '@grafana/test-utils/server';
import { backendSrv } from 'app/core/services/backend_srv';
import { DashboardScene } from 'app/features/dashboard-scene/scene/DashboardScene';
import { ScopesService } from '../ScopesService';
@@ -10,18 +12,20 @@ import {
expectScopesSelectorClosed,
expectScopesSelectorDisabled,
} from './utils/assertions';
import { getDatasource, getInstanceSettings, getMock } from './utils/mocks';
import { getDatasource, getInstanceSettings } from './utils/mocks';
import { renderDashboard, resetScenes } from './utils/render';
jest.mock('@grafana/runtime', () => ({
__esModule: true,
...jest.requireActual('@grafana/runtime'),
useChromeHeaderHeight: jest.fn(),
getBackendSrv: () => ({ get: getMock }),
getDataSourceSrv: () => ({ get: getDatasource, getInstanceSettings }),
usePluginLinks: jest.fn().mockReturnValue({ links: [] }),
}));
setBackendSrv(backendSrv);
setupMockServer();
describe('View mode', () => {
let dashboardScene: DashboardScene;
let scopesService: ScopesService;
+2
View File
@@ -4,6 +4,7 @@ import { Middleware } from 'redux';
import { allMiddleware as allApiClientMiddleware } from '@grafana/api-clients/rtkq';
import { legacyAPI } from 'app/api/clients/legacy';
import { scopeAPIv0alpha1 } from 'app/api/clients/scope/v0alpha1';
import { browseDashboardsAPI } from 'app/features/browse-dashboards/api/browseDashboardsAPI';
import { publicDashboardApi } from 'app/features/dashboard/api/publicDashboardApi';
import { StoreState } from 'app/types/store';
@@ -40,6 +41,7 @@ export function configureStore(initialState?: Partial<StoreState>) {
publicDashboardApi.middleware,
browseDashboardsAPI.middleware,
legacyAPI.middleware,
scopeAPIv0alpha1.middleware,
...allApiClientMiddleware,
...extraMiddleware
),