Compare commits
15 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| afbb304ff7 | |||
| e7625186af | |||
| 75b2c905cd | |||
| 45fc95cfc9 | |||
| 9c3cdd4814 | |||
| 2dad8b7b5b | |||
| 9a831ab4e1 | |||
| 759035a465 | |||
| 6e155523a3 | |||
| 5c0ee2d746 | |||
| 0c6b97bee2 | |||
| 4c79775b57 | |||
| e088c9aac9 | |||
| 7182511bcf | |||
| 3023a72175 |
@@ -0,0 +1,20 @@
|
||||
# Plugins App
|
||||
|
||||
API documentation is available at http://localhost:3000/swagger?api=plugins.grafana.app-v0alpha1
|
||||
|
||||
## Codegen
|
||||
|
||||
- Go: `make generate`
|
||||
- Frontend: Follow instructions in this [README](../..//packages/grafana-api-clients/README.md)
|
||||
|
||||
## Plugin sync
|
||||
|
||||
The plugin sync pushes the plugins loaded from disk to the plugins API.
|
||||
|
||||
To enable, add these feature toggles in your `custom.ini`:
|
||||
|
||||
```ini
|
||||
[feature_toggles]
|
||||
pluginInstallAPISync = true
|
||||
pluginStoreServiceLoading = true
|
||||
```
|
||||
@@ -98,7 +98,7 @@ You can share dashboards in the following ways:
|
||||
- [As a report](#schedule-a-report)
|
||||
- [As a snapshot](#share-a-snapshot)
|
||||
- [As a PDF export](#export-a-dashboard-as-pdf)
|
||||
- [As a JSON file export](#export-a-dashboard-as-json)
|
||||
- [As a JSON file export](#export-a-dashboard-as-code)
|
||||
- [As an image export](#export-a-dashboard-as-an-image)
|
||||
|
||||
When you share a dashboard externally as a link or by email, those dashboards are included in a list of your shared dashboards. To view the list and manage these dashboards, navigate to **Dashboards > Shared dashboards**.
|
||||
|
||||
@@ -10,7 +10,7 @@ const NUM_NESTED_DASHBOARDS = 60;
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/TestDashboard.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ test.use({
|
||||
scenes: true,
|
||||
sharingDashboardImage: true, // Enable the export image feature
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/DataLinkWithoutSlugTest.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/DashboardLiveTest.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardScene: false, // this test is for the old sharing modal only used when scenes is turned off
|
||||
},
|
||||
});
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardScene: false, // this test is for the old sharing modal only used when scenes is turned off
|
||||
},
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ test.use({
|
||||
featureToggles: {
|
||||
scenes: true,
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -4,7 +4,7 @@ test.use({
|
||||
featureToggles: {
|
||||
scenes: true,
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ test.use({
|
||||
featureToggles: {
|
||||
scenes: true,
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ test.use({
|
||||
timezoneId: 'Pacific/Easter',
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -8,7 +8,7 @@ const TIMEZONE_DASHBOARD_UID = 'd41dbaa2-a39e-4536-ab2b-caca52f1a9c8';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ test.use({
|
||||
},
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -3,7 +3,7 @@ import { test, expect } from '@grafana/plugin-e2e';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'edediimbjhdz4b/a-tall-dashboard';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ import testDashboard from '../dashboards/TestDashboard.json';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = '-Y-tnEDWk/templating-nested-template-variables';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Test variable output';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -53,7 +53,7 @@ async function assertPreviewValues(
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Test variable output';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ async function assertPreviewValues(
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Templating - Nested Template Variables';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ const DASHBOARD_NAME = 'Test variable output';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'WVpf2jp7z/repeating-a-panel-horizontally';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'OY8Ghjt7k/repeating-a-panel-vertically';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = 'dtpl2Ctnk/repeating-an-empty-row';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const PAGE_UNDER_TEST = '-Y-tnEDWk/templating-nested-template-variables';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ const DASHBOARD_UID = 'ZqZnVvFZz';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardScene: false, // this test is for the old sharing modal only used when scenes is turned off
|
||||
},
|
||||
});
|
||||
|
||||
@@ -5,7 +5,7 @@ const DASHBOARD_UID = 'yBCC3aKGk';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ const PAGE_UNDER_TEST = 'AejrN1AMz';
|
||||
test.use({
|
||||
featureToggles: {
|
||||
kubernetesDashboards: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
kubernetesDashboardsV2: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
dashboardNewLayouts: process.env.FORCE_V2_DASHBOARDS_API === 'true',
|
||||
},
|
||||
});
|
||||
|
||||
|
||||
@@ -2,18 +2,16 @@ import { Locator } from '@playwright/test';
|
||||
|
||||
import { test, expect } from '@grafana/plugin-e2e';
|
||||
|
||||
import { setVisualization } from './vizpicker-utils';
|
||||
|
||||
test.use({
|
||||
featureToggles: {
|
||||
canvasPanelPanZoom: true,
|
||||
},
|
||||
});
|
||||
test.describe('Canvas Panel - Scene Tests', () => {
|
||||
test.beforeEach(async ({ page, gotoDashboardPage, selectors }) => {
|
||||
test.beforeEach(async ({ page, gotoDashboardPage }) => {
|
||||
const dashboardPage = await gotoDashboardPage({});
|
||||
const panelEditPage = await dashboardPage.addPanel();
|
||||
await setVisualization(panelEditPage, 'Canvas', selectors);
|
||||
await panelEditPage.setVisualization('Canvas');
|
||||
|
||||
// Wait for canvas panel to load
|
||||
await page.waitForSelector('[data-testid="canvas-scene-pan-zoom"]', { timeout: 10000 });
|
||||
|
||||
@@ -1,24 +0,0 @@
|
||||
import { expect, E2ESelectorGroups, PanelEditPage } from '@grafana/plugin-e2e';
|
||||
|
||||
// this replaces the panelEditPage.setVisualization method used previously in tests, since it
|
||||
// does not know how to use the updated 12.4 viz picker UI to set the visualization
|
||||
export const setVisualization = async (panelEditPage: PanelEditPage, vizName: string, selectors: E2ESelectorGroups) => {
|
||||
const vizPicker = panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.toggleVizPicker);
|
||||
await expect(vizPicker, '"Change" button should be visible').toBeVisible();
|
||||
await vizPicker.click();
|
||||
|
||||
const allVizTabBtn = panelEditPage.getByGrafanaSelector(selectors.components.Tab.title('All visualizations'));
|
||||
await expect(allVizTabBtn, '"All visualiations" button should be visible').toBeVisible();
|
||||
await allVizTabBtn.click();
|
||||
|
||||
const vizItem = panelEditPage.getByGrafanaSelector(selectors.components.PluginVisualization.item(vizName));
|
||||
await expect(vizItem, `"${vizName}" item should be visible`).toBeVisible();
|
||||
await vizItem.scrollIntoViewIfNeeded();
|
||||
await vizItem.click();
|
||||
|
||||
await expect(vizPicker, '"Change" button should be visible again').toBeVisible();
|
||||
await expect(
|
||||
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
|
||||
'Panel header should have the new viz type name'
|
||||
).toHaveText(vizName);
|
||||
};
|
||||
+4
-5
@@ -1,6 +1,5 @@
|
||||
import { expect, test } from '@grafana/plugin-e2e';
|
||||
|
||||
import { setVisualization } from '../../../panels-suite/vizpicker-utils';
|
||||
import { formatExpectError } from '../errors';
|
||||
import { successfulDataQuery } from '../mocks/queries';
|
||||
|
||||
@@ -25,10 +24,10 @@ test.describe(
|
||||
).toContainText(['Field', 'Max', 'Mean', 'Last']);
|
||||
});
|
||||
|
||||
test('table panel data assertions', async ({ panelEditPage, selectors }) => {
|
||||
test('table panel data assertions', async ({ panelEditPage }) => {
|
||||
await panelEditPage.mockQueryDataResponse(successfulDataQuery, 200);
|
||||
await panelEditPage.datasource.set('gdev-testdata');
|
||||
await setVisualization(panelEditPage, 'Table', selectors);
|
||||
await panelEditPage.setVisualization('Table');
|
||||
await panelEditPage.refreshPanel();
|
||||
await expect(
|
||||
panelEditPage.panel.locator,
|
||||
@@ -44,10 +43,10 @@ test.describe(
|
||||
).toContainText(['val1', 'val2', 'val3', 'val4']);
|
||||
});
|
||||
|
||||
test('timeseries panel - table view assertions', async ({ panelEditPage, selectors }) => {
|
||||
test('timeseries panel - table view assertions', async ({ panelEditPage }) => {
|
||||
await panelEditPage.mockQueryDataResponse(successfulDataQuery, 200);
|
||||
await panelEditPage.datasource.set('gdev-testdata');
|
||||
await setVisualization(panelEditPage, 'Time series', selectors);
|
||||
await panelEditPage.setVisualization('Time series');
|
||||
await panelEditPage.refreshPanel();
|
||||
await panelEditPage.toggleTableView();
|
||||
await expect(
|
||||
|
||||
+25
-26
@@ -1,6 +1,5 @@
|
||||
import { expect, test } from '@grafana/plugin-e2e';
|
||||
|
||||
import { setVisualization } from '../../../panels-suite/vizpicker-utils';
|
||||
import { formatExpectError } from '../errors';
|
||||
import { successfulDataQuery } from '../mocks/queries';
|
||||
import { scenarios } from '../mocks/resources';
|
||||
@@ -54,10 +53,10 @@ test.describe(
|
||||
).toHaveText(scenarios.map((s) => s.name));
|
||||
});
|
||||
|
||||
test('mocked query data response', async ({ panelEditPage, page, selectors }) => {
|
||||
test('mocked query data response', async ({ panelEditPage, page }) => {
|
||||
await panelEditPage.mockQueryDataResponse(successfulDataQuery, 200);
|
||||
await panelEditPage.datasource.set('gdev-testdata');
|
||||
await setVisualization(panelEditPage, TABLE_VIZ_NAME, selectors);
|
||||
await panelEditPage.setVisualization(TABLE_VIZ_NAME);
|
||||
await panelEditPage.refreshPanel();
|
||||
await expect(
|
||||
panelEditPage.panel.getErrorIcon(),
|
||||
@@ -76,7 +75,7 @@ test.describe(
|
||||
selectors,
|
||||
page,
|
||||
}) => {
|
||||
await setVisualization(panelEditPage, TABLE_VIZ_NAME, selectors);
|
||||
await panelEditPage.setVisualization(TABLE_VIZ_NAME);
|
||||
await expect(
|
||||
panelEditPage.getByGrafanaSelector(selectors.components.PanelEditor.OptionsPane.header),
|
||||
formatExpectError('Expected panel visualization to be set to table')
|
||||
@@ -93,8 +92,8 @@ test.describe(
|
||||
).toBeVisible();
|
||||
});
|
||||
|
||||
test('Select time zone in timezone picker', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('Select time zone in timezone picker', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = await panelEditPage.getCustomOptions('Axis');
|
||||
const timeZonePicker = axisOptions.getSelect('Time zone');
|
||||
|
||||
@@ -102,8 +101,8 @@ test.describe(
|
||||
await expect(timeZonePicker).toHaveSelected('Europe/Stockholm');
|
||||
});
|
||||
|
||||
test('select unit in unit picker', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('select unit in unit picker', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const standardOptions = panelEditPage.getStandardOptions();
|
||||
const unitPicker = standardOptions.getUnitPicker('Unit');
|
||||
|
||||
@@ -112,8 +111,8 @@ test.describe(
|
||||
await expect(unitPicker).toHaveSelected('Pixels');
|
||||
});
|
||||
|
||||
test('enter value in number input', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('enter value in number input', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const lineWith = axisOptions.getNumberInput('Soft min');
|
||||
|
||||
@@ -122,8 +121,8 @@ test.describe(
|
||||
await expect(lineWith).toHaveValue('10');
|
||||
});
|
||||
|
||||
test('enter value in slider', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('enter value in slider', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const graphOptions = panelEditPage.getCustomOptions('Graph styles');
|
||||
const lineWidth = graphOptions.getSliderInput('Line width');
|
||||
|
||||
@@ -132,8 +131,8 @@ test.describe(
|
||||
await expect(lineWidth).toHaveValue('10');
|
||||
});
|
||||
|
||||
test('select value in single value select', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('select value in single value select', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const standardOptions = panelEditPage.getStandardOptions();
|
||||
const colorSchemeSelect = standardOptions.getSelect('Color scheme');
|
||||
|
||||
@@ -141,8 +140,8 @@ test.describe(
|
||||
await expect(colorSchemeSelect).toHaveSelected('Classic palette');
|
||||
});
|
||||
|
||||
test('clear input', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('clear input', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const panelOptions = panelEditPage.getPanelOptions();
|
||||
const title = panelOptions.getTextInput('Title');
|
||||
|
||||
@@ -151,8 +150,8 @@ test.describe(
|
||||
await expect(title).toHaveValue('');
|
||||
});
|
||||
|
||||
test('enter value in input', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('enter value in input', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const panelOptions = panelEditPage.getPanelOptions();
|
||||
const description = panelOptions.getTextInput('Description');
|
||||
|
||||
@@ -161,8 +160,8 @@ test.describe(
|
||||
await expect(description).toHaveValue('This is a panel');
|
||||
});
|
||||
|
||||
test('unchecking switch', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('unchecking switch', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const showBorder = axisOptions.getSwitch('Show border');
|
||||
|
||||
@@ -174,8 +173,8 @@ test.describe(
|
||||
await expect(showBorder).toBeChecked({ checked: false });
|
||||
});
|
||||
|
||||
test('checking switch', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('checking switch', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const showBorder = axisOptions.getSwitch('Show border');
|
||||
|
||||
@@ -184,8 +183,8 @@ test.describe(
|
||||
await expect(showBorder).toBeChecked();
|
||||
});
|
||||
|
||||
test('re-selecting value in radio button group', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('re-selecting value in radio button group', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const placement = axisOptions.getRadioGroup('Placement');
|
||||
|
||||
@@ -196,8 +195,8 @@ test.describe(
|
||||
await expect(placement).toHaveChecked('Auto');
|
||||
});
|
||||
|
||||
test('selecting value in radio button group', async ({ panelEditPage, selectors }) => {
|
||||
await setVisualization(panelEditPage, TIME_SERIES_VIZ_NAME, selectors);
|
||||
test('selecting value in radio button group', async ({ panelEditPage }) => {
|
||||
await panelEditPage.setVisualization(TIME_SERIES_VIZ_NAME);
|
||||
const axisOptions = panelEditPage.getCustomOptions('Axis');
|
||||
const placement = axisOptions.getRadioGroup('Placement');
|
||||
|
||||
|
||||
+8
@@ -285,6 +285,10 @@ const injectedRtkApi = api
|
||||
query: (queryArg) => ({ url: `/snapshots/delete/${queryArg.deleteKey}`, method: 'DELETE' }),
|
||||
invalidatesTags: ['Snapshot'],
|
||||
}),
|
||||
getSnapshotSettings: build.query<GetSnapshotSettingsApiResponse, GetSnapshotSettingsApiArg>({
|
||||
query: () => ({ url: `/snapshots/settings` }),
|
||||
providesTags: ['Snapshot'],
|
||||
}),
|
||||
getSnapshot: build.query<GetSnapshotApiResponse, GetSnapshotApiArg>({
|
||||
query: (queryArg) => ({
|
||||
url: `/snapshots/${queryArg.name}`,
|
||||
@@ -742,6 +746,8 @@ export type DeleteWithKeyApiArg = {
|
||||
/** unique key returned in create */
|
||||
deleteKey: string;
|
||||
};
|
||||
export type GetSnapshotSettingsApiResponse = /** status 200 undefined */ any;
|
||||
export type GetSnapshotSettingsApiArg = void;
|
||||
export type GetSnapshotApiResponse = /** status 200 OK */ Snapshot;
|
||||
export type GetSnapshotApiArg = {
|
||||
/** name of the Snapshot */
|
||||
@@ -1273,6 +1279,8 @@ export const {
|
||||
useLazyListSnapshotQuery,
|
||||
useCreateSnapshotMutation,
|
||||
useDeleteWithKeyMutation,
|
||||
useGetSnapshotSettingsQuery,
|
||||
useLazyGetSnapshotSettingsQuery,
|
||||
useGetSnapshotQuery,
|
||||
useLazyGetSnapshotQuery,
|
||||
useDeleteSnapshotMutation,
|
||||
|
||||
+4
-4
@@ -356,10 +356,6 @@ export interface FeatureToggles {
|
||||
*/
|
||||
dashboardNewLayouts?: boolean;
|
||||
/**
|
||||
* Use the v2 kubernetes API in the frontend for dashboards
|
||||
*/
|
||||
kubernetesDashboardsV2?: boolean;
|
||||
/**
|
||||
* Enables undo/redo in dynamic dashboards
|
||||
*/
|
||||
dashboardUndoRedo?: boolean;
|
||||
@@ -421,6 +417,10 @@ export interface FeatureToggles {
|
||||
*/
|
||||
jitterAlertRulesWithinGroups?: boolean;
|
||||
/**
|
||||
* Enable audit logging with Kubernetes under app platform
|
||||
*/
|
||||
auditLoggingAppPlatform?: boolean;
|
||||
/**
|
||||
* Enable the secrets management API and services under app platform
|
||||
*/
|
||||
secretsManagementAppPlatform?: boolean;
|
||||
|
||||
+6
-2
@@ -48,7 +48,7 @@ describe('MetricsModal', () => {
|
||||
operations: [],
|
||||
};
|
||||
|
||||
setup(query, ['with-labels'], true);
|
||||
setup(query, ['with-labels']);
|
||||
await waitFor(() => {
|
||||
expect(screen.getByText('with-labels')).toBeInTheDocument();
|
||||
});
|
||||
@@ -220,6 +220,10 @@ function createDatasource(withLabels?: boolean) {
|
||||
// display different results if their labels are selected in the PromVisualQuery
|
||||
if (withLabels) {
|
||||
languageProvider.queryMetricsMetadata = jest.fn().mockResolvedValue({
|
||||
ALERTS: {
|
||||
type: 'gauge',
|
||||
help: 'alerts help text',
|
||||
},
|
||||
'with-labels': {
|
||||
type: 'with-labels-type',
|
||||
help: 'with-labels-help',
|
||||
@@ -297,7 +301,7 @@ function createProps(query: PromVisualQuery, datasource: PrometheusDatasource, m
|
||||
};
|
||||
}
|
||||
|
||||
function setup(query: PromVisualQuery, metrics: string[], withlabels?: boolean) {
|
||||
function setup(query: PromVisualQuery, metrics: string[]) {
|
||||
const withLabels: boolean = query.labels.length > 0;
|
||||
const datasource = createDatasource(withLabels);
|
||||
const props = createProps(query, datasource, metrics);
|
||||
|
||||
+1
-1
@@ -138,7 +138,7 @@ const MetricsModalContent = (props: MetricsModalProps) => {
|
||||
|
||||
export const MetricsModal = (props: MetricsModalProps) => {
|
||||
return (
|
||||
<MetricsModalContextProvider languageProvider={props.datasource.languageProvider}>
|
||||
<MetricsModalContextProvider languageProvider={props.datasource.languageProvider} timeRange={props.timeRange}>
|
||||
<MetricsModalContent {...props} />
|
||||
</MetricsModalContextProvider>
|
||||
);
|
||||
|
||||
+20
-4
@@ -4,6 +4,7 @@ import { ReactNode } from 'react';
|
||||
import { TimeRange } from '@grafana/data';
|
||||
|
||||
import { PrometheusLanguageProviderInterface } from '../../../language_provider';
|
||||
import { getMockTimeRange } from '../../../test/mocks/datasource';
|
||||
|
||||
import { DEFAULT_RESULTS_PER_PAGE, MetricsModalContextProvider, useMetricsModal } from './MetricsModalContext';
|
||||
import { generateMetricData } from './helpers';
|
||||
@@ -25,7 +26,9 @@ const mockLanguageProvider: PrometheusLanguageProviderInterface = {
|
||||
// Helper to create wrapper component
|
||||
const createWrapper = (languageProvider = mockLanguageProvider) => {
|
||||
return ({ children }: { children: ReactNode }) => (
|
||||
<MetricsModalContextProvider languageProvider={languageProvider}>{children}</MetricsModalContextProvider>
|
||||
<MetricsModalContextProvider languageProvider={languageProvider} timeRange={getMockTimeRange()}>
|
||||
{children}
|
||||
</MetricsModalContextProvider>
|
||||
);
|
||||
};
|
||||
|
||||
@@ -167,6 +170,7 @@ describe('MetricsModalContext', () => {
|
||||
|
||||
it('should handle empty metadata response', async () => {
|
||||
(mockLanguageProvider.queryMetricsMetadata as jest.Mock).mockResolvedValue({});
|
||||
(mockLanguageProvider.queryLabelValues as jest.Mock).mockResolvedValue(['metric1', 'metric2']);
|
||||
|
||||
const { result } = renderHook(() => useMetricsModal(), {
|
||||
wrapper: createWrapper(),
|
||||
@@ -176,7 +180,18 @@ describe('MetricsModalContext', () => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.filteredMetricsData).toEqual([]);
|
||||
expect(result.current.filteredMetricsData).toEqual([
|
||||
{
|
||||
value: 'metric1',
|
||||
type: 'counter',
|
||||
description: 'Test metric',
|
||||
},
|
||||
{
|
||||
value: 'metric2',
|
||||
type: 'counter',
|
||||
description: 'Test metric',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should handle metadata fetch error', async () => {
|
||||
@@ -239,6 +254,7 @@ describe('MetricsModalContext', () => {
|
||||
}));
|
||||
|
||||
(mockLanguageProvider.queryMetricsMetadata as jest.Mock).mockResolvedValue({
|
||||
ALERTS: { type: 'gauge', help: 'Test alerts help' },
|
||||
test_metric: { type: 'counter', help: 'Test metric' },
|
||||
});
|
||||
|
||||
@@ -250,7 +266,7 @@ describe('MetricsModalContext', () => {
|
||||
expect(result.current.isLoading).toBe(false);
|
||||
});
|
||||
|
||||
expect(result.current.filteredMetricsData).toHaveLength(1);
|
||||
expect(result.current.filteredMetricsData).toHaveLength(2);
|
||||
expect(result.current.selectedTypes).toEqual([]);
|
||||
});
|
||||
|
||||
@@ -318,7 +334,7 @@ describe('MetricsModalContext', () => {
|
||||
};
|
||||
|
||||
const { getByTestId } = render(
|
||||
<MetricsModalContextProvider languageProvider={mockLanguageProvider}>
|
||||
<MetricsModalContextProvider languageProvider={mockLanguageProvider} timeRange={getMockTimeRange()}>
|
||||
<TestComponent />
|
||||
</MetricsModalContextProvider>
|
||||
);
|
||||
|
||||
+13
-3
@@ -52,11 +52,13 @@ const MetricsModalContext = createContext<MetricsModalContextValue | undefined>(
|
||||
|
||||
type MetricsModalContextProviderProps = {
|
||||
languageProvider: PrometheusLanguageProviderInterface;
|
||||
timeRange: TimeRange;
|
||||
};
|
||||
|
||||
export const MetricsModalContextProvider: FC<PropsWithChildren<MetricsModalContextProviderProps>> = ({
|
||||
children,
|
||||
languageProvider,
|
||||
timeRange,
|
||||
}) => {
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [metricsData, setMetricsData] = useState<MetricsData>([]);
|
||||
@@ -111,8 +113,16 @@ export const MetricsModalContextProvider: FC<PropsWithChildren<MetricsModalConte
|
||||
setIsLoading(true);
|
||||
const metadata = await languageProvider.queryMetricsMetadata(PROMETHEUS_QUERY_BUILDER_MAX_RESULTS);
|
||||
|
||||
if (Object.keys(metadata).length === 0) {
|
||||
setMetricsData([]);
|
||||
// We receive ALERTS metadata in any case
|
||||
if (Object.keys(metadata).length <= 1) {
|
||||
const fetchedMetrics = await languageProvider.queryLabelValues(
|
||||
timeRange,
|
||||
METRIC_LABEL,
|
||||
undefined,
|
||||
PROMETHEUS_QUERY_BUILDER_MAX_RESULTS
|
||||
);
|
||||
const processedData = fetchedMetrics.map((m) => generateMetricData(m, languageProvider));
|
||||
setMetricsData(processedData);
|
||||
} else {
|
||||
const processedData = Object.keys(metadata).map((m) => generateMetricData(m, languageProvider));
|
||||
setMetricsData(processedData);
|
||||
@@ -122,7 +132,7 @@ export const MetricsModalContextProvider: FC<PropsWithChildren<MetricsModalConte
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
}, [languageProvider]);
|
||||
}, [languageProvider, timeRange]);
|
||||
|
||||
const debouncedBackendSearch = useMemo(
|
||||
() =>
|
||||
|
||||
@@ -131,7 +131,6 @@ export function Drawer({
|
||||
>
|
||||
<FocusScope restoreFocus contain autoFocus>
|
||||
<div
|
||||
data-grafana-portal-container
|
||||
aria-label={
|
||||
typeof title === 'string'
|
||||
? selectors.components.Drawer.General.title(title)
|
||||
|
||||
@@ -76,24 +76,4 @@ return (
|
||||
);
|
||||
```
|
||||
|
||||
### Usage inside Drawer
|
||||
|
||||
Toggletip automatically detects when it's inside a Drawer (or other focus-trapped container with the `data-grafana-portal-container` attribute) and adjusts its behavior accordingly. No additional configuration is needed:
|
||||
|
||||
```tsx
|
||||
<Drawer title="Settings" onClose={onClose}>
|
||||
<Toggletip content={<Input placeholder="Type here..." />}>
|
||||
<Button>Open Toggletip</Button>
|
||||
</Toggletip>
|
||||
</Drawer>
|
||||
```
|
||||
|
||||
When auto-detected inside a focus-trapped container:
|
||||
|
||||
- The Toggletip content renders inside the Drawer's DOM tree
|
||||
- Focus management defers to the parent container's focus trap
|
||||
- Interactive elements like inputs work correctly
|
||||
|
||||
If you need to override auto-detection or specify a custom container, use the `portalRoot` prop.
|
||||
|
||||
<ArgTypes of={Toggletip} />
|
||||
|
||||
@@ -1,10 +1,6 @@
|
||||
import { Meta, StoryFn } from '@storybook/react';
|
||||
import { useState } from 'react';
|
||||
|
||||
import { Button } from '../Button/Button';
|
||||
import { Drawer } from '../Drawer/Drawer';
|
||||
import { Field } from '../Forms/Field';
|
||||
import { Input } from '../Input/Input';
|
||||
import { ScrollContainer } from '../ScrollContainer/ScrollContainer';
|
||||
import mdx from '../Toggletip/Toggletip.mdx';
|
||||
|
||||
@@ -137,46 +133,4 @@ LongContent.parameters = {
|
||||
},
|
||||
};
|
||||
|
||||
export const InsideDrawer: StoryFn<typeof Toggletip> = () => {
|
||||
const [isDrawerOpen, setIsDrawerOpen] = useState(false);
|
||||
|
||||
return (
|
||||
<>
|
||||
<Button onClick={() => setIsDrawerOpen(true)}>Open Drawer</Button>
|
||||
{isDrawerOpen && (
|
||||
<Drawer title="Drawer with Toggletip" onClose={() => setIsDrawerOpen(false)}>
|
||||
<p style={{ marginBottom: '16px' }}>
|
||||
Toggletip automatically detects when it's inside a Drawer and renders its content within the
|
||||
Drawer's DOM, allowing focus to work correctly. No manual configuration needed!
|
||||
</p>
|
||||
<Toggletip
|
||||
title="Interactive Form"
|
||||
content={
|
||||
<div style={{ display: 'flex', flexDirection: 'column', gap: '8px' }}>
|
||||
<Field label="Name">
|
||||
<Input placeholder="Enter your name" />
|
||||
</Field>
|
||||
<Button variant="primary" size="sm">
|
||||
Submit
|
||||
</Button>
|
||||
</div>
|
||||
}
|
||||
footer="Focus works correctly - auto-detected!"
|
||||
placement="bottom-start"
|
||||
>
|
||||
<Button>Click to show Toggletip</Button>
|
||||
</Toggletip>
|
||||
</Drawer>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
};
|
||||
|
||||
InsideDrawer.parameters = {
|
||||
controls: {
|
||||
hideNoControlsWarning: true,
|
||||
exclude: ['title', 'content', 'footer', 'children', 'placement', 'theme', 'closeButton', 'portalRoot'],
|
||||
},
|
||||
};
|
||||
|
||||
export default meta;
|
||||
|
||||
@@ -11,7 +11,7 @@ import {
|
||||
useInteractions,
|
||||
} from '@floating-ui/react';
|
||||
import { Placement } from '@popperjs/core';
|
||||
import { memo, cloneElement, isValidElement, useRef, useState, useMemo, type JSX } from 'react';
|
||||
import { memo, cloneElement, isValidElement, useRef, useState, type JSX } from 'react';
|
||||
|
||||
import { GrafanaTheme2 } from '@grafana/data';
|
||||
import { t } from '@grafana/i18n';
|
||||
@@ -47,11 +47,6 @@ export interface ToggletipProps {
|
||||
show?: boolean;
|
||||
/** Callback function to be called when the toggletip is opened */
|
||||
onOpen?: () => void;
|
||||
/** Optional root element for the portal. When Toggletip is inside a focus-trapped container like Drawer,
|
||||
* the portal root is auto-detected via the `data-grafana-portal-container` attribute. Use this prop
|
||||
* to override auto-detection or specify a custom container. When inside a focus-trapped container,
|
||||
* the Toggletip disables its own modal focus trap, deferring focus management to the parent. */
|
||||
portalRoot?: HTMLElement;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -72,7 +67,6 @@ export const Toggletip = memo(
|
||||
fitContent = false,
|
||||
onOpen,
|
||||
show,
|
||||
portalRoot,
|
||||
}: ToggletipProps) => {
|
||||
const arrowRef = useRef(null);
|
||||
const grafanaTheme = useTheme2();
|
||||
@@ -116,30 +110,16 @@ export const Toggletip = memo(
|
||||
|
||||
const { getReferenceProps, getFloatingProps } = useInteractions([dismiss, click]);
|
||||
|
||||
// Auto-detect portal container from reference element's ancestors
|
||||
// This allows Toggletip to work automatically inside Drawer and other focus-trapped containers
|
||||
const [referenceElement, setReferenceElement] = useState<Element | null>(null);
|
||||
const autoDetectedPortalRoot = useMemo(() => {
|
||||
if (portalRoot) {
|
||||
return portalRoot;
|
||||
}
|
||||
const container = referenceElement?.closest('[data-grafana-portal-container]');
|
||||
return container instanceof HTMLElement ? container : undefined;
|
||||
}, [portalRoot, referenceElement]);
|
||||
|
||||
return (
|
||||
<>
|
||||
{cloneElement(children, {
|
||||
ref: (node: Element | null) => {
|
||||
refs.setReference(node);
|
||||
setReferenceElement(node);
|
||||
},
|
||||
ref: refs.setReference,
|
||||
tabIndex: 0,
|
||||
'aria-expanded': isOpen,
|
||||
...getReferenceProps(),
|
||||
})}
|
||||
{isOpen && (
|
||||
<Portal root={autoDetectedPortalRoot}>
|
||||
<Portal>
|
||||
<FloatingFocusManager context={context} modal={true}>
|
||||
<div
|
||||
data-testid="toggletip-content"
|
||||
|
||||
@@ -0,0 +1,55 @@
|
||||
package auditing
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"time"
|
||||
)
|
||||
|
||||
// Sinkable is a log entry abstraction that can be sent to an audit log sink through the different implementing methods.
|
||||
type Sinkable interface {
|
||||
json.Marshaler
|
||||
KVPairs() []any
|
||||
Time() time.Time
|
||||
}
|
||||
|
||||
// Logger specifies the contract for a specific audit logger.
|
||||
type Logger interface {
|
||||
Log(entry Sinkable) error
|
||||
Close() error
|
||||
Type() string
|
||||
}
|
||||
|
||||
// Implementation inspired by https://github.com/grafana/grafana-app-sdk/blob/main/logging/logger.go
|
||||
type loggerContextKey struct{}
|
||||
|
||||
var (
|
||||
// DefaultLogger is the default Logger if one hasn't been provided in the context.
|
||||
// You may use this to add arbitrary audit logging outside of an API request lifecycle.
|
||||
DefaultLogger Logger = &NoopLogger{}
|
||||
|
||||
contextKey = loggerContextKey{}
|
||||
)
|
||||
|
||||
// FromContext returns the Logger set in the context with Context(), or the DefaultLogger if no Logger is set in the context.
|
||||
// If DefaultLogger is nil, it returns a *NoopLogger so that the return is always valid to call methods on without nil-checking.
|
||||
// You may use this to add arbitrary audit logging outside of an API request lifecycle.
|
||||
func FromContext(ctx context.Context) Logger {
|
||||
if l := ctx.Value(contextKey); l != nil {
|
||||
if logger, ok := l.(Logger); ok {
|
||||
return logger
|
||||
}
|
||||
}
|
||||
|
||||
if DefaultLogger != nil {
|
||||
return DefaultLogger
|
||||
}
|
||||
|
||||
return &NoopLogger{}
|
||||
}
|
||||
|
||||
// Context returns a new context built from the provided context with the provided logger in it.
|
||||
// The Logger added with Context() can be retrieved with FromContext()
|
||||
func Context(ctx context.Context, logger Logger) context.Context {
|
||||
return context.WithValue(ctx, contextKey, logger)
|
||||
}
|
||||
@@ -11,9 +11,9 @@ type NoopBackend struct{}
|
||||
|
||||
func ProvideNoopBackend() audit.Backend { return &NoopBackend{} }
|
||||
|
||||
func (b *NoopBackend) ProcessEvents(k8sEvents ...*auditinternal.Event) bool { return false }
|
||||
func (NoopBackend) ProcessEvents(...*auditinternal.Event) bool { return false }
|
||||
|
||||
func (NoopBackend) Run(stopCh <-chan struct{}) error { return nil }
|
||||
func (NoopBackend) Run(<-chan struct{}) error { return nil }
|
||||
|
||||
func (NoopBackend) Shutdown() {}
|
||||
|
||||
@@ -34,3 +34,14 @@ type NoopPolicyRuleEvaluator struct{}
|
||||
func (NoopPolicyRuleEvaluator) EvaluatePolicyRule(authorizer.Attributes) audit.RequestAuditConfig {
|
||||
return audit.RequestAuditConfig{Level: auditinternal.LevelNone}
|
||||
}
|
||||
|
||||
// NoopLogger is a no-op implementation of Logger
|
||||
type NoopLogger struct{}
|
||||
|
||||
func ProvideNoopLogger() Logger { return &NoopLogger{} }
|
||||
|
||||
func (NoopLogger) Type() string { return "noop" }
|
||||
|
||||
func (NoopLogger) Log(Sinkable) error { return nil }
|
||||
|
||||
func (NoopLogger) Close() error { return nil }
|
||||
|
||||
@@ -46,14 +46,23 @@ func (defaultGrafanaPolicyRuleEvaluator) EvaluatePolicyRule(attrs authorizer.Att
|
||||
}
|
||||
}
|
||||
|
||||
// Logging the response object allows us to get the resource name for create requests.
|
||||
level := auditinternal.LevelMetadata
|
||||
if attrs.GetVerb() == utils.VerbCreate {
|
||||
level = auditinternal.LevelRequestResponse
|
||||
}
|
||||
|
||||
return audit.RequestAuditConfig{
|
||||
Level: auditinternal.LevelMetadata,
|
||||
Level: level,
|
||||
|
||||
// Only log on StageResponseComplete, to avoid noisy logs.
|
||||
OmitStages: []auditinternal.Stage{
|
||||
// Only log on StageResponseComplete
|
||||
auditinternal.StageRequestReceived,
|
||||
auditinternal.StageResponseStarted,
|
||||
auditinternal.StagePanic,
|
||||
},
|
||||
OmitManagedFields: false, // Setting it to true causes extra copying/unmarshalling.
|
||||
|
||||
// Setting it to true causes extra copying/unmarshalling.
|
||||
OmitManagedFields: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,7 +55,7 @@ func TestDefaultGrafanaPolicyRuleEvaluator(t *testing.T) {
|
||||
require.Equal(t, auditinternal.LevelNone, config.Level)
|
||||
})
|
||||
|
||||
t.Run("return audit level metadata for other resource requests", func(t *testing.T) {
|
||||
t.Run("return audit level request+response for create requests", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
attrs := authorizer.AttributesRecord{
|
||||
@@ -67,6 +67,22 @@ func TestDefaultGrafanaPolicyRuleEvaluator(t *testing.T) {
|
||||
},
|
||||
}
|
||||
|
||||
config := evaluator.EvaluatePolicyRule(attrs)
|
||||
require.Equal(t, auditinternal.LevelRequestResponse, config.Level)
|
||||
})
|
||||
|
||||
t.Run("return audit level metadata for other resource requests", func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
attrs := authorizer.AttributesRecord{
|
||||
ResourceRequest: true,
|
||||
Verb: utils.VerbGet,
|
||||
User: &user.DefaultInfo{
|
||||
Name: "test-user",
|
||||
Groups: []string{"test-group"},
|
||||
},
|
||||
}
|
||||
|
||||
config := evaluator.EvaluatePolicyRule(attrs)
|
||||
require.Equal(t, auditinternal.LevelMetadata, config.Level)
|
||||
})
|
||||
|
||||
@@ -8,6 +8,7 @@ import (
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/grafana/grafana/pkg/configprovider"
|
||||
"github.com/prometheus/client_golang/prometheus"
|
||||
apierrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
@@ -62,7 +63,6 @@ import (
|
||||
"github.com/grafana/grafana/pkg/services/quota"
|
||||
"github.com/grafana/grafana/pkg/services/search/sort"
|
||||
"github.com/grafana/grafana/pkg/services/user"
|
||||
"github.com/grafana/grafana/pkg/setting"
|
||||
"github.com/grafana/grafana/pkg/storage/legacysql"
|
||||
"github.com/grafana/grafana/pkg/storage/legacysql/dualwrite"
|
||||
"github.com/grafana/grafana/pkg/storage/unified/apistore"
|
||||
@@ -128,7 +128,6 @@ type DashboardsAPIBuilder struct {
|
||||
}
|
||||
|
||||
func RegisterAPIService(
|
||||
cfg *setting.Cfg,
|
||||
features featuremgmt.FeatureToggles,
|
||||
apiregistration builder.APIRegistrar,
|
||||
dashboardService dashboards.DashboardService,
|
||||
@@ -154,7 +153,14 @@ func RegisterAPIService(
|
||||
publicDashboardService publicdashboards.Service,
|
||||
snapshotService dashboardsnapshots.Service,
|
||||
dashboardActivityChannel live.DashboardActivityChannel,
|
||||
configProvider configprovider.ConfigProvider,
|
||||
) *DashboardsAPIBuilder {
|
||||
cfg, err := configProvider.Get(context.Background())
|
||||
if err != nil {
|
||||
logging.DefaultLogger.Error("failed to load settings configuration instance", "stackId", cfg.StackID, "err", err)
|
||||
return nil
|
||||
}
|
||||
|
||||
dbp := legacysql.NewDatabaseProvider(sql)
|
||||
namespacer := request.GetNamespaceMapper(cfg)
|
||||
legacyDashboardSearcher := legacysearcher.NewDashboardSearchClient(dashStore, sorter)
|
||||
@@ -237,7 +243,7 @@ func NewAPIService(ac authlib.AccessClient, features featuremgmt.FeatureToggles,
|
||||
}
|
||||
|
||||
func (b *DashboardsAPIBuilder) GetGroupVersions() []schema.GroupVersion {
|
||||
if featuremgmt.AnyEnabled(b.features, featuremgmt.FlagDashboardNewLayouts, featuremgmt.FlagKubernetesDashboardsV2) {
|
||||
if featuremgmt.AnyEnabled(b.features, featuremgmt.FlagDashboardNewLayouts) {
|
||||
// If dashboards v2 is enabled, we want to use v2beta1 as the default API version.
|
||||
return []schema.GroupVersion{
|
||||
dashv2beta1.DashboardResourceInfo.GroupVersion(),
|
||||
@@ -747,7 +753,6 @@ func (b *DashboardsAPIBuilder) storageForVersion(
|
||||
ResourceInfo: *snapshots,
|
||||
Service: b.snapshotService,
|
||||
Namespacer: b.namespacer,
|
||||
Options: b.snapshotOptions,
|
||||
}
|
||||
storage[snapshots.StoragePath()] = snapshotLegacyStore
|
||||
storage[snapshots.StoragePath("dashboard")], err = snapshot.NewDashboardREST(dashboards, b.snapshotService)
|
||||
|
||||
@@ -29,6 +29,8 @@ func GetRoutes(service dashboardsnapshots.Service, options dashv0.SnapshotSharin
|
||||
createCmd := defs["github.com/grafana/grafana/apps/dashboard/pkg/apissnapshot/v0alpha1.DashboardCreateCommand"].Schema
|
||||
createExample := `{"dashboard":{"annotations":{"list":[{"name":"Annotations & Alerts","enable":true,"iconColor":"rgba(0, 211, 255, 1)","snapshotData":[],"type":"dashboard","builtIn":1,"hide":true}]},"editable":true,"fiscalYearStartMonth":0,"graphTooltip":0,"id":203,"links":[],"liveNow":false,"panels":[{"datasource":null,"fieldConfig":{"defaults":{"color":{"mode":"palette-classic"},"custom":{"axisBorderShow":false,"axisCenteredZero":false,"axisColorMode":"text","axisLabel":"","axisPlacement":"auto","barAlignment":0,"drawStyle":"line","fillOpacity":43,"gradientMode":"opacity","hideFrom":{"legend":false,"tooltip":false,"viz":false},"insertNulls":false,"lineInterpolation":"smooth","lineWidth":1,"pointSize":5,"scaleDistribution":{"type":"linear"},"showPoints":"auto","spanNulls":false,"stacking":{"group":"A","mode":"none"},"thresholdsStyle":{"mode":"off"}},"mappings":[],"thresholds":{"mode":"absolute","steps":[{"color":"green","value":null},{"color":"red","value":80}]},"unitScale":true},"overrides":[]},"gridPos":{"h":8,"w":12,"x":0,"y":0},"id":1,"options":{"legend":{"calcs":[],"displayMode":"list","placement":"bottom","showLegend":true},"tooltip":{"mode":"single","sort":"none"}},"pluginVersion":"10.4.0-pre","snapshotData":[{"fields":[{"config":{"color":{"mode":"palette-classic"},"custom":{"axisBorderShow":false,"axisCenteredZero":false,"axisColorMode":"text","axisPlacement":"auto","barAlignment":0,"drawStyle":"line","fillOpacity":43,"gradientMode":"opacity","hideFrom":{"legend":false,"tooltip":false,"viz":false},"lineInterpolation":"smooth","lineWidth":1,"pointSize":5,"showPoints":"auto","thresholdsStyle":{"mode":"off"}},"thresholds":{"mode":"absolute","steps":[{"color":"green","value":null},{"color":"red","value":80}]},"unitScale":true},"name":"time","type":"time","values":[1706030536378,1706034856378,1706039176378,1706043496378,1706047816378,1706052136378]},{"config":{"color":{"mode":"palette-classic"},"custom":{"axisBorderShow":false,"axisCenteredZero":false,"axisColor
Mode":"text","axisLabel":"","axisPlacement":"auto","barAlignment":0,"drawStyle":"line","fillOpacity":43,"gradientMode":"opacity","hideFrom":{"legend":false,"tooltip":false,"viz":false},"insertNulls":false,"lineInterpolation":"smooth","lineWidth":1,"pointSize":5,"scaleDistribution":{"type":"linear"},"showPoints":"auto","spanNulls":false,"stacking":{"group":"A","mode":"none"},"thresholdsStyle":{"mode":"off"}},"mappings":[],"thresholds":{"mode":"absolute","steps":[{"color":"green","value":null},{"color":"red","value":80}]},"unitScale":true},"name":"A-series","type":"number","values":[1,20,90,30,50,0]}],"refId":"A"}],"targets":[],"title":"Simple example","type":"timeseries","links":[]}],"refresh":"","schemaVersion":39,"snapshot":{"timestamp":"2024-01-23T23:22:16.377Z"},"tags":[],"templating":{"list":[]},"time":{"from":"2024-01-23T17:22:20.380Z","to":"2024-01-23T23:22:20.380Z","raw":{"from":"now-6h","to":"now"}},"timepicker":{},"timezone":"","title":"simple and small","uid":"b22ec8db-399b-403b-b6c7-b0fb30ccb2a5","version":1,"weekStart":""},"name":"simple and small","expires":86400}`
|
||||
createRsp := defs["github.com/grafana/grafana/apps/dashboard/pkg/apissnapshot/v0alpha1.DashboardCreateResponse"].Schema
|
||||
getSettingsRsp := defs["github.com/grafana/grafana/apps/dashboard/pkg/apissnapshot/v0alpha1.SnapshotSharingOptions"].Schema
|
||||
getSettingsRspExample := `{"snapshotsEnabled":true,"externalSnapshotURL":"https://externalurl.com","externalSnapshotName":"external","externalEnabled":true}`
|
||||
|
||||
return &builder.APIRoutes{
|
||||
Namespace: []builder.APIRouteHandler{
|
||||
@@ -167,5 +169,84 @@ func GetRoutes(service dashboardsnapshots.Service, options dashv0.SnapshotSharin
|
||||
})
|
||||
},
|
||||
},
|
||||
{
|
||||
Path: prefix + "/settings",
|
||||
Spec: &spec3.PathProps{
|
||||
Get: &spec3.Operation{
|
||||
VendorExtensible: spec.VendorExtensible{
|
||||
Extensions: map[string]any{
|
||||
"x-grafana-action": "get",
|
||||
"x-kubernetes-group-version-kind": metav1.GroupVersionKind{
|
||||
Group: dashv0.GROUP,
|
||||
Version: dashv0.VERSION,
|
||||
Kind: "SnapshotSharingOptions",
|
||||
},
|
||||
},
|
||||
},
|
||||
OperationProps: spec3.OperationProps{
|
||||
Tags: tags,
|
||||
OperationId: "getSnapshotSettings",
|
||||
Description: "Get Snapshot sharing settings",
|
||||
Parameters: []*spec3.Parameter{
|
||||
{
|
||||
ParameterProps: spec3.ParameterProps{
|
||||
Name: "namespace",
|
||||
In: "path",
|
||||
Required: true,
|
||||
Example: "default",
|
||||
Description: "workspace",
|
||||
Schema: spec.StringProperty(),
|
||||
},
|
||||
},
|
||||
},
|
||||
Responses: &spec3.Responses{
|
||||
ResponsesProps: spec3.ResponsesProps{
|
||||
StatusCodeResponses: map[int]*spec3.Response{
|
||||
200: {
|
||||
ResponseProps: spec3.ResponseProps{
|
||||
Content: map[string]*spec3.MediaType{
|
||||
"application/json": {
|
||||
MediaTypeProps: spec3.MediaTypeProps{
|
||||
Schema: &getSettingsRsp,
|
||||
Example: getSettingsRspExample,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
Handler: func(w http.ResponseWriter, r *http.Request) {
|
||||
user, err := identity.GetRequester(r.Context())
|
||||
if err != nil {
|
||||
errhttp.Write(r.Context(), err, w)
|
||||
return
|
||||
}
|
||||
wrap := &contextmodel.ReqContext{
|
||||
Context: &web.Context{
|
||||
Req: r,
|
||||
Resp: web.NewResponseWriter(r.Method, w),
|
||||
},
|
||||
}
|
||||
|
||||
vars := mux.Vars(r)
|
||||
info, err := authlib.ParseNamespace(vars["namespace"])
|
||||
if err != nil {
|
||||
wrap.JsonApiErr(http.StatusBadRequest, "expected namespace", nil)
|
||||
return
|
||||
}
|
||||
if info.OrgID != user.GetOrgID() {
|
||||
wrap.JsonApiErr(http.StatusBadRequest,
|
||||
fmt.Sprintf("user orgId does not match namespace (%d != %d)", info.OrgID, user.GetOrgID()), nil)
|
||||
return
|
||||
}
|
||||
|
||||
wrap.JSON(http.StatusOK, options)
|
||||
},
|
||||
},
|
||||
}}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,6 @@ package snapshot
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
|
||||
"k8s.io/apimachinery/pkg/apis/meta/internalversion"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
@@ -29,7 +28,6 @@ type SnapshotLegacyStore struct {
|
||||
ResourceInfo utils.ResourceInfo
|
||||
Service dashboardsnapshots.Service
|
||||
Namespacer request.NamespaceMapper
|
||||
Options dashV0.SnapshotSharingOptions
|
||||
}
|
||||
|
||||
func (s *SnapshotLegacyStore) New() runtime.Object {
|
||||
@@ -117,15 +115,6 @@ func (s *SnapshotLegacyStore) List(ctx context.Context, options *internalversion
|
||||
}
|
||||
|
||||
func (s *SnapshotLegacyStore) Get(ctx context.Context, name string, options *metav1.GetOptions) (runtime.Object, error) {
|
||||
info, err := request.NamespaceInfoFrom(ctx, true)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
err = s.checkEnabled(info.Value)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
query := dashboardsnapshots.GetDashboardSnapshotQuery{
|
||||
Key: name,
|
||||
}
|
||||
@@ -140,10 +129,3 @@ func (s *SnapshotLegacyStore) Get(ctx context.Context, name string, options *met
|
||||
}
|
||||
return nil, s.ResourceInfo.NewNotFound(name)
|
||||
}
|
||||
|
||||
func (s *SnapshotLegacyStore) checkEnabled(ns string) error {
|
||||
if !s.Options.SnapshotsEnabled {
|
||||
return fmt.Errorf("snapshots not enabled")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -131,15 +131,12 @@ func (w *Worker) Process(ctx context.Context, repo repository.Repository, job pr
|
||||
|
||||
func (w *Worker) deleteFiles(ctx context.Context, rw repository.ReaderWriter, progress jobs.JobProgressRecorder, opts provisioning.DeleteJobOptions, paths ...string) error {
|
||||
for _, path := range paths {
|
||||
result := jobs.JobResourceResult{
|
||||
Path: path,
|
||||
Action: repository.FileActionDeleted,
|
||||
}
|
||||
|
||||
progress.SetMessage(ctx, "Deleting "+path)
|
||||
var resultErr error
|
||||
if err := rw.Delete(ctx, path, opts.Ref, "Delete "+path); err != nil {
|
||||
result.Error = fmt.Errorf("deleting file %s: %w", path, err)
|
||||
resultErr = fmt.Errorf("deleting file %s: %w", path, err)
|
||||
}
|
||||
result := jobs.NewJobResourceResultWithoutKind(path, repository.FileActionDeleted, resultErr)
|
||||
progress.Record(ctx, result)
|
||||
if err := progress.TooManyErrors(); err != nil {
|
||||
return err
|
||||
@@ -163,12 +160,6 @@ func (w *Worker) resolveResourcesToPaths(ctx context.Context, rw repository.Read
|
||||
|
||||
resolvedPaths := make([]string, 0, len(resources))
|
||||
for _, resource := range resources {
|
||||
result := jobs.JobResourceResult{
|
||||
Name: resource.Name,
|
||||
Group: resource.Group,
|
||||
Action: repository.FileActionDeleted, // Will be used for deletion later
|
||||
}
|
||||
|
||||
gvk := schema.GroupVersionKind{
|
||||
Group: resource.Group,
|
||||
Kind: resource.Kind,
|
||||
@@ -178,7 +169,8 @@ func (w *Worker) resolveResourcesToPaths(ctx context.Context, rw repository.Read
|
||||
progress.SetMessage(ctx, fmt.Sprintf("Finding path for resource %s/%s/%s", resource.Group, resource.Kind, resource.Name))
|
||||
resourcePath, err := repositoryResources.FindResourcePath(ctx, resource.Name, gvk)
|
||||
if err != nil {
|
||||
result.Error = fmt.Errorf("find path for resource %s/%s/%s: %w", resource.Group, resource.Kind, resource.Name, err)
|
||||
resultErr := fmt.Errorf("find path for resource %s/%s/%s: %w", resource.Group, resource.Kind, resource.Name, err)
|
||||
result := jobs.NewJobResourceResult(resource.Name, resource.Group, resource.Kind, "", repository.FileActionDeleted, resultErr)
|
||||
progress.Record(ctx, result)
|
||||
// Continue with next resource instead of failing fast
|
||||
if err := progress.TooManyErrors(); err != nil {
|
||||
@@ -187,7 +179,6 @@ func (w *Worker) resolveResourcesToPaths(ctx context.Context, rw repository.Read
|
||||
continue
|
||||
}
|
||||
|
||||
result.Path = resourcePath
|
||||
resolvedPaths = append(resolvedPaths, resourcePath)
|
||||
}
|
||||
|
||||
|
||||
@@ -184,10 +184,10 @@ func TestDeleteWorker_ProcessDeleteFilesSuccess(t *testing.T) {
|
||||
mockRepo.On("Delete", mock.Anything, "test/path2", "main", "Delete test/path2").Return(nil)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path1" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "test/path1" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path2" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "test/path2" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
worker := NewWorker(nil, mockWrapFn.Execute, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
|
||||
@@ -224,7 +224,7 @@ func TestDeleteWorker_ProcessDeleteFilesWithError(t *testing.T) {
|
||||
mockRepo.On("Delete", mock.Anything, "test/path1", "main", "Delete test/path1").Return(deleteError)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path1" && result.Action == repository.FileActionDeleted && errors.Is(result.Error, deleteError)
|
||||
return result.Path() == "test/path1" && result.Action() == repository.FileActionDeleted && errors.Is(result.Error(), deleteError)
|
||||
})).Return()
|
||||
mockProgress.On("TooManyErrors").Return(errors.New("too many errors"))
|
||||
|
||||
@@ -263,7 +263,7 @@ func TestDeleteWorker_ProcessWithSyncWorker(t *testing.T) {
|
||||
mockRepo.On("Delete", mock.Anything, "test/path", "", "Delete test/path").Return(nil)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "test/path" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
mockProgress.On("ResetResults").Return()
|
||||
@@ -371,7 +371,7 @@ func TestDeleteWorker_deleteFiles(t *testing.T) {
|
||||
mockRepo.On("Delete", mock.Anything, path, "main", "Delete "+path).Return(tt.deleteResults[i]).Once()
|
||||
mockProgress.On("SetMessage", mock.Anything, "Deleting "+path).Return().Once()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == path && result.Action == repository.FileActionDeleted
|
||||
return result.Path() == path && result.Action() == repository.FileActionDeleted
|
||||
})).Return().Once()
|
||||
|
||||
if tt.tooManyErrors != nil && i == 0 {
|
||||
@@ -469,13 +469,13 @@ func TestDeleteWorker_ProcessWithResourceRefs(t *testing.T) {
|
||||
|
||||
// Mock progress records
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path1" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "test/path1" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "dashboards/test-dashboard.json" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "dashboards/test-dashboard.json" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "folders/test-folder.json" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "folders/test-folder.json" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
worker := NewWorker(nil, mockWrapFn.Execute, mockResourcesFactory, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
|
||||
@@ -534,7 +534,7 @@ func TestDeleteWorker_ProcessResourceRefsOnly(t *testing.T) {
|
||||
mockRepo.On("Delete", mock.Anything, "dashboards/test-dashboard.json", "main", "Delete dashboards/test-dashboard.json").Return(nil)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "dashboards/test-dashboard.json" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "dashboards/test-dashboard.json" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
worker := NewWorker(nil, mockWrapFn.Execute, mockResourcesFactory, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
|
||||
@@ -587,10 +587,10 @@ func TestDeleteWorker_ProcessResourceResolutionError(t *testing.T) {
|
||||
|
||||
// Expect error to be recorded, not thrown
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "nonexistent-dashboard" &&
|
||||
result.Group == "dashboard.grafana.app" &&
|
||||
result.Action == repository.FileActionDeleted &&
|
||||
result.Error != nil
|
||||
return result.Name() == "nonexistent-dashboard" &&
|
||||
result.Group() == "dashboard.grafana.app" &&
|
||||
result.Action() == repository.FileActionDeleted &&
|
||||
result.Error() != nil
|
||||
})).Return()
|
||||
mockProgress.On("TooManyErrors").Return(nil)
|
||||
|
||||
@@ -727,7 +727,7 @@ func TestDeleteWorker_ProcessResourceResolutionTooManyErrors(t *testing.T) {
|
||||
|
||||
// Mock recording error and TooManyErrors returning error
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "nonexistent-dashboard" && result.Error != nil
|
||||
return result.Name() == "nonexistent-dashboard" && result.Error() != nil
|
||||
})).Return()
|
||||
mockProgress.On("TooManyErrors").Return(errors.New("too many errors"))
|
||||
|
||||
@@ -810,7 +810,7 @@ func TestDeleteWorker_ProcessMixedResourcesWithPartialFailure(t *testing.T) {
|
||||
mockProgress.On("Complete", mock.Anything, mock.Anything).Return(v0alpha1.JobStatus{})
|
||||
// Record the error for the failed resource
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "nonexistent-dashboard" && result.Error != nil
|
||||
return result.Name() == "nonexistent-dashboard" && result.Error() != nil
|
||||
})).Return()
|
||||
|
||||
// Allow continuing after error
|
||||
@@ -822,10 +822,10 @@ func TestDeleteWorker_ProcessMixedResourcesWithPartialFailure(t *testing.T) {
|
||||
|
||||
// Record successful deletions
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "dashboards/valid-dashboard.json" && result.Error == nil
|
||||
return result.Path() == "dashboards/valid-dashboard.json" && result.Error() == nil
|
||||
})).Return()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "folders/valid-folder.json" && result.Error == nil
|
||||
return result.Path() == "folders/valid-folder.json" && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
worker := NewWorker(nil, mockWrapFn.Execute, mockResourcesFactory, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
|
||||
@@ -910,20 +910,20 @@ func TestDeleteWorker_ProcessWithPathDeduplication(t *testing.T) {
|
||||
mockProgress.On("SetMessage", mock.Anything, "Deleting dashboards/test-dashboard.json").Return()
|
||||
mockRepo.On("Delete", mock.Anything, "dashboards/test-dashboard.json", "main", "Delete dashboards/test-dashboard.json").Return(nil)
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "dashboards/test-dashboard.json" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "dashboards/test-dashboard.json" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
mockProgress.On("TooManyErrors").Return(nil)
|
||||
|
||||
mockProgress.On("SetMessage", mock.Anything, "Deleting folders/test-folder/").Return()
|
||||
mockRepo.On("Delete", mock.Anything, "folders/test-folder/", "main", "Delete folders/test-folder/").Return(nil)
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "folders/test-folder/" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "folders/test-folder/" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
mockProgress.On("SetMessage", mock.Anything, "Deleting dashboards/unique-dashboard.json").Return()
|
||||
mockRepo.On("Delete", mock.Anything, "dashboards/unique-dashboard.json", "main", "Delete dashboards/unique-dashboard.json").Return(nil)
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "dashboards/unique-dashboard.json" && result.Action == repository.FileActionDeleted && result.Error == nil
|
||||
return result.Path() == "dashboards/unique-dashboard.json" && result.Action() == repository.FileActionDeleted && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
worker := NewWorker(nil, mockWrapFn.Execute, mockResourcesFactory, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
|
||||
|
||||
@@ -44,22 +44,17 @@ func ExportFolders(ctx context.Context, repoName string, options provisioning.Ex
|
||||
|
||||
progress.SetMessage(ctx, "write folders to repository")
|
||||
err := repositoryResources.EnsureFolderTreeExists(ctx, options.Branch, options.Path, tree, func(folder resources.Folder, created bool, err error) error {
|
||||
result := jobs.JobResourceResult{
|
||||
Action: repository.FileActionCreated,
|
||||
Name: folder.ID,
|
||||
Group: resources.FolderResource.Group,
|
||||
Kind: resources.FolderKind.Kind,
|
||||
Path: folder.Path,
|
||||
}
|
||||
action := repository.FileActionCreated
|
||||
|
||||
resultErr := error(nil)
|
||||
if err != nil {
|
||||
result.Error = fmt.Errorf("creating folder %s at path %s: %w", folder.ID, folder.Path, err)
|
||||
resultErr = fmt.Errorf("creating folder %s at path %s: %w", folder.ID, folder.Path, err)
|
||||
}
|
||||
|
||||
if !created {
|
||||
result.Action = repository.FileActionIgnored
|
||||
action = repository.FileActionIgnored
|
||||
}
|
||||
|
||||
result := jobs.NewJobResourceResult(folder.ID, resources.FolderResource.Group, resources.FolderKind.Kind, folder.Path, action, resultErr)
|
||||
progress.Record(ctx, result)
|
||||
if err := progress.TooManyErrors(); err != nil {
|
||||
return err
|
||||
|
||||
@@ -128,10 +128,10 @@ func TestExportFolders(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "read folder tree from API server").Return()
|
||||
progress.On("SetMessage", mock.Anything, "write folders to repository").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "folder-1-uid" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "folder-1-uid" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "folder-2-uid" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "folder-2-uid" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
@@ -189,10 +189,10 @@ func TestExportFolders(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "read folder tree from API server").Return()
|
||||
progress.On("SetMessage", mock.Anything, "write folders to repository").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "folder-1-uid" && result.Action == repository.FileActionIgnored && result.Error != nil && result.Error.Error() == "creating folder folder-1-uid at path grafana/folder-1: didn't work"
|
||||
return result.Name() == "folder-1-uid" && result.Action() == repository.FileActionIgnored && result.Error() != nil && result.Error().Error() == "creating folder folder-1-uid at path grafana/folder-1: didn't work"
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "folder-2-uid" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "folder-2-uid" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
@@ -298,10 +298,10 @@ func TestExportFolders(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "read folder tree from API server").Return()
|
||||
progress.On("SetMessage", mock.Anything, "write folders to repository").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "parent-folder" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "parent-folder" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "child-folder" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "child-folder" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
|
||||
@@ -104,18 +104,17 @@ func exportResource(ctx context.Context,
|
||||
// this will work well enough for now, but needs to be revisted as we have a bigger mix of active versions
|
||||
return resources.ForEach(ctx, client, func(item *unstructured.Unstructured) (err error) {
|
||||
gvk := item.GroupVersionKind()
|
||||
result := jobs.JobResourceResult{
|
||||
Name: item.GetName(),
|
||||
Group: gvk.Group,
|
||||
Kind: gvk.Kind,
|
||||
Action: repository.FileActionCreated,
|
||||
}
|
||||
name := item.GetName()
|
||||
action := repository.FileActionCreated
|
||||
path := ""
|
||||
result_err := error(nil)
|
||||
|
||||
// Check if resource is already managed by a repository
|
||||
meta, err := utils.MetaAccessor(item)
|
||||
if err != nil {
|
||||
result.Action = repository.FileActionIgnored
|
||||
result.Error = fmt.Errorf("extracting meta accessor for resource %s: %w", result.Name, err)
|
||||
action = repository.FileActionIgnored
|
||||
meta_error := fmt.Errorf("extracting meta accessor for resource %s: %w", name, err)
|
||||
result := jobs.NewJobResourceResult(name, gvk.Group, gvk.Kind, path, action, meta_error)
|
||||
progress.Record(ctx, result)
|
||||
return nil
|
||||
}
|
||||
@@ -123,7 +122,8 @@ func exportResource(ctx context.Context,
|
||||
manager, _ := meta.GetManagerProperties()
|
||||
// Skip if already managed by any manager (repository, file provisioning, etc.)
|
||||
if manager.Identity != "" {
|
||||
result.Action = repository.FileActionIgnored
|
||||
action = repository.FileActionIgnored
|
||||
result := jobs.NewJobResourceResult(name, gvk.Group, gvk.Kind, path, action, result_err)
|
||||
progress.Record(ctx, result)
|
||||
return nil
|
||||
}
|
||||
@@ -133,19 +133,20 @@ func exportResource(ctx context.Context,
|
||||
}
|
||||
|
||||
if err == nil {
|
||||
result.Path, err = repositoryResources.WriteResourceFileFromObject(ctx, item, resources.WriteOptions{
|
||||
path, err = repositoryResources.WriteResourceFileFromObject(ctx, item, resources.WriteOptions{
|
||||
Path: options.Path,
|
||||
Ref: options.Branch,
|
||||
})
|
||||
}
|
||||
|
||||
if errors.Is(err, resources.ErrAlreadyInRepository) {
|
||||
result.Action = repository.FileActionIgnored
|
||||
action = repository.FileActionIgnored
|
||||
} else if err != nil {
|
||||
result.Action = repository.FileActionIgnored
|
||||
result.Error = fmt.Errorf("writing resource file for %s: %w", result.Name, err)
|
||||
action = repository.FileActionIgnored
|
||||
result_err = fmt.Errorf("writing resource file for %s: %w", name, err)
|
||||
}
|
||||
|
||||
result := jobs.NewJobResourceResult(name, gvk.Group, gvk.Kind, path, action, result_err)
|
||||
progress.Record(ctx, result)
|
||||
if err := progress.TooManyErrors(); err != nil {
|
||||
return err
|
||||
|
||||
@@ -88,10 +88,10 @@ func TestExportResources_Dashboards_Success(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "dashboard-1" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "dashboard-1" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "dashboard-2" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "dashboard-2" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
@@ -141,10 +141,10 @@ func TestExportResources_Dashboards_WithErrors(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "dashboard-1" && result.Action == repository.FileActionIgnored && result.Error != nil && result.Error.Error() == "writing resource file for dashboard-1: failed to export dashboard"
|
||||
return result.Name() == "dashboard-1" && result.Action() == repository.FileActionIgnored && result.Error() != nil && result.Error().Error() == "writing resource file for dashboard-1: failed to export dashboard"
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "dashboard-2" && result.Action == repository.FileActionCreated
|
||||
return result.Name() == "dashboard-2" && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
@@ -179,7 +179,7 @@ func TestExportResources_Dashboards_TooManyErrors(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "dashboard-1" && result.Action == repository.FileActionIgnored && result.Error != nil && result.Error.Error() == "writing resource file for dashboard-1: failed to export dashboard"
|
||||
return result.Name() == "dashboard-1" && result.Action() == repository.FileActionIgnored && result.Error() != nil && result.Error().Error() == "writing resource file for dashboard-1: failed to export dashboard"
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(fmt.Errorf("too many errors encountered"))
|
||||
}
|
||||
@@ -209,7 +209,7 @@ func TestExportResources_Dashboards_IgnoresExisting(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "existing-dashboard" && result.Action == repository.FileActionIgnored
|
||||
return result.Name() == "existing-dashboard" && result.Action() == repository.FileActionIgnored
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
}
|
||||
@@ -256,7 +256,7 @@ func TestExportResources_Dashboards_SavedVersion(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "existing-dashboard" && result.Action == repository.FileActionIgnored
|
||||
return result.Name() == "existing-dashboard" && result.Action() == repository.FileActionIgnored
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
}
|
||||
@@ -320,9 +320,9 @@ func TestExportResources_Dashboards_FailedConversionNoStoredVersion(t *testing.T
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "dashboard-no-stored-version" &&
|
||||
result.Action == repository.FileActionIgnored &&
|
||||
result.Error != nil
|
||||
return result.Name() == "dashboard-no-stored-version" &&
|
||||
result.Action() == repository.FileActionIgnored &&
|
||||
result.Error() != nil
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
}
|
||||
@@ -469,20 +469,20 @@ func TestExportResources_Dashboards_Versions(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
if tt.expectSuccess {
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == tt.dashboardName && result.Action == repository.FileActionCreated
|
||||
return result.Name() == tt.dashboardName && result.Action() == repository.FileActionCreated
|
||||
})).Return()
|
||||
} else {
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
if result.Name != tt.dashboardName {
|
||||
if result.Name() != tt.dashboardName {
|
||||
return false
|
||||
}
|
||||
if result.Action != repository.FileActionIgnored {
|
||||
if result.Action() != repository.FileActionIgnored {
|
||||
return false
|
||||
}
|
||||
if result.Error == nil {
|
||||
if result.Error() == nil {
|
||||
return false
|
||||
}
|
||||
return result.Error.Error() == tt.expectedError
|
||||
return result.Error().Error() == tt.expectedError
|
||||
})).Return()
|
||||
}
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
@@ -540,7 +540,7 @@ func TestExportResources_Dashboards_SkipsManagedResources(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "managed-dashboard" && result.Action == repository.FileActionIgnored
|
||||
return result.Name() == "managed-dashboard" && result.Action() == repository.FileActionIgnored
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil).Maybe()
|
||||
}
|
||||
@@ -608,8 +608,8 @@ func TestExportResources_Dashboards_MultipleVersions(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "start resource export").Return()
|
||||
progress.On("SetMessage", mock.Anything, "export dashboards").Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return (result.Name == "v2alpha-dashboard" || result.Name == "v2beta-dashboard" || result.Name == "v3-dashboard") &&
|
||||
result.Action == repository.FileActionCreated
|
||||
return (result.Name() == "v2alpha-dashboard" || result.Name() == "v2beta-dashboard" || result.Name() == "v3-dashboard") &&
|
||||
result.Action() == repository.FileActionCreated
|
||||
})).Return().Times(3)
|
||||
progress.On("TooManyErrors").Return(nil).Times(3)
|
||||
}
|
||||
|
||||
@@ -0,0 +1,93 @@
|
||||
package jobs
|
||||
|
||||
import (
|
||||
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
|
||||
)
|
||||
|
||||
// JobResourceResult represents the result of a resource operation in a job.
|
||||
type JobResourceResult struct {
|
||||
name string
|
||||
group string
|
||||
kind string
|
||||
path string
|
||||
action repository.FileAction
|
||||
err error
|
||||
warning error
|
||||
}
|
||||
|
||||
// NewJobResourceResultWithoutKind creates a new JobResourceResult without a name, group, and kind.
|
||||
// This is used for operations that cannot be references to a Grafana resource or when the resource is not yet known.
|
||||
func NewJobResourceResultWithoutKind(path string, action repository.FileAction, err error) JobResourceResult {
|
||||
return NewJobResourceResult("", "", "", path, action, err)
|
||||
}
|
||||
|
||||
// Builder for a skipped resource operation. It takes care of assign the right action and job error type.
|
||||
func NewSkippedJobResourceResult(name, group, kind, path string, err error) JobResourceResult {
|
||||
return JobResourceResult{
|
||||
name: name,
|
||||
group: group,
|
||||
kind: kind,
|
||||
path: path,
|
||||
action: repository.FileActionIgnored,
|
||||
warning: err,
|
||||
err: nil,
|
||||
}
|
||||
}
|
||||
|
||||
func isWarningError(err error) bool {
|
||||
return false
|
||||
}
|
||||
|
||||
// newJobResourceResult creates a new JobResourceResult.
|
||||
// err is the error associated with the resource operation (can be nil).
|
||||
func NewJobResourceResult(name, group, kind, path string, action repository.FileAction, err error) JobResourceResult {
|
||||
result := JobResourceResult{
|
||||
name: name,
|
||||
group: group,
|
||||
kind: kind,
|
||||
path: path,
|
||||
action: action,
|
||||
}
|
||||
if isWarningError(err) {
|
||||
result.warning = err
|
||||
} else {
|
||||
result.err = err
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
|
||||
// Name returns the name of the resource.
|
||||
func (r JobResourceResult) Name() string {
|
||||
return r.name
|
||||
}
|
||||
|
||||
// Group returns the group of the resource.
|
||||
func (r JobResourceResult) Group() string {
|
||||
return r.group
|
||||
}
|
||||
|
||||
// Kind returns the kind of the resource.
|
||||
func (r JobResourceResult) Kind() string {
|
||||
return r.kind
|
||||
}
|
||||
|
||||
// Path returns the path of the resource.
|
||||
func (r JobResourceResult) Path() string {
|
||||
return r.path
|
||||
}
|
||||
|
||||
// Action returns the action performed on the resource.
|
||||
func (r JobResourceResult) Action() repository.FileAction {
|
||||
return r.action
|
||||
}
|
||||
|
||||
// Error returns the error associated with the resource operation.
|
||||
func (r JobResourceResult) Error() error {
|
||||
return r.err
|
||||
}
|
||||
|
||||
// Warning returns the warning associated with the resource operation.
|
||||
func (r JobResourceResult) Warning() error {
|
||||
return r.warning
|
||||
}
|
||||
@@ -0,0 +1,63 @@
|
||||
package jobs
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"testing"
|
||||
|
||||
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestNewSkippedJobResourceResult(t *testing.T) {
|
||||
name := "test-resource"
|
||||
group := "test-group"
|
||||
kind := "test-kind"
|
||||
path := "/test/path"
|
||||
err := errors.New("skip reason")
|
||||
|
||||
result := NewSkippedJobResourceResult(name, group, kind, path, err)
|
||||
|
||||
assert.Equal(t, name, result.Name())
|
||||
assert.Equal(t, group, result.Group())
|
||||
assert.Equal(t, kind, result.Kind())
|
||||
assert.Equal(t, path, result.Path())
|
||||
assert.Equal(t, repository.FileActionIgnored, result.Action())
|
||||
assert.Equal(t, err, result.Warning())
|
||||
assert.Nil(t, result.Error())
|
||||
}
|
||||
|
||||
func TestNewJobResourceResult_WithError(t *testing.T) {
|
||||
name := "test-resource"
|
||||
group := "test-group"
|
||||
kind := "test-kind"
|
||||
path := "/test/path"
|
||||
action := repository.FileActionCreated
|
||||
err := errors.New("operation failed")
|
||||
|
||||
result := NewJobResourceResult(name, group, kind, path, action, err)
|
||||
|
||||
assert.Equal(t, name, result.Name())
|
||||
assert.Equal(t, group, result.Group())
|
||||
assert.Equal(t, kind, result.Kind())
|
||||
assert.Equal(t, path, result.Path())
|
||||
assert.Equal(t, action, result.Action())
|
||||
assert.NotNil(t, result.Error())
|
||||
assert.Equal(t, err, result.Error())
|
||||
}
|
||||
|
||||
func TestNewJobResourceResult_WithoutError(t *testing.T) {
|
||||
name := "test-resource"
|
||||
group := "test-group"
|
||||
kind := "test-kind"
|
||||
path := "/test/path"
|
||||
action := repository.FileActionUpdated
|
||||
|
||||
result := NewJobResourceResult(name, group, kind, path, action, nil)
|
||||
|
||||
assert.Equal(t, name, result.Name())
|
||||
assert.Equal(t, group, result.Group())
|
||||
assert.Equal(t, kind, result.Kind())
|
||||
assert.Equal(t, path, result.Path())
|
||||
assert.Equal(t, action, result.Action())
|
||||
assert.Nil(t, result.Error())
|
||||
}
|
||||
@@ -40,17 +40,15 @@ func (c *namespaceCleaner) Clean(ctx context.Context, namespace string, progress
|
||||
}
|
||||
|
||||
if err = resources.ForEach(ctx, client, func(item *unstructured.Unstructured) error {
|
||||
result := jobs.JobResourceResult{
|
||||
Name: item.GetName(),
|
||||
Kind: item.GetKind(),
|
||||
Group: item.GroupVersionKind().Group,
|
||||
Action: repository.FileActionDeleted,
|
||||
}
|
||||
name := item.GetName()
|
||||
kind := item.GetKind()
|
||||
group := item.GroupVersionKind().Group
|
||||
|
||||
// Skip provisioned resources - only delete unprovisioned (unmanaged) resources
|
||||
meta, err := utils.MetaAccessor(item)
|
||||
if err != nil {
|
||||
result.Error = fmt.Errorf("extracting meta accessor for resource %s: %w", result.Name, err)
|
||||
resultErr := fmt.Errorf("extracting meta accessor for resource %s: %w", name, err)
|
||||
result := jobs.NewJobResourceResult(name, group, kind, "", repository.FileActionDeleted, resultErr)
|
||||
progress.Record(ctx, result)
|
||||
return nil // Continue with next resource
|
||||
}
|
||||
@@ -58,18 +56,20 @@ func (c *namespaceCleaner) Clean(ctx context.Context, namespace string, progress
|
||||
manager, _ := meta.GetManagerProperties()
|
||||
// Skip if resource is managed by any provisioning system
|
||||
if manager.Identity != "" {
|
||||
result.Action = repository.FileActionIgnored
|
||||
result := jobs.NewJobResourceResult(name, group, kind, "", repository.FileActionIgnored, nil)
|
||||
progress.Record(ctx, result)
|
||||
return nil // Skip this resource
|
||||
}
|
||||
|
||||
// Deletion works by name, so we can use any client regardless of version
|
||||
if err := client.Delete(ctx, item.GetName(), metav1.DeleteOptions{}); err != nil {
|
||||
result.Error = fmt.Errorf("deleting resource %s/%s %s: %w", result.Group, result.Kind, result.Name, err)
|
||||
if err := client.Delete(ctx, name, metav1.DeleteOptions{}); err != nil {
|
||||
resultErr := fmt.Errorf("deleting resource %s/%s %s: %w", group, kind, name, err)
|
||||
result := jobs.NewJobResourceResult(name, group, kind, "", repository.FileActionDeleted, resultErr)
|
||||
progress.Record(ctx, result)
|
||||
return fmt.Errorf("delete resource: %w", err)
|
||||
}
|
||||
|
||||
result := jobs.NewJobResourceResult(name, group, kind, "", repository.FileActionDeleted, nil)
|
||||
progress.Record(ctx, result)
|
||||
return nil
|
||||
}); err != nil {
|
||||
|
||||
@@ -124,10 +124,10 @@ func TestNamespaceCleaner_Clean(t *testing.T) {
|
||||
progress := jobs.NewMockJobProgressRecorder(t)
|
||||
progress.On("SetMessage", mock.Anything, mock.Anything).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Name == "test-folder" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "deleting resource folder.grafana.app/Folder test-folder: delete failed"
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Name() == "test-folder" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "deleting resource folder.grafana.app/Folder test-folder: delete failed"
|
||||
})).Return()
|
||||
|
||||
err := cleaner.Clean(context.Background(), "test-namespace", progress)
|
||||
@@ -194,21 +194,21 @@ func TestNamespaceCleaner_Clean(t *testing.T) {
|
||||
|
||||
// Expect only unprovisioned resources to be deleted (2 deletions)
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Name == "unprovisioned-folder" &&
|
||||
result.Error == nil
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Name() == "unprovisioned-folder" &&
|
||||
result.Error() == nil
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Name == "unprovisioned-dashboard" &&
|
||||
result.Error == nil
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Name() == "unprovisioned-dashboard" &&
|
||||
result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
// Expect provisioned resource to be ignored (1 ignore)
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionIgnored &&
|
||||
result.Name == "provisioned-dashboard" &&
|
||||
result.Error == nil
|
||||
return result.Action() == repository.FileActionIgnored &&
|
||||
result.Name() == "provisioned-dashboard" &&
|
||||
result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
err := cleaner.Clean(context.Background(), "test-namespace", progress)
|
||||
@@ -267,14 +267,14 @@ func TestNamespaceCleaner_Clean(t *testing.T) {
|
||||
|
||||
// Expect both resources to be ignored (no deletions)
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionIgnored &&
|
||||
result.Name == "repo-managed-dashboard" &&
|
||||
result.Error == nil
|
||||
return result.Action() == repository.FileActionIgnored &&
|
||||
result.Name() == "repo-managed-dashboard" &&
|
||||
result.Error() == nil
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionIgnored &&
|
||||
result.Name == "file-provisioned-folder" &&
|
||||
result.Error == nil
|
||||
return result.Action() == repository.FileActionIgnored &&
|
||||
result.Name() == "file-provisioned-folder" &&
|
||||
result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
err := cleaner.Clean(context.Background(), "test-namespace", progress)
|
||||
|
||||
@@ -142,18 +142,15 @@ func (w *Worker) Process(ctx context.Context, repo repository.Repository, job pr
|
||||
|
||||
func (w *Worker) moveFiles(ctx context.Context, rw repository.ReaderWriter, progress jobs.JobProgressRecorder, opts provisioning.MoveJobOptions, paths ...string) error {
|
||||
for _, path := range paths {
|
||||
result := jobs.JobResourceResult{
|
||||
Path: path,
|
||||
Action: repository.FileActionRenamed,
|
||||
}
|
||||
|
||||
// Construct the target path by combining the job's target path with the file/folder name
|
||||
targetPath := w.constructTargetPath(opts.TargetPath, path)
|
||||
|
||||
progress.SetMessage(ctx, "Moving "+path+" to "+targetPath)
|
||||
var resultErr error
|
||||
if err := rw.Move(ctx, path, targetPath, opts.Ref, "Move "+path+" to "+targetPath); err != nil {
|
||||
result.Error = fmt.Errorf("moving file %s to %s: %w", path, targetPath, err)
|
||||
resultErr = fmt.Errorf("moving file %s to %s: %w", path, targetPath, err)
|
||||
}
|
||||
result := jobs.NewJobResourceResultWithoutKind(path, repository.FileActionRenamed, resultErr)
|
||||
progress.Record(ctx, result)
|
||||
if err := progress.TooManyErrors(); err != nil {
|
||||
return err
|
||||
@@ -191,12 +188,6 @@ func (w *Worker) resolveResourcesToPaths(ctx context.Context, rw repository.Read
|
||||
|
||||
resolvedPaths := make([]string, 0, len(resources))
|
||||
for _, resource := range resources {
|
||||
result := jobs.JobResourceResult{
|
||||
Name: resource.Name,
|
||||
Group: resource.Group,
|
||||
Action: repository.FileActionRenamed, // Will be used for move later
|
||||
}
|
||||
|
||||
gvk := schema.GroupVersionKind{
|
||||
Group: resource.Group,
|
||||
Kind: resource.Kind,
|
||||
@@ -206,7 +197,8 @@ func (w *Worker) resolveResourcesToPaths(ctx context.Context, rw repository.Read
|
||||
progress.SetMessage(ctx, fmt.Sprintf("Finding path for resource %s/%s/%s", resource.Group, resource.Kind, resource.Name))
|
||||
resourcePath, err := repositoryResources.FindResourcePath(ctx, resource.Name, gvk)
|
||||
if err != nil {
|
||||
result.Error = fmt.Errorf("find path for resource %s/%s/%s: %w", resource.Group, resource.Kind, resource.Name, err)
|
||||
resultErr := fmt.Errorf("find path for resource %s/%s/%s: %w", resource.Group, resource.Kind, resource.Name, err)
|
||||
result := jobs.NewJobResourceResult(resource.Name, resource.Group, resource.Kind, "", repository.FileActionRenamed, resultErr)
|
||||
progress.Record(ctx, result)
|
||||
// Continue with next resource instead of failing fast
|
||||
if err := progress.TooManyErrors(); err != nil {
|
||||
@@ -215,7 +207,6 @@ func (w *Worker) resolveResourcesToPaths(ctx context.Context, rw repository.Read
|
||||
continue
|
||||
}
|
||||
|
||||
result.Path = resourcePath
|
||||
resolvedPaths = append(resolvedPaths, resourcePath)
|
||||
}
|
||||
|
||||
|
||||
@@ -220,10 +220,10 @@ func TestMoveWorker_ProcessMoveFilesSuccess(t *testing.T) {
|
||||
mockRepo.On("Move", mock.Anything, "test/path2", "new/location/path2", "main", "Move test/path2 to new/location/path2").Return(nil)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path1" && result.Action == repository.FileActionRenamed && result.Error == nil
|
||||
return result.Path() == "test/path1" && result.Action() == repository.FileActionRenamed && result.Error() == nil
|
||||
})).Return()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path2" && result.Action == repository.FileActionRenamed && result.Error == nil
|
||||
return result.Path() == "test/path2" && result.Action() == repository.FileActionRenamed && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
worker := NewWorker(nil, mockWrapFn.Execute, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
|
||||
@@ -261,7 +261,7 @@ func TestMoveWorker_ProcessMoveFilesWithError(t *testing.T) {
|
||||
mockRepo.On("Move", mock.Anything, "test/path1", "new/location/path1", "main", "Move test/path1 to new/location/path1").Return(moveError)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path1" && result.Action == repository.FileActionRenamed && errors.Is(result.Error, moveError)
|
||||
return result.Path() == "test/path1" && result.Action() == repository.FileActionRenamed && errors.Is(result.Error(), moveError)
|
||||
})).Return()
|
||||
mockProgress.On("TooManyErrors").Return(errors.New("too many errors"))
|
||||
|
||||
@@ -300,7 +300,7 @@ func TestMoveWorker_ProcessWithSyncWorker(t *testing.T) {
|
||||
mockRepo.On("Move", mock.Anything, "test/path", "new/location/path", "", "Move test/path to new/location/path").Return(nil)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path" && result.Action == repository.FileActionRenamed && result.Error == nil
|
||||
return result.Path() == "test/path" && result.Action() == repository.FileActionRenamed && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
mockProgress.On("ResetResults").Return()
|
||||
@@ -422,7 +422,7 @@ func TestMoveWorker_moveFiles(t *testing.T) {
|
||||
mockRepo.On("Move", mock.Anything, path, expectedTarget, "main", "Move "+path+" to "+expectedTarget).Return(tt.moveResults[i]).Once()
|
||||
mockProgress.On("SetMessage", mock.Anything, "Moving "+path+" to "+expectedTarget).Return().Once()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == path && result.Action == repository.FileActionRenamed
|
||||
return result.Path() == path && result.Action() == repository.FileActionRenamed
|
||||
})).Return().Once()
|
||||
|
||||
if tt.tooManyErrors != nil && i == 0 {
|
||||
@@ -545,10 +545,10 @@ func TestMoveWorker_ProcessWithResourceReferences(t *testing.T) {
|
||||
mockRepo.On("Move", mock.Anything, "dashboard/file.yaml", "new/location/file.yaml", "", "Move dashboard/file.yaml to new/location/file.yaml").Return(nil)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "test/path1" && result.Action == repository.FileActionRenamed && result.Error == nil
|
||||
return result.Path() == "test/path1" && result.Action() == repository.FileActionRenamed && result.Error() == nil
|
||||
})).Return()
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Path == "dashboard/file.yaml" && result.Action == repository.FileActionRenamed && result.Error == nil
|
||||
return result.Path() == "dashboard/file.yaml" && result.Action() == repository.FileActionRenamed && result.Error() == nil
|
||||
})).Return()
|
||||
|
||||
// Add expectations for sync worker (called when ref is empty)
|
||||
@@ -607,9 +607,9 @@ func TestMoveWorker_ProcessResourceReferencesError(t *testing.T) {
|
||||
}).Return("", resourceError)
|
||||
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == "non-existent-uid" && result.Group == "dashboard.grafana.app" &&
|
||||
result.Action == repository.FileActionRenamed &&
|
||||
result.Error != nil && result.Error.Error() == "find path for resource dashboard.grafana.app/Dashboard/non-existent-uid: resource not found"
|
||||
return result.Name() == "non-existent-uid" && result.Group() == "dashboard.grafana.app" &&
|
||||
result.Action() == repository.FileActionRenamed &&
|
||||
result.Error() != nil && result.Error().Error() == "find path for resource dashboard.grafana.app/Dashboard/non-existent-uid: resource not found"
|
||||
})).Return()
|
||||
|
||||
// Add expectations for sync worker (called when ref is empty)
|
||||
@@ -766,8 +766,8 @@ func TestMoveWorker_resolveResourcesToPaths(t *testing.T) {
|
||||
} else if err, ok := tt.resourceErrors[resource.Name]; ok {
|
||||
mockRepoResources.On("FindResourcePath", mock.Anything, resource.Name, gvk).Return("", err)
|
||||
mockProgress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Name == resource.Name && result.Group == resource.Group &&
|
||||
result.Action == repository.FileActionRenamed && result.Error != nil
|
||||
return result.Name() == resource.Name && result.Group() == resource.Group &&
|
||||
result.Action() == repository.FileActionRenamed && result.Error() != nil
|
||||
})).Return()
|
||||
if tt.tooManyErrors != nil {
|
||||
mockProgress.On("TooManyErrors").Return(tt.tooManyErrors)
|
||||
|
||||
@@ -34,16 +34,6 @@ func maybeNotifyProgress(threshold time.Duration, fn ProgressFn) ProgressFn {
|
||||
}
|
||||
|
||||
// FIXME: ProgressRecorder should be initialized in the queue
|
||||
type JobResourceResult struct {
|
||||
Name string
|
||||
Group string
|
||||
Kind string
|
||||
Path string
|
||||
Action repository.FileAction
|
||||
Error error
|
||||
Warning error
|
||||
}
|
||||
|
||||
type jobProgressRecorder struct {
|
||||
mu sync.RWMutex
|
||||
started time.Time
|
||||
@@ -81,11 +71,11 @@ func (r *jobProgressRecorder) Record(ctx context.Context, result JobResourceResu
|
||||
r.mu.Lock()
|
||||
r.resultCount++
|
||||
|
||||
if result.Error != nil {
|
||||
if result.Error() != nil {
|
||||
shouldLogError = true
|
||||
logErr = result.Error
|
||||
logErr = result.Error()
|
||||
if len(r.errors) < 20 {
|
||||
r.errors = append(r.errors, result.Error.Error())
|
||||
r.errors = append(r.errors, result.Error().Error())
|
||||
}
|
||||
r.errorCount++
|
||||
}
|
||||
@@ -93,7 +83,7 @@ func (r *jobProgressRecorder) Record(ctx context.Context, result JobResourceResu
|
||||
r.updateSummary(result)
|
||||
r.mu.Unlock()
|
||||
|
||||
logger := logging.FromContext(ctx).With("path", result.Path, "group", result.Group, "kind", result.Kind, "action", result.Action, "name", result.Name)
|
||||
logger := logging.FromContext(ctx).With("path", result.Path(), "group", result.Group(), "kind", result.Kind(), "action", result.Action(), "name", result.Name())
|
||||
if shouldLogError {
|
||||
logger.Error("job resource operation failed", "err", logErr)
|
||||
} else {
|
||||
@@ -180,26 +170,26 @@ func (r *jobProgressRecorder) summary() []*provisioning.JobResourceSummary {
|
||||
|
||||
func (r *jobProgressRecorder) updateSummary(result JobResourceResult) {
|
||||
// Note: This method is called from Record() which already holds the lock
|
||||
key := result.Group + ":" + result.Kind
|
||||
key := result.Group() + ":" + result.Kind()
|
||||
summary, exists := r.summaries[key]
|
||||
if !exists {
|
||||
summary = &provisioning.JobResourceSummary{
|
||||
Group: result.Group,
|
||||
Kind: result.Kind,
|
||||
Group: result.Group(),
|
||||
Kind: result.Kind(),
|
||||
}
|
||||
r.summaries[key] = summary
|
||||
}
|
||||
|
||||
if result.Error != nil {
|
||||
errorMsg := fmt.Sprintf("%s (file: %s, name: %s, action: %s)", result.Error.Error(), result.Path, result.Name, result.Action)
|
||||
if result.Error() != nil {
|
||||
errorMsg := fmt.Sprintf("%s (file: %s, name: %s, action: %s)", result.Error().Error(), result.Path(), result.Name(), result.Action())
|
||||
summary.Errors = append(summary.Errors, errorMsg)
|
||||
summary.Error++
|
||||
} else if result.Warning != nil {
|
||||
warningMsg := fmt.Sprintf("%s (file: %s, name: %s, action: %s)", result.Warning.Error(), result.Path, result.Name, result.Action)
|
||||
} else if result.Warning() != nil {
|
||||
warningMsg := fmt.Sprintf("%s (file: %s, name: %s, action: %s)", result.Warning().Error(), result.Path(), result.Name(), result.Action())
|
||||
summary.Warnings = append(summary.Warnings, warningMsg)
|
||||
summary.Warning++
|
||||
} else {
|
||||
switch result.Action {
|
||||
switch result.Action() {
|
||||
case repository.FileActionDeleted:
|
||||
summary.Delete++
|
||||
case repository.FileActionUpdated:
|
||||
|
||||
@@ -98,36 +98,36 @@ func TestJobProgressRecorderWarningStatus(t *testing.T) {
|
||||
// Record a result with a warning
|
||||
warningErr := errors.New("deprecated API used")
|
||||
result := JobResourceResult{
|
||||
Name: "test-resource",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test.json",
|
||||
Action: repository.FileActionUpdated,
|
||||
Warning: warningErr,
|
||||
name: "test-resource",
|
||||
group: "test.grafana.app",
|
||||
kind: "Dashboard",
|
||||
path: "dashboards/test.json",
|
||||
action: repository.FileActionUpdated,
|
||||
warning: warningErr,
|
||||
}
|
||||
recorder.Record(ctx, result)
|
||||
|
||||
// Record another result with a different warning
|
||||
warningErr2 := errors.New("missing optional field")
|
||||
result2 := JobResourceResult{
|
||||
Name: "test-resource-2",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test2.json",
|
||||
Action: repository.FileActionCreated,
|
||||
Warning: warningErr2,
|
||||
name: "test-resource-2",
|
||||
group: "test.grafana.app",
|
||||
kind: "Dashboard",
|
||||
path: "dashboards/test2.json",
|
||||
action: repository.FileActionCreated,
|
||||
warning: warningErr2,
|
||||
}
|
||||
recorder.Record(ctx, result2)
|
||||
|
||||
// Record a result with a warning from a different resource type
|
||||
warningErr3 := errors.New("validation warning")
|
||||
result3 := JobResourceResult{
|
||||
Name: "test-resource-3",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "DataSource",
|
||||
Path: "datasources/test.yaml",
|
||||
Action: repository.FileActionCreated,
|
||||
Warning: warningErr3,
|
||||
name: "test-resource-3",
|
||||
group: "test.grafana.app",
|
||||
kind: "DataSource",
|
||||
path: "datasources/test.yaml",
|
||||
action: repository.FileActionCreated,
|
||||
warning: warningErr3,
|
||||
}
|
||||
recorder.Record(ctx, result3)
|
||||
|
||||
@@ -184,24 +184,24 @@ func TestJobProgressRecorderWarningWithErrors(t *testing.T) {
|
||||
// Record a result with an error (errors take precedence)
|
||||
errorErr := errors.New("failed to process")
|
||||
result := JobResourceResult{
|
||||
Name: "test-resource",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test.json",
|
||||
Action: repository.FileActionUpdated,
|
||||
Error: errorErr,
|
||||
name: "test-resource",
|
||||
group: "test.grafana.app",
|
||||
kind: "Dashboard",
|
||||
path: "dashboards/test.json",
|
||||
action: repository.FileActionUpdated,
|
||||
err: errorErr,
|
||||
}
|
||||
recorder.Record(ctx, result)
|
||||
|
||||
// Record a result with only a warning
|
||||
warningErr := errors.New("deprecated API used")
|
||||
result2 := JobResourceResult{
|
||||
Name: "test-resource-2",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test2.json",
|
||||
Action: repository.FileActionCreated,
|
||||
Warning: warningErr,
|
||||
name: "test-resource-2",
|
||||
group: "test.grafana.app",
|
||||
kind: "Dashboard",
|
||||
path: "dashboards/test2.json",
|
||||
action: repository.FileActionCreated,
|
||||
warning: warningErr,
|
||||
}
|
||||
recorder.Record(ctx, result2)
|
||||
|
||||
@@ -233,12 +233,12 @@ func TestJobProgressRecorderWarningOnlyNoErrors(t *testing.T) {
|
||||
// Record only warnings, no errors
|
||||
warningErr := errors.New("deprecated API used")
|
||||
result := JobResourceResult{
|
||||
Name: "test-resource",
|
||||
Group: "test.grafana.app",
|
||||
Kind: "Dashboard",
|
||||
Path: "dashboards/test.json",
|
||||
Action: repository.FileActionUpdated,
|
||||
Warning: warningErr,
|
||||
name: "test-resource",
|
||||
group: "test.grafana.app",
|
||||
kind: "Dashboard",
|
||||
path: "dashboards/test.json",
|
||||
action: repository.FileActionUpdated,
|
||||
warning: warningErr,
|
||||
}
|
||||
recorder.Record(ctx, result)
|
||||
|
||||
|
||||
@@ -82,20 +82,14 @@ func applyChange(ctx context.Context, change ResourceFileChange, clients resourc
|
||||
|
||||
if change.Action == repository.FileActionDeleted {
|
||||
deleteCtx, deleteSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.delete")
|
||||
result := jobs.JobResourceResult{
|
||||
Path: change.Path,
|
||||
Action: change.Action,
|
||||
}
|
||||
|
||||
if change.Existing == nil || change.Existing.Name == "" {
|
||||
result.Error = fmt.Errorf("processing deletion for file %s: missing existing reference", change.Path)
|
||||
result := jobs.NewJobResourceResultWithoutKind(change.Path, change.Action, fmt.Errorf("processing deletion for file %s: missing existing reference", change.Path))
|
||||
progress.Record(deleteCtx, result)
|
||||
deleteSpan.RecordError(result.Error)
|
||||
deleteSpan.RecordError(result.Error())
|
||||
deleteSpan.End()
|
||||
return
|
||||
}
|
||||
result.Name = change.Existing.Name
|
||||
result.Group = change.Existing.Group
|
||||
|
||||
versionlessGVR := schema.GroupVersionResource{
|
||||
Group: change.Existing.Group,
|
||||
@@ -105,17 +99,17 @@ func applyChange(ctx context.Context, change ResourceFileChange, clients resourc
|
||||
// TODO: should we use the clients or the resource manager instead?
|
||||
client, gvk, err := clients.ForResource(deleteCtx, versionlessGVR)
|
||||
if err != nil {
|
||||
result.Kind = versionlessGVR.Resource // could not find a kind
|
||||
result.Error = fmt.Errorf("get client for deleted object: %w", err)
|
||||
result := jobs.NewJobResourceResult(change.Existing.Name, change.Existing.Group, versionlessGVR.Resource, change.Path, change.Action, fmt.Errorf("get client for deleted object: %w", err))
|
||||
progress.Record(deleteCtx, result)
|
||||
deleteSpan.End()
|
||||
return
|
||||
}
|
||||
result.Kind = gvk.Kind
|
||||
|
||||
var deleteErr error
|
||||
if err := client.Delete(deleteCtx, change.Existing.Name, metav1.DeleteOptions{}); err != nil {
|
||||
result.Error = fmt.Errorf("deleting resource %s/%s %s: %w", change.Existing.Group, gvk.Kind, change.Existing.Name, err)
|
||||
deleteErr = fmt.Errorf("deleting resource %s/%s %s: %w", change.Existing.Group, gvk.Kind, change.Existing.Name, err)
|
||||
}
|
||||
result := jobs.NewJobResourceResult(change.Existing.Name, change.Existing.Group, gvk.Kind, change.Path, change.Action, deleteErr)
|
||||
progress.Record(deleteCtx, result)
|
||||
deleteSpan.End()
|
||||
return
|
||||
@@ -125,23 +119,17 @@ func applyChange(ctx context.Context, change ResourceFileChange, clients resourc
|
||||
if safepath.IsDir(change.Path) {
|
||||
// For non-deletions, ensure folder exists
|
||||
ensureFolderCtx, ensureFolderSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.ensure_folder_exists")
|
||||
result := jobs.JobResourceResult{
|
||||
Path: change.Path,
|
||||
Action: change.Action,
|
||||
Group: resources.FolderKind.Group,
|
||||
Kind: resources.FolderKind.Kind,
|
||||
}
|
||||
|
||||
folder, err := repositoryResources.EnsureFolderPathExist(ensureFolderCtx, change.Path)
|
||||
if err != nil {
|
||||
result.Error = fmt.Errorf("ensuring folder exists at path %s: %w", change.Path, err)
|
||||
result := jobs.NewJobResourceResult("", resources.FolderKind.Group, resources.FolderKind.Kind, change.Path, change.Action, fmt.Errorf("ensuring folder exists at path %s: %w", change.Path, err))
|
||||
ensureFolderSpan.RecordError(err)
|
||||
ensureFolderSpan.End()
|
||||
progress.Record(ctx, result)
|
||||
return
|
||||
}
|
||||
|
||||
result.Name = folder
|
||||
result := jobs.NewJobResourceResult(folder, resources.FolderKind.Group, resources.FolderKind.Kind, change.Path, change.Action, nil)
|
||||
progress.Record(ensureFolderCtx, result)
|
||||
ensureFolderSpan.End()
|
||||
return
|
||||
@@ -149,18 +137,13 @@ func applyChange(ctx context.Context, change ResourceFileChange, clients resourc
|
||||
|
||||
writeCtx, writeSpan := tracer.Start(ctx, "provisioning.sync.full.apply_changes.write_resource_from_file")
|
||||
name, gvk, err := repositoryResources.WriteResourceFromFile(writeCtx, change.Path, "")
|
||||
result := jobs.JobResourceResult{
|
||||
Path: change.Path,
|
||||
Action: change.Action,
|
||||
Name: name,
|
||||
Group: gvk.Group,
|
||||
Kind: gvk.Kind,
|
||||
}
|
||||
var writeErr error
|
||||
if err != nil {
|
||||
writeSpan.RecordError(err)
|
||||
result.Error = fmt.Errorf("writing resource from file %s: %w", change.Path, err)
|
||||
writeErr = fmt.Errorf("writing resource from file %s: %w", change.Path, err)
|
||||
}
|
||||
|
||||
result := jobs.NewJobResourceResult(name, gvk.Group, gvk.Kind, change.Path, change.Action, writeErr)
|
||||
progress.Record(writeCtx, result)
|
||||
writeSpan.End()
|
||||
}
|
||||
@@ -272,11 +255,7 @@ func applyFoldersSerially(ctx context.Context, folders []ResourceFileChange, cli
|
||||
logger.Error("operation timed out after 15 seconds", "path", folder.Path, "action", folder.Action)
|
||||
|
||||
recordCtx, recordCancel := context.WithTimeout(context.Background(), 15*time.Second)
|
||||
progress.Record(recordCtx, jobs.JobResourceResult{
|
||||
Path: folder.Path,
|
||||
Action: folder.Action,
|
||||
Error: fmt.Errorf("operation timed out after 15 seconds"),
|
||||
})
|
||||
progress.Record(recordCtx, jobs.NewJobResourceResultWithoutKind(folder.Path, folder.Action, fmt.Errorf("operation timed out after 15 seconds")))
|
||||
recordCancel()
|
||||
}
|
||||
|
||||
@@ -341,11 +320,7 @@ func applyChangeWithTimeout(ctx context.Context, change ResourceFileChange, clie
|
||||
logger.Error("operation timed out after 15 seconds", "path", change.Path, "action", change.Action)
|
||||
|
||||
recordCtx, recordCancel := context.WithTimeout(context.Background(), 15*time.Second)
|
||||
progress.Record(recordCtx, jobs.JobResourceResult{
|
||||
Path: change.Path,
|
||||
Action: change.Action,
|
||||
Error: fmt.Errorf("operation timed out after 15 seconds"),
|
||||
})
|
||||
progress.Record(recordCtx, jobs.NewJobResourceResultWithoutKind(change.Path, change.Action, fmt.Errorf("operation timed out after 15 seconds")))
|
||||
recordCancel()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -218,8 +218,8 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
}), "").Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil).Maybe()
|
||||
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionCreated &&
|
||||
(result.Path == "dashboards/one.json" || result.Path == "dashboards/two.json" || result.Path == "dashboards/three.json")
|
||||
return result.Action() == repository.FileActionCreated &&
|
||||
(result.Path() == "dashboards/one.json" || result.Path() == "dashboards/two.json" || result.Path() == "dashboards/three.json")
|
||||
})).Return().Maybe()
|
||||
},
|
||||
expectedError: "too many errors",
|
||||
@@ -239,13 +239,14 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
|
||||
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
|
||||
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionCreated,
|
||||
Path: "dashboards/test.json",
|
||||
Name: "test-dashboard",
|
||||
Kind: "Dashboard",
|
||||
Group: "dashboards",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"test-dashboard",
|
||||
"dashboards",
|
||||
"Dashboard",
|
||||
"dashboards/test.json",
|
||||
repository.FileActionCreated,
|
||||
nil,
|
||||
)).Return()
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -264,13 +265,13 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write error"))
|
||||
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionCreated &&
|
||||
result.Path == "dashboards/test.json" &&
|
||||
result.Name == "test-dashboard" &&
|
||||
result.Kind == "Dashboard" &&
|
||||
result.Group == "dashboards" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "writing resource from file dashboards/test.json: write error"
|
||||
return result.Action() == repository.FileActionCreated &&
|
||||
result.Path() == "dashboards/test.json" &&
|
||||
result.Name() == "test-dashboard" &&
|
||||
result.Kind() == "Dashboard" &&
|
||||
result.Group() == "dashboards" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "writing resource from file dashboards/test.json: write error"
|
||||
})).Return()
|
||||
},
|
||||
},
|
||||
@@ -289,13 +290,14 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
repoResources.On("WriteResourceFromFile", mock.Anything, "dashboards/test.json", "").
|
||||
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
|
||||
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionUpdated,
|
||||
Path: "dashboards/test.json",
|
||||
Name: "test-dashboard",
|
||||
Kind: "Dashboard",
|
||||
Group: "dashboards",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"test-dashboard",
|
||||
"dashboards",
|
||||
"Dashboard",
|
||||
"dashboards/test.json",
|
||||
repository.FileActionUpdated,
|
||||
nil,
|
||||
)).Return()
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -314,13 +316,13 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
Return("test-dashboard", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, fmt.Errorf("write error"))
|
||||
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionUpdated &&
|
||||
result.Path == "dashboards/test.json" &&
|
||||
result.Name == "test-dashboard" &&
|
||||
result.Kind == "Dashboard" &&
|
||||
result.Group == "dashboards" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "writing resource from file dashboards/test.json: write error"
|
||||
return result.Action() == repository.FileActionUpdated &&
|
||||
result.Path() == "dashboards/test.json" &&
|
||||
result.Name() == "test-dashboard" &&
|
||||
result.Kind() == "Dashboard" &&
|
||||
result.Group() == "dashboards" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "writing resource from file dashboards/test.json: write error"
|
||||
})).Return()
|
||||
},
|
||||
},
|
||||
@@ -337,13 +339,14 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
|
||||
repoResources.On("EnsureFolderPathExist", mock.Anything, "one/two/three/").Return("some-folder", nil)
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionCreated,
|
||||
Path: "one/two/three/",
|
||||
Name: "some-folder",
|
||||
Kind: "Folder",
|
||||
Group: "folder.grafana.app",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"some-folder",
|
||||
"folder.grafana.app",
|
||||
"Folder",
|
||||
"one/two/three/",
|
||||
repository.FileActionCreated,
|
||||
nil,
|
||||
)).Return()
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -364,13 +367,13 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
"one/two/three/",
|
||||
).Return("some-folder", errors.New("folder creation error"))
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionCreated &&
|
||||
result.Path == "one/two/three/" &&
|
||||
result.Name == "" &&
|
||||
result.Kind == "Folder" &&
|
||||
result.Group == "folder.grafana.app" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "ensuring folder exists at path one/two/three/: folder creation error"
|
||||
return result.Action() == repository.FileActionCreated &&
|
||||
result.Path() == "one/two/three/" &&
|
||||
result.Name() == "" &&
|
||||
result.Kind() == "Folder" &&
|
||||
result.Group() == "folder.grafana.app" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "ensuring folder exists at path one/two/three/: folder creation error"
|
||||
})).Return()
|
||||
},
|
||||
},
|
||||
@@ -423,14 +426,14 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
Version: "v1",
|
||||
}, nil)
|
||||
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionDeleted,
|
||||
Path: "dashboards/test.json",
|
||||
Name: "test-dashboard",
|
||||
Kind: "Dashboard",
|
||||
Group: "dashboards",
|
||||
Error: nil,
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"test-dashboard",
|
||||
"dashboards",
|
||||
"Dashboard",
|
||||
"dashboards/test.json",
|
||||
repository.FileActionDeleted,
|
||||
nil,
|
||||
)).Return()
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -483,13 +486,13 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
}, nil)
|
||||
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Path == "dashboards/test.json" &&
|
||||
result.Name == "test-dashboard" &&
|
||||
result.Kind == "Dashboard" &&
|
||||
result.Group == "dashboards" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "deleting resource dashboards/Dashboard test-dashboard: delete failed"
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Path() == "dashboards/test.json" &&
|
||||
result.Name() == "test-dashboard" &&
|
||||
result.Kind() == "Dashboard" &&
|
||||
result.Group() == "dashboards" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "deleting resource dashboards/Dashboard test-dashboard: delete failed"
|
||||
})).Return()
|
||||
},
|
||||
},
|
||||
@@ -506,10 +509,10 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Path == "dashboards/test.json" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "processing deletion for file dashboards/test.json: missing existing reference"
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Path() == "dashboards/test.json" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "processing deletion for file dashboards/test.json: missing existing reference"
|
||||
})).Return()
|
||||
},
|
||||
},
|
||||
@@ -526,10 +529,10 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
setupMocks: func(repo *repository.MockRepository, repoResources *resources.MockRepositoryResources, clients *resources.MockResourceClients, progress *jobs.MockJobProgressRecorder, compareFn *MockCompareFn) {
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Path == "dashboards/test.json" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "processing deletion for file dashboards/test.json: missing existing reference"
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Path() == "dashboards/test.json" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "processing deletion for file dashboards/test.json: missing existing reference"
|
||||
})).Return()
|
||||
},
|
||||
},
|
||||
@@ -555,14 +558,14 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
Resource: "dashboards",
|
||||
}).Return(nil, schema.GroupVersionKind{}, errors.New("didn't work"))
|
||||
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Name: "test-dashboard",
|
||||
Group: "dashboards",
|
||||
Kind: "dashboards", // could not find a real kind
|
||||
Action: repository.FileActionDeleted,
|
||||
Path: "dashboards/test.json",
|
||||
Error: fmt.Errorf("get client for deleted object: %w", errors.New("didn't work")),
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"test-dashboard",
|
||||
"dashboards",
|
||||
"dashboards", // could not find a real kind
|
||||
"dashboards/test.json",
|
||||
repository.FileActionDeleted,
|
||||
fmt.Errorf("get client for deleted object: %w", errors.New("didn't work")),
|
||||
)).Return()
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -614,14 +617,14 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
Version: "v1",
|
||||
}, nil)
|
||||
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionDeleted,
|
||||
Path: "to-be-deleted/",
|
||||
Name: "test-folder",
|
||||
Kind: "Folder",
|
||||
Group: "folders",
|
||||
Error: nil,
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"test-folder",
|
||||
"folders",
|
||||
"Folder",
|
||||
"to-be-deleted/",
|
||||
repository.FileActionDeleted,
|
||||
nil,
|
||||
)).Return()
|
||||
},
|
||||
},
|
||||
{
|
||||
@@ -674,13 +677,13 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
}, nil)
|
||||
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Path == "to-be-deleted/" &&
|
||||
result.Name == "test-folder" &&
|
||||
result.Kind == "Folder" &&
|
||||
result.Group == "folders" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "deleting resource folders/Folder test-folder: delete failed"
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Path() == "to-be-deleted/" &&
|
||||
result.Name() == "test-folder" &&
|
||||
result.Kind() == "Folder" &&
|
||||
result.Group() == "folders" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "deleting resource folders/Folder test-folder: delete failed"
|
||||
})).Return()
|
||||
},
|
||||
},
|
||||
@@ -709,17 +712,17 @@ func TestFullSync_ApplyChanges(t *testing.T) { //nolint:gocyclo
|
||||
Return("", schema.GroupVersionKind{}, context.DeadlineExceeded)
|
||||
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionCreated &&
|
||||
result.Path == "dashboards/slow.json" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "writing resource from file dashboards/slow.json: context deadline exceeded"
|
||||
return result.Action() == repository.FileActionCreated &&
|
||||
result.Path() == "dashboards/slow.json" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "writing resource from file dashboards/slow.json: context deadline exceeded"
|
||||
})).Return().Once()
|
||||
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionCreated &&
|
||||
result.Path == "dashboards/slow.json" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "operation timed out after 15 seconds"
|
||||
return result.Action() == repository.FileActionCreated &&
|
||||
result.Path() == "dashboards/slow.json" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "operation timed out after 15 seconds"
|
||||
})).Return().Once()
|
||||
},
|
||||
},
|
||||
|
||||
@@ -101,52 +101,40 @@ func applyIncrementalChanges(ctx context.Context, diff []repository.VersionedFil
|
||||
return nil, tracing.Error(span, fmt.Errorf("unable to create empty file folder: %w", err))
|
||||
}
|
||||
|
||||
progress.Record(ensureFolderCtx, jobs.JobResourceResult{
|
||||
Path: safeSegment,
|
||||
Action: repository.FileActionCreated,
|
||||
Group: resources.FolderResource.Group,
|
||||
Kind: resources.FolderKind.Kind,
|
||||
Name: folder,
|
||||
})
|
||||
result := jobs.NewJobResourceResult(folder, resources.FolderResource.Group, resources.FolderKind.Kind, safeSegment, repository.FileActionCreated, nil)
|
||||
progress.Record(ensureFolderCtx, result)
|
||||
ensureFolderSpan.End()
|
||||
continue
|
||||
}
|
||||
|
||||
progress.Record(ensureFolderCtx, jobs.JobResourceResult{
|
||||
Path: change.Path,
|
||||
Action: repository.FileActionIgnored,
|
||||
})
|
||||
result := jobs.NewJobResourceResultWithoutKind(change.Path, repository.FileActionIgnored, nil)
|
||||
progress.Record(ensureFolderCtx, result)
|
||||
ensureFolderSpan.End()
|
||||
continue
|
||||
}
|
||||
|
||||
result := jobs.JobResourceResult{
|
||||
Path: change.Path,
|
||||
Action: change.Action,
|
||||
}
|
||||
var result jobs.JobResourceResult
|
||||
|
||||
switch change.Action {
|
||||
case repository.FileActionCreated, repository.FileActionUpdated:
|
||||
writeCtx, writeSpan := tracer.Start(ctx, "provisioning.sync.incremental.write_resource_from_file")
|
||||
name, gvk, err := repositoryResources.WriteResourceFromFile(writeCtx, change.Path, change.Ref)
|
||||
var resultErr error
|
||||
if err != nil {
|
||||
writeSpan.RecordError(err)
|
||||
result.Error = fmt.Errorf("writing resource from file %s: %w", change.Path, err)
|
||||
resultErr = fmt.Errorf("writing resource from file %s: %w", change.Path, err)
|
||||
}
|
||||
result.Name = name
|
||||
result.Kind = gvk.Kind
|
||||
result.Group = gvk.Group
|
||||
result = jobs.NewJobResourceResult(name, gvk.Group, gvk.Kind, change.Path, change.Action, resultErr)
|
||||
writeSpan.End()
|
||||
case repository.FileActionDeleted:
|
||||
removeCtx, removeSpan := tracer.Start(ctx, "provisioning.sync.incremental.remove_resource_from_file")
|
||||
name, folderName, gvk, err := repositoryResources.RemoveResourceFromFile(removeCtx, change.Path, change.PreviousRef)
|
||||
var resultErr error
|
||||
if err != nil {
|
||||
removeSpan.RecordError(err)
|
||||
result.Error = fmt.Errorf("removing resource from file %s: %w", change.Path, err)
|
||||
resultErr = fmt.Errorf("removing resource from file %s: %w", change.Path, err)
|
||||
}
|
||||
result.Name = name
|
||||
result.Kind = gvk.Kind
|
||||
result.Group = gvk.Group
|
||||
result = jobs.NewJobResourceResult(name, gvk.Group, gvk.Kind, change.Path, change.Action, resultErr)
|
||||
|
||||
if folderName != "" {
|
||||
affectedFolders[safepath.Dir(change.Path)] = folderName
|
||||
@@ -156,13 +144,12 @@ func applyIncrementalChanges(ctx context.Context, diff []repository.VersionedFil
|
||||
case repository.FileActionRenamed:
|
||||
renameCtx, renameSpan := tracer.Start(ctx, "provisioning.sync.incremental.rename_resource_file")
|
||||
name, oldFolderName, gvk, err := repositoryResources.RenameResourceFile(renameCtx, change.PreviousPath, change.PreviousRef, change.Path, change.Ref)
|
||||
var resultErr error
|
||||
if err != nil {
|
||||
renameSpan.RecordError(err)
|
||||
result.Error = fmt.Errorf("renaming resource file from %s to %s: %w", change.PreviousPath, change.Path, err)
|
||||
resultErr = fmt.Errorf("renaming resource file from %s to %s: %w", change.PreviousPath, change.Path, err)
|
||||
}
|
||||
result.Name = name
|
||||
result.Kind = gvk.Kind
|
||||
result.Group = gvk.Group
|
||||
result = jobs.NewJobResourceResult(name, gvk.Group, gvk.Kind, change.Path, change.Action, resultErr)
|
||||
|
||||
if oldFolderName != "" {
|
||||
affectedFolders[safepath.Dir(change.Path)] = oldFolderName
|
||||
@@ -171,6 +158,7 @@ func applyIncrementalChanges(ctx context.Context, diff []repository.VersionedFil
|
||||
renameSpan.End()
|
||||
case repository.FileActionIgnored:
|
||||
// do nothing
|
||||
result = jobs.NewJobResourceResultWithoutKind(change.Path, change.Action, nil)
|
||||
}
|
||||
progress.Record(ctx, result)
|
||||
}
|
||||
|
||||
@@ -100,10 +100,10 @@ func TestIncrementalSync(t *testing.T) {
|
||||
|
||||
// Mock progress recording
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionCreated && result.Path == "dashboards/test.json"
|
||||
return result.Action() == repository.FileActionCreated && result.Path() == "dashboards/test.json"
|
||||
})).Return()
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionUpdated && result.Path == "alerts/alert.yaml"
|
||||
return result.Action() == repository.FileActionUpdated && result.Path() == "alerts/alert.yaml"
|
||||
})).Return()
|
||||
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
@@ -132,13 +132,14 @@ func TestIncrementalSync(t *testing.T) {
|
||||
Return("test-folder", nil)
|
||||
|
||||
// Mock progress recording
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionCreated,
|
||||
Path: "unsupported/path/",
|
||||
Kind: resources.FolderKind.Kind,
|
||||
Group: resources.FolderResource.Group,
|
||||
Name: "test-folder",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"test-folder",
|
||||
resources.FolderResource.Group,
|
||||
resources.FolderKind.Kind,
|
||||
"unsupported/path/",
|
||||
repository.FileActionCreated,
|
||||
nil,
|
||||
)).Return()
|
||||
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
},
|
||||
@@ -161,10 +162,11 @@ func TestIncrementalSync(t *testing.T) {
|
||||
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
|
||||
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
|
||||
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionIgnored,
|
||||
Path: ".unsupported/path/file.txt",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResultWithoutKind(
|
||||
".unsupported/path/file.txt",
|
||||
repository.FileActionIgnored,
|
||||
nil,
|
||||
)).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
},
|
||||
previousRef: "old-ref",
|
||||
@@ -191,13 +193,14 @@ func TestIncrementalSync(t *testing.T) {
|
||||
Return("old-dashboard", "", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
|
||||
|
||||
// Mock progress recording
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionDeleted,
|
||||
Path: "dashboards/old.json",
|
||||
Name: "old-dashboard",
|
||||
Kind: "Dashboard",
|
||||
Group: "dashboards",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"old-dashboard",
|
||||
"dashboards",
|
||||
"Dashboard",
|
||||
"dashboards/old.json",
|
||||
repository.FileActionDeleted,
|
||||
nil,
|
||||
)).Return()
|
||||
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
},
|
||||
@@ -227,13 +230,14 @@ func TestIncrementalSync(t *testing.T) {
|
||||
Return("renamed-dashboard", "", schema.GroupVersionKind{Kind: "Dashboard", Group: "dashboards"}, nil)
|
||||
|
||||
// Mock progress recording
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionRenamed,
|
||||
Path: "dashboards/new.json",
|
||||
Name: "renamed-dashboard",
|
||||
Kind: "Dashboard",
|
||||
Group: "dashboards",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResult(
|
||||
"renamed-dashboard",
|
||||
"dashboards",
|
||||
"Dashboard",
|
||||
"dashboards/new.json",
|
||||
repository.FileActionRenamed,
|
||||
nil,
|
||||
)).Return()
|
||||
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
},
|
||||
@@ -254,10 +258,11 @@ func TestIncrementalSync(t *testing.T) {
|
||||
progress.On("SetTotal", mock.Anything, 1).Return()
|
||||
progress.On("SetMessage", mock.Anything, "replicating versioned changes").Return()
|
||||
progress.On("SetMessage", mock.Anything, "versioned changes replicated").Return()
|
||||
progress.On("Record", mock.Anything, jobs.JobResourceResult{
|
||||
Action: repository.FileActionIgnored,
|
||||
Path: "dashboards/ignored.json",
|
||||
}).Return()
|
||||
progress.On("Record", mock.Anything, jobs.NewJobResourceResultWithoutKind(
|
||||
"dashboards/ignored.json",
|
||||
repository.FileActionIgnored,
|
||||
nil,
|
||||
)).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
},
|
||||
previousRef: "old-ref",
|
||||
@@ -309,13 +314,13 @@ func TestIncrementalSync(t *testing.T) {
|
||||
|
||||
// Mock progress recording with error
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionCreated &&
|
||||
result.Path == "dashboards/test.json" &&
|
||||
result.Name == "test-dashboard" &&
|
||||
result.Kind == "Dashboard" &&
|
||||
result.Group == "dashboards" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "writing resource from file dashboards/test.json: write failed"
|
||||
return result.Action() == repository.FileActionCreated &&
|
||||
result.Path() == "dashboards/test.json" &&
|
||||
result.Name() == "test-dashboard" &&
|
||||
result.Kind() == "Dashboard" &&
|
||||
result.Group() == "dashboards" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "writing resource from file dashboards/test.json: write failed"
|
||||
})).Return()
|
||||
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
@@ -345,13 +350,13 @@ func TestIncrementalSync(t *testing.T) {
|
||||
|
||||
// Mock progress recording with error
|
||||
progress.On("Record", mock.Anything, mock.MatchedBy(func(result jobs.JobResourceResult) bool {
|
||||
return result.Action == repository.FileActionDeleted &&
|
||||
result.Path == "dashboards/old.json" &&
|
||||
result.Name == "old-dashboard" &&
|
||||
result.Kind == "Dashboard" &&
|
||||
result.Group == "dashboards" &&
|
||||
result.Error != nil &&
|
||||
result.Error.Error() == "removing resource from file dashboards/old.json: delete failed"
|
||||
return result.Action() == repository.FileActionDeleted &&
|
||||
result.Path() == "dashboards/old.json" &&
|
||||
result.Name() == "old-dashboard" &&
|
||||
result.Kind() == "Dashboard" &&
|
||||
result.Group() == "dashboards" &&
|
||||
result.Error() != nil &&
|
||||
result.Error().Error() == "removing resource from file dashboards/old.json: delete failed"
|
||||
})).Return()
|
||||
progress.On("TooManyErrors").Return(nil)
|
||||
},
|
||||
|
||||
Generated
+2
-2
@@ -875,7 +875,7 @@ func Initialize(ctx context.Context, cfg *setting.Cfg, opts Options, apiOpts api
|
||||
ldapImpl := service12.ProvideService(cfg, featureToggles, ssosettingsimplService)
|
||||
apiService := api4.ProvideService(cfg, routeRegisterImpl, accessControl, userService, authinfoimplService, ossGroups, identitySynchronizer, orgService, ldapImpl, userAuthTokenService, bundleregistryService)
|
||||
dashboardActivityChannel := live.ProvideDashboardActivityChannel(grafanaLive)
|
||||
dashboardsAPIBuilder := dashboard.RegisterAPIService(cfg, featureToggles, apiserverService, dashboardService, dashboardProvisioningService, service15, dashboardServiceImpl, dashboardPermissionsService, accessControl, accessClient, provisioningServiceImpl, dashboardsStore, registerer, sqlStore, tracingService, resourceClient, dualwriteService, sortService, quotaService, libraryPanelService, eventualRestConfigProvider, userService, libraryElementService, publicDashboardServiceImpl, serviceImpl, dashboardActivityChannel)
|
||||
dashboardsAPIBuilder := dashboard.RegisterAPIService(featureToggles, apiserverService, dashboardService, dashboardProvisioningService, service15, dashboardServiceImpl, dashboardPermissionsService, accessControl, accessClient, provisioningServiceImpl, dashboardsStore, registerer, sqlStore, tracingService, resourceClient, dualwriteService, sortService, quotaService, libraryPanelService, eventualRestConfigProvider, userService, libraryElementService, publicDashboardServiceImpl, serviceImpl, dashboardActivityChannel, configProvider)
|
||||
dataSourceAPIBuilder, err := datasource.RegisterAPIService(featureToggles, apiserverService, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, accessControl, registerer, sourcesService)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
@@ -1537,7 +1537,7 @@ func InitializeForTest(ctx context.Context, t sqlutil.ITestDB, testingT interfac
|
||||
ldapImpl := service12.ProvideService(cfg, featureToggles, ssosettingsimplService)
|
||||
apiService := api4.ProvideService(cfg, routeRegisterImpl, accessControl, userService, authinfoimplService, ossGroups, identitySynchronizer, orgService, ldapImpl, userAuthTokenService, bundleregistryService)
|
||||
dashboardActivityChannel := live.ProvideDashboardActivityChannel(grafanaLive)
|
||||
dashboardsAPIBuilder := dashboard.RegisterAPIService(cfg, featureToggles, apiserverService, dashboardService, dashboardProvisioningService, service15, dashboardServiceImpl, dashboardPermissionsService, accessControl, accessClient, provisioningServiceImpl, dashboardsStore, registerer, sqlStore, tracingService, resourceClient, dualwriteService, sortService, quotaService, libraryPanelService, eventualRestConfigProvider, userService, libraryElementService, publicDashboardServiceImpl, serviceImpl, dashboardActivityChannel)
|
||||
dashboardsAPIBuilder := dashboard.RegisterAPIService(featureToggles, apiserverService, dashboardService, dashboardProvisioningService, service15, dashboardServiceImpl, dashboardPermissionsService, accessControl, accessClient, provisioningServiceImpl, dashboardsStore, registerer, sqlStore, tracingService, resourceClient, dualwriteService, sortService, quotaService, libraryPanelService, eventualRestConfigProvider, userService, libraryElementService, publicDashboardServiceImpl, serviceImpl, dashboardActivityChannel, configProvider)
|
||||
dataSourceAPIBuilder, err := datasource.RegisterAPIService(featureToggles, apiserverService, middlewareHandler, scopedPluginDatasourceProvider, plugincontextProvider, accessControl, registerer, sourcesService)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
||||
@@ -15,6 +15,8 @@ var _ authorizer.Authorizer = &roleAuthorizer{}
|
||||
|
||||
var orgRoleNoneAsViewerAPIGroups = []string{
|
||||
"productactivation.ext.grafana.com",
|
||||
// playlist can be removed after this issue is resolved: https://github.com/grafana/grafana/issues/115712
|
||||
"playlist.grafana.app",
|
||||
}
|
||||
|
||||
type roleAuthorizer struct{}
|
||||
|
||||
@@ -20,9 +20,10 @@ const (
|
||||
|
||||
// Typed errors
|
||||
var (
|
||||
ErrUserTokenNotFound = errors.New("user token not found")
|
||||
ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken
|
||||
ErrExternalSessionNotFound = errors.New("external session not found")
|
||||
ErrUserTokenNotFound = errors.New("user token not found")
|
||||
ErrInvalidSessionToken = usertoken.ErrInvalidSessionToken
|
||||
ErrExternalSessionNotFound = errors.New("external session not found")
|
||||
ErrExternalSessionTokenNotFound = errors.New("session token was nil")
|
||||
)
|
||||
|
||||
type (
|
||||
|
||||
@@ -572,13 +572,6 @@ var (
|
||||
FrontendOnly: false, // The restore backend feature changes behavior based on this flag
|
||||
Owner: grafanaDashboardsSquad,
|
||||
},
|
||||
{
|
||||
Name: "kubernetesDashboardsV2",
|
||||
Description: "Use the v2 kubernetes API in the frontend for dashboards",
|
||||
Stage: FeatureStageExperimental,
|
||||
FrontendOnly: false,
|
||||
Owner: grafanaDashboardsSquad,
|
||||
},
|
||||
{
|
||||
Name: "dashboardUndoRedo",
|
||||
Description: "Enables undo/redo in dynamic dashboards",
|
||||
@@ -688,6 +681,14 @@ var (
|
||||
HideFromDocs: true,
|
||||
RequiresRestart: true,
|
||||
},
|
||||
{
|
||||
Name: "auditLoggingAppPlatform",
|
||||
Description: "Enable audit logging with Kubernetes under app platform",
|
||||
Stage: FeatureStageExperimental,
|
||||
Owner: grafanaOperatorExperienceSquad,
|
||||
HideFromDocs: true,
|
||||
RequiresRestart: true,
|
||||
},
|
||||
{
|
||||
Name: "secretsManagementAppPlatform",
|
||||
Description: "Enable the secrets management API and services under app platform",
|
||||
|
||||
Generated
+1
-1
@@ -79,7 +79,6 @@ dashboardSceneForViewers,GA,@grafana/dashboards-squad,false,false,true
|
||||
dashboardSceneSolo,GA,@grafana/dashboards-squad,false,false,true
|
||||
dashboardScene,GA,@grafana/dashboards-squad,false,false,true
|
||||
dashboardNewLayouts,experimental,@grafana/dashboards-squad,false,false,false
|
||||
kubernetesDashboardsV2,experimental,@grafana/dashboards-squad,false,false,false
|
||||
dashboardUndoRedo,experimental,@grafana/dashboards-squad,false,false,true
|
||||
unlimitedLayoutsNesting,experimental,@grafana/dashboards-squad,false,false,true
|
||||
drilldownRecommendations,experimental,@grafana/dashboards-squad,false,false,true
|
||||
@@ -95,6 +94,7 @@ kubernetesFeatureToggles,experimental,@grafana/grafana-operator-experience-squad
|
||||
cloudRBACRoles,preview,@grafana/identity-access-team,false,true,false
|
||||
alertingQueryOptimization,GA,@grafana/alerting-squad,false,false,false
|
||||
jitterAlertRulesWithinGroups,preview,@grafana/alerting-squad,false,true,false
|
||||
auditLoggingAppPlatform,experimental,@grafana/grafana-operator-experience-squad,false,true,false
|
||||
secretsManagementAppPlatform,experimental,@grafana/grafana-operator-experience-squad,false,false,false
|
||||
secretsManagementAppPlatformUI,experimental,@grafana/grafana-operator-experience-squad,false,false,false
|
||||
alertingSaveStatePeriodic,privatePreview,@grafana/alerting-squad,false,false,false
|
||||
|
||||
|
Generated
+4
-4
@@ -259,10 +259,6 @@ const (
|
||||
// Enables experimental new dashboard layouts
|
||||
FlagDashboardNewLayouts = "dashboardNewLayouts"
|
||||
|
||||
// FlagKubernetesDashboardsV2
|
||||
// Use the v2 kubernetes API in the frontend for dashboards
|
||||
FlagKubernetesDashboardsV2 = "kubernetesDashboardsV2"
|
||||
|
||||
// FlagPdfTables
|
||||
// Enables generating table data as PDF in reporting
|
||||
FlagPdfTables = "pdfTables"
|
||||
@@ -279,6 +275,10 @@ const (
|
||||
// Distributes alert rule evaluations more evenly over time, including spreading out rules within the same group. Disables sequential evaluation if enabled.
|
||||
FlagJitterAlertRulesWithinGroups = "jitterAlertRulesWithinGroups"
|
||||
|
||||
// FlagAuditLoggingAppPlatform
|
||||
// Enable audit logging with Kubernetes under app platform
|
||||
FlagAuditLoggingAppPlatform = "auditLoggingAppPlatform"
|
||||
|
||||
// FlagSecretsManagementAppPlatform
|
||||
// Enable the secrets management API and services under app platform
|
||||
FlagSecretsManagementAppPlatform = "secretsManagementAppPlatform"
|
||||
|
||||
+17
-2
@@ -658,6 +658,20 @@
|
||||
"frontend": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"metadata": {
|
||||
"name": "auditLoggingAppPlatform",
|
||||
"resourceVersion": "1767013056996",
|
||||
"creationTimestamp": "2025-12-29T12:57:36Z"
|
||||
},
|
||||
"spec": {
|
||||
"description": "Enable audit logging with Kubernetes under app platform",
|
||||
"stage": "experimental",
|
||||
"codeowner": "@grafana/grafana-operator-experience-squad",
|
||||
"requiresRestart": true,
|
||||
"hideFromDocs": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"metadata": {
|
||||
"name": "authZGRPCServer",
|
||||
@@ -2003,8 +2017,9 @@
|
||||
{
|
||||
"metadata": {
|
||||
"name": "kubernetesDashboardsV2",
|
||||
"resourceVersion": "1764664939750",
|
||||
"creationTimestamp": "2025-12-02T08:42:19Z"
|
||||
"resourceVersion": "1764236054307",
|
||||
"creationTimestamp": "2025-11-27T09:34:14Z",
|
||||
"deletionTimestamp": "2025-12-05T13:43:57Z"
|
||||
},
|
||||
"spec": {
|
||||
"description": "Use the v2 kubernetes API in the frontend for dashboards",
|
||||
|
||||
@@ -660,6 +660,10 @@ func (o *Service) getExternalSession(ctx context.Context, usr identity.Requester
|
||||
return externalSessions[0], nil
|
||||
}
|
||||
|
||||
if sessionToken == nil {
|
||||
return nil, auth.ErrExternalSessionTokenNotFound
|
||||
}
|
||||
|
||||
// For regular users, we use the session token ID to fetch the external session
|
||||
return o.sessionService.GetExternalSession(ctx, sessionToken.ExternalSessionId)
|
||||
}
|
||||
|
||||
@@ -2169,6 +2169,43 @@
|
||||
]
|
||||
}
|
||||
},
|
||||
"/apis/dashboard.grafana.app/v0alpha1/namespaces/{namespace}/snapshots/settings": {
|
||||
"get": {
|
||||
"tags": [
|
||||
"Snapshot"
|
||||
],
|
||||
"description": "Get Snapshot sharing settings",
|
||||
"operationId": "getSnapshotSettings",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "namespace",
|
||||
"in": "path",
|
||||
"description": "workspace",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string"
|
||||
},
|
||||
"example": "default"
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {},
|
||||
"example": "{\"snapshotsEnabled\":true,\"externalSnapshotURL\":\"https://externalurl.com\",\"externalSnapshotName\":\"external\",\"externalEnabled\":true}"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"x-grafana-action": "get",
|
||||
"x-kubernetes-group-version-kind": {
|
||||
"group": "dashboard.grafana.app",
|
||||
"version": "v0alpha1",
|
||||
"kind": "SnapshotSharingOptions"
|
||||
}
|
||||
}
|
||||
},
|
||||
"/apis/dashboard.grafana.app/v0alpha1/namespaces/{namespace}/snapshots/{name}": {
|
||||
"get": {
|
||||
"tags": [
|
||||
|
||||
@@ -426,6 +426,45 @@ func doPlaylistTests(t *testing.T, helper *apis.K8sTestHelper) *apis.K8sTestHelp
|
||||
require.Equal(t, metav1.StatusReasonForbidden, rsp.Status.Reason)
|
||||
})
|
||||
|
||||
t.Run("Check CRUD operations with None role", func(t *testing.T) {
|
||||
// Create a playlist with admin user
|
||||
clientAdmin := helper.GetResourceClient(apis.ResourceClientArgs{
|
||||
User: helper.Org1.Admin,
|
||||
GVR: gvr,
|
||||
})
|
||||
created, err := clientAdmin.Resource.Create(context.Background(),
|
||||
helper.LoadYAMLOrJSONFile("testdata/playlist-generate.yaml"),
|
||||
metav1.CreateOptions{},
|
||||
)
|
||||
require.NoError(t, err)
|
||||
|
||||
clientNone := helper.GetResourceClient(apis.ResourceClientArgs{
|
||||
User: helper.Org1.None,
|
||||
GVR: gvr,
|
||||
})
|
||||
|
||||
// Now check if None user can perform a Get to start a playlist
|
||||
_, err = clientNone.Resource.Get(context.Background(), created.GetName(), metav1.GetOptions{})
|
||||
require.NoError(t, err)
|
||||
|
||||
// None role can get but can not create edit or delete a playlist
|
||||
_, err = clientNone.Resource.Create(context.Background(),
|
||||
helper.LoadYAMLOrJSONFile("testdata/playlist-generate.yaml"),
|
||||
metav1.CreateOptions{},
|
||||
)
|
||||
require.Error(t, err)
|
||||
|
||||
_, err = clientNone.Resource.Update(context.Background(), created, metav1.UpdateOptions{})
|
||||
require.Error(t, err)
|
||||
|
||||
err = clientNone.Resource.Delete(context.Background(), created.GetName(), metav1.DeleteOptions{})
|
||||
require.Error(t, err)
|
||||
|
||||
// delete created resource
|
||||
err = clientAdmin.Resource.Delete(context.Background(), created.GetName(), metav1.DeleteOptions{})
|
||||
require.NoError(t, err)
|
||||
})
|
||||
|
||||
t.Run("Check k8s client-go List from different org users", func(t *testing.T) {
|
||||
// Check Org1 Viewer
|
||||
client := helper.GetResourceClient(apis.ResourceClientArgs{
|
||||
|
||||
+72
@@ -60,4 +60,76 @@ describe('LogRecordViewerByTimestamp', () => {
|
||||
expect(within(errorRows[1]).getByText(/Error message:/)).toBeInTheDocument();
|
||||
expect(within(errorRows[1]).getByText(/explicit message/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
describe('Numeric Value Formatting', () => {
|
||||
it('should format numeric values correctly in AlertInstanceValues', () => {
|
||||
const records: LogRecord[] = [
|
||||
{
|
||||
timestamp: 1681739580000,
|
||||
line: {
|
||||
current: 'Alerting',
|
||||
previous: 'Pending',
|
||||
labels: {},
|
||||
values: {
|
||||
cpu_usage: 42.987654321,
|
||||
memory_mb: 1234567.89,
|
||||
disk_io: 0.001234,
|
||||
request_count: 10000,
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
render(<LogRecordViewerByTimestamp records={records} commonLabels={[]} />);
|
||||
|
||||
expect(screen.getByText(/cpu_usage/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/4\.299e\+1/i)).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText(/memory_mb/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/1\.235e\+6/i)).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText(/disk_io/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/1\.234e-3/i)).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText(/request_count/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/10000/)).toBeInTheDocument();
|
||||
});
|
||||
|
||||
it('should format various numeric ranges correctly', () => {
|
||||
const records: LogRecord[] = [
|
||||
{
|
||||
timestamp: 1681739580000,
|
||||
line: {
|
||||
current: 'Alerting',
|
||||
previous: 'Pending',
|
||||
labels: {},
|
||||
values: {
|
||||
small: 0.001,
|
||||
normal: 42.5,
|
||||
large: 123456,
|
||||
boundary_low: 0.01,
|
||||
boundary_high: 10000,
|
||||
},
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
render(<LogRecordViewerByTimestamp records={records} commonLabels={[]} />);
|
||||
|
||||
expect(screen.getByText(/small/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/1\.000e-3/i)).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText(/normal/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/42\.5/)).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText(/large/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/1\.235e\+5/i)).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText(/boundary_low/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/0\.01/)).toBeInTheDocument();
|
||||
|
||||
expect(screen.getByText(/boundary_high/)).toBeInTheDocument();
|
||||
expect(screen.getByText(/10000/)).toBeInTheDocument();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
+2
-1
@@ -13,6 +13,7 @@ import { AlertStateTag } from '../AlertStateTag';
|
||||
|
||||
import { ErrorMessageRow } from './ErrorMessageRow';
|
||||
import { LogRecord, omitLabels } from './common';
|
||||
import { formatNumericValue } from './numberFormatter';
|
||||
|
||||
type LogRecordViewerProps = {
|
||||
records: LogRecord[];
|
||||
@@ -182,7 +183,7 @@ const AlertInstanceValues = memo(({ record }: { record: Record<string, number> }
|
||||
return (
|
||||
<>
|
||||
{values.map(([key, value]) => (
|
||||
<AlertLabel key={key} labelKey={key} value={String(value)} />
|
||||
<AlertLabel key={key} labelKey={key} value={formatNumericValue(value)} />
|
||||
))}
|
||||
</>
|
||||
);
|
||||
|
||||
+173
@@ -0,0 +1,173 @@
|
||||
import { formatNumericValue } from './numberFormatter';

// Unit tests for formatNumericValue. Contract under test:
// - finite values with |v| in [1e-2, 1e4] and <= 4 decimal places: plain notation
// - everything else (too small, too large, or > 4 decimals): scientific notation
//   with 4 significant digits (toExponential(3), e.g. "1.415e+0")
describe('formatNumericValue', () => {
  describe('Zero and special values', () => {
    it('should format zero correctly', () => {
      expect(formatNumericValue(0)).toBe('0');
      // -0 is normalized to plain '0' (no minus sign)
      expect(formatNumericValue(-0)).toBe('0');
    });

    it('should handle NaN', () => {
      expect(formatNumericValue(NaN)).toBe('NaN');
    });

    it('should handle Infinity', () => {
      expect(formatNumericValue(Infinity)).toBe('Infinity');
      expect(formatNumericValue(-Infinity)).toBe('-Infinity');
    });
  });

  describe('Very small numbers (scientific notation)', () => {
    it('should use scientific notation for values less than 1e-2', () => {
      const result1 = formatNumericValue(1e-3);
      expect(result1).toMatch(/^1\.000e-3$/i);

      const result2 = formatNumericValue(0.001);
      expect(result2).toMatch(/^1\.000e-3$/i);

      const result3 = formatNumericValue(0.009);
      expect(result3).toMatch(/^9\.000e-3$/i);
    });

    it('should use scientific notation for values just below 1e-2', () => {
      const result = formatNumericValue(0.00999);
      expect(result).toMatch(/^9\.990e-3$/i);
    });

    it('should format the example from requirements correctly', () => {
      // 1.4153928131348452 has > 4 decimal places, so should use scientific notation
      const result = formatNumericValue(1.4153928131348452);
      expect(result).toMatch(/^1\.415e\+0$/i);
    });

    it('should handle negative very small numbers', () => {
      const result = formatNumericValue(-1e-3);
      expect(result).toMatch(/^-1\.000e-3$/i);

      const result2 = formatNumericValue(-0.001);
      expect(result2).toMatch(/^-1\.000e-3$/i);
    });
  });

  describe('Human-readable range (standard notation)', () => {
    it('should use standard notation for boundary value 1e-2', () => {
      // Boundaries are inclusive: exactly 1e-2 stays in plain notation
      expect(formatNumericValue(0.01)).toBe('0.01');
    });

    it('should use standard notation for values in readable range', () => {
      expect(formatNumericValue(0.1)).toBe('0.1');
      expect(formatNumericValue(1)).toBe('1');
      expect(formatNumericValue(1.234)).toBe('1.234');
      expect(formatNumericValue(42.5)).toBe('42.5');
    });

    it('should limit to 4 decimal places without rounding integer parts', () => {
      expect(formatNumericValue(123.456)).toBe('123.456');
      expect(formatNumericValue(1234.567)).toBe('1234.567');
      expect(formatNumericValue(9999.9)).toBe('9999.9');
      expect(formatNumericValue(9999.1234)).toBe('9999.1234');
    });

    it('should use scientific notation for numbers with more than 4 decimal places', () => {
      // Numbers with > 4 decimals should use scientific notation even in readable range
      const result1 = formatNumericValue(123.456789);
      expect(result1).toMatch(/^1\.235e\+2$/i);

      const result2 = formatNumericValue(1.23456789);
      expect(result2).toMatch(/^1\.235e\+0$/i);

      const result3 = formatNumericValue(42.987654321);
      expect(result3).toMatch(/^4\.299e\+1$/i);
    });

    it('should use standard notation for boundary value 1e4', () => {
      // Exactly 1e4 is still in the readable range (threshold is strict >)
      expect(formatNumericValue(10000)).toBe('10000');
    });

    it('should handle negative numbers in readable range', () => {
      expect(formatNumericValue(-0.1)).toBe('-0.1');
      expect(formatNumericValue(-123.456)).toBe('-123.456');
      expect(formatNumericValue(-9999.9)).toBe('-9999.9');
    });

    it('should use scientific notation for negative numbers with excessive precision', () => {
      const result = formatNumericValue(-42.987654321);
      expect(result).toMatch(/^-4\.299e\+1$/i);
    });
  });

  describe('Very large numbers (scientific notation)', () => {
    it('should use scientific notation for values greater than 1e4', () => {
      const result1 = formatNumericValue(10001);
      expect(result1).toMatch(/^1\.000e\+4$/i);

      const result2 = formatNumericValue(123456);
      expect(result2).toMatch(/^1\.235e\+5$/i);
    });

    it('should handle negative very large numbers', () => {
      const result = formatNumericValue(-1e5);
      expect(result).toMatch(/^-1\.000e\+5$/i);

      const result2 = formatNumericValue(-123456);
      expect(result2).toMatch(/^-1\.235e\+5$/i);
    });
  });

  describe('Edge cases', () => {
    it('should handle numbers exactly at boundaries', () => {
      expect(formatNumericValue(0.01)).toBe('0.01');

      const justBelow = formatNumericValue(0.009999);
      expect(justBelow).toMatch(/^9\.999e-3$/i);

      expect(formatNumericValue(10000)).toBe('10000');

      const justAbove = formatNumericValue(10001);
      expect(justAbove).toMatch(/^1\.000e\+4$/i);
    });

    it('should use scientific notation for very precise decimals with > 4 decimal places', () => {
      expect(formatNumericValue(1.23456789)).toMatch(/^1\.235e\+0$/i);
      expect(formatNumericValue(123.456789)).toMatch(/^1\.235e\+2$/i);
      expect(formatNumericValue(0.123456789)).toMatch(/^1\.235e-1$/i);
    });

    it('should use standard notation for numbers with exactly 4 or fewer decimal places', () => {
      expect(formatNumericValue(1.2345)).toBe('1.2345');
      expect(formatNumericValue(0.1234)).toBe('0.1234');
      expect(formatNumericValue(123.4567)).toBe('123.4567');
    });
  });

  describe('countDecimalPlaces edge cases', () => {
    it('should handle numbers that toString() would convert to scientific notation', () => {
      // 1e-10 / 1e10 stringify in scientific form; the formatter must not
      // mis-count their decimals and must still emit 4 significant digits
      const result = formatNumericValue(1e-10);
      expect(result).toMatch(/^1\.000e-10$/i);

      const result2 = formatNumericValue(1e10);
      expect(result2).toMatch(/^1\.000e\+10$/i);
    });

    it('should correctly count decimals for numbers with trailing zeros', () => {
      expect(formatNumericValue(1.234)).toBe('1.234');
      expect(formatNumericValue(1.2)).toBe('1.2');
      // 1.0 is an integer value at runtime, so it renders as '1'
      expect(formatNumericValue(1.0)).toBe('1');
    });

    it('should handle boundary values correctly', () => {
      expect(formatNumericValue(0.01)).toBe('0.01');
      expect(formatNumericValue(10000)).toBe('10000');

      // In range but with 5 decimals -> scientific
      expect(formatNumericValue(0.01001)).toMatch(/^1\.001e-2$/i);
      expect(formatNumericValue(9999.1234)).toBe('9999.1234');
      expect(formatNumericValue(9999.12345)).toMatch(/^9\.999e\+3$/i);
    });

    it('should handle numbers in readable range that have many decimals', () => {
      expect(formatNumericValue(1.4153928131348452)).toMatch(/^1\.415e\+0$/i);
      expect(formatNumericValue(42.987654321)).toMatch(/^4\.299e\+1$/i);
      expect(formatNumericValue(123.456789)).toMatch(/^1\.235e\+2$/i);
    });
  });
});
|
||||
+75
@@ -0,0 +1,75 @@
|
||||
const SCIENTIFIC_NOTATION_THRESHOLD_SMALL = 1e-2;
|
||||
const SCIENTIFIC_NOTATION_THRESHOLD_LARGE = 1e4;
|
||||
const MAX_DECIMAL_PLACES = 4;
|
||||
const EXPONENTIAL_DECIMALS = 3; // 4 significant digits = 1 digit + 3 decimals
|
||||
|
||||
const readableRangeFormatter = new Intl.NumberFormat(undefined, {
|
||||
maximumFractionDigits: MAX_DECIMAL_PLACES,
|
||||
useGrouping: false,
|
||||
});
|
||||
|
||||
/**
|
||||
* Counts the number of decimal places in a number.
|
||||
* Only processes numbers in readable range (1e-2 to 1e4) to avoid
|
||||
* toString() scientific notation issues for very large/small numbers.
|
||||
*
|
||||
* Uses toFixed(10) to ensure standard notation representation.
|
||||
* 10 decimal places is sufficient to detect if a number has > 4 decimal places.
|
||||
*/
|
||||
function countDecimalPlaces(value: number): number {
|
||||
if (Number.isInteger(value)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const absValue = Math.abs(value);
|
||||
|
||||
// Only count decimals for numbers in readable range
|
||||
if (absValue < SCIENTIFIC_NOTATION_THRESHOLD_SMALL || absValue > SCIENTIFIC_NOTATION_THRESHOLD_LARGE) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const str = value.toFixed(10);
|
||||
const decimalIndex = str.indexOf('.');
|
||||
|
||||
if (decimalIndex === -1) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Count decimal places, removing trailing zeros
|
||||
const decimalPart = str.substring(decimalIndex + 1).replace(/0+$/, '');
|
||||
return decimalPart.length;
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats a numeric value for display in alert rule history.
|
||||
* - For values in human-readable range (1e-2 to 1e4) with ≤ 4 decimal places: shows up to 4 decimal places
|
||||
* - For very small values (< 1e-2): uses scientific notation with 4 significant digits
|
||||
* - For very large values (> 1e4): uses scientific notation with 4 significant digits
|
||||
* - For numbers with > 4 decimal places: uses scientific notation with 4 significant digits
|
||||
*
|
||||
* @param value - The number to format
|
||||
* @returns A formatted string representation of the number
|
||||
*/
|
||||
export function formatNumericValue(value: number): string {
|
||||
if (!Number.isFinite(value)) {
|
||||
return String(value);
|
||||
}
|
||||
|
||||
if (value === 0) {
|
||||
return '0';
|
||||
}
|
||||
|
||||
const absValue = Math.abs(value);
|
||||
|
||||
if (absValue < SCIENTIFIC_NOTATION_THRESHOLD_SMALL || absValue > SCIENTIFIC_NOTATION_THRESHOLD_LARGE) {
|
||||
return value.toExponential(EXPONENTIAL_DECIMALS);
|
||||
}
|
||||
|
||||
const decimalPlaces = countDecimalPlaces(value);
|
||||
|
||||
if (decimalPlaces > MAX_DECIMAL_PLACES) {
|
||||
return value.toExponential(EXPONENTIAL_DECIMALS);
|
||||
}
|
||||
|
||||
return readableRangeFormatter.format(value);
|
||||
}
|
||||
@@ -83,6 +83,24 @@ export function DashboardEditPaneRenderer({ editPane, dashboard, isDocked }: Pro
|
||||
onClick={() => dashboard.openV2SchemaEditor()}
|
||||
/> */}
|
||||
<Sidebar.Divider />
|
||||
<Sidebar.Button
|
||||
style={{ color: '#ff671d' }}
|
||||
icon="comment-alt-message"
|
||||
onClick={() =>
|
||||
window.open(
|
||||
'https://docs.google.com/forms/d/e/1FAIpQLSfDZJM_VlZgRHDx8UPtLWbd9bIBPRxoA28qynTHEYniyPXO6Q/viewform',
|
||||
'_blank'
|
||||
)
|
||||
}
|
||||
title={t(
|
||||
'dashboard-scene.dashboard-edit-pane-renderer.title-feedback-dashboard-editing-experience',
|
||||
'Give feedback on the new dashboard editing experience'
|
||||
)}
|
||||
tooltip={t(
|
||||
'dashboard-scene.dashboard-edit-pane-renderer.title-feedback-dashboard-editing-experience',
|
||||
'Give feedback on the new dashboard editing experience'
|
||||
)}
|
||||
/>
|
||||
</>
|
||||
)}
|
||||
{hasUid && <ShareExportDashboardButton dashboard={dashboard} />}
|
||||
|
||||
@@ -959,7 +959,7 @@ export class DashboardScenePageStateManagerV2 extends DashboardScenePageStateMan
|
||||
}
|
||||
|
||||
export function shouldForceV2API(): boolean {
|
||||
return Boolean(config.featureToggles.kubernetesDashboardsV2 || config.featureToggles.dashboardNewLayouts);
|
||||
return Boolean(config.featureToggles.dashboardNewLayouts);
|
||||
}
|
||||
|
||||
export class UnifiedDashboardScenePageStateManager extends DashboardScenePageStateManagerBase<
|
||||
|
||||
@@ -112,6 +112,37 @@ describe('PanelEditor', () => {
|
||||
});
|
||||
});
|
||||
|
||||
  // Regression test: opening panel edit must clear any edit-pane selection,
  // otherwise the sidebar panel editor stays open after exiting panel edit.
  describe('Entering panel edit', () => {
    it('should clear edit pane selection', () => {
      // `pluginPromise` / `deactivate` are shared test-suite state declared
      // outside this block — presumably reset between tests; verify in setup
      pluginPromise = Promise.resolve(getPanelPlugin({ id: 'text', skipDataQuery: true }));

      const panel = new VizPanel({
        key: 'panel-1',
        pluginId: 'text',
        title: 'original title',
      });
      const gridItem = new DashboardGridItem({ body: panel });
      const panelEditor = buildPanelEditScene(panel);
      // Dashboard already in edit mode with the panel editor attached
      const dashboard = new DashboardScene({
        editPanel: panelEditor,
        isEditing: true,
        $timeRange: new SceneTimeRange({ from: 'now-6h', to: 'now' }),
        body: new DefaultGridLayoutManager({
          grid: new SceneGridLayout({
            children: [gridItem],
          }),
        }),
      });

      // Pre-select the panel in the edit pane, then verify it is selected
      dashboard.state.editPane.selectObject(panel, panel.state.key!, { force: true });
      expect(dashboard.state.editPane.getSelection()).toBe(panel);

      // Activating the scene tree triggers the panel editor's activation handler
      deactivate = activateFullSceneTree(dashboard);

      // The activation handler must have cleared the selection
      expect(dashboard.state.editPane.getSelection()).toBeUndefined();
    });
  });
|
||||
|
||||
describe('When discarding', () => {
|
||||
it('should discard changes revert all changes', async () => {
|
||||
const { panelEditor, panel, dashboard } = await setup();
|
||||
|
||||
@@ -84,6 +84,11 @@ export class PanelEditor extends SceneObjectBase<PanelEditorState> {
|
||||
|
||||
private _activationHandler() {
|
||||
const panel = this.state.panelRef.resolve();
|
||||
const dashboard = getDashboardSceneFor(this);
|
||||
|
||||
// Clear any panel selection when entering panel edit mode.
|
||||
// Need to clear selection here since selection is activated when panel edit mode is entered through the panel actions menu. This causes sidebar panel editor to be open when exiting panel edit mode
|
||||
dashboard.state.editPane.clearSelection();
|
||||
|
||||
if (panel.state.pluginId === UNCONFIGURED_PANEL_PLUGIN_ID) {
|
||||
if (config.featureToggles.newVizSuggestions) {
|
||||
|
||||
@@ -20,7 +20,6 @@ export function isV0V1StoredVersion(version: string | undefined): boolean {
|
||||
export function getDashboardsApiVersion(responseFormat?: 'v1' | 'v2') {
|
||||
const isDashboardSceneEnabled = config.featureToggles.dashboardScene;
|
||||
const isKubernetesDashboardsEnabled = config.featureToggles.kubernetesDashboards;
|
||||
const isV2DashboardAPIVersionEnabled = config.featureToggles.kubernetesDashboardsV2;
|
||||
const isDashboardNewLayoutsEnabled = config.featureToggles.dashboardNewLayouts;
|
||||
|
||||
const forcingOldDashboardArch = locationService.getSearch().get('scenes') === 'false';
|
||||
@@ -39,7 +38,7 @@ export function getDashboardsApiVersion(responseFormat?: 'v1' | 'v2') {
|
||||
if (responseFormat === 'v1') {
|
||||
return 'v1';
|
||||
}
|
||||
if (responseFormat === 'v2' || isV2DashboardAPIVersionEnabled || isDashboardNewLayoutsEnabled) {
|
||||
if (responseFormat === 'v2' || isDashboardNewLayoutsEnabled) {
|
||||
return 'v2';
|
||||
}
|
||||
return 'unified';
|
||||
|
||||
@@ -118,10 +118,7 @@ class K8sAPI implements DashboardSnapshotSrv {
|
||||
}
|
||||
|
||||
  /**
   * Fetches snapshot sharing options from the snapshot API's settings endpoint.
   * NOTE(review): assumes `this.url` is the base path of the snapshot API for
   * the current namespace — confirm it resolves to a valid `/settings` route.
   */
  async getSharingOptions() {
    return getBackendSrv().get<SnapshotSharingOptions>(this.url + '/settings');
  }
|
||||
|
||||
async getSnapshot(uid: string): Promise<DashboardDTO> {
|
||||
|
||||
@@ -5967,6 +5967,9 @@
|
||||
"name-values-separated-comma": "Values separated by comma",
|
||||
"selection-options": "Selection options"
|
||||
},
|
||||
"dashboard-edit-pane-renderer": {
|
||||
"title-feedback-dashboard-editing-experience": "Give feedback on the new dashboard editing experience"
|
||||
},
|
||||
"dashboard-link-form": {
|
||||
"back-to-list": "Back to list",
|
||||
"label-icon": "Icon",
|
||||
|
||||
Reference in New Issue
Block a user