Compare commits

..

8 Commits

Author SHA1 Message Date
Zoltán Bedi
8555bcba5b Add tests for migrations 2025-12-11 21:36:38 +01:00
Zoltán Bedi
4c601944e1 Add docs 2025-12-11 21:17:36 +01:00
Zoltán Bedi
a632f1f26a Extract labels 2025-12-11 15:50:53 +01:00
Zoltán Bedi
4d82e4295f Enhance SelectRow component with improved alias option handling
- Updated alias options for time series and variable queries to prevent duplicates and ensure correct options are displayed.
2025-12-10 12:14:54 +01:00
Zoltán Bedi
a91f64b9f5 Refactor response parser and tests for backwards compatibility
- Updated `transformMetricFindResponse` to handle cases without `__text` and `__value` fields, ensuring all values are treated as text-only entries.
- Adjusted test cases to reflect the new behavior and maintain backwards compatibility.
- Enhanced property handling to skip reserved field names when applicable.
2025-12-09 14:25:40 +01:00
Zoltán Bedi
b0e1ff8073 Enhance SQL Query Editor with variable query support
- Added `isVariableQuery` prop to `SqlQueryEditor`, `SelectRow`, and `VisualEditor` components to handle variable queries.
- Updated alias options in `SelectRow` to include variable query specific options.
- Modified `VariableQueryEditor` to set `isVariableQuery` to true when passing props to `SqlQueryEditorLazy`.
2025-12-09 13:19:42 +01:00
Zoltán Bedi
5c455ec2bc Refactor response parser to enhance metric transformation logic
- Updated the `transformMetricFindResponse` function to handle multiple fields more effectively, ensuring all values are included in the output.
- Introduced helper functions for better code organization and readability.
- Adjusted tests to reflect changes in the transformation logic, ensuring accurate validation of properties and deduplication behavior.
2025-12-09 12:05:31 +01:00
Zoltán Bedi
d5215a5be2 PostgreSQL: Add variable query editor support
- Introduced a new feature toggle for the PostgreSQL variable query editor `postgresVariableQueryEditor`.
2025-12-08 23:18:00 +01:00
117 changed files with 923 additions and 3568 deletions

View File

@@ -133,12 +133,6 @@ type ExportJobOptions struct {
// FIXME: we should validate this in admission hooks
// Prefix in target file system
Path string `json:"path,omitempty"`
// Resources to export
// This option has been created because currently the frontend does not use
// standardized app platform APIs. For performance and API consistency reasons, the preferred option
// is to use the resources.
Resources []ResourceRef `json:"resources,omitempty"`
}
type MigrateJobOptions struct {

View File

@@ -88,11 +88,6 @@ func (in *ErrorDetails) DeepCopy() *ErrorDetails {
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ExportJobOptions) DeepCopyInto(out *ExportJobOptions) {
*out = *in
if in.Resources != nil {
in, out := &in.Resources, &out.Resources
*out = make([]ResourceRef, len(*in))
copy(*out, *in)
}
return
}
@@ -430,7 +425,7 @@ func (in *JobSpec) DeepCopyInto(out *JobSpec) {
if in.Push != nil {
in, out := &in.Push, &out.Push
*out = new(ExportJobOptions)
(*in).DeepCopyInto(*out)
**out = **in
}
if in.Pull != nil {
in, out := &in.Pull, &out.Pull

View File

@@ -258,25 +258,9 @@ func schema_pkg_apis_provisioning_v0alpha1_ExportJobOptions(ref common.Reference
Format: "",
},
},
"resources": {
SchemaProps: spec.SchemaProps{
Description: "Resources to export This option has been created because currently the frontend does not use standarized app platform APIs. For performance and API consistency reasons, the preferred option is it to use the resources.",
Type: []string{"array"},
Items: &spec.SchemaOrArray{
Schema: &spec.Schema{
SchemaProps: spec.SchemaProps{
Default: map[string]interface{}{},
Ref: ref("github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1.ResourceRef"),
},
},
},
},
},
},
},
},
Dependencies: []string{
"github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1.ResourceRef"},
}
}

View File

@@ -1,6 +1,5 @@
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,DeleteJobOptions,Paths
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,DeleteJobOptions,Resources
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,ExportJobOptions,Resources
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,FileList,Items
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,HistoryList,Items
API rule violation: list_type_missing,github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1,JobResourceSummary,Errors

View File

@@ -7,11 +7,10 @@ package v0alpha1
// ExportJobOptionsApplyConfiguration represents a declarative configuration of the ExportJobOptions type for use
// with apply.
type ExportJobOptionsApplyConfiguration struct {
Message *string `json:"message,omitempty"`
Folder *string `json:"folder,omitempty"`
Branch *string `json:"branch,omitempty"`
Path *string `json:"path,omitempty"`
Resources []ResourceRefApplyConfiguration `json:"resources,omitempty"`
Message *string `json:"message,omitempty"`
Folder *string `json:"folder,omitempty"`
Branch *string `json:"branch,omitempty"`
Path *string `json:"path,omitempty"`
}
// ExportJobOptionsApplyConfiguration constructs a declarative configuration of the ExportJobOptions type for use with
@@ -51,16 +50,3 @@ func (b *ExportJobOptionsApplyConfiguration) WithPath(value string) *ExportJobOp
b.Path = &value
return b
}
// WithResources adds the given value to the Resources field in the declarative configuration
// and returns the receiver, so that objects can be built by chaining "With" function invocations.
// If called multiple times, values provided by each call will be appended to the Resources field.
func (b *ExportJobOptionsApplyConfiguration) WithResources(values ...*ResourceRefApplyConfiguration) *ExportJobOptionsApplyConfiguration {
for i := range values {
if values[i] == nil {
panic("nil value passed to WithResources")
}
b.Resources = append(b.Resources, *values[i])
}
return b
}

View File

@@ -7,7 +7,6 @@ import (
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository/git"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
// ValidateJob performs validation on the Job specification and returns an error if validation fails
@@ -100,40 +99,6 @@ func validateExportJobOptions(opts *provisioning.ExportJobOptions) field.ErrorLi
}
}
// Validate resources if specified
if len(opts.Resources) > 0 {
for i, r := range opts.Resources {
resourcePath := field.NewPath("spec", "push", "resources").Index(i)
// Validate required fields
if r.Name == "" {
list = append(list, field.Required(resourcePath.Child("name"), "resource name is required"))
}
if r.Kind == "" {
list = append(list, field.Required(resourcePath.Child("kind"), "resource kind is required"))
}
if r.Group == "" {
list = append(list, field.Required(resourcePath.Child("group"), "resource group is required"))
}
// Validate that folders are not allowed
if r.Kind == resources.FolderKind.Kind || r.Group == resources.FolderResource.Group {
list = append(list, field.Invalid(resourcePath, r, "folders are not supported for export"))
continue // Skip further validation for folders
}
// Validate that only supported resources are allowed
// Currently only Dashboard resources are supported (folders are rejected above)
if r.Kind != "" && r.Group != "" {
// Check if it's a Dashboard resource
isDashboard := r.Group == resources.DashboardResource.Group && r.Kind == "Dashboard"
if !isDashboard {
list = append(list, field.Invalid(resourcePath, r, "resource type is not supported for export"))
}
}
}
}
return list
}

View File

@@ -575,242 +575,6 @@ func TestValidateJob(t *testing.T) {
},
wantErr: false,
},
{
name: "push action with valid dashboard resources",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
{
Name: "dashboard-2",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
Path: "dashboards/",
Message: "Export dashboards",
},
},
},
wantErr: false,
},
{
name: "push action with resource missing name",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0].name")
require.Contains(t, err.Error(), "Required value")
},
},
{
name: "push action with resource missing kind",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Group: "dashboard.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0].kind")
require.Contains(t, err.Error(), "Required value")
},
},
{
name: "push action with resource missing group",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Kind: "Dashboard",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0].group")
require.Contains(t, err.Error(), "Required value")
},
},
{
name: "push action with folder resource by kind",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "my-folder",
Kind: "Folder",
Group: "folder.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0]")
require.Contains(t, err.Error(), "folders are not supported for export")
},
},
{
name: "push action with folder resource by group",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "my-folder",
Kind: "SomeKind",
Group: "folder.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0]")
require.Contains(t, err.Error(), "folders are not supported for export")
},
},
{
name: "push action with unsupported resource type",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "my-resource",
Kind: "AlertRule",
Group: "alerting.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[0]")
require.Contains(t, err.Error(), "resource type is not supported for export")
},
},
{
name: "push action with valid folder (old behavior)",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Folder: "my-folder",
Path: "dashboards/",
Message: "Export folder",
},
},
},
wantErr: false,
},
{
name: "push action with multiple resources including invalid ones",
job: &provisioning.Job{
ObjectMeta: metav1.ObjectMeta{
Name: "test-job",
},
Spec: provisioning.JobSpec{
Action: provisioning.JobActionPush,
Repository: "test-repo",
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: "dashboard-1",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
{
Name: "my-folder",
Kind: "Folder",
Group: "folder.grafana.app",
},
{
Name: "dashboard-2",
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
},
},
wantErr: true,
validateError: func(t *testing.T, err error) {
require.Contains(t, err.Error(), "spec.push.resources[1]")
require.Contains(t, err.Error(), "folders are not supported for export")
},
},
}
for _, tt := range tests {

View File

@@ -288,18 +288,18 @@ func (r *localRepository) calculateFileHash(path string) (string, int64, error)
return hex.EncodeToString(hasher.Sum(nil)), size, nil
}
func (r *localRepository) Create(ctx context.Context, filePath string, ref string, data []byte, comment string) error {
func (r *localRepository) Create(ctx context.Context, filepath string, ref string, data []byte, comment string) error {
if err := r.validateRequest(ref); err != nil {
return err
}
fpath := safepath.Join(r.path, filePath)
fpath := safepath.Join(r.path, filepath)
_, err := os.Stat(fpath)
if !errors.Is(err, os.ErrNotExist) {
if err != nil {
return apierrors.NewInternalError(fmt.Errorf("failed to check if file exists: %w", err))
}
return apierrors.NewAlreadyExists(schema.GroupResource{}, filePath)
return apierrors.NewAlreadyExists(schema.GroupResource{}, filepath)
}
if safepath.IsDir(fpath) {
@@ -314,7 +314,7 @@ func (r *localRepository) Create(ctx context.Context, filePath string, ref strin
return nil
}
if err := os.MkdirAll(filepath.Dir(fpath), 0700); err != nil {
if err := os.MkdirAll(path.Dir(fpath), 0700); err != nil {
return apierrors.NewInternalError(fmt.Errorf("failed to create path: %w", err))
}
@@ -352,7 +352,7 @@ func (r *localRepository) Write(ctx context.Context, fpath, ref string, data []b
return os.MkdirAll(fpath, 0700)
}
if err := os.MkdirAll(filepath.Dir(fpath), 0700); err != nil {
if err := os.MkdirAll(path.Dir(fpath), 0700); err != nil {
return apierrors.NewInternalError(fmt.Errorf("failed to create path: %w", err))
}

View File

@@ -279,7 +279,41 @@ Refer to [Templates](ref:templates) for an introduction to creating template var
If you add a `Query` template variable you can write a PostgreSQL query to retrieve items such as measurement names, key names, or key values, which will be displayed in the drop-down menu.
For example, you can use a variable to retrieve all the values from the `hostname` column in a table by creating the following query in the templating variable _Query_ setting.
The PostgreSQL variable query editor supports both **Builder** and **Code** modes, similar to the standard query editor.
#### Builder mode for variables
{{< admonition type="note" >}}
Builder mode for variable queries is currently behind the `postgresVariableQueryEditor` feature toggle.
{{< /admonition >}}
Builder mode provides a visual interface for creating variable queries. When using Builder mode for variable queries, the **Alias** dropdown includes predefined options `__text` and `__value` to easily create key/value variables.
{{< figure src="/static/img/docs/postgresql-variable-query-editor.png" class="docs-image--no-shadow" caption="PostgreSQL variable query editor in Builder mode" >}}
For example, to create a variable that displays hostnames but uses IDs as values:
1. Select your table from the **Table** dropdown.
2. Add a column for the display text (for example, `hostname`) and set its **Alias** to `__text`.
3. Add another column for the value (for example, `id`) and set its **Alias** to `__value`.
This generates a query equivalent to `SELECT hostname AS __text, id AS __value FROM host`.
#### Multiple properties
When you create a key/value variable with `__text` and `__value`, you can also include additional columns to store extra properties. These additional properties can be accessed using dot notation.
For example, if you have a variable named `server` with columns for `hostname` (as `__text`), `id` (as `__value`), and `region`, you can access the region property using `${server.region}`.
To add multiple properties:
1. Set up your `__text` and `__value` columns as described above.
2. Add additional columns for any extra properties you want to include.
3. Access the properties in your queries or panels using `${variableName.propertyName}`.
#### Code mode for variables
In Code mode, you can write PostgreSQL queries directly. For example, you can use a variable to retrieve all the values from the `hostname` column in a table by creating the following query in the templating variable _Query_ setting.
```sql
SELECT hostname FROM host
@@ -297,7 +331,9 @@ To use time range dependent macros like `$__timeFilter(column)` in your query, y
SELECT event_name FROM event_log WHERE $__timeFilter(time_column)
```
Another option is a query that can create a key/value variable. The query should return two columns that are named `__text` and `__value`. The `__text` column must contain unique values (if not, only the first value is used). This allows the drop-down options to display a text-friendly name as the text while using an ID as the value. For example, a query could use `hostname` as the text and `id` as the value:
Another option is a query that can create a key/value variable. The query should return two columns that are named `__text` and `__value`. The `__text` column must contain unique values (if not, only the first value is used). This allows the drop-down options to display a text-friendly name as the text while using an ID as the value.
You can create key/value variables using Builder mode by selecting the predefined `__text` and `__value` alias options, or write the query directly in Code mode. For example, a query could use `hostname` as the text and `id` as the value:
```sql
SELECT hostname AS __text, id AS __value FROM host

View File

@@ -37,11 +37,6 @@ refs:
destination: /docs/grafana/<GRAFANA_VERSION>/introduction/grafana-enterprise/
- pattern: /docs/grafana-cloud/
destination: /docs/grafana/<GRAFANA_VERSION>/introduction/grafana-enterprise/
fixed-role-definitions:
- pattern: /docs/grafana/
destination: /docs/grafana/<GRAFANA_VERSION>/administration/roles-and-permissions/access-control/rbac-fixed-basic-role-definitions/#fixed-role-definitions
- pattern: /docs/grafana-cloud/
destination: /docs/grafana-cloud/security-and-account-management/authentication-and-permissions/access-control/rbac-fixed-basic-role-definitions/#fixed-role-definitions
---
# Externally shared dashboards
@@ -54,8 +49,6 @@ Externally shared dashboards allow you to share your Grafana dashboard with anyo
If you change a dashboard, ensure that you save the changes before sharing.
In order to create public dashboards, you need to be an Admin, have the `dashboards.public:write` permission, or the `fixed:dashboards.public:write` [RBAC role](ref:fixed-role-definitions).
{{< admonition type="warning" >}}
Sharing your dashboard externally could result in a large number of queries to the data sources used by your dashboard.
This can be mitigated by using the Enterprise [caching](ref:caching) and/or rate limiting features.

View File

@@ -95,7 +95,7 @@ test.describe(
await createNewPanelButton.click();
// Close the data source picker modal
const closeButton = page.getByRole('button', { name: 'Close', exact: true });
const closeButton = page.getByRole('button', { name: 'Close menu' });
await closeButton.click({ force: true });
// Select prom data source from the data source list

View File

@@ -2233,6 +2233,14 @@
"count": 2
}
},
"public/app/features/dashboard/components/ShareModal/ShareExport.tsx": {
"@typescript-eslint/no-explicit-any": {
"count": 1
},
"no-restricted-syntax": {
"count": 1
}
},
"public/app/features/dashboard/components/ShareModal/ShareLink.tsx": {
"no-restricted-syntax": {
"count": 3

View File

@@ -1108,8 +1108,6 @@ export type ExportJobOptions = {
message?: string;
/** FIXME: we should validate this in admission hooks Prefix in target file system */
path?: string;
/** Resources to export. This option has been created because currently the frontend does not use standardized app platform APIs. For performance and API consistency reasons, the preferred option is to use the resources. */
resources?: ResourceRef[];
};
export type JobSpec = {
/** Possible enum values:

View File

@@ -1161,6 +1161,10 @@ export interface FeatureToggles {
*/
jaegerEnableGrpcEndpoint?: boolean;
/**
* Enable the new variable query editor for the PostgreSQL data source
*/
postgresVariableQueryEditor?: boolean;
/**
* Load plugins on store service startup instead of wire provider, and call RegisterFixedRoles after all plugins are loaded
* @default false
*/

View File

@@ -21,6 +21,7 @@
"@grafana/i18n": "12.4.0-pre",
"@grafana/plugin-ui": "^0.11.0",
"@grafana/runtime": "12.4.0-pre",
"@grafana/schema": "12.4.0-pre",
"@grafana/ui": "12.4.0-pre",
"@react-awesome-query-builder/ui": "6.6.15",
"immutable": "5.1.4",

View File

@@ -15,7 +15,8 @@ import { RawEditor } from './query-editor-raw/RawEditor';
import { VisualEditor } from './visual-query-builder/VisualEditor';
export interface SqlQueryEditorProps extends QueryEditorProps<SqlDatasource, SQLQuery, SQLOptions> {
queryHeaderProps?: Pick<QueryHeaderProps, 'dialect'>;
queryHeaderProps?: Pick<QueryHeaderProps, 'dialect' | 'hideRunButton' | 'hideFormatSelector'>;
isVariableQuery?: boolean;
}
export default function SqlQueryEditor({
@@ -25,6 +26,7 @@ export default function SqlQueryEditor({
onRunQuery,
range,
queryHeaderProps,
isVariableQuery = false,
}: SqlQueryEditorProps) {
const [isQueryRunnable, setIsQueryRunnable] = useState(true);
const db = datasource.getDB();
@@ -99,6 +101,8 @@ export default function SqlQueryEditor({
query={queryWithDefaults}
isQueryRunnable={isQueryRunnable}
dialect={dialect}
hideRunButton={queryHeaderProps?.hideRunButton}
hideFormatSelector={queryHeaderProps?.hideFormatSelector}
/>
<Space v={0.5} />
@@ -111,6 +115,7 @@ export default function SqlQueryEditor({
queryRowFilter={queryRowFilter}
onValidate={setIsQueryRunnable}
range={range}
isVariableQuery={isVariableQuery}
/>
)}

View File

@@ -25,6 +25,8 @@ export interface QueryHeaderProps {
preconfiguredDataset: string;
query: QueryWithDefaults;
queryRowFilter: QueryRowFilter;
hideRunButton?: boolean;
hideFormatSelector?: boolean;
}
export function QueryHeader({
@@ -37,6 +39,8 @@ export function QueryHeader({
preconfiguredDataset,
query,
queryRowFilter,
hideRunButton,
hideFormatSelector,
}: QueryHeaderProps) {
const { editorMode } = query;
const [_, copyToClipboard] = useCopyToClipboard();
@@ -123,14 +127,16 @@ export function QueryHeader({
return (
<>
<EditorHeader>
<InlineSelect
label={t('grafana-sql.components.query-header.label-format', 'Format')}
value={query.format}
placeholder={t('grafana-sql.components.query-header.placeholder-select-format', 'Select format')}
menuShouldPortal
onChange={onFormatChange}
options={QUERY_FORMAT_OPTIONS}
/>
{!hideFormatSelector && (
<InlineSelect
label={t('grafana-sql.components.query-header.label-format', 'Format')}
value={query.format}
placeholder={t('grafana-sql.components.query-header.placeholder-select-format', 'Select format')}
menuShouldPortal
onChange={onFormatChange}
options={QUERY_FORMAT_OPTIONS}
/>
)}
{editorMode === EditorMode.Builder && (
<>
@@ -222,26 +228,27 @@ export function QueryHeader({
<FlexItem grow={1} />
{isQueryRunnable ? (
<Button icon="play" variant="primary" size="sm" onClick={() => onRunQuery()}>
<Trans i18nKey="grafana-sql.components.query-header.run-query">Run query</Trans>
</Button>
) : (
<Tooltip
theme="error"
content={
<Trans i18nKey="grafana-sql.components.query-header.content-invalid-query">
Your query is invalid. Check below for details. <br />
However, you can still run this query.
</Trans>
}
placement="top"
>
<Button icon="exclamation-triangle" variant="secondary" size="sm" onClick={() => onRunQuery()}>
{!hideRunButton &&
(isQueryRunnable ? (
<Button icon="play" variant="primary" size="sm" onClick={() => onRunQuery()}>
<Trans i18nKey="grafana-sql.components.query-header.run-query">Run query</Trans>
</Button>
</Tooltip>
)}
) : (
<Tooltip
theme="error"
content={
<Trans i18nKey="grafana-sql.components.query-header.content-invalid-query">
Your query is invalid. Check below for details. <br />
However, you can still run this query.
</Trans>
}
placement="top"
>
<Button icon="exclamation-triangle" variant="secondary" size="sm" onClick={() => onRunQuery()}>
<Trans i18nKey="grafana-sql.components.query-header.run-query">Run query</Trans>
</Button>
</Tooltip>
))}
<RadioButtonGroup options={editorModes} size="sm" value={editorMode} onChange={onEditorModeChange} />

View File

@@ -1,6 +1,6 @@
import { css } from '@emotion/css';
import { uniqueId } from 'lodash';
import { useCallback } from 'react';
import { useCallback, useMemo } from 'react';
import { SelectableValue, toOption } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
@@ -20,19 +20,56 @@ interface SelectRowProps {
onQueryChange: (sql: SQLQuery) => void;
db: DB;
columns: Array<SelectableValue<string>>;
isVariableQuery?: boolean;
}
export function SelectRow({ query, onQueryChange, db, columns }: SelectRowProps) {
export function SelectRow({ query, onQueryChange, db, columns, isVariableQuery }: SelectRowProps) {
const styles = useStyles2(getStyles);
const { onSqlChange } = useSqlChange({ query, onQueryChange, db });
const timeSeriesAliasOpts: Array<SelectableValue<string>> = [];
// Add necessary alias options for time series format
// when that format has been selected
if (query.format === QueryFormat.Timeseries) {
timeSeriesAliasOpts.push({ label: t('grafana-sql.components.select-row.label.time', 'time'), value: 'time' });
timeSeriesAliasOpts.push({ label: t('grafana-sql.components.select-row.label.value', 'value'), value: 'value' });
}
// Get currently used aliases from all columns
const usedAliases = useMemo(() => {
const aliases = new Set<string>();
query.sql?.columns?.forEach((col) => {
if (col.alias) {
// Remove quotes from alias
const cleanAlias = col.alias.replace(/"/g, '');
aliases.add(cleanAlias);
}
});
return aliases;
}, [query.sql?.columns]);
// Function to get available alias options for a specific column
const getAliasOptions = useCallback(
(currentAlias?: string): Array<SelectableValue<string>> => {
const aliasOpts: Array<SelectableValue<string>> = [];
const cleanCurrentAlias = currentAlias?.replace(/"/g, '');
// Add necessary alias options for time series format
if (query.format === QueryFormat.Timeseries) {
if (!usedAliases.has('time') || cleanCurrentAlias === 'time') {
aliasOpts.push({ label: t('grafana-sql.components.select-row.label.time', 'time'), value: 'time' });
}
if (!usedAliases.has('value') || cleanCurrentAlias === 'value') {
aliasOpts.push({ label: t('grafana-sql.components.select-row.label.value', 'value'), value: 'value' });
}
}
// Add variable query alias options for __text and __value
if (isVariableQuery) {
if (!usedAliases.has('__text') || cleanCurrentAlias === '__text') {
aliasOpts.push({ label: t('grafana-sql.components.select-row.label.__text', '__text'), value: '__text' });
}
if (!usedAliases.has('__value') || cleanCurrentAlias === '__value') {
aliasOpts.push({ label: t('grafana-sql.components.select-row.label.__value', '__value'), value: '__value' });
}
}
return aliasOpts;
},
[query.format, isVariableQuery, usedAliases]
);
const onAggregationChange = useCallback(
(item: QueryEditorFunctionExpression, index: number) => (aggregation: SelectableValue<string>) => {
@@ -145,7 +182,7 @@ export function SelectRow({ query, onQueryChange, db, columns }: SelectRowProps)
value={item.alias ? toOption(item.alias) : null}
inputId={`select-alias-${index}-${uniqueId()}`}
data-testid={selectors.components.SQLQueryEditor.selectAlias}
options={timeSeriesAliasOpts}
options={getAliasOptions(item.alias)}
onChange={onAliasChange(item, index)}
isClearable
menuShouldPortal

View File

@@ -16,9 +16,18 @@ interface VisualEditorProps extends QueryEditorProps {
db: DB;
queryRowFilter: QueryRowFilter;
onValidate: (isValid: boolean) => void;
isVariableQuery?: boolean;
}
export const VisualEditor = ({ query, db, queryRowFilter, onChange, onValidate, range }: VisualEditorProps) => {
export const VisualEditor = ({
query,
db,
queryRowFilter,
onChange,
onValidate,
range,
isVariableQuery,
}: VisualEditorProps) => {
const state = useAsync(async () => {
const fields = await db.fields(query);
return fields;
@@ -28,7 +37,13 @@ export const VisualEditor = ({ query, db, queryRowFilter, onChange, onValidate,
<>
<EditorRows>
<EditorRow>
<SelectRow columns={state.value || []} query={query} onQueryChange={onChange} db={db} />
<SelectRow
columns={state.value || []}
query={query}
onQueryChange={onChange}
db={db}
isVariableQuery={isVariableQuery}
/>
</EditorRow>
{queryRowFilter.filter && (
<EditorRow>

View File

@@ -2,20 +2,19 @@ import { lastValueFrom, Observable, throwError } from 'rxjs';
import { map } from 'rxjs/operators';
import {
getDefaultTimeRange,
CoreApp,
DataFrame,
DataFrameView,
DataQuery,
DataQueryRequest,
DataQueryResponse,
DataSourceInstanceSettings,
MetricFindValue,
ScopedVars,
CoreApp,
getDefaultTimeRange,
getSearchFilterScopedVar,
LegacyMetricFindQueryOptions,
VariableWithMultiSupport,
MetricFindValue,
ScopedVars,
TimeRange,
VariableWithMultiSupport,
} from '@grafana/data';
import { EditorMode } from '@grafana/plugin-ui';
import {
@@ -24,15 +23,16 @@ import {
FetchResponse,
getBackendSrv,
getTemplateSrv,
toDataQueryResponse,
TemplateSrv,
reportInteraction,
TemplateSrv,
toDataQueryResponse,
} from '@grafana/runtime';
import { DataQuery } from '@grafana/schema';
import { ResponseParser } from '../ResponseParser';
import { SqlQueryEditorLazy } from '../components/QueryEditorLazy';
import { MACRO_NAMES } from '../constants';
import { DB, SQLQuery, SQLOptions, SqlQueryModel, QueryFormat } from '../types';
import { DB, QueryFormat, SQLOptions, SQLQuery, SqlQueryModel } from '../types';
import migrateAnnotation from '../utils/migration';
export abstract class SqlDatasource extends DataSourceWithBackend<SQLQuery, SQLOptions> {
@@ -182,7 +182,7 @@ export abstract class SqlDatasource extends DataSourceWithBackend<SQLQuery, SQLO
return;
}
async metricFindQuery(query: string, options?: LegacyMetricFindQueryOptions): Promise<MetricFindValue[]> {
async metricFindQuery(query: SQLQuery | string, options?: LegacyMetricFindQueryOptions): Promise<MetricFindValue[]> {
const range = options?.range;
if (range == null) {
// We cannot construct a scenario where this happens; it is handled just to be safe.
@@ -194,12 +194,17 @@ export abstract class SqlDatasource extends DataSourceWithBackend<SQLQuery, SQLO
refId = options.variable.name;
}
const queryString = typeof query === 'string' ? query : query.rawSql;
if (!queryString) {
return [];
}
const scopedVars = {
...options?.scopedVars,
...getSearchFilterScopedVar({ query, wildcardChar: '%', options }),
...getSearchFilterScopedVar({ query: queryString, wildcardChar: '%', options }),
};
const rawSql = this.templateSrv.replace(query, scopedVars, this.interpolateVariable);
const rawSql = this.templateSrv.replace(queryString, scopedVars, this.interpolateVariable);
const interpolatedQuery: SQLQuery = {
refId: refId,

View File

@@ -107,6 +107,8 @@
}
},
"label": {
"__text": "__text",
"__value": "__value",
"time": "time",
"value": "value"
},

View File

@@ -206,7 +206,7 @@ const getStyles = (theme: GrafanaTheme2) => {
background: 'transparent',
border: `1px solid transparent`,
'&:hover': {
'&:hover, &:focus': {
color: theme.colors.text.primary,
background: theme.colors.action.hover,
},

View File

@@ -64,6 +64,10 @@ func NewAPIBuilder(providerType string, url *url.URL, insecure bool, caFile stri
}
func RegisterAPIService(apiregistration builder.APIRegistrar, cfg *setting.Cfg) (*APIBuilder, error) {
if !cfg.OpenFeature.APIEnabled {
return nil, nil
}
var staticEvaluator featuremgmt.StaticFlagEvaluator // No static evaluator needed for non-static provider
var err error
if cfg.OpenFeature.ProviderType == setting.StaticProviderType {

View File

@@ -13,7 +13,6 @@ import (
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/apps/provisioning/pkg/safepath"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
@@ -24,58 +23,8 @@ import (
// The response status indicates the original stored version, so we can then request it in an un-converted form
type conversionShim = func(ctx context.Context, item *unstructured.Unstructured) (*unstructured.Unstructured, error)
// createDashboardConversionShim creates a conversion shim for dashboards that preserves the original API version.
// It uses a provided versionClients cache to allow sharing across multiple shim calls.
//
// The first parameter is intentionally unused: the returned shim receives its own
// context on every invocation, and the previous parameter name was silently
// shadowed by it. It is kept (as _) so existing call sites stay source-compatible.
func createDashboardConversionShim(_ context.Context, clients resources.ResourceClients, gvr schema.GroupVersionResource, versionClients map[string]dynamic.ResourceInterface) conversionShim {
	return func(ctx context.Context, item *unstructured.Unstructured) (*unstructured.Unstructured, error) {
		// Check if there's a stored version in the conversion status.
		// This indicates the original API version the dashboard was created with,
		// which should be preserved during export regardless of whether conversion
		// succeeded or failed.
		storedVersion, _, _ := unstructured.NestedString(item.Object, "status", "conversion", "storedVersion")
		if storedVersion != "" {
			// For v0 we can simply fallback -- the full model is saved
			if strings.HasPrefix(storedVersion, "v0") {
				item.SetAPIVersion(fmt.Sprintf("%s/%s", gvr.Group, storedVersion))
				return item, nil
			}
			// For any other version (v1, v2, v3, etc.), fetch the original version via client.
			// Check if we already have a client cached for this version.
			versionClient, ok := versionClients[storedVersion]
			if !ok {
				// Dynamically construct the GroupVersionResource for any version
				versionGVR := schema.GroupVersionResource{
					Group:    gvr.Group,
					Version:  storedVersion,
					Resource: gvr.Resource,
				}
				var err error
				versionClient, _, err = clients.ForResource(ctx, versionGVR)
				if err != nil {
					return nil, fmt.Errorf("get client for version %s: %w", storedVersion, err)
				}
				versionClients[storedVersion] = versionClient
			}
			return versionClient.Get(ctx, item.GetName(), metav1.GetOptions{})
		}
		// If conversion failed but there's no storedVersion, this is an error condition
		failed, _, _ := unstructured.NestedBool(item.Object, "status", "conversion", "failed")
		if failed {
			return nil, fmt.Errorf("conversion failed but no storedVersion available")
		}
		return item, nil
	}
}
func ExportResources(ctx context.Context, options provisioning.ExportJobOptions, clients resources.ResourceClients, repositoryResources resources.RepositoryResources, progress jobs.JobProgressRecorder) error {
progress.SetMessage(ctx, "start resource export")
// Create a shared versionClients map for dashboard conversion caching
versionClients := make(map[string]dynamic.ResourceInterface)
for _, kind := range resources.SupportedProvisioningResources {
// skip from folders as we do them first... so only dashboards
if kind == resources.FolderResource {
@@ -89,10 +38,50 @@ func ExportResources(ctx context.Context, options provisioning.ExportJobOptions,
}
// When requesting dashboards over the v1 api, we want to keep the original apiVersion if conversion fails
// Always use the cache version to share clients across all dashboard exports
var shim conversionShim
if kind.GroupResource() == resources.DashboardResource.GroupResource() {
shim = createDashboardConversionShim(ctx, clients, kind, versionClients)
// Cache clients for different versions
versionClients := make(map[string]dynamic.ResourceInterface)
shim = func(ctx context.Context, item *unstructured.Unstructured) (*unstructured.Unstructured, error) {
// Check if there's a stored version in the conversion status.
// This indicates the original API version the dashboard was created with,
// which should be preserved during export regardless of whether conversion succeeded or failed.
storedVersion, _, _ := unstructured.NestedString(item.Object, "status", "conversion", "storedVersion")
if storedVersion != "" {
// For v0 we can simply fallback -- the full model is saved
if strings.HasPrefix(storedVersion, "v0") {
item.SetAPIVersion(fmt.Sprintf("%s/%s", kind.Group, storedVersion))
return item, nil
}
// For any other version (v1, v2, v3, etc.), fetch the original version via client
// Check if we already have a client cached for this version
versionClient, ok := versionClients[storedVersion]
if !ok {
// Dynamically construct the GroupVersionResource for any version
versionGVR := schema.GroupVersionResource{
Group: kind.Group,
Version: storedVersion,
Resource: kind.Resource,
}
var err error
versionClient, _, err = clients.ForResource(ctx, versionGVR)
if err != nil {
return nil, fmt.Errorf("get client for version %s: %w", storedVersion, err)
}
versionClients[storedVersion] = versionClient
}
return versionClient.Get(ctx, item.GetName(), metav1.GetOptions{})
}
// If conversion failed but there's no storedVersion, this is an error condition
failed, _, _ := unstructured.NestedBool(item.Object, "status", "conversion", "failed")
if failed {
return nil, fmt.Errorf("conversion failed but no storedVersion available")
}
return item, nil
}
}
if err := exportResource(ctx, kind.Resource, options, client, shim, repositoryResources, progress); err != nil {
@@ -103,320 +92,6 @@ func ExportResources(ctx context.Context, options provisioning.ExportJobOptions,
return nil
}
// ExportSpecificResources exports the resources named in options.Resources.
// Each entry is validated (folders, unsupported kinds, and managed resources
// are rejected per item) before being fetched and written out.
// Note: the caller must validate that the repository has a folder sync target
// before calling this function.
func ExportSpecificResources(ctx context.Context, repoName string, options provisioning.ExportJobOptions, clients resources.ResourceClients, repositoryResources resources.RepositoryResources, progress jobs.JobProgressRecorder) error {
	if len(options.Resources) == 0 {
		return errors.New("no resources specified for export")
	}
	progress.SetMessage(ctx, "exporting specific resources")

	folderTree, err := loadUnmanagedFolderTree(ctx, clients, progress)
	if err != nil {
		return err
	}

	// A single shim and client cache are shared across every dashboard in the
	// list, so version-specific clients are only constructed once.
	var shim conversionShim
	clientCache := make(map[string]dynamic.ResourceInterface)
	for _, ref := range options.Resources {
		if err := exportSingleResource(ctx, ref, options, clients, repositoryResources, folderTree, &shim, clientCache, progress); err != nil {
			return err
		}
	}
	return nil
}
// loadUnmanagedFolderTree builds a tree of every folder that has no manager,
// so folder paths can be resolved for resources during export.
func loadUnmanagedFolderTree(ctx context.Context, clients resources.ResourceClients, progress jobs.JobProgressRecorder) (resources.FolderTree, error) {
	progress.SetMessage(ctx, "loading folder tree from API server")
	folderClient, err := clients.Folder(ctx)
	if err != nil {
		return nil, fmt.Errorf("get folder client: %w", err)
	}

	tree := resources.NewEmptyFolderTree()
	collect := func(item *unstructured.Unstructured) error {
		if tree.Count() >= resources.MaxNumberOfFolders {
			return errors.New("too many folders")
		}
		meta, err := utils.MetaAccessor(item)
		if err != nil {
			return fmt.Errorf("extract meta accessor: %w", err)
		}
		// Folders already owned by any manager (repository, file provisioning,
		// etc.) are excluded from the tree.
		if owner, _ := meta.GetManagerProperties(); owner.Identity != "" {
			return nil
		}
		return tree.AddUnstructured(item)
	}
	if err := resources.ForEach(ctx, folderClient, collect); err != nil {
		return nil, fmt.Errorf("load folder tree: %w", err)
	}
	return tree, nil
}
// exportSingleResource exports a single resource, handling validation, fetching, conversion, and writing.
//
// Error convention: a non-nil return value aborts the whole job (it always
// originates from progress.TooManyErrors()); setting result.Error and
// returning nil records this resource as failed while letting the caller
// continue with the remaining resources.
func exportSingleResource(
	ctx context.Context,
	resourceRef provisioning.ResourceRef,
	options provisioning.ExportJobOptions,
	clients resources.ResourceClients,
	repositoryResources resources.RepositoryResources,
	tree resources.FolderTree,
	dashboardShim *conversionShim,
	versionClients map[string]dynamic.ResourceInterface,
	progress jobs.JobProgressRecorder,
) error {
	// Assume success until a step below records otherwise.
	result := jobs.JobResourceResult{
		Name:   resourceRef.Name,
		Group:  resourceRef.Group,
		Kind:   resourceRef.Kind,
		Action: repository.FileActionCreated,
	}

	gvk := schema.GroupVersionKind{
		Group: resourceRef.Group,
		Kind:  resourceRef.Kind,
		// Version is left empty so ForKind will use the preferred version
	}

	// Validate resource reference
	if err := validateResourceRef(gvk, &result, progress, ctx); err != nil {
		return err
	}
	if result.Error != nil {
		// Validation failed, but we continue processing other resources
		return nil
	}

	// Get client and fetch resource
	progress.SetMessage(ctx, fmt.Sprintf("Fetching resource %s/%s/%s", resourceRef.Group, resourceRef.Kind, resourceRef.Name))
	client, gvr, err := clients.ForKind(ctx, gvk)
	if err != nil {
		result.Error = fmt.Errorf("get client for %s/%s/%s: %w", resourceRef.Group, resourceRef.Kind, resourceRef.Name, err)
		progress.Record(ctx, result)
		return progress.TooManyErrors()
	}

	// Validate resource type is supported
	if err := validateResourceType(gvr, &result, progress, ctx); err != nil {
		return err
	}
	if result.Error != nil {
		return nil
	}

	// Fetch and validate the resource (must exist and be unmanaged)
	item, meta, err := fetchAndValidateResource(ctx, client, resourceRef, gvr, &result, progress)
	if err != nil {
		return err
	}
	if result.Error != nil {
		return nil
	}

	// Convert dashboard if needed (restores the originally stored API version)
	item, meta, err = convertDashboardIfNeeded(ctx, gvr, item, meta, clients, dashboardShim, versionClients, resourceRef, &result, progress)
	if err != nil {
		return err
	}
	if result.Error != nil {
		return nil
	}

	// Compute export path from folder tree
	exportPath := computeExportPath(options.Path, meta, tree)

	// Export the resource
	return writeResourceToRepository(ctx, item, meta, exportPath, options.Branch, repositoryResources, resourceRef, &result, progress)
}
// validateResourceRef rejects folder references; anything else passes.
// On rejection the result is recorded as ignored and the job's error budget
// is consulted via TooManyErrors.
func validateResourceRef(gvk schema.GroupVersionKind, result *jobs.JobResourceResult, progress jobs.JobProgressRecorder, ctx context.Context) error {
	isFolder := gvk.Kind == resources.FolderKind.Kind || gvk.Group == resources.FolderResource.Group
	if !isFolder {
		return nil
	}
	result.Action = repository.FileActionIgnored
	result.Error = fmt.Errorf("folders are not supported for export")
	progress.Record(ctx, *result)
	return progress.TooManyErrors()
}
// validateResourceType checks that the group/resource pair is one of the
// supported provisioning resources; unsupported types are recorded as ignored.
func validateResourceType(gvr schema.GroupVersionResource, result *jobs.JobResourceResult, progress jobs.JobProgressRecorder, ctx context.Context) error {
	// Accept as soon as we find a match in the supported list.
	for _, supported := range resources.SupportedProvisioningResources {
		if supported.Group == gvr.Group && supported.Resource == gvr.Resource {
			return nil
		}
	}
	result.Action = repository.FileActionIgnored
	result.Error = fmt.Errorf("resource type %s/%s is not supported for export", gvr.Group, gvr.Resource)
	progress.Record(ctx, *result)
	return progress.TooManyErrors()
}
// fetchAndValidateResource retrieves a resource from the API server and
// verifies that it is not owned by any manager. Failures are recorded on the
// result and surfaced through the progress recorder's error budget.
func fetchAndValidateResource(
	ctx context.Context,
	client dynamic.ResourceInterface,
	resourceRef provisioning.ResourceRef,
	gvr schema.GroupVersionResource,
	result *jobs.JobResourceResult,
	progress jobs.JobProgressRecorder,
) (*unstructured.Unstructured, utils.GrafanaMetaAccessor, error) {
	// fail records the current result and returns the budget decision.
	fail := func() error {
		progress.Record(ctx, *result)
		return progress.TooManyErrors()
	}

	obj, err := client.Get(ctx, resourceRef.Name, metav1.GetOptions{})
	if err != nil {
		result.Error = fmt.Errorf("get resource %s/%s/%s: %w", resourceRef.Group, resourceRef.Kind, resourceRef.Name, err)
		return nil, nil, fail()
	}

	accessor, err := utils.MetaAccessor(obj)
	if err != nil {
		result.Action = repository.FileActionIgnored
		result.Error = fmt.Errorf("extracting meta accessor for resource %s: %w", result.Name, err)
		return nil, nil, fail()
	}

	// Reject if already managed by any manager (repository, file provisioning, etc.)
	if owner, _ := accessor.GetManagerProperties(); owner.Identity != "" {
		result.Action = repository.FileActionIgnored
		result.Error = fmt.Errorf("resource %s/%s/%s is managed and cannot be exported", resourceRef.Group, resourceRef.Kind, resourceRef.Name)
		return nil, nil, fail()
	}

	return obj, accessor, nil
}
// convertDashboardIfNeeded converts a dashboard to its original API version if needed.
// Returns the potentially updated item and meta accessor.
// Non-dashboard resources pass through unchanged. The conversion shim is
// created lazily on the first dashboard and cached behind the dashboardShim
// pointer, so later calls reuse it together with the shared versionClients map.
func convertDashboardIfNeeded(
	ctx context.Context,
	gvr schema.GroupVersionResource,
	item *unstructured.Unstructured,
	meta utils.GrafanaMetaAccessor,
	clients resources.ResourceClients,
	dashboardShim *conversionShim,
	versionClients map[string]dynamic.ResourceInterface,
	resourceRef provisioning.ResourceRef,
	result *jobs.JobResourceResult,
	progress jobs.JobProgressRecorder,
) (*unstructured.Unstructured, utils.GrafanaMetaAccessor, error) {
	// Only dashboards need conversion handling.
	if gvr.GroupResource() != resources.DashboardResource.GroupResource() {
		return item, meta, nil
	}

	// Create or reuse the dashboard shim (shared across all dashboard resources)
	// Pass the shared versionClients map to ensure client caching works correctly
	if *dashboardShim == nil {
		*dashboardShim = createDashboardConversionShim(ctx, clients, gvr, versionClients)
	}

	var err error
	item, err = (*dashboardShim)(ctx, item)
	if err != nil {
		result.Error = fmt.Errorf("converting dashboard %s/%s/%s: %w", resourceRef.Group, resourceRef.Kind, resourceRef.Name, err)
		progress.Record(ctx, *result)
		return nil, nil, progress.TooManyErrors()
	}

	// Re-extract meta after shim conversion in case the item changed
	meta, err = utils.MetaAccessor(item)
	if err != nil {
		result.Action = repository.FileActionIgnored
		result.Error = fmt.Errorf("extracting meta accessor after conversion for resource %s: %w", result.Name, err)
		progress.Record(ctx, *result)
		return nil, nil, progress.TooManyErrors()
	}

	return item, meta, nil
}
// computeExportPath joins the base export path with the resource's folder
// path resolved from the unmanaged folder tree. When the folder is unknown or
// has no path, the base path is returned unchanged.
func computeExportPath(basePath string, meta utils.GrafanaMetaAccessor, tree resources.FolderTree) string {
	folder := meta.GetFolder()
	if folder == "" {
		return basePath
	}

	// rootFolder is the empty string for the unmanaged tree. A miss here
	// shouldn't happen for unmanaged folders, but if it does we simply fall
	// back to the base path.
	fid, ok := tree.DirPath(folder, "")
	if !ok || fid.Path == "" {
		return basePath
	}
	if basePath == "" {
		return fid.Path
	}
	return safepath.Join(basePath, fid.Path)
}
// writeResourceToRepository writes a resource to the repository.
// A write that reports resources.ErrAlreadyInRepository is treated as a
// clean skip (action ignored, no error); any other write failure is recorded
// on the result. The return value is the progress recorder's error-budget
// decision, not the write error itself.
func writeResourceToRepository(
	ctx context.Context,
	item *unstructured.Unstructured,
	meta utils.GrafanaMetaAccessor,
	exportPath string,
	branch string,
	repositoryResources resources.RepositoryResources,
	resourceRef provisioning.ResourceRef,
	result *jobs.JobResourceResult,
	progress jobs.JobProgressRecorder,
) error {
	// Export the resource
	progress.SetMessage(ctx, fmt.Sprintf("Exporting resource %s/%s/%s", resourceRef.Group, resourceRef.Kind, resourceRef.Name))

	var err error
	// exportPath already includes the folder structure from the unmanaged tree.
	// We need to clear the folder metadata so WriteResourceFileFromObject doesn't try to resolve
	// folder paths from repository tree (which doesn't have unmanaged folders).
	// When folder is empty, WriteResourceFileFromObject will use rootFolder logic:
	// - For instance targets: rootFolder is empty, so fid.Path will be empty, and it will use exportPath directly
	// - For folder targets: rootFolder is repo name, but fid.Path will still be empty, so it will use exportPath directly
	originalFolder := meta.GetFolder()
	if originalFolder != "" {
		meta.SetFolder("")
		// Restore the in-memory folder metadata after the write so the caller
		// still sees the object unchanged.
		defer func() {
			meta.SetFolder(originalFolder)
		}()
	}

	result.Path, err = repositoryResources.WriteResourceFileFromObject(ctx, item, resources.WriteOptions{
		Path: exportPath, // Path already includes folder structure from unmanaged tree
		Ref:  branch,
	})

	if errors.Is(err, resources.ErrAlreadyInRepository) {
		result.Action = repository.FileActionIgnored
	} else if err != nil {
		result.Action = repository.FileActionIgnored
		result.Error = fmt.Errorf("writing resource file for %s: %w", result.Name, err)
	}

	progress.Record(ctx, *result)
	return progress.TooManyErrors()
}
func exportResource(ctx context.Context,
resource string,
options provisioning.ExportJobOptions,

View File

@@ -1,340 +0,0 @@
package export
import (
"context"
"fmt"
"testing"
"github.com/grafana/grafana/pkg/apimachinery/utils"
mock "github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
"k8s.io/apimachinery/pkg/runtime/schema"
provisioningV0 "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/apps/provisioning/pkg/repository"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/jobs"
"github.com/grafana/grafana/pkg/registry/apis/provisioning/resources"
)
// createFolder builds an unstructured Folder object whose metadata.name
// carries the Grafana UID; parentUID, when non-empty, is applied via the
// meta accessor's folder setter.
func createFolder(grafanaUID, k8sUID, title, parentUID string) unstructured.Unstructured {
	obj := unstructured.Unstructured{
		Object: map[string]interface{}{
			"kind":       "Folder",
			"apiVersion": resources.FolderResource.GroupVersion().String(),
			"metadata": map[string]interface{}{
				"name": grafanaUID, // the Grafana UID lives in metadata.name
				"uid":  k8sUID,
			},
			"spec": map[string]interface{}{
				"title": title,
			},
		},
	}
	if parentUID != "" {
		meta, _ := utils.MetaAccessor(&obj)
		meta.SetFolder(parentUID)
	}
	return obj
}
// createDashboardWithFolder builds a dashboard object and, when folderUID is
// non-empty, places it in that folder via the meta accessor.
func createDashboardWithFolder(name, folderUID string) unstructured.Unstructured {
	dash := createDashboardObject(name)
	if folderUID == "" {
		return dash
	}
	meta, _ := utils.MetaAccessor(&dash)
	meta.SetFolder(folderUID)
	return dash
}
// TestExportSpecificResources exercises ExportSpecificResources through a
// table of scenarios using mocked clients: successful exports with folder
// path resolution, per-item rejections (folders, managed, unsupported),
// client failures, dashboard version conversion, and the error-budget abort.
func TestExportSpecificResources(t *testing.T) {
	tests := []struct {
		name          string
		setupMocks    func(t *testing.T) (resourceClients *resources.MockResourceClients, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder)
		options       provisioningV0.ExportJobOptions
		wantErr       string
		assertResults func(t *testing.T, resourceClients *resources.MockResourceClients, repoResources *resources.MockRepositoryResources, progress *jobs.MockJobProgressRecorder)
	}{
		{
			// Two dashboards: one inside a folder (path includes the folder),
			// one at the root (path is just the base).
			name: "success with folder paths",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				folder := createFolder("team-a-uid", "k8s-1", "team-a", "")
				dashboard1 := createDashboardWithFolder("dashboard-1", "team-a-uid")
				dashboard2 := createDashboardObject("dashboard-2")

				resourceClients := resources.NewMockResourceClients(t)
				folderClient := &mockDynamicInterface{items: []unstructured.Unstructured{folder}}
				resourceClients.On("Folder", mock.Anything).Return(folderClient, nil)

				gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard1}}, resources.DashboardResource, nil).Once()
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard2}}, resources.DashboardResource, nil).Once()

				repoResources := resources.NewMockRepositoryResources(t)
				repoResources.On("WriteResourceFileFromObject", mock.Anything,
					mock.MatchedBy(func(obj *unstructured.Unstructured) bool { return obj.GetName() == "dashboard-1" }),
					mock.MatchedBy(func(opts resources.WriteOptions) bool { return opts.Path == "grafana/team-a" })).
					Return("grafana/team-a/dashboard-1.json", nil)
				repoResources.On("WriteResourceFileFromObject", mock.Anything,
					mock.MatchedBy(func(obj *unstructured.Unstructured) bool { return obj.GetName() == "dashboard-2" }),
					mock.MatchedBy(func(opts resources.WriteOptions) bool { return opts.Path == "grafana" })).
					Return("grafana/dashboard-2.json", nil)

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "dashboard-1" && r.Action == repository.FileActionCreated
				})).Return()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "dashboard-2" && r.Action == repository.FileActionCreated
				})).Return()
				progress.On("TooManyErrors").Return(nil).Times(2)

				return resourceClients, repoResources, progress
			},
			options: provisioningV0.ExportJobOptions{
				Path:   "grafana",
				Branch: "feature/branch",
				Resources: []provisioningV0.ResourceRef{
					{Name: "dashboard-1", Kind: "Dashboard", Group: resources.DashboardResource.Group},
					{Name: "dashboard-2", Kind: "Dashboard", Group: resources.DashboardResource.Group},
				},
			},
		},
		{
			// No Resources entries at all -> hard error before any mock is used.
			name: "empty resources returns error",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				return nil, nil, nil
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{},
			},
			wantErr: "no resources specified for export",
		},
		{
			name: "rejects folders",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "my-folder" && r.Error != nil && r.Error.Error() == "folders are not supported for export"
				})).Return()
				progress.On("TooManyErrors").Return(nil)

				return resourceClients, nil, progress
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{{Name: "my-folder", Kind: "Folder", Group: resources.FolderResource.Group}},
			},
		},
		{
			// A resource already owned by a manager must not be exported.
			name: "rejects managed resources",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				dashboard := createDashboardObject("managed-dashboard")
				meta, _ := utils.MetaAccessor(&dashboard)
				meta.SetManagerProperties(utils.ManagerProperties{Kind: utils.ManagerKindRepo, Identity: "some-repo"})

				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)

				gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard}}, resources.DashboardResource, nil)

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "managed-dashboard" && r.Error != nil && r.Error.Error() == "resource dashboard.grafana.app/Dashboard/managed-dashboard is managed and cannot be exported"
				})).Return()
				progress.On("TooManyErrors").Return(nil)

				return resourceClients, nil, progress
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{{Name: "managed-dashboard", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
			},
		},
		{
			name: "rejects unsupported resources",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)

				gvk := schema.GroupVersionKind{Group: "playlist.grafana.app", Kind: "Playlist"}
				gvr := schema.GroupVersionResource{Group: "playlist.grafana.app", Resource: "playlists"}
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{}, gvr, nil)

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "some-resource" && r.Error != nil && r.Error.Error() == "resource type playlist.grafana.app/playlists is not supported for export"
				})).Return()
				progress.On("TooManyErrors").Return(nil)

				return resourceClients, nil, progress
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{{Name: "some-resource", Kind: "Playlist", Group: "playlist.grafana.app"}},
			},
		},
		{
			// Parent + child folders -> export path includes both segments.
			name: "resolves nested folder paths",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				parentFolder := createFolder("team-a-uid", "k8s-1", "team-a", "")
				childFolder := createFolder("subteam-uid", "k8s-2", "subteam", "team-a-uid")
				dashboard := createDashboardWithFolder("dashboard-in-nested-folder", "subteam-uid")

				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{items: []unstructured.Unstructured{parentFolder, childFolder}}, nil)

				gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard}}, resources.DashboardResource, nil)

				repoResources := resources.NewMockRepositoryResources(t)
				repoResources.On("WriteResourceFileFromObject", mock.Anything,
					mock.MatchedBy(func(obj *unstructured.Unstructured) bool { return obj.GetName() == "dashboard-in-nested-folder" }),
					mock.MatchedBy(func(opts resources.WriteOptions) bool { return opts.Path == "grafana/team-a/subteam" })).
					Return("grafana/team-a/subteam/dashboard-in-nested-folder.json", nil)

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "dashboard-in-nested-folder" && r.Action == repository.FileActionCreated
				})).Return()
				progress.On("TooManyErrors").Return(nil)

				return resourceClients, repoResources, progress
			},
			options: provisioningV0.ExportJobOptions{
				Path:      "grafana",
				Branch:    "feature/branch",
				Resources: []provisioningV0.ResourceRef{{Name: "dashboard-in-nested-folder", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
			},
		},
		{
			name: "folder client error",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(nil, fmt.Errorf("folder client error"))

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return()

				return resourceClients, nil, progress
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{{Name: "dashboard-1", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
			},
			wantErr: "get folder client: folder client error",
		},
		{
			name: "resource not found",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)

				gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{}, resources.DashboardResource, nil)

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "non-existent-dashboard" && r.Error != nil && r.Error.Error() == "get resource dashboard.grafana.app/Dashboard/non-existent-dashboard: no items found"
				})).Return()
				progress.On("TooManyErrors").Return(nil)

				return resourceClients, nil, progress
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{{Name: "non-existent-dashboard", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
			},
		},
		{
			// A dashboard whose conversion status carries storedVersion v2alpha1
			// must be re-fetched through the version-specific client and written
			// with that apiVersion.
			name: "dashboard version conversion",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				v1Dashboard := unstructured.Unstructured{
					Object: map[string]interface{}{
						"apiVersion": resources.DashboardResource.GroupVersion().String(),
						"kind":       "Dashboard",
						"metadata":   map[string]interface{}{"name": "v2-dashboard"},
						"status": map[string]interface{}{
							"conversion": map[string]interface{}{"failed": true, "storedVersion": "v2alpha1"},
						},
					},
				}
				v2Dashboard := createV2DashboardObject("v2-dashboard", "v2alpha1")

				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)

				gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{v1Dashboard}}, resources.DashboardResource, nil)

				v2GVR := schema.GroupVersionResource{Group: resources.DashboardResource.Group, Version: "v2alpha1", Resource: resources.DashboardResource.Resource}
				resourceClients.On("ForResource", mock.Anything, v2GVR).Return(&mockDynamicInterface{items: []unstructured.Unstructured{v2Dashboard}}, gvk, nil)

				repoResources := resources.NewMockRepositoryResources(t)
				repoResources.On("WriteResourceFileFromObject", mock.Anything,
					mock.MatchedBy(func(obj *unstructured.Unstructured) bool {
						return obj.GetName() == "v2-dashboard" && obj.GetAPIVersion() == "dashboard.grafana.app/v2alpha1"
					}),
					mock.Anything).Return("grafana/v2-dashboard.json", nil)

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "v2-dashboard" && r.Action == repository.FileActionCreated
				})).Return()
				progress.On("TooManyErrors").Return(nil)

				return resourceClients, repoResources, progress
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{{Name: "v2-dashboard", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
			},
		},
		{
			// The write fails and TooManyErrors reports the budget exhausted,
			// which must abort the whole export.
			name: "too many errors",
			setupMocks: func(t *testing.T) (*resources.MockResourceClients, *resources.MockRepositoryResources, *jobs.MockJobProgressRecorder) {
				dashboard := createDashboardObject("dashboard-1")

				resourceClients := resources.NewMockResourceClients(t)
				resourceClients.On("Folder", mock.Anything).Return(&mockDynamicInterface{}, nil)

				gvk := schema.GroupVersionKind{Group: resources.DashboardResource.Group, Kind: "Dashboard"}
				resourceClients.On("ForKind", mock.Anything, gvk).Return(&mockDynamicInterface{items: []unstructured.Unstructured{dashboard}}, resources.DashboardResource, nil)

				repoResources := resources.NewMockRepositoryResources(t)
				repoResources.On("WriteResourceFileFromObject", mock.Anything, mock.Anything, mock.Anything).Return("", fmt.Errorf("write error"))

				progress := jobs.NewMockJobProgressRecorder(t)
				progress.On("SetMessage", mock.Anything, mock.Anything).Return().Maybe()
				progress.On("Record", mock.Anything, mock.MatchedBy(func(r jobs.JobResourceResult) bool {
					return r.Name == "dashboard-1" && r.Action == repository.FileActionIgnored && r.Error != nil
				})).Return()
				progress.On("TooManyErrors").Return(fmt.Errorf("too many errors"))

				return resourceClients, repoResources, progress
			},
			options: provisioningV0.ExportJobOptions{
				Resources: []provisioningV0.ResourceRef{{Name: "dashboard-1", Kind: "Dashboard", Group: resources.DashboardResource.Group}},
			},
			wantErr: "too many errors",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			resourceClients, repoResources, progress := tt.setupMocks(t)

			err := ExportSpecificResources(context.Background(), "test-repo", tt.options, resourceClients, repoResources, progress)

			if tt.wantErr != "" {
				require.EqualError(t, err, tt.wantErr)
			} else {
				require.NoError(t, err)
			}

			if tt.assertResults != nil {
				tt.assertResults(t, resourceClients, repoResources, progress)
			}
		})
	}
}

View File

@@ -21,29 +21,26 @@ type ExportFn func(ctx context.Context, repoName string, options provisioning.Ex
type WrapWithStageFn func(ctx context.Context, repo repository.Repository, stageOptions repository.StageOptions, fn func(repo repository.Repository, staged bool) error) error
type ExportWorker struct {
clientFactory resources.ClientFactory
repositoryResources resources.RepositoryResourcesFactory
exportAllFn ExportFn
exportSpecificResourcesFn ExportFn
wrapWithStageFn WrapWithStageFn
metrics jobs.JobMetrics
clientFactory resources.ClientFactory
repositoryResources resources.RepositoryResourcesFactory
exportFn ExportFn
wrapWithStageFn WrapWithStageFn
metrics jobs.JobMetrics
}
func NewExportWorker(
clientFactory resources.ClientFactory,
repositoryResources resources.RepositoryResourcesFactory,
exportAllFn ExportFn,
exportSpecificResourcesFn ExportFn,
exportFn ExportFn,
wrapWithStageFn WrapWithStageFn,
metrics jobs.JobMetrics,
) *ExportWorker {
return &ExportWorker{
clientFactory: clientFactory,
repositoryResources: repositoryResources,
exportAllFn: exportAllFn,
exportSpecificResourcesFn: exportSpecificResourcesFn,
wrapWithStageFn: wrapWithStageFn,
metrics: metrics,
clientFactory: clientFactory,
repositoryResources: repositoryResources,
exportFn: exportFn,
wrapWithStageFn: wrapWithStageFn,
metrics: metrics,
}
}
@@ -103,19 +100,7 @@ func (r *ExportWorker) Process(ctx context.Context, repo repository.Repository,
return fmt.Errorf("create repository resource client: %w", err)
}
// Check if Resources list is provided (specific resources export mode)
if len(options.Resources) > 0 {
progress.SetTotal(ctx, len(options.Resources))
progress.StrictMaxErrors(1) // Fail fast on any error during export
// Validate that specific resource export is only used with folder sync targets
if cfg.Spec.Sync.Target != provisioning.SyncTargetTypeFolder {
return fmt.Errorf("specific resource export is only supported for folder sync targets, but repository has target type '%s'", cfg.Spec.Sync.Target)
}
return r.exportSpecificResourcesFn(ctx, cfg.Name, *options, clients, repositoryResources, progress)
}
// Fall back to existing ExportAll behavior for backward compatibility
return r.exportAllFn(ctx, cfg.Name, *options, clients, repositoryResources, progress)
return r.exportFn(ctx, cfg.Name, *options, clients, repositoryResources, progress)
}
err := r.wrapWithStageFn(ctx, repo, cloneOptions, fn)

View File

@@ -56,7 +56,7 @@ func TestExportWorker_IsSupported(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
r := NewExportWorker(nil, nil, nil, nil, nil, metrics)
r := NewExportWorker(nil, nil, nil, nil, metrics)
got := r.IsSupported(context.Background(), tt.job)
require.Equal(t, tt.want, got)
})
@@ -70,7 +70,7 @@ func TestExportWorker_ProcessNoExportSettings(t *testing.T) {
},
}
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), nil, job, nil)
require.EqualError(t, err, "missing export settings")
}
@@ -93,7 +93,7 @@ func TestExportWorker_ProcessWriteNotAllowed(t *testing.T) {
},
})
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, nil)
require.EqualError(t, err, "this repository is read only")
}
@@ -117,7 +117,7 @@ func TestExportWorker_ProcessBranchNotAllowedForLocal(t *testing.T) {
},
})
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, nil)
require.EqualError(t, err, "this repository does not support the branch workflow")
}
@@ -149,7 +149,7 @@ func TestExportWorker_ProcessFailedToCreateClients(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
mockProgress := jobs.NewMockJobProgressRecorder(t)
err := r.Process(context.Background(), mockRepo, job, mockProgress)
@@ -185,7 +185,7 @@ func TestExportWorker_ProcessNotReaderWriter(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "export job submitted targeting repository that is not a ReaderWriter")
}
@@ -221,7 +221,7 @@ func TestExportWorker_ProcessRepositoryResourcesError(t *testing.T) {
mockStageFn.On("Execute", context.Background(), mockRepo, mock.Anything, mock.Anything).Return(func(ctx context.Context, repo repository.Repository, stageOpts repository.StageOptions, fn func(repository.Repository, bool) error) error {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "create repository resource client: failed to create repository resources client")
}
@@ -273,7 +273,7 @@ func TestExportWorker_ProcessStageOptions(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)
}
@@ -355,7 +355,7 @@ func TestExportWorker_ProcessStageOptionsWithBranch(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)
})
@@ -398,7 +398,7 @@ func TestExportWorker_ProcessExportFnError(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "export failed")
}
@@ -426,7 +426,7 @@ func TestExportWorker_ProcessWrapWithStageFnError(t *testing.T) {
mockStageFn := NewMockWrapWithStageFn(t)
mockStageFn.On("Execute", mock.Anything, mockRepo, mock.Anything, mock.Anything).Return(errors.New("stage failed"))
r := NewExportWorker(nil, nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "stage failed")
}
@@ -452,7 +452,7 @@ func TestExportWorker_ProcessBranchNotAllowedForStageableRepositories(t *testing
mockProgress := jobs.NewMockJobProgressRecorder(t)
// No progress messages expected in current implementation
r := NewExportWorker(nil, nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(nil, nil, nil, nil, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "this repository does not support the branch workflow")
}
@@ -504,7 +504,7 @@ func TestExportWorker_ProcessGitRepository(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)
}
@@ -550,7 +550,7 @@ func TestExportWorker_ProcessGitRepositoryExportFnError(t *testing.T) {
return fn(repo, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.EqualError(t, err, "export failed")
}
@@ -613,7 +613,7 @@ func TestExportWorker_RefURLsSetWithBranch(t *testing.T) {
return fn(mockReaderWriter, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepoWithURLs, job, mockProgress)
require.NoError(t, err)
@@ -670,7 +670,7 @@ func TestExportWorker_RefURLsNotSetWithoutBranch(t *testing.T) {
return fn(mockReaderWriter, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepoWithURLs, job, mockProgress)
require.NoError(t, err)
@@ -727,7 +727,7 @@ func TestExportWorker_RefURLsNotSetForNonURLRepository(t *testing.T) {
return fn(mockReaderWriter, true)
})
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, nil, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
r := NewExportWorker(mockClients, mockRepoResources, mockExportFn.Execute, mockStageFn.Execute, jobs.RegisterJobMetrics(prometheus.NewPedanticRegistry()))
err := r.Process(context.Background(), mockRepo, job, mockProgress)
require.NoError(t, err)

View File

@@ -705,7 +705,6 @@ func (b *APIBuilder) GetPostStartHooks() (map[string]genericapiserver.PostStartH
b.clients,
b.repositoryResources,
export.ExportAll,
export.ExportSpecificResources,
stageIfPossible,
metrics,
)

View File

@@ -167,40 +167,30 @@ func (r *ResourcesManager) WriteResourceFileFromObject(ctx context.Context, obj
title = name
}
fileName := slugify.Slugify(title) + ".json"
folder := meta.GetFolder()
// Get the absolute path of the folder
rootFolder := RootFolder(r.repo.Config())
// Build the full path: start with options.Path, then add folder path, then filename
basePath := options.Path
// If options.Path is provided, use it directly (it already includes folder structure from export).
// Otherwise, resolve folder path from the repository tree.
if basePath == "" {
folder := meta.GetFolder()
// Get the absolute path of the folder
rootFolder := RootFolder(r.repo.Config())
if folder == "" {
// If no folder is specified and no path is provided, set it to the root to ensure everything is written under it
meta.SetFolder(rootFolder) // Set the folder in the metadata to the root folder
} else {
var ok bool
var fid Folder
fid, ok = r.folders.Tree().DirPath(folder, rootFolder)
if !ok {
// Fallback: try without rootFolder (for instance targets where rootFolder is empty)
fid, ok = r.folders.Tree().DirPath(folder, "")
if !ok {
return "", fmt.Errorf("folder %s NOT found in tree", folder)
}
}
if fid.Path != "" {
basePath = fid.Path
}
// If no folder is specified in the file, set it to the root to ensure everything is written under it
var fid Folder
if folder == "" {
fid = Folder{ID: rootFolder}
meta.SetFolder(rootFolder) // Set the folder in the metadata to the root folder
} else {
var ok bool
fid, ok = r.folders.Tree().DirPath(folder, rootFolder)
if !ok {
return "", fmt.Errorf("folder %s NOT found in tree with root: %s", folder, rootFolder)
}
}
if basePath != "" {
fileName = safepath.Join(basePath, fileName)
fileName := slugify.Slugify(title) + ".json"
if fid.Path != "" {
fileName = safepath.Join(fid.Path, fileName)
}
if options.Path != "" {
fileName = safepath.Join(options.Path, fileName)
}
parsed := ParsedResource{

View File

@@ -145,8 +145,6 @@ func (t *folderTree) AddUnstructured(item *unstructured.Unstructured) error {
return fmt.Errorf("extract meta accessor: %w", err)
}
// In Grafana, folder UIDs are stored as metadata.name
// The grafana.app/folder annotation contains the folder's metadata.name (which is its Grafana UID)
folder := Folder{
Title: meta.FindTitle(item.GetName()),
ID: item.GetName(),

View File

@@ -5,7 +5,6 @@ import (
"net"
"path/filepath"
"strconv"
"strings"
"github.com/grafana/grafana/pkg/services/apiserver/options"
"github.com/grafana/grafana/pkg/services/featuremgmt"
@@ -41,15 +40,6 @@ func applyGrafanaConfig(cfg *setting.Cfg, features featuremgmt.FeatureToggles, o
apiserverCfg := cfg.SectionWithEnvOverrides("grafana-apiserver")
runtimeConfig := apiserverCfg.Key("runtime_config").String()
runtimeConfigSplit := strings.Split(runtimeConfig, ",")
// TODO: temporary fix to allow disabling local features service and still being able to use its authz handler
if !cfg.OpenFeature.APIEnabled {
runtimeConfigSplit = append(runtimeConfigSplit, "features.grafana.app/v0alpha1=false")
}
runtimeConfig = strings.Join(runtimeConfigSplit, ",")
if runtimeConfig != "" {
if err := o.APIEnablementOptions.RuntimeConfig.Set(runtimeConfig); err != nil {
return fmt.Errorf("failed to set runtime config: %w", err)

View File

@@ -1913,6 +1913,13 @@ var (
Stage: FeatureStageExperimental,
Owner: grafanaOSSBigTent,
},
{
Name: "postgresVariableQueryEditor",
Description: "Enable the new variable query editor for the PostgreSQL data source",
Stage: FeatureStageExperimental,
Owner: grafanaOSSBigTent,
FrontendOnly: true,
},
{
Name: "pluginStoreServiceLoading",
Description: "Load plugins on store service startup instead of wire provider, and call RegisterFixedRoles after all plugins are loaded",

View File

@@ -260,6 +260,7 @@ newVizSuggestions,preview,@grafana/dataviz-squad,false,false,true
externalVizSuggestions,experimental,@grafana/dataviz-squad,false,false,true
preventPanelChromeOverflow,preview,@grafana/grafana-frontend-platform,false,false,true
jaegerEnableGrpcEndpoint,experimental,@grafana/oss-big-tent,false,false,false
postgresVariableQueryEditor,experimental,@grafana/oss-big-tent,false,false,true
pluginStoreServiceLoading,experimental,@grafana/plugins-platform-backend,false,false,false
newPanelPadding,preview,@grafana/dashboards-squad,false,false,true
onlyStoreActionSets,GA,@grafana/identity-access-team,false,false,false
1 Name Stage Owner requiresDevMode RequiresRestart FrontendOnly
260 externalVizSuggestions experimental @grafana/dataviz-squad false false true
261 preventPanelChromeOverflow preview @grafana/grafana-frontend-platform false false true
262 jaegerEnableGrpcEndpoint experimental @grafana/oss-big-tent false false false
263 postgresVariableQueryEditor experimental @grafana/oss-big-tent false false true
264 pluginStoreServiceLoading experimental @grafana/plugins-platform-backend false false false
265 newPanelPadding preview @grafana/dashboards-squad false false true
266 onlyStoreActionSets GA @grafana/identity-access-team false false false

View File

@@ -2692,6 +2692,19 @@
"expression": "false"
}
},
{
"metadata": {
"name": "postgresVariableQueryEditor",
"resourceVersion": "1765231394616",
"creationTimestamp": "2025-12-08T22:03:14Z"
},
"spec": {
"description": "Enable the new variable query editor for the PostgreSQL data source",
"stage": "experimental",
"codeowner": "@grafana/oss-big-tent",
"frontend": true
}
},
{
"metadata": {
"name": "preferLibraryPanelTitle",

View File

@@ -1084,6 +1084,10 @@ func (s *server) List(ctx context.Context, req *resourcepb.ListRequest) (*resour
return err
}
item := &resourcepb.ResourceWrapper{
ResourceVersion: iter.ResourceVersion(),
Value: iter.Value(),
}
// Trash is only accessible to admins or the user who deleted the object
if req.Source == resourcepb.ListRequest_TRASH {
if !s.isTrashItemAuthorized(ctx, iter, trashChecker) {
@@ -1093,11 +1097,6 @@ func (s *server) List(ctx context.Context, req *resourcepb.ListRequest) (*resour
continue
}
item := &resourcepb.ResourceWrapper{
ResourceVersion: iter.ResourceVersion(),
Value: iter.Value(),
}
pageBytes += len(item.Value)
rsp.Items = append(rsp.Items, item)
if (req.Limit > 0 && len(rsp.Items) >= int(req.Limit)) || pageBytes >= maxPageBytes {

View File

@@ -1,144 +0,0 @@
package apis
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/tests/testinfra"
"github.com/grafana/grafana/pkg/util/testutil"
)
const pluginsDiscoveryJSON = `[
{
"version": "v0alpha1",
"freshness": "Current",
"resources": [
{
"resource": "pluginmetas",
"responseKind": {
"group": "",
"kind": "PluginMeta",
"version": ""
},
"scope": "Namespaced",
"singularResource": "pluginmeta",
"subresources": [
{
"responseKind": {
"group": "",
"kind": "PluginMeta",
"version": ""
},
"subresource": "status",
"verbs": [
"get",
"patch",
"update"
]
}
],
"verbs": [
"get",
"list"
]
},
{
"resource": "plugins",
"responseKind": {
"group": "",
"kind": "Plugin",
"version": ""
},
"scope": "Namespaced",
"singularResource": "plugins",
"subresources": [
{
"responseKind": {
"group": "",
"kind": "Plugin",
"version": ""
},
"subresource": "status",
"verbs": [
"get",
"patch",
"update"
]
}
],
"verbs": [
"create",
"delete",
"deletecollection",
"get",
"list",
"patch",
"update",
"watch"
]
}
]
}
]`
func setupHelper(t *testing.T, openFeatureAPIEnabled bool) *K8sTestHelper {
t.Helper()
helper := NewK8sTestHelper(t, testinfra.GrafanaOpts{
AppModeProduction: true,
DisableAnonymous: true,
APIServerRuntimeConfig: "plugins.grafana.app/v0alpha1=true",
OpenFeatureAPIEnabled: openFeatureAPIEnabled,
})
t.Cleanup(func() { helper.Shutdown() })
return helper
}
func TestIntegrationAPIServerRuntimeConfig(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
t.Run("discovery with openfeature api enabled", func(t *testing.T) {
helper := setupHelper(t, true)
disco, err := helper.GetGroupVersionInfoJSON("features.grafana.app")
require.NoError(t, err)
require.JSONEq(t, `[
{
"freshness": "Current",
"resources": [
{
"resource": "noop",
"responseKind": {
"group": "",
"kind": "Status",
"version": ""
},
"scope": "Namespaced",
"singularResource": "noop",
"verbs": [
"get"
]
}
],
"version": "v0alpha1"
}
]`, disco)
// plugins should still be discoverable
disco, err = helper.GetGroupVersionInfoJSON("plugins.grafana.app")
require.NoError(t, err)
require.JSONEq(t, pluginsDiscoveryJSON, disco)
require.NoError(t, err)
})
t.Run("discovery with openfeature api false", func(t *testing.T) {
helper := setupHelper(t, false)
_, err := helper.GetGroupVersionInfoJSON("features.grafana.app")
require.Error(t, err, "expected error when openfeature api is disabled")
// plugins should still be discoverable
disco, err := helper.GetGroupVersionInfoJSON("plugins.grafana.app")
require.NoError(t, err)
require.JSONEq(t, pluginsDiscoveryJSON, disco)
require.NoError(t, err)
})
}

View File

@@ -22,11 +22,9 @@ func TestIntegrationFeatures(t *testing.T) {
// Enable a random flag -- check that it is reported as enabled
flag := featuremgmt.FlagGrafanaAPIServerWithExperimentalAPIs
// the test below tests using enable_api = true, without that, the runtime_config has been instructed to skip the API
helper := apis.NewK8sTestHelper(t, testinfra.GrafanaOpts{
OpenFeatureAPIEnabled: true,
AppModeProduction: true,
DisableAnonymous: false, // allow anon user
AppModeProduction: true,
DisableAnonymous: false, // allow anon user
EnableFeatureToggles: []string{
flag, // used in test below
},

View File

@@ -792,7 +792,7 @@ func (c *K8sTestHelper) NewDiscoveryClient() *discovery.DiscoveryClient {
return client
}
func (c *K8sTestHelper) GetGroupVersionInfoJSON(group string) (string, error) {
func (c *K8sTestHelper) GetGroupVersionInfoJSON(group string) string {
c.t.Helper()
disco := c.NewDiscoveryClient()
@@ -823,11 +823,12 @@ func (c *K8sTestHelper) GetGroupVersionInfoJSON(group string) (string, error) {
if item.Metadata.Name == group {
v, err := json.MarshalIndent(item.Versions, "", " ")
require.NoError(c.t, err)
return string(v), nil
return string(v)
}
}
return "", goerrors.New("could not find discovery info for: " + group)
require.Failf(c.t, "could not find discovery info for: %s", group)
return ""
}
func (c *K8sTestHelper) CreateDS(cmd *datasources.AddDataSourceCommand) *datasources.DataSource {

View File

@@ -3288,18 +3288,6 @@
"path": {
"description": "FIXME: we should validate this in admission hooks Prefix in target file system",
"type": "string"
},
"resources": {
"description": "Resources to export This option has been created because currently the frontend does not use standarized app platform APIs. For performance and API consistency reasons, the preferred option is it to use the resources.",
"type": "array",
"items": {
"default": {},
"allOf": [
{
"$ref": "#/components/schemas/com.github.grafana.grafana.apps.provisioning.pkg.apis.provisioning.v0alpha1.ResourceRef"
}
]
}
}
}
},

View File

@@ -50,8 +50,7 @@ func TestIntegrationPlaylist(t *testing.T) {
}))
// The accepted verbs will change when dual write is enabled
disco, err := h.GetGroupVersionInfoJSON("playlist.grafana.app")
require.NoError(t, err)
disco := h.GetGroupVersionInfoJSON("playlist.grafana.app")
// t.Logf("%s", disco)
require.JSONEq(t, `[
{

View File

@@ -13,8 +13,7 @@ func TestIntegrationPluginsIntegrationDiscovery(t *testing.T) {
t.Run("discovery", func(t *testing.T) {
helper := setupHelper(t)
disco, err := helper.GetGroupVersionInfoJSON("plugins.grafana.app")
require.NoError(t, err)
disco := helper.GetGroupVersionInfoJSON("plugins.grafana.app")
require.JSONEq(t, `[
{
"version": "v0alpha1",

View File

@@ -1,390 +0,0 @@
package provisioning
import (
"context"
"encoding/json"
"os"
"path/filepath"
"testing"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
provisioning "github.com/grafana/grafana/apps/provisioning/pkg/apis/provisioning/v0alpha1"
"github.com/grafana/grafana/pkg/apimachinery/utils"
"github.com/grafana/grafana/pkg/util/testutil"
)
func TestIntegrationProvisioning_ExportSpecificResources(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create unmanaged dashboards directly in Grafana
dashboard1 := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v1.yaml")
dashboard1Obj, err := helper.DashboardsV1.Resource.Create(ctx, dashboard1, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create first dashboard")
dashboard1Name := dashboard1Obj.GetName()
dashboard2 := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v2beta1.yaml")
dashboard2Obj, err := helper.DashboardsV2beta1.Resource.Create(ctx, dashboard2, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create second dashboard")
dashboard2Name := dashboard2Obj.GetName()
// Verify dashboards are unmanaged
dash1, err := helper.DashboardsV1.Resource.Get(ctx, dashboard1Name, metav1.GetOptions{})
require.NoError(t, err)
manager1, found1 := dash1.GetAnnotations()[utils.AnnoKeyManagerIdentity]
require.True(t, !found1 || manager1 == "", "dashboard1 should be unmanaged")
dash2, err := helper.DashboardsV2beta1.Resource.Get(ctx, dashboard2Name, metav1.GetOptions{})
require.NoError(t, err)
manager2, found2 := dash2.GetAnnotations()[utils.AnnoKeyManagerIdentity]
require.True(t, !found2 || manager2 == "", "dashboard2 should be unmanaged")
// Create repository with folder sync target (required for specific resource export)
const repo = "export-resources-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0, // No dashboards expected after sync (we'll export manually)
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created dashboards before repo
}
helper.CreateRepo(t, testRepo)
// Export specific dashboards using Resources field
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Path: "",
Resources: []provisioning.ResourceRef{
{
Name: dashboard1Name,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
{
Name: dashboard2Name,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
}
helper.TriggerJobAndWaitForSuccess(t, repo, spec)
// Verify both dashboards were exported
dashboard1File := filepath.Join(helper.ProvisioningPath, "test-dashboard-created-at-v1.json")
dashboard2File := filepath.Join(helper.ProvisioningPath, "test-dashboard-created-at-v2beta1.json")
// Check dashboard1
body1, err := os.ReadFile(dashboard1File) //nolint:gosec
require.NoError(t, err, "exported file should exist for dashboard1")
obj1 := map[string]any{}
err = json.Unmarshal(body1, &obj1)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err := unstructured.NestedString(obj1, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v1", val)
// Check dashboard2
body2, err := os.ReadFile(dashboard2File) //nolint:gosec
require.NoError(t, err, "exported file should exist for dashboard2")
obj2 := map[string]any{}
err = json.Unmarshal(body2, &obj2)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err = unstructured.NestedString(obj2, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v2beta1", val)
}
func TestIntegrationProvisioning_ExportSpecificResourcesWithPath(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create unmanaged dashboard
dashboard := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v1.yaml")
dashboardObj, err := helper.DashboardsV1.Resource.Create(ctx, dashboard, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create dashboard")
dashboardName := dashboardObj.GetName()
// Create repository with folder sync target (required for specific resource export)
const repo = "export-resources-path-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0,
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created dashboard before repo
}
helper.CreateRepo(t, testRepo)
// Export with custom path
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Path: "custom/path",
Resources: []provisioning.ResourceRef{
{
Name: dashboardName,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
}
helper.TriggerJobAndWaitForSuccess(t, repo, spec)
// Verify dashboard was exported to custom path
expectedFile := filepath.Join(helper.ProvisioningPath, "custom", "path", "test-dashboard-created-at-v1.json")
body, err := os.ReadFile(expectedFile) //nolint:gosec
require.NoError(t, err, "exported file should exist at custom path")
obj := map[string]any{}
err = json.Unmarshal(body, &obj)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err := unstructured.NestedString(obj, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v1", val)
}
func TestIntegrationProvisioning_ExportSpecificResourcesRejectsFolders(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create a folder
folder := &unstructured.Unstructured{
Object: map[string]any{
"apiVersion": "folder.grafana.app/v1beta1",
"kind": "Folder",
"metadata": map[string]any{
"name": "test-folder",
},
"spec": map[string]any{
"title": "Test Folder",
},
},
}
folderObj, err := helper.Folders.Resource.Create(ctx, folder, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create folder")
folderName := folderObj.GetName()
// Create repository with folder sync target (required for specific resource export)
const repo = "export-reject-folders-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0,
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created folder before repo
}
helper.CreateRepo(t, testRepo)
// Try to export folder (should fail validation)
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Resources: []provisioning.ResourceRef{
{
Name: folderName,
Kind: "Folder",
Group: "folder.grafana.app",
},
},
},
}
// This should fail with validation error
body := asJSON(spec)
result := helper.AdminREST.Post().
Namespace("default").
Resource("repositories").
Name(repo).
SubResource("jobs").
Body(body).
SetHeader("Content-Type", "application/json").
Do(ctx)
err = result.Error()
require.Error(t, err, "should fail validation when trying to export folder")
require.Contains(t, err.Error(), "folders are not supported", "error should mention folders are not supported")
}
// TestIntegrationProvisioning_ExportSpecificResourcesRejectsManagedResources
// checks that exporting a dashboard which is already managed by a repository
// is rejected: the export job must finish in an error state mentioning the
// managed resource.
func TestIntegrationProvisioning_ExportSpecificResourcesRejectsManagedResources(t *testing.T) {
	testutil.SkipIntegrationTestInShortMode(t)
	helper := runGrafana(t)
	ctx := context.Background()

	// Seed a *managed* dashboard by syncing a repository. A folder target is
	// used so that a second repository can be created later (instance targets
	// may only exist alone).
	managedRepo := TestRepo{
		Name:   "managed-dashboard-repo",
		Target: "folder",
		Copies: map[string]string{
			"exportunifiedtorepository/dashboard-test-v1.yaml": "dashboard.json",
		},
		ExpectedDashboards:     1,
		ExpectedFolders:        1,    // Folder target creates a folder with the repo name
		SkipResourceAssertions: true, // Skip assertions since we're testing export, not sync
	}
	helper.CreateRepo(t, managedRepo)

	// Look up the synced dashboard and confirm it carries a manager identity
	// annotation, i.e. it really is managed.
	list, err := helper.DashboardsV1.Resource.List(ctx, metav1.ListOptions{})
	require.NoError(t, err)
	require.Len(t, list.Items, 1, "should have one managed dashboard")
	dash := list.Items[0]
	dashName := dash.GetName()
	managerID, ok := dash.GetAnnotations()[utils.AnnoKeyManagerIdentity]
	require.True(t, ok && managerID != "", "dashboard should be managed")

	// A second, empty repository serves as the export destination.
	const exportRepo = "export-managed-reject-test-repo"
	helper.CreateRepo(t, TestRepo{
		Name:                   exportRepo,
		Target:                 "folder",
		Copies:                 map[string]string{},
		ExpectedDashboards:     0,
		ExpectedFolders:        0,
		SkipResourceAssertions: true, // Skip assertions since we're testing export, not sync
	})

	// Submit an export (push) job that names the managed dashboard explicitly.
	jobSpec := provisioning.JobSpec{
		Action: provisioning.JobActionPush,
		Push: &provisioning.ExportJobOptions{
			Resources: []provisioning.ResourceRef{
				{
					Name:  dashName,
					Kind:  "Dashboard",
					Group: "dashboard.grafana.app",
				},
			},
		},
	}
	result := helper.AdminREST.Post().
		Namespace("default").
		Resource("repositories").
		Name(exportRepo).
		SubResource("jobs").
		Body(asJSON(jobSpec)).
		SetHeader("Content-Type", "application/json").
		Do(ctx)

	// The job itself is created successfully; the rejection happens when it runs.
	created, err := result.Get()
	require.NoError(t, err, "job should be created")
	jobObj, isUnstructured := created.(*unstructured.Unstructured)
	require.True(t, isUnstructured, "should get unstructured object")

	// Wait for completion and assert the job failed because the resource is managed.
	finished := helper.AwaitJob(t, ctx, jobObj)
	state := mustNestedString(finished.Object, "status", "state")
	jobErrs := mustNestedStringSlice(finished.Object, "status", "errors")
	require.Equal(t, string(provisioning.JobStateError), state, "job should fail")
	require.NotEmpty(t, jobErrs, "job should have errors")
	require.Contains(t, jobErrs[0], "managed", "error should mention managed resource")
}
// TestIntegrationProvisioning_ExportSpecificResourcesWithFolderStructure verifies that
// exporting a single unmanaged dashboard that lives inside an unmanaged folder keeps
// the folder structure in the repository path. The test tolerates (and logs) a
// fallback where the file lands at the repository root instead.
func TestIntegrationProvisioning_ExportSpecificResourcesWithFolderStructure(t *testing.T) {
testutil.SkipIntegrationTestInShortMode(t)
helper := runGrafana(t)
ctx := context.Background()
// Create an unmanaged folder (no repository owns it) to hold the dashboard.
folder := &unstructured.Unstructured{
Object: map[string]any{
"apiVersion": "folder.grafana.app/v1beta1",
"kind": "Folder",
"metadata": map[string]any{
"name": "test-export-folder",
},
"spec": map[string]any{
"title": "Test Export Folder",
},
},
}
folderObj, err := helper.Folders.Resource.Create(ctx, folder, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create folder")
// NOTE(review): GetUID() returns the Kubernetes metadata.uid. Grafana folder
// references in dashboard specs typically use the folder *name* ("test-export-folder");
// confirm metadata.uid and the folder name coincide here, otherwise GetName()
// may be the intended value.
folderUID := folderObj.GetUID()
// Verify folder is unmanaged (absent or empty manager-identity annotation).
manager, found := folderObj.GetAnnotations()[utils.AnnoKeyManagerIdentity]
require.True(t, !found || manager == "", "folder should be unmanaged")
// Create an unmanaged dashboard from a fixture and place it in the folder.
dashboard := helper.LoadYAMLOrJSONFile("exportunifiedtorepository/dashboard-test-v1.yaml")
// Set folder UID in dashboard spec so the dashboard is parented under the folder.
err = unstructured.SetNestedField(dashboard.Object, string(folderUID), "spec", "folder")
require.NoError(t, err, "should be able to set folder UID")
dashboardObj, err := helper.DashboardsV1.Resource.Create(ctx, dashboard, metav1.CreateOptions{})
require.NoError(t, err, "should be able to create dashboard in folder")
dashboardName := dashboardObj.GetName()
// Create repository with folder sync target (required for specific resource export)
const repo = "export-folder-structure-test-repo"
testRepo := TestRepo{
Name: repo,
Target: "folder",
Copies: map[string]string{},
ExpectedDashboards: 0,
ExpectedFolders: 0,
SkipResourceAssertions: true, // Skip assertions since we created folder and dashboard before repo
}
helper.CreateRepo(t, testRepo)
// Export just the one dashboard (should preserve folder structure in the repo path).
spec := provisioning.JobSpec{
Action: provisioning.JobActionPush,
Push: &provisioning.ExportJobOptions{
Path: "",
Resources: []provisioning.ResourceRef{
{
Name: dashboardName,
Kind: "Dashboard",
Group: "dashboard.grafana.app",
},
},
},
}
helper.TriggerJobAndWaitForSuccess(t, repo, spec)
// For folder sync targets with specific resource export, the folder structure
// from unmanaged folders should be preserved in the export path.
// Expected: <provisioning_path>/<folder_name>/<dashboard>.json
// NOTE(review): the path segment uses the folder *title* ("Test Export Folder"),
// not its name — assumes the exporter names directories after folder titles; confirm.
expectedFile := filepath.Join(helper.ProvisioningPath, "Test Export Folder", "test-dashboard-created-at-v1.json")
body, err := os.ReadFile(expectedFile) //nolint:gosec
if err != nil {
// Fallback: if folder structure not preserved, file might be at root.
// This path is logged rather than failed, so the test passes either way.
expectedFile = filepath.Join(helper.ProvisioningPath, "test-dashboard-created-at-v1.json")
body, err = os.ReadFile(expectedFile) //nolint:gosec
require.NoError(t, err, "exported file should exist (either with folder structure or at root)")
t.Logf("Note: Dashboard exported to root instead of preserving folder structure")
}
// Finally, sanity-check the exported payload: valid JSON whose metadata.name
// matches the fixture's resource name.
obj := map[string]any{}
err = json.Unmarshal(body, &obj)
require.NoError(t, err, "exported file should be valid JSON")
val, _, err := unstructured.NestedString(obj, "metadata", "name")
require.NoError(t, err)
require.Equal(t, "test-v1", val)
}

View File

@@ -316,21 +316,6 @@ func CreateGrafDir(t *testing.T, opts GrafanaOpts) (string, string) {
_, err = serverSect.NewKey("static_root_path", publicDir)
require.NoError(t, err)
openFeatureSect, err := cfg.NewSection("feature_toggles.openfeature")
require.NoError(t, err)
_, err = openFeatureSect.NewKey("enable_api", strconv.FormatBool(opts.OpenFeatureAPIEnabled))
require.NoError(t, err)
if !opts.OpenFeatureAPIEnabled {
_, err = openFeatureSect.NewKey("provider", "static") // in practice, APIEnabled being false goes with goff type, but trying to make tests work
require.NoError(t, err)
_, err = openFeatureSect.NewKey("targetingKey", "grafana")
require.NoError(t, err)
// so staticFlags can be provided to static provider
_, err := cfg.NewSection("feature_toggles")
require.NoError(t, err)
}
anonSect, err := cfg.NewSection("auth.anonymous")
require.NoError(t, err)
_, err = anonSect.NewKey("enabled", "true")
@@ -669,7 +654,6 @@ type GrafanaOpts struct {
DisableDBCleanup bool
DisableDataMigrations bool
SecretsManagerEnableDBMigrations bool
OpenFeatureAPIEnabled bool
// Allow creating grafana dir beforehand
Dir string

View File

@@ -62,7 +62,6 @@ type QueryModel struct {
IsCounter bool `json:"isCounter"`
CounterMax string `json:"counterMax"`
CounterResetValue string `json:"counterResetValue"`
ExplicitTags bool `json:"explicitTags"`
}
func newInstanceSettings(httpClientProvider *httpclient.Provider) datasource.InstanceFactoryFunc {
@@ -237,19 +236,8 @@ func createInitialFrame(val OpenTsdbCommon, length int, refID string) *data.Fram
labels[label] = value
}
tagKeys := make([]string, 0, len(val.Tags)+len(val.AggregateTags))
for tagKey := range val.Tags {
tagKeys = append(tagKeys, tagKey)
}
sort.Strings(tagKeys)
tagKeys = append(tagKeys, val.AggregateTags...)
frame := data.NewFrameOfFieldTypes(val.Metric, length, data.FieldTypeTime, data.FieldTypeFloat64)
frame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti,
TypeVersion: data.FrameTypeVersion{0, 1},
Custom: map[string]any{"tagKeys": tagKeys},
}
frame.Meta = &data.FrameMeta{Type: data.FrameTypeTimeSeriesMulti, TypeVersion: data.FrameTypeVersion{0, 1}}
frame.RefID = refID
timeField := frame.Fields[0]
timeField.Name = data.TimeSeriesTimeFieldName
@@ -367,19 +355,10 @@ func (s *Service) buildMetric(query backend.DataQuery) map[string]any {
if !model.DisableDownsampling {
downsampleInterval := model.DownsampleInterval
if downsampleInterval == "" {
if ms := query.Interval.Milliseconds(); ms > 0 {
downsampleInterval = FormatDownsampleInterval(ms)
} else {
downsampleInterval = "1m"
}
} else if strings.Contains(downsampleInterval, ".") && strings.HasSuffix(downsampleInterval, "s") {
if val, err := strconv.ParseFloat(strings.TrimSuffix(downsampleInterval, "s"), 64); err == nil {
downsampleInterval = strconv.FormatInt(int64(val*1000), 10) + "ms"
}
downsampleInterval = "1m" // default value for blank
}
downsample := downsampleInterval + "-" + model.DownsampleAggregator
if model.DownsampleFillPolicy != "" && model.DownsampleFillPolicy != "none" {
if model.DownsampleFillPolicy != "none" {
metric["downsample"] = downsample + "-" + model.DownsampleFillPolicy
} else {
metric["downsample"] = downsample
@@ -429,10 +408,6 @@ func (s *Service) buildMetric(query backend.DataQuery) map[string]any {
metric["filters"] = model.Filters
}
if model.ExplicitTags {
metric["explicitTags"] = true
}
return metric
}

View File

@@ -70,164 +70,6 @@ func TestCheckHealth(t *testing.T) {
}
}
func TestBuildMetric(t *testing.T) {
service := &Service{}
t.Run("Metric with no downsampleInterval should use query interval", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "",
"downsampleAggregator": "avg",
"downsampleFillPolicy": "none"
}`,
),
Interval: 30 * time.Second,
}
metric := service.buildMetric(query)
require.Equal(t, "30s-avg", metric["downsample"], "should use query interval formatted as seconds")
})
t.Run("Metric with downsampleInterval converts decimal seconds to milliseconds", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "0.5s",
"downsampleAggregator": "avg",
"downsampleFillPolicy": "none"
}`,
),
}
metric := service.buildMetric(query)
require.Equal(t, "500ms-avg", metric["downsample"], "should convert 0.5s to 500ms")
})
t.Run("Metric with no downsampleInterval uses milliseconds for sub-second query interval", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "",
"downsampleAggregator": "avg",
"downsampleFillPolicy": "none"
}`,
),
Interval: 500 * time.Millisecond,
}
metric := service.buildMetric(query)
require.Equal(t, "500ms-avg", metric["downsample"], "should use query interval formatted as milliseconds")
})
t.Run("Metric with no downsampleInterval uses minutes for longer intervals", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "",
"downsampleAggregator": "sum",
"downsampleFillPolicy": "none"
}`,
),
Interval: 5 * time.Minute,
}
metric := service.buildMetric(query)
require.Equal(t, "5m-sum", metric["downsample"], "should use query interval formatted as minutes")
})
t.Run("Metric with no downsampleInterval uses hours for multi-hour intervals", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "",
"downsampleAggregator": "max",
"downsampleFillPolicy": "none"
}`,
),
Interval: 2 * time.Hour,
}
metric := service.buildMetric(query)
require.Equal(t, "2h-max", metric["downsample"], "should use query interval formatted as hours")
})
t.Run("Metric with no downsampleInterval uses days for multi-day intervals", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": false,
"downsampleInterval": "",
"downsampleAggregator": "min",
"downsampleFillPolicy": "none"
}`,
),
Interval: 48 * time.Hour,
}
metric := service.buildMetric(query)
require.Equal(t, "2d-min", metric["downsample"], "should use query interval formatted as days")
})
t.Run("Build metric with explicitTags enabled", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": true,
"explicitTags": true,
"tags": {
"host": "server01"
}
}`,
),
}
metric := service.buildMetric(query)
require.True(t, metric["explicitTags"].(bool), "explicitTags should be true")
metricTags := metric["tags"].(map[string]any)
require.Equal(t, "server01", metricTags["host"])
})
t.Run("Build metric with explicitTags disabled does not include explicitTags", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`
{
"metric": "cpu.average.percent",
"aggregator": "avg",
"disableDownsampling": true,
"explicitTags": false,
"tags": {
"host": "server01"
}
}`,
),
}
metric := service.buildMetric(query)
require.Nil(t, metric["explicitTags"], "explicitTags should not be present when false")
})
}
func TestOpenTsdbExecutor(t *testing.T) {
service := &Service{}
@@ -277,7 +119,6 @@ func TestOpenTsdbExecutor(t *testing.T) {
testFrame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti,
TypeVersion: data.FrameTypeVersion{0, 1},
Custom: map[string]any{"tagKeys": []string{"app", "env"}},
}
testFrame.RefID = "A"
tsdbVersion := float32(4)
@@ -319,7 +160,6 @@ func TestOpenTsdbExecutor(t *testing.T) {
testFrame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti,
TypeVersion: data.FrameTypeVersion{0, 1},
Custom: map[string]any{"tagKeys": []string{"app", "env"}},
}
testFrame.RefID = "A"
tsdbVersion := float32(3)
@@ -392,7 +232,6 @@ func TestOpenTsdbExecutor(t *testing.T) {
testFrame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti,
TypeVersion: data.FrameTypeVersion{0, 1},
Custom: map[string]any{"tagKeys": []string{"app", "env"}},
}
testFrame.RefID = "A"
tsdbVersion := float32(3)
@@ -436,7 +275,6 @@ func TestOpenTsdbExecutor(t *testing.T) {
testFrame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti,
TypeVersion: data.FrameTypeVersion{0, 1},
Custom: map[string]any{"tagKeys": []string{"app", "env"}},
}
testFrame.RefID = myRefid
@@ -452,45 +290,6 @@ func TestOpenTsdbExecutor(t *testing.T) {
}
})
t.Run("tagKeys are returned sorted alphabetically in frame metadata", func(t *testing.T) {
response := `
[
{
"metric": "cpu.usage",
"dps": [
[1405544146, 75.5]
],
"tags" : {
"zone": "us-east-1",
"host": "server01",
"app": "api",
"env": "production"
}
}
]`
tsdbVersion := float32(4)
resp := http.Response{Body: io.NopCloser(strings.NewReader(response))}
resp.StatusCode = 200
result, err := service.parseResponse(logger, &resp, "A", tsdbVersion)
require.NoError(t, err)
frame := result.Responses["A"].Frames[0]
require.NotNil(t, frame.Meta, "frame metadata should not be nil")
require.NotNil(t, frame.Meta.Custom, "frame custom metadata should not be nil")
customMeta, ok := frame.Meta.Custom.(map[string]any)
require.True(t, ok, "custom metadata should be a map")
tagKeys, ok := customMeta["tagKeys"].([]string)
require.True(t, ok, "tagKeys should be present and be a string slice")
require.Len(t, tagKeys, 4, "should have 4 tag keys")
expectedTagKeys := []string{"app", "env", "host", "zone"}
require.Equal(t, expectedTagKeys, tagKeys, "tagKeys should be sorted alphabetically")
})
t.Run("Build metric with downsampling enabled", func(t *testing.T) {
query := backend.DataQuery{
JSON: []byte(`

View File

@@ -7,9 +7,8 @@ type OpenTsdbQuery struct {
}
type OpenTsdbCommon struct {
Metric string `json:"metric"`
Tags map[string]string `json:"tags"`
AggregateTags []string `json:"aggregateTags"`
Metric string `json:"metric"`
Tags map[string]string `json:"tags"`
}
type OpenTsdbResponse struct {

View File

@@ -1,31 +0,0 @@
package opentsdb
import (
"strconv"
"time"
)
// FormatDownsampleInterval renders a millisecond count as an OpenTSDB
// downsample-interval string, choosing the coarsest readable unit:
// "<n>ms" below one second, then "<n>s", "<n>m", "<n>h", and "<n>d".
// Sub-unit remainders are truncated (e.g. 90000 -> "1m").
func FormatDownsampleInterval(ms int64) string {
	d := time.Duration(ms) * time.Millisecond
	switch {
	case d < time.Second:
		return strconv.FormatInt(ms, 10) + "ms"
	case d < time.Minute:
		return strconv.FormatInt(int64(d/time.Second), 10) + "s"
	case d < time.Hour:
		return strconv.FormatInt(int64(d/time.Minute), 10) + "m"
	case d < 24*time.Hour:
		return strconv.FormatInt(int64(d/time.Hour), 10) + "h"
	default:
		return strconv.FormatInt(int64(d/(24*time.Hour)), 10) + "d"
	}
}

View File

@@ -119,7 +119,7 @@ export class AppChromeService {
};
private getUpdatedHistory(newState: AppChromeState): HistoryEntry[] {
const breadcrumbs = buildBreadcrumbs(newState.sectionNav.node, newState.pageNav, { text: 'Home', url: '/' });
const breadcrumbs = buildBreadcrumbs(newState.sectionNav.node, newState.pageNav, { text: 'Home', url: '/' }, true);
const newPageNav = newState.pageNav || newState.sectionNav.node;
let entries = store.getObject<HistoryEntry[]>(HISTORY_LOCAL_STORAGE_KEY, []);

View File

@@ -0,0 +1,34 @@
import { css } from '@emotion/css';
import type { JSX } from 'react';

import { GrafanaTheme2 } from '@grafana/data';
import { useStyles2 } from '@grafana/ui';

export interface Props {
  children: JSX.Element;
}

/**
 * Wraps a menu element and renders a small dot beside it to highlight a
 * new/featured navigation item. The dot is absolutely positioned and
 * vertically centered relative to the nearest positioned ancestor.
 */
export const FeatureHighlight = ({ children }: Props): JSX.Element => {
  const styles = useStyles2(getStyles);
  return (
    <>
      {children}
      <span className={styles.highlight} />
    </>
  );
};

const getStyles = (theme: GrafanaTheme2) => {
  return {
    highlight: css({
      backgroundColor: theme.colors.success.main,
      borderRadius: theme.shape.radius.circle,
      width: '6px',
      height: '6px',
      // Fix: the value previously contained a stray trailing semicolon
      // ('inline-block;'), which serializes into a malformed CSS declaration.
      display: 'inline-block',
      position: 'absolute',
      top: '50%',
      transform: 'translateY(-50%)',
    }),
  };
};

View File

@@ -62,6 +62,10 @@ export const MegaMenu = memo(
const activeItem = getActiveItem(navItems, state.sectionNav.node, location.pathname);
const handleMegaMenu = () => {
chrome.setMegaMenuOpen(!state.megaMenuOpen);
};
const handleDockedMenu = () => {
chrome.setMegaMenuDocked(!state.megaMenuDocked);
if (state.megaMenuDocked) {
@@ -104,7 +108,7 @@ export const MegaMenu = memo(
return (
<div data-testid={selectors.components.NavMenu.Menu} ref={ref} {...restProps}>
<MegaMenuHeader handleDockedMenu={handleDockedMenu} onClose={onClose} />
<MegaMenuHeader handleDockedMenu={handleDockedMenu} handleMegaMenu={handleMegaMenu} onClose={onClose} />
<nav className={styles.content}>
<ScrollContainer height="100%" overflowX="hidden" showScrollIndicators>
<>
@@ -155,7 +159,7 @@ const getStyles = (theme: GrafanaTheme2) => {
display: 'flex',
flexDirection: 'column',
listStyleType: 'none',
padding: theme.spacing(1, 1, 2, 0.5),
padding: theme.spacing(1, 1, 2, 1),
[theme.breakpoints.up('md')]: {
width: MENU_WIDTH,
},

View File

@@ -2,16 +2,15 @@ import { css } from '@emotion/css';
import { GrafanaTheme2 } from '@grafana/data';
import { t } from '@grafana/i18n';
import { IconButton, Stack, useTheme2 } from '@grafana/ui';
import { IconButton, Stack, ToolbarButton, useTheme2 } from '@grafana/ui';
import { useGrafana } from 'app/core/context/GrafanaContext';
import { HOME_NAV_ID } from 'app/core/reducers/navModel';
import { useSelector } from 'app/types/store';
import { HomeLink } from '../../Branding/Branding';
import { Branding } from '../../Branding/Branding';
import { OrganizationSwitcher } from '../OrganizationSwitcher/OrganizationSwitcher';
import { getChromeHeaderLevelHeight } from '../TopBar/useChromeHeaderHeight';
export interface Props {
handleMegaMenu: () => void;
handleDockedMenu: () => void;
onClose: () => void;
}
@@ -19,20 +18,26 @@ export interface Props {
export const DOCK_MENU_BUTTON_ID = 'dock-menu-button';
export const MEGA_MENU_HEADER_TOGGLE_ID = 'mega-menu-header-toggle';
export function MegaMenuHeader({ handleDockedMenu, onClose }: Props) {
export function MegaMenuHeader({ handleMegaMenu, handleDockedMenu, onClose }: Props) {
const theme = useTheme2();
const { chrome } = useGrafana();
const state = chrome.useState();
const homeNav = useSelector((state) => state.navIndex)[HOME_NAV_ID];
const styles = getStyles(theme);
return (
<div className={styles.header}>
<Stack alignItems="center" minWidth={0} gap={1}>
<HomeLink homeNav={homeNav} inMegaMenuOverlay={!state.megaMenuDocked} />
<Stack alignItems="center" minWidth={0} gap={0.25}>
<ToolbarButton
narrow
id={MEGA_MENU_HEADER_TOGGLE_ID}
onClick={handleMegaMenu}
tooltip={t('navigation.megamenu.close', 'Close menu')}
>
<Branding.MenuLogo className={styles.img} />
</ToolbarButton>
<OrganizationSwitcher />
</Stack>
<div className={styles.flexGrow} />
<IconButton
id={DOCK_MENU_BUTTON_ID}
className={styles.dockMenuButton}
@@ -46,10 +51,11 @@ export function MegaMenuHeader({ handleDockedMenu, onClose }: Props) {
variant="secondary"
/>
<IconButton
className={styles.mobileCloseButton}
tooltip={t('navigation.megamenu.close', 'Close menu')}
name="times"
onClick={onClose}
size="lg"
size="xl"
variant="secondary"
/>
</div>
@@ -72,9 +78,18 @@ const getStyles = (theme: GrafanaTheme2) => ({
display: 'flex',
gap: theme.spacing(1),
justifyContent: 'space-between',
padding: theme.spacing(0, 1, 0, 1),
padding: theme.spacing(0, 1, 0, 0.75),
height: getChromeHeaderLevelHeight(),
flexShrink: 0,
}),
flexGrow: css({ flexGrow: 1 }),
img: css({
alignSelf: 'center',
height: theme.spacing(3),
width: theme.spacing(3),
}),
mobileCloseButton: css({
[theme.breakpoints.up('md')]: {
display: 'none',
},
}),
});

View File

@@ -11,6 +11,7 @@ import { useGrafana } from 'app/core/context/GrafanaContext';
import { Indent } from '../../Indent/Indent';
import { FeatureHighlight } from './FeatureHighlight';
import { MegaMenuItemText } from './MegaMenuItemText';
import { hasChildMatch } from './utils';
@@ -30,6 +31,7 @@ export function MegaMenuItem({ link, activeItem, level = 0, onClick, onPin, isPi
const state = chrome.useState();
const menuIsDocked = state.megaMenuDocked;
const location = useLocation();
const FeatureHighlightWrapper = link.highlightText ? FeatureHighlight : React.Fragment;
const hasActiveChild = hasChildMatch(link, activeItem);
const isActive = link === activeItem || (level === MAX_DEPTH && hasActiveChild);
const [sectionExpanded, setSectionExpanded] = useLocalStorage(
@@ -79,7 +81,11 @@ export function MegaMenuItem({ link, activeItem, level = 0, onClick, onPin, isPi
return (
<li ref={item} className={styles.listItem}>
<div className={styles.menuItem}>
<div
className={cx(styles.menuItem, {
[styles.menuItemWithIcon]: Boolean(level === 0 && iconElement),
})}
>
{level !== 0 && <Indent level={level === MAX_DEPTH ? level - 1 : level} spacing={3} />}
{level === MAX_DEPTH && <div className={styles.itemConnector} />}
<div className={styles.collapsibleSectionWrapper}>
@@ -100,7 +106,7 @@ export function MegaMenuItem({ link, activeItem, level = 0, onClick, onPin, isPi
[styles.labelWrapperWithIcon]: Boolean(level === 0 && iconElement),
})}
>
{level === 0 && iconElement}
{level === 0 && iconElement && <FeatureHighlightWrapper>{iconElement}</FeatureHighlightWrapper>}
<Text truncate element="p">
{link.text}
</Text>
@@ -173,8 +179,12 @@ const getStyles = (theme: GrafanaTheme2) => ({
alignItems: 'center',
gap: theme.spacing(1.5),
height: theme.spacing(4),
paddingLeft: theme.spacing(0.5),
position: 'relative',
}),
menuItemWithIcon: css({
paddingLeft: theme.spacing(0),
}),
collapseButtonWrapper: css({
display: 'flex',
justifyContent: 'center',
@@ -207,17 +217,17 @@ const getStyles = (theme: GrafanaTheme2) => ({
labelWrapper: css({
display: 'flex',
alignItems: 'center',
gap: theme.spacing(1),
paddingLeft: theme.spacing(1),
gap: theme.spacing(0.75),
minWidth: 0,
paddingLeft: theme.spacing(1),
}),
labelWrapperWithIcon: css({
paddingLeft: theme.spacing(0.5),
gap: theme.spacing(0.75),
}),
hasActiveChild: css({
color: theme.colors.text.primary,
}),
labelWrapperWithIcon: css({
minWidth: theme.spacing(7),
paddingLeft: theme.spacing(0.5),
}),
children: css({
display: 'flex',
listStyleType: 'none',

View File

@@ -35,7 +35,11 @@ export function MegaMenuItemText({ children, isActive, onClick, target, url, onP
);
return (
<div className={cx(styles.wrapper, isActive && styles.wrapperActive)}>
<div
className={cx(styles.wrapper, {
[styles.wrapperActive]: isActive,
})}
>
<LinkComponent
data-testid={selectors.components.NavMenu.item}
className={styles.container}
@@ -88,7 +92,6 @@ const getStyles = (theme: GrafanaTheme2, isActive: Props['isActive']) => ({
borderTopRightRadius: theme.shape.radius.default,
borderBottomRightRadius: theme.shape.radius.default,
position: 'relative',
color: theme.colors.text.primary,
'&::before': {
backgroundImage: theme.colors.gradients.brandVertical,
@@ -98,8 +101,7 @@ const getStyles = (theme: GrafanaTheme2, isActive: Props['isActive']) => ({
height: '100%',
position: 'absolute',
transform: 'translateX(-50%)',
left: 0,
width: theme.spacing(0.25),
width: theme.spacing(0.5),
},
}),
container: css({

View File

@@ -15,7 +15,7 @@ import { contextSrv } from 'app/core/services/context_srv';
import { ScopesSelector } from 'app/features/scopes/selector/ScopesSelector';
import { useSelector } from 'app/types/store';
import { HomeLink } from '../../Branding/Branding';
import { Branding } from '../../Branding/Branding';
import { Breadcrumbs } from '../../Breadcrumbs/Breadcrumbs';
import { buildBreadcrumbs } from '../../Breadcrumbs/utils';
import { ExtensionToolbarItem } from '../ExtensionSidebar/ExtensionToolbarItem';
@@ -77,11 +77,11 @@ export const SingleTopBar = memo(function SingleTopBar({
tooltip={t('navigation.megamenu.open', 'Open menu')}
>
<Stack gap={0} alignItems="center">
<Icon name="bars" size="xl" />
<Branding.MenuLogo className={styles.img} />
<Icon size="sm" name="angle-down" />
</Stack>
</ToolbarButton>
)}
{!menuDockedAndOpen && <HomeLink homeNav={homeNav} />}
{topLevelScopes ? <ScopesSelector /> : undefined}
<Breadcrumbs breadcrumbs={breadcrumbs} className={styles.breadcrumbsWrapper} />
{!showToolbarLevel && breadcrumbActions}

View File

@@ -1,10 +1,8 @@
import { css, cx } from '@emotion/css';
import { FC, type JSX } from 'react';
import { colorManipulator, GrafanaTheme2, NavModelItem } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import { reportInteraction } from '@grafana/runtime';
import { Tooltip, useStyles2, useTheme2 } from '@grafana/ui';
import { colorManipulator } from '@grafana/data';
import { useTheme2 } from '@grafana/ui';
import g8LoginDarkSvg from 'img/g8_login_dark.svg';
import g8LoginLightSvg from 'img/g8_login_light.svg';
import grafanaIconSvg from 'img/grafana_icon.svg';
@@ -54,56 +52,6 @@ const MenuLogo: FC<BrandComponentProps> = ({ className }) => {
return <img className={className} src={grafanaIconSvg} alt="Grafana" />;
};
/**
* inMegaMenuOverlay = true we just render the logo without link (used in mega menu)
*/
export function HomeLink({ homeNav, inMegaMenuOverlay }: { homeNav?: NavModelItem; inMegaMenuOverlay?: boolean }) {
const styles = useStyles2(homeLinkStyles);
const onHomeClicked = () => {
reportInteraction('grafana_home_clicked');
};
if (inMegaMenuOverlay) {
return (
<div className={styles.homeLink}>
<Branding.MenuLogo />
</div>
);
}
return (
<Tooltip placement="bottom" content={homeNav?.text || 'Home'}>
<a
onClick={onHomeClicked}
data-testid={selectors.components.Breadcrumbs.breadcrumb('Home')}
className={styles.homeLink}
title={homeNav?.text || 'Home'}
href={homeNav?.url}
>
<Branding.MenuLogo />
</a>
</Tooltip>
);
}
function homeLinkStyles(theme: GrafanaTheme2) {
return {
homeLink: css({
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
height: theme.spacing(3),
width: theme.spacing(3),
margin: theme.spacing(0, 0.5),
img: {
maxHeight: '100%',
maxWidth: '100%',
},
}),
};
}
const LoginBoxBackground = () => {
const theme = useTheme2();
return css({

View File

@@ -91,7 +91,10 @@ describe('breadcrumb utils', () => {
url: '/my-parent-section',
},
};
expect(buildBreadcrumbs(sectionNav, pageNav, mockHomeNav)).toEqual([{ text: 'My page', href: '/my-page' }]);
expect(buildBreadcrumbs(sectionNav, pageNav, mockHomeNav)).toEqual([
{ text: 'Home', href: '/home' },
{ text: 'My page', href: '/my-page' },
]);
});
it('matches the home nav ignoring query parameters', () => {
@@ -111,7 +114,10 @@ describe('breadcrumb utils', () => {
url: '/my-parent-section',
},
};
expect(buildBreadcrumbs(sectionNav, pageNav, mockHomeNav)).toEqual([{ text: 'My page', href: '/my-page' }]);
expect(buildBreadcrumbs(sectionNav, pageNav, mockHomeNav)).toEqual([
{ text: 'Home', href: '/home?orgId=1' },
{ text: 'My page', href: '/my-page' },
]);
});
it('does ignore duplicates', () => {

View File

@@ -2,7 +2,12 @@ import { NavModelItem } from '@grafana/data';
import { Breadcrumb } from './types';
export function buildBreadcrumbs(sectionNav: NavModelItem, pageNav?: NavModelItem, homeNav?: NavModelItem) {
export function buildBreadcrumbs(
sectionNav: NavModelItem,
pageNav?: NavModelItem,
homeNav?: NavModelItem,
skipHome?: boolean
) {
const crumbs: Breadcrumb[] = [];
let foundHome = false;
let lastPath: string | undefined = undefined;
@@ -22,6 +27,9 @@ export function buildBreadcrumbs(sectionNav: NavModelItem, pageNav?: NavModelIte
// Check if we found home/root if if so return early
if (homeNav && urlToMatch === homeNav.url) {
if (!skipHome) {
crumbs.unshift({ text: homeNav.text, href: node.url ?? '' });
}
foundHome = true;
return;
}

View File

@@ -21,7 +21,6 @@ const setup = (props: Partial<PageProps>) => {
{
id: HOME_NAV_ID,
text: 'Home',
url: '/',
},
{
text: 'Section name',

View File

@@ -10,17 +10,13 @@ import { buildBreadcrumbs } from '../Breadcrumbs/utils';
export function usePageTitle(navModel?: NavModel, pageNav?: NavModelItem) {
const homeNav = useSelector((state) => state.navIndex)?.[HOME_NAV_ID];
useEffect(() => {
const sectionNav = (navModel?.node !== navModel?.main ? navModel?.node : navModel?.main) ?? {
text: Branding.AppTitle,
};
const sectionNav = (navModel?.node !== navModel?.main ? navModel?.node : navModel?.main) ?? { text: 'Grafana' };
const parts: string[] = buildBreadcrumbs(sectionNav, pageNav, homeNav)
.map((crumb) => crumb.text)
.reverse();
if (parts[parts.length - 1] !== Branding.AppTitle) {
parts.push(Branding.AppTitle);
}
// Override `Home` with the custom brand title
parts[parts.length - 1] = Branding.AppTitle;
document.title = parts.join(' - ');
}, [homeNav, navModel, pageNav]);

View File

@@ -3,14 +3,11 @@ import { useState } from 'react';
import { Trans, t } from '@grafana/i18n';
import { config, reportInteraction } from '@grafana/runtime';
import { Button, Drawer, Stack, Text } from '@grafana/ui';
import { useGetFrontendSettingsQuery } from 'app/api/clients/provisioning/v0alpha1';
import { appEvents } from 'app/core/app_events';
import { ManagerKind } from 'app/features/apiserver/types';
import { BulkDeleteProvisionedResource } from 'app/features/provisioning/components/BulkActions/BulkDeleteProvisionedResource';
import { BulkExportProvisionedResource } from 'app/features/provisioning/components/BulkActions/BulkExportProvisionedResource';
import { BulkMoveProvisionedResource } from 'app/features/provisioning/components/BulkActions/BulkMoveProvisionedResource';
import { useSelectionProvisioningStatus } from 'app/features/provisioning/hooks/useSelectionProvisioningStatus';
import { useSelectionUnmanagedStatus } from 'app/features/provisioning/hooks/useSelectionUnmanagedStatus';
import { useSearchStateManager } from 'app/features/search/state/SearchStateManager';
import { ShowModalReactEvent } from 'app/types/events';
import { FolderDTO } from 'app/types/folders';
@@ -36,7 +33,6 @@ export interface Props {
export function BrowseActions({ folderDTO }: Props) {
const [showBulkDeleteProvisionedResource, setShowBulkDeleteProvisionedResource] = useState(false);
const [showBulkMoveProvisionedResource, setShowBulkMoveProvisionedResource] = useState(false);
const [showBulkExportProvisionedResource, setShowBulkExportProvisionedResource] = useState(false);
const dispatch = useDispatch();
const selectedItems = useActionSelectionState();
@@ -51,9 +47,6 @@ export function BrowseActions({ folderDTO }: Props) {
selectedItems,
folderDTO?.managedBy === ManagerKind.Repo
);
const { hasUnmanaged, isLoading: isLoadingUnmanaged } = useSelectionUnmanagedStatus(selectedItems);
const { data: frontendSettings, isLoading: isLoadingSettings } = useGetFrontendSettingsQuery();
const hasRepositories = (frontendSettings?.items?.length ?? 0) > 0;
const isSearching = stateManager.hasSearchFilters();
@@ -141,36 +134,16 @@ export function BrowseActions({ folderDTO }: Props) {
}
};
const showExportModal = () => {
trackAction('export', selectedItems);
setShowBulkExportProvisionedResource(true);
};
const moveButton = (
<Button onClick={showMoveModal} variant="secondary">
<Trans i18nKey="browse-dashboards.action.move-button">Move</Trans>
</Button>
);
// Check if any dashboards are selected (export only supports dashboards, not folders)
const hasSelectedDashboards =
Object.keys(selectedItems.dashboard || {}).filter((uid) => selectedItems.dashboard[uid]).length > 0;
const exportButton = (
<Button
onClick={showExportModal}
variant="secondary"
disabled={!hasRepositories || isLoadingSettings || !hasUnmanaged || isLoadingUnmanaged || !hasSelectedDashboards}
>
<Trans i18nKey="browse-dashboards.action.export-to-repository-button">Export to Repository</Trans>
</Button>
);
return (
<>
<Stack gap={1} data-testid="manage-actions">
{moveButton}
{provisioningEnabled && exportButton}
<Button onClick={showDeleteModal} variant="destructive">
<Trans i18nKey="browse-dashboards.action.delete-button">Delete</Trans>
@@ -219,32 +192,6 @@ export function BrowseActions({ folderDTO }: Props) {
/>
</Drawer>
)}
{/* bulk export */}
{showBulkExportProvisionedResource && (
<Drawer
title={
// Heading levels should only increase by one (a11y)
<Text variant="h3" element="h2">
{t('browse-dashboards.action.export-provisioned-resources', 'Export Resources')}
</Text>
}
onClose={() => setShowBulkExportProvisionedResource(false)}
size="md"
>
<BulkExportProvisionedResource
selectedItems={selectedItems}
folderUid={folderDTO?.uid}
onActionComplete={() => {
setShowBulkExportProvisionedResource(false);
onActionComplete();
}}
onDismiss={() => {
setShowBulkExportProvisionedResource(false);
}}
/>
</Drawer>
)}
</>
);
}
@@ -252,7 +199,6 @@ export function BrowseActions({ folderDTO }: Props) {
const actionMap = {
move: 'grafana_manage_dashboards_item_moved',
delete: 'grafana_manage_dashboards_item_deleted',
export: 'grafana_manage_dashboards_item_exported',
} as const;
function trackAction(action: keyof typeof actionMap, selectedItems: Omit<DashboardTreeSelection, 'panel' | '$all'>) {

View File

@@ -13,7 +13,7 @@ import { DashboardViewItem } from 'app/features/search/types';
import { useDispatch, useSelector } from 'app/types/store';
import { PAGE_SIZE } from '../api/services';
import { fetchNextChildrenPage, selectFolderWithAllDashboards } from '../state/actions';
import { fetchNextChildrenPage } from '../state/actions';
import {
useFlatTreeState,
useCheckboxSelectionState,
@@ -81,13 +81,7 @@ export function BrowseView({ folderUID, width, height, permissions, isReadOnlyRe
const handleItemSelectionChange = useCallback(
(item: DashboardViewItem, isSelected: boolean) => {
// If selecting a folder, use the async thunk to collect all dashboards recursively
// When deselecting, the normal reducer will handle deselecting all children
if (item.kind === 'folder') {
dispatch(selectFolderWithAllDashboards({ folderUID: item.uid, isSelected }));
} else {
dispatch(setItemSelectionState({ item, isSelected }));
}
dispatch(setItemSelectionState({ item, isSelected }));
},
[dispatch]
);

View File

@@ -2,17 +2,14 @@ import { useState } from 'react';
import { AppEvents } from '@grafana/data';
import { Trans, t } from '@grafana/i18n';
import { config, locationService, reportInteraction } from '@grafana/runtime';
import { locationService, reportInteraction } from '@grafana/runtime';
import { Button, Drawer, Dropdown, Icon, Menu, MenuItem, Text } from '@grafana/ui';
import { useGetFrontendSettingsQuery } from 'app/api/clients/provisioning/v0alpha1';
import { appEvents } from 'app/core/app_events';
import { Permissions } from 'app/core/components/AccessControl/Permissions';
import { RepoType } from 'app/features/provisioning/Wizard/types';
import { BulkExportProvisionedResource } from 'app/features/provisioning/components/BulkActions/BulkExportProvisionedResource';
import { BulkMoveProvisionedResource } from 'app/features/provisioning/components/BulkActions/BulkMoveProvisionedResource';
import { DeleteProvisionedFolderForm } from 'app/features/provisioning/components/Folders/DeleteProvisionedFolderForm';
import { useIsProvisionedInstance } from 'app/features/provisioning/hooks/useIsProvisionedInstance';
import { collectAllDashboardsUnderFolder } from 'app/features/provisioning/utils/collectFolderDashboards';
import { getReadOnlyTooltipText } from 'app/features/provisioning/utils/repository';
import { ShowModalReactEvent } from 'app/types/events';
import { FolderDTO } from 'app/types/folders';
@@ -35,12 +32,8 @@ export function FolderActionsButton({ folder, repoType, isReadOnlyRepo }: Props)
const [showPermissionsDrawer, setShowPermissionsDrawer] = useState(false);
const [showDeleteProvisionedFolderDrawer, setShowDeleteProvisionedFolderDrawer] = useState(false);
const [showMoveProvisionedFolderDrawer, setShowMoveProvisionedFolderDrawer] = useState(false);
const [showExportFolderDrawer, setShowExportFolderDrawer] = useState(false);
const [exportSelectedDashboards, setExportSelectedDashboards] = useState<Record<string, boolean>>({});
const [moveFolder] = useMoveFolderMutationFacade();
const isProvisionedInstance = useIsProvisionedInstance();
const { data: frontendSettings } = useGetFrontendSettingsQuery();
const hasRepositories = (frontendSettings?.items?.length ?? 0) > 0;
const deleteFolder = useDeleteFolderMutationFacade();
@@ -132,38 +125,9 @@ export function FolderActionsButton({ folder, repoType, isReadOnlyRepo }: Props)
setShowMoveProvisionedFolderDrawer(true);
};
const handleExportFolder = async () => {
try {
// Collect all dashboards under this folder and its children
const dashboardUIDs = await collectAllDashboardsUnderFolder(folder.uid);
// Create selected items object with all dashboards
const selectedDashboards: Record<string, boolean> = {};
dashboardUIDs.forEach((uid) => {
selectedDashboards[uid] = true;
});
setExportSelectedDashboards(selectedDashboards);
setShowExportFolderDrawer(true);
} catch (error) {
appEvents.publish({
type: AppEvents.alertError.name,
payload: [
t(
'browse-dashboards.folder-actions-button.export-folder-error',
'Error collecting dashboards. Please try again later.'
),
],
});
}
};
const managePermissionsLabel = t('browse-dashboards.folder-actions-button.manage-permissions', 'Manage permissions');
const moveLabel = t('browse-dashboards.folder-actions-button.move', 'Move this folder');
const deleteLabel = t('browse-dashboards.folder-actions-button.delete', 'Delete this folder');
const exportLabel = t('browse-dashboards.folder-actions-button.export', 'Export to Repository');
const canExportToRepository = config.featureToggles.provisioning && !isProvisionedFolder && hasRepositories;
const menu = (
<Menu>
@@ -183,18 +147,10 @@ export function FolderActionsButton({ folder, repoType, isReadOnlyRepo }: Props)
label={deleteLabel}
/>
)}
{canExportToRepository && <MenuItem onClick={handleExportFolder} label={exportLabel} />}
</Menu>
);
// Show menu if there are any available actions
const hasAnyActions =
(canViewPermissions && !isProvisionedFolder) ||
(canMoveFolder && !isReadOnlyRepo) ||
(canDeleteFolders && !isReadOnlyRepo) ||
canExportToRepository;
if (!hasAnyActions) {
if (!canViewPermissions && !canMoveFolder && !canDeleteFolders) {
return null;
}
@@ -257,30 +213,6 @@ export function FolderActionsButton({ folder, repoType, isReadOnlyRepo }: Props)
/>
</Drawer>
)}
{showExportFolderDrawer && (
<Drawer
title={
<Text variant="h3" element="h2">
{t('browse-dashboards.action.export-folder', 'Export Folder to Repository')}
</Text>
}
subtitle={folder.title}
onClose={() => setShowExportFolderDrawer(false)}
size="md"
>
<BulkExportProvisionedResource
folderUid={folder.uid}
selectedItems={{
dashboard: exportSelectedDashboards,
folder: {},
}}
onDismiss={() => {
setShowExportFolderDrawer(false);
setExportSelectedDashboards({});
}}
/>
</Drawer>
)}
</>
);
}

View File

@@ -5,7 +5,6 @@ import { createAsyncThunk } from 'app/types/store';
import { listDashboards, listFolders, PAGE_SIZE } from '../api/services';
import { DashboardViewItemWithUIItems, UIDashboardViewItem } from '../types';
import { setItemSelectionState } from './slice';
import { findItem } from './utils';
interface FetchNextChildrenPageArgs {
@@ -89,69 +88,6 @@ export const refetchChildren = createAsyncThunk(
}
);
export const selectFolderWithAllDashboards = createAsyncThunk(
'browseDashboards/selectFolderWithAllDashboards',
async ({ folderUID, isSelected }: { folderUID: string; isSelected: boolean }, { dispatch, getState }) => {
const state = getState().browseDashboards;
// Find the folder item to get its parentUID and managedBy
const folderItem = findItem(state.rootItems?.items ?? [], state.childrenByParentUID, folderUID);
if (!isSelected) {
// When deselecting, use the normal action - it will handle deselecting all children recursively
dispatch(
setItemSelectionState({
item: {
kind: 'folder',
uid: folderUID,
parentUID: folderItem?.parentUID,
managedBy: folderItem?.managedBy,
},
isSelected: false,
})
);
return;
}
// When selecting, collect all dashboards recursively
const { collectAllDashboardsUnderFolder } = await import('app/features/provisioning/utils/collectFolderDashboards');
const dashboardUIDs = await collectAllDashboardsUnderFolder(folderUID);
// First, select the folder itself
dispatch(
setItemSelectionState({
item: {
kind: 'folder',
uid: folderUID,
parentUID: folderItem?.parentUID,
managedBy: folderItem?.managedBy,
},
isSelected: true,
})
);
// Then select all dashboards found
// We need to get the parentUID for each dashboard from the state
// If a dashboard isn't in state yet, we still need to select it
for (const dashboardUID of dashboardUIDs) {
const dashboardItem = findItem(state.rootItems?.items ?? [], state.childrenByParentUID, dashboardUID);
// Even if dashboard isn't in state, we can still select it by UID
// The reducer will handle setting selectedItems.dashboard[dashboardUID] = true
dispatch(
setItemSelectionState({
item: {
kind: 'dashboard',
uid: dashboardUID,
parentUID: dashboardItem?.parentUID ?? folderUID, // Fallback to folderUID if not found
managedBy: dashboardItem?.managedBy,
},
isSelected: true,
})
);
}
}
);
export const fetchNextChildrenPage = createAsyncThunk(
'browseDashboards/fetchNextChildrenPage',
async (

View File

@@ -186,17 +186,6 @@ describe('NavToolbarActions', () => {
});
});
});
describe('where dashboard is not editable', () => {
it('should set dashboard to editable on make editable button press', async () => {
const { dashboard } = setup({}, true);
await userEvent.click(await screen.findByTestId(selectors.components.NavToolbar.editDashboard.editButton));
expect(dashboard.state.editable).toBe(true);
expect(dashboard.state.meta.canEdit).toBe(true);
expect(dashboard.state.meta.canSave).toBe(true);
});
});
});
describe('Given new sharing button', () => {
@@ -225,7 +214,7 @@ describe('NavToolbarActions', () => {
});
});
function setup(meta?: DashboardMeta, editable?: boolean) {
function setup(meta?: DashboardMeta) {
const dashboard = new DashboardScene({
$timeRange: new SceneTimeRange({ from: 'now-6h', to: 'now' }),
meta: {
@@ -240,7 +229,6 @@ function setup(meta?: DashboardMeta, editable?: boolean) {
...meta,
},
title: 'hello',
editable: editable || true,
uid: 'dash-1',
body: DefaultGridLayoutManager.fromVizPanels([
new VizPanel({

View File

@@ -351,7 +351,7 @@ export function ToolbarActions({ dashboard }: Props) {
onClick={() => {
trackDashboardSceneEditButtonClicked(dashboard.state.uid);
dashboard.onEnterEditMode();
dashboard.setState({ meta: { ...meta, canEdit: true, canSave: true } });
dashboard.setState({ editable: true, meta: { ...meta, canEdit: true } });
}}
tooltip={t('dashboard.toolbar.enter-edit-mode.tooltip', 'This dashboard was marked as read only')}
key="edit"

View File

@@ -19,7 +19,6 @@ import kbn from 'app/core/utils/kbn';
import { ShowConfirmModalEvent } from 'app/types/events';
import { ConditionalRenderingGroup } from '../../conditional-rendering/group/ConditionalRenderingGroup';
import { dashboardEditActions } from '../../edit-pane/shared';
import { serializeRow } from '../../serialization/layoutSerializers/RowsLayoutSerializer';
import { getElements } from '../../serialization/layoutSerializers/utils';
import { getDashboardSceneFor } from '../../utils/utils';
@@ -111,18 +110,7 @@ export class RowItem
}
public switchLayout(layout: DashboardLayoutManager) {
const currentLayout = this.state.layout;
dashboardEditActions.edit({
description: t('dashboard.edit-actions.switch-layout-row', 'Switch layout'),
source: this,
perform: () => {
this.setState({ layout });
},
undo: () => {
this.setState({ layout: currentLayout });
},
});
this.setState({ layout });
}
public useEditPaneOptions = useEditOptions.bind(this);

View File

@@ -19,7 +19,6 @@ import kbn from 'app/core/utils/kbn';
import { ShowConfirmModalEvent } from 'app/types/events';
import { ConditionalRenderingGroup } from '../../conditional-rendering/group/ConditionalRenderingGroup';
import { dashboardEditActions } from '../../edit-pane/shared';
import { serializeTab } from '../../serialization/layoutSerializers/TabsLayoutSerializer';
import { getElements } from '../../serialization/layoutSerializers/utils';
import { getDashboardSceneFor } from '../../utils/utils';
@@ -109,18 +108,7 @@ export class TabItem
}
public switchLayout(layout: DashboardLayoutManager) {
const currentLayout = this.state.layout;
dashboardEditActions.edit({
description: t('dashboard.edit-actions.switch-layout-tab', 'Switch layout'),
source: this,
perform: () => {
this.setState({ layout });
},
undo: () => {
this.setState({ layout: currentLayout });
},
});
this.setState({ layout });
}
public useEditPaneOptions = useEditOptions.bind(this);

View File

@@ -34,11 +34,10 @@ setPluginImportUtils({
getPanelPluginFromCache: (id: string) => undefined,
});
export function buildTestScene(isEditing?: boolean, editable?: boolean) {
export function buildTestScene(isEditing = false) {
const testScene = new DashboardScene({
$timeRange: new SceneTimeRange({ from: 'now-6h', to: 'now' }),
isEditing: isEditing || false,
editable: editable || true,
isEditing: isEditing,
body: new DefaultGridLayoutManager({
grid: new SceneGridLayout({
children: [new DashboardGridItem({ body: new VizPanel({ key: 'panel-1', pluginId: 'text' }) })],
@@ -77,15 +76,4 @@ describe('MakeDashboardEditableButton', () => {
expect(DashboardInteractions.editButtonClicked).toHaveBeenCalledWith({ outlineExpanded: false });
});
});
it('should set state correctly', async () => {
const scene = buildTestScene(false, false);
render(<MakeDashboardEditableButton dashboard={scene} />);
await userEvent.click(await screen.findByTestId(selectors.components.NavToolbar.editDashboard.editButton));
expect(scene.state.editable).toBe(true);
expect(scene.state.meta.canEdit).toBe(true);
expect(scene.state.meta.canSave).toBe(true);
});
});

View File

@@ -13,7 +13,7 @@ export const MakeDashboardEditableButton = ({ dashboard }: ToolbarActionProps) =
onClick={() => {
trackDashboardSceneEditButtonClicked(dashboard.state.uid);
dashboard.onEnterEditMode();
dashboard.setState({ meta: { ...dashboard.state.meta, canEdit: true, canSave: true } });
dashboard.setState({ editable: true, meta: { ...dashboard.state.meta, canEdit: true } });
}}
tooltip={t('dashboard.toolbar.new.enter-edit-mode.tooltip', 'This dashboard was marked as read only')}
variant="secondary"

View File

@@ -4,7 +4,6 @@ import { selectors as e2eSelectors } from '@grafana/e2e-selectors';
import { t } from '@grafana/i18n';
import { config, locationService } from '@grafana/runtime';
import { IconName, Menu } from '@grafana/ui';
import { useGetFrontendSettingsQuery } from 'app/api/clients/provisioning/v0alpha1';
import { getTrackingSource, shareDashboardType } from 'app/features/dashboard/components/ShareModal/utils';
import { DashboardScene } from '../../scene/DashboardScene';
@@ -29,11 +28,6 @@ export function addDashboardExportDrawerItem(item: ExportDrawerMenuItem) {
}
export default function ExportMenu({ dashboard }: { dashboard: DashboardScene }) {
const provisioningEnabled = config.featureToggles.provisioning;
const { data: frontendSettings } = useGetFrontendSettingsQuery();
const hasRepositories = (frontendSettings?.items?.length ?? 0) > 0;
const canExportToRepository = provisioningEnabled && !dashboard.isManagedRepository() && hasRepositories;
const onMenuItemClick = (shareView: string) => {
locationService.partial({ shareView });
};
@@ -65,20 +59,8 @@ export default function ExportMenu({ dashboard }: { dashboard: DashboardScene })
onClick: () => onMenuItemClick(shareDashboardType.image),
});
// Add "Export to Repository" option for unmanaged dashboards when repositories exist
if (canExportToRepository) {
menuItems.push({
shareId: 'export-to-repository',
testId: 'export-to-repository',
icon: 'cloud-upload',
label: t('share-dashboard.menu.export-to-repository-title', 'Export to Repository'),
renderCondition: true,
onClick: () => onMenuItemClick('export-to-repository'),
});
}
return menuItems.filter((item) => item.renderCondition);
}, [canExportToRepository]);
}, []);
const onClick = (item: ExportDrawerMenuItem) => {
DashboardInteractions.sharingCategoryClicked({

View File

@@ -1,32 +0,0 @@
import { t } from '@grafana/i18n';
import { SceneComponentProps } from '@grafana/scenes';
import { BulkExportProvisionedResource } from 'app/features/provisioning/components/BulkActions/BulkExportProvisionedResource';
import { DashboardScene } from '../../scene/DashboardScene';
import { ShareExportTab } from '../ShareExportTab';
export class ExportToRepository extends ShareExportTab {
static Component = ExportToRepositoryRenderer;
public getTabLabel(): string {
return t('share-modal.export.export-to-repository-title', 'Export Dashboard to Repository');
}
}
function ExportToRepositoryRenderer({ model }: SceneComponentProps<ExportToRepository>) {
const dashboard = model.getRoot();
if (!(dashboard instanceof DashboardScene)) {
return <></>;
}
return (
<BulkExportProvisionedResource
folderUid={dashboard.state.meta.folderUid || ''}
selectedItems={{
dashboard: dashboard.state.uid ? { [dashboard.state.uid]: true } : {},
folder: {},
}}
onDismiss={model.useState().onDismiss}
/>
);
}

View File

@@ -7,7 +7,6 @@ import { DashboardScene } from '../../scene/DashboardScene';
import { getDashboardSceneFor } from '../../utils/utils';
import { ExportAsCode } from '../ExportButton/ExportAsCode';
import { ExportAsImage } from '../ExportButton/ExportAsImage';
import { ExportToRepository } from '../ExportButton/ExportToRepository';
import { ShareExternally } from '../ShareButton/share-externally/ShareExternally';
import { ShareInternally } from '../ShareButton/share-internally/ShareInternally';
import { ShareSnapshot } from '../ShareButton/share-snapshot/ShareSnapshot';
@@ -97,8 +96,6 @@ function getShareView(
return new ExportAsCode({ onDismiss });
case shareDashboardType.image:
return new ExportAsImage({ onDismiss });
case 'export-to-repository':
return new ExportToRepository({ onDismiss });
default:
return new ShareInternally({ onDismiss });
}

View File

@@ -2,13 +2,11 @@ import { saveAs } from 'file-saver';
import { memo, useState, useMemo } from 'react';
import { Trans, t } from '@grafana/i18n';
import { config } from '@grafana/runtime';
import { Button, Drawer, Field, Modal, Switch, Text } from '@grafana/ui';
import { Button, Field, Modal, Switch } from '@grafana/ui';
import { appEvents } from 'app/core/app_events';
import { DashboardExporter } from 'app/features/dashboard/components/DashExportModal/DashboardExporter';
import { makeExportableV1 } from 'app/features/dashboard-scene/scene/export/exporters';
import { DashboardInteractions } from 'app/features/dashboard-scene/utils/interactions';
import { BulkExportProvisionedResource } from 'app/features/provisioning/components/BulkActions/BulkExportProvisionedResource';
import { ShowModalReactEvent } from 'app/types/events';
import { ViewJsonModal } from './ViewJsonModal';
@@ -19,10 +17,7 @@ interface Props extends ShareModalTabProps {}
export const ShareExport = memo(({ dashboard, panel, onDismiss }: Props) => {
const [shareExternally, setShareExternally] = useState(false);
const [showExportToRepositoryDrawer, setShowExportToRepositoryDrawer] = useState(false);
const exporter = useMemo(() => new DashboardExporter(), []);
const provisioningEnabled = config.featureToggles.provisioning;
const isUnmanaged = !dashboard.meta.provisioned;
const onShareExternallyChange = () => setShareExternally((prev) => !prev);
@@ -34,10 +29,6 @@ export const ShareExport = memo(({ dashboard, panel, onDismiss }: Props) => {
if (shareExternally) {
makeExportableV1(dashboard).then((dashboardJson) => {
if ('error' in dashboardJson) {
console.error('Failed to export dashboard:', dashboardJson.error);
return;
}
openSaveAsDialog(dashboardJson);
});
} else {
@@ -60,17 +51,13 @@ export const ShareExport = memo(({ dashboard, panel, onDismiss }: Props) => {
}
};
const openSaveAsDialog = (dash: unknown) => {
const openSaveAsDialog = (dash: any) => {
const dashboardJsonPretty = JSON.stringify(dash, null, 2);
const blob = new Blob([dashboardJsonPretty], {
type: 'application/json;charset=utf-8',
});
const time = new Date().getTime();
const title =
typeof dash === 'object' && dash !== null && 'title' in dash && typeof dash.title === 'string'
? dash.title
: 'dashboard';
saveAs(blob, `${title}-${time}.json`);
saveAs(blob, `${dash.title}-${time}.json`);
};
const openJsonModal = (clone: object) => {
@@ -93,18 +80,13 @@ export const ShareExport = memo(({ dashboard, panel, onDismiss }: Props) => {
<p>
<Trans i18nKey="share-modal.export.info-text">Export this dashboard.</Trans>
</p>
<Field label={exportExternallyTranslation} noMargin>
<Field label={exportExternallyTranslation}>
<Switch id="share-externally-toggle" value={shareExternally} onChange={onShareExternallyChange} />
</Field>
<Modal.ButtonRow>
<Button variant="secondary" onClick={onDismiss} fill="outline">
<Trans i18nKey="share-modal.export.cancel-button">Cancel</Trans>
</Button>
{provisioningEnabled && isUnmanaged && (
<Button variant="secondary" onClick={() => setShowExportToRepositoryDrawer(true)}>
<Trans i18nKey="share-modal.export.export-to-repository-button">Export to Repository</Trans>
</Button>
)}
<Button variant="secondary" onClick={onViewJson}>
<Trans i18nKey="share-modal.export.view-button">View JSON</Trans>
</Button>
@@ -112,30 +94,6 @@ export const ShareExport = memo(({ dashboard, panel, onDismiss }: Props) => {
<Trans i18nKey="share-modal.export.save-button">Save to file</Trans>
</Button>
</Modal.ButtonRow>
{showExportToRepositoryDrawer && (
<Drawer
title={
<Text variant="h3" element="h2">
{t('share-modal.export.export-to-repository-title', 'Export Dashboard to Repository')}
</Text>
}
subtitle={dashboard.title}
onClose={() => setShowExportToRepositoryDrawer(false)}
size="md"
>
<BulkExportProvisionedResource
folderUid={dashboard.meta.folderUid}
selectedItems={{
dashboard: dashboard.uid ? { [dashboard.uid]: true } : {},
folder: {},
}}
onDismiss={() => {
setShowExportToRepositoryDrawer(false);
onDismiss?.();
}}
/>
</Drawer>
)}
</>
);
});

View File

@@ -85,7 +85,7 @@ const mockCleanUpDashboardAndVariables = jest.fn();
function setup(propOverrides?: Partial<Props>) {
config.bootData.navTree = [
{ text: 'Dashboards', id: 'dashboards/browse' },
{ text: 'Home', id: HOME_NAV_ID, url: '/' },
{ text: 'Home', id: HOME_NAV_ID },
{
text: 'Help',
id: 'help',
@@ -101,9 +101,9 @@ function setup(propOverrides?: Partial<Props>) {
'dashboards/browse': {
text: 'Dashboards',
id: 'dashboards/browse',
parentItem: { text: 'Home', id: HOME_NAV_ID, url: '/' },
parentItem: { text: 'Home', id: HOME_NAV_ID },
},
[HOME_NAV_ID]: { text: 'Home', id: HOME_NAV_ID, url: '/' },
[HOME_NAV_ID]: { text: 'Home', id: HOME_NAV_ID },
},
initPhase: DashboardInitPhase.NotStarted,
initError: null,

View File

@@ -196,39 +196,6 @@ export class DashboardAnalyticsAggregator implements performanceUtils.ScenePerfo
return panel;
}
/**
* Send panel_render interactions for each panel with aggregated metrics
*/
private sendPanelRenderInteractions(data: performanceUtils.DashboardInteractionCompleteData): void {
const panelMetrics = this.getPanelMetrics();
panelMetrics.forEach((panel) => {
const totalPanelTime =
panel.totalQueryTime +
panel.totalTransformationTime +
panel.totalRenderTime +
panel.totalFieldConfigTime +
panel.pluginLoadTime;
// logMeasurement requires numeric values in second parameter, metadata in third
const measurementValues = {
totalTime: Math.round(totalPanelTime * 10) / 10,
queryCount: panel.queryOperations.length,
transformCount: panel.transformationOperations.length,
renderCount: panel.renderOperations.length,
fieldConfigCount: panel.fieldConfigOperations.length,
pluginLoadCount: panel.pluginLoadTime > 0 ? 1 : 0,
};
logMeasurement('panel_render', measurementValues, {
panelKey: panel.panelKey,
pluginId: panel.pluginId,
panelId: panel.panelId,
operationId: data.operationId, // Shared operationId for correlating with dashboard_render
});
});
}
/**
* Send analytics report for dashboard interactions
*/
@@ -251,7 +218,6 @@ export class DashboardAnalyticsAggregator implements performanceUtils.ScenePerfo
reportInteraction('dashboard_render', {
interactionType: data.interactionType,
uid: this.dashboardUID,
operationId: data.operationId, // OperationId for correlating with panel_render interactions
...payload,
});
@@ -259,11 +225,7 @@ export class DashboardAnalyticsAggregator implements performanceUtils.ScenePerfo
interactionType: data.interactionType,
dashboard: this.dashboardUID,
title: this.dashboardTitle,
operationId: data.operationId, // OperationId for correlating with panel_render interactions
});
// Send individual panel_render interactions
this.sendPanelRenderInteractions(data);
}
/**
@@ -389,37 +351,6 @@ export class DashboardAnalyticsAggregator implements performanceUtils.ScenePerfo
});
}
// Panel render interactions summary
if (panelMetrics && panelMetrics.length > 0) {
writePerformanceGroupStart('DAA', `📤 Panel render interactions: ${panelMetrics.length} panels reported`);
panelMetrics.forEach((panel) => {
const totalPanelTime =
panel.totalQueryTime +
panel.totalTransformationTime +
panel.totalRenderTime +
panel.totalFieldConfigTime +
panel.pluginLoadTime;
const isSlowPanel = totalPanelTime > SLOW_OPERATION_THRESHOLD_MS;
writePerformanceGroupLog('DAA', `🎨 ${panel.pluginId}-${panel.panelId}:`, {
totalTime: Math.round(totalPanelTime * 10) / 10,
operations: {
queries: panel.queryOperations.length,
transforms: panel.transformationOperations.length,
renders: panel.renderOperations.length,
fieldConfigs: panel.fieldConfigOperations.length,
pluginLoads: panel.pluginLoadTime > 0 ? 1 : 0,
},
isSlowPanel: isSlowPanel,
...(isSlowPanel && { warning: 'SLOW' }),
});
});
writePerformanceGroupEnd();
}
writePerformanceGroupEnd();
}
}

View File

@@ -109,16 +109,10 @@ const payload = {
reportInteraction('dashboard_render', {
interactionType: e.origin,
uid,
operationId: e.operationId, // Shared operationId for correlating with panel_render measurements
...payload,
});
logMeasurement('dashboard_render', payload, {
interactionType: e.origin,
dashboard: uid,
title: title,
operationId: e.operationId, // Shared operationId for correlating with panel_render measurements
});
logMeasurement(`dashboard_render`, payload, { interactionType: e.origin, dashboard: uid, title: title });
```
### Interaction Origin Mapping
@@ -307,7 +301,6 @@ Aggregates panel-level performance metrics for analytics reporting:
- Tracks operation counts and total time spent per panel
- Sends comprehensive analytics reports via `reportInteraction` and `logMeasurement`
- Provides detailed panel breakdowns including slow panel detection
- Sends individual `panel_render` measurements for each panel with aggregated metrics via `logMeasurement`
#### ScenePerformanceLogger
@@ -351,7 +344,6 @@ Reported for each interaction via `reportInteraction` and `logMeasurement`:
{
interactionType: string, // Type of interaction
uid: string, // Dashboard UID
operationId: string, // Unique operationId for correlating with panel_render measurements
duration: number, // Total duration
networkDuration: number, // Network time
processingTime: number, // Client-side processing time
@@ -366,8 +358,6 @@ Reported for each interaction via `reportInteraction` and `logMeasurement`:
}
```
**Correlation**: The `operationId` field is shared between `dashboard_render` and all associated `panel_render` measurements, enabling correlation of panel metrics with their parent dashboard interaction.
#### Panel-Level Metrics
Aggregated by `DashboardAnalyticsAggregator` for each panel with detailed operation tracking:
@@ -416,44 +406,6 @@ Aggregated by `DashboardAnalyticsAggregator` for each panel with detailed operat
}
```
#### Panel Render Measurements
For each dashboard interaction, individual `panel_render` measurements are sent separately from `dashboard_render` via `logMeasurement`. These measurements are sent for all panels with collected metrics (even if all times are 0), providing granular panel-level analytics.
**When sent**: After `dashboard_render` interaction, one `panel_render` measurement per panel
**Correlation**: All `panel_render` measurements share the same `operationId` as their parent `dashboard_render` interaction, enabling correlation between dashboard and panel-level metrics.
**Measurement values** (via `logMeasurement`):
```typescript
{
totalTime: number, // Sum of all operation times
queryCount: number, // Number of query operations
transformCount: number, // Number of transformation operations
renderCount: number, // Number of render operations
fieldConfigCount: number, // Number of field config operations
pluginLoadCount: number // Number of plugin load operations (0 or 1)
}
```
**Measurement metadata** (via `logMeasurement` context):
```typescript
{
panelKey: string, // Panel key identifier
pluginId: string, // Panel plugin identifier
panelId: string, // Panel identifier
operationId: string // Shared operationId for correlating with dashboard_render
}
```
**Note**: `dashboard`, `title`, and `interactionType` are not included in `panel_render` metadata since they can be obtained by correlating with the parent `dashboard_render` interaction using `operationId`.
**Correlating panel_render with dashboard_render**:
All `panel_render` measurements share the same `operationId` as their parent `dashboard_render` interaction.
## Debugging and Development
### Enable Performance Debug Logging
@@ -548,6 +500,7 @@ DAA: [ANALYTICS] dashboard_view | 4 panels analyzed | 1 slow panels ⚠️
transform: 12.3,
render: 23.8,
fieldConfig: 5.0,
pluginLoad: 39.0
}
}
DAA: 📊 Queries: {
@@ -573,59 +526,6 @@ DAA: [ANALYTICS] dashboard_view | 4 panels analyzed | 1 slow panels ⚠️
**Note**: The indentation shows the **console group hierarchy**. In the browser console, each panel creates a collapsible group that can be expanded to see detailed operation breakdowns. The main dashboard analytics group contains nested panel groups for organized analysis.
#### Panel Render Measurements Summary
After the detailed panel breakdown, a summary of all `panel_render` measurements sent is displayed:
```
DAA: 📤 Panel render interactions: 4 panels reported
DAA: 🎨 timeseries-panel-1: {
totalTime: 125.3,
operations: {
queries: 2,
transforms: 1,
renders: 1,
fieldConfigs: 1,
pluginLoads: 1
},
isSlowPanel: true,
warning: "SLOW"
}
DAA: 🎨 stat-panel-2: {
totalTime: 45.2,
operations: {
queries: 1,
transforms: 0,
renders: 1,
fieldConfigs: 1,
pluginLoads: 0
},
isSlowPanel: false
}
DAA: 🎨 table-panel-3: {
totalTime: 89.7,
operations: {
queries: 1,
transforms: 2,
renders: 1,
fieldConfigs: 1
},
isSlowPanel: false
}
DAA: 🎨 graph-panel-4: {
totalTime: 234.1,
operations: {
queries: 3,
transforms: 1,
renders: 2,
fieldConfigs: 1
},
isSlowPanel: false
}
```
This summary provides a quick overview of all panels that had `panel_render` measurements sent, showing total time, operation counts, and slow panel warnings.
### Enable Echo Service Debug Logging
To observe Echo events in the browser console:

View File

@@ -11,7 +11,7 @@ import { JobContent } from './JobContent';
export interface FinishedJobProps {
jobUid: string;
repositoryName: string;
jobType: 'sync' | 'delete' | 'move' | 'push';
jobType: 'sync' | 'delete' | 'move';
onStatusChange?: (statusInfo: StepStatusInfo) => void;
}

View File

@@ -12,7 +12,7 @@ import { StepStatusInfo } from '../Wizard/types';
import { JobSummary } from './JobSummary';
export interface JobContentProps {
jobType: 'sync' | 'delete' | 'move' | 'push';
jobType: 'sync' | 'delete' | 'move';
job?: Job;
isFinishedJob?: boolean;
onStatusChange?: (statusInfo: StepStatusInfo) => void;

View File

@@ -9,7 +9,7 @@ import { JobContent } from './JobContent';
export interface JobStatusProps {
watch: Job;
jobType: 'sync' | 'delete' | 'move' | 'push';
jobType: 'sync' | 'delete' | 'move';
onStatusChange?: (statusInfo: StepStatusInfo) => void;
}

View File

@@ -3,7 +3,7 @@ import { LinkButton, Stack } from '@grafana/ui';
import { RepositoryUrLs } from 'app/api/clients/provisioning/v0alpha1';
interface Props {
jobType?: 'sync' | 'delete' | 'move' | 'push';
jobType?: 'sync' | 'delete' | 'move';
urls?: RepositoryUrLs;
}
export function PullRequestButtons({ urls, jobType }: Props) {

View File

@@ -8,7 +8,7 @@ import { getRepoHrefForProvider } from '../utils/git';
type RepositoryLinkProps = {
name?: string;
jobType: 'sync' | 'delete' | 'move' | 'push';
jobType: 'sync' | 'delete' | 'move';
};
export function RepositoryLink({ name, jobType }: RepositoryLinkProps) {

View File

@@ -1,7 +1,6 @@
import { useState } from 'react';
import { t, Trans } from '@grafana/i18n';
import { locationService } from '@grafana/runtime';
import { Alert, Box, EmptyState, FilterInput, Icon, Stack, TextLink } from '@grafana/ui';
import { Repository } from 'app/api/clients/provisioning/v0alpha1';
@@ -24,11 +23,6 @@ export function RepositoryList({ items }: Props) {
const filteredItems = items.filter((item) => item.metadata?.name?.includes(query));
const { instanceConnected } = checkSyncSettings(items);
const handlePushUnmanaged = () => {
// Navigate to dashboards page
locationService.push('/dashboards');
};
const getResourceCountSection = () => {
if (isProvisionedInstance) {
return (
@@ -45,42 +39,36 @@ export function RepositoryList({ items }: Props) {
if (filteredItems.length) {
return (
<Alert
title={''}
severity="info"
buttonContent={
unmanagedCount > 0 ? (
<Trans i18nKey="provisioning.folder-repository-list.export-remaining-resources-button">
Export remaining resources
</Trans>
) : undefined
}
onRemove={unmanagedCount > 0 ? handlePushUnmanaged : undefined}
>
<Trans i18nKey="provisioning.folder-repository-list.partial-managed" values={{ managedCount, resourceCount }}>
{{ managedCount }}/{{ resourceCount }} resources managed by Git sync.
</Trans>
{unmanagedCount > 0 && (
<>
{' '}
<Trans i18nKey="provisioning.folder-repository-list.unmanaged-resources" count={unmanagedCount}>
{{ count: unmanagedCount }} resources aren&apos;t managed by Git sync.
</Trans>
</>
)}
{isFreeTierLicense() && (
<>
<br />
<Trans i18nKey="provisioning.free-tier-limit.message-connection">
Free-tier accounts are limited to 20 resources per folder. To add more resources per folder,
</Trans>{' '}
<TextLink href={UPGRADE_URL} external>
<Trans i18nKey="provisioning.free-tier-limit.upgrade-link">upgrade your account</Trans>{' '}
</TextLink>
.
</>
)}
</Alert>
<Stack>
<Alert title={''} severity="info">
<Trans
i18nKey="provisioning.folder-repository-list.partial-managed"
values={{ managedCount, resourceCount }}
>
{{ managedCount }}/{{ resourceCount }} resources managed by Git sync.
</Trans>
{unmanagedCount > 0 && (
<>
{' '}
<Trans i18nKey="provisioning.folder-repository-list.unmanaged-resources" count={unmanagedCount}>
{{ count: unmanagedCount }} resources aren&apos;t managed by Git sync.
</Trans>
</>
)}
{isFreeTierLicense() && (
<>
<br />
<Trans i18nKey="provisioning.free-tier-limit.message-connection">
Free-tier accounts are limited to 20 resources per folder. To add more resources per folder,
</Trans>{' '}
<TextLink href={UPGRADE_URL} external>
<Trans i18nKey="provisioning.free-tier-limit.upgrade-link">upgrade your account</Trans>{' '}
</TextLink>
.
</>
)}
</Alert>
</Stack>
);
}
return null;

View File

@@ -1,383 +0,0 @@
import { css } from '@emotion/css';
import { useState, useCallback, useEffect, useMemo } from 'react';
import { FormProvider, useForm } from 'react-hook-form';
import { AppEvents, GrafanaTheme2 } from '@grafana/data';
import { Trans, t } from '@grafana/i18n';
import { getAppEvents, reportInteraction } from '@grafana/runtime';
import { Alert, Box, Button, Field, Input, Select, Stack, Text, useStyles2 } from '@grafana/ui';
import { RepositoryView, Job, useGetFrontendSettingsQuery } from 'app/api/clients/provisioning/v0alpha1';
import { collectSelectedItems } from 'app/features/browse-dashboards/components/utils';
import { JobStatus } from 'app/features/provisioning/Job/JobStatus';
import { useGetResourceRepositoryView } from 'app/features/provisioning/hooks/useGetResourceRepositoryView';
import { GENERAL_FOLDER_UID } from 'app/features/search/constants';
import { ProvisioningAlert } from '../../Shared/ProvisioningAlert';
import { StepStatusInfo } from '../../Wizard/types';
import { useSelectionRepoValidation } from '../../hooks/useSelectionRepoValidation';
import { StatusInfo } from '../../types';
import { ResourceEditFormSharedFields } from '../Shared/ResourceEditFormSharedFields';
import { getDefaultWorkflow, getWorkflowOptions } from '../defaults';
import { generateTimestamp } from '../utils/timestamp';
import { ExportJobSpec, useBulkActionJob } from './useBulkActionJob';
import { BulkActionFormData, BulkActionProvisionResourceProps } from './utils';
// Props for the bulk-export form body: the pre-computed initial form values
// plus the workflow options derived by the caller (used as a fallback until a
// repository is selected inside the form).
interface FormProps extends BulkActionProvisionResourceProps {
initialValues: BulkActionFormData;
workflowOptions: Array<{ label: string; value: string }>;
}
/**
 * Form body for the bulk "export to repository" dialog.
 *
 * Lets the user pick a target repository, an optional sub-path, a workflow and
 * a commit message, then creates a bulk export job (backend 'push' action) for
 * the selected dashboards and tracks its progress with <JobStatus>.
 * Folders in the selection are filtered out before submitting.
 */
function FormContent({ initialValues, selectedItems, workflowOptions, onDismiss }: FormProps) {
const styles = useStyles2(getPathPrefixStyles);
// States
const [job, setJob] = useState<Job>();
const [jobError, setJobError] = useState<string | StatusInfo>();
const [selectedRepositoryName, setSelectedRepositoryName] = useState<string>('');
const [hasSubmitted, setHasSubmitted] = useState(false);
// Hooks
const { createBulkJob, isLoading: isCreatingJob } = useBulkActionJob();
const methods = useForm<BulkActionFormData>({ defaultValues: initialValues });
const {
handleSubmit,
watch,
setError,
clearErrors,
formState: { errors },
} = methods;
const workflow = watch('workflow');
// Get repositories list from frontend settings (which returns RepositoryView[])
const { data: settingsData, isLoading: isLoadingRepos } = useGetFrontendSettingsQuery();
const repositories = useMemo(() => settingsData?.items ?? [], [settingsData?.items]);
// Auto-select first repository when repositories are loaded
useEffect(() => {
if (repositories.length > 0 && !selectedRepositoryName && !isLoadingRepos) {
setSelectedRepositoryName(repositories[0].name || '');
}
}, [repositories, selectedRepositoryName, isLoadingRepos]);
// Get selected repository
const repositoryView: RepositoryView | undefined = repositories.find((repo) => repo.name === selectedRepositoryName);
// Compute workflow options based on selected repository
const selectedWorkflowOptions = repositoryView ? getWorkflowOptions(repositoryView) : workflowOptions;
// Prefer the selected repository's default workflow; otherwise fall back to the
// first caller-provided option, but only when it is a known workflow value.
const selectedDefaultWorkflow = repositoryView
? getDefaultWorkflow(repositoryView)
: workflowOptions[0]?.value === 'branch' || workflowOptions[0]?.value === 'write'
? workflowOptions[0].value
: undefined;
// Update workflow, branch, and path when repository changes
useEffect(() => {
if (repositoryView && selectedDefaultWorkflow) {
if (selectedDefaultWorkflow === 'branch' || selectedDefaultWorkflow === 'write') {
methods.setValue('workflow', selectedDefaultWorkflow);
if (selectedDefaultWorkflow === 'branch') {
// Branch workflow: propose a unique, timestamped export branch name.
const timestamp = generateTimestamp();
methods.setValue('ref', `bulk-export/${timestamp}`);
} else if (selectedDefaultWorkflow === 'write' && repositoryView.branch) {
// Write workflow: commit directly to the repository's configured branch.
methods.setValue('ref', repositoryView.branch);
}
// Clear the path when repository changes - user will enter sub-path only
methods.setValue('path', '');
}
}
}, [repositoryView, selectedDefaultWorkflow, methods]);
// Validates the selection, reports telemetry, builds the ExportJobSpec and
// kicks off the bulk job. On any failure path, hasSubmitted is reset so the
// user can retry.
const handleSubmitForm = async (data: BulkActionFormData) => {
setHasSubmitted(true);
if (!selectedRepositoryName || !repositoryView) {
// Use a form-level error since 'repository' is not in BulkActionFormData
setError('root', {
type: 'manual',
message: t('browse-dashboards.bulk-export-resources-form.error-no-repository', 'Please select a repository'),
});
setHasSubmitted(false);
return;
}
const resources = collectSelectedItems(selectedItems);
// Filter out folders - only dashboards are supported for export
const dashboardResources = resources.filter((r) => r.kind === 'Dashboard');
if (dashboardResources.length === 0) {
setError('root', {
type: 'manual',
message: t(
'browse-dashboards.bulk-export-resources-form.error-no-dashboards',
'No dashboards selected. Only dashboards can be exported.'
),
});
setHasSubmitted(false);
return;
}
reportInteraction('grafana_provisioning_bulk_export_submitted', {
workflow: data.workflow,
repositoryName: repositoryView.name ?? 'unknown',
repositoryType: repositoryView.type ?? 'unknown',
resourceCount: dashboardResources.length,
});
// Create the export job spec (backend uses 'push' action)
// Combine repository path with user's sub-path
const repoPath = repositoryView.path || '';
const subPath = (data.path || '').trim();
const exportPath = subPath ? `${repoPath}${repoPath.endsWith('/') ? '' : '/'}${subPath}` : repoPath || undefined;
const jobSpec: ExportJobSpec = {
action: 'push',
push: {
message: data.comment || undefined,
// 'write' commits to the repo's default branch, so no explicit branch ref.
branch: data.workflow === 'write' ? undefined : data.ref,
path: exportPath,
resources: dashboardResources,
},
};
const result = await createBulkJob(repositoryView, jobSpec);
if (result.success && result.job) {
setJob(result.job); // Store the job for tracking
} else if (!result.success && result.error) {
getAppEvents().publish({
type: AppEvents.alertError.name,
payload: [
t('browse-dashboards.bulk-export-resources-form.error-exporting-resources', 'Error exporting resources'),
result.error,
],
});
setHasSubmitted(false);
}
};
// Surface job errors from the JobStatus tracker in the local alert.
const onStatusChange = useCallback((statusInfo: StepStatusInfo) => {
if (statusInfo.status === 'error' && statusInfo.error) {
setJobError(statusInfo.error);
}
}, []);
const repositoryOptions = repositories.map((repo) => ({
label: repo.title || repo.name || '',
value: repo.name || '',
}));
return (
<FormProvider {...methods}>
<form onSubmit={handleSubmit(handleSubmitForm)}>
<Stack direction="column" gap={2}>
{hasSubmitted && job ? (
<>
<ProvisioningAlert error={jobError} />
<JobStatus watch={job} jobType="push" onStatusChange={onStatusChange} />
</>
) : (
<>
<Box paddingBottom={2}>
<Trans i18nKey="browse-dashboards.bulk-export-resources-form.export-total">
In total, this will export:
</Trans>
<Text element="p" color="secondary">
{(() => {
// For export, only count explicitly selected dashboards (folders are filtered out)
const selectedDashboardUIDs = Object.keys(selectedItems.dashboard || {}).filter(
(uid) => selectedItems.dashboard[uid]
);
const selectedFolderUIDs = Object.keys(selectedItems.folder || {}).filter(
(uid) => selectedItems.folder[uid]
);
const totalItems = selectedDashboardUIDs.length + selectedFolderUIDs.length;
if (totalItems === 0) {
return t('browse-dashboards.bulk-export-resources-form.no-items', 'No items selected');
}
const parts: string[] = [];
if (selectedFolderUIDs.length > 0) {
parts.push(
t('browse-dashboards.bulk-export-resources-form.folders-count', '{{count}} folder', {
count: selectedFolderUIDs.length,
})
);
}
if (selectedDashboardUIDs.length > 0) {
parts.push(
t('browse-dashboards.bulk-export-resources-form.dashboards-count', '{{count}} dashboard', {
count: selectedDashboardUIDs.length,
})
);
}
return `${totalItems} ${totalItems === 1 ? 'item' : 'items'}: ${parts.join(', ')}`;
})()}
</Text>
</Box>
{/* Show form-level errors */}
{errors.root && <Alert severity="error" title={String(errors.root.message)} />}
{/* Info if folders are selected */}
{Object.keys(selectedItems.folder || {}).filter((uid) => selectedItems.folder[uid]).length > 0 && (
<Alert
severity="info"
title={t('browse-dashboards.bulk-export-resources-form.folders-info', 'Folders in selection')}
>
{t(
'browse-dashboards.bulk-export-resources-form.folders-info-description',
'Folders will be left behind. New folders will be created in the repository based on the resource folder structure.'
)}
</Alert>
)}
{/* Repository selection */}
<Field
noMargin
label={t('browse-dashboards.bulk-export-resources-form.repository', 'Repository')}
error={errors.root?.message}
invalid={!!errors.root && !selectedRepositoryName}
required
>
<Select
options={repositoryOptions}
value={selectedRepositoryName}
onChange={(option) => {
setSelectedRepositoryName(option?.value || '');
clearErrors('root');
}}
isLoading={isLoadingRepos}
placeholder={t(
'browse-dashboards.bulk-export-resources-form.repository-placeholder',
'Select a repository'
)}
/>
</Field>
{/* Path field: with a repo path configured, show it as a fixed prefix */}
{repositoryView?.path && (
<Field
noMargin
label={t('browse-dashboards.bulk-export-resources-form.path', 'Path')}
description={t(
'browse-dashboards.bulk-export-resources-form.path-description-with-repo',
'Add a sub-path below to organize exported resources.'
)}
>
<Stack direction="row" gap={0} alignItems="stretch">
<div className={styles.pathPrefix}>
<Text variant="body" color="secondary">
{repositoryView.path}
</Text>
</div>
<Input
type="text"
{...methods.register('path')}
placeholder={t(
'browse-dashboards.bulk-export-resources-form.path-placeholder-with-repo',
'e.g., dashboards/team-a/'
)}
style={{
borderTopLeftRadius: 0,
borderBottomLeftRadius: 0,
flex: 1,
}}
/>
</Stack>
</Field>
)}
{!repositoryView?.path && (
<Field
noMargin
label={t('browse-dashboards.bulk-export-resources-form.path', 'Path')}
description={t(
'browse-dashboards.bulk-export-resources-form.path-description',
'Path relative to the repository root (optional). Resources will be exported under this path.'
)}
>
<Input
type="text"
{...methods.register('path')}
placeholder={t(
'browse-dashboards.bulk-export-resources-form.path-placeholder',
'e.g., dashboards/'
)}
/>
</Field>
)}
{/* Shared fields (comment, workflow, branch) */}
{repositoryView && (
<ResourceEditFormSharedFields
resourceType="dashboard"
isNew={false}
workflow={workflow}
workflowOptions={selectedWorkflowOptions}
repository={repositoryView}
hidePath
/>
)}
<Stack gap={2}>
<Button variant="secondary" fill="outline" onClick={onDismiss} disabled={isCreatingJob}>
<Trans i18nKey="browse-dashboards.bulk-export-resources-form.button-cancel">Cancel</Trans>
</Button>
<Button type="submit" disabled={!!job || isCreatingJob || hasSubmitted || !selectedRepositoryName}>
{isCreatingJob
? t('browse-dashboards.bulk-export-resources-form.button-exporting', 'Exporting...')
: t('browse-dashboards.bulk-export-resources-form.button-export', 'Export')}
</Button>
</Stack>
</>
)}
</Stack>
</form>
</FormProvider>
);
}
/**
 * Entry point of the bulk "export to repository" flow.
 *
 * Resolves the repository context (from the current folder, or from the
 * selection when on the root browse page), derives workflow defaults and
 * renders the form with matching initial values.
 */
export function BulkExportProvisionedResource({
  folderUid,
  selectedItems,
  onDismiss,
}: BulkActionProvisionResourceProps) {
  // On the root browse-dashboards page there is no folder context, so the
  // repository is derived from the current selection instead.
  const onRootBrowsePage = !folderUid || folderUid === GENERAL_FOLDER_UID;
  const { selectedItemsRepoUID } = useSelectionRepoValidation(selectedItems);
  const { repository } = useGetResourceRepositoryView({
    folderName: onRootBrowsePage ? selectedItemsRepoUID : folderUid,
  });

  const defaultWorkflow = getDefaultWorkflow(repository);
  const timestamp = generateTimestamp();
  // Branch workflow gets a fresh timestamped branch name; write workflow
  // targets the repository's configured branch.
  const initialRef = defaultWorkflow === 'branch' ? `bulk-export/${timestamp}` : (repository?.branch ?? '');

  // Note: We don't require a repository context for export since user selects target repository
  return (
    <FormContent
      selectedItems={selectedItems}
      onDismiss={onDismiss}
      initialValues={{ comment: '', ref: initialRef, workflow: defaultWorkflow, path: '' }}
      workflowOptions={getWorkflowOptions(repository)}
    />
  );
}
/**
 * Styles for the read-only repository-path prefix rendered in front of the
 * sub-path input. The right edge is left open (no border, no rounding) so the
 * prefix visually merges with the input next to it.
 */
const getPathPrefixStyles = (theme: GrafanaTheme2) => {
  const leftRadius = theme.shape.borderRadius(1);
  return {
    pathPrefix: css({
      display: 'flex',
      alignItems: 'center',
      whiteSpace: 'nowrap',
      padding: theme.spacing(0, 1),
      backgroundColor: theme.colors.background.secondary,
      border: `1px solid ${theme.colors.border.strong}`,
      // Open right edge: the adjacent <Input> supplies that border.
      borderRight: 'none',
      borderTopLeftRadius: leftRadius,
      borderBottomLeftRadius: leftRadius,
    }),
  };
};

View File

@@ -24,17 +24,7 @@ export interface MoveJobSpec {
};
}
export interface ExportJobSpec {
action: 'push';
push: {
message?: string;
branch?: string;
path?: string;
resources: ResourceRef[];
};
}
export type BulkJobSpec = DeleteJobSpec | MoveJobSpec | ExportJobSpec;
export type BulkJobSpec = DeleteJobSpec | MoveJobSpec;
interface UseBulkActionJobResult {
createBulkJob: (

View File

@@ -8,7 +8,6 @@ export type BulkActionFormData = {
ref: string;
workflow?: WorkflowOption;
targetFolderUID?: string;
path?: string;
};
export interface BulkActionProvisionResourceProps {

View File

@@ -7,21 +7,9 @@ import { ResourceWrapper } from 'app/api/clients/provisioning/v0alpha1';
import { useProvisionedRequestHandler, RequestHandlers } from './useProvisionedRequestHandler';
jest.mock('@grafana/runtime', () => {
const original = jest.requireActual('@grafana/runtime');
return {
...original,
getAppEvents: jest.fn(),
config: {
...original.config,
bootData: {
user: {},
settings: {},
navTree: [],
},
},
};
});
jest.mock('@grafana/runtime', () => ({
getAppEvents: jest.fn(),
}));
jest.mock('@grafana/i18n', () => ({
t: jest.fn((key: string, defaultValue: string) => defaultValue),

View File

@@ -1,158 +0,0 @@
import { useState, useEffect, useCallback, useMemo, useRef } from 'react';
import { config } from '@grafana/runtime';
import { ScopedResourceClient } from 'app/features/apiserver/client';
import { AnnoKeyManagerKind, ManagerKind } from 'app/features/apiserver/types';
import { isProvisionedDashboard as isProvisionedDashboardFromMeta } from 'app/features/browse-dashboards/api/isProvisioned';
import { getDashboardAPI } from 'app/features/dashboard/api/dashboard_api';
import { useSearchStateManager } from 'app/features/search/state/SearchStateManager';
import { useSelector } from 'app/types/store';
import { findItem } from '../../browse-dashboards/state/utils';
import { DashboardTreeSelection } from '../../browse-dashboards/types';
// This hook checks if selected items are unmanaged (not managed by any repository)
/**
 * React hook that reports whether the current browse-dashboards selection is
 * eligible for export to a repository.
 *
 * NOTE: despite the name, `hasUnmanaged` is true only when EVERY selected
 * item is unmanaged (see the `.every()` below) — if any item is already
 * managed by a repository, it is false.
 *
 * Resolution strategy:
 * - While search filters are active, items are not in the browse tree, so
 *   each item's metadata is fetched from the API (with per-UID caching).
 * - Otherwise, managed state is read from the Redux browse-dashboards tree.
 *
 * Returns { hasUnmanaged, isLoading }.
 */
export function useSelectionUnmanagedStatus(selectedItems: Omit<DashboardTreeSelection, 'panel' | '$all'>): {
hasUnmanaged: boolean;
isLoading: boolean;
} {
const browseState = useSelector((state) => state.browseDashboards);
const [, stateManager] = useSearchStateManager();
const isSearching = stateManager.hasSearchFilters();
const provisioningEnabled = config.featureToggles.provisioning;
const [status, setStatus] = useState({ hasUnmanaged: false, isLoading: true });
// Per-UID caches of "is unmanaged" results from API lookups.
const [folderCache, setFolderCache] = useState<Record<string, boolean>>({});
const [dashboardCache, setDashboardCache] = useState<Record<string, boolean>>({});
// Create folder resource client for k8s API
const folderClient = useMemo(
() =>
new ScopedResourceClient({
group: 'folder.grafana.app',
version: 'v1beta1',
resource: 'folders',
}),
[]
);
// Memoize the selected item UIDs to avoid unnecessary re-runs when children are loaded
const selectedDashboardUIDs = useMemo(
() => Object.keys(selectedItems.dashboard || {}).filter((uid) => selectedItems.dashboard[uid]),
[selectedItems.dashboard]
);
const selectedFolderUIDs = useMemo(
() => Object.keys(selectedItems.folder || {}).filter((uid) => selectedItems.folder[uid]),
[selectedItems.folder]
);
// Use a ref to always access the latest browseState without causing re-renders
const browseStateRef = useRef(browseState);
browseStateRef.current = browseState;
// Look up an item in the Redux browse tree; returns only the fields we need.
const findItemInState = useCallback(
(uid: string) => {
const state = browseStateRef.current;
const item = findItem(state.rootItems?.items || [], state.childrenByParentUID, uid);
return item ? { parentUID: item.parentUID, managedBy: item.managedBy } : undefined;
},
[] // No dependencies - always uses latest state via ref
);
// Fetch (or read from cache) whether a folder is unmanaged, via the k8s API.
const getFolderMeta = useCallback(
async (uid: string) => {
if (folderCache[uid] !== undefined) {
return folderCache[uid];
}
try {
const folder = await folderClient.get(uid);
const managedBy = folder.metadata?.annotations?.[AnnoKeyManagerKind];
// Unmanaged if not managed by repository
const result = managedBy !== ManagerKind.Repo;
setFolderCache((prev) => ({ ...prev, [uid]: result }));
return result;
} catch {
// If we can't fetch, assume unmanaged
// (failures are not cached, so the lookup will be retried next run)
return true;
}
},
[folderCache, folderClient]
);
// Fetch (or read from cache) whether a dashboard is unmanaged.
const getDashboardMeta = useCallback(
async (uid: string) => {
if (dashboardCache[uid] !== undefined) {
return dashboardCache[uid];
}
try {
const dto = await getDashboardAPI().getDashboardDTO(uid);
// Unmanaged if not provisioned
const result = !isProvisionedDashboardFromMeta(dto);
setDashboardCache((prev) => ({ ...prev, [uid]: result }));
return result;
} catch {
// If we can't fetch, assume unmanaged
// (failures are not cached, so the lookup will be retried next run)
return true;
}
},
[dashboardCache]
);
// Resolve one item's unmanaged state, choosing API vs. tree lookup.
const checkItemUnmanaged = useCallback(
async (uid: string, isFolder: boolean): Promise<boolean> => {
if (isSearching) {
return isFolder ? await getFolderMeta(uid) : await getDashboardMeta(uid);
}
const item = findItemInState(uid);
if (isFolder) {
// Unmanaged if not managed by repository
return item?.managedBy !== ManagerKind.Repo;
}
// Check parent folder first for dashboards
const parent = item?.parentUID ? findItemInState(item.parentUID) : undefined;
if (parent?.managedBy === ManagerKind.Repo) {
// If parent is managed, dashboard is managed
return false;
}
// Unmanaged if not managed by repository
return item?.managedBy !== ManagerKind.Repo;
},
[isSearching, getFolderMeta, getDashboardMeta, findItemInState]
);
// Recompute the status whenever the selection (or feature toggle) changes.
useEffect(() => {
if (!provisioningEnabled) {
setStatus({ hasUnmanaged: false, isLoading: false });
return;
}
const checkUnmanagedStatus = async () => {
setStatus({ hasUnmanaged: false, isLoading: true });
if (selectedDashboardUIDs.length === 0 && selectedFolderUIDs.length === 0) {
setStatus({ hasUnmanaged: false, isLoading: false });
return;
}
// Check all selected items
const checks = [
...selectedDashboardUIDs.map((uid) => checkItemUnmanaged(uid, false)),
...selectedFolderUIDs.map((uid) => checkItemUnmanaged(uid, true)),
];
const results = await Promise.all(checks);
// Export should only be enabled if ALL selected items are unmanaged
// If ANY item is managed, hasUnmanaged should be false
const hasUnmanaged = results.length > 0 && results.every((isUnmanaged) => isUnmanaged);
setStatus({ hasUnmanaged, isLoading: false });
};
checkUnmanagedStatus();
}, [selectedDashboardUIDs, selectedFolderUIDs, provisioningEnabled, checkItemUnmanaged]);
return status;
}

View File

@@ -1,71 +0,0 @@
import { listDashboards } from 'app/features/browse-dashboards/api/services';
import { getGrafanaSearcher } from 'app/features/search/service/searcher';
/**
* Recursively collects all dashboards under a folder and its children
* @param folderUID - The UID of the folder to collect dashboards from
* @returns Array of dashboard UIDs
*/
/**
 * Recursively collects all dashboards under a folder and its children.
 *
 * Traverses the folder tree breadth-first: for each folder it pages through
 * its dashboards, then pages through the search API for child folders and
 * queues them. A visited set guards against cycles/duplicates.
 *
 * @param folderUID - The UID of the folder to collect dashboards from
 * @returns Array of dashboard UIDs
 */
export async function collectAllDashboardsUnderFolder(folderUID: string): Promise<string[]> {
  const dashboardUIDs: string[] = [];
  const foldersToProcess: string[] = [folderUID];
  const processedFolders = new Set<string>();

  while (foldersToProcess.length > 0) {
    const currentFolderUID = foldersToProcess.shift()!;
    if (processedFolders.has(currentFolderUID)) {
      continue;
    }
    processedFolders.add(currentFolderUID);

    // Get dashboards directly in this folder, one page at a time.
    let page = 1;
    const pageSize = 100; // Use a reasonable page size
    let hasMore = true;
    while (hasMore) {
      const dashboards = await listDashboards(currentFolderUID, page, pageSize);
      for (const dashboard of dashboards) {
        dashboardUIDs.push(dashboard.uid);
      }
      // A short page means we've seen everything in this folder.
      hasMore = dashboards.length === pageSize;
      page++;
    }

    // Find child folders via the search API and add them to the queue.
    // Paginate to ensure we get ALL child folders, not just the first page.
    const searcher = getGrafanaSearcher();
    let folderPage = 0;
    let hasMoreFolders = true;
    const folderPageSize = 100;
    while (hasMoreFolders) {
      const foldersResults = await searcher.search({
        kind: ['folder'],
        query: '*',
        location: currentFolderUID || 'general',
        from: folderPage * folderPageSize,
        limit: folderPageSize,
      });
      for (const folderItem of foldersResults.view) {
        // Named childFolderUID to avoid shadowing the `folderUID` parameter.
        const childFolderUID = folderItem.uid;
        if (childFolderUID && !processedFolders.has(childFolderUID)) {
          foldersToProcess.push(childFolderUID);
        }
      }
      // Check if we've loaded all folders (fewer than a full page means done).
      hasMoreFolders = foldersResults.view.length === folderPageSize;
      folderPage++;
    }
  }
  return dashboardUIDs;
}

View File

@@ -0,0 +1,21 @@
import { QueryEditorProps } from '@grafana/data';
import { QueryHeaderProps, SQLOptions, SQLQuery, SqlQueryEditorLazy } from '@grafana/sql';
import { PostgresDatasource } from './datasource';
import { migrateVariableQuery } from './migrations';
// Header options passed to the shared SQL editor when it is used as a
// variable query editor: postgres dialect, with the run button and the
// format selector hidden.
const queryHeaderProps: Pick<QueryHeaderProps, 'dialect' | 'hideRunButton' | 'hideFormatSelector'> = {
dialect: 'postgres',
hideRunButton: true,
hideFormatSelector: true,
};
/**
 * Variable query editor for the Postgres datasource.
 *
 * Wraps the shared lazy SQL editor: legacy (string) variable queries are
 * migrated to the SQLQuery shape, the postgres header options are applied,
 * and the editor is flagged as running in a variable-query context.
 */
export function VariableQueryEditor(props: QueryEditorProps<PostgresDatasource, SQLQuery, SQLOptions>) {
  const migratedQuery = migrateVariableQuery(props.query);
  return (
    <SqlQueryEditorLazy {...props} query={migratedQuery} queryHeaderProps={queryHeaderProps} isVariableQuery={true} />
  );
}

View File

@@ -2,7 +2,7 @@ import { v4 as uuidv4 } from 'uuid';
import { DataSourceInstanceSettings, ScopedVars, VariableWithMultiSupport } from '@grafana/data';
import { LanguageDefinition } from '@grafana/plugin-ui';
import { TemplateSrv } from '@grafana/runtime';
import { config, TemplateSrv } from '@grafana/runtime';
import {
COMMON_FNS,
DB,
@@ -16,15 +16,23 @@ import {
import { PostgresQueryModel } from './PostgresQueryModel';
import { getSchema, getTimescaleDBVersion, getVersion, showTables } from './postgresMetaQuery';
import { transformMetricFindResponse } from './responseParser';
import { fetchColumns, fetchTables, getSqlCompletionProvider } from './sqlCompletionProvider';
import { getFieldConfig, toRawSql } from './sqlUtil';
import { PostgresOptions } from './types';
import { SQLVariableSupport } from './variables';
export class PostgresDatasource extends SqlDatasource {
sqlLanguageDefinition: LanguageDefinition | undefined = undefined;
constructor(instanceSettings: DataSourceInstanceSettings<PostgresOptions>) {
super(instanceSettings);
if (config.featureToggles.postgresVariableQueryEditor) {
this.variables = new SQLVariableSupport(this);
this.responseParser = {
transformMetricFindResponse: transformMetricFindResponse,
};
}
}
getQueryModel(target?: SQLQuery, templateSrv?: TemplateSrv, scopedVars?: ScopedVars): PostgresQueryModel {

View File

@@ -0,0 +1,82 @@
import { QueryFormat, SQLQuery } from '@grafana/sql';
import { migrateVariableQuery } from './migrations';
// Tests for migrateVariableQuery: legacy string variable queries must be
// upgraded to the SQLQuery shape (with defaults applied and a fixed refId),
// and existing SQLQuery objects must pass through unchanged with `query`
// mirrored from `rawSql`.
describe('migrateVariableQuery', () => {
describe('when given a string query (legacy format)', () => {
it('should convert to SQLQuery format with rawSql and query fields', () => {
const result = migrateVariableQuery('SELECT hostname FROM hosts');
expect(result.rawSql).toBe('SELECT hostname FROM hosts');
expect(result.query).toBe('SELECT hostname FROM hosts');
// refId is the fixed identifier assigned to migrated legacy queries.
expect(result.refId).toBe('SQLVariableQueryEditor-VariableQuery');
});
it('should handle empty string', () => {
const result = migrateVariableQuery('');
expect(result.rawSql).toBe('');
expect(result.query).toBe('');
});
it('should handle complex SQL queries', () => {
// The SQL text must be carried over verbatim, including __text/__value aliases.
const complexQuery = `SELECT hostname AS __text, id AS __value FROM hosts WHERE region = 'us-east-1'`;
const result = migrateVariableQuery(complexQuery);
expect(result.rawSql).toBe(complexQuery);
expect(result.query).toBe(complexQuery);
});
});
describe('when given an SQLQuery object', () => {
it('should preserve the rawSql and add query field', () => {
const sqlQuery = {
refId: 'A',
rawSql: 'SELECT id FROM table',
};
const result = migrateVariableQuery(sqlQuery);
expect(result.rawSql).toBe('SELECT id FROM table');
expect(result.query).toBe('SELECT id FROM table');
// Existing refId must be kept, not replaced by the migration refId.
expect(result.refId).toBe('A');
});
it('should handle SQLQuery with empty rawSql', () => {
const sqlQuery = {
refId: 'A',
rawSql: '',
};
const result = migrateVariableQuery(sqlQuery);
expect(result.rawSql).toBe('');
expect(result.query).toBe('');
});
it('should handle SQLQuery without rawSql', () => {
// Missing rawSql falls back to an empty query string.
const sqlQuery = {
refId: 'A',
};
const result = migrateVariableQuery(sqlQuery);
expect(result.query).toBe('');
});
it('should preserve all existing SQLQuery properties', () => {
const sqlQuery: SQLQuery = {
refId: 'B',
rawSql: 'SELECT * FROM users',
format: QueryFormat.Table,
table: 'users',
dataset: 'mydb',
};
const result = migrateVariableQuery(sqlQuery);
expect(result.refId).toBe('B');
expect(result.rawSql).toBe('SELECT * FROM users');
expect(result.query).toBe('SELECT * FROM users');
expect(result.format).toBe(QueryFormat.Table);
expect(result.table).toBe('users');
expect(result.dataset).toBe('mydb');
});
});
});

View File

@@ -0,0 +1,20 @@
import { applyQueryDefaults, type SQLQuery } from '@grafana/sql';
import type { VariableQuery } from './types';
/**
 * Normalizes a Postgres variable query into the VariableQuery shape.
 *
 * Legacy variable queries were stored as plain SQL strings; newer ones are
 * full SQLQuery objects. In both cases the resulting object carries the SQL
 * text in `query` alongside `rawSql`.
 */
export function migrateVariableQuery(rawQuery: string | SQLQuery): VariableQuery {
  // Legacy string query: wrap it in a defaulted SQLQuery with a fixed refId.
  if (typeof rawQuery === 'string') {
    const defaulted = applyQueryDefaults({
      refId: 'SQLVariableQueryEditor-VariableQuery',
      rawSql: rawQuery,
    });
    return { ...defaulted, query: rawQuery };
  }
  // Already an SQLQuery: keep it as-is and mirror rawSql into `query`.
  return { ...rawQuery, query: rawQuery.rawSql || '' };
}

View File

@@ -0,0 +1,158 @@
import { FieldType, DataFrame } from '@grafana/data';
import { transformMetricFindResponse } from './responseParser';
describe('transformMetricFindResponse function', () => {
it('should handle big arrays', () => {
const stringValues = new Array(150_000).fill('a');
const numberValues = new Array(150_000).fill(1);
const frame: DataFrame = {
fields: [
{ name: 'name', type: FieldType.string, config: {}, values: stringValues },
{ name: 'value', type: FieldType.number, config: {}, values: numberValues },
],
length: stringValues.length,
};
const result = transformMetricFindResponse(frame);
// Without __text and __value fields, all values are added as text-only entries
// 150,000 'a' values + 150,000 1 values = 300,000 total
// After deduplication by text, we get 2 unique items ('a' and 1)
expect(result).toHaveLength(2);
const textValues = result.map((r) => r.text);
expect(textValues).toContain('a');
expect(textValues).toContain(1);
});
it('should add all values from multiple fields without __text/__value (backwards compatible)', () => {
const frame: DataFrame = {
fields: [
{ name: 'id', type: FieldType.string, config: {}, values: ['user1', 'user2', 'user3'] },
{
name: 'email',
type: FieldType.string,
config: {},
values: ['user1@test.com', 'user2@test.com', 'user3@test.com'],
},
{ name: 'role', type: FieldType.string, config: {}, values: ['admin', 'user', 'guest'] },
],
length: 3,
};
const result = transformMetricFindResponse(frame);
// Without __text and __value, all values from all fields are added as text-only entries
expect(result).toHaveLength(9);
// Entries should only have text, no value or properties
const user1Entry = result.find((r) => r.text === 'user1');
expect(user1Entry).toEqual({ text: 'user1' });
const emailEntry = result.find((r) => r.text === 'user1@test.com');
expect(emailEntry).toEqual({ text: 'user1@test.com' });
});
it('should handle single field (backwards compatible)', () => {
const frame: DataFrame = {
fields: [{ name: 'name', type: FieldType.string, config: {}, values: ['value1', 'value2'] }],
length: 2,
};
const result = transformMetricFindResponse(frame);
expect(result).toHaveLength(2);
// Without __text and __value, values are added as text-only entries
expect(result[0]).toEqual({ text: 'value1' });
expect(result[1]).toEqual({ text: 'value2' });
});
it('should still handle __text and __value fields', () => {
const frame: DataFrame = {
fields: [
{ name: '__text', type: FieldType.string, config: {}, values: ['Display 1', 'Display 2'] },
{ name: '__value', type: FieldType.string, config: {}, values: ['val1', 'val2'] },
],
length: 2,
};
const result = transformMetricFindResponse(frame);
expect(result).toHaveLength(2);
expect(result[0]).toEqual({
text: 'Display 1',
value: 'val1',
});
expect(result[1]).toEqual({
text: 'Display 2',
value: 'val2',
});
});
it('should skip fields named "text" or "value" in properties when __text and __value are present', () => {
  // Fields whose names collide with the MetricFindValue contract must not be
  // copied into properties; only genuinely extra fields (description) are kept.
  const frame: DataFrame = {
    fields: [
      { name: '__text', type: FieldType.string, config: {}, values: ['Display 1', 'Display 2'] },
      { name: '__value', type: FieldType.string, config: {}, values: ['val1', 'val2'] },
      { name: 'text', type: FieldType.string, config: {}, values: ['Text 1', 'Text 2'] },
      { name: 'value', type: FieldType.string, config: {}, values: ['Value 1', 'Value 2'] },
      { name: 'description', type: FieldType.string, config: {}, values: ['Desc 1', 'Desc 2'] },
    ],
    length: 2,
  };

  const result = transformMetricFindResponse(frame);

  expect(result).toHaveLength(2);
  expect(result).toEqual([
    { text: 'Display 1', value: 'val1', properties: { description: 'Desc 1' } },
    { text: 'Display 2', value: 'val2', properties: { description: 'Desc 2' } },
  ]);
});
it('should add additional fields as properties when __text and __value are present', () => {
  // Extra fields ride along in `properties`, stringified (note priority 1 -> '1').
  const frame: DataFrame = {
    fields: [
      { name: '__text', type: FieldType.string, config: {}, values: ['Display 1', 'Display 2'] },
      { name: '__value', type: FieldType.string, config: {}, values: ['val1', 'val2'] },
      { name: 'category', type: FieldType.string, config: {}, values: ['cat1', 'cat2'] },
      { name: 'priority', type: FieldType.number, config: {}, values: [1, 2] },
    ],
    length: 2,
  };

  const result = transformMetricFindResponse(frame);

  expect(result).toHaveLength(2);
  expect(result).toEqual([
    { text: 'Display 1', value: 'val1', properties: { category: 'cat1', priority: '1' } },
    { text: 'Display 2', value: 'val2', properties: { category: 'cat2', priority: '2' } },
  ]);
});
});

View File

@@ -0,0 +1,46 @@
import { uniqBy } from 'lodash';
import { DataFrame, Field, MetricFindValue } from '@grafana/data';
// Field names that belong to the MetricFindValue contract itself and must
// therefore never be copied into the free-form `properties` bag.
const RESERVED_PROPERTY_NAMES = ['text', 'value', '__text', '__value'];
/**
 * Converts a query response frame into template-variable options.
 *
 * When the frame carries the special `__text` and `__value` fields, each row
 * becomes a text/value pair and all remaining non-reserved fields are attached
 * as extra `properties`. Otherwise every value of every field is flattened into
 * a text-only option (backwards-compatible behavior).
 *
 * The result is de-duplicated by `text`.
 */
export function transformMetricFindResponse(frame: DataFrame): MetricFindValue[] {
  const values: MetricFindValue[] = [];
  const textField = frame.fields.find((f) => f.name === '__text');
  const valueField = frame.fields.find((f) => f.name === '__value');

  if (textField && valueField) {
    for (let i = 0; i < textField.values.length; i++) {
      const entry: MetricFindValue = { text: '' + textField.values[i], value: '' + valueField.values[i] };
      const properties = buildProperties(frame.fields, i);
      if (properties) {
        entry.properties = properties;
      }
      values.push(entry);
    }
  } else {
    for (const field of frame.fields) {
      for (const value of field.values) {
        // Coerce to string so non-string fields (e.g. numbers) behave the same
        // as the __text/__value branch — MetricFindValue.text is a string.
        values.push({ text: '' + value });
      }
    }
  }

  return uniqBy(values, 'text');
}
/**
 * Collects all non-reserved field values of a single row into a string-keyed
 * map, or returns undefined when there is nothing extra to attach.
 */
function buildProperties(fields: Field[], rowIndex: number): Record<string, string> | undefined {
  const extras = fields.filter((f) => !RESERVED_PROPERTY_NAMES.includes(f.name));
  if (extras.length === 0) {
    return undefined;
  }

  const properties: Record<string, string> = {};
  for (const f of extras) {
    properties[f.name] = '' + f.values[rowIndex];
  }
  return properties;
}

View File

@@ -1,4 +1,4 @@
import { SQLOptions } from '@grafana/sql';
import { SQLOptions, SQLQuery } from '@grafana/sql';
export enum PostgresTLSModes {
disable = 'disable',
@@ -25,3 +25,7 @@ export interface PostgresOptions extends SQLOptions {
export interface SecureJsonData {
password?: string;
}
// Variable query model: a raw SQL string alongside the structured SQLQuery
// fields. NOTE(review): `query` appears to be the free-form SQL used by the
// variable editor — confirm against migrateVariableQuery's handling.
export interface VariableQuery extends SQLQuery {
query: string;
}

View File

@@ -0,0 +1,31 @@
import { from, map, Observable } from 'rxjs';
import { CustomVariableSupport, DataQueryRequest, MetricFindValue } from '@grafana/data';
import { applyQueryDefaults, SQLQuery } from '@grafana/sql';
import { VariableQueryEditor } from './VariableQueryEditor';
import { PostgresDatasource } from './datasource';
import { migrateVariableQuery } from './migrations';
/**
 * Custom variable support for the Postgres data source: runs the first target
 * as a metric-find query and surfaces the results as variable options.
 */
export class SQLVariableSupport extends CustomVariableSupport<PostgresDatasource, SQLQuery> {
  editor = VariableQueryEditor;

  constructor(private readonly datasource: PostgresDatasource) {
    super();
  }

  query(request: DataQueryRequest<SQLQuery>): Observable<{ data: MetricFindValue[] }> {
    const targets = request.targets ?? [];
    if (targets.length === 0) {
      // No targets: emit a single empty result set.
      const none: MetricFindValue[] = [];
      return from(Promise.resolve(none)).pipe(map((data) => ({ data })));
    }

    // Only the first target is considered; migrate legacy query shapes first.
    const migrated = migrateVariableQuery(targets[0]);
    const found = this.datasource.metricFindQuery(migrated, {
      scopedVars: request.scopedVars,
      range: request.range,
    });
    return from(found).pipe(map((data) => ({ data })));
  }

  getDefaultQuery(): Partial<SQLQuery> {
    return applyQueryDefaults({ refId: 'SQLVariableQueryEditor-VariableQuery' });
  }
}

View File

@@ -18,7 +18,6 @@ import { catchError, map } from 'rxjs/operators';
import {
AnnotationEvent,
DataFrame,
DataQueryRequest,
DataQueryResponse,
dateMath,
@@ -79,20 +78,6 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
// Called once per panel (graph)
query(options: DataQueryRequest<OpenTsdbQuery>): Observable<DataQueryResponse> {
if (config.featureToggles.opentsdbBackendMigration) {
const hasValidTargets = options.targets.some((target) => target.metric && !target.hide);
if (!hasValidTargets) {
return of({ data: [] });
}
return super.query(options).pipe(
map((response) => {
this._saveTagKeysFromFrames(response.data);
return response;
})
);
}
// migrate annotations
if (options.targets.some((target: OpenTsdbQuery) => target.fromAnnotations)) {
const streams: Array<Observable<DataQueryResponse>> = [];
@@ -280,15 +265,6 @@ export default class OpenTsDatasource extends DataSourceWithBackend<OpenTsdbQuer
this.tagKeys[metricData.metric] = tagKeys;
}
// Caches tag keys reported via frame.meta.custom.tagKeys, keyed by the frame
// name, into this.tagKeys for later lookups. Frames without a name or without
// tagKeys metadata are skipped.
_saveTagKeysFromFrames(frames: DataFrame[]) {
for (const frame of frames) {
const tagKeys = frame.meta?.custom?.tagKeys;
if (frame.name && tagKeys) {
this.tagKeys[frame.name] = tagKeys;
}
}
}
_performSuggestQuery(query: string, type: string) {
return this._get('/api/suggest', { type, q: query, max: this.lookupLimit }).pipe(
map((result) => {

View File

@@ -2350,8 +2350,11 @@
"label-tenant-sources": "Zdroje tenanta"
},
"rule-details-matching-instances": {
"button-show-all": "",
"showing-count": ""
"showing-count": "Zobrazuje se {{visibleItems}} z {{totalItems}} instancí",
"button-show-all_one": "Zobrazit všechny instance výstrah: {{totalItems}}",
"button-show-all_few": "Zobrazit všechny instance výstrah: {{totalItems}}",
"button-show-all_many": "Zobrazit všechny instance výstrah: {{totalItems}}",
"button-show-all_other": "Zobrazit všechny instance výstrah: {{totalItems}}"
},
"rule-editor": {
"get-content": {
@@ -8321,12 +8324,6 @@
"nextNYears_other": ""
}
},
"field": {
"fieldColor": {
"accessibleGroup": "",
"otherGroup": ""
}
},
"valueFormats": {
"categories": {
"acceleration": {
@@ -9327,8 +9324,6 @@
"actions-confirmation-label": "Potvrzovací zpráva",
"actions-confirmation-message": "Napište popisnou výzvu k potvrzení nebo zrušení akce.",
"footer-add-annotation": "Přidat vysvětlivku",
"footer-apply-series-as-filter": "",
"footer-apply-series-as-inverse-filter": "",
"footer-click-to-action": "Klikněte pro {{actionTitle}}",
"footer-click-to-navigate": "Klikněte pro otevření {{linkTitle}}",
"footer-filter-for-value": "Filtrovat podle hodnoty „{{value}}“",

View File

@@ -2334,8 +2334,9 @@
"label-tenant-sources": "Tenant-Quellen"
},
"rule-details-matching-instances": {
"button-show-all": "",
"showing-count": ""
"showing-count": "Angezeigt werden {{visibleItems}} von {{totalItems}} Instanzen",
"button-show-all_one": "Alle {{totalItems}} Warninstanzen anzeigen",
"button-show-all_other": "Alle {{totalItems}} Warninstanzen anzeigen"
},
"rule-editor": {
"get-content": {
@@ -8251,12 +8252,6 @@
"nextNYears_other": ""
}
},
"field": {
"fieldColor": {
"accessibleGroup": "",
"otherGroup": ""
}
},
"valueFormats": {
"categories": {
"acceleration": {
@@ -9257,8 +9252,6 @@
"actions-confirmation-label": "Bestätigungsnachricht",
"actions-confirmation-message": "Geben Sie eine beschreibende Eingabeaufforderung an, um die Aktion zu bestätigen oder abzubrechen.",
"footer-add-annotation": "Anmerkung hinzufügen",
"footer-apply-series-as-filter": "",
"footer-apply-series-as-inverse-filter": "",
"footer-click-to-action": "Klicken, um zu {{actionTitle}}",
"footer-click-to-navigate": "Zum Öffnen von {{linkTitle}} klicken",
"footer-filter-for-value": "Filtern nach „{{value}}“",

View File

@@ -3562,9 +3562,6 @@
"delete-modal-title": "Delete",
"delete-provisioned-folder": "Delete provisioned folder",
"deleting": "Deleting...",
"export-folder": "Export Folder to Repository",
"export-provisioned-resources": "Export Resources",
"export-to-repository-button": "Export to Repository",
"manage-permissions-button": "Manage permissions",
"move-button": "Move",
"move-modal-alert": "Moving this item may change its permissions.",
@@ -3595,29 +3592,6 @@
"delete-warning": "This will delete selected folders and their descendants. In total, this will affect:",
"error-deleting-resources": "Error deleting resources"
},
"bulk-export-resources-form": {
"button-cancel": "Cancel",
"button-export": "Export",
"button-exporting": "Exporting...",
"dashboards-count_one": "{{count}} dashboard",
"dashboards-count_other": "{{count}} dashboard",
"error-exporting-resources": "Error exporting resources",
"error-no-dashboards": "No dashboards selected. Only dashboards can be exported.",
"error-no-repository": "Please select a repository",
"export-total": "In total, this will export:",
"folders-count_one": "{{count}} folder",
"folders-count_other": "{{count}} folder",
"folders-info": "Folders in selection",
"folders-info-description": "Folders will be left behind. New folders will be created in the repository based on the resource folder structure.",
"no-items": "No items selected",
"path": "Path",
"path-description": "Path relative to the repository root (optional). Resources will be exported under this path.",
"path-description-with-repo": "Add a sub-path below to organize exported resources.",
"path-placeholder": "e.g., dashboards/",
"path-placeholder-with-repo": "e.g., dashboards/team-a/",
"repository": "Repository",
"repository-placeholder": "Select a repository"
},
"bulk-move-resources-form": {
"button-cancel": "Cancel",
"button-move": "Move",
@@ -3683,8 +3657,6 @@
"folder-actions-button": {
"delete": "Delete this folder",
"delete-folder-error": "Error deleting folder. Please try again later.",
"export": "Export to Repository",
"export-folder-error": "Error collecting dashboards. Please try again later.",
"folder-actions": "Folder actions",
"manage-permissions": "Manage permissions",
"move": "Move this folder"
@@ -4809,8 +4781,6 @@
"remove": "Remove {{typeName}}",
"row-title": "Change row title",
"switch-layout": "Switch layout",
"switch-layout-row": "Switch layout",
"switch-layout-tab": "Switch layout",
"tab-title": "Change tab title"
},
"edit-pane": {
@@ -11813,7 +11783,6 @@
"folder-repository-list": {
"all-resources-managed_one": "All {{count}} resource is managed",
"all-resources-managed_other": "All {{count}} resources are managed",
"export-remaining-resources-button": "Export remaining resources",
"no-results-matching-your-query": "No results matching your query",
"partial-managed": "{{managedCount}}/{{resourceCount}} resources managed by Git sync.",
"placeholder-search": "Search",
@@ -12791,7 +12760,6 @@
"menu": {
"export-image-title": "Export as image",
"export-json-title": "Export as JSON",
"export-to-repository-title": "Export to Repository",
"share-externally-title": "Share externally",
"share-internally-title": "Share internally",
"share-snapshot-title": "Share snapshot"
@@ -12819,8 +12787,6 @@
"export": {
"back-button": "Back to export config",
"cancel-button": "Cancel",
"export-to-repository-button": "Export to Repository",
"export-to-repository-title": "Export Dashboard to Repository",
"info-text": "Export this dashboard.",
"loading": "Loading...",
"save-button": "Save to file",

Some files were not shown because too many files have changed in this diff Show More